1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2019 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
   47          information common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
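/* To make the relationship between these entries concrete: a frame
   section (.debug_frame or .eh_frame) as emitted by this file is a
   sequence of CIEs, each followed by the FDEs that refer back to it.
   Roughly (a sketch for orientation only; the precise fields are what
   output_call_frame_info and output_fde below produce):

     CIE:  length, CIE id, version, augmentation (e.g. "zR"),
           code/data alignment factors, return address column,
           initial DW_CFA_... instructions
     FDE:  length, offset back to its CIE, initial location (LFB label),
           address range (LFE - LFB), DW_CFA_... instructions recording
           the effect of the prologue on the CFA and saved registers.  */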
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98 #include "file-prefix-map.h" /* remap_debug_filename() */
99
100 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
101 int, bool);
102 static rtx_insn *last_var_location_insn;
103 static rtx_insn *cached_next_real_insn;
104 static void dwarf2out_decl (tree);
105 static bool is_redundant_typedef (const_tree);
106
107 #ifndef XCOFF_DEBUGGING_INFO
108 #define XCOFF_DEBUGGING_INFO 0
109 #endif
110
111 #ifndef HAVE_XCOFF_DWARF_EXTRAS
112 #define HAVE_XCOFF_DWARF_EXTRAS 0
113 #endif
114
115 #ifdef VMS_DEBUGGING_INFO
116 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
117
118 /* Define this macro to be a nonzero value if the directory specifications
119 which are output in the debug info should end with a separator. */
120 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
121 /* Define this macro to evaluate to a nonzero value if GCC should refrain
122 from generating indirect strings in DWARF2 debug information, for instance
123 if your target is stuck with an old version of GDB that is unable to
124 process them properly or uses VMS Debug. */
125 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
126 #else
127 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
128 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
129 #endif
130
131 /* ??? Poison these here until it can be done generically. They've been
132 totally replaced in this file; make sure it stays that way. */
133 #undef DWARF2_UNWIND_INFO
134 #undef DWARF2_FRAME_INFO
135 #if (GCC_VERSION >= 3000)
136 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
137 #endif
138
139 /* The size of the target's pointer type. */
140 #ifndef PTR_SIZE
141 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
142 #endif
143
144 /* Array of RTXes referenced by the debugging information, which therefore
145 must be kept around forever. */
146 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
147
148 /* A pointer to the base of a list of incomplete types which might be
149 completed at some later time. incomplete_types_list needs to be a
  150      vec<tree, va_gc> * because we want to tell the garbage collector about
151 it. */
152 static GTY(()) vec<tree, va_gc> *incomplete_types;
153
154 /* Pointers to various DWARF2 sections. */
155 static GTY(()) section *debug_info_section;
156 static GTY(()) section *debug_skeleton_info_section;
157 static GTY(()) section *debug_abbrev_section;
158 static GTY(()) section *debug_skeleton_abbrev_section;
159 static GTY(()) section *debug_aranges_section;
160 static GTY(()) section *debug_addr_section;
161 static GTY(()) section *debug_macinfo_section;
162 static const char *debug_macinfo_section_name;
163 static unsigned macinfo_label_base = 1;
164 static GTY(()) section *debug_line_section;
165 static GTY(()) section *debug_skeleton_line_section;
166 static GTY(()) section *debug_loc_section;
167 static GTY(()) section *debug_pubnames_section;
168 static GTY(()) section *debug_pubtypes_section;
169 static GTY(()) section *debug_str_section;
170 static GTY(()) section *debug_line_str_section;
171 static GTY(()) section *debug_str_dwo_section;
172 static GTY(()) section *debug_str_offsets_section;
173 static GTY(()) section *debug_ranges_section;
174 static GTY(()) section *debug_frame_section;
175
176 /* Maximum size (in bytes) of an artificially generated label. */
177 #define MAX_ARTIFICIAL_LABEL_BYTES 40
178
179 /* According to the (draft) DWARF 3 specification, the initial length
180 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
181 bytes are 0xffffffff, followed by the length stored in the next 8
182 bytes.
183
184 However, the SGI/MIPS ABI uses an initial length which is equal to
185 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
186
187 #ifndef DWARF_INITIAL_LENGTH_SIZE
188 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
189 #endif
190
191 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
192 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
193 #endif
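/* For example, with 32-bit DWARF (DWARF_OFFSET_SIZE == 4) a contribution
   whose body is 0x2a bytes long begins with the four length bytes

     2a 00 00 00                           (little-endian)

   while with 64-bit DWARF the same contribution begins with the 12-byte
   escape sequence

     ff ff ff ff  2a 00 00 00 00 00 00 00

   i.e. 0xffffffff followed by the real length in the next 8 bytes; that
   escape is what the "Initial length escape value" comments below refer
   to.  */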
194
195 /* Round SIZE up to the nearest BOUNDARY. */
196 #define DWARF_ROUND(SIZE,BOUNDARY) \
197 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
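/* E.g. DWARF_ROUND (5, 4) == 8 and DWARF_ROUND (8, 4) == 8: the macro
   rounds SIZE up to the next multiple of BOUNDARY (BOUNDARY is assumed
   positive, which all uses in this file satisfy).  */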
198
199 /* CIE identifier. */
200 #if HOST_BITS_PER_WIDE_INT >= 64
201 #define DWARF_CIE_ID \
202 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
203 #else
204 #define DWARF_CIE_ID DW_CIE_ID
205 #endif
206
207
208 /* A vector for a table that contains frame description
209 information for each routine. */
210 #define NOT_INDEXED (-1U)
211 #define NO_INDEX_ASSIGNED (-2U)
212
213 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
214
215 struct GTY((for_user)) indirect_string_node {
216 const char *str;
217 unsigned int refcount;
218 enum dwarf_form form;
219 char *label;
220 unsigned int index;
221 };
222
223 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
224 {
225 typedef const char *compare_type;
226
227 static hashval_t hash (indirect_string_node *);
228 static bool equal (indirect_string_node *, const char *);
229 };
230
231 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
232
233 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
234
235 /* With split_debug_info, both the comp_dir and dwo_name go in the
236 main object file, rather than the dwo, similar to the force_direct
237 parameter elsewhere but with additional complications:
238
239 1) The string is needed in both the main object file and the dwo.
240 That is, the comp_dir and dwo_name will appear in both places.
241
242 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
243 DW_FORM_line_strp or DW_FORM_strx/GNU_str_index.
244
245 3) GCC chooses the form to use late, depending on the size and
246 reference count.
247
  248      Rather than forcing all the debug string handling functions and
249 callers to deal with these complications, simply use a separate,
250 special-cased string table for any attribute that should go in the
251 main object file. This limits the complexity to just the places
252 that need it. */
253
254 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
255
256 static GTY(()) int dw2_string_counter;
257
258 /* True if the compilation unit places functions in more than one section. */
259 static GTY(()) bool have_multiple_function_sections = false;
260
261 /* Whether the default text and cold text sections have been used at all. */
262 static GTY(()) bool text_section_used = false;
263 static GTY(()) bool cold_text_section_used = false;
264
265 /* The default cold text section. */
266 static GTY(()) section *cold_text_section;
267
268 /* The DIE for C++14 'auto' in a function return type. */
269 static GTY(()) dw_die_ref auto_die;
270
271 /* The DIE for C++14 'decltype(auto)' in a function return type. */
272 static GTY(()) dw_die_ref decltype_auto_die;
273
274 /* Forward declarations for functions defined in this file. */
275
276 static void output_call_frame_info (int);
277 static void dwarf2out_note_section_used (void);
278
279 /* Personality decl of current unit. Used only when assembler does not support
280 personality CFI. */
281 static GTY(()) rtx current_unit_personality;
282
283 /* Whether an eh_frame section is required. */
284 static GTY(()) bool do_eh_frame = false;
285
286 /* .debug_rnglists next index. */
287 static unsigned int rnglist_idx;
288
289 /* Data and reference forms for relocatable data. */
290 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
291 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
292
293 #ifndef DEBUG_FRAME_SECTION
294 #define DEBUG_FRAME_SECTION ".debug_frame"
295 #endif
296
297 #ifndef FUNC_BEGIN_LABEL
298 #define FUNC_BEGIN_LABEL "LFB"
299 #endif
300
301 #ifndef FUNC_SECOND_SECT_LABEL
302 #define FUNC_SECOND_SECT_LABEL "LFSB"
303 #endif
304
305 #ifndef FUNC_END_LABEL
306 #define FUNC_END_LABEL "LFE"
307 #endif
308
309 #ifndef PROLOGUE_END_LABEL
310 #define PROLOGUE_END_LABEL "LPE"
311 #endif
312
313 #ifndef EPILOGUE_BEGIN_LABEL
314 #define EPILOGUE_BEGIN_LABEL "LEB"
315 #endif
316
317 #ifndef FRAME_BEGIN_LABEL
318 #define FRAME_BEGIN_LABEL "Lframe"
319 #endif
320 #define CIE_AFTER_SIZE_LABEL "LSCIE"
321 #define CIE_END_LABEL "LECIE"
322 #define FDE_LABEL "LSFDE"
323 #define FDE_AFTER_SIZE_LABEL "LASFDE"
324 #define FDE_END_LABEL "LEFDE"
325 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
326 #define LINE_NUMBER_END_LABEL "LELT"
327 #define LN_PROLOG_AS_LABEL "LASLTP"
328 #define LN_PROLOG_END_LABEL "LELTP"
329 #define DIE_LABEL_PREFIX "DW"
330 \f
331 /* Match the base name of a file to the base name of a compilation unit. */
332
333 static int
334 matches_main_base (const char *path)
335 {
336 /* Cache the last query. */
337 static const char *last_path = NULL;
338 static int last_match = 0;
339 if (path != last_path)
340 {
341 const char *base;
342 int length = base_of_path (path, &base);
343 last_path = path;
344 last_match = (length == main_input_baselength
345 && memcmp (base, main_input_basename, length) == 0);
346 }
347 return last_match;
348 }
349
350 #ifdef DEBUG_DEBUG_STRUCT
351
352 static int
353 dump_struct_debug (tree type, enum debug_info_usage usage,
354 enum debug_struct_file criterion, int generic,
355 int matches, int result)
356 {
357 /* Find the type name. */
358 tree type_decl = TYPE_STUB_DECL (type);
359 tree t = type_decl;
360 const char *name = 0;
361 if (TREE_CODE (t) == TYPE_DECL)
362 t = DECL_NAME (t);
363 if (t)
364 name = IDENTIFIER_POINTER (t);
365
366 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
367 criterion,
368 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
369 matches ? "bas" : "hdr",
370 generic ? "gen" : "ord",
371 usage == DINFO_USAGE_DFN ? ";" :
372 usage == DINFO_USAGE_DIR_USE ? "." : "*",
373 result,
374 (void*) type_decl, name);
375 return result;
376 }
377 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
378 dump_struct_debug (type, usage, criterion, generic, matches, result)
379
380 #else
381
382 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
383 (result)
384
385 #endif
386
387 /* Get the number of HOST_WIDE_INTs needed to represent the precision
388 of the number. Some constants have a large uniform precision, so
389 we get the precision needed for the actual value of the number. */
390
391 static unsigned int
392 get_full_len (const wide_int &op)
393 {
394 int prec = wi::min_precision (op, UNSIGNED);
395 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
396 / HOST_BITS_PER_WIDE_INT);
397 }
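/* For instance, with 64-bit HOST_WIDE_INTs a value whose minimum unsigned
   precision is 1..64 bits needs one element, one whose precision is
   65..128 bits needs two, and so on.  */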
398
399 static bool
400 should_emit_struct_debug (tree type, enum debug_info_usage usage)
401 {
402 enum debug_struct_file criterion;
403 tree type_decl;
404 bool generic = lang_hooks.types.generic_p (type);
405
406 if (generic)
407 criterion = debug_struct_generic[usage];
408 else
409 criterion = debug_struct_ordinary[usage];
410
411 if (criterion == DINFO_STRUCT_FILE_NONE)
412 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
413 if (criterion == DINFO_STRUCT_FILE_ANY)
414 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
415
416 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
417
418 if (type_decl != NULL)
419 {
420 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
421 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
422
423 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
424 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
425 }
426
427 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
428 }
429 \f
430 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
431 switch to the data section instead, and write out a synthetic start label
432 for collect2 the first time around. */
433
434 static void
435 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
436 {
437 if (eh_frame_section == 0)
438 {
439 int flags;
440
441 if (EH_TABLES_CAN_BE_READ_ONLY)
442 {
443 int fde_encoding;
444 int per_encoding;
445 int lsda_encoding;
446
447 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
448 /*global=*/0);
449 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
450 /*global=*/1);
451 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
452 /*global=*/0);
453 flags = ((! flag_pic
454 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
455 && (fde_encoding & 0x70) != DW_EH_PE_aligned
456 && (per_encoding & 0x70) != DW_EH_PE_absptr
457 && (per_encoding & 0x70) != DW_EH_PE_aligned
458 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
459 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
460 ? 0 : SECTION_WRITE);
461 }
462 else
463 flags = SECTION_WRITE;
464
465 #ifdef EH_FRAME_SECTION_NAME
466 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
467 #else
468 eh_frame_section = ((flags == SECTION_WRITE)
469 ? data_section : readonly_data_section);
470 #endif /* EH_FRAME_SECTION_NAME */
471 }
472
473 switch_to_section (eh_frame_section);
474
475 #ifdef EH_FRAME_THROUGH_COLLECT2
476 /* We have no special eh_frame section. Emit special labels to guide
477 collect2. */
478 if (!back)
479 {
480 tree label = get_file_function_name ("F");
481 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
482 targetm.asm_out.globalize_label (asm_out_file,
483 IDENTIFIER_POINTER (label));
484 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
485 }
486 #endif
487 }
488
489 /* Switch [BACK] to the eh or debug frame table section, depending on
490 FOR_EH. */
491
492 static void
493 switch_to_frame_table_section (int for_eh, bool back)
494 {
495 if (for_eh)
496 switch_to_eh_frame_section (back);
497 else
498 {
499 if (!debug_frame_section)
500 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
501 SECTION_DEBUG, NULL);
502 switch_to_section (debug_frame_section);
503 }
504 }
505
506 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
507
508 enum dw_cfi_oprnd_type
509 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
510 {
511 switch (cfi)
512 {
513 case DW_CFA_nop:
514 case DW_CFA_GNU_window_save:
515 case DW_CFA_remember_state:
516 case DW_CFA_restore_state:
517 return dw_cfi_oprnd_unused;
518
519 case DW_CFA_set_loc:
520 case DW_CFA_advance_loc1:
521 case DW_CFA_advance_loc2:
522 case DW_CFA_advance_loc4:
523 case DW_CFA_MIPS_advance_loc8:
524 return dw_cfi_oprnd_addr;
525
526 case DW_CFA_offset:
527 case DW_CFA_offset_extended:
528 case DW_CFA_def_cfa:
529 case DW_CFA_offset_extended_sf:
530 case DW_CFA_def_cfa_sf:
531 case DW_CFA_restore:
532 case DW_CFA_restore_extended:
533 case DW_CFA_undefined:
534 case DW_CFA_same_value:
535 case DW_CFA_def_cfa_register:
536 case DW_CFA_register:
537 case DW_CFA_expression:
538 case DW_CFA_val_expression:
539 return dw_cfi_oprnd_reg_num;
540
541 case DW_CFA_def_cfa_offset:
542 case DW_CFA_GNU_args_size:
543 case DW_CFA_def_cfa_offset_sf:
544 return dw_cfi_oprnd_offset;
545
546 case DW_CFA_def_cfa_expression:
547 return dw_cfi_oprnd_loc;
548
549 default:
550 gcc_unreachable ();
551 }
552 }
553
554 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
555
556 enum dw_cfi_oprnd_type
557 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
558 {
559 switch (cfi)
560 {
561 case DW_CFA_def_cfa:
562 case DW_CFA_def_cfa_sf:
563 case DW_CFA_offset:
564 case DW_CFA_offset_extended_sf:
565 case DW_CFA_offset_extended:
566 return dw_cfi_oprnd_offset;
567
568 case DW_CFA_register:
569 return dw_cfi_oprnd_reg_num;
570
571 case DW_CFA_expression:
572 case DW_CFA_val_expression:
573 return dw_cfi_oprnd_loc;
574
575 case DW_CFA_def_cfa_expression:
576 return dw_cfi_oprnd_cfa_loc;
577
578 default:
579 return dw_cfi_oprnd_unused;
580 }
581 }
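/* As a concrete instance of the two descriptors above: DW_CFA_offset
   carries a register number in operand 1 and a factored offset in
   operand 2, so dw_cfi_oprnd1_desc returns dw_cfi_oprnd_reg_num for it
   and dw_cfi_oprnd2_desc returns dw_cfi_oprnd_offset, telling the GTY
   machinery that neither operand holds a location expression that would
   need marking.  */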
582
583 /* Output one FDE. */
584
585 static void
586 output_fde (dw_fde_ref fde, bool for_eh, bool second,
587 char *section_start_label, int fde_encoding, char *augmentation,
588 bool any_lsda_needed, int lsda_encoding)
589 {
590 const char *begin, *end;
591 static unsigned int j;
592 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
593
594 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
595 /* empty */ 0);
596 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
597 for_eh + j);
598 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
599 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
600 if (!XCOFF_DEBUGGING_INFO || for_eh)
601 {
602 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
603 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
604 " indicating 64-bit DWARF extension");
605 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
606 "FDE Length");
607 }
608 ASM_OUTPUT_LABEL (asm_out_file, l1);
609
610 if (for_eh)
611 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
612 else
613 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
614 debug_frame_section, "FDE CIE offset");
615
616 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
617 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
618
619 if (for_eh)
620 {
621 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
622 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
623 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
624 "FDE initial location");
625 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
626 end, begin, "FDE address range");
627 }
628 else
629 {
630 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
631 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
632 }
633
634 if (augmentation[0])
635 {
636 if (any_lsda_needed)
637 {
638 int size = size_of_encoded_value (lsda_encoding);
639
640 if (lsda_encoding == DW_EH_PE_aligned)
641 {
642 int offset = ( 4 /* Length */
643 + 4 /* CIE offset */
644 + 2 * size_of_encoded_value (fde_encoding)
645 + 1 /* Augmentation size */ );
646 int pad = -offset & (PTR_SIZE - 1);
647
648 size += pad;
649 gcc_assert (size_of_uleb128 (size) == 1);
650 }
651
652 dw2_asm_output_data_uleb128 (size, "Augmentation size");
653
654 if (fde->uses_eh_lsda)
655 {
656 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
657 fde->funcdef_number);
658 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
659 gen_rtx_SYMBOL_REF (Pmode, l1),
660 false,
661 "Language Specific Data Area");
662 }
663 else
664 {
665 if (lsda_encoding == DW_EH_PE_aligned)
666 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
667 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
668 "Language Specific Data Area (none)");
669 }
670 }
671 else
672 dw2_asm_output_data_uleb128 (0, "Augmentation size");
673 }
674
675 /* Loop through the Call Frame Instructions associated with this FDE. */
676 fde->dw_fde_current_label = begin;
677 {
678 size_t from, until, i;
679
680 from = 0;
681 until = vec_safe_length (fde->dw_fde_cfi);
682
683 if (fde->dw_fde_second_begin == NULL)
684 ;
685 else if (!second)
686 until = fde->dw_fde_switch_cfi_index;
687 else
688 from = fde->dw_fde_switch_cfi_index;
689
690 for (i = from; i < until; i++)
691 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
692 }
693
694 /* If we are to emit a ref/link from function bodies to their frame tables,
695 do it now. This is typically performed to make sure that tables
696 associated with functions are dragged with them and not discarded in
  697      associated with functions are dragged with them and not discarded by
  698      link-time garbage collection.  We need to do this on a per-function basis to
699
700 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
701 /* Switch to the function section, emit the ref to the tables, and
702 switch *back* into the table section. */
703 switch_to_section (function_section (fde->decl));
704 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
705 switch_to_frame_table_section (for_eh, true);
706 #endif
707
708 /* Pad the FDE out to an address sized boundary. */
709 ASM_OUTPUT_ALIGN (asm_out_file,
710 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
711 ASM_OUTPUT_LABEL (asm_out_file, l2);
712
713 j += 2;
714 }
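/* For orientation, the assembly produced by output_fde for one function
   in the .eh_frame case typically looks something like the sketch below
   (labels, encodings and comment syntax vary by target and configuration):

     .LSFDE1:
          .long   .LEFDE1-.LASFDE1        # FDE Length
     .LASFDE1:
          .long   .LASFDE1-.Lframe1       # FDE CIE offset
          .long   .LFB0-.                 # FDE initial location (pc-relative)
          .long   .LFE0-.LFB0             # FDE address range
          .uleb128 0                      # Augmentation size
          ... call frame instructions ...
          .align 8
     .LEFDE1:  */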
715
716 /* Return true if frame description entry FDE is needed for EH. */
717
718 static bool
719 fde_needed_for_eh_p (dw_fde_ref fde)
720 {
721 if (flag_asynchronous_unwind_tables)
722 return true;
723
724 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
725 return true;
726
727 if (fde->uses_eh_lsda)
728 return true;
729
730 /* If exceptions are enabled, we have collected nothrow info. */
731 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
732 return false;
733
734 return true;
735 }
736
737 /* Output the call frame information used to record information
738 that relates to calculating the frame pointer, and records the
739 location of saved registers. */
740
741 static void
742 output_call_frame_info (int for_eh)
743 {
744 unsigned int i;
745 dw_fde_ref fde;
746 dw_cfi_ref cfi;
747 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
748 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
749 bool any_lsda_needed = false;
750 char augmentation[6];
751 int augmentation_size;
752 int fde_encoding = DW_EH_PE_absptr;
753 int per_encoding = DW_EH_PE_absptr;
754 int lsda_encoding = DW_EH_PE_absptr;
755 int return_reg;
756 rtx personality = NULL;
757 int dw_cie_version;
758
759 /* Don't emit a CIE if there won't be any FDEs. */
760 if (!fde_vec)
761 return;
762
763 /* Nothing to do if the assembler's doing it all. */
764 if (dwarf2out_do_cfi_asm ())
765 return;
766
767 /* If we don't have any functions we'll want to unwind out of, don't emit
768 any EH unwind information. If we make FDEs linkonce, we may have to
769 emit an empty label for an FDE that wouldn't otherwise be emitted. We
770 want to avoid having an FDE kept around when the function it refers to
771 is discarded. Example where this matters: a primary function template
772 in C++ requires EH information, an explicit specialization doesn't. */
773 if (for_eh)
774 {
775 bool any_eh_needed = false;
776
777 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
778 {
779 if (fde->uses_eh_lsda)
780 any_eh_needed = any_lsda_needed = true;
781 else if (fde_needed_for_eh_p (fde))
782 any_eh_needed = true;
783 else if (TARGET_USES_WEAK_UNWIND_INFO)
784 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
785 }
786
787 if (!any_eh_needed)
788 return;
789 }
790
791 /* We're going to be generating comments, so turn on app. */
792 if (flag_debug_asm)
793 app_enable ();
794
795 /* Switch to the proper frame section, first time. */
796 switch_to_frame_table_section (for_eh, false);
797
798 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
799 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
800
801 /* Output the CIE. */
802 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
803 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
804 if (!XCOFF_DEBUGGING_INFO || for_eh)
805 {
806 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
807 dw2_asm_output_data (4, 0xffffffff,
808 "Initial length escape value indicating 64-bit DWARF extension");
809 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
810 "Length of Common Information Entry");
811 }
812 ASM_OUTPUT_LABEL (asm_out_file, l1);
813
814 /* Now that the CIE pointer is PC-relative for EH,
815 use 0 to identify the CIE. */
816 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
817 (for_eh ? 0 : DWARF_CIE_ID),
818 "CIE Identifier Tag");
819
820 /* Use the CIE version 3 for DWARF3; allow DWARF2 to continue to
821 use CIE version 1, unless that would produce incorrect results
822 due to overflowing the return register column. */
823 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
824 dw_cie_version = 1;
825 if (return_reg >= 256 || dwarf_version > 2)
826 dw_cie_version = 3;
827 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
828
829 augmentation[0] = 0;
830 augmentation_size = 0;
831
832 personality = current_unit_personality;
833 if (for_eh)
834 {
835 char *p;
836
837 /* Augmentation:
838 z Indicates that a uleb128 is present to size the
839 augmentation section.
840 L Indicates the encoding (and thus presence) of
841 an LSDA pointer in the FDE augmentation.
842 R Indicates a non-default pointer encoding for
843 FDE code pointers.
844 P Indicates the presence of an encoding + language
845 personality routine in the CIE augmentation. */
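      /* So a CIE for a unit that has a personality routine, functions with
	 LSDAs and a non-absolute FDE encoding ends up with the augmentation
	 string "zPLR"; a unit needing none of these keeps the empty string
	 and no augmentation size byte is emitted at all.  */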
846
847 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
848 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
849 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
850
851 p = augmentation + 1;
852 if (personality)
853 {
854 *p++ = 'P';
855 augmentation_size += 1 + size_of_encoded_value (per_encoding);
856 assemble_external_libcall (personality);
857 }
858 if (any_lsda_needed)
859 {
860 *p++ = 'L';
861 augmentation_size += 1;
862 }
863 if (fde_encoding != DW_EH_PE_absptr)
864 {
865 *p++ = 'R';
866 augmentation_size += 1;
867 }
868 if (p > augmentation + 1)
869 {
870 augmentation[0] = 'z';
871 *p = '\0';
872 }
873
874 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
875 if (personality && per_encoding == DW_EH_PE_aligned)
876 {
877 int offset = ( 4 /* Length */
878 + 4 /* CIE Id */
879 + 1 /* CIE version */
880 + strlen (augmentation) + 1 /* Augmentation */
881 + size_of_uleb128 (1) /* Code alignment */
882 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
883 + 1 /* RA column */
884 + 1 /* Augmentation size */
885 + 1 /* Personality encoding */ );
886 int pad = -offset & (PTR_SIZE - 1);
887
888 augmentation_size += pad;
889
890 /* Augmentations should be small, so there's scarce need to
891 iterate for a solution. Die if we exceed one uleb128 byte. */
892 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
893 }
894 }
895
896 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
897 if (dw_cie_version >= 4)
898 {
899 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
900 dw2_asm_output_data (1, 0, "CIE Segment Size");
901 }
902 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
903 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
904 "CIE Data Alignment Factor");
905
906 if (dw_cie_version == 1)
907 dw2_asm_output_data (1, return_reg, "CIE RA Column");
908 else
909 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
910
911 if (augmentation[0])
912 {
913 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
914 if (personality)
915 {
916 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
917 eh_data_format_name (per_encoding));
918 dw2_asm_output_encoded_addr_rtx (per_encoding,
919 personality,
920 true, NULL);
921 }
922
923 if (any_lsda_needed)
924 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
925 eh_data_format_name (lsda_encoding));
926
927 if (fde_encoding != DW_EH_PE_absptr)
928 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
929 eh_data_format_name (fde_encoding));
930 }
931
932 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
933 output_cfi (cfi, NULL, for_eh);
934
935 /* Pad the CIE out to an address sized boundary. */
936 ASM_OUTPUT_ALIGN (asm_out_file,
937 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
938 ASM_OUTPUT_LABEL (asm_out_file, l2);
939
940 /* Loop through all of the FDE's. */
941 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
942 {
943 unsigned int k;
944
945 /* Don't emit EH unwind info for leaf functions that don't need it. */
946 if (for_eh && !fde_needed_for_eh_p (fde))
947 continue;
948
949 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
950 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
951 augmentation, any_lsda_needed, lsda_encoding);
952 }
953
954 if (for_eh && targetm.terminate_dw2_eh_frame_info)
955 dw2_asm_output_data (4, 0, "End of Table");
956
957 /* Turn off app to make assembly quicker. */
958 if (flag_debug_asm)
959 app_disable ();
960 }
961
962 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
963
964 static void
965 dwarf2out_do_cfi_startproc (bool second)
966 {
967 int enc;
968 rtx ref;
969
970 fprintf (asm_out_file, "\t.cfi_startproc\n");
971
972 targetm.asm_out.post_cfi_startproc (asm_out_file, current_function_decl);
973
974 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
975 eh unwinders. */
976 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
977 return;
978
979 rtx personality = get_personality_function (current_function_decl);
980
981 if (personality)
982 {
983 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
984 ref = personality;
985
986 /* ??? The GAS support isn't entirely consistent. We have to
987 handle indirect support ourselves, but PC-relative is done
988 in the assembler. Further, the assembler can't handle any
989 of the weirder relocation types. */
990 if (enc & DW_EH_PE_indirect)
991 ref = dw2_force_const_mem (ref, true);
992
993 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
994 output_addr_const (asm_out_file, ref);
995 fputc ('\n', asm_out_file);
996 }
997
998 if (crtl->uses_eh_lsda)
999 {
1000 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
1001
1002 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
1003 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
1004 current_function_funcdef_no);
1005 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
1006 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1007
1008 if (enc & DW_EH_PE_indirect)
1009 ref = dw2_force_const_mem (ref, true);
1010
1011 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1012 output_addr_const (asm_out_file, ref);
1013 fputc ('\n', asm_out_file);
1014 }
1015 }
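/* On a typical ELF target the function above ends up emitting something
   like the following (a sketch: the 0x9b / 0x1b values are just the usual
   indirect pc-relative sdata4 encodings seen on x86-64 GNU/Linux and are
   entirely target-dependent):

     .cfi_startproc
     .cfi_personality 0x9b,DW.ref.__gxx_personality_v0
     .cfi_lsda 0x1b,.LLSDA0

   with the .cfi_lsda line present only for functions that actually have
   a language specific data area.  */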
1016
1017 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1018 this allocation may be done before pass_final. */
1019
1020 dw_fde_ref
1021 dwarf2out_alloc_current_fde (void)
1022 {
1023 dw_fde_ref fde;
1024
1025 fde = ggc_cleared_alloc<dw_fde_node> ();
1026 fde->decl = current_function_decl;
1027 fde->funcdef_number = current_function_funcdef_no;
1028 fde->fde_index = vec_safe_length (fde_vec);
1029 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1030 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1031 fde->nothrow = crtl->nothrow;
1032 fde->drap_reg = INVALID_REGNUM;
1033 fde->vdrap_reg = INVALID_REGNUM;
1034
1035 /* Record the FDE associated with this function. */
1036 cfun->fde = fde;
1037 vec_safe_push (fde_vec, fde);
1038
1039 return fde;
1040 }
1041
1042 /* Output a marker (i.e. a label) for the beginning of a function, before
1043 the prologue. */
1044
1045 void
1046 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1047 unsigned int column ATTRIBUTE_UNUSED,
1048 const char *file ATTRIBUTE_UNUSED)
1049 {
1050 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1051 char * dup_label;
1052 dw_fde_ref fde;
1053 section *fnsec;
1054 bool do_frame;
1055
1056 current_function_func_begin_label = NULL;
1057
1058 do_frame = dwarf2out_do_frame ();
1059
1060 /* ??? current_function_func_begin_label is also used by except.c for
1061 call-site information. We must emit this label if it might be used. */
1062 if (!do_frame
1063 && (!flag_exceptions
1064 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1065 return;
1066
1067 fnsec = function_section (current_function_decl);
1068 switch_to_section (fnsec);
1069 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1070 current_function_funcdef_no);
1071 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1072 current_function_funcdef_no);
1073 dup_label = xstrdup (label);
1074 current_function_func_begin_label = dup_label;
1075
1076 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1077 if (!do_frame)
1078 return;
1079
1080 /* Unlike the debug version, the EH version of frame unwind info is a per-
1081 function setting so we need to record whether we need it for the unit. */
1082 do_eh_frame |= dwarf2out_do_eh_frame ();
1083
1084 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1085 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1086 would include pass_dwarf2_frame. If we've not created the FDE yet,
1087 do so now. */
1088 fde = cfun->fde;
1089 if (fde == NULL)
1090 fde = dwarf2out_alloc_current_fde ();
1091
1092 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1093 fde->dw_fde_begin = dup_label;
1094 fde->dw_fde_current_label = dup_label;
1095 fde->in_std_section = (fnsec == text_section
1096 || (cold_text_section && fnsec == cold_text_section));
1097
1098 /* We only want to output line number information for the genuine dwarf2
1099 prologue case, not the eh frame case. */
1100 #ifdef DWARF2_DEBUGGING_INFO
1101 if (file)
1102 dwarf2out_source_line (line, column, file, 0, true);
1103 #endif
1104
1105 if (dwarf2out_do_cfi_asm ())
1106 dwarf2out_do_cfi_startproc (false);
1107 else
1108 {
1109 rtx personality = get_personality_function (current_function_decl);
1110 if (!current_unit_personality)
1111 current_unit_personality = personality;
1112
1113 /* We cannot keep a current personality per function as without CFI
1114 asm, at the point where we emit the CFI data, there is no current
1115 function anymore. */
1116 if (personality && current_unit_personality != personality)
1117 sorry ("multiple EH personalities are supported only with assemblers "
1118 "supporting .cfi_personality directive");
1119 }
1120 }
1121
1122 /* Output a marker (i.e. a label) for the end of the generated code
1123 for a function prologue. This gets called *after* the prologue code has
1124 been generated. */
1125
1126 void
1127 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1128 const char *file ATTRIBUTE_UNUSED)
1129 {
1130 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1131
 1132    /* Output a label to mark the end of the prologue code generated for
 1133       this function.  */
1134 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1135 current_function_funcdef_no);
1136 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1137 current_function_funcdef_no);
1138 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1139 }
1140
1141 /* Output a marker (i.e. a label) for the beginning of the generated code
 1142    for a function epilogue.  This gets called *before* the epilogue code has
1143 been generated. */
1144
1145 void
1146 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1147 const char *file ATTRIBUTE_UNUSED)
1148 {
1149 dw_fde_ref fde = cfun->fde;
1150 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1151
1152 if (fde->dw_fde_vms_begin_epilogue)
1153 return;
1154
 1155    /* Output a label to mark the beginning of the epilogue code generated
 1156       for this function.  */
1157 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1158 current_function_funcdef_no);
1159 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1160 current_function_funcdef_no);
1161 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1162 }
1163
1164 /* Output a marker (i.e. a label) for the absolute end of the generated code
1165 for a function definition. This gets called *after* the epilogue code has
1166 been generated. */
1167
1168 void
1169 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1170 const char *file ATTRIBUTE_UNUSED)
1171 {
1172 dw_fde_ref fde;
1173 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1174
1175 last_var_location_insn = NULL;
1176 cached_next_real_insn = NULL;
1177
1178 if (dwarf2out_do_cfi_asm ())
1179 fprintf (asm_out_file, "\t.cfi_endproc\n");
1180
1181 /* Output a label to mark the endpoint of the code generated for this
1182 function. */
1183 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1184 current_function_funcdef_no);
1185 ASM_OUTPUT_LABEL (asm_out_file, label);
1186 fde = cfun->fde;
1187 gcc_assert (fde != NULL);
1188 if (fde->dw_fde_second_begin == NULL)
1189 fde->dw_fde_end = xstrdup (label);
1190 }
1191
1192 void
1193 dwarf2out_frame_finish (void)
1194 {
1195 /* Output call frame information. */
1196 if (targetm.debug_unwind_info () == UI_DWARF2)
1197 output_call_frame_info (0);
1198
1199 /* Output another copy for the unwinder. */
1200 if (do_eh_frame)
1201 output_call_frame_info (1);
1202 }
1203
1204 /* Note that the current function section is being used for code. */
1205
1206 static void
1207 dwarf2out_note_section_used (void)
1208 {
1209 section *sec = current_function_section ();
1210 if (sec == text_section)
1211 text_section_used = true;
1212 else if (sec == cold_text_section)
1213 cold_text_section_used = true;
1214 }
1215
1216 static void var_location_switch_text_section (void);
1217 static void set_cur_line_info_table (section *);
1218
1219 void
1220 dwarf2out_switch_text_section (void)
1221 {
1222 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1223 section *sect;
1224 dw_fde_ref fde = cfun->fde;
1225
1226 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1227
1228 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_SECOND_SECT_LABEL,
1229 current_function_funcdef_no);
1230
1231 fde->dw_fde_second_begin = ggc_strdup (label);
1232 if (!in_cold_section_p)
1233 {
1234 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1235 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1236 }
1237 else
1238 {
1239 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1240 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1241 }
1242 have_multiple_function_sections = true;
1243
1244 /* There is no need to mark used sections when not debugging. */
1245 if (cold_text_section != NULL)
1246 dwarf2out_note_section_used ();
1247
1248 if (dwarf2out_do_cfi_asm ())
1249 fprintf (asm_out_file, "\t.cfi_endproc\n");
1250
1251 /* Now do the real section switch. */
1252 sect = current_function_section ();
1253 switch_to_section (sect);
1254
1255 fde->second_in_std_section
1256 = (sect == text_section
1257 || (cold_text_section && sect == cold_text_section));
1258
1259 if (dwarf2out_do_cfi_asm ())
1260 dwarf2out_do_cfi_startproc (true);
1261
1262 var_location_switch_text_section ();
1263
1264 if (cold_text_section != NULL)
1265 set_cur_line_info_table (sect);
1266 }
1267 \f
1268 /* And now, the subset of the debugging information support code necessary
1269 for emitting location expressions. */
1270
1271 /* Data about a single source file. */
1272 struct GTY((for_user)) dwarf_file_data {
1273 const char * filename;
1274 int emitted_number;
1275 };
1276
1277 /* Describe an entry into the .debug_addr section. */
1278
1279 enum ate_kind {
1280 ate_kind_rtx,
1281 ate_kind_rtx_dtprel,
1282 ate_kind_label
1283 };
1284
1285 struct GTY((for_user)) addr_table_entry {
1286 enum ate_kind kind;
1287 unsigned int refcount;
1288 unsigned int index;
1289 union addr_table_entry_struct_union
1290 {
1291 rtx GTY ((tag ("0"))) rtl;
1292 char * GTY ((tag ("1"))) label;
1293 }
1294 GTY ((desc ("%1.kind"))) addr;
1295 };
1296
1297 typedef unsigned int var_loc_view;
1298
1299 /* Location lists are ranges + location descriptions for that range,
1300 so you can track variables that are in different places over
1301 their entire life. */
1302 typedef struct GTY(()) dw_loc_list_struct {
1303 dw_loc_list_ref dw_loc_next;
1304 const char *begin; /* Label and addr_entry for start of range */
1305 addr_table_entry *begin_entry;
1306 const char *end; /* Label for end of range */
1307 char *ll_symbol; /* Label for beginning of location list.
1308 Only on head of list. */
1309 char *vl_symbol; /* Label for beginning of view list. Ditto. */
1310 const char *section; /* Section this loclist is relative to */
1311 dw_loc_descr_ref expr;
1312 var_loc_view vbegin, vend;
1313 hashval_t hash;
1314 /* True if all addresses in this and subsequent lists are known to be
1315 resolved. */
1316 bool resolved_addr;
1317 /* True if this list has been replaced by dw_loc_next. */
1318 bool replaced;
1319 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1320 section. */
1321 unsigned char emitted : 1;
1322 /* True if hash field is index rather than hash value. */
1323 unsigned char num_assigned : 1;
1324 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1325 unsigned char offset_emitted : 1;
1326 /* True if note_variable_value_in_expr has been called on it. */
1327 unsigned char noted_variable_value : 1;
1328 /* True if the range should be emitted even if begin and end
1329 are the same. */
1330 bool force;
1331 } dw_loc_list_node;
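/* As an illustration of the structure above, a variable that lives in a
   register over one range of addresses and in a stack slot afterwards
   would be described by two chained nodes, conceptually:

     { begin "LVL0", end "LVL1", expr DW_OP_reg3 }
       -> { begin "LVL1", end "LFE0", expr DW_OP_fbreg -24 }

   Only the head of such a chain carries ll_symbol (and vl_symbol when
   location views are in use).  The label names here are invented for the
   example; real ones come from ASM_GENERATE_INTERNAL_LABEL.  */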
1332
1333 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1334 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1335
1336 /* Convert a DWARF stack opcode into its string name. */
1337
1338 static const char *
1339 dwarf_stack_op_name (unsigned int op)
1340 {
1341 const char *name = get_DW_OP_name (op);
1342
1343 if (name != NULL)
1344 return name;
1345
1346 return "OP_<unknown>";
1347 }
1348
1349 /* Return TRUE iff we're to output location view lists as a separate
1350 attribute next to the location lists, as an extension compatible
1351 with DWARF 2 and above. */
1352
1353 static inline bool
1354 dwarf2out_locviews_in_attribute ()
1355 {
1356 return debug_variable_location_views == 1;
1357 }
1358
1359 /* Return TRUE iff we're to output location view lists as part of the
1360 location lists, as proposed for standardization after DWARF 5. */
1361
1362 static inline bool
1363 dwarf2out_locviews_in_loclist ()
1364 {
1365 #ifndef DW_LLE_view_pair
1366 return false;
1367 #else
1368 return debug_variable_location_views == -1;
1369 #endif
1370 }
1371
1372 /* Return a pointer to a newly allocated location description. Location
1373 descriptions are simple expression terms that can be strung
1374 together to form more complicated location (address) descriptions. */
1375
1376 static inline dw_loc_descr_ref
1377 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1378 unsigned HOST_WIDE_INT oprnd2)
1379 {
1380 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1381
1382 descr->dw_loc_opc = op;
1383 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1384 descr->dw_loc_oprnd1.val_entry = NULL;
1385 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1386 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1387 descr->dw_loc_oprnd2.val_entry = NULL;
1388 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1389
1390 return descr;
1391 }
1392
1393 /* Add a location description term to a location description expression. */
1394
1395 static inline void
1396 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1397 {
1398 dw_loc_descr_ref *d;
1399
1400 /* Find the end of the chain. */
1401 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1402 ;
1403
1404 *d = descr;
1405 }
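/* A small usage sketch of the two helpers above (not lifted from an
   actual caller): the expression "the value stored at address reg6 + 16"
   could be built as

     dw_loc_descr_ref l = new_loc_descr (DW_OP_breg6, 16, 0);
     add_loc_descr (&l, new_loc_descr (DW_OP_deref, 0, 0));

   new_loc_descr records both operands as unsigned constants; helpers such
   as new_reg_loc_descr below choose between DW_OP_breg0..31 and
   DW_OP_bregx for a given register number.  */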
1406
1407 /* Compare two location operands for exact equality. */
1408
1409 static bool
1410 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1411 {
1412 if (a->val_class != b->val_class)
1413 return false;
1414 switch (a->val_class)
1415 {
1416 case dw_val_class_none:
1417 return true;
1418 case dw_val_class_addr:
1419 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1420
1421 case dw_val_class_offset:
1422 case dw_val_class_unsigned_const:
1423 case dw_val_class_const:
1424 case dw_val_class_unsigned_const_implicit:
1425 case dw_val_class_const_implicit:
1426 case dw_val_class_range_list:
1427 /* These are all HOST_WIDE_INT, signed or unsigned. */
1428 return a->v.val_unsigned == b->v.val_unsigned;
1429
1430 case dw_val_class_loc:
1431 return a->v.val_loc == b->v.val_loc;
1432 case dw_val_class_loc_list:
1433 return a->v.val_loc_list == b->v.val_loc_list;
1434 case dw_val_class_view_list:
1435 return a->v.val_view_list == b->v.val_view_list;
1436 case dw_val_class_die_ref:
1437 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1438 case dw_val_class_fde_ref:
1439 return a->v.val_fde_index == b->v.val_fde_index;
1440 case dw_val_class_symview:
1441 return strcmp (a->v.val_symbolic_view, b->v.val_symbolic_view) == 0;
1442 case dw_val_class_lbl_id:
1443 case dw_val_class_lineptr:
1444 case dw_val_class_macptr:
1445 case dw_val_class_loclistsptr:
1446 case dw_val_class_high_pc:
1447 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1448 case dw_val_class_str:
1449 return a->v.val_str == b->v.val_str;
1450 case dw_val_class_flag:
1451 return a->v.val_flag == b->v.val_flag;
1452 case dw_val_class_file:
1453 case dw_val_class_file_implicit:
1454 return a->v.val_file == b->v.val_file;
1455 case dw_val_class_decl_ref:
1456 return a->v.val_decl_ref == b->v.val_decl_ref;
1457
1458 case dw_val_class_const_double:
1459 return (a->v.val_double.high == b->v.val_double.high
1460 && a->v.val_double.low == b->v.val_double.low);
1461
1462 case dw_val_class_wide_int:
1463 return *a->v.val_wide == *b->v.val_wide;
1464
1465 case dw_val_class_vec:
1466 {
1467 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1468 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1469
1470 return (a_len == b_len
1471 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1472 }
1473
1474 case dw_val_class_data8:
1475 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1476
1477 case dw_val_class_vms_delta:
1478 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
 1479           && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
1480
1481 case dw_val_class_discr_value:
1482 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1483 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1484 case dw_val_class_discr_list:
1485 /* It makes no sense comparing two discriminant value lists. */
1486 return false;
1487 }
1488 gcc_unreachable ();
1489 }
1490
1491 /* Compare two location atoms for exact equality. */
1492
1493 static bool
1494 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1495 {
1496 if (a->dw_loc_opc != b->dw_loc_opc)
1497 return false;
1498
1499 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1500 address size, but since we always allocate cleared storage it
1501 should be zero for other types of locations. */
1502 if (a->dtprel != b->dtprel)
1503 return false;
1504
1505 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1506 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1507 }
1508
1509 /* Compare two complete location expressions for exact equality. */
1510
1511 bool
1512 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1513 {
1514 while (1)
1515 {
1516 if (a == b)
1517 return true;
1518 if (a == NULL || b == NULL)
1519 return false;
1520 if (!loc_descr_equal_p_1 (a, b))
1521 return false;
1522
1523 a = a->dw_loc_next;
1524 b = b->dw_loc_next;
1525 }
1526 }
1527
1528
1529 /* Add a constant POLY_OFFSET to a location expression. */
1530
1531 static void
1532 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1533 {
1534 dw_loc_descr_ref loc;
1535 HOST_WIDE_INT *p;
1536
1537 gcc_assert (*list_head != NULL);
1538
1539 if (known_eq (poly_offset, 0))
1540 return;
1541
1542 /* Find the end of the chain. */
1543 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1544 ;
1545
1546 HOST_WIDE_INT offset;
1547 if (!poly_offset.is_constant (&offset))
1548 {
1549 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1550 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1551 return;
1552 }
1553
1554 p = NULL;
1555 if (loc->dw_loc_opc == DW_OP_fbreg
1556 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1557 p = &loc->dw_loc_oprnd1.v.val_int;
1558 else if (loc->dw_loc_opc == DW_OP_bregx)
1559 p = &loc->dw_loc_oprnd2.v.val_int;
1560
1561 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
 1562      offset.  Don't optimize if a signed integer overflow would happen.  */
1563 if (p != NULL
1564 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1565 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1566 *p += offset;
1567
1568 else if (offset > 0)
1569 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1570
1571 else
1572 {
1573 loc->dw_loc_next
1574 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1575 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1576 }
1577 }
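/* For example, adding the constant 8 to an expression ending in
   DW_OP_fbreg -24 simply rewrites that operand to -16; if the last
   operation has no offset operand to absorb the constant, a
   DW_OP_plus_uconst 8 is appended instead, and a negative constant that
   cannot be folded in is appended as its absolute value followed by
   DW_OP_minus.  */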
1578
1579 /* Return a pointer to a newly allocated location description for
1580 REG and OFFSET. */
1581
1582 static inline dw_loc_descr_ref
1583 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1584 {
1585 HOST_WIDE_INT const_offset;
1586 if (offset.is_constant (&const_offset))
1587 {
1588 if (reg <= 31)
1589 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1590 const_offset, 0);
1591 else
1592 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1593 }
1594 else
1595 {
1596 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1597 loc_descr_plus_const (&ret, offset);
1598 return ret;
1599 }
1600 }
1601
1602 /* Add a constant OFFSET to a location list. */
1603
1604 static void
1605 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1606 {
1607 dw_loc_list_ref d;
1608 for (d = list_head; d != NULL; d = d->dw_loc_next)
1609 loc_descr_plus_const (&d->expr, offset);
1610 }
1611
1612 #define DWARF_REF_SIZE \
1613 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1614
1615 /* The number of bits that can be encoded by largest DW_FORM_dataN.
1616 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1617 DW_FORM_data16 with 128 bits. */
1618 #define DWARF_LARGEST_DATA_FORM_BITS \
1619 (dwarf_version >= 5 ? 128 : 64)
1620
1621 /* Utility inline function for construction of ops that were GNU extension
1622 before DWARF 5. */
1623 static inline enum dwarf_location_atom
1624 dwarf_OP (enum dwarf_location_atom op)
1625 {
1626 switch (op)
1627 {
1628 case DW_OP_implicit_pointer:
1629 if (dwarf_version < 5)
1630 return DW_OP_GNU_implicit_pointer;
1631 break;
1632
1633 case DW_OP_entry_value:
1634 if (dwarf_version < 5)
1635 return DW_OP_GNU_entry_value;
1636 break;
1637
1638 case DW_OP_const_type:
1639 if (dwarf_version < 5)
1640 return DW_OP_GNU_const_type;
1641 break;
1642
1643 case DW_OP_regval_type:
1644 if (dwarf_version < 5)
1645 return DW_OP_GNU_regval_type;
1646 break;
1647
1648 case DW_OP_deref_type:
1649 if (dwarf_version < 5)
1650 return DW_OP_GNU_deref_type;
1651 break;
1652
1653 case DW_OP_convert:
1654 if (dwarf_version < 5)
1655 return DW_OP_GNU_convert;
1656 break;
1657
1658 case DW_OP_reinterpret:
1659 if (dwarf_version < 5)
1660 return DW_OP_GNU_reinterpret;
1661 break;
1662
1663 case DW_OP_addrx:
1664 if (dwarf_version < 5)
1665 return DW_OP_GNU_addr_index;
1666 break;
1667
1668 case DW_OP_constx:
1669 if (dwarf_version < 5)
1670 return DW_OP_GNU_const_index;
1671 break;
1672
1673 default:
1674 break;
1675 }
1676 return op;
1677 }
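/* For instance, when compiling with -gdwarf-4 a caller asking for
   dwarf_OP (DW_OP_entry_value) gets DW_OP_GNU_entry_value back, while
   with -gdwarf-5 the standard opcode is returned unchanged; dwarf_AT,
   dwarf_TAG and dwarf_FORM below apply the same rule to attributes, tags
   and forms.  */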
1678
1679 /* Similarly for attributes. */
1680 static inline enum dwarf_attribute
1681 dwarf_AT (enum dwarf_attribute at)
1682 {
1683 switch (at)
1684 {
1685 case DW_AT_call_return_pc:
1686 if (dwarf_version < 5)
1687 return DW_AT_low_pc;
1688 break;
1689
1690 case DW_AT_call_tail_call:
1691 if (dwarf_version < 5)
1692 return DW_AT_GNU_tail_call;
1693 break;
1694
1695 case DW_AT_call_origin:
1696 if (dwarf_version < 5)
1697 return DW_AT_abstract_origin;
1698 break;
1699
1700 case DW_AT_call_target:
1701 if (dwarf_version < 5)
1702 return DW_AT_GNU_call_site_target;
1703 break;
1704
1705 case DW_AT_call_target_clobbered:
1706 if (dwarf_version < 5)
1707 return DW_AT_GNU_call_site_target_clobbered;
1708 break;
1709
1710 case DW_AT_call_parameter:
1711 if (dwarf_version < 5)
1712 return DW_AT_abstract_origin;
1713 break;
1714
1715 case DW_AT_call_value:
1716 if (dwarf_version < 5)
1717 return DW_AT_GNU_call_site_value;
1718 break;
1719
1720 case DW_AT_call_data_value:
1721 if (dwarf_version < 5)
1722 return DW_AT_GNU_call_site_data_value;
1723 break;
1724
1725 case DW_AT_call_all_calls:
1726 if (dwarf_version < 5)
1727 return DW_AT_GNU_all_call_sites;
1728 break;
1729
1730 case DW_AT_call_all_tail_calls:
1731 if (dwarf_version < 5)
1732 return DW_AT_GNU_all_tail_call_sites;
1733 break;
1734
1735 case DW_AT_dwo_name:
1736 if (dwarf_version < 5)
1737 return DW_AT_GNU_dwo_name;
1738 break;
1739
1740 case DW_AT_addr_base:
1741 if (dwarf_version < 5)
1742 return DW_AT_GNU_addr_base;
1743 break;
1744
1745 default:
1746 break;
1747 }
1748 return at;
1749 }
1750
1751 /* And similarly for tags. */
1752 static inline enum dwarf_tag
1753 dwarf_TAG (enum dwarf_tag tag)
1754 {
1755 switch (tag)
1756 {
1757 case DW_TAG_call_site:
1758 if (dwarf_version < 5)
1759 return DW_TAG_GNU_call_site;
1760 break;
1761
1762 case DW_TAG_call_site_parameter:
1763 if (dwarf_version < 5)
1764 return DW_TAG_GNU_call_site_parameter;
1765 break;
1766
1767 default:
1768 break;
1769 }
1770 return tag;
1771 }
1772
1773 /* And similarly for forms. */
1774 static inline enum dwarf_form
1775 dwarf_FORM (enum dwarf_form form)
1776 {
1777 switch (form)
1778 {
1779 case DW_FORM_addrx:
1780 if (dwarf_version < 5)
1781 return DW_FORM_GNU_addr_index;
1782 break;
1783
1784 case DW_FORM_strx:
1785 if (dwarf_version < 5)
1786 return DW_FORM_GNU_str_index;
1787 break;
1788
1789 default:
1790 break;
1791 }
1792 return form;
1793 }
1794
1795 static unsigned long int get_base_type_offset (dw_die_ref);
1796
1797 /* Return the size of a location descriptor. */
1798
1799 static unsigned long
1800 size_of_loc_descr (dw_loc_descr_ref loc)
1801 {
1802 unsigned long size = 1;
1803
1804 switch (loc->dw_loc_opc)
1805 {
1806 case DW_OP_addr:
1807 size += DWARF2_ADDR_SIZE;
1808 break;
1809 case DW_OP_GNU_addr_index:
1810 case DW_OP_addrx:
1811 case DW_OP_GNU_const_index:
1812 case DW_OP_constx:
1813 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1814 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1815 break;
1816 case DW_OP_const1u:
1817 case DW_OP_const1s:
1818 size += 1;
1819 break;
1820 case DW_OP_const2u:
1821 case DW_OP_const2s:
1822 size += 2;
1823 break;
1824 case DW_OP_const4u:
1825 case DW_OP_const4s:
1826 size += 4;
1827 break;
1828 case DW_OP_const8u:
1829 case DW_OP_const8s:
1830 size += 8;
1831 break;
1832 case DW_OP_constu:
1833 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1834 break;
1835 case DW_OP_consts:
1836 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1837 break;
1838 case DW_OP_pick:
1839 size += 1;
1840 break;
1841 case DW_OP_plus_uconst:
1842 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1843 break;
1844 case DW_OP_skip:
1845 case DW_OP_bra:
1846 size += 2;
1847 break;
1848 case DW_OP_breg0:
1849 case DW_OP_breg1:
1850 case DW_OP_breg2:
1851 case DW_OP_breg3:
1852 case DW_OP_breg4:
1853 case DW_OP_breg5:
1854 case DW_OP_breg6:
1855 case DW_OP_breg7:
1856 case DW_OP_breg8:
1857 case DW_OP_breg9:
1858 case DW_OP_breg10:
1859 case DW_OP_breg11:
1860 case DW_OP_breg12:
1861 case DW_OP_breg13:
1862 case DW_OP_breg14:
1863 case DW_OP_breg15:
1864 case DW_OP_breg16:
1865 case DW_OP_breg17:
1866 case DW_OP_breg18:
1867 case DW_OP_breg19:
1868 case DW_OP_breg20:
1869 case DW_OP_breg21:
1870 case DW_OP_breg22:
1871 case DW_OP_breg23:
1872 case DW_OP_breg24:
1873 case DW_OP_breg25:
1874 case DW_OP_breg26:
1875 case DW_OP_breg27:
1876 case DW_OP_breg28:
1877 case DW_OP_breg29:
1878 case DW_OP_breg30:
1879 case DW_OP_breg31:
1880 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1881 break;
1882 case DW_OP_regx:
1883 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1884 break;
1885 case DW_OP_fbreg:
1886 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1887 break;
1888 case DW_OP_bregx:
1889 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1890 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1891 break;
1892 case DW_OP_piece:
1893 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1894 break;
1895 case DW_OP_bit_piece:
1896 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1897 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1898 break;
1899 case DW_OP_deref_size:
1900 case DW_OP_xderef_size:
1901 size += 1;
1902 break;
1903 case DW_OP_call2:
1904 size += 2;
1905 break;
1906 case DW_OP_call4:
1907 size += 4;
1908 break;
1909 case DW_OP_call_ref:
1910 case DW_OP_GNU_variable_value:
1911 size += DWARF_REF_SIZE;
1912 break;
1913 case DW_OP_implicit_value:
1914 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1915 + loc->dw_loc_oprnd1.v.val_unsigned;
1916 break;
1917 case DW_OP_implicit_pointer:
1918 case DW_OP_GNU_implicit_pointer:
1919 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1920 break;
1921 case DW_OP_entry_value:
1922 case DW_OP_GNU_entry_value:
1923 {
1924 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1925 size += size_of_uleb128 (op_size) + op_size;
1926 break;
1927 }
1928 case DW_OP_const_type:
1929 case DW_OP_GNU_const_type:
1930 {
1931 unsigned long o
1932 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1933 size += size_of_uleb128 (o) + 1;
1934 switch (loc->dw_loc_oprnd2.val_class)
1935 {
1936 case dw_val_class_vec:
1937 size += loc->dw_loc_oprnd2.v.val_vec.length
1938 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1939 break;
1940 case dw_val_class_const:
1941 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1942 break;
1943 case dw_val_class_const_double:
1944 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1945 break;
1946 case dw_val_class_wide_int:
1947 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1948 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1949 break;
1950 default:
1951 gcc_unreachable ();
1952 }
1953 break;
1954 }
1955 case DW_OP_regval_type:
1956 case DW_OP_GNU_regval_type:
1957 {
1958 unsigned long o
1959 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1960 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1961 + size_of_uleb128 (o);
1962 }
1963 break;
1964 case DW_OP_deref_type:
1965 case DW_OP_GNU_deref_type:
1966 {
1967 unsigned long o
1968 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1969 size += 1 + size_of_uleb128 (o);
1970 }
1971 break;
1972 case DW_OP_convert:
1973 case DW_OP_reinterpret:
1974 case DW_OP_GNU_convert:
1975 case DW_OP_GNU_reinterpret:
1976 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1977 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1978 else
1979 {
1980 unsigned long o
1981 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1982 size += size_of_uleb128 (o);
1983 }
1984 break;
1985 case DW_OP_GNU_parameter_ref:
1986 size += 4;
1987 break;
1988 default:
1989 break;
1990 }
1991
1992 return size;
1993 }
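
/* A worked example (illustrative only): DW_OP_plus_uconst with operand
   129 occupies 1 byte for the opcode plus size_of_uleb128 (129) == 2
   bytes for the operand, i.e. 3 bytes in total, whereas DW_OP_breg5
   with offset -8 takes 1 + size_of_sleb128 (-8) == 2 bytes.  */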
1994
1995 /* Return the size of a series of location descriptors. */
1996
1997 unsigned long
1998 size_of_locs (dw_loc_descr_ref loc)
1999 {
2000 dw_loc_descr_ref l;
2001 unsigned long size;
2002
2003 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
2004 field, to avoid writing to a PCH file. */
2005 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2006 {
2007 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
2008 break;
2009 size += size_of_loc_descr (l);
2010 }
2011 if (! l)
2012 return size;
2013
2014 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2015 {
2016 l->dw_loc_addr = size;
2017 size += size_of_loc_descr (l);
2018 }
2019
2020 return size;
2021 }
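
/* Note that the dw_loc_addr offsets recorded in the second loop are the
   values output_loc_operands below uses to compute the 2-byte operands
   of DW_OP_skip and DW_OP_bra, which are encoded relative to the byte
   following the 2-byte operand itself (hence the "+ 3" there).  */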
2022
2023 /* Return the size of the value in a DW_AT_discr_value attribute. */
2024
2025 static int
2026 size_of_discr_value (dw_discr_value *discr_value)
2027 {
2028 if (discr_value->pos)
2029 return size_of_uleb128 (discr_value->v.uval);
2030 else
2031 return size_of_sleb128 (discr_value->v.sval);
2032 }
2033
2034 /* Return the size of the value in a DW_AT_discr_list attribute. */
2035
2036 static int
2037 size_of_discr_list (dw_discr_list_ref discr_list)
2038 {
2039 int size = 0;
2040
2041 for (dw_discr_list_ref list = discr_list;
2042 list != NULL;
2043 list = list->dw_discr_next)
2044 {
2045 /* One byte for the discriminant value descriptor, and then one or two
2046 LEB128 numbers, depending on whether it's a single case label or a
2047 range label. */
2048 size += 1;
2049 size += size_of_discr_value (&list->dw_discr_lower_bound);
2050 if (list->dw_discr_range != 0)
2051 size += size_of_discr_value (&list->dw_discr_upper_bound);
2052 }
2053 return size;
2054 }
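
/* For example (illustrative), a discriminant list holding the single
   value 3 followed by the range 10..20 is sized as
   1 + size_of_uleb128 (3) for the first entry and
   1 + size_of_uleb128 (10) + size_of_uleb128 (20) for the second,
   assuming an unsigned discriminant type.  */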
2055
2056 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
2057 static void get_ref_die_offset_label (char *, dw_die_ref);
2058 static unsigned long int get_ref_die_offset (dw_die_ref);
2059
2060 /* Output location description stack opcode's operands (if any).
2061 The for_eh_or_skip parameter controls whether register numbers are
2062 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2063 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2064 info). This should be suppressed for the cases that have not been converted
2065 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2066
2067 static void
2068 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
2069 {
2070 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2071 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2072
2073 switch (loc->dw_loc_opc)
2074 {
2075 #ifdef DWARF2_DEBUGGING_INFO
2076 case DW_OP_const2u:
2077 case DW_OP_const2s:
2078 dw2_asm_output_data (2, val1->v.val_int, NULL);
2079 break;
2080 case DW_OP_const4u:
2081 if (loc->dtprel)
2082 {
2083 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2084 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2085 val1->v.val_addr);
2086 fputc ('\n', asm_out_file);
2087 break;
2088 }
2089 /* FALLTHRU */
2090 case DW_OP_const4s:
2091 dw2_asm_output_data (4, val1->v.val_int, NULL);
2092 break;
2093 case DW_OP_const8u:
2094 if (loc->dtprel)
2095 {
2096 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2097 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2098 val1->v.val_addr);
2099 fputc ('\n', asm_out_file);
2100 break;
2101 }
2102 /* FALLTHRU */
2103 case DW_OP_const8s:
2104 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2105 dw2_asm_output_data (8, val1->v.val_int, NULL);
2106 break;
2107 case DW_OP_skip:
2108 case DW_OP_bra:
2109 {
2110 int offset;
2111
2112 gcc_assert (val1->val_class == dw_val_class_loc);
2113 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2114
2115 dw2_asm_output_data (2, offset, NULL);
2116 }
2117 break;
2118 case DW_OP_implicit_value:
2119 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2120 switch (val2->val_class)
2121 {
2122 case dw_val_class_const:
2123 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2124 break;
2125 case dw_val_class_vec:
2126 {
2127 unsigned int elt_size = val2->v.val_vec.elt_size;
2128 unsigned int len = val2->v.val_vec.length;
2129 unsigned int i;
2130 unsigned char *p;
2131
2132 if (elt_size > sizeof (HOST_WIDE_INT))
2133 {
2134 elt_size /= 2;
2135 len *= 2;
2136 }
2137 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2138 i < len;
2139 i++, p += elt_size)
2140 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2141 "fp or vector constant word %u", i);
2142 }
2143 break;
2144 case dw_val_class_const_double:
2145 {
2146 unsigned HOST_WIDE_INT first, second;
2147
2148 if (WORDS_BIG_ENDIAN)
2149 {
2150 first = val2->v.val_double.high;
2151 second = val2->v.val_double.low;
2152 }
2153 else
2154 {
2155 first = val2->v.val_double.low;
2156 second = val2->v.val_double.high;
2157 }
2158 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2159 first, NULL);
2160 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2161 second, NULL);
2162 }
2163 break;
2164 case dw_val_class_wide_int:
2165 {
2166 int i;
2167 int len = get_full_len (*val2->v.val_wide);
2168 if (WORDS_BIG_ENDIAN)
2169 for (i = len - 1; i >= 0; --i)
2170 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2171 val2->v.val_wide->elt (i), NULL);
2172 else
2173 for (i = 0; i < len; ++i)
2174 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2175 val2->v.val_wide->elt (i), NULL);
2176 }
2177 break;
2178 case dw_val_class_addr:
2179 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2180 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2181 break;
2182 default:
2183 gcc_unreachable ();
2184 }
2185 break;
2186 #else
2187 case DW_OP_const2u:
2188 case DW_OP_const2s:
2189 case DW_OP_const4u:
2190 case DW_OP_const4s:
2191 case DW_OP_const8u:
2192 case DW_OP_const8s:
2193 case DW_OP_skip:
2194 case DW_OP_bra:
2195 case DW_OP_implicit_value:
2196 /* We currently don't make any attempt to make sure these are
2197 aligned properly like we do for the main unwind info, so
2198 don't support emitting things larger than a byte if we're
2199 only doing unwinding. */
2200 gcc_unreachable ();
2201 #endif
2202 case DW_OP_const1u:
2203 case DW_OP_const1s:
2204 dw2_asm_output_data (1, val1->v.val_int, NULL);
2205 break;
2206 case DW_OP_constu:
2207 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2208 break;
2209 case DW_OP_consts:
2210 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2211 break;
2212 case DW_OP_pick:
2213 dw2_asm_output_data (1, val1->v.val_int, NULL);
2214 break;
2215 case DW_OP_plus_uconst:
2216 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2217 break;
2218 case DW_OP_breg0:
2219 case DW_OP_breg1:
2220 case DW_OP_breg2:
2221 case DW_OP_breg3:
2222 case DW_OP_breg4:
2223 case DW_OP_breg5:
2224 case DW_OP_breg6:
2225 case DW_OP_breg7:
2226 case DW_OP_breg8:
2227 case DW_OP_breg9:
2228 case DW_OP_breg10:
2229 case DW_OP_breg11:
2230 case DW_OP_breg12:
2231 case DW_OP_breg13:
2232 case DW_OP_breg14:
2233 case DW_OP_breg15:
2234 case DW_OP_breg16:
2235 case DW_OP_breg17:
2236 case DW_OP_breg18:
2237 case DW_OP_breg19:
2238 case DW_OP_breg20:
2239 case DW_OP_breg21:
2240 case DW_OP_breg22:
2241 case DW_OP_breg23:
2242 case DW_OP_breg24:
2243 case DW_OP_breg25:
2244 case DW_OP_breg26:
2245 case DW_OP_breg27:
2246 case DW_OP_breg28:
2247 case DW_OP_breg29:
2248 case DW_OP_breg30:
2249 case DW_OP_breg31:
2250 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2251 break;
2252 case DW_OP_regx:
2253 {
2254 unsigned r = val1->v.val_unsigned;
2255 if (for_eh_or_skip >= 0)
2256 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2257 gcc_assert (size_of_uleb128 (r)
2258 == size_of_uleb128 (val1->v.val_unsigned));
2259 dw2_asm_output_data_uleb128 (r, NULL);
2260 }
2261 break;
2262 case DW_OP_fbreg:
2263 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2264 break;
2265 case DW_OP_bregx:
2266 {
2267 unsigned r = val1->v.val_unsigned;
2268 if (for_eh_or_skip >= 0)
2269 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2270 gcc_assert (size_of_uleb128 (r)
2271 == size_of_uleb128 (val1->v.val_unsigned));
2272 dw2_asm_output_data_uleb128 (r, NULL);
2273 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2274 }
2275 break;
2276 case DW_OP_piece:
2277 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2278 break;
2279 case DW_OP_bit_piece:
2280 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2281 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2282 break;
2283 case DW_OP_deref_size:
2284 case DW_OP_xderef_size:
2285 dw2_asm_output_data (1, val1->v.val_int, NULL);
2286 break;
2287
2288 case DW_OP_addr:
2289 if (loc->dtprel)
2290 {
2291 if (targetm.asm_out.output_dwarf_dtprel)
2292 {
2293 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2294 DWARF2_ADDR_SIZE,
2295 val1->v.val_addr);
2296 fputc ('\n', asm_out_file);
2297 }
2298 else
2299 gcc_unreachable ();
2300 }
2301 else
2302 {
2303 #ifdef DWARF2_DEBUGGING_INFO
2304 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2305 #else
2306 gcc_unreachable ();
2307 #endif
2308 }
2309 break;
2310
2311 case DW_OP_GNU_addr_index:
2312 case DW_OP_addrx:
2313 case DW_OP_GNU_const_index:
2314 case DW_OP_constx:
2315 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2316 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2317 "(index into .debug_addr)");
2318 break;
2319
2320 case DW_OP_call2:
2321 case DW_OP_call4:
2322 {
2323 unsigned long die_offset
2324 = get_ref_die_offset (val1->v.val_die_ref.die);
2325 /* Make sure the offset has been computed and that we can encode it as
2326 an operand. */
2327 gcc_assert (die_offset > 0
2328 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2329 ? 0xffff
2330 : 0xffffffff));
2331 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2332 die_offset, NULL);
2333 }
2334 break;
2335
2336 case DW_OP_call_ref:
2337 case DW_OP_GNU_variable_value:
2338 {
2339 char label[MAX_ARTIFICIAL_LABEL_BYTES
2340 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2341 gcc_assert (val1->val_class == dw_val_class_die_ref);
2342 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2343 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2344 }
2345 break;
2346
2347 case DW_OP_implicit_pointer:
2348 case DW_OP_GNU_implicit_pointer:
2349 {
2350 char label[MAX_ARTIFICIAL_LABEL_BYTES
2351 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2352 gcc_assert (val1->val_class == dw_val_class_die_ref);
2353 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2354 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2355 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2356 }
2357 break;
2358
2359 case DW_OP_entry_value:
2360 case DW_OP_GNU_entry_value:
2361 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2362 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2363 break;
2364
2365 case DW_OP_const_type:
2366 case DW_OP_GNU_const_type:
2367 {
2368 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2369 gcc_assert (o);
2370 dw2_asm_output_data_uleb128 (o, NULL);
2371 switch (val2->val_class)
2372 {
2373 case dw_val_class_const:
2374 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2375 dw2_asm_output_data (1, l, NULL);
2376 dw2_asm_output_data (l, val2->v.val_int, NULL);
2377 break;
2378 case dw_val_class_vec:
2379 {
2380 unsigned int elt_size = val2->v.val_vec.elt_size;
2381 unsigned int len = val2->v.val_vec.length;
2382 unsigned int i;
2383 unsigned char *p;
2384
2385 l = len * elt_size;
2386 dw2_asm_output_data (1, l, NULL);
2387 if (elt_size > sizeof (HOST_WIDE_INT))
2388 {
2389 elt_size /= 2;
2390 len *= 2;
2391 }
2392 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2393 i < len;
2394 i++, p += elt_size)
2395 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2396 "fp or vector constant word %u", i);
2397 }
2398 break;
2399 case dw_val_class_const_double:
2400 {
2401 unsigned HOST_WIDE_INT first, second;
2402 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2403
2404 dw2_asm_output_data (1, 2 * l, NULL);
2405 if (WORDS_BIG_ENDIAN)
2406 {
2407 first = val2->v.val_double.high;
2408 second = val2->v.val_double.low;
2409 }
2410 else
2411 {
2412 first = val2->v.val_double.low;
2413 second = val2->v.val_double.high;
2414 }
2415 dw2_asm_output_data (l, first, NULL);
2416 dw2_asm_output_data (l, second, NULL);
2417 }
2418 break;
2419 case dw_val_class_wide_int:
2420 {
2421 int i;
2422 int len = get_full_len (*val2->v.val_wide);
2423 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2424
2425 dw2_asm_output_data (1, len * l, NULL);
2426 if (WORDS_BIG_ENDIAN)
2427 for (i = len - 1; i >= 0; --i)
2428 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2429 else
2430 for (i = 0; i < len; ++i)
2431 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2432 }
2433 break;
2434 default:
2435 gcc_unreachable ();
2436 }
2437 }
2438 break;
2439 case DW_OP_regval_type:
2440 case DW_OP_GNU_regval_type:
2441 {
2442 unsigned r = val1->v.val_unsigned;
2443 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2444 gcc_assert (o);
2445 if (for_eh_or_skip >= 0)
2446 {
2447 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2448 gcc_assert (size_of_uleb128 (r)
2449 == size_of_uleb128 (val1->v.val_unsigned));
2450 }
2451 dw2_asm_output_data_uleb128 (r, NULL);
2452 dw2_asm_output_data_uleb128 (o, NULL);
2453 }
2454 break;
2455 case DW_OP_deref_type:
2456 case DW_OP_GNU_deref_type:
2457 {
2458 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2459 gcc_assert (o);
2460 dw2_asm_output_data (1, val1->v.val_int, NULL);
2461 dw2_asm_output_data_uleb128 (o, NULL);
2462 }
2463 break;
2464 case DW_OP_convert:
2465 case DW_OP_reinterpret:
2466 case DW_OP_GNU_convert:
2467 case DW_OP_GNU_reinterpret:
2468 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2469 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2470 else
2471 {
2472 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2473 gcc_assert (o);
2474 dw2_asm_output_data_uleb128 (o, NULL);
2475 }
2476 break;
2477
2478 case DW_OP_GNU_parameter_ref:
2479 {
2480 unsigned long o;
2481 gcc_assert (val1->val_class == dw_val_class_die_ref);
2482 o = get_ref_die_offset (val1->v.val_die_ref.die);
2483 dw2_asm_output_data (4, o, NULL);
2484 }
2485 break;
2486
2487 default:
2488 /* Other codes have no operands. */
2489 break;
2490 }
2491 }
2492
2493 /* Output a sequence of location operations.
2494 The for_eh_or_skip parameter controls whether register numbers are
2495 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2496 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2497 info). This should be suppressed for the cases that have not been converted
2498 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2499
2500 void
2501 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2502 {
2503 for (; loc != NULL; loc = loc->dw_loc_next)
2504 {
2505 enum dwarf_location_atom opc = loc->dw_loc_opc;
2506 /* Output the opcode. */
2507 if (for_eh_or_skip >= 0
2508 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2509 {
2510 unsigned r = (opc - DW_OP_breg0);
2511 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2512 gcc_assert (r <= 31);
2513 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2514 }
2515 else if (for_eh_or_skip >= 0
2516 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2517 {
2518 unsigned r = (opc - DW_OP_reg0);
2519 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2520 gcc_assert (r <= 31);
2521 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2522 }
2523
2524 dw2_asm_output_data (1, opc,
2525 "%s", dwarf_stack_op_name (opc));
2526
2527 /* Output the operand(s) (if any). */
2528 output_loc_operands (loc, for_eh_or_skip);
2529 }
2530 }
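
/* As a concrete illustration (not tied to any particular target), the
   single-descriptor sequence DW_OP_fbreg -16 is emitted as the opcode
   byte 0x91 followed by the SLEB128 encoding of -16 (0x70), the operand
   coming from the DW_OP_fbreg arm of output_loc_operands above.  */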
2531
2532 /* Output location description stack opcode's operands (if any).
2533 The output is single bytes on a line, suitable for .cfi_escape. */
2534
2535 static void
2536 output_loc_operands_raw (dw_loc_descr_ref loc)
2537 {
2538 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2539 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2540
2541 switch (loc->dw_loc_opc)
2542 {
2543 case DW_OP_addr:
2544 case DW_OP_GNU_addr_index:
2545 case DW_OP_addrx:
2546 case DW_OP_GNU_const_index:
2547 case DW_OP_constx:
2548 case DW_OP_implicit_value:
2549 /* We cannot output addresses in .cfi_escape, only bytes. */
2550 gcc_unreachable ();
2551
2552 case DW_OP_const1u:
2553 case DW_OP_const1s:
2554 case DW_OP_pick:
2555 case DW_OP_deref_size:
2556 case DW_OP_xderef_size:
2557 fputc (',', asm_out_file);
2558 dw2_asm_output_data_raw (1, val1->v.val_int);
2559 break;
2560
2561 case DW_OP_const2u:
2562 case DW_OP_const2s:
2563 fputc (',', asm_out_file);
2564 dw2_asm_output_data_raw (2, val1->v.val_int);
2565 break;
2566
2567 case DW_OP_const4u:
2568 case DW_OP_const4s:
2569 fputc (',', asm_out_file);
2570 dw2_asm_output_data_raw (4, val1->v.val_int);
2571 break;
2572
2573 case DW_OP_const8u:
2574 case DW_OP_const8s:
2575 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2576 fputc (',', asm_out_file);
2577 dw2_asm_output_data_raw (8, val1->v.val_int);
2578 break;
2579
2580 case DW_OP_skip:
2581 case DW_OP_bra:
2582 {
2583 int offset;
2584
2585 gcc_assert (val1->val_class == dw_val_class_loc);
2586 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2587
2588 fputc (',', asm_out_file);
2589 dw2_asm_output_data_raw (2, offset);
2590 }
2591 break;
2592
2593 case DW_OP_regx:
2594 {
2595 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2596 gcc_assert (size_of_uleb128 (r)
2597 == size_of_uleb128 (val1->v.val_unsigned));
2598 fputc (',', asm_out_file);
2599 dw2_asm_output_data_uleb128_raw (r);
2600 }
2601 break;
2602
2603 case DW_OP_constu:
2604 case DW_OP_plus_uconst:
2605 case DW_OP_piece:
2606 fputc (',', asm_out_file);
2607 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2608 break;
2609
2610 case DW_OP_bit_piece:
2611 fputc (',', asm_out_file);
2612 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2613 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2614 break;
2615
2616 case DW_OP_consts:
2617 case DW_OP_breg0:
2618 case DW_OP_breg1:
2619 case DW_OP_breg2:
2620 case DW_OP_breg3:
2621 case DW_OP_breg4:
2622 case DW_OP_breg5:
2623 case DW_OP_breg6:
2624 case DW_OP_breg7:
2625 case DW_OP_breg8:
2626 case DW_OP_breg9:
2627 case DW_OP_breg10:
2628 case DW_OP_breg11:
2629 case DW_OP_breg12:
2630 case DW_OP_breg13:
2631 case DW_OP_breg14:
2632 case DW_OP_breg15:
2633 case DW_OP_breg16:
2634 case DW_OP_breg17:
2635 case DW_OP_breg18:
2636 case DW_OP_breg19:
2637 case DW_OP_breg20:
2638 case DW_OP_breg21:
2639 case DW_OP_breg22:
2640 case DW_OP_breg23:
2641 case DW_OP_breg24:
2642 case DW_OP_breg25:
2643 case DW_OP_breg26:
2644 case DW_OP_breg27:
2645 case DW_OP_breg28:
2646 case DW_OP_breg29:
2647 case DW_OP_breg30:
2648 case DW_OP_breg31:
2649 case DW_OP_fbreg:
2650 fputc (',', asm_out_file);
2651 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2652 break;
2653
2654 case DW_OP_bregx:
2655 {
2656 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2657 gcc_assert (size_of_uleb128 (r)
2658 == size_of_uleb128 (val1->v.val_unsigned));
2659 fputc (',', asm_out_file);
2660 dw2_asm_output_data_uleb128_raw (r);
2661 fputc (',', asm_out_file);
2662 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2663 }
2664 break;
2665
2666 case DW_OP_implicit_pointer:
2667 case DW_OP_entry_value:
2668 case DW_OP_const_type:
2669 case DW_OP_regval_type:
2670 case DW_OP_deref_type:
2671 case DW_OP_convert:
2672 case DW_OP_reinterpret:
2673 case DW_OP_GNU_implicit_pointer:
2674 case DW_OP_GNU_entry_value:
2675 case DW_OP_GNU_const_type:
2676 case DW_OP_GNU_regval_type:
2677 case DW_OP_GNU_deref_type:
2678 case DW_OP_GNU_convert:
2679 case DW_OP_GNU_reinterpret:
2680 case DW_OP_GNU_parameter_ref:
2681 gcc_unreachable ();
2682 break;
2683
2684 default:
2685 /* Other codes have no operands. */
2686 break;
2687 }
2688 }
2689
2690 void
2691 output_loc_sequence_raw (dw_loc_descr_ref loc)
2692 {
2693 while (1)
2694 {
2695 enum dwarf_location_atom opc = loc->dw_loc_opc;
2696 /* Output the opcode. */
2697 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2698 {
2699 unsigned r = (opc - DW_OP_breg0);
2700 r = DWARF2_FRAME_REG_OUT (r, 1);
2701 gcc_assert (r <= 31);
2702 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2703 }
2704 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2705 {
2706 unsigned r = (opc - DW_OP_reg0);
2707 r = DWARF2_FRAME_REG_OUT (r, 1);
2708 gcc_assert (r <= 31);
2709 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2710 }
2711 /* Output the opcode. */
2712 fprintf (asm_out_file, "%#x", opc);
2713 output_loc_operands_raw (loc);
2714
2715 if (!loc->dw_loc_next)
2716 break;
2717 loc = loc->dw_loc_next;
2718
2719 fputc (',', asm_out_file);
2720 }
2721 }
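
/* The raw variants above print comma-separated values suitable for a
   .cfi_escape directive; e.g. a lone DW_OP_breg7 with offset 8 comes
   out as the opcode 0x77 followed by its SLEB128 operand.  */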
2722
2723 /* This function builds a dwarf location descriptor sequence from a
2724 dw_cfa_location, adding the given OFFSET to the result of the
2725 expression. */
2726
2727 struct dw_loc_descr_node *
2728 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2729 {
2730 struct dw_loc_descr_node *head, *tmp;
2731
2732 offset += cfa->offset;
2733
2734 if (cfa->indirect)
2735 {
2736 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2737 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2738 head->dw_loc_oprnd1.val_entry = NULL;
2739 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2740 add_loc_descr (&head, tmp);
2741 loc_descr_plus_const (&head, offset);
2742 }
2743 else
2744 head = new_reg_loc_descr (cfa->reg, offset);
2745
2746 return head;
2747 }
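
/* For illustration: if the CFA is currently register 7 plus 16 and OFFSET
   is 8, the non-indirect case above yields a single DW_OP_breg7 24
   descriptor; in the indirect case the register/base_offset pair is
   dereferenced first and the combined offset is then added.  */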
2748
2749 /* This function builds a dwarf location descriptor sequence for
2750 the address at OFFSET from the CFA when the stack is aligned to
2751 ALIGNMENT bytes. */
2752
2753 struct dw_loc_descr_node *
2754 build_cfa_aligned_loc (dw_cfa_location *cfa,
2755 poly_int64 offset, HOST_WIDE_INT alignment)
2756 {
2757 struct dw_loc_descr_node *head;
2758 unsigned int dwarf_fp
2759 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2760
2761 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2762 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2763 {
2764 head = new_reg_loc_descr (dwarf_fp, 0);
2765 add_loc_descr (&head, int_loc_descriptor (alignment));
2766 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2767 loc_descr_plus_const (&head, offset);
2768 }
2769 else
2770 head = new_reg_loc_descr (dwarf_fp, offset);
2771 return head;
2772 }
2773 \f
2774 /* And now, the support for symbolic debugging information. */
2775
2776 /* .debug_str support. */
2777
2778 static void dwarf2out_init (const char *);
2779 static void dwarf2out_finish (const char *);
2780 static void dwarf2out_early_finish (const char *);
2781 static void dwarf2out_assembly_start (void);
2782 static void dwarf2out_define (unsigned int, const char *);
2783 static void dwarf2out_undef (unsigned int, const char *);
2784 static void dwarf2out_start_source_file (unsigned, const char *);
2785 static void dwarf2out_end_source_file (unsigned);
2786 static void dwarf2out_function_decl (tree);
2787 static void dwarf2out_begin_block (unsigned, unsigned);
2788 static void dwarf2out_end_block (unsigned, unsigned);
2789 static bool dwarf2out_ignore_block (const_tree);
2790 static void dwarf2out_early_global_decl (tree);
2791 static void dwarf2out_late_global_decl (tree);
2792 static void dwarf2out_type_decl (tree, int);
2793 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2794 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2795 dw_die_ref);
2796 static void dwarf2out_abstract_function (tree);
2797 static void dwarf2out_var_location (rtx_insn *);
2798 static void dwarf2out_inline_entry (tree);
2799 static void dwarf2out_size_function (tree);
2800 static void dwarf2out_begin_function (tree);
2801 static void dwarf2out_end_function (unsigned int);
2802 static void dwarf2out_register_main_translation_unit (tree unit);
2803 static void dwarf2out_set_name (tree, tree);
2804 static void dwarf2out_register_external_die (tree decl, const char *sym,
2805 unsigned HOST_WIDE_INT off);
2806 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2807 unsigned HOST_WIDE_INT *off);
2808
2809 /* The debug hooks structure. */
2810
2811 const struct gcc_debug_hooks dwarf2_debug_hooks =
2812 {
2813 dwarf2out_init,
2814 dwarf2out_finish,
2815 dwarf2out_early_finish,
2816 dwarf2out_assembly_start,
2817 dwarf2out_define,
2818 dwarf2out_undef,
2819 dwarf2out_start_source_file,
2820 dwarf2out_end_source_file,
2821 dwarf2out_begin_block,
2822 dwarf2out_end_block,
2823 dwarf2out_ignore_block,
2824 dwarf2out_source_line,
2825 dwarf2out_begin_prologue,
2826 #if VMS_DEBUGGING_INFO
2827 dwarf2out_vms_end_prologue,
2828 dwarf2out_vms_begin_epilogue,
2829 #else
2830 debug_nothing_int_charstar,
2831 debug_nothing_int_charstar,
2832 #endif
2833 dwarf2out_end_epilogue,
2834 dwarf2out_begin_function,
2835 dwarf2out_end_function, /* end_function */
2836 dwarf2out_register_main_translation_unit,
2837 dwarf2out_function_decl, /* function_decl */
2838 dwarf2out_early_global_decl,
2839 dwarf2out_late_global_decl,
2840 dwarf2out_type_decl, /* type_decl */
2841 dwarf2out_imported_module_or_decl,
2842 dwarf2out_die_ref_for_decl,
2843 dwarf2out_register_external_die,
2844 debug_nothing_tree, /* deferred_inline_function */
2845 /* The DWARF 2 backend tries to reduce debugging bloat by not
2846 emitting the abstract description of inline functions until
2847 something tries to reference them. */
2848 dwarf2out_abstract_function, /* outlining_inline_function */
2849 debug_nothing_rtx_code_label, /* label */
2850 debug_nothing_int, /* handle_pch */
2851 dwarf2out_var_location,
2852 dwarf2out_inline_entry, /* inline_entry */
2853 dwarf2out_size_function, /* size_function */
2854 dwarf2out_switch_text_section,
2855 dwarf2out_set_name,
2856 1, /* start_end_main_source_file */
2857 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2858 };
2859
2860 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2861 {
2862 dwarf2out_init,
2863 debug_nothing_charstar,
2864 debug_nothing_charstar,
2865 dwarf2out_assembly_start,
2866 debug_nothing_int_charstar,
2867 debug_nothing_int_charstar,
2868 debug_nothing_int_charstar,
2869 debug_nothing_int,
2870 debug_nothing_int_int, /* begin_block */
2871 debug_nothing_int_int, /* end_block */
2872 debug_true_const_tree, /* ignore_block */
2873 dwarf2out_source_line, /* source_line */
2874 debug_nothing_int_int_charstar, /* begin_prologue */
2875 debug_nothing_int_charstar, /* end_prologue */
2876 debug_nothing_int_charstar, /* begin_epilogue */
2877 debug_nothing_int_charstar, /* end_epilogue */
2878 debug_nothing_tree, /* begin_function */
2879 debug_nothing_int, /* end_function */
2880 debug_nothing_tree, /* register_main_translation_unit */
2881 debug_nothing_tree, /* function_decl */
2882 debug_nothing_tree, /* early_global_decl */
2883 debug_nothing_tree, /* late_global_decl */
2884 debug_nothing_tree_int, /* type_decl */
2885 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2886 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2887 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2888 debug_nothing_tree, /* deferred_inline_function */
2889 debug_nothing_tree, /* outlining_inline_function */
2890 debug_nothing_rtx_code_label, /* label */
2891 debug_nothing_int, /* handle_pch */
2892 debug_nothing_rtx_insn, /* var_location */
2893 debug_nothing_tree, /* inline_entry */
2894 debug_nothing_tree, /* size_function */
2895 debug_nothing_void, /* switch_text_section */
2896 debug_nothing_tree_tree, /* set_name */
2897 0, /* start_end_main_source_file */
2898 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2899 };
2900 \f
2901 /* NOTE: In the comments in this file, many references are made to
2902 "Debugging Information Entries". This term is abbreviated as `DIE'
2903 throughout the remainder of this file. */
2904
2905 /* An internal representation of the DWARF output is built, and then
2906 walked to generate the DWARF debugging info. The walk of the internal
2907 representation is done after the entire program has been compiled.
2908 The types below are used to describe the internal representation. */
2909
2910 /* Whether to put type DIEs into their own section .debug_types instead
2911 of making them part of the .debug_info section. This is only supported
2912 for DWARF 4 or higher, and only if the user didn't disable it through
2913 -fno-debug-types-section. It is more efficient to put them in
2914 separate comdat sections since the linker will then be able to
2915 remove duplicates. But not all tools support .debug_types sections
2916 yet. For DWARF 5 or higher .debug_types doesn't exist any more;
2917 type units are emitted with the DW_UT_type unit type in .debug_info. */
2918
2919 #define use_debug_types (dwarf_version >= 4 && flag_debug_types_section)
2920
2921 /* Various DIE's use offsets relative to the beginning of the
2922 .debug_info section to refer to each other. */
2923
2924 typedef long int dw_offset;
2925
2926 struct comdat_type_node;
2927
2928 /* The entries in the line_info table more-or-less mirror the opcodes
2929 that are used in the real dwarf line table. Arrays of these entries
2930 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2931 supported. */
2932
2933 enum dw_line_info_opcode {
2934 /* Emit DW_LNE_set_address; the operand is the label index. */
2935 LI_set_address,
2936
2937 /* Emit a row to the matrix with the given line. This may be done
2938 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2939 special opcodes. */
2940 LI_set_line,
2941
2942 /* Emit a DW_LNS_set_file. */
2943 LI_set_file,
2944
2945 /* Emit a DW_LNS_set_column. */
2946 LI_set_column,
2947
2948 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2949 LI_negate_stmt,
2950
2951 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2952 LI_set_prologue_end,
2953 LI_set_epilogue_begin,
2954
2955 /* Emit a DW_LNE_set_discriminator. */
2956 LI_set_discriminator,
2957
2958 /* Output a Fixed Advance PC; the target PC is the label index; the
2959 base PC is the previous LI_adv_address or LI_set_address entry.
2960 We only use this when emitting debug views without assembler
2961 support, at explicit user request. Ideally, we should only use
2962 it when the offset might be zero but we can't tell: it's the only
2963 way to maybe change the PC without resetting the view number. */
2964 LI_adv_address
2965 };
2966
2967 typedef struct GTY(()) dw_line_info_struct {
2968 enum dw_line_info_opcode opcode;
2969 unsigned int val;
2970 } dw_line_info_entry;
2971
2972
2973 struct GTY(()) dw_line_info_table {
2974 /* The label that marks the end of this section. */
2975 const char *end_label;
2976
2977 /* The values for the last row of the matrix, as collected in the table.
2978 These are used to minimize the changes to the next row. */
2979 unsigned int file_num;
2980 unsigned int line_num;
2981 unsigned int column_num;
2982 int discrim_num;
2983 bool is_stmt;
2984 bool in_use;
2985
2986 /* This denotes the NEXT view number.
2987
2988 If it is 0, it is known that the NEXT view will be the first view
2989 at the given PC.
2990
2991 If it is -1, we're forcing the view number to be reset, e.g. at a
2992 function entry.
2993
2994 The meaning of other nonzero values depends on whether we're
2995 computing views internally or leaving it for the assembler to do
2996 so. If we're emitting them internally, view denotes the view
2997 number since the last known advance of PC. If we're leaving it
2998 for the assembler, it denotes the LVU label number that we're
2999 going to ask the assembler to assign. */
3000 var_loc_view view;
3001
3002 /* This counts the number of symbolic views emitted in this table
3003 since the latest view reset. Its max value, over all tables,
3004 sets symview_upper_bound. */
3005 var_loc_view symviews_since_reset;
3006
3007 #define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
3008 #define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
3009 #define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
3010 #define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))
3011
3012 vec<dw_line_info_entry, va_gc> *entries;
3013 };
3014
3015 /* This is an upper bound for view numbers that the assembler may
3016 assign to symbolic views output in this translation unit. It is used to
3017 decide how big a field to use to represent view numbers in
3018 symview-classed attributes. */
3019
3020 static var_loc_view symview_upper_bound;
3021
3022 /* If we're keeping track of location views and their reset points, and
3023 INSN is a reset point (i.e., it necessarily advances the PC), mark
3024 the next view in TABLE as reset. */
3025
3026 static void
3027 maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table)
3028 {
3029 if (!debug_internal_reset_location_views)
3030 return;
3031
3032 /* Maybe turn (part of?) this test into a default target hook. */
3033 int reset = 0;
3034
3035 if (targetm.reset_location_view)
3036 reset = targetm.reset_location_view (insn);
3037
3038 if (reset)
3039 ;
3040 else if (JUMP_TABLE_DATA_P (insn))
3041 reset = 1;
3042 else if (GET_CODE (insn) == USE
3043 || GET_CODE (insn) == CLOBBER
3044 || GET_CODE (insn) == ASM_INPUT
3045 || asm_noperands (insn) >= 0)
3046 ;
3047 else if (get_attr_min_length (insn) > 0)
3048 reset = 1;
3049
3050 if (reset > 0 && !RESETTING_VIEW_P (table->view))
3051 RESET_NEXT_VIEW (table->view);
3052 }
3053
3054 /* Each DIE attribute has a field specifying the attribute kind,
3055 a link to the next attribute in the chain, and an attribute value.
3056 Attributes are typically linked below the DIE they modify. */
3057
3058 typedef struct GTY(()) dw_attr_struct {
3059 enum dwarf_attribute dw_attr;
3060 dw_val_node dw_attr_val;
3061 }
3062 dw_attr_node;
3063
3064
3065 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
3066 The children of each node form a circular list linked by
3067 die_sib. die_child points to the node *before* the "first" child node. */
3068
3069 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
3070 union die_symbol_or_type_node
3071 {
3072 const char * GTY ((tag ("0"))) die_symbol;
3073 comdat_type_node *GTY ((tag ("1"))) die_type_node;
3074 }
3075 GTY ((desc ("%0.comdat_type_p"))) die_id;
3076 vec<dw_attr_node, va_gc> *die_attr;
3077 dw_die_ref die_parent;
3078 dw_die_ref die_child;
3079 dw_die_ref die_sib;
3080 dw_die_ref die_definition; /* ref from a specification to its definition */
3081 dw_offset die_offset;
3082 unsigned long die_abbrev;
3083 int die_mark;
3084 unsigned int decl_id;
3085 enum dwarf_tag die_tag;
3086 /* Die is used and must not be pruned as unused. */
3087 BOOL_BITFIELD die_perennial_p : 1;
3088 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
3089 /* For an external ref to die_symbol, set if die_offset contains an
3090 extra offset to that symbol. */
3091 BOOL_BITFIELD with_offset : 1;
3092 /* Whether this DIE was removed from the DIE tree, for example via
3093 prune_unused_types. We don't consider those present from the
3094 DIE lookup routines. */
3095 BOOL_BITFIELD removed : 1;
3096 /* Lots of spare bits. */
3097 }
3098 die_node;
3099
3100 /* Set to TRUE while dwarf2out_early_global_decl is running. */
3101 static bool early_dwarf;
3102 static bool early_dwarf_finished;
3103 struct set_early_dwarf {
3104 bool saved;
3105 set_early_dwarf () : saved(early_dwarf)
3106 {
3107 gcc_assert (! early_dwarf_finished);
3108 early_dwarf = true;
3109 }
3110 ~set_early_dwarf () { early_dwarf = saved; }
3111 };
3112
3113 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
3114 #define FOR_EACH_CHILD(die, c, expr) do { \
3115 c = die->die_child; \
3116 if (c) do { \
3117 c = c->die_sib; \
3118 expr; \
3119 } while (c != die->die_child); \
3120 } while (0)
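
/* For instance (illustrative only), counting the direct children of a DIE
   can be written as:

     dw_die_ref c;
     unsigned n = 0;
     FOR_EACH_CHILD (die, c, n++);

   The walk starts at die_child->die_sib (the "first" child) and stops
   after visiting die_child itself (the last child).  */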
3121
3122 /* The pubname structure */
3123
3124 typedef struct GTY(()) pubname_struct {
3125 dw_die_ref die;
3126 const char *name;
3127 }
3128 pubname_entry;
3129
3130
3131 struct GTY(()) dw_ranges {
3132 const char *label;
3133 /* If this is positive, it's a block number, otherwise it's a
3134 bitwise-negated index into dw_ranges_by_label. */
3135 int num;
3136 /* Index for the range list for DW_FORM_rnglistx. */
3137 unsigned int idx : 31;
3138 /* True if this range might be in a different section
3139 from the previous entry. */
3140 unsigned int maybe_new_sec : 1;
3141 };
3142
3143 /* A structure to hold a macinfo entry. */
3144
3145 typedef struct GTY(()) macinfo_struct {
3146 unsigned char code;
3147 unsigned HOST_WIDE_INT lineno;
3148 const char *info;
3149 }
3150 macinfo_entry;
3151
3152
3153 struct GTY(()) dw_ranges_by_label {
3154 const char *begin;
3155 const char *end;
3156 };
3157
3158 /* The comdat type node structure. */
3159 struct GTY(()) comdat_type_node
3160 {
3161 dw_die_ref root_die;
3162 dw_die_ref type_die;
3163 dw_die_ref skeleton_die;
3164 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3165 comdat_type_node *next;
3166 };
3167
3168 /* A list of DIEs for which we can't determine ancestry (parent_die
3169 field) just yet. Later in dwarf2out_finish we will fill in the
3170 missing bits. */
3171 typedef struct GTY(()) limbo_die_struct {
3172 dw_die_ref die;
3173 /* The tree for which this DIE was created. We use this to
3174 determine ancestry later. */
3175 tree created_for;
3176 struct limbo_die_struct *next;
3177 }
3178 limbo_die_node;
3179
3180 typedef struct skeleton_chain_struct
3181 {
3182 dw_die_ref old_die;
3183 dw_die_ref new_die;
3184 struct skeleton_chain_struct *parent;
3185 }
3186 skeleton_chain_node;
3187
3188 /* Define a macro which returns nonzero for a TYPE_DECL which was
3189 implicitly generated for a type.
3190
3191 Note that, unlike the C front-end (which generates a NULL named
3192 TYPE_DECL node for each complete tagged type, each array type,
3193 and each function type node created) the C++ front-end generates
3194 a _named_ TYPE_DECL node for each tagged type node created.
3195 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3196 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3197 front-end, but for each type, tagged or not. */
3198
3199 #define TYPE_DECL_IS_STUB(decl) \
3200 (DECL_NAME (decl) == NULL_TREE \
3201 || (DECL_ARTIFICIAL (decl) \
3202 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3203 /* This is necessary for stub decls that \
3204 appear in nested inline functions. */ \
3205 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3206 && (decl_ultimate_origin (decl) \
3207 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3208
3209 /* Information concerning the compilation unit's programming
3210 language, and compiler version. */
3211
3212 /* Fixed size portion of the DWARF compilation unit header. */
3213 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3214 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3215 + (dwarf_version >= 5 ? 4 : 3))
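
/* E.g. with 32-bit DWARF this is 4 (unit length) + 4 (debug_abbrev offset)
   + 2 (version) + 1 (address size) = 11 bytes for DWARF 2-4, plus one more
   byte for the DWARF 5 unit_type field.  */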
3216
3217 /* Fixed size portion of the DWARF comdat type unit header. */
3218 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3219 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3220 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3221
3222 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3223 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3224 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3225
3226 /* Fixed size portion of public names info. */
3227 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3228
3229 /* Fixed size portion of the address range info. */
3230 #define DWARF_ARANGES_HEADER_SIZE \
3231 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3232 DWARF2_ADDR_SIZE * 2) \
3233 - DWARF_INITIAL_LENGTH_SIZE)
3234
3235 /* Size of padding portion in the address range info. It must be
3236 aligned to twice the pointer size. */
3237 #define DWARF_ARANGES_PAD_SIZE \
3238 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3239 DWARF2_ADDR_SIZE * 2) \
3240 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
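
/* Worked example (32-bit DWARF, 8-byte addresses): the fixed header is
   4 + 4 + 4 = 12 bytes, which DWARF_ROUND pads up to 16 so that the
   address/length tuples that follow start on a 2 * DWARF2_ADDR_SIZE
   boundary; DWARF_ARANGES_PAD_SIZE is then 4.  */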
3241
3242 /* Use assembler line directives if available. */
3243 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3244 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3245 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3246 #else
3247 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3248 #endif
3249 #endif
3250
3251 /* Use assembler views in line directives if available. */
3252 #ifndef DWARF2_ASM_VIEW_DEBUG_INFO
3253 #ifdef HAVE_AS_DWARF2_DEBUG_VIEW
3254 #define DWARF2_ASM_VIEW_DEBUG_INFO 1
3255 #else
3256 #define DWARF2_ASM_VIEW_DEBUG_INFO 0
3257 #endif
3258 #endif
3259
3260 /* Return true if GCC configure detected assembler support for .loc. */
3261
3262 bool
3263 dwarf2out_default_as_loc_support (void)
3264 {
3265 return DWARF2_ASM_LINE_DEBUG_INFO;
3266 #if (GCC_VERSION >= 3000)
3267 # undef DWARF2_ASM_LINE_DEBUG_INFO
3268 # pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
3269 #endif
3270 }
3271
3272 /* Return true if GCC configure detected assembler support for views
3273 in .loc directives. */
3274
3275 bool
3276 dwarf2out_default_as_locview_support (void)
3277 {
3278 return DWARF2_ASM_VIEW_DEBUG_INFO;
3279 #if (GCC_VERSION >= 3000)
3280 # undef DWARF2_ASM_VIEW_DEBUG_INFO
3281 # pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
3282 #endif
3283 }
3284
3285 /* A bit is set in ZERO_VIEW_P if we are using the assembler-supported
3286 view computation, and it refers to a view identifier for which we
3287 will not emit a label because it is known to map to a view number
3288 zero. We won't allocate the bitmap if we're not using assembler
3289 support for location views, but we have to make the variable
3290 visible for GGC and for code that will be optimized out for lack of
3291 support but that's still parsed and compiled. We could abstract it
3292 out with macros, but it's not worth it. */
3293 static GTY(()) bitmap zero_view_p;
3294
3295 /* Evaluate to TRUE iff N is known to identify the first location view
3296 at its PC. When not using assembler location view computation,
3297 that must be view number zero. Otherwise, ZERO_VIEW_P is allocated
3298 and the view label numbers recorded in it are the ones known to be
3299 zero. */
3300 #define ZERO_VIEW_P(N) ((N) == (var_loc_view)0 \
3301 || (N) == (var_loc_view)-1 \
3302 || (zero_view_p \
3303 && bitmap_bit_p (zero_view_p, (N))))
3304
3305 /* Return true iff we're to emit .loc directives for the assembler to
3306 generate line number sections.
3307
3308 When we're not emitting views, all we need from the assembler is
3309 support for .loc directives.
3310
3311 If we are emitting views, we can only use the assembler's .loc
3312 support if it also supports views.
3313
3314 When the compiler is emitting the line number programs and
3315 computing view numbers itself, it resets view numbers at known PC
3316 changes and counts from that, and then it emits view numbers as
3317 literal constants in locviewlists. There are cases in which the
3318 compiler is not sure about PC changes, e.g. when extra alignment is
3319 requested for a label. In these cases, the compiler may not reset
3320 the view counter, and the potential PC advance in the line number
3321 program will use an opcode that does not reset the view counter
3322 even if the PC actually changes, so that compiler and debug info
3323 consumer can keep view numbers in sync.
3324
3325 When the compiler defers view computation to the assembler, it
3326 emits symbolic view numbers in locviewlists, with the exception of
3327 views known to be zero (forced resets, or reset after
3328 compiler-visible PC changes): instead of emitting symbols for
3329 these, we emit literal zero and assert the assembler agrees with
3330 the compiler's assessment. We could use symbolic views everywhere,
3331 instead of special-casing zero views, but then we'd be unable to
3332 optimize out locviewlists that contain only zeros. */
3333
3334 static bool
3335 output_asm_line_debug_info (void)
3336 {
3337 return (dwarf2out_as_loc_support
3338 && (dwarf2out_as_locview_support
3339 || !debug_variable_location_views));
3340 }
3341
3342 /* Minimum line offset in a special line info. opcode.
3343 This value was chosen to give a reasonable range of values. */
3344 #define DWARF_LINE_BASE -10
3345
3346 /* First special line opcode - leave room for the standard opcodes. */
3347 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3348
3349 /* Range of line offsets in a special line info. opcode. */
3350 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
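
/* Per the DWARF line-number encoding, a special opcode is computed as
   (line delta - DWARF_LINE_BASE) + DWARF_LINE_RANGE * operation advance
   + DWARF_LINE_OPCODE_BASE.  With the values above (base -10, opcode
   base 13, range 242), advancing the line by 2 with no address advance
   is, for example, encoded as the single opcode 25.  */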
3351
3352 /* Flag that indicates the initial value of the is_stmt_start flag.
3353 In the present implementation, we do not mark any lines as
3354 the beginning of a source statement, because that information
3355 is not made available by the GCC front-end. */
3356 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3357
3358 /* Maximum number of operations per instruction bundle. */
3359 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3360 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3361 #endif
3362
3363 /* This location is used by calc_die_sizes() to keep track of
3364 the offset of each DIE within the .debug_info section. */
3365 static unsigned long next_die_offset;
3366
3367 /* Record the root of the DIE's built for the current compilation unit. */
3368 static GTY(()) dw_die_ref single_comp_unit_die;
3369
3370 /* A list of type DIEs that have been separated into comdat sections. */
3371 static GTY(()) comdat_type_node *comdat_type_list;
3372
3373 /* A list of CU DIEs that have been separated. */
3374 static GTY(()) limbo_die_node *cu_die_list;
3375
3376 /* A list of DIEs with a NULL parent waiting to be relocated. */
3377 static GTY(()) limbo_die_node *limbo_die_list;
3378
3379 /* A list of DIEs for which we may have to generate
3380 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3381 static GTY(()) limbo_die_node *deferred_asm_name;
3382
3383 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3384 {
3385 typedef const char *compare_type;
3386
3387 static hashval_t hash (dwarf_file_data *);
3388 static bool equal (dwarf_file_data *, const char *);
3389 };
3390
3391 /* Filenames referenced by this compilation unit. */
3392 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3393
3394 struct decl_die_hasher : ggc_ptr_hash<die_node>
3395 {
3396 typedef tree compare_type;
3397
3398 static hashval_t hash (die_node *);
3399 static bool equal (die_node *, tree);
3400 };
3401 /* A hash table of references to DIE's that describe declarations.
3402 The key is a DECL_UID() which is a unique number identifying each decl. */
3403 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3404
3405 struct GTY ((for_user)) variable_value_struct {
3406 unsigned int decl_id;
3407 vec<dw_die_ref, va_gc> *dies;
3408 };
3409
3410 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3411 {
3412 typedef tree compare_type;
3413
3414 static hashval_t hash (variable_value_struct *);
3415 static bool equal (variable_value_struct *, tree);
3416 };
3417 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3418 dw_val_class_decl_ref class, indexed by the FUNCTION_DECL that is the
3419 DECL_CONTEXT of the referenced VAR_DECLs. */
3420 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3421
3422 struct block_die_hasher : ggc_ptr_hash<die_struct>
3423 {
3424 static hashval_t hash (die_struct *);
3425 static bool equal (die_struct *, die_struct *);
3426 };
3427
3428 /* A hash table of references to DIE's that describe COMMON blocks.
3429 The key is DECL_UID() ^ die_parent. */
3430 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3431
3432 typedef struct GTY(()) die_arg_entry_struct {
3433 dw_die_ref die;
3434 tree arg;
3435 } die_arg_entry;
3436
3437
3438 /* Node of the variable location list. */
3439 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3440 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3441 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3442 in mode of the EXPR_LIST node and first EXPR_LIST operand
3443 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3444 location or NULL for padding. For larger bitsizes,
3445 mode is 0 and first operand is a CONCAT with bitsize
3446 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
3447 NULL as second operand. */
3448 rtx GTY (()) loc;
3449 const char * GTY (()) label;
3450 struct var_loc_node * GTY (()) next;
3451 var_loc_view view;
3452 };
3453
3454 /* Variable location list. */
3455 struct GTY ((for_user)) var_loc_list_def {
3456 struct var_loc_node * GTY (()) first;
3457
3458 /* Pointer to the last or last-but-one element of the
3459 chained list. If the list is empty, both first and
3460 last are NULL. If the list contains just one node,
3461 or the last node is certainly not redundant, this points
3462 to the last node; otherwise it points to the last but one.
3463 Do not mark it for GC because it is marked through the chain. */
3464 struct var_loc_node * GTY ((skip ("%h"))) last;
3465
3466 /* Pointer to the last element before a section switch;
3467 if NULL, either sections weren't switched or first
3468 is after the section switch. */
3469 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3470
3471 /* DECL_UID of the variable decl. */
3472 unsigned int decl_id;
3473 };
3474 typedef struct var_loc_list_def var_loc_list;
3475
3476 /* Call argument location list. */
3477 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3478 rtx GTY (()) call_arg_loc_note;
3479 const char * GTY (()) label;
3480 tree GTY (()) block;
3481 bool tail_call_p;
3482 rtx GTY (()) symbol_ref;
3483 struct call_arg_loc_node * GTY (()) next;
3484 };
3485
3486
3487 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3488 {
3489 typedef const_tree compare_type;
3490
3491 static hashval_t hash (var_loc_list *);
3492 static bool equal (var_loc_list *, const_tree);
3493 };
3494
3495 /* Table of decl location linked lists. */
3496 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3497
3498 /* Head and tail of call_arg_loc chain. */
3499 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3500 static struct call_arg_loc_node *call_arg_loc_last;
3501
3502 /* Number of call sites in the current function. */
3503 static int call_site_count = -1;
3504 /* Number of tail call sites in the current function. */
3505 static int tail_call_site_count = -1;
3506
3507 /* A cached location list. */
3508 struct GTY ((for_user)) cached_dw_loc_list_def {
3509 /* The DECL_UID of the decl that this entry describes. */
3510 unsigned int decl_id;
3511
3512 /* The cached location list. */
3513 dw_loc_list_ref loc_list;
3514 };
3515 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3516
3517 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3518 {
3519
3520 typedef const_tree compare_type;
3521
3522 static hashval_t hash (cached_dw_loc_list *);
3523 static bool equal (cached_dw_loc_list *, const_tree);
3524 };
3525
3526 /* Table of cached location lists. */
3527 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3528
3529 /* A vector of references to DIE's that are uniquely identified by their tag,
3530 presence/absence of children DIE's, and list of attribute/value pairs. */
3531 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3532
3533 /* A hash map to remember the stack usage for DWARF procedures. The value
3534 stored is the stack size difference between before the DWARF procedure
3535 invocation and after it returned. In other words, for a DWARF procedure
3536 that consumes N stack slots and that pushes M ones, this stores M - N. */
3537 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
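/* For example (illustrative only, not part of the original source):
   a DWARF procedure that pops two stack entries and pushes a single
   result would be recorded in the map above as 1 - 2 == -1.  */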
3538
3539 /* A global counter for generating labels for line number data. */
3540 static unsigned int line_info_label_num;
3541
3542 /* The current table to which we should emit line number information
3543 for the current function. This will be set up at the beginning of
3544 assembly for the function. */
3545 static GTY(()) dw_line_info_table *cur_line_info_table;
3546
3547 /* The two default tables of line number info. */
3548 static GTY(()) dw_line_info_table *text_section_line_info;
3549 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3550
3551 /* The set of all non-default tables of line number info. */
3552 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3553
3554 /* A flag to tell pubnames/types export if there is an info section to
3555 refer to. */
3556 static bool info_section_emitted;
3557
3558 /* A pointer to the base of a table that contains a list of publicly
3559 accessible names. */
3560 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3561
3562 /* A pointer to the base of a table that contains a list of publicly
3563 accessible types. */
3564 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3565
3566 /* A pointer to the base of a table that contains a list of macro
3567 defines/undefines (and file start/end markers). */
3568 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3569
3570 /* True if .debug_macinfo or .debug_macros section is going to be
3571 emitted. */
3572 #define have_macinfo \
3573 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3574 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3575 && !macinfo_table->is_empty ())
3576
3577 /* Vector of dies for which we should generate .debug_ranges info. */
3578 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3579
3580 /* Vector of pairs of labels referenced in ranges_table. */
3581 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3582
3583 /* Whether we have location lists that need outputting. */
3584 static GTY(()) bool have_location_lists;
3585
3586 /* Unique label counter. */
3587 static GTY(()) unsigned int loclabel_num;
3588
3589 /* Unique label counter for point-of-call tables. */
3590 static GTY(()) unsigned int poc_label_num;
3591
3592 /* The last file entry emitted by maybe_emit_file(). */
3593 static GTY(()) struct dwarf_file_data * last_emitted_file;
3594
3595 /* Number of internal labels generated by gen_internal_sym(). */
3596 static GTY(()) int label_num;
3597
3598 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3599
3600 /* Instances of generic types for which we need to generate debug
3601 info describing their generic parameters and arguments. That
3602 generation needs to happen once all types are properly laid out, so
3603 we do it at the end of compilation. */
3604 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3605
3606 /* Offset from the "steady-state frame pointer" to the frame base,
3607 within the current function. */
3608 static poly_int64 frame_pointer_fb_offset;
3609 static bool frame_pointer_fb_offset_valid;
3610
3611 static vec<dw_die_ref> base_types;
3612
3613 /* Flags to represent a set of attribute classes for attributes that represent
3614 a scalar value (bounds, pointers, ...). */
3615 enum dw_scalar_form
3616 {
3617 dw_scalar_form_constant = 0x01,
3618 dw_scalar_form_exprloc = 0x02,
3619 dw_scalar_form_reference = 0x04
3620 };
3621
3622 /* Forward declarations for functions defined in this file. */
3623
3624 static int is_pseudo_reg (const_rtx);
3625 static tree type_main_variant (tree);
3626 static int is_tagged_type (const_tree);
3627 static const char *dwarf_tag_name (unsigned);
3628 static const char *dwarf_attr_name (unsigned);
3629 static const char *dwarf_form_name (unsigned);
3630 static tree decl_ultimate_origin (const_tree);
3631 static tree decl_class_context (tree);
3632 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3633 static inline enum dw_val_class AT_class (dw_attr_node *);
3634 static inline unsigned int AT_index (dw_attr_node *);
3635 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3636 static inline unsigned AT_flag (dw_attr_node *);
3637 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3638 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3639 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3640 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3641 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3642 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3643 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3644 unsigned int, unsigned char *);
3645 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3646 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3647 static inline const char *AT_string (dw_attr_node *);
3648 static enum dwarf_form AT_string_form (dw_attr_node *);
3649 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3650 static void add_AT_specification (dw_die_ref, dw_die_ref);
3651 static inline dw_die_ref AT_ref (dw_attr_node *);
3652 static inline int AT_ref_external (dw_attr_node *);
3653 static inline void set_AT_ref_external (dw_attr_node *, int);
3654 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3655 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3656 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3657 dw_loc_list_ref);
3658 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3659 static void add_AT_view_list (dw_die_ref, enum dwarf_attribute);
3661 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3662 static void remove_addr_table_entry (addr_table_entry *);
3663 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3664 static inline rtx AT_addr (dw_attr_node *);
3665 static void add_AT_symview (dw_die_ref, enum dwarf_attribute, const char *);
3666 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3667 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3668 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3669 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3670 unsigned long, bool);
3671 static inline const char *AT_lbl (dw_attr_node *);
3672 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3673 static const char *get_AT_low_pc (dw_die_ref);
3674 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3675 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3676 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3677 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3678 static bool is_c (void);
3679 static bool is_cxx (void);
3680 static bool is_cxx (const_tree);
3681 static bool is_fortran (void);
3682 static bool is_ada (void);
3683 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3684 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3685 static void add_child_die (dw_die_ref, dw_die_ref);
3686 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3687 static dw_die_ref lookup_type_die (tree);
3688 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3689 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3690 static void equate_type_number_to_die (tree, dw_die_ref);
3691 static dw_die_ref lookup_decl_die (tree);
3692 static var_loc_list *lookup_decl_loc (const_tree);
3693 static void equate_decl_number_to_die (tree, dw_die_ref);
3694 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view);
3695 static void print_spaces (FILE *);
3696 static void print_die (dw_die_ref, FILE *);
3697 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3698 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3699 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3700 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3701 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3702 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3703 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3704 struct md5_ctx *, int *);
3705 struct checksum_attributes;
3706 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3707 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3708 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3709 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3710 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3711 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3712 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3713 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3714 static int is_type_die (dw_die_ref);
3715 static inline bool is_template_instantiation (dw_die_ref);
3716 static int is_declaration_die (dw_die_ref);
3717 static int should_move_die_to_comdat (dw_die_ref);
3718 static dw_die_ref clone_as_declaration (dw_die_ref);
3719 static dw_die_ref clone_die (dw_die_ref);
3720 static dw_die_ref clone_tree (dw_die_ref);
3721 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3722 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3723 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3724 static dw_die_ref generate_skeleton (dw_die_ref);
3725 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3726 dw_die_ref,
3727 dw_die_ref);
3728 static void break_out_comdat_types (dw_die_ref);
3729 static void copy_decls_for_unworthy_types (dw_die_ref);
3730
3731 static void add_sibling_attributes (dw_die_ref);
3732 static void output_location_lists (dw_die_ref);
3733 static int constant_size (unsigned HOST_WIDE_INT);
3734 static unsigned long size_of_die (dw_die_ref);
3735 static void calc_die_sizes (dw_die_ref);
3736 static void calc_base_type_die_sizes (void);
3737 static void mark_dies (dw_die_ref);
3738 static void unmark_dies (dw_die_ref);
3739 static void unmark_all_dies (dw_die_ref);
3740 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3741 static unsigned long size_of_aranges (void);
3742 static enum dwarf_form value_format (dw_attr_node *);
3743 static void output_value_format (dw_attr_node *);
3744 static void output_abbrev_section (void);
3745 static void output_die_abbrevs (unsigned long, dw_die_ref);
3746 static void output_die (dw_die_ref);
3747 static void output_compilation_unit_header (enum dwarf_unit_type);
3748 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3749 static void output_comdat_type_unit (comdat_type_node *);
3750 static const char *dwarf2_name (tree, int);
3751 static void add_pubname (tree, dw_die_ref);
3752 static void add_enumerator_pubname (const char *, dw_die_ref);
3753 static void add_pubname_string (const char *, dw_die_ref);
3754 static void add_pubtype (tree, dw_die_ref);
3755 static void output_pubnames (vec<pubname_entry, va_gc> *);
3756 static void output_aranges (void);
3757 static unsigned int add_ranges (const_tree, bool = false);
3758 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3759 bool *, bool);
3760 static void output_ranges (void);
3761 static dw_line_info_table *new_line_info_table (void);
3762 static void output_line_info (bool);
3763 static void output_file_names (void);
3764 static dw_die_ref base_type_die (tree, bool);
3765 static int is_base_type (tree);
3766 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3767 static int decl_quals (const_tree);
3768 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3769 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3770 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3771 static unsigned int dbx_reg_number (const_rtx);
3772 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3773 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3774 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3775 enum var_init_status);
3776 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3777 enum var_init_status);
3778 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3779 enum var_init_status);
3780 static int is_based_loc (const_rtx);
3781 static bool resolve_one_addr (rtx *);
3782 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3783 enum var_init_status);
3784 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3785 enum var_init_status);
3786 struct loc_descr_context;
3787 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3788 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3789 static dw_loc_list_ref loc_list_from_tree (tree, int,
3790 struct loc_descr_context *);
3791 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3792 struct loc_descr_context *);
3793 static tree field_type (const_tree);
3794 static unsigned int simple_type_align_in_bits (const_tree);
3795 static unsigned int simple_decl_align_in_bits (const_tree);
3796 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3797 struct vlr_context;
3798 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3799 HOST_WIDE_INT *);
3800 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3801 dw_loc_list_ref);
3802 static void add_data_member_location_attribute (dw_die_ref, tree,
3803 struct vlr_context *);
3804 static bool add_const_value_attribute (dw_die_ref, rtx);
3805 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3806 static void insert_wide_int (const wide_int &, unsigned char *, int);
3807 static void insert_float (const_rtx, unsigned char *);
3808 static rtx rtl_for_decl_location (tree);
3809 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3810 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3811 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3812 static void add_name_attribute (dw_die_ref, const char *);
3813 static void add_desc_attribute (dw_die_ref, tree);
3814 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3815 static void add_comp_dir_attribute (dw_die_ref);
3816 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3817 struct loc_descr_context *);
3818 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3819 struct loc_descr_context *);
3820 static void add_subscript_info (dw_die_ref, tree, bool);
3821 static void add_byte_size_attribute (dw_die_ref, tree);
3822 static void add_alignment_attribute (dw_die_ref, tree);
3823 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3824 struct vlr_context *);
3825 static void add_bit_size_attribute (dw_die_ref, tree);
3826 static void add_prototyped_attribute (dw_die_ref, tree);
3827 static void add_abstract_origin_attribute (dw_die_ref, tree);
3828 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3829 static void add_src_coords_attributes (dw_die_ref, tree);
3830 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3831 static void add_discr_value (dw_die_ref, dw_discr_value *);
3832 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3833 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3834 static dw_die_ref scope_die_for (tree, dw_die_ref);
3835 static inline int local_scope_p (dw_die_ref);
3836 static inline int class_scope_p (dw_die_ref);
3837 static inline int class_or_namespace_scope_p (dw_die_ref);
3838 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3839 static void add_calling_convention_attribute (dw_die_ref, tree);
3840 static const char *type_tag (const_tree);
3841 static tree member_declared_type (const_tree);
3842 #if 0
3843 static const char *decl_start_label (tree);
3844 #endif
3845 static void gen_array_type_die (tree, dw_die_ref);
3846 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3847 #if 0
3848 static void gen_entry_point_die (tree, dw_die_ref);
3849 #endif
3850 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3851 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3852 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3853 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3854 static void gen_formal_types_die (tree, dw_die_ref);
3855 static void gen_subprogram_die (tree, dw_die_ref);
3856 static void gen_variable_die (tree, tree, dw_die_ref);
3857 static void gen_const_die (tree, dw_die_ref);
3858 static void gen_label_die (tree, dw_die_ref);
3859 static void gen_lexical_block_die (tree, dw_die_ref);
3860 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3861 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3862 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3863 static dw_die_ref gen_compile_unit_die (const char *);
3864 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3865 static void gen_member_die (tree, dw_die_ref);
3866 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3867 enum debug_info_usage);
3868 static void gen_subroutine_type_die (tree, dw_die_ref);
3869 static void gen_typedef_die (tree, dw_die_ref);
3870 static void gen_type_die (tree, dw_die_ref);
3871 static void gen_block_die (tree, dw_die_ref);
3872 static void decls_for_scope (tree, dw_die_ref, bool = true);
3873 static bool is_naming_typedef_decl (const_tree);
3874 static inline dw_die_ref get_context_die (tree);
3875 static void gen_namespace_die (tree, dw_die_ref);
3876 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3877 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3878 static dw_die_ref force_decl_die (tree);
3879 static dw_die_ref force_type_die (tree);
3880 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3881 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3882 static struct dwarf_file_data * lookup_filename (const char *);
3883 static void retry_incomplete_types (void);
3884 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3885 static void gen_generic_params_dies (tree);
3886 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3887 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3888 static void splice_child_die (dw_die_ref, dw_die_ref);
3889 static int file_info_cmp (const void *, const void *);
3890 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view,
3891 const char *, var_loc_view, const char *);
3892 static void output_loc_list (dw_loc_list_ref);
3893 static char *gen_internal_sym (const char *);
3894 static bool want_pubnames (void);
3895
3896 static void prune_unmark_dies (dw_die_ref);
3897 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3898 static void prune_unused_types_mark (dw_die_ref, int);
3899 static void prune_unused_types_walk (dw_die_ref);
3900 static void prune_unused_types_walk_attribs (dw_die_ref);
3901 static void prune_unused_types_prune (dw_die_ref);
3902 static void prune_unused_types (void);
3903 static int maybe_emit_file (struct dwarf_file_data *fd);
3904 static inline const char *AT_vms_delta1 (dw_attr_node *);
3905 static inline const char *AT_vms_delta2 (dw_attr_node *);
3906 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3907 static void gen_remaining_tmpl_value_param_die_attribute (void);
3908 static bool generic_type_p (tree);
3909 static void schedule_generic_params_dies_gen (tree t);
3910 static void gen_scheduled_generic_parms_dies (void);
3911 static void resolve_variable_values (void);
3912
3913 static const char *comp_dir_string (void);
3914
3915 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3916
3917 /* enum for tracking thread-local variables whose address is really an offset
3918 relative to the TLS pointer, which will need link-time relocation, but will
3919 not need relocation by the DWARF consumer. */
3920
3921 enum dtprel_bool
3922 {
3923 dtprel_false = 0,
3924 dtprel_true = 1
3925 };
3926
3927 /* Return the operator to use for an address of a variable. For dtprel_true, we
3928 use DW_OP_const*. For regular variables, which need both link-time
3929 relocation and consumer-level relocation (e.g., to account for shared objects
3930 loaded at a random address), we use DW_OP_addr*. */
3931
3932 static inline enum dwarf_location_atom
3933 dw_addr_op (enum dtprel_bool dtprel)
3934 {
3935 if (dtprel == dtprel_true)
3936 return (dwarf_split_debug_info ? dwarf_OP (DW_OP_constx)
3937 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3938 else
3939 return dwarf_split_debug_info ? dwarf_OP (DW_OP_addrx) : DW_OP_addr;
3940 }
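/* Illustrative summary of the selection above (hedged sketch, not part
   of the original source): on a target with 8-byte addresses and no
   split debug info,

     dw_addr_op (dtprel_false) => DW_OP_addr
     dw_addr_op (dtprel_true)  => DW_OP_const8u

   whereas with dwarf_split_debug_info the indexed operators
   DW_OP_addrx and DW_OP_constx are chosen instead.  */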
3941
3942 /* Return a pointer to a newly allocated address location description. If
3943 dwarf_split_debug_info is true, then record the address with the appropriate
3944 relocation. */
3945 static inline dw_loc_descr_ref
3946 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3947 {
3948 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3949
3950 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3951 ref->dw_loc_oprnd1.v.val_addr = addr;
3952 ref->dtprel = dtprel;
3953 if (dwarf_split_debug_info)
3954 ref->dw_loc_oprnd1.val_entry
3955 = add_addr_table_entry (addr,
3956 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3957 else
3958 ref->dw_loc_oprnd1.val_entry = NULL;
3959
3960 return ref;
3961 }
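/* A hedged usage sketch (the variable name sym is hypothetical, not
   from the original source): to describe a non-TLS variable whose
   address is the SYMBOL_REF rtl SYM, a caller could build

     dw_loc_descr_ref descr = new_addr_loc_descr (sym, dtprel_false);

   which yields a single DW_OP_addr (or indexed DW_OP_addrx) operation
   whose operand is SYM, with an address table entry recorded when
   dwarf_split_debug_info is in effect.  */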
3962
3963 /* Section names used to hold DWARF debugging information. */
3964
3965 #ifndef DEBUG_INFO_SECTION
3966 #define DEBUG_INFO_SECTION ".debug_info"
3967 #endif
3968 #ifndef DEBUG_DWO_INFO_SECTION
3969 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3970 #endif
3971 #ifndef DEBUG_LTO_INFO_SECTION
3972 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3973 #endif
3974 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3975 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3976 #endif
3977 #ifndef DEBUG_ABBREV_SECTION
3978 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3979 #endif
3980 #ifndef DEBUG_LTO_ABBREV_SECTION
3981 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
3982 #endif
3983 #ifndef DEBUG_DWO_ABBREV_SECTION
3984 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3985 #endif
3986 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
3987 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
3988 #endif
3989 #ifndef DEBUG_ARANGES_SECTION
3990 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3991 #endif
3992 #ifndef DEBUG_ADDR_SECTION
3993 #define DEBUG_ADDR_SECTION ".debug_addr"
3994 #endif
3995 #ifndef DEBUG_MACINFO_SECTION
3996 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
3997 #endif
3998 #ifndef DEBUG_LTO_MACINFO_SECTION
3999 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
4000 #endif
4001 #ifndef DEBUG_DWO_MACINFO_SECTION
4002 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
4003 #endif
4004 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
4005 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
4006 #endif
4007 #ifndef DEBUG_MACRO_SECTION
4008 #define DEBUG_MACRO_SECTION ".debug_macro"
4009 #endif
4010 #ifndef DEBUG_LTO_MACRO_SECTION
4011 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
4012 #endif
4013 #ifndef DEBUG_DWO_MACRO_SECTION
4014 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
4015 #endif
4016 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
4017 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
4018 #endif
4019 #ifndef DEBUG_LINE_SECTION
4020 #define DEBUG_LINE_SECTION ".debug_line"
4021 #endif
4022 #ifndef DEBUG_LTO_LINE_SECTION
4023 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
4024 #endif
4025 #ifndef DEBUG_DWO_LINE_SECTION
4026 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
4027 #endif
4028 #ifndef DEBUG_LTO_DWO_LINE_SECTION
4029 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
4030 #endif
4031 #ifndef DEBUG_LOC_SECTION
4032 #define DEBUG_LOC_SECTION ".debug_loc"
4033 #endif
4034 #ifndef DEBUG_DWO_LOC_SECTION
4035 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
4036 #endif
4037 #ifndef DEBUG_LOCLISTS_SECTION
4038 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
4039 #endif
4040 #ifndef DEBUG_DWO_LOCLISTS_SECTION
4041 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
4042 #endif
4043 #ifndef DEBUG_PUBNAMES_SECTION
4044 #define DEBUG_PUBNAMES_SECTION \
4045 ((debug_generate_pub_sections == 2) \
4046 ? ".debug_gnu_pubnames" : ".debug_pubnames")
4047 #endif
4048 #ifndef DEBUG_PUBTYPES_SECTION
4049 #define DEBUG_PUBTYPES_SECTION \
4050 ((debug_generate_pub_sections == 2) \
4051 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
4052 #endif
4053 #ifndef DEBUG_STR_OFFSETS_SECTION
4054 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
4055 #endif
4056 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
4057 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
4058 #endif
4059 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
4060 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
4061 #endif
4062 #ifndef DEBUG_STR_SECTION
4063 #define DEBUG_STR_SECTION ".debug_str"
4064 #endif
4065 #ifndef DEBUG_LTO_STR_SECTION
4066 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
4067 #endif
4068 #ifndef DEBUG_STR_DWO_SECTION
4069 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
4070 #endif
4071 #ifndef DEBUG_LTO_STR_DWO_SECTION
4072 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
4073 #endif
4074 #ifndef DEBUG_RANGES_SECTION
4075 #define DEBUG_RANGES_SECTION ".debug_ranges"
4076 #endif
4077 #ifndef DEBUG_RNGLISTS_SECTION
4078 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
4079 #endif
4080 #ifndef DEBUG_LINE_STR_SECTION
4081 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
4082 #endif
4083 #ifndef DEBUG_LTO_LINE_STR_SECTION
4084 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
4085 #endif
4086
4087 /* Standard ELF section names for compiled code and data. */
4088 #ifndef TEXT_SECTION_NAME
4089 #define TEXT_SECTION_NAME ".text"
4090 #endif
4091
4092 /* Section flags for .debug_str section. */
4093 #define DEBUG_STR_SECTION_FLAGS \
4094 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
4095 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
4096 : SECTION_DEBUG)
4097
4098 /* Section flags for .debug_str.dwo section. */
4099 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
4100
4101 /* Attribute used to refer to the macro section. */
4102 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
4103 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
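/* For illustration (derived from the definition above): with -gdwarf-5
   this selects DW_AT_macros; for earlier DWARF versions it selects
   DW_AT_macro_info under -gstrict-dwarf and the DW_AT_GNU_macros
   extension otherwise.  */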
4104
4105 /* Labels we insert at the beginning of sections, which we can
4106 reference instead of the section names themselves. */
4107
4108 #ifndef TEXT_SECTION_LABEL
4109 #define TEXT_SECTION_LABEL "Ltext"
4110 #endif
4111 #ifndef COLD_TEXT_SECTION_LABEL
4112 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
4113 #endif
4114 #ifndef DEBUG_LINE_SECTION_LABEL
4115 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
4116 #endif
4117 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
4118 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
4119 #endif
4120 #ifndef DEBUG_INFO_SECTION_LABEL
4121 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
4122 #endif
4123 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
4124 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
4125 #endif
4126 #ifndef DEBUG_ABBREV_SECTION_LABEL
4127 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
4128 #endif
4129 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
4130 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
4131 #endif
4132 #ifndef DEBUG_ADDR_SECTION_LABEL
4133 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
4134 #endif
4135 #ifndef DEBUG_LOC_SECTION_LABEL
4136 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
4137 #endif
4138 #ifndef DEBUG_RANGES_SECTION_LABEL
4139 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
4140 #endif
4141 #ifndef DEBUG_MACINFO_SECTION_LABEL
4142 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
4143 #endif
4144 #ifndef DEBUG_MACRO_SECTION_LABEL
4145 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
4146 #endif
4147 #define SKELETON_COMP_DIE_ABBREV 1
4148 #define SKELETON_TYPE_DIE_ABBREV 2
4149
4150 /* Definitions of defaults for formats and names of various special
4151 (artificial) labels which may be generated within this file (when the -g
4152 option is used and DWARF2_DEBUGGING_INFO is in effect).
4153 If necessary, these may be overridden from within the tm.h file, but
4154 typically, overriding these defaults is unnecessary. */
4155
4156 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4157 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4158 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4159 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4160 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4161 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4162 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4163 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4164 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4165 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4166 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4167 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4168 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4169 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4170 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4171
4172 #ifndef TEXT_END_LABEL
4173 #define TEXT_END_LABEL "Letext"
4174 #endif
4175 #ifndef COLD_END_LABEL
4176 #define COLD_END_LABEL "Letext_cold"
4177 #endif
4178 #ifndef BLOCK_BEGIN_LABEL
4179 #define BLOCK_BEGIN_LABEL "LBB"
4180 #endif
4181 #ifndef BLOCK_INLINE_ENTRY_LABEL
4182 #define BLOCK_INLINE_ENTRY_LABEL "LBI"
4183 #endif
4184 #ifndef BLOCK_END_LABEL
4185 #define BLOCK_END_LABEL "LBE"
4186 #endif
4187 #ifndef LINE_CODE_LABEL
4188 #define LINE_CODE_LABEL "LM"
4189 #endif
4190
4191 \f
4192 /* Return the root of the DIE's built for the current compilation unit. */
4193 static dw_die_ref
4194 comp_unit_die (void)
4195 {
4196 if (!single_comp_unit_die)
4197 single_comp_unit_die = gen_compile_unit_die (NULL);
4198 return single_comp_unit_die;
4199 }
4200
4201 /* We allow a language front-end to designate a function that is to be
4202 called to "demangle" any name before it is put into a DIE. */
4203
4204 static const char *(*demangle_name_func) (const char *);
4205
4206 void
4207 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
4208 {
4209 demangle_name_func = func;
4210 }
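/* A hedged usage sketch (the name my_lang_demangle is hypothetical,
   not from the original source): a front end could install its own
   demangler during initialization with

     static const char *
     my_lang_demangle (const char *name)
     {
       return name;  // or a language-specific pretty form of NAME
     }

     dwarf2out_set_demangle_name_func (my_lang_demangle);
*/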
4211
4212 /* Test if rtl node points to a pseudo register. */
4213
4214 static inline int
4215 is_pseudo_reg (const_rtx rtl)
4216 {
4217 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
4218 || (GET_CODE (rtl) == SUBREG
4219 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
4220 }
4221
4222 /* Return a reference to a type, with its const and volatile qualifiers
4223 removed. */
4224
4225 static inline tree
4226 type_main_variant (tree type)
4227 {
4228 type = TYPE_MAIN_VARIANT (type);
4229
4230 /* ??? There really should be only one main variant among any group of
4231 variants of a given type (and all of the MAIN_VARIANT values for all
4232 members of the group should point to that one type) but sometimes the C
4233 front-end messes this up for array types, so we work around that bug
4234 here. */
4235 if (TREE_CODE (type) == ARRAY_TYPE)
4236 while (type != TYPE_MAIN_VARIANT (type))
4237 type = TYPE_MAIN_VARIANT (type);
4238
4239 return type;
4240 }
4241
4242 /* Return nonzero if the given type node represents a tagged type. */
4243
4244 static inline int
4245 is_tagged_type (const_tree type)
4246 {
4247 enum tree_code code = TREE_CODE (type);
4248
4249 return (code == RECORD_TYPE || code == UNION_TYPE
4250 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4251 }
4252
4253 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
4254
4255 static void
4256 get_ref_die_offset_label (char *label, dw_die_ref ref)
4257 {
4258 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4259 }
4260
4261 /* Return die_offset of a DIE reference to a base type. */
4262
4263 static unsigned long int
4264 get_base_type_offset (dw_die_ref ref)
4265 {
4266 if (ref->die_offset)
4267 return ref->die_offset;
4268 if (comp_unit_die ()->die_abbrev)
4269 {
4270 calc_base_type_die_sizes ();
4271 gcc_assert (ref->die_offset);
4272 }
4273 return ref->die_offset;
4274 }
4275
4276 /* Return die_offset of a DIE reference other than base type. */
4277
4278 static unsigned long int
4279 get_ref_die_offset (dw_die_ref ref)
4280 {
4281 gcc_assert (ref->die_offset);
4282 return ref->die_offset;
4283 }
4284
4285 /* Convert a DIE tag into its string name. */
4286
4287 static const char *
4288 dwarf_tag_name (unsigned int tag)
4289 {
4290 const char *name = get_DW_TAG_name (tag);
4291
4292 if (name != NULL)
4293 return name;
4294
4295 return "DW_TAG_<unknown>";
4296 }
4297
4298 /* Convert a DWARF attribute code into its string name. */
4299
4300 static const char *
4301 dwarf_attr_name (unsigned int attr)
4302 {
4303 const char *name;
4304
4305 switch (attr)
4306 {
4307 #if VMS_DEBUGGING_INFO
4308 case DW_AT_HP_prologue:
4309 return "DW_AT_HP_prologue";
4310 #else
4311 case DW_AT_MIPS_loop_unroll_factor:
4312 return "DW_AT_MIPS_loop_unroll_factor";
4313 #endif
4314
4315 #if VMS_DEBUGGING_INFO
4316 case DW_AT_HP_epilogue:
4317 return "DW_AT_HP_epilogue";
4318 #else
4319 case DW_AT_MIPS_stride:
4320 return "DW_AT_MIPS_stride";
4321 #endif
4322 }
4323
4324 name = get_DW_AT_name (attr);
4325
4326 if (name != NULL)
4327 return name;
4328
4329 return "DW_AT_<unknown>";
4330 }
4331
4332 /* Convert a DWARF value form code into its string name. */
4333
4334 static const char *
4335 dwarf_form_name (unsigned int form)
4336 {
4337 const char *name = get_DW_FORM_name (form);
4338
4339 if (name != NULL)
4340 return name;
4341
4342 return "DW_FORM_<unknown>";
4343 }
4344 \f
4345 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4346 instance of an inlined instance of a decl which is local to an inline
4347 function, so we have to trace all of the way back through the origin chain
4348 to find out what sort of node actually served as the original seed for the
4349 given block. */
4350
4351 static tree
4352 decl_ultimate_origin (const_tree decl)
4353 {
4354 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4355 return NULL_TREE;
4356
4357 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4358 we're trying to output the abstract instance of this function. */
4359 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4360 return NULL_TREE;
4361
4362 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4363 most distant ancestor, this should never happen. */
4364 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4365
4366 return DECL_ABSTRACT_ORIGIN (decl);
4367 }
4368
4369 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4370 of a virtual function may refer to a base class, so we check the 'this'
4371 parameter. */
4372
4373 static tree
4374 decl_class_context (tree decl)
4375 {
4376 tree context = NULL_TREE;
4377
4378 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4379 context = DECL_CONTEXT (decl);
4380 else
4381 context = TYPE_MAIN_VARIANT
4382 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4383
4384 if (context && !TYPE_P (context))
4385 context = NULL_TREE;
4386
4387 return context;
4388 }
4389 \f
4390 /* Add an attribute/value pair to a DIE. */
4391
4392 static inline void
4393 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4394 {
4395 /* Maybe this should be an assert? */
4396 if (die == NULL)
4397 return;
4398
4399 if (flag_checking)
4400 {
4401 /* Check we do not add duplicate attrs. Can't use get_AT here
4402 because that recurses to the specification/abstract origin DIE. */
4403 dw_attr_node *a;
4404 unsigned ix;
4405 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4406 gcc_assert (a->dw_attr != attr->dw_attr);
4407 }
4408
4409 vec_safe_reserve (die->die_attr, 1);
4410 vec_safe_push (die->die_attr, *attr);
4411 }
4412
4413 static inline enum dw_val_class
4414 AT_class (dw_attr_node *a)
4415 {
4416 return a->dw_attr_val.val_class;
4417 }
4418
4419 /* Return the index for any attribute that will be referenced with a
4420 DW_FORM_addrx/GNU_addr_index or DW_FORM_strx/GNU_str_index. String
4421 indices are stored in dw_attr_val.v.val_str for reference counting
4422 pruning. */
4423
4424 static inline unsigned int
4425 AT_index (dw_attr_node *a)
4426 {
4427 if (AT_class (a) == dw_val_class_str)
4428 return a->dw_attr_val.v.val_str->index;
4429 else if (a->dw_attr_val.val_entry != NULL)
4430 return a->dw_attr_val.val_entry->index;
4431 return NOT_INDEXED;
4432 }
4433
4434 /* Add a flag value attribute to a DIE. */
4435
4436 static inline void
4437 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4438 {
4439 dw_attr_node attr;
4440
4441 attr.dw_attr = attr_kind;
4442 attr.dw_attr_val.val_class = dw_val_class_flag;
4443 attr.dw_attr_val.val_entry = NULL;
4444 attr.dw_attr_val.v.val_flag = flag;
4445 add_dwarf_attr (die, &attr);
4446 }
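/* Hedged example (illustrative only): marking a DIE as externally
   visible would look like

     add_AT_flag (die, DW_AT_external, 1);

   which stores the flag as a dw_val_class_flag value on DIE.  */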
4447
4448 static inline unsigned
4449 AT_flag (dw_attr_node *a)
4450 {
4451 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4452 return a->dw_attr_val.v.val_flag;
4453 }
4454
4455 /* Add a signed integer attribute value to a DIE. */
4456
4457 static inline void
4458 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4459 {
4460 dw_attr_node attr;
4461
4462 attr.dw_attr = attr_kind;
4463 attr.dw_attr_val.val_class = dw_val_class_const;
4464 attr.dw_attr_val.val_entry = NULL;
4465 attr.dw_attr_val.v.val_int = int_val;
4466 add_dwarf_attr (die, &attr);
4467 }
4468
4469 static inline HOST_WIDE_INT
4470 AT_int (dw_attr_node *a)
4471 {
4472 gcc_assert (a && (AT_class (a) == dw_val_class_const
4473 || AT_class (a) == dw_val_class_const_implicit));
4474 return a->dw_attr_val.v.val_int;
4475 }
4476
4477 /* Add an unsigned integer attribute value to a DIE. */
4478
4479 static inline void
4480 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4481 unsigned HOST_WIDE_INT unsigned_val)
4482 {
4483 dw_attr_node attr;
4484
4485 attr.dw_attr = attr_kind;
4486 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4487 attr.dw_attr_val.val_entry = NULL;
4488 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4489 add_dwarf_attr (die, &attr);
4490 }
4491
4492 static inline unsigned HOST_WIDE_INT
4493 AT_unsigned (dw_attr_node *a)
4494 {
4495 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4496 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4497 return a->dw_attr_val.v.val_unsigned;
4498 }
4499
4500 /* Add an unsigned wide integer attribute value to a DIE. */
4501
4502 static inline void
4503 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4504 const wide_int& w)
4505 {
4506 dw_attr_node attr;
4507
4508 attr.dw_attr = attr_kind;
4509 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4510 attr.dw_attr_val.val_entry = NULL;
4511 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4512 *attr.dw_attr_val.v.val_wide = w;
4513 add_dwarf_attr (die, &attr);
4514 }
4515
4516 /* Add an unsigned double integer attribute value to a DIE. */
4517
4518 static inline void
4519 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4520 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4521 {
4522 dw_attr_node attr;
4523
4524 attr.dw_attr = attr_kind;
4525 attr.dw_attr_val.val_class = dw_val_class_const_double;
4526 attr.dw_attr_val.val_entry = NULL;
4527 attr.dw_attr_val.v.val_double.high = high;
4528 attr.dw_attr_val.v.val_double.low = low;
4529 add_dwarf_attr (die, &attr);
4530 }
4531
4532 /* Add a vector of data (e.g. a floating-point constant) as an attribute value to a DIE. */
4533
4534 static inline void
4535 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4536 unsigned int length, unsigned int elt_size, unsigned char *array)
4537 {
4538 dw_attr_node attr;
4539
4540 attr.dw_attr = attr_kind;
4541 attr.dw_attr_val.val_class = dw_val_class_vec;
4542 attr.dw_attr_val.val_entry = NULL;
4543 attr.dw_attr_val.v.val_vec.length = length;
4544 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4545 attr.dw_attr_val.v.val_vec.array = array;
4546 add_dwarf_attr (die, &attr);
4547 }
4548
4549 /* Add an 8-byte data attribute value to a DIE. */
4550
4551 static inline void
4552 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4553 unsigned char data8[8])
4554 {
4555 dw_attr_node attr;
4556
4557 attr.dw_attr = attr_kind;
4558 attr.dw_attr_val.val_class = dw_val_class_data8;
4559 attr.dw_attr_val.val_entry = NULL;
4560 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4561 add_dwarf_attr (die, &attr);
4562 }
4563
4564 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4565 dwarf_split_debug_info, address attributes in dies destined for the
4566 final executable have force_direct set to avoid using indexed
4567 references. */
4568
4569 static inline void
4570 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4571 bool force_direct)
4572 {
4573 dw_attr_node attr;
4574 char * lbl_id;
4575
4576 lbl_id = xstrdup (lbl_low);
4577 attr.dw_attr = DW_AT_low_pc;
4578 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4579 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4580 if (dwarf_split_debug_info && !force_direct)
4581 attr.dw_attr_val.val_entry
4582 = add_addr_table_entry (lbl_id, ate_kind_label);
4583 else
4584 attr.dw_attr_val.val_entry = NULL;
4585 add_dwarf_attr (die, &attr);
4586
4587 attr.dw_attr = DW_AT_high_pc;
4588 if (dwarf_version < 4)
4589 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4590 else
4591 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4592 lbl_id = xstrdup (lbl_high);
4593 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4594 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4595 && dwarf_split_debug_info && !force_direct)
4596 attr.dw_attr_val.val_entry
4597 = add_addr_table_entry (lbl_id, ate_kind_label);
4598 else
4599 attr.dw_attr_val.val_entry = NULL;
4600 add_dwarf_attr (die, &attr);
4601 }
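/* Hedged usage sketch (label names are illustrative, not from the
   original source): a subprogram DIE whose code is bounded by the
   labels LBEGIN and LEND would get its range via

     add_AT_low_high_pc (subr_die, lbegin, lend, false);

   For DWARF 4 and later the high-pc attribute is classed as
   dw_val_class_high_pc (typically emitted as an offset from low pc);
   for older versions both ends are plain label ids.  */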
4602
4603 /* Hash and equality functions for debug_str_hash. */
4604
4605 hashval_t
4606 indirect_string_hasher::hash (indirect_string_node *x)
4607 {
4608 return htab_hash_string (x->str);
4609 }
4610
4611 bool
4612 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4613 {
4614 return strcmp (x1->str, x2) == 0;
4615 }
4616
4617 /* Find or add STR in the given string hash table and bump its reference count. */
4618
4619 static struct indirect_string_node *
4620 find_AT_string_in_table (const char *str,
4621 hash_table<indirect_string_hasher> *table,
4622 enum insert_option insert = INSERT)
4623 {
4624 struct indirect_string_node *node;
4625
4626 indirect_string_node **slot
4627 = table->find_slot_with_hash (str, htab_hash_string (str), insert);
4628 if (*slot == NULL)
4629 {
4630 node = ggc_cleared_alloc<indirect_string_node> ();
4631 node->str = ggc_strdup (str);
4632 *slot = node;
4633 }
4634 else
4635 node = *slot;
4636
4637 node->refcount++;
4638 return node;
4639 }
4640
4641 /* Add STR to the indirect string hash table. */
4642
4643 static struct indirect_string_node *
4644 find_AT_string (const char *str, enum insert_option insert = INSERT)
4645 {
4646 if (! debug_str_hash)
4647 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4648
4649 return find_AT_string_in_table (str, debug_str_hash, insert);
4650 }
4651
4652 /* Add a string attribute value to a DIE. */
4653
4654 static inline void
4655 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4656 {
4657 dw_attr_node attr;
4658 struct indirect_string_node *node;
4659
4660 node = find_AT_string (str);
4661
4662 attr.dw_attr = attr_kind;
4663 attr.dw_attr_val.val_class = dw_val_class_str;
4664 attr.dw_attr_val.val_entry = NULL;
4665 attr.dw_attr_val.v.val_str = node;
4666 add_dwarf_attr (die, &attr);
4667 }
4668
4669 static inline const char *
4670 AT_string (dw_attr_node *a)
4671 {
4672 gcc_assert (a && AT_class (a) == dw_val_class_str);
4673 return a->dw_attr_val.v.val_str->str;
4674 }
4675
4676 /* Call this function directly to bypass AT_string_form's logic to put
4677 the string inline in the die. */
4678
4679 static void
4680 set_indirect_string (struct indirect_string_node *node)
4681 {
4682 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4683 /* If the string is already indirect, this is a no-op. */
4684 if (node->form == DW_FORM_strp
4685 || node->form == DW_FORM_line_strp
4686 || node->form == dwarf_FORM (DW_FORM_strx))
4687 {
4688 gcc_assert (node->label);
4689 return;
4690 }
4691 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4692 ++dw2_string_counter;
4693 node->label = xstrdup (label);
4694
4695 if (!dwarf_split_debug_info)
4696 {
4697 node->form = DW_FORM_strp;
4698 node->index = NOT_INDEXED;
4699 }
4700 else
4701 {
4702 node->form = dwarf_FORM (DW_FORM_strx);
4703 node->index = NO_INDEX_ASSIGNED;
4704 }
4705 }
4706
4707 /* A helper function for dwarf2out_finish, called to reset indirect
4708 string decisions done for early LTO dwarf output before fat object
4709 dwarf output. */
4710
4711 int
4712 reset_indirect_string (indirect_string_node **h, void *)
4713 {
4714 struct indirect_string_node *node = *h;
4715 if (node->form == DW_FORM_strp || node->form == dwarf_FORM (DW_FORM_strx))
4716 {
4717 free (node->label);
4718 node->label = NULL;
4719 node->form = (dwarf_form) 0;
4720 node->index = 0;
4721 }
4722 return 1;
4723 }
4724
4725 /* Find out whether a string should be output inline in DIE
4726 or out-of-line in .debug_str section. */
4727
4728 static enum dwarf_form
4729 find_string_form (struct indirect_string_node *node)
4730 {
4731 unsigned int len;
4732
4733 if (node->form)
4734 return node->form;
4735
4736 len = strlen (node->str) + 1;
4737
4738 /* If the string is no longer than the size of the reference, it is
4739 always better to put it inline. */
4740 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4741 return node->form = DW_FORM_string;
4742
4743 /* If we cannot expect the linker to merge strings in .debug_str
4744 section, only put it into .debug_str if it is worth even in this
4745 single module. */
4746 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4747 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4748 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4749 return node->form = DW_FORM_string;
4750
4751 set_indirect_string (node);
4752
4753 return node->form;
4754 }
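/* Worked example of the heuristic above (illustrative only): with
   4-byte DWARF offsets, the string "abc" (len == 4) always stays
   inline as DW_FORM_string.  A 20-byte string in a module whose
   .debug_str section cannot be merged stays inline while it is
   referenced only once, since (20 - 4) * 1 <= 20, but moves to
   .debug_str (DW_FORM_strp, or DW_FORM_strx with split debug info)
   once it is referenced twice, since (20 - 4) * 2 > 20.  */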
4755
4756 /* Find out whether the string referenced from the attribute should be
4757 output inline in DIE or out-of-line in .debug_str section. */
4758
4759 static enum dwarf_form
4760 AT_string_form (dw_attr_node *a)
4761 {
4762 gcc_assert (a && AT_class (a) == dw_val_class_str);
4763 return find_string_form (a->dw_attr_val.v.val_str);
4764 }
4765
4766 /* Add a DIE reference attribute value to a DIE. */
4767
4768 static inline void
4769 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4770 {
4771 dw_attr_node attr;
4772 gcc_checking_assert (targ_die != NULL);
4773
4774 /* With LTO we can end up trying to reference something we didn't create
4775 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4776 if (targ_die == NULL)
4777 return;
4778
4779 attr.dw_attr = attr_kind;
4780 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4781 attr.dw_attr_val.val_entry = NULL;
4782 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4783 attr.dw_attr_val.v.val_die_ref.external = 0;
4784 add_dwarf_attr (die, &attr);
4785 }
4786
4787 /* Change DIE reference REF to point to NEW_DIE instead. */
4788
4789 static inline void
4790 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4791 {
4792 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4793 ref->dw_attr_val.v.val_die_ref.die = new_die;
4794 ref->dw_attr_val.v.val_die_ref.external = 0;
4795 }
4796
4797 /* Add an AT_specification attribute to a DIE, and also make the back
4798 pointer from the specification to the definition. */
4799
4800 static inline void
4801 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4802 {
4803 add_AT_die_ref (die, DW_AT_specification, targ_die);
4804 gcc_assert (!targ_die->die_definition);
4805 targ_die->die_definition = die;
4806 }
4807
4808 static inline dw_die_ref
4809 AT_ref (dw_attr_node *a)
4810 {
4811 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4812 return a->dw_attr_val.v.val_die_ref.die;
4813 }
4814
4815 static inline int
4816 AT_ref_external (dw_attr_node *a)
4817 {
4818 if (a && AT_class (a) == dw_val_class_die_ref)
4819 return a->dw_attr_val.v.val_die_ref.external;
4820
4821 return 0;
4822 }
4823
4824 static inline void
4825 set_AT_ref_external (dw_attr_node *a, int i)
4826 {
4827 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4828 a->dw_attr_val.v.val_die_ref.external = i;
4829 }
4830
4831 /* Add a location description attribute value to a DIE. */
4832
4833 static inline void
4834 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4835 {
4836 dw_attr_node attr;
4837
4838 attr.dw_attr = attr_kind;
4839 attr.dw_attr_val.val_class = dw_val_class_loc;
4840 attr.dw_attr_val.val_entry = NULL;
4841 attr.dw_attr_val.v.val_loc = loc;
4842 add_dwarf_attr (die, &attr);
4843 }
4844
4845 static inline dw_loc_descr_ref
4846 AT_loc (dw_attr_node *a)
4847 {
4848 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4849 return a->dw_attr_val.v.val_loc;
4850 }
4851
4852 static inline void
4853 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4854 {
4855 dw_attr_node attr;
4856
4857 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4858 return;
4859
4860 attr.dw_attr = attr_kind;
4861 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4862 attr.dw_attr_val.val_entry = NULL;
4863 attr.dw_attr_val.v.val_loc_list = loc_list;
4864 add_dwarf_attr (die, &attr);
4865 have_location_lists = true;
4866 }
4867
4868 static inline dw_loc_list_ref
4869 AT_loc_list (dw_attr_node *a)
4870 {
4871 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4872 return a->dw_attr_val.v.val_loc_list;
4873 }
4874
4875 /* Add a view list attribute to DIE. It must have a DW_AT_location
4876 attribute, because the view list complements the location list. */
4877
4878 static inline void
4879 add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
4880 {
4881 dw_attr_node attr;
4882
4883 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4884 return;
4885
4886 attr.dw_attr = attr_kind;
4887 attr.dw_attr_val.val_class = dw_val_class_view_list;
4888 attr.dw_attr_val.val_entry = NULL;
4889 attr.dw_attr_val.v.val_view_list = die;
4890 add_dwarf_attr (die, &attr);
4891 gcc_checking_assert (get_AT (die, DW_AT_location));
4892 gcc_assert (have_location_lists);
4893 }
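/* Hedged sketch of the intended pairing (the attribute name follows
   the GNU extension; illustrative only, not from the original source):
   a caller adds the location list first and the view list immediately
   afterwards, e.g.

     add_AT_loc_list (die, DW_AT_location, loc_list);
     add_AT_view_list (die, DW_AT_GNU_locviews);

   so the view-list attribute ends up adjacent to the DW_AT_location
   attribute in the DIE's attribute vector, an invariant that
   AT_loc_list_ptr and view_list_to_loc_list_val_node below check.  */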
4894
4895 /* Return a pointer to the location list referenced by the attribute.
4896 If the named attribute is a view list, look up the corresponding
4897 DW_AT_location attribute and return its location list. */
4898
4899 static inline dw_loc_list_ref *
4900 AT_loc_list_ptr (dw_attr_node *a)
4901 {
4902 gcc_assert (a);
4903 switch (AT_class (a))
4904 {
4905 case dw_val_class_loc_list:
4906 return &a->dw_attr_val.v.val_loc_list;
4907 case dw_val_class_view_list:
4908 {
4909 dw_attr_node *l;
4910 l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
4911 if (!l)
4912 return NULL;
4913 gcc_checking_assert (l + 1 == a);
4914 return AT_loc_list_ptr (l);
4915 }
4916 default:
4917 gcc_unreachable ();
4918 }
4919 }
4920
4921 /* Return the location attribute value associated with a view list
4922 attribute value. */
4923
4924 static inline dw_val_node *
4925 view_list_to_loc_list_val_node (dw_val_node *val)
4926 {
4927 gcc_assert (val->val_class == dw_val_class_view_list);
4928 dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
4929 if (!loc)
4930 return NULL;
4931 gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
4932 gcc_assert (AT_class (loc) == dw_val_class_loc_list);
4933 return &loc->dw_attr_val;
4934 }
4935
4936 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4937 {
4938 static hashval_t hash (addr_table_entry *);
4939 static bool equal (addr_table_entry *, addr_table_entry *);
4940 };
4941
4942 /* Table of entries into the .debug_addr section. */
4943
4944 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4945
4946 /* Hash an address_table_entry. */
4947
4948 hashval_t
4949 addr_hasher::hash (addr_table_entry *a)
4950 {
4951 inchash::hash hstate;
4952 switch (a->kind)
4953 {
4954 case ate_kind_rtx:
4955 hstate.add_int (0);
4956 break;
4957 case ate_kind_rtx_dtprel:
4958 hstate.add_int (1);
4959 break;
4960 case ate_kind_label:
4961 return htab_hash_string (a->addr.label);
4962 default:
4963 gcc_unreachable ();
4964 }
4965 inchash::add_rtx (a->addr.rtl, hstate);
4966 return hstate.end ();
4967 }
4968
4969 /* Determine equality for two address_table_entries. */
4970
4971 bool
4972 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4973 {
4974 if (a1->kind != a2->kind)
4975 return 0;
4976 switch (a1->kind)
4977 {
4978 case ate_kind_rtx:
4979 case ate_kind_rtx_dtprel:
4980 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
4981 case ate_kind_label:
4982 return strcmp (a1->addr.label, a2->addr.label) == 0;
4983 default:
4984 gcc_unreachable ();
4985 }
4986 }
4987
4988 /* Initialize an addr_table_entry. */
4989
4990 void
4991 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
4992 {
4993 e->kind = kind;
4994 switch (kind)
4995 {
4996 case ate_kind_rtx:
4997 case ate_kind_rtx_dtprel:
4998 e->addr.rtl = (rtx) addr;
4999 break;
5000 case ate_kind_label:
5001 e->addr.label = (char *) addr;
5002 break;
5003 }
5004 e->refcount = 0;
5005 e->index = NO_INDEX_ASSIGNED;
5006 }
5007
5008 /* Find or add an address table entry for ADDR of the given KIND and
5009 bump its reference count. Defer setting an index until output time. */
5010
5011 static addr_table_entry *
5012 add_addr_table_entry (void *addr, enum ate_kind kind)
5013 {
5014 addr_table_entry *node;
5015 addr_table_entry finder;
5016
5017 gcc_assert (dwarf_split_debug_info);
5018 if (! addr_index_table)
5019 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
5020 init_addr_table_entry (&finder, kind, addr);
5021 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
5022
5023 if (*slot == HTAB_EMPTY_ENTRY)
5024 {
5025 node = ggc_cleared_alloc<addr_table_entry> ();
5026 init_addr_table_entry (node, kind, addr);
5027 *slot = node;
5028 }
5029 else
5030 node = *slot;
5031
5032 node->refcount++;
5033 return node;
5034 }
5035
5036 /* Remove an entry from the addr table by decrementing its refcount.
5037 Strictly, decrementing the refcount would be enough, but the
5038 assertion that the entry is actually in the table has found
5039 bugs. */
5040
5041 static void
5042 remove_addr_table_entry (addr_table_entry *entry)
5043 {
5044 gcc_assert (dwarf_split_debug_info && addr_index_table);
5045 /* After an index is assigned, the table is frozen. */
5046 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
5047 entry->refcount--;
5048 }
5049
5050 /* Given a location list, remove all addresses it refers to from the
5051 address_table. */
5052
5053 static void
5054 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
5055 {
5056 for (; descr; descr = descr->dw_loc_next)
5057 if (descr->dw_loc_oprnd1.val_entry != NULL)
5058 {
5059 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
5060 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
5061 }
5062 }
5063
5064 /* A helper function for dwarf2out_finish called through
5065 htab_traverse. Assign an addr_table_entry its index. All entries
5066 must be collected into the table when this function is called,
5067 because the indexing code relies on htab_traverse to traverse nodes
5068 in the same order for each run. */
5069
5070 int
5071 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
5072 {
5073 addr_table_entry *node = *h;
5074
5075 /* Don't index unreferenced nodes. */
5076 if (node->refcount == 0)
5077 return 1;
5078
5079 gcc_assert (node->index == NO_INDEX_ASSIGNED);
5080 node->index = *index;
5081 *index += 1;
5082
5083 return 1;
5084 }
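/* For illustration, a sketch of how the indexing pass is driven at
   output time (the actual call site is in dwarf2out_finish and may
   differ in detail):

     unsigned int index = 0;
     if (addr_index_table != NULL)
       addr_index_table->traverse_noresize
         <unsigned int *, index_addr_table_entry> (&index);

   After the traversal every referenced entry carries a stable index into
   .debug_addr, and the table is considered frozen.  */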
5085
5086 /* Add an address constant attribute value to a DIE. When using
5087 dwarf_split_debug_info, address attributes in dies destined for the
5088 final executable should be direct references--setting the parameter
5089 force_direct ensures this behavior. */
5090
5091 static inline void
5092 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
5093 bool force_direct)
5094 {
5095 dw_attr_node attr;
5096
5097 attr.dw_attr = attr_kind;
5098 attr.dw_attr_val.val_class = dw_val_class_addr;
5099 attr.dw_attr_val.v.val_addr = addr;
5100 if (dwarf_split_debug_info && !force_direct)
5101 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
5102 else
5103 attr.dw_attr_val.val_entry = NULL;
5104 add_dwarf_attr (die, &attr);
5105 }
5106
5107 /* Get the RTX from an address DIE attribute. */
5108
5109 static inline rtx
5110 AT_addr (dw_attr_node *a)
5111 {
5112 gcc_assert (a && AT_class (a) == dw_val_class_addr);
5113 return a->dw_attr_val.v.val_addr;
5114 }
5115
5116 /* Add a file attribute value to a DIE. */
5117
5118 static inline void
5119 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
5120 struct dwarf_file_data *fd)
5121 {
5122 dw_attr_node attr;
5123
5124 attr.dw_attr = attr_kind;
5125 attr.dw_attr_val.val_class = dw_val_class_file;
5126 attr.dw_attr_val.val_entry = NULL;
5127 attr.dw_attr_val.v.val_file = fd;
5128 add_dwarf_attr (die, &attr);
5129 }
5130
5131 /* Get the dwarf_file_data from a file DIE attribute. */
5132
5133 static inline struct dwarf_file_data *
5134 AT_file (dw_attr_node *a)
5135 {
5136 gcc_assert (a && (AT_class (a) == dw_val_class_file
5137 || AT_class (a) == dw_val_class_file_implicit));
5138 return a->dw_attr_val.v.val_file;
5139 }
5140
5141 /* Add a symbolic view identifier attribute value to a DIE. */
5142
5143 static inline void
5144 add_AT_symview (dw_die_ref die, enum dwarf_attribute attr_kind,
5145 const char *view_label)
5146 {
5147 dw_attr_node attr;
5148
5149 attr.dw_attr = attr_kind;
5150 attr.dw_attr_val.val_class = dw_val_class_symview;
5151 attr.dw_attr_val.val_entry = NULL;
5152 attr.dw_attr_val.v.val_symbolic_view = xstrdup (view_label);
5153 add_dwarf_attr (die, &attr);
5154 }
5155
5156 /* Add a label identifier attribute value to a DIE. */
5157
5158 static inline void
5159 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
5160 const char *lbl_id)
5161 {
5162 dw_attr_node attr;
5163
5164 attr.dw_attr = attr_kind;
5165 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
5166 attr.dw_attr_val.val_entry = NULL;
5167 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
5168 if (dwarf_split_debug_info)
5169 attr.dw_attr_val.val_entry
5170 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
5171 ate_kind_label);
5172 add_dwarf_attr (die, &attr);
5173 }
5174
5175 /* Add a section offset attribute value to a DIE, an offset into the
5176 debug_line section. */
5177
5178 static inline void
5179 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5180 const char *label)
5181 {
5182 dw_attr_node attr;
5183
5184 attr.dw_attr = attr_kind;
5185 attr.dw_attr_val.val_class = dw_val_class_lineptr;
5186 attr.dw_attr_val.val_entry = NULL;
5187 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5188 add_dwarf_attr (die, &attr);
5189 }
5190
5191 /* Add a section offset attribute value to a DIE, an offset into the
5192 debug_macinfo section. */
5193
5194 static inline void
5195 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5196 const char *label)
5197 {
5198 dw_attr_node attr;
5199
5200 attr.dw_attr = attr_kind;
5201 attr.dw_attr_val.val_class = dw_val_class_macptr;
5202 attr.dw_attr_val.val_entry = NULL;
5203 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5204 add_dwarf_attr (die, &attr);
5205 }
5206
5207 /* Add a range_list attribute value to a DIE. When using
5208 dwarf_split_debug_info, address attributes in dies destined for the
5209 final executable should be direct references--setting the parameter
5210 force_direct ensures this behavior. */
5211
5212 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
5213 #define RELOCATED_OFFSET (NULL)
5214
5215 static void
5216 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
5217 long unsigned int offset, bool force_direct)
5218 {
5219 dw_attr_node attr;
5220
5221 attr.dw_attr = attr_kind;
5222 attr.dw_attr_val.val_class = dw_val_class_range_list;
5223 /* For the range_list attribute, use val_entry to store whether the
5224 offset should follow split-debug-info or normal semantics. This
5225 value is read in output_range_list_offset. */
5226 if (dwarf_split_debug_info && !force_direct)
5227 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
5228 else
5229 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
5230 attr.dw_attr_val.v.val_offset = offset;
5231 add_dwarf_attr (die, &attr);
5232 }
5233
5234 /* Return the start label of a delta attribute. */
5235
5236 static inline const char *
5237 AT_vms_delta1 (dw_attr_node *a)
5238 {
5239 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5240 return a->dw_attr_val.v.val_vms_delta.lbl1;
5241 }
5242
5243 /* Return the end label of a delta attribute. */
5244
5245 static inline const char *
5246 AT_vms_delta2 (dw_attr_node *a)
5247 {
5248 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5249 return a->dw_attr_val.v.val_vms_delta.lbl2;
5250 }
5251
5252 static inline const char *
5253 AT_lbl (dw_attr_node *a)
5254 {
5255 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5256 || AT_class (a) == dw_val_class_lineptr
5257 || AT_class (a) == dw_val_class_macptr
5258 || AT_class (a) == dw_val_class_loclistsptr
5259 || AT_class (a) == dw_val_class_high_pc));
5260 return a->dw_attr_val.v.val_lbl_id;
5261 }
5262
5263 /* Get the attribute of type attr_kind. */
5264
5265 static dw_attr_node *
5266 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5267 {
5268 dw_attr_node *a;
5269 unsigned ix;
5270 dw_die_ref spec = NULL;
5271
5272 if (! die)
5273 return NULL;
5274
5275 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5276 if (a->dw_attr == attr_kind)
5277 return a;
5278 else if (a->dw_attr == DW_AT_specification
5279 || a->dw_attr == DW_AT_abstract_origin)
5280 spec = AT_ref (a);
5281
5282 if (spec)
5283 return get_AT (spec, attr_kind);
5284
5285 return NULL;
5286 }
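/* Note that get_AT follows DW_AT_specification and DW_AT_abstract_origin
   links.  For example, asking for DW_AT_name on the concrete out-of-line
   DIE of a member function typically lands on the in-class declaration
   DIE reached through DW_AT_specification, because the concrete DIE
   itself carries no name.  */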
5287
5288 /* Returns the parent of the declaration of DIE. */
5289
5290 static dw_die_ref
5291 get_die_parent (dw_die_ref die)
5292 {
5293 dw_die_ref t;
5294
5295 if (!die)
5296 return NULL;
5297
5298 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5299 || (t = get_AT_ref (die, DW_AT_specification)))
5300 die = t;
5301
5302 return die->die_parent;
5303 }
5304
5305 /* Return the "low pc" attribute value, typically associated with a subprogram
5306 DIE. Return null if the "low pc" attribute is either not present, or if it
5307 cannot be represented as an assembler label identifier. */
5308
5309 static inline const char *
5310 get_AT_low_pc (dw_die_ref die)
5311 {
5312 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5313
5314 return a ? AT_lbl (a) : NULL;
5315 }
5316
5317 /* Return the value of the string attribute designated by ATTR_KIND, or
5318 NULL if it is not present. */
5319
5320 static inline const char *
5321 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5322 {
5323 dw_attr_node *a = get_AT (die, attr_kind);
5324
5325 return a ? AT_string (a) : NULL;
5326 }
5327
5328 /* Return the value of the flag attribute designated by ATTR_KIND, or 0
5329 if it is not present. */
5330
5331 static inline int
5332 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5333 {
5334 dw_attr_node *a = get_AT (die, attr_kind);
5335
5336 return a ? AT_flag (a) : 0;
5337 }
5338
5339 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5340 if it is not present. */
5341
5342 static inline unsigned
5343 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5344 {
5345 dw_attr_node *a = get_AT (die, attr_kind);
5346
5347 return a ? AT_unsigned (a) : 0;
5348 }
5349
5350 static inline dw_die_ref
5351 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5352 {
5353 dw_attr_node *a = get_AT (die, attr_kind);
5354
5355 return a ? AT_ref (a) : NULL;
5356 }
5357
5358 static inline struct dwarf_file_data *
5359 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5360 {
5361 dw_attr_node *a = get_AT (die, attr_kind);
5362
5363 return a ? AT_file (a) : NULL;
5364 }
5365
5366 /* Return TRUE if the language is C or Objective-C. */
5367
5368 static inline bool
5369 is_c (void)
5370 {
5371 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5372
5373 return (lang == DW_LANG_C || lang == DW_LANG_C89 || lang == DW_LANG_C99
5374 || lang == DW_LANG_C11 || lang == DW_LANG_ObjC);
5375
5376
5377 }
5378
5379 /* Return TRUE if the language is C++ or Objective-C++. */
5380
5381 static inline bool
5382 is_cxx (void)
5383 {
5384 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5385
5386 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5387 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5388 }
5389
5390 /* Return TRUE if DECL was created by the C++ frontend. */
5391
5392 static bool
5393 is_cxx (const_tree decl)
5394 {
5395 if (in_lto_p)
5396 {
5397 const_tree context = get_ultimate_context (decl);
5398 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5399 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5400 }
5401 return is_cxx ();
5402 }
5403
5404 /* Return TRUE if the language is Fortran. */
5405
5406 static inline bool
5407 is_fortran (void)
5408 {
5409 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5410
5411 return (lang == DW_LANG_Fortran77
5412 || lang == DW_LANG_Fortran90
5413 || lang == DW_LANG_Fortran95
5414 || lang == DW_LANG_Fortran03
5415 || lang == DW_LANG_Fortran08);
5416 }
5417
5418 static inline bool
5419 is_fortran (const_tree decl)
5420 {
5421 if (in_lto_p)
5422 {
5423 const_tree context = get_ultimate_context (decl);
5424 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5425 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5426 "GNU Fortran", 11) == 0
5427 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5428 "GNU F77") == 0);
5429 }
5430 return is_fortran ();
5431 }
5432
5433 /* Return TRUE if the language is Ada. */
5434
5435 static inline bool
5436 is_ada (void)
5437 {
5438 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5439
5440 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5441 }
5442
5443 /* Return TRUE if the language is D. */
5444
5445 static inline bool
5446 is_dlang (void)
5447 {
5448 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5449
5450 return lang == DW_LANG_D;
5451 }
5452
5453 /* Remove the specified attribute if present. Return TRUE if removal
5454 was successful. */
5455
5456 static bool
5457 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5458 {
5459 dw_attr_node *a;
5460 unsigned ix;
5461
5462 if (! die)
5463 return false;
5464
5465 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5466 if (a->dw_attr == attr_kind)
5467 {
5468 if (AT_class (a) == dw_val_class_str)
5469 if (a->dw_attr_val.v.val_str->refcount)
5470 a->dw_attr_val.v.val_str->refcount--;
5471
5472 /* vec::ordered_remove should help reduce the number of abbrevs
5473 that are needed. */
5474 die->die_attr->ordered_remove (ix);
5475 return true;
5476 }
5477 return false;
5478 }
5479
5480 /* Remove CHILD from its parent. PREV must have the property that
5481 PREV->DIE_SIB == CHILD. Does not alter CHILD. */
5482
5483 static void
5484 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5485 {
5486 gcc_assert (child->die_parent == prev->die_parent);
5487 gcc_assert (prev->die_sib == child);
5488 if (prev == child)
5489 {
5490 gcc_assert (child->die_parent->die_child == child);
5491 prev = NULL;
5492 }
5493 else
5494 prev->die_sib = child->die_sib;
5495 if (child->die_parent->die_child == child)
5496 child->die_parent->die_child = prev;
5497 child->die_sib = NULL;
5498 }
5499
5500 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5501 PREV->DIE_SIB == OLD_CHILD. Does not alter OLD_CHILD. */
5502
5503 static void
5504 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5505 {
5506 dw_die_ref parent = old_child->die_parent;
5507
5508 gcc_assert (parent == prev->die_parent);
5509 gcc_assert (prev->die_sib == old_child);
5510
5511 new_child->die_parent = parent;
5512 if (prev == old_child)
5513 {
5514 gcc_assert (parent->die_child == old_child);
5515 new_child->die_sib = new_child;
5516 }
5517 else
5518 {
5519 prev->die_sib = new_child;
5520 new_child->die_sib = old_child->die_sib;
5521 }
5522 if (old_child->die_parent->die_child == old_child)
5523 old_child->die_parent->die_child = new_child;
5524 old_child->die_sib = NULL;
5525 }
5526
5527 /* Move all children from OLD_PARENT to NEW_PARENT. */
5528
5529 static void
5530 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5531 {
5532 dw_die_ref c;
5533 new_parent->die_child = old_parent->die_child;
5534 old_parent->die_child = NULL;
5535 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5536 }
5537
5538 /* Remove all child DIEs whose die_tag is TAG. Do nothing if no child
5539 matches TAG. */
5540
5541 static void
5542 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5543 {
5544 dw_die_ref c;
5545
5546 c = die->die_child;
5547 if (c) do {
5548 dw_die_ref prev = c;
5549 c = c->die_sib;
5550 while (c->die_tag == tag)
5551 {
5552 remove_child_with_prev (c, prev);
5553 c->die_parent = NULL;
5554 /* Might have removed every child. */
5555 if (die->die_child == NULL)
5556 return;
5557 c = prev->die_sib;
5558 }
5559 } while (c != die->die_child);
5560 }
5561
5562 /* Add a CHILD_DIE as the last child of DIE. */
5563
5564 static void
5565 add_child_die (dw_die_ref die, dw_die_ref child_die)
5566 {
5567 /* FIXME this should probably be an assert. */
5568 if (! die || ! child_die)
5569 return;
5570 gcc_assert (die != child_die);
5571
5572 child_die->die_parent = die;
5573 if (die->die_child)
5574 {
5575 child_die->die_sib = die->die_child->die_sib;
5576 die->die_child->die_sib = child_die;
5577 }
5578 else
5579 child_die->die_sib = child_die;
5580 die->die_child = child_die;
5581 }
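/* The children of a DIE form a circular singly-linked list through
   die_sib: die_child points at the last child, and that child's die_sib
   points back at the first.  For example, after adding children A, B and
   C in that order,

     parent->die_child == C
     C->die_sib == A, A->die_sib == B, B->die_sib == C

   so iterating from die_child->die_sib visits A, B, C in insertion
   order.  */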
5582
5583 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5584
5585 static void
5586 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5587 dw_die_ref after_die)
5588 {
5589 gcc_assert (die
5590 && child_die
5591 && after_die
5592 && die->die_child
5593 && die != child_die);
5594
5595 child_die->die_parent = die;
5596 child_die->die_sib = after_die->die_sib;
5597 after_die->die_sib = child_die;
5598 if (die->die_child == after_die)
5599 die->die_child = child_die;
5600 }
5601
5602 /* Unassociate CHILD from its parent, and make its parent be
5603 NEW_PARENT. */
5604
5605 static void
5606 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5607 {
5608 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5609 if (p->die_sib == child)
5610 {
5611 remove_child_with_prev (child, p);
5612 break;
5613 }
5614 add_child_die (new_parent, child);
5615 }
5616
5617 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5618 is the specification, to the end of PARENT's list of children.
5619 This is done by removing and re-adding it. */
5620
5621 static void
5622 splice_child_die (dw_die_ref parent, dw_die_ref child)
5623 {
5624 /* We want the declaration DIE from inside the class, not the
5625 specification DIE at toplevel. */
5626 if (child->die_parent != parent)
5627 {
5628 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5629
5630 if (tmp)
5631 child = tmp;
5632 }
5633
5634 gcc_assert (child->die_parent == parent
5635 || (child->die_parent
5636 == get_AT_ref (parent, DW_AT_specification)));
5637
5638 reparent_child (child, parent);
5639 }
5640
5641 /* Create and return a new die with TAG_VALUE as tag. */
5642
5643 static inline dw_die_ref
5644 new_die_raw (enum dwarf_tag tag_value)
5645 {
5646 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5647 die->die_tag = tag_value;
5648 return die;
5649 }
5650
5651 /* Create and return a new die with a parent of PARENT_DIE. If
5652 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5653 associated tree T must be supplied to determine parenthood
5654 later. */
5655
5656 static inline dw_die_ref
5657 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5658 {
5659 dw_die_ref die = new_die_raw (tag_value);
5660
5661 if (parent_die != NULL)
5662 add_child_die (parent_die, die);
5663 else
5664 {
5665 limbo_die_node *limbo_node;
5666
5667 /* No DIEs created after early dwarf should end up in limbo,
5668 because the limbo list should not persist past LTO
5669 streaming. */
5670 if (tag_value != DW_TAG_compile_unit
5671 /* These are allowed because they're generated while
5672 breaking out COMDAT units late. */
5673 && tag_value != DW_TAG_type_unit
5674 && tag_value != DW_TAG_skeleton_unit
5675 && !early_dwarf
5676 /* Allow nested functions to live in limbo because they will
5677 only temporarily live there, as decls_for_scope will fix
5678 them up. */
5679 && (TREE_CODE (t) != FUNCTION_DECL
5680 || !decl_function_context (t))
5681 /* Same as nested functions above but for types. Types that
5682 are local to a function will be fixed in
5683 decls_for_scope. */
5684 && (!RECORD_OR_UNION_TYPE_P (t)
5685 || !TYPE_CONTEXT (t)
5686 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5687 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5688 especially in the ltrans stage, but once we implement LTO
5689 dwarf streaming, we should remove this exception. */
5690 && !in_lto_p)
5691 {
5692 fprintf (stderr, "symbol ended up in limbo too late:");
5693 debug_generic_stmt (t);
5694 gcc_unreachable ();
5695 }
5696
5697 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5698 limbo_node->die = die;
5699 limbo_node->created_for = t;
5700 limbo_node->next = limbo_die_list;
5701 limbo_die_list = limbo_node;
5702 }
5703
5704 return die;
5705 }
5706
5707 /* Return the DIE associated with the given type specifier. */
5708
5709 static inline dw_die_ref
5710 lookup_type_die (tree type)
5711 {
5712 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5713 if (die && die->removed)
5714 {
5715 TYPE_SYMTAB_DIE (type) = NULL;
5716 return NULL;
5717 }
5718 return die;
5719 }
5720
5721 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5722 anonymous type named by the typedef TYPE_DIE, return the DIE of the
5723 anonymous type instead of that of the naming typedef. */
5724
5725 static inline dw_die_ref
5726 strip_naming_typedef (tree type, dw_die_ref type_die)
5727 {
5728 if (type
5729 && TREE_CODE (type) == RECORD_TYPE
5730 && type_die
5731 && type_die->die_tag == DW_TAG_typedef
5732 && is_naming_typedef_decl (TYPE_NAME (type)))
5733 type_die = get_AT_ref (type_die, DW_AT_type);
5734 return type_die;
5735 }
5736
5737 /* Like lookup_type_die, but if type is an anonymous type named by a
5738 typedef[1], return the DIE of the anonymous type instead of that of
5739 the naming typedef. This is because in gen_typedef_die, we did
5740 equate the anonymous struct named by the typedef with the DIE of
5741 the naming typedef. So by default, lookup_type_die on an anonymous
5742 struct yields the DIE of the naming typedef.
5743
5744 [1]: Read the comment of is_naming_typedef_decl to learn about what
5745 a naming typedef is. */
5746
5747 static inline dw_die_ref
5748 lookup_type_die_strip_naming_typedef (tree type)
5749 {
5750 dw_die_ref die = lookup_type_die (type);
5751 return strip_naming_typedef (type, die);
5752 }
5753
5754 /* Equate a DIE to a given type specifier. */
5755
5756 static inline void
5757 equate_type_number_to_die (tree type, dw_die_ref type_die)
5758 {
5759 TYPE_SYMTAB_DIE (type) = type_die;
5760 }
5761
5762 static dw_die_ref maybe_create_die_with_external_ref (tree);
5763 struct GTY(()) sym_off_pair
5764 {
5765 const char * GTY((skip)) sym;
5766 unsigned HOST_WIDE_INT off;
5767 };
5768 static GTY(()) hash_map<tree, sym_off_pair> *external_die_map;
5769
5770 /* Returns a hash value for X (which really is a die_struct). */
5771
5772 inline hashval_t
5773 decl_die_hasher::hash (die_node *x)
5774 {
5775 return (hashval_t) x->decl_id;
5776 }
5777
5778 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5779
5780 inline bool
5781 decl_die_hasher::equal (die_node *x, tree y)
5782 {
5783 return (x->decl_id == DECL_UID (y));
5784 }
5785
5786 /* Return the DIE associated with a given declaration. */
5787
5788 static inline dw_die_ref
5789 lookup_decl_die (tree decl)
5790 {
5791 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5792 NO_INSERT);
5793 if (!die)
5794 {
5795 if (in_lto_p)
5796 return maybe_create_die_with_external_ref (decl);
5797 return NULL;
5798 }
5799 if ((*die)->removed)
5800 {
5801 decl_die_table->clear_slot (die);
5802 return NULL;
5803 }
5804 return *die;
5805 }
5806
5807
5808 /* Return the DIE associated with BLOCK. */
5809
5810 static inline dw_die_ref
5811 lookup_block_die (tree block)
5812 {
5813 dw_die_ref die = BLOCK_DIE (block);
5814 if (!die && in_lto_p)
5815 return maybe_create_die_with_external_ref (block);
5816 return die;
5817 }
5818
5819 /* Associate DIE with BLOCK. */
5820
5821 static inline void
5822 equate_block_to_die (tree block, dw_die_ref die)
5823 {
5824 BLOCK_DIE (block) = die;
5825 }
5826 #undef BLOCK_DIE
5827
5828
5829 /* For DECL, which might have early dwarf output, query a SYMBOL + OFFSET
5830 style reference. Return true if we found one referring to a DIE for
5831 DECL, otherwise return false. */
5832
5833 static bool
5834 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5835 unsigned HOST_WIDE_INT *off)
5836 {
5837 dw_die_ref die;
5838
5839 if (in_lto_p)
5840 {
5841 /* During WPA stage and incremental linking we use a hash-map
5842 to store the decl <-> label + offset map. */
5843 if (!external_die_map)
5844 return false;
5845 sym_off_pair *desc = external_die_map->get (decl);
5846 if (!desc)
5847 return false;
5848 *sym = desc->sym;
5849 *off = desc->off;
5850 return true;
5851 }
5852
5853 if (TREE_CODE (decl) == BLOCK)
5854 die = lookup_block_die (decl);
5855 else
5856 die = lookup_decl_die (decl);
5857 if (!die)
5858 return false;
5859
5860 /* Similar to get_ref_die_offset_label, but using the "correct"
5861 label. */
5862 *off = die->die_offset;
5863 while (die->die_parent)
5864 die = die->die_parent;
5865 /* For the containing CU DIE we compute a die_symbol in
5866 compute_comp_unit_symbol. */
5867 gcc_assert (die->die_tag == DW_TAG_compile_unit
5868 && die->die_id.die_symbol != NULL);
5869 *sym = die->die_id.die_symbol;
5870 return true;
5871 }
5872
5873 /* Add a reference of kind ATTR_KIND to a DIE at SYMBOL + OFFSET to DIE. */
5874
5875 static void
5876 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5877 const char *symbol, HOST_WIDE_INT offset)
5878 {
5879 /* Create a fake DIE that contains the reference. Don't use
5880 new_die because we don't want to end up in the limbo list. */
5881 /* ??? We probably want to share these, thus store a ref to the DIE
5882 we create here in the external_die_map entry. */
5883 dw_die_ref ref = new_die_raw (die->die_tag);
5884 ref->die_id.die_symbol = symbol;
5885 ref->die_offset = offset;
5886 ref->with_offset = 1;
5887 add_AT_die_ref (die, attr_kind, ref);
5888 }
5889
5890 /* Create a DIE for DECL if required and add a reference to a DIE
5891 at SYMBOL + OFFSET which contains attributes dumped early. */
5892
5893 static void
5894 dwarf2out_register_external_die (tree decl, const char *sym,
5895 unsigned HOST_WIDE_INT off)
5896 {
5897 if (debug_info_level == DINFO_LEVEL_NONE)
5898 return;
5899
5900 if (!external_die_map)
5901 external_die_map = hash_map<tree, sym_off_pair>::create_ggc (1000);
5902 gcc_checking_assert (!external_die_map->get (decl));
5903 sym_off_pair p = { IDENTIFIER_POINTER (get_identifier (sym)), off };
5904 external_die_map->put (decl, p);
5905 }
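/* Taken together, dwarf2out_die_ref_for_decl and
   dwarf2out_register_external_die implement the early-debug handoff for
   LTO: at compile time the former maps a decl to the "symbol + offset"
   of its early DIE, that pair is streamed with the IL, and at WPA/LTRANS
   time the latter reinstalls it so that maybe_create_die_with_external_ref
   below can wire a late stub DIE to the early one via
   DW_AT_abstract_origin.  */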
5906
5907 /* If we have a registered external DIE for DECL return a new DIE for
5908 the concrete instance with an appropriate abstract origin. */
5909
5910 static dw_die_ref
5911 maybe_create_die_with_external_ref (tree decl)
5912 {
5913 if (!external_die_map)
5914 return NULL;
5915 sym_off_pair *desc = external_die_map->get (decl);
5916 if (!desc)
5917 return NULL;
5918
5919 const char *sym = desc->sym;
5920 unsigned HOST_WIDE_INT off = desc->off;
5921
5922 in_lto_p = false;
5923 dw_die_ref die = (TREE_CODE (decl) == BLOCK
5924 ? lookup_block_die (decl) : lookup_decl_die (decl));
5925 gcc_assert (!die);
5926 in_lto_p = true;
5927
5928 tree ctx;
5929 dw_die_ref parent = NULL;
5930 /* Need to look up a DIE for the decl's context - the containing
5931 function or translation unit. */
5932 if (TREE_CODE (decl) == BLOCK)
5933 {
5934 ctx = BLOCK_SUPERCONTEXT (decl);
5935 /* ??? We do not output DIEs for all scopes thus skip as
5936 many DIEs as needed. */
5937 while (TREE_CODE (ctx) == BLOCK
5938 && !lookup_block_die (ctx))
5939 ctx = BLOCK_SUPERCONTEXT (ctx);
5940 }
5941 else
5942 ctx = DECL_CONTEXT (decl);
5943 /* Peel types in the context stack. */
5944 while (ctx && TYPE_P (ctx))
5945 ctx = TYPE_CONTEXT (ctx);
5946 /* Likewise namespaces in case we do not want to emit DIEs for them. */
5947 if (debug_info_level <= DINFO_LEVEL_TERSE)
5948 while (ctx && TREE_CODE (ctx) == NAMESPACE_DECL)
5949 ctx = DECL_CONTEXT (ctx);
5950 if (ctx)
5951 {
5952 if (TREE_CODE (ctx) == BLOCK)
5953 parent = lookup_block_die (ctx);
5954 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
5955 /* Keep the 1:1 association during WPA. */
5956 && !flag_wpa
5957 && flag_incremental_link != INCREMENTAL_LINK_LTO)
5958 /* Otherwise all late annotations go to the main CU which
5959 imports the original CUs. */
5960 parent = comp_unit_die ();
5961 else if (TREE_CODE (ctx) == FUNCTION_DECL
5962 && TREE_CODE (decl) != FUNCTION_DECL
5963 && TREE_CODE (decl) != PARM_DECL
5964 && TREE_CODE (decl) != RESULT_DECL
5965 && TREE_CODE (decl) != BLOCK)
5966 /* Leave function local entities parent determination to when
5967 we process scope vars. */
5968 ;
5969 else
5970 parent = lookup_decl_die (ctx);
5971 }
5972 else
5973 /* In some cases the FEs fail to set DECL_CONTEXT properly.
5974 Handle this case gracefully by globalizing stuff. */
5975 parent = comp_unit_die ();
5976 /* Create a DIE "stub". */
5977 switch (TREE_CODE (decl))
5978 {
5979 case TRANSLATION_UNIT_DECL:
5980 {
5981 die = comp_unit_die ();
5982 /* We re-target all CU decls to the LTRANS CU DIE, so no need
5983 to create a DIE for the original CUs. */
5984 return die;
5985 }
5986 case NAMESPACE_DECL:
5987 if (is_fortran (decl))
5988 die = new_die (DW_TAG_module, parent, decl);
5989 else
5990 die = new_die (DW_TAG_namespace, parent, decl);
5991 break;
5992 case FUNCTION_DECL:
5993 die = new_die (DW_TAG_subprogram, parent, decl);
5994 break;
5995 case VAR_DECL:
5996 die = new_die (DW_TAG_variable, parent, decl);
5997 break;
5998 case RESULT_DECL:
5999 die = new_die (DW_TAG_variable, parent, decl);
6000 break;
6001 case PARM_DECL:
6002 die = new_die (DW_TAG_formal_parameter, parent, decl);
6003 break;
6004 case CONST_DECL:
6005 die = new_die (DW_TAG_constant, parent, decl);
6006 break;
6007 case LABEL_DECL:
6008 die = new_die (DW_TAG_label, parent, decl);
6009 break;
6010 case BLOCK:
6011 die = new_die (DW_TAG_lexical_block, parent, decl);
6012 break;
6013 default:
6014 gcc_unreachable ();
6015 }
6016 if (TREE_CODE (decl) == BLOCK)
6017 equate_block_to_die (decl, die);
6018 else
6019 equate_decl_number_to_die (decl, die);
6020
6021 add_desc_attribute (die, decl);
6022
6023 /* Add a reference to the DIE providing early debug at $sym + off. */
6024 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
6025
6026 return die;
6027 }
6028
6029 /* Returns a hash value for X (which really is a var_loc_list). */
6030
6031 inline hashval_t
6032 decl_loc_hasher::hash (var_loc_list *x)
6033 {
6034 return (hashval_t) x->decl_id;
6035 }
6036
6037 /* Return nonzero if decl_id of var_loc_list X is the same as
6038 UID of decl *Y. */
6039
6040 inline bool
6041 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
6042 {
6043 return (x->decl_id == DECL_UID (y));
6044 }
6045
6046 /* Return the var_loc list associated with a given declaration. */
6047
6048 static inline var_loc_list *
6049 lookup_decl_loc (const_tree decl)
6050 {
6051 if (!decl_loc_table)
6052 return NULL;
6053 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
6054 }
6055
6056 /* Returns a hash value for X (which really is a cached_dw_loc_list_list). */
6057
6058 inline hashval_t
6059 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
6060 {
6061 return (hashval_t) x->decl_id;
6062 }
6063
6064 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
6065 UID of decl *Y. */
6066
6067 inline bool
6068 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
6069 {
6070 return (x->decl_id == DECL_UID (y));
6071 }
6072
6073 /* Equate a DIE to a particular declaration. */
6074
6075 static void
6076 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
6077 {
6078 unsigned int decl_id = DECL_UID (decl);
6079
6080 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
6081 decl_die->decl_id = decl_id;
6082 }
6083
6084 /* Return how many bits PIECE EXPR_LIST covers. */
6085
6086 static HOST_WIDE_INT
6087 decl_piece_bitsize (rtx piece)
6088 {
6089 int ret = (int) GET_MODE (piece);
6090 if (ret)
6091 return ret;
6092 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
6093 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
6094 return INTVAL (XEXP (XEXP (piece, 0), 0));
6095 }
6096
6097 /* Return a pointer to the slot holding the location note in PIECE EXPR_LIST. */
6098
6099 static rtx *
6100 decl_piece_varloc_ptr (rtx piece)
6101 {
6102 if ((int) GET_MODE (piece))
6103 return &XEXP (piece, 0);
6104 else
6105 return &XEXP (XEXP (piece, 0), 1);
6106 }
6107
6108 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
6109 NEXT is the chain of following piece nodes. */
6110
6111 static rtx_expr_list *
6112 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
6113 {
6114 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
6115 return alloc_EXPR_LIST (bitsize, loc_note, next);
6116 else
6117 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
6118 GEN_INT (bitsize),
6119 loc_note), next);
6120 }
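/* Encoding used by the piece helpers above: when BITSIZE fits in a
   machine_mode value (1 .. MAX_MACHINE_MODE) it is stored in the mode
   field of the EXPR_LIST and the location note sits directly in
   XEXP (piece, 0); otherwise the mode is 0 and XEXP (piece, 0) is a
   CONCAT of (CONST_INT bitsize, loc_note).  decl_piece_bitsize and
   decl_piece_varloc_ptr decode the two layouts accordingly.  */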
6121
6122 /* Return rtx that should be stored into loc field for
6123 LOC_NOTE and BITPOS/BITSIZE. */
6124
6125 static rtx
6126 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
6127 HOST_WIDE_INT bitsize)
6128 {
6129 if (bitsize != -1)
6130 {
6131 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
6132 if (bitpos != 0)
6133 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
6134 }
6135 return loc_note;
6136 }
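/* For example, a note describing bits [16,24) of a decl (BITPOS == 16,
   BITSIZE == 8) yields a two-piece chain: a 16-bit empty padding piece
   followed by an 8-bit piece holding LOC_NOTE.  */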
6137
6138 /* This function either modifies location piece list *DEST in
6139 place (if SRC and INNER are NULL), or copies location piece list
6140 *SRC to *DEST while modifying it. The piece at BITPOS is changed
6141 to contain LOC_NOTE; any pieces overlapping it are removed (or, when
6142 copying, simply not copied) and, if needed, some padding is added
6143 around it. When modifying in place, DEST should point to the
6144 EXPR_LIST where earlier pieces cover PIECE_BITPOS bits; when copying,
6145 SRC points to the start of the whole list and INNER points to the
6146 EXPR_LIST where earlier pieces cover PIECE_BITPOS bits. */
6147
6148 static void
6149 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
6150 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
6151 HOST_WIDE_INT bitsize, rtx loc_note)
6152 {
6153 HOST_WIDE_INT diff;
6154 bool copy = inner != NULL;
6155
6156 if (copy)
6157 {
6158 /* First copy all nodes preceding the current bitpos. */
6159 while (src != inner)
6160 {
6161 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6162 decl_piece_bitsize (*src), NULL_RTX);
6163 dest = &XEXP (*dest, 1);
6164 src = &XEXP (*src, 1);
6165 }
6166 }
6167 /* Add padding if needed. */
6168 if (bitpos != piece_bitpos)
6169 {
6170 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
6171 copy ? NULL_RTX : *dest);
6172 dest = &XEXP (*dest, 1);
6173 }
6174 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
6175 {
6176 gcc_assert (!copy);
6177 /* A piece with the correct bitpos and bitsize already exists;
6178 just update the location for it and return. */
6179 *decl_piece_varloc_ptr (*dest) = loc_note;
6180 return;
6181 }
6182 /* Add the piece that changed. */
6183 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
6184 dest = &XEXP (*dest, 1);
6185 /* Skip over pieces that overlap it. */
6186 diff = bitpos - piece_bitpos + bitsize;
6187 if (!copy)
6188 src = dest;
6189 while (diff > 0 && *src)
6190 {
6191 rtx piece = *src;
6192 diff -= decl_piece_bitsize (piece);
6193 if (copy)
6194 src = &XEXP (piece, 1);
6195 else
6196 {
6197 *src = XEXP (piece, 1);
6198 free_EXPR_LIST_node (piece);
6199 }
6200 }
6201 /* Add padding if needed. */
6202 if (diff < 0 && *src)
6203 {
6204 if (!copy)
6205 dest = src;
6206 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
6207 dest = &XEXP (*dest, 1);
6208 }
6209 if (!copy)
6210 return;
6211 /* Finally copy all nodes following it. */
6212 while (*src)
6213 {
6214 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6215 decl_piece_bitsize (*src), NULL_RTX);
6216 dest = &XEXP (*dest, 1);
6217 src = &XEXP (*src, 1);
6218 }
6219 }
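/* A worked example of the in-place case (register names hypothetical).
   Suppose *DEST holds a 32-bit piece for bits [32,64) in reg R1 followed
   by a 32-bit piece for bits [64,96) in reg R3, and a new note places
   bits [32,48) in reg R2.  Called with BITPOS == PIECE_BITPOS == 32 and
   BITSIZE == 16, the code above inserts a 16-bit piece for R2, frees the
   overlapping R1 piece, and since DIFF ends up at -16 inserts a 16-bit
   padding piece for the now-unknown bits [48,64) in front of the
   untouched R3 piece.  */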
6220
6221 /* Add a variable location node to the linked list for DECL. */
6222
6223 static struct var_loc_node *
6224 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view)
6225 {
6226 unsigned int decl_id;
6227 var_loc_list *temp;
6228 struct var_loc_node *loc = NULL;
6229 HOST_WIDE_INT bitsize = -1, bitpos = -1;
6230
6231 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
6232 {
6233 tree realdecl = DECL_DEBUG_EXPR (decl);
6234 if (handled_component_p (realdecl)
6235 || (TREE_CODE (realdecl) == MEM_REF
6236 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
6237 {
6238 bool reverse;
6239 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
6240 &bitsize, &reverse);
6241 if (!innerdecl
6242 || !DECL_P (innerdecl)
6243 || DECL_IGNORED_P (innerdecl)
6244 || TREE_STATIC (innerdecl)
6245 || bitsize == 0
6246 || bitpos + bitsize > 256)
6247 return NULL;
6248 decl = innerdecl;
6249 }
6250 }
6251
6252 decl_id = DECL_UID (decl);
6253 var_loc_list **slot
6254 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
6255 if (*slot == NULL)
6256 {
6257 temp = ggc_cleared_alloc<var_loc_list> ();
6258 temp->decl_id = decl_id;
6259 *slot = temp;
6260 }
6261 else
6262 temp = *slot;
6263
6264 /* For PARM_DECLs try to keep around the original incoming value,
6265 even if that means we'll emit a zero-range .debug_loc entry. */
6266 if (temp->last
6267 && temp->first == temp->last
6268 && TREE_CODE (decl) == PARM_DECL
6269 && NOTE_P (temp->first->loc)
6270 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
6271 && DECL_INCOMING_RTL (decl)
6272 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
6273 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
6274 == GET_CODE (DECL_INCOMING_RTL (decl))
6275 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
6276 && (bitsize != -1
6277 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
6278 NOTE_VAR_LOCATION_LOC (loc_note))
6279 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
6280 != NOTE_VAR_LOCATION_STATUS (loc_note))))
6281 {
6282 loc = ggc_cleared_alloc<var_loc_node> ();
6283 temp->first->next = loc;
6284 temp->last = loc;
6285 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6286 }
6287 else if (temp->last)
6288 {
6289 struct var_loc_node *last = temp->last, *unused = NULL;
6290 rtx *piece_loc = NULL, last_loc_note;
6291 HOST_WIDE_INT piece_bitpos = 0;
6292 if (last->next)
6293 {
6294 last = last->next;
6295 gcc_assert (last->next == NULL);
6296 }
6297 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
6298 {
6299 piece_loc = &last->loc;
6300 do
6301 {
6302 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
6303 if (piece_bitpos + cur_bitsize > bitpos)
6304 break;
6305 piece_bitpos += cur_bitsize;
6306 piece_loc = &XEXP (*piece_loc, 1);
6307 }
6308 while (*piece_loc);
6309 }
6310 /* TEMP->LAST here points either to the last-but-one or to the
6311 last element in the chained list; LAST points to the last
6312 element. */
6313 if (label && strcmp (last->label, label) == 0 && last->view == view)
6314 {
6315 /* For SRA-optimized variables, if there weren't any real
6316 insns since the last note, just modify the last node. */
6317 if (piece_loc != NULL)
6318 {
6319 adjust_piece_list (piece_loc, NULL, NULL,
6320 bitpos, piece_bitpos, bitsize, loc_note);
6321 return NULL;
6322 }
6323 /* If the last note doesn't cover any instructions, remove it. */
6324 if (temp->last != last)
6325 {
6326 temp->last->next = NULL;
6327 unused = last;
6328 last = temp->last;
6329 gcc_assert (strcmp (last->label, label) != 0 || last->view != view);
6330 }
6331 else
6332 {
6333 gcc_assert (temp->first == temp->last
6334 || (temp->first->next == temp->last
6335 && TREE_CODE (decl) == PARM_DECL));
6336 memset (temp->last, '\0', sizeof (*temp->last));
6337 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6338 return temp->last;
6339 }
6340 }
6341 if (bitsize == -1 && NOTE_P (last->loc))
6342 last_loc_note = last->loc;
6343 else if (piece_loc != NULL
6344 && *piece_loc != NULL_RTX
6345 && piece_bitpos == bitpos
6346 && decl_piece_bitsize (*piece_loc) == bitsize)
6347 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6348 else
6349 last_loc_note = NULL_RTX;
6350 /* If the current location is the same as the end of the list,
6351 and either both or neither of the locations is uninitialized,
6352 we have nothing to do. */
6353 if (last_loc_note == NULL_RTX
6354 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6355 NOTE_VAR_LOCATION_LOC (loc_note)))
6356 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6357 != NOTE_VAR_LOCATION_STATUS (loc_note))
6358 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6359 == VAR_INIT_STATUS_UNINITIALIZED)
6360 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6361 == VAR_INIT_STATUS_UNINITIALIZED))))
6362 {
6363 /* Add LOC to the end of the list and update LAST. If the last
6364 element of the list has been removed above, reuse its
6365 memory for the new node, otherwise allocate a new one. */
6366 if (unused)
6367 {
6368 loc = unused;
6369 memset (loc, '\0', sizeof (*loc));
6370 }
6371 else
6372 loc = ggc_cleared_alloc<var_loc_node> ();
6373 if (bitsize == -1 || piece_loc == NULL)
6374 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6375 else
6376 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6377 bitpos, piece_bitpos, bitsize, loc_note);
6378 last->next = loc;
6379 /* Ensure TEMP->LAST will point either to the new last but one
6380 element of the chain, or to the last element in it. */
6381 if (last != temp->last)
6382 temp->last = last;
6383 }
6384 else if (unused)
6385 ggc_free (unused);
6386 }
6387 else
6388 {
6389 loc = ggc_cleared_alloc<var_loc_node> ();
6390 temp->first = loc;
6391 temp->last = loc;
6392 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6393 }
6394 return loc;
6395 }
6396 \f
6397 /* Keep track of the number of spaces used to indent the
6398 output of the debugging routines that print the structure of
6399 the DIE internal representation. */
6400 static int print_indent;
6401
6402 /* Indent the line the number of spaces given by print_indent. */
6403
6404 static inline void
6405 print_spaces (FILE *outfile)
6406 {
6407 fprintf (outfile, "%*s", print_indent, "");
6408 }
6409
6410 /* Print a type signature in hex. */
6411
6412 static inline void
6413 print_signature (FILE *outfile, char *sig)
6414 {
6415 int i;
6416
6417 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6418 fprintf (outfile, "%02x", sig[i] & 0xff);
6419 }
6420
6421 static inline void
6422 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6423 {
6424 if (discr_value->pos)
6425 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6426 else
6427 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6428 }
6429
6430 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6431
6432 /* Print the value associated to the VAL DWARF value node to OUTFILE. If
6433 RECURSE, output location descriptor operations. */
6434
6435 static void
6436 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6437 {
6438 switch (val->val_class)
6439 {
6440 case dw_val_class_addr:
6441 fprintf (outfile, "address");
6442 break;
6443 case dw_val_class_offset:
6444 fprintf (outfile, "offset");
6445 break;
6446 case dw_val_class_loc:
6447 fprintf (outfile, "location descriptor");
6448 if (val->v.val_loc == NULL)
6449 fprintf (outfile, " -> <null>\n");
6450 else if (recurse)
6451 {
6452 fprintf (outfile, ":\n");
6453 print_indent += 4;
6454 print_loc_descr (val->v.val_loc, outfile);
6455 print_indent -= 4;
6456 }
6457 else
6458 {
6459 if (flag_dump_noaddr || flag_dump_unnumbered)
6460 fprintf (outfile, " #\n");
6461 else
6462 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6463 }
6464 break;
6465 case dw_val_class_loc_list:
6466 fprintf (outfile, "location list -> label:%s",
6467 val->v.val_loc_list->ll_symbol);
6468 break;
6469 case dw_val_class_view_list:
6470 val = view_list_to_loc_list_val_node (val);
6471 fprintf (outfile, "location list with views -> labels:%s and %s",
6472 val->v.val_loc_list->ll_symbol,
6473 val->v.val_loc_list->vl_symbol);
6474 break;
6475 case dw_val_class_range_list:
6476 fprintf (outfile, "range list");
6477 break;
6478 case dw_val_class_const:
6479 case dw_val_class_const_implicit:
6480 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6481 break;
6482 case dw_val_class_unsigned_const:
6483 case dw_val_class_unsigned_const_implicit:
6484 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6485 break;
6486 case dw_val_class_const_double:
6487 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6488 HOST_WIDE_INT_PRINT_UNSIGNED")",
6489 val->v.val_double.high,
6490 val->v.val_double.low);
6491 break;
6492 case dw_val_class_wide_int:
6493 {
6494 int i = val->v.val_wide->get_len ();
6495 fprintf (outfile, "constant (");
6496 gcc_assert (i > 0);
6497 if (val->v.val_wide->elt (i - 1) == 0)
6498 fprintf (outfile, "0x");
6499 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6500 val->v.val_wide->elt (--i));
6501 while (--i >= 0)
6502 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6503 val->v.val_wide->elt (i));
6504 fprintf (outfile, ")");
6505 break;
6506 }
6507 case dw_val_class_vec:
6508 fprintf (outfile, "floating-point or vector constant");
6509 break;
6510 case dw_val_class_flag:
6511 fprintf (outfile, "%u", val->v.val_flag);
6512 break;
6513 case dw_val_class_die_ref:
6514 if (val->v.val_die_ref.die != NULL)
6515 {
6516 dw_die_ref die = val->v.val_die_ref.die;
6517
6518 if (die->comdat_type_p)
6519 {
6520 fprintf (outfile, "die -> signature: ");
6521 print_signature (outfile,
6522 die->die_id.die_type_node->signature);
6523 }
6524 else if (die->die_id.die_symbol)
6525 {
6526 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6527 if (die->with_offset)
6528 fprintf (outfile, " + %ld", die->die_offset);
6529 }
6530 else
6531 fprintf (outfile, "die -> %ld", die->die_offset);
6532 if (flag_dump_noaddr || flag_dump_unnumbered)
6533 fprintf (outfile, " #");
6534 else
6535 fprintf (outfile, " (%p)", (void *) die);
6536 }
6537 else
6538 fprintf (outfile, "die -> <null>");
6539 break;
6540 case dw_val_class_vms_delta:
6541 fprintf (outfile, "delta: @slotcount(%s-%s)",
6542 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6543 break;
6544 case dw_val_class_symview:
6545 fprintf (outfile, "view: %s", val->v.val_symbolic_view);
6546 break;
6547 case dw_val_class_lbl_id:
6548 case dw_val_class_lineptr:
6549 case dw_val_class_macptr:
6550 case dw_val_class_loclistsptr:
6551 case dw_val_class_high_pc:
6552 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6553 break;
6554 case dw_val_class_str:
6555 if (val->v.val_str->str != NULL)
6556 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6557 else
6558 fprintf (outfile, "<null>");
6559 break;
6560 case dw_val_class_file:
6561 case dw_val_class_file_implicit:
6562 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6563 val->v.val_file->emitted_number);
6564 break;
6565 case dw_val_class_data8:
6566 {
6567 int i;
6568
6569 for (i = 0; i < 8; i++)
6570 fprintf (outfile, "%02x", val->v.val_data8[i]);
6571 break;
6572 }
6573 case dw_val_class_discr_value:
6574 print_discr_value (outfile, &val->v.val_discr_value);
6575 break;
6576 case dw_val_class_discr_list:
6577 for (dw_discr_list_ref node = val->v.val_discr_list;
6578 node != NULL;
6579 node = node->dw_discr_next)
6580 {
6581 if (node->dw_discr_range)
6582 {
6583 print_discr_value (outfile, &node->dw_discr_lower_bound);
6584 fprintf (outfile, " .. ");
6585 print_discr_value (outfile, &node->dw_discr_upper_bound);
6586 }
6587 else
6588 print_discr_value (outfile, &node->dw_discr_lower_bound);
6589
6590 if (node->dw_discr_next != NULL)
6591 fprintf (outfile, " | ");
6592 }
6593 default:
6594 break;
6595 }
6596 }
6597
6598 /* Likewise, for a DIE attribute. */
6599
6600 static void
6601 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6602 {
6603 print_dw_val (&a->dw_attr_val, recurse, outfile);
6604 }
6605
6606
6607 /* Print the list of operands in the LOC location description to OUTFILE. This
6608 routine is a debugging aid only. */
6609
6610 static void
6611 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6612 {
6613 dw_loc_descr_ref l = loc;
6614
6615 if (loc == NULL)
6616 {
6617 print_spaces (outfile);
6618 fprintf (outfile, "<null>\n");
6619 return;
6620 }
6621
6622 for (l = loc; l != NULL; l = l->dw_loc_next)
6623 {
6624 print_spaces (outfile);
6625 if (flag_dump_noaddr || flag_dump_unnumbered)
6626 fprintf (outfile, "#");
6627 else
6628 fprintf (outfile, "(%p)", (void *) l);
6629 fprintf (outfile, " %s",
6630 dwarf_stack_op_name (l->dw_loc_opc));
6631 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6632 {
6633 fprintf (outfile, " ");
6634 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6635 }
6636 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6637 {
6638 fprintf (outfile, ", ");
6639 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6640 }
6641 fprintf (outfile, "\n");
6642 }
6643 }
6644
6645 /* Print the information associated with a given DIE, and its children.
6646 This routine is a debugging aid only. */
6647
6648 static void
6649 print_die (dw_die_ref die, FILE *outfile)
6650 {
6651 dw_attr_node *a;
6652 dw_die_ref c;
6653 unsigned ix;
6654
6655 print_spaces (outfile);
6656 fprintf (outfile, "DIE %4ld: %s ",
6657 die->die_offset, dwarf_tag_name (die->die_tag));
6658 if (flag_dump_noaddr || flag_dump_unnumbered)
6659 fprintf (outfile, "#\n");
6660 else
6661 fprintf (outfile, "(%p)\n", (void*) die);
6662 print_spaces (outfile);
6663 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6664 fprintf (outfile, " offset: %ld", die->die_offset);
6665 fprintf (outfile, " mark: %d\n", die->die_mark);
6666
6667 if (die->comdat_type_p)
6668 {
6669 print_spaces (outfile);
6670 fprintf (outfile, " signature: ");
6671 print_signature (outfile, die->die_id.die_type_node->signature);
6672 fprintf (outfile, "\n");
6673 }
6674
6675 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6676 {
6677 print_spaces (outfile);
6678 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6679
6680 print_attribute (a, true, outfile);
6681 fprintf (outfile, "\n");
6682 }
6683
6684 if (die->die_child != NULL)
6685 {
6686 print_indent += 4;
6687 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6688 print_indent -= 4;
6689 }
6690 if (print_indent == 0)
6691 fprintf (outfile, "\n");
6692 }
6693
6694 /* Print the list of operations in the LOC location description. */
6695
6696 DEBUG_FUNCTION void
6697 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6698 {
6699 print_loc_descr (loc, stderr);
6700 }
6701
6702 /* Print the information collected for a given DIE. */
6703
6704 DEBUG_FUNCTION void
6705 debug_dwarf_die (dw_die_ref die)
6706 {
6707 print_die (die, stderr);
6708 }
6709
6710 DEBUG_FUNCTION void
6711 debug (die_struct &ref)
6712 {
6713 print_die (&ref, stderr);
6714 }
6715
6716 DEBUG_FUNCTION void
6717 debug (die_struct *ptr)
6718 {
6719 if (ptr)
6720 debug (*ptr);
6721 else
6722 fprintf (stderr, "<nil>\n");
6723 }
6724
6725
6726 /* Print all DWARF information collected for the compilation unit.
6727 This routine is a debugging aid only. */
6728
6729 DEBUG_FUNCTION void
6730 debug_dwarf (void)
6731 {
6732 print_indent = 0;
6733 print_die (comp_unit_die (), stderr);
6734 }
6735
6736 /* Verify the DIE tree structure. */
6737
6738 DEBUG_FUNCTION void
6739 verify_die (dw_die_ref die)
6740 {
6741 gcc_assert (!die->die_mark);
6742 if (die->die_parent == NULL
6743 && die->die_sib == NULL)
6744 return;
6745 /* Verify the die_sib list is cyclic. */
6746 dw_die_ref x = die;
6747 do
6748 {
6749 x->die_mark = 1;
6750 x = x->die_sib;
6751 }
6752 while (x && !x->die_mark);
6753 gcc_assert (x == die);
6754 x = die;
6755 do
6756 {
6757 /* Verify all dies have the same parent. */
6758 gcc_assert (x->die_parent == die->die_parent);
6759 if (x->die_child)
6760 {
6761 /* Verify the child has the proper parent and recurse. */
6762 gcc_assert (x->die_child->die_parent == x);
6763 verify_die (x->die_child);
6764 }
6765 x->die_mark = 0;
6766 x = x->die_sib;
6767 }
6768 while (x && x->die_mark);
6769 }
6770
6771 /* Sanity checks on DIEs. */
6772
6773 static void
6774 check_die (dw_die_ref die)
6775 {
6776 unsigned ix;
6777 dw_attr_node *a;
6778 bool inline_found = false;
6779 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6780 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6781 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6782 {
6783 switch (a->dw_attr)
6784 {
6785 case DW_AT_inline:
6786 if (a->dw_attr_val.v.val_unsigned)
6787 inline_found = true;
6788 break;
6789 case DW_AT_location:
6790 ++n_location;
6791 break;
6792 case DW_AT_low_pc:
6793 ++n_low_pc;
6794 break;
6795 case DW_AT_high_pc:
6796 ++n_high_pc;
6797 break;
6798 case DW_AT_artificial:
6799 ++n_artificial;
6800 break;
6801 case DW_AT_decl_column:
6802 ++n_decl_column;
6803 break;
6804 case DW_AT_decl_line:
6805 ++n_decl_line;
6806 break;
6807 case DW_AT_decl_file:
6808 ++n_decl_file;
6809 break;
6810 default:
6811 break;
6812 }
6813 }
6814 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6815 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6816 {
6817 fprintf (stderr, "Duplicate attributes in DIE:\n");
6818 debug_dwarf_die (die);
6819 gcc_unreachable ();
6820 }
6821 if (inline_found)
6822 {
6823 /* A debugging information entry that is a member of an abstract
6824 instance tree [that has DW_AT_inline] should not contain any
6825 attributes which describe aspects of the subroutine which vary
6826 between distinct inlined expansions or distinct out-of-line
6827 expansions. */
6828 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6829 gcc_assert (a->dw_attr != DW_AT_low_pc
6830 && a->dw_attr != DW_AT_high_pc
6831 && a->dw_attr != DW_AT_location
6832 && a->dw_attr != DW_AT_frame_base
6833 && a->dw_attr != DW_AT_call_all_calls
6834 && a->dw_attr != DW_AT_GNU_all_call_sites);
6835 }
6836 }
6837 \f
6838 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6839 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6840 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6841
6842 /* Calculate the checksum of a location expression. */
6843
6844 static inline void
6845 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6846 {
6847 int tem;
6848 inchash::hash hstate;
6849 hashval_t hash;
6850
6851 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6852 CHECKSUM (tem);
6853 hash_loc_operands (loc, hstate);
6854 hash = hstate.end();
6855 CHECKSUM (hash);
6856 }
6857
6858 /* Calculate the checksum of an attribute. */
6859
6860 static void
6861 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6862 {
6863 dw_loc_descr_ref loc;
6864 rtx r;
6865
6866 CHECKSUM (at->dw_attr);
6867
6868 /* We don't care that this was compiled with a different compiler
6869 snapshot; if the output is the same, that's what matters. */
6870 if (at->dw_attr == DW_AT_producer)
6871 return;
6872
6873 switch (AT_class (at))
6874 {
6875 case dw_val_class_const:
6876 case dw_val_class_const_implicit:
6877 CHECKSUM (at->dw_attr_val.v.val_int);
6878 break;
6879 case dw_val_class_unsigned_const:
6880 case dw_val_class_unsigned_const_implicit:
6881 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6882 break;
6883 case dw_val_class_const_double:
6884 CHECKSUM (at->dw_attr_val.v.val_double);
6885 break;
6886 case dw_val_class_wide_int:
6887 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6888 get_full_len (*at->dw_attr_val.v.val_wide)
6889 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6890 break;
6891 case dw_val_class_vec:
6892 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6893 (at->dw_attr_val.v.val_vec.length
6894 * at->dw_attr_val.v.val_vec.elt_size));
6895 break;
6896 case dw_val_class_flag:
6897 CHECKSUM (at->dw_attr_val.v.val_flag);
6898 break;
6899 case dw_val_class_str:
6900 CHECKSUM_STRING (AT_string (at));
6901 break;
6902
6903 case dw_val_class_addr:
6904 r = AT_addr (at);
6905 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6906 CHECKSUM_STRING (XSTR (r, 0));
6907 break;
6908
6909 case dw_val_class_offset:
6910 CHECKSUM (at->dw_attr_val.v.val_offset);
6911 break;
6912
6913 case dw_val_class_loc:
6914 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6915 loc_checksum (loc, ctx);
6916 break;
6917
6918 case dw_val_class_die_ref:
6919 die_checksum (AT_ref (at), ctx, mark);
6920 break;
6921
6922 case dw_val_class_fde_ref:
6923 case dw_val_class_vms_delta:
6924 case dw_val_class_symview:
6925 case dw_val_class_lbl_id:
6926 case dw_val_class_lineptr:
6927 case dw_val_class_macptr:
6928 case dw_val_class_loclistsptr:
6929 case dw_val_class_high_pc:
6930 break;
6931
6932 case dw_val_class_file:
6933 case dw_val_class_file_implicit:
6934 CHECKSUM_STRING (AT_file (at)->filename);
6935 break;
6936
6937 case dw_val_class_data8:
6938 CHECKSUM (at->dw_attr_val.v.val_data8);
6939 break;
6940
6941 default:
6942 break;
6943 }
6944 }
6945
6946 /* Calculate the checksum of a DIE. */
6947
6948 static void
6949 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6950 {
6951 dw_die_ref c;
6952 dw_attr_node *a;
6953 unsigned ix;
6954
6955 /* To avoid infinite recursion. */
6956 if (die->die_mark)
6957 {
6958 CHECKSUM (die->die_mark);
6959 return;
6960 }
6961 die->die_mark = ++(*mark);
6962
6963 CHECKSUM (die->die_tag);
6964
6965 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6966 attr_checksum (a, ctx, mark);
6967
6968 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6969 }
6970
6971 #undef CHECKSUM
6972 #undef CHECKSUM_BLOCK
6973 #undef CHECKSUM_STRING
6974
6975 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6976 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6977 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6978 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
6979 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
6980 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
6981 #define CHECKSUM_ATTR(FOO) \
6982 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
6983
6984 /* Calculate the checksum of a number in signed LEB128 format. */
6985
6986 static void
6987 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
6988 {
6989 unsigned char byte;
6990 bool more;
6991
6992 while (1)
6993 {
6994 byte = (value & 0x7f);
6995 value >>= 7;
6996 more = !((value == 0 && (byte & 0x40) == 0)
6997 || (value == -1 && (byte & 0x40) != 0));
6998 if (more)
6999 byte |= 0x80;
7000 CHECKSUM (byte);
7001 if (!more)
7002 break;
7003 }
7004 }
7005
7006 /* Calculate the checksum of a number in unsigned LEB128 format. */
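/* For example, unsigned LEB128 emits 0x02 for 2, 0x80 0x01 for 128, and
   0xe5 0x8e 0x26 for 624485 (the classic example from the DWARF spec). */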
7007
7008 static void
7009 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
7010 {
7011 while (1)
7012 {
7013 unsigned char byte = (value & 0x7f);
7014 value >>= 7;
7015 if (value != 0)
7016 /* More bytes to follow. */
7017 byte |= 0x80;
7018 CHECKSUM (byte);
7019 if (value == 0)
7020 break;
7021 }
7022 }
7023
7024 /* Checksum the context of the DIE. This adds the names of any
7025 surrounding namespaces or structures to the checksum. */
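/* For example, for a type declared as N::S::T this contributes
   'C' DW_TAG_namespace "N" followed by 'C' DW_TAG_structure_type "S",
   outermost scope first; T itself is checksummed separately by its
   caller. */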
7026
7027 static void
7028 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
7029 {
7030 const char *name;
7031 dw_die_ref spec;
7032 int tag = die->die_tag;
7033
7034 if (tag != DW_TAG_namespace
7035 && tag != DW_TAG_structure_type
7036 && tag != DW_TAG_class_type)
7037 return;
7038
7039 name = get_AT_string (die, DW_AT_name);
7040
7041 spec = get_AT_ref (die, DW_AT_specification);
7042 if (spec != NULL)
7043 die = spec;
7044
7045 if (die->die_parent != NULL)
7046 checksum_die_context (die->die_parent, ctx);
7047
7048 CHECKSUM_ULEB128 ('C');
7049 CHECKSUM_ULEB128 (tag);
7050 if (name != NULL)
7051 CHECKSUM_STRING (name);
7052 }
7053
7054 /* Calculate the checksum of a location expression. */
7055
7056 static inline void
7057 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
7058 {
7059 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
7060 were emitted as a DW_FORM_sdata instead of a location expression. */
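/* (Presumably this keeps the signature stable for members whose
   DW_AT_data_member_location is emitted either as a bare constant offset or
   as an equivalent DW_OP_plus_uconst expression.) */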
7061 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
7062 {
7063 CHECKSUM_ULEB128 (DW_FORM_sdata);
7064 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
7065 return;
7066 }
7067
7068 /* Otherwise, just checksum the raw location expression. */
7069 while (loc != NULL)
7070 {
7071 inchash::hash hstate;
7072 hashval_t hash;
7073
7074 CHECKSUM_ULEB128 (loc->dtprel);
7075 CHECKSUM_ULEB128 (loc->dw_loc_opc);
7076 hash_loc_operands (loc, hstate);
7077 hash = hstate.end ();
7078 CHECKSUM (hash);
7079 loc = loc->dw_loc_next;
7080 }
7081 }
7082
7083 /* Calculate the checksum of an attribute. */
7084
7085 static void
7086 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
7087 struct md5_ctx *ctx, int *mark)
7088 {
7089 dw_loc_descr_ref loc;
7090 rtx r;
7091
7092 if (AT_class (at) == dw_val_class_die_ref)
7093 {
7094 dw_die_ref target_die = AT_ref (at);
7095
7096 /* For pointer and reference types, we checksum only the (qualified)
7097 name of the target type (if there is a name). For friend entries,
7098 we checksum only the (qualified) name of the target type or function.
7099 This allows the checksum to remain the same whether the target type
7100 is complete or not. */
7101 if ((at->dw_attr == DW_AT_type
7102 && (tag == DW_TAG_pointer_type
7103 || tag == DW_TAG_reference_type
7104 || tag == DW_TAG_rvalue_reference_type
7105 || tag == DW_TAG_ptr_to_member_type))
7106 || (at->dw_attr == DW_AT_friend
7107 && tag == DW_TAG_friend))
7108 {
7109 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
7110
7111 if (name_attr != NULL)
7112 {
7113 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7114
7115 if (decl == NULL)
7116 decl = target_die;
7117 CHECKSUM_ULEB128 ('N');
7118 CHECKSUM_ULEB128 (at->dw_attr);
7119 if (decl->die_parent != NULL)
7120 checksum_die_context (decl->die_parent, ctx);
7121 CHECKSUM_ULEB128 ('E');
7122 CHECKSUM_STRING (AT_string (name_attr));
7123 return;
7124 }
7125 }
7126
7127 /* For all other references to another DIE, we check to see if the
7128 target DIE has already been visited. If it has, we emit a
7129 backward reference; if not, we descend recursively. */
7130 if (target_die->die_mark > 0)
7131 {
7132 CHECKSUM_ULEB128 ('R');
7133 CHECKSUM_ULEB128 (at->dw_attr);
7134 CHECKSUM_ULEB128 (target_die->die_mark);
7135 }
7136 else
7137 {
7138 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7139
7140 if (decl == NULL)
7141 decl = target_die;
7142 target_die->die_mark = ++(*mark);
7143 CHECKSUM_ULEB128 ('T');
7144 CHECKSUM_ULEB128 (at->dw_attr);
7145 if (decl->die_parent != NULL)
7146 checksum_die_context (decl->die_parent, ctx);
7147 die_checksum_ordered (target_die, ctx, mark);
7148 }
7149 return;
7150 }
7151
7152 CHECKSUM_ULEB128 ('A');
7153 CHECKSUM_ULEB128 (at->dw_attr);
7154
7155 switch (AT_class (at))
7156 {
7157 case dw_val_class_const:
7158 case dw_val_class_const_implicit:
7159 CHECKSUM_ULEB128 (DW_FORM_sdata);
7160 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
7161 break;
7162
7163 case dw_val_class_unsigned_const:
7164 case dw_val_class_unsigned_const_implicit:
7165 CHECKSUM_ULEB128 (DW_FORM_sdata);
7166 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
7167 break;
7168
7169 case dw_val_class_const_double:
7170 CHECKSUM_ULEB128 (DW_FORM_block);
7171 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
7172 CHECKSUM (at->dw_attr_val.v.val_double);
7173 break;
7174
7175 case dw_val_class_wide_int:
7176 CHECKSUM_ULEB128 (DW_FORM_block);
7177 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
7178 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
7179 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
7180 get_full_len (*at->dw_attr_val.v.val_wide)
7181 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7182 break;
7183
7184 case dw_val_class_vec:
7185 CHECKSUM_ULEB128 (DW_FORM_block);
7186 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
7187 * at->dw_attr_val.v.val_vec.elt_size);
7188 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
7189 (at->dw_attr_val.v.val_vec.length
7190 * at->dw_attr_val.v.val_vec.elt_size));
7191 break;
7192
7193 case dw_val_class_flag:
7194 CHECKSUM_ULEB128 (DW_FORM_flag);
7195 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
7196 break;
7197
7198 case dw_val_class_str:
7199 CHECKSUM_ULEB128 (DW_FORM_string);
7200 CHECKSUM_STRING (AT_string (at));
7201 break;
7202
7203 case dw_val_class_addr:
7204 r = AT_addr (at);
7205 gcc_assert (GET_CODE (r) == SYMBOL_REF);
7206 CHECKSUM_ULEB128 (DW_FORM_string);
7207 CHECKSUM_STRING (XSTR (r, 0));
7208 break;
7209
7210 case dw_val_class_offset:
7211 CHECKSUM_ULEB128 (DW_FORM_sdata);
7212 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
7213 break;
7214
7215 case dw_val_class_loc:
7216 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
7217 loc_checksum_ordered (loc, ctx);
7218 break;
7219
7220 case dw_val_class_fde_ref:
7221 case dw_val_class_symview:
7222 case dw_val_class_lbl_id:
7223 case dw_val_class_lineptr:
7224 case dw_val_class_macptr:
7225 case dw_val_class_loclistsptr:
7226 case dw_val_class_high_pc:
7227 break;
7228
7229 case dw_val_class_file:
7230 case dw_val_class_file_implicit:
7231 CHECKSUM_ULEB128 (DW_FORM_string);
7232 CHECKSUM_STRING (AT_file (at)->filename);
7233 break;
7234
7235 case dw_val_class_data8:
7236 CHECKSUM (at->dw_attr_val.v.val_data8);
7237 break;
7238
7239 default:
7240 break;
7241 }
7242 }
7243
7244 struct checksum_attributes
7245 {
7246 dw_attr_node *at_name;
7247 dw_attr_node *at_type;
7248 dw_attr_node *at_friend;
7249 dw_attr_node *at_accessibility;
7250 dw_attr_node *at_address_class;
7251 dw_attr_node *at_alignment;
7252 dw_attr_node *at_allocated;
7253 dw_attr_node *at_artificial;
7254 dw_attr_node *at_associated;
7255 dw_attr_node *at_binary_scale;
7256 dw_attr_node *at_bit_offset;
7257 dw_attr_node *at_bit_size;
7258 dw_attr_node *at_bit_stride;
7259 dw_attr_node *at_byte_size;
7260 dw_attr_node *at_byte_stride;
7261 dw_attr_node *at_const_value;
7262 dw_attr_node *at_containing_type;
7263 dw_attr_node *at_count;
7264 dw_attr_node *at_data_location;
7265 dw_attr_node *at_data_member_location;
7266 dw_attr_node *at_decimal_scale;
7267 dw_attr_node *at_decimal_sign;
7268 dw_attr_node *at_default_value;
7269 dw_attr_node *at_digit_count;
7270 dw_attr_node *at_discr;
7271 dw_attr_node *at_discr_list;
7272 dw_attr_node *at_discr_value;
7273 dw_attr_node *at_encoding;
7274 dw_attr_node *at_endianity;
7275 dw_attr_node *at_explicit;
7276 dw_attr_node *at_is_optional;
7277 dw_attr_node *at_location;
7278 dw_attr_node *at_lower_bound;
7279 dw_attr_node *at_mutable;
7280 dw_attr_node *at_ordering;
7281 dw_attr_node *at_picture_string;
7282 dw_attr_node *at_prototyped;
7283 dw_attr_node *at_small;
7284 dw_attr_node *at_segment;
7285 dw_attr_node *at_string_length;
7286 dw_attr_node *at_string_length_bit_size;
7287 dw_attr_node *at_string_length_byte_size;
7288 dw_attr_node *at_threads_scaled;
7289 dw_attr_node *at_upper_bound;
7290 dw_attr_node *at_use_location;
7291 dw_attr_node *at_use_UTF8;
7292 dw_attr_node *at_variable_parameter;
7293 dw_attr_node *at_virtuality;
7294 dw_attr_node *at_visibility;
7295 dw_attr_node *at_vtable_elem_location;
7296 };
7297
7298 /* Collect the attributes that we will want to use for the checksum. */
7299
7300 static void
7301 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
7302 {
7303 dw_attr_node *a;
7304 unsigned ix;
7305
7306 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7307 {
7308 switch (a->dw_attr)
7309 {
7310 case DW_AT_name:
7311 attrs->at_name = a;
7312 break;
7313 case DW_AT_type:
7314 attrs->at_type = a;
7315 break;
7316 case DW_AT_friend:
7317 attrs->at_friend = a;
7318 break;
7319 case DW_AT_accessibility:
7320 attrs->at_accessibility = a;
7321 break;
7322 case DW_AT_address_class:
7323 attrs->at_address_class = a;
7324 break;
7325 case DW_AT_alignment:
7326 attrs->at_alignment = a;
7327 break;
7328 case DW_AT_allocated:
7329 attrs->at_allocated = a;
7330 break;
7331 case DW_AT_artificial:
7332 attrs->at_artificial = a;
7333 break;
7334 case DW_AT_associated:
7335 attrs->at_associated = a;
7336 break;
7337 case DW_AT_binary_scale:
7338 attrs->at_binary_scale = a;
7339 break;
7340 case DW_AT_bit_offset:
7341 attrs->at_bit_offset = a;
7342 break;
7343 case DW_AT_bit_size:
7344 attrs->at_bit_size = a;
7345 break;
7346 case DW_AT_bit_stride:
7347 attrs->at_bit_stride = a;
7348 break;
7349 case DW_AT_byte_size:
7350 attrs->at_byte_size = a;
7351 break;
7352 case DW_AT_byte_stride:
7353 attrs->at_byte_stride = a;
7354 break;
7355 case DW_AT_const_value:
7356 attrs->at_const_value = a;
7357 break;
7358 case DW_AT_containing_type:
7359 attrs->at_containing_type = a;
7360 break;
7361 case DW_AT_count:
7362 attrs->at_count = a;
7363 break;
7364 case DW_AT_data_location:
7365 attrs->at_data_location = a;
7366 break;
7367 case DW_AT_data_member_location:
7368 attrs->at_data_member_location = a;
7369 break;
7370 case DW_AT_decimal_scale:
7371 attrs->at_decimal_scale = a;
7372 break;
7373 case DW_AT_decimal_sign:
7374 attrs->at_decimal_sign = a;
7375 break;
7376 case DW_AT_default_value:
7377 attrs->at_default_value = a;
7378 break;
7379 case DW_AT_digit_count:
7380 attrs->at_digit_count = a;
7381 break;
7382 case DW_AT_discr:
7383 attrs->at_discr = a;
7384 break;
7385 case DW_AT_discr_list:
7386 attrs->at_discr_list = a;
7387 break;
7388 case DW_AT_discr_value:
7389 attrs->at_discr_value = a;
7390 break;
7391 case DW_AT_encoding:
7392 attrs->at_encoding = a;
7393 break;
7394 case DW_AT_endianity:
7395 attrs->at_endianity = a;
7396 break;
7397 case DW_AT_explicit:
7398 attrs->at_explicit = a;
7399 break;
7400 case DW_AT_is_optional:
7401 attrs->at_is_optional = a;
7402 break;
7403 case DW_AT_location:
7404 attrs->at_location = a;
7405 break;
7406 case DW_AT_lower_bound:
7407 attrs->at_lower_bound = a;
7408 break;
7409 case DW_AT_mutable:
7410 attrs->at_mutable = a;
7411 break;
7412 case DW_AT_ordering:
7413 attrs->at_ordering = a;
7414 break;
7415 case DW_AT_picture_string:
7416 attrs->at_picture_string = a;
7417 break;
7418 case DW_AT_prototyped:
7419 attrs->at_prototyped = a;
7420 break;
7421 case DW_AT_small:
7422 attrs->at_small = a;
7423 break;
7424 case DW_AT_segment:
7425 attrs->at_segment = a;
7426 break;
7427 case DW_AT_string_length:
7428 attrs->at_string_length = a;
7429 break;
7430 case DW_AT_string_length_bit_size:
7431 attrs->at_string_length_bit_size = a;
7432 break;
7433 case DW_AT_string_length_byte_size:
7434 attrs->at_string_length_byte_size = a;
7435 break;
7436 case DW_AT_threads_scaled:
7437 attrs->at_threads_scaled = a;
7438 break;
7439 case DW_AT_upper_bound:
7440 attrs->at_upper_bound = a;
7441 break;
7442 case DW_AT_use_location:
7443 attrs->at_use_location = a;
7444 break;
7445 case DW_AT_use_UTF8:
7446 attrs->at_use_UTF8 = a;
7447 break;
7448 case DW_AT_variable_parameter:
7449 attrs->at_variable_parameter = a;
7450 break;
7451 case DW_AT_virtuality:
7452 attrs->at_virtuality = a;
7453 break;
7454 case DW_AT_visibility:
7455 attrs->at_visibility = a;
7456 break;
7457 case DW_AT_vtable_elem_location:
7458 attrs->at_vtable_elem_location = a;
7459 break;
7460 default:
7461 break;
7462 }
7463 }
7464 }
7465
7466 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7467
7468 static void
7469 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7470 {
7471 dw_die_ref c;
7472 dw_die_ref decl;
7473 struct checksum_attributes attrs;
7474
7475 CHECKSUM_ULEB128 ('D');
7476 CHECKSUM_ULEB128 (die->die_tag);
7477
7478 memset (&attrs, 0, sizeof (attrs));
7479
7480 decl = get_AT_ref (die, DW_AT_specification);
7481 if (decl != NULL)
7482 collect_checksum_attributes (&attrs, decl);
7483 collect_checksum_attributes (&attrs, die);
7484
7485 CHECKSUM_ATTR (attrs.at_name);
7486 CHECKSUM_ATTR (attrs.at_accessibility);
7487 CHECKSUM_ATTR (attrs.at_address_class);
7488 CHECKSUM_ATTR (attrs.at_allocated);
7489 CHECKSUM_ATTR (attrs.at_artificial);
7490 CHECKSUM_ATTR (attrs.at_associated);
7491 CHECKSUM_ATTR (attrs.at_binary_scale);
7492 CHECKSUM_ATTR (attrs.at_bit_offset);
7493 CHECKSUM_ATTR (attrs.at_bit_size);
7494 CHECKSUM_ATTR (attrs.at_bit_stride);
7495 CHECKSUM_ATTR (attrs.at_byte_size);
7496 CHECKSUM_ATTR (attrs.at_byte_stride);
7497 CHECKSUM_ATTR (attrs.at_const_value);
7498 CHECKSUM_ATTR (attrs.at_containing_type);
7499 CHECKSUM_ATTR (attrs.at_count);
7500 CHECKSUM_ATTR (attrs.at_data_location);
7501 CHECKSUM_ATTR (attrs.at_data_member_location);
7502 CHECKSUM_ATTR (attrs.at_decimal_scale);
7503 CHECKSUM_ATTR (attrs.at_decimal_sign);
7504 CHECKSUM_ATTR (attrs.at_default_value);
7505 CHECKSUM_ATTR (attrs.at_digit_count);
7506 CHECKSUM_ATTR (attrs.at_discr);
7507 CHECKSUM_ATTR (attrs.at_discr_list);
7508 CHECKSUM_ATTR (attrs.at_discr_value);
7509 CHECKSUM_ATTR (attrs.at_encoding);
7510 CHECKSUM_ATTR (attrs.at_endianity);
7511 CHECKSUM_ATTR (attrs.at_explicit);
7512 CHECKSUM_ATTR (attrs.at_is_optional);
7513 CHECKSUM_ATTR (attrs.at_location);
7514 CHECKSUM_ATTR (attrs.at_lower_bound);
7515 CHECKSUM_ATTR (attrs.at_mutable);
7516 CHECKSUM_ATTR (attrs.at_ordering);
7517 CHECKSUM_ATTR (attrs.at_picture_string);
7518 CHECKSUM_ATTR (attrs.at_prototyped);
7519 CHECKSUM_ATTR (attrs.at_small);
7520 CHECKSUM_ATTR (attrs.at_segment);
7521 CHECKSUM_ATTR (attrs.at_string_length);
7522 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7523 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7524 CHECKSUM_ATTR (attrs.at_threads_scaled);
7525 CHECKSUM_ATTR (attrs.at_upper_bound);
7526 CHECKSUM_ATTR (attrs.at_use_location);
7527 CHECKSUM_ATTR (attrs.at_use_UTF8);
7528 CHECKSUM_ATTR (attrs.at_variable_parameter);
7529 CHECKSUM_ATTR (attrs.at_virtuality);
7530 CHECKSUM_ATTR (attrs.at_visibility);
7531 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7532 CHECKSUM_ATTR (attrs.at_type);
7533 CHECKSUM_ATTR (attrs.at_friend);
7534 CHECKSUM_ATTR (attrs.at_alignment);
7535
7536 /* Checksum the child DIEs. */
7537 c = die->die_child;
7538 if (c) do {
7539 dw_attr_node *name_attr;
7540
7541 c = c->die_sib;
7542 name_attr = get_AT (c, DW_AT_name);
7543 if (is_template_instantiation (c))
7544 {
7545 /* Ignore instantiations of member type and function templates. */
7546 }
7547 else if (name_attr != NULL
7548 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7549 {
7550 /* Use a shallow checksum for named nested types and member
7551 functions. */
7552 CHECKSUM_ULEB128 ('S');
7553 CHECKSUM_ULEB128 (c->die_tag);
7554 CHECKSUM_STRING (AT_string (name_attr));
7555 }
7556 else
7557 {
7558 /* Use a deep checksum for other children. */
7559 /* Mark this DIE so it gets processed when unmarking. */
7560 if (c->die_mark == 0)
7561 c->die_mark = -1;
7562 die_checksum_ordered (c, ctx, mark);
7563 }
7564 } while (c != die->die_child);
7565
7566 CHECKSUM_ULEB128 (0);
7567 }
7568
7569 /* Add a type name and tag to a hash. */
7570 static void
7571 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7572 {
7573 CHECKSUM_ULEB128 (tag);
7574 CHECKSUM_STRING (name);
7575 }
7576
7577 #undef CHECKSUM
7578 #undef CHECKSUM_STRING
7579 #undef CHECKSUM_ATTR
7580 #undef CHECKSUM_SLEB128
7581 #undef CHECKSUM_ULEB128
7582
7583 /* Generate the type signature for DIE. This is computed by generating an
7584 MD5 checksum over the DIE's tag, its relevant attributes, and its
7585 children. Attributes that are references to other DIEs are processed
7586 by recursion, using the MARK field to prevent infinite recursion.
7587 If the DIE is nested inside a namespace or another type, we also
7588 need to include that context in the signature. The lower 64 bits
7589 of the resulting MD5 checksum comprise the signature. */
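/* As a rough illustration, for "namespace N { struct S { ... }; }" the
   ordered checksum sees something like: 'C' DW_TAG_namespace "N" for the
   context, then 'D' DW_TAG_structure_type, 'A' DW_AT_name DW_FORM_string
   "S", the other selected attributes, the children, and a terminating zero;
   the last eight bytes of the MD5 of that byte stream form the signature. */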
7590
7591 static void
7592 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7593 {
7594 int mark;
7595 const char *name;
7596 unsigned char checksum[16];
7597 struct md5_ctx ctx;
7598 dw_die_ref decl;
7599 dw_die_ref parent;
7600
7601 name = get_AT_string (die, DW_AT_name);
7602 decl = get_AT_ref (die, DW_AT_specification);
7603 parent = get_die_parent (die);
7604
7605 /* First, compute a signature for just the type name (and its surrounding
7606 context, if any). This is stored in the type unit DIE for link-time
7607 ODR (one-definition rule) checking. */
7608
7609 if (is_cxx () && name != NULL)
7610 {
7611 md5_init_ctx (&ctx);
7612
7613 /* Checksum the names of surrounding namespaces and structures. */
7614 if (parent != NULL)
7615 checksum_die_context (parent, &ctx);
7616
7617 /* Checksum the current DIE. */
7618 die_odr_checksum (die->die_tag, name, &ctx);
7619 md5_finish_ctx (&ctx, checksum);
7620
7621 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7622 }
7623
7624 /* Next, compute the complete type signature. */
7625
7626 md5_init_ctx (&ctx);
7627 mark = 1;
7628 die->die_mark = mark;
7629
7630 /* Checksum the names of surrounding namespaces and structures. */
7631 if (parent != NULL)
7632 checksum_die_context (parent, &ctx);
7633
7634 /* Checksum the DIE and its children. */
7635 die_checksum_ordered (die, &ctx, &mark);
7636 unmark_all_dies (die);
7637 md5_finish_ctx (&ctx, checksum);
7638
7639 /* Store the signature in the type node and link the type DIE and the
7640 type node together. */
7641 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7642 DWARF_TYPE_SIGNATURE_SIZE);
7643 die->comdat_type_p = true;
7644 die->die_id.die_type_node = type_node;
7645 type_node->type_die = die;
7646
7647 /* If the DIE is a specification, link its declaration to the type node
7648 as well. */
7649 if (decl != NULL)
7650 {
7651 decl->comdat_type_p = true;
7652 decl->die_id.die_type_node = type_node;
7653 }
7654 }
7655
7656 /* Do the location expressions look the same? */
7657 static inline int
7658 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7659 {
7660 return loc1->dw_loc_opc == loc2->dw_loc_opc
7661 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7662 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7663 }
7664
7665 /* Do the values look the same? */
7666 static int
7667 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7668 {
7669 dw_loc_descr_ref loc1, loc2;
7670 rtx r1, r2;
7671
7672 if (v1->val_class != v2->val_class)
7673 return 0;
7674
7675 switch (v1->val_class)
7676 {
7677 case dw_val_class_const:
7678 case dw_val_class_const_implicit:
7679 return v1->v.val_int == v2->v.val_int;
7680 case dw_val_class_unsigned_const:
7681 case dw_val_class_unsigned_const_implicit:
7682 return v1->v.val_unsigned == v2->v.val_unsigned;
7683 case dw_val_class_const_double:
7684 return v1->v.val_double.high == v2->v.val_double.high
7685 && v1->v.val_double.low == v2->v.val_double.low;
7686 case dw_val_class_wide_int:
7687 return *v1->v.val_wide == *v2->v.val_wide;
7688 case dw_val_class_vec:
7689 if (v1->v.val_vec.length != v2->v.val_vec.length
7690 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7691 return 0;
7692 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7693 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7694 return 0;
7695 return 1;
7696 case dw_val_class_flag:
7697 return v1->v.val_flag == v2->v.val_flag;
7698 case dw_val_class_str:
7699 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7700
7701 case dw_val_class_addr:
7702 r1 = v1->v.val_addr;
7703 r2 = v2->v.val_addr;
7704 if (GET_CODE (r1) != GET_CODE (r2))
7705 return 0;
7706 return rtx_equal_p (r1, r2);
7707
7708 case dw_val_class_offset:
7709 return v1->v.val_offset == v2->v.val_offset;
7710
7711 case dw_val_class_loc:
7712 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7713 loc1 && loc2;
7714 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7715 if (!same_loc_p (loc1, loc2, mark))
7716 return 0;
7717 return !loc1 && !loc2;
7718
7719 case dw_val_class_die_ref:
7720 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7721
7722 case dw_val_class_symview:
7723 return strcmp (v1->v.val_symbolic_view, v2->v.val_symbolic_view) == 0;
7724
7725 case dw_val_class_fde_ref:
7726 case dw_val_class_vms_delta:
7727 case dw_val_class_lbl_id:
7728 case dw_val_class_lineptr:
7729 case dw_val_class_macptr:
7730 case dw_val_class_loclistsptr:
7731 case dw_val_class_high_pc:
7732 return 1;
7733
7734 case dw_val_class_file:
7735 case dw_val_class_file_implicit:
7736 return v1->v.val_file == v2->v.val_file;
7737
7738 case dw_val_class_data8:
7739 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7740
7741 default:
7742 return 1;
7743 }
7744 }
7745
7746 /* Do the attributes look the same? */
7747
7748 static int
7749 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7750 {
7751 if (at1->dw_attr != at2->dw_attr)
7752 return 0;
7753
7754 /* We don't care that this was compiled with a different compiler
7755 snapshot; if the output is the same, that's what matters. */
7756 if (at1->dw_attr == DW_AT_producer)
7757 return 1;
7758
7759 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7760 }
7761
7762 /* Do the DIEs look the same? */
7763
7764 static int
7765 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7766 {
7767 dw_die_ref c1, c2;
7768 dw_attr_node *a1;
7769 unsigned ix;
7770
7771 /* To avoid infinite recursion. */
7772 if (die1->die_mark)
7773 return die1->die_mark == die2->die_mark;
7774 die1->die_mark = die2->die_mark = ++(*mark);
7775
7776 if (die1->die_tag != die2->die_tag)
7777 return 0;
7778
7779 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7780 return 0;
7781
7782 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7783 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7784 return 0;
7785
7786 c1 = die1->die_child;
7787 c2 = die2->die_child;
7788 if (! c1)
7789 {
7790 if (c2)
7791 return 0;
7792 }
7793 else
7794 for (;;)
7795 {
7796 if (!same_die_p (c1, c2, mark))
7797 return 0;
7798 c1 = c1->die_sib;
7799 c2 = c2->die_sib;
7800 if (c1 == die1->die_child)
7801 {
7802 if (c2 == die2->die_child)
7803 break;
7804 else
7805 return 0;
7806 }
7807 }
7808
7809 return 1;
7810 }
7811
7812 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7813 children, and set die_symbol. */
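/* The resulting symbol is the (cleaned) base filename of the unit followed
   by the first four checksum bytes printed as eight hex digits, e.g. a name
   derived from "foo.c" plus something like "89abcdef". */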
7814
7815 static void
7816 compute_comp_unit_symbol (dw_die_ref unit_die)
7817 {
7818 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7819 const char *base = die_name ? lbasename (die_name) : "anonymous";
7820 char *name = XALLOCAVEC (char, strlen (base) + 64);
7821 char *p;
7822 int i, mark;
7823 unsigned char checksum[16];
7824 struct md5_ctx ctx;
7825
7826 /* Compute the checksum of the DIE, then append part of it as hex digits to
7827 the base filename of the unit. */
7828
7829 md5_init_ctx (&ctx);
7830 mark = 0;
7831 die_checksum (unit_die, &ctx, &mark);
7832 unmark_all_dies (unit_die);
7833 md5_finish_ctx (&ctx, checksum);
7834
7835 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7836 not start with a letter but with anything valid for filenames and
7837 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7838 character is not a letter. */
7839 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7840 clean_symbol_name (name);
7841
7842 p = name + strlen (name);
7843 for (i = 0; i < 4; i++)
7844 {
7845 sprintf (p, "%.2x", checksum[i]);
7846 p += 2;
7847 }
7848
7849 unit_die->die_id.die_symbol = xstrdup (name);
7850 }
7851
7852 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7853
7854 static int
7855 is_type_die (dw_die_ref die)
7856 {
7857 switch (die->die_tag)
7858 {
7859 case DW_TAG_array_type:
7860 case DW_TAG_class_type:
7861 case DW_TAG_interface_type:
7862 case DW_TAG_enumeration_type:
7863 case DW_TAG_pointer_type:
7864 case DW_TAG_reference_type:
7865 case DW_TAG_rvalue_reference_type:
7866 case DW_TAG_string_type:
7867 case DW_TAG_structure_type:
7868 case DW_TAG_subroutine_type:
7869 case DW_TAG_union_type:
7870 case DW_TAG_ptr_to_member_type:
7871 case DW_TAG_set_type:
7872 case DW_TAG_subrange_type:
7873 case DW_TAG_base_type:
7874 case DW_TAG_const_type:
7875 case DW_TAG_file_type:
7876 case DW_TAG_packed_type:
7877 case DW_TAG_volatile_type:
7878 case DW_TAG_typedef:
7879 return 1;
7880 default:
7881 return 0;
7882 }
7883 }
7884
7885 /* Returns true iff C is a compile-unit DIE. */
7886
7887 static inline bool
7888 is_cu_die (dw_die_ref c)
7889 {
7890 return c && (c->die_tag == DW_TAG_compile_unit
7891 || c->die_tag == DW_TAG_skeleton_unit);
7892 }
7893
7894 /* Returns true iff C is a unit DIE of some sort. */
7895
7896 static inline bool
7897 is_unit_die (dw_die_ref c)
7898 {
7899 return c && (c->die_tag == DW_TAG_compile_unit
7900 || c->die_tag == DW_TAG_partial_unit
7901 || c->die_tag == DW_TAG_type_unit
7902 || c->die_tag == DW_TAG_skeleton_unit);
7903 }
7904
7905 /* Returns true iff C is a namespace DIE. */
7906
7907 static inline bool
7908 is_namespace_die (dw_die_ref c)
7909 {
7910 return c && c->die_tag == DW_TAG_namespace;
7911 }
7912
7913 /* Return non-zero if this DIE is a template parameter. */
7914
7915 static inline bool
7916 is_template_parameter (dw_die_ref die)
7917 {
7918 switch (die->die_tag)
7919 {
7920 case DW_TAG_template_type_param:
7921 case DW_TAG_template_value_param:
7922 case DW_TAG_GNU_template_template_param:
7923 case DW_TAG_GNU_template_parameter_pack:
7924 return true;
7925 default:
7926 return false;
7927 }
7928 }
7929
7930 /* Return non-zero if this DIE represents a template instantiation. */
7931
7932 static inline bool
7933 is_template_instantiation (dw_die_ref die)
7934 {
7935 dw_die_ref c;
7936
7937 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7938 return false;
7939 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7940 return false;
7941 }
7942
7943 static char *
7944 gen_internal_sym (const char *prefix)
7945 {
7946 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7947
7948 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7949 return xstrdup (buf);
7950 }
7951
7952 /* Return non-zero if this DIE is a declaration. */
7953
7954 static int
7955 is_declaration_die (dw_die_ref die)
7956 {
7957 dw_attr_node *a;
7958 unsigned ix;
7959
7960 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7961 if (a->dw_attr == DW_AT_declaration)
7962 return 1;
7963
7964 return 0;
7965 }
7966
7967 /* Return non-zero if this DIE is nested inside a subprogram. */
7968
7969 static int
7970 is_nested_in_subprogram (dw_die_ref die)
7971 {
7972 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
7973
7974 if (decl == NULL)
7975 decl = die;
7976 return local_scope_p (decl);
7977 }
7978
7979 /* Return non-zero if this DIE contains a defining declaration of a
7980 subprogram. */
7981
7982 static int
7983 contains_subprogram_definition (dw_die_ref die)
7984 {
7985 dw_die_ref c;
7986
7987 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
7988 return 1;
7989 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
7990 return 0;
7991 }
7992
7993 /* Return non-zero if this is a type DIE that should be moved to a
7994 COMDAT .debug_types section or .debug_info section with DW_UT_*type
7995 unit type. */
7996
7997 static int
7998 should_move_die_to_comdat (dw_die_ref die)
7999 {
8000 switch (die->die_tag)
8001 {
8002 case DW_TAG_class_type:
8003 case DW_TAG_structure_type:
8004 case DW_TAG_enumeration_type:
8005 case DW_TAG_union_type:
8006 /* Don't move declarations, inlined instances, types nested in a
8007 subprogram, or types that contain subprogram definitions. */
8008 if (is_declaration_die (die)
8009 || get_AT (die, DW_AT_abstract_origin)
8010 || is_nested_in_subprogram (die)
8011 || contains_subprogram_definition (die))
8012 return 0;
8013 return 1;
8014 case DW_TAG_array_type:
8015 case DW_TAG_interface_type:
8016 case DW_TAG_pointer_type:
8017 case DW_TAG_reference_type:
8018 case DW_TAG_rvalue_reference_type:
8019 case DW_TAG_string_type:
8020 case DW_TAG_subroutine_type:
8021 case DW_TAG_ptr_to_member_type:
8022 case DW_TAG_set_type:
8023 case DW_TAG_subrange_type:
8024 case DW_TAG_base_type:
8025 case DW_TAG_const_type:
8026 case DW_TAG_file_type:
8027 case DW_TAG_packed_type:
8028 case DW_TAG_volatile_type:
8029 case DW_TAG_typedef:
8030 default:
8031 return 0;
8032 }
8033 }
8034
8035 /* Make a clone of DIE. */
8036
8037 static dw_die_ref
8038 clone_die (dw_die_ref die)
8039 {
8040 dw_die_ref clone = new_die_raw (die->die_tag);
8041 dw_attr_node *a;
8042 unsigned ix;
8043
8044 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8045 add_dwarf_attr (clone, a);
8046
8047 return clone;
8048 }
8049
8050 /* Make a clone of the tree rooted at DIE. */
8051
8052 static dw_die_ref
8053 clone_tree (dw_die_ref die)
8054 {
8055 dw_die_ref c;
8056 dw_die_ref clone = clone_die (die);
8057
8058 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
8059
8060 return clone;
8061 }
8062
8063 /* Make a clone of DIE as a declaration. */
8064
8065 static dw_die_ref
8066 clone_as_declaration (dw_die_ref die)
8067 {
8068 dw_die_ref clone;
8069 dw_die_ref decl;
8070 dw_attr_node *a;
8071 unsigned ix;
8072
8073 /* If the DIE is already a declaration, just clone it. */
8074 if (is_declaration_die (die))
8075 return clone_die (die);
8076
8077 /* If the DIE is a specification, just clone its declaration DIE. */
8078 decl = get_AT_ref (die, DW_AT_specification);
8079 if (decl != NULL)
8080 {
8081 clone = clone_die (decl);
8082 if (die->comdat_type_p)
8083 add_AT_die_ref (clone, DW_AT_signature, die);
8084 return clone;
8085 }
8086
8087 clone = new_die_raw (die->die_tag);
8088
8089 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8090 {
8091 /* We don't want to copy over all attributes.
8092 For example we don't want DW_AT_byte_size because otherwise we will no
8093 longer have a declaration and GDB will treat it as a definition. */
8094
8095 switch (a->dw_attr)
8096 {
8097 case DW_AT_abstract_origin:
8098 case DW_AT_artificial:
8099 case DW_AT_containing_type:
8100 case DW_AT_external:
8101 case DW_AT_name:
8102 case DW_AT_type:
8103 case DW_AT_virtuality:
8104 case DW_AT_linkage_name:
8105 case DW_AT_MIPS_linkage_name:
8106 add_dwarf_attr (clone, a);
8107 break;
8108 case DW_AT_byte_size:
8109 case DW_AT_alignment:
8110 default:
8111 break;
8112 }
8113 }
8114
8115 if (die->comdat_type_p)
8116 add_AT_die_ref (clone, DW_AT_signature, die);
8117
8118 add_AT_flag (clone, DW_AT_declaration, 1);
8119 return clone;
8120 }
8121
8122
8123 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
8124
8125 struct decl_table_entry
8126 {
8127 dw_die_ref orig;
8128 dw_die_ref copy;
8129 };
8130
8131 /* Helpers to manipulate hash table of copied declarations. */
8132
8133 /* Hashtable helpers. */
8134
8135 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
8136 {
8137 typedef die_struct *compare_type;
8138 static inline hashval_t hash (const decl_table_entry *);
8139 static inline bool equal (const decl_table_entry *, const die_struct *);
8140 };
8141
8142 inline hashval_t
8143 decl_table_entry_hasher::hash (const decl_table_entry *entry)
8144 {
8145 return htab_hash_pointer (entry->orig);
8146 }
8147
8148 inline bool
8149 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
8150 const die_struct *entry2)
8151 {
8152 return entry1->orig == entry2;
8153 }
8154
8155 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
8156
8157 /* Copy DIE and its ancestors, up to, but not including, the compile unit
8158 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
8159 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
8160 to check if the ancestor has already been copied into UNIT. */
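/* For example, copying a member DIE nested in namespace N and struct S adds
   declaration-only clones of N, S, and the member itself under UNIT (each
   ancestor only once when DECL_TABLE is used) and returns the clone of the
   member. */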
8161
8162 static dw_die_ref
8163 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
8164 decl_hash_type *decl_table)
8165 {
8166 dw_die_ref parent = die->die_parent;
8167 dw_die_ref new_parent = unit;
8168 dw_die_ref copy;
8169 decl_table_entry **slot = NULL;
8170 struct decl_table_entry *entry = NULL;
8171
8172 if (decl_table)
8173 {
8174 /* Check if the entry has already been copied to UNIT. */
8175 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
8176 INSERT);
8177 if (*slot != HTAB_EMPTY_ENTRY)
8178 {
8179 entry = *slot;
8180 return entry->copy;
8181 }
8182
8183 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
8184 entry = XCNEW (struct decl_table_entry);
8185 entry->orig = die;
8186 entry->copy = NULL;
8187 *slot = entry;
8188 }
8189
8190 if (parent != NULL)
8191 {
8192 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
8193 if (spec != NULL)
8194 parent = spec;
8195 if (!is_unit_die (parent))
8196 new_parent = copy_ancestor_tree (unit, parent, decl_table);
8197 }
8198
8199 copy = clone_as_declaration (die);
8200 add_child_die (new_parent, copy);
8201
8202 if (decl_table)
8203 {
8204 /* Record the pointer to the copy. */
8205 entry->copy = copy;
8206 }
8207
8208 return copy;
8209 }
8210 /* Copy the declaration context to the new type unit DIE. This includes
8211 any surrounding namespace or type declarations. If the DIE has an
8212 DW_AT_specification attribute, it also includes the attributes and children
8213 attached to the specification, and returns a pointer to the original
8214 parent of the declaration DIE. Returns NULL otherwise. */
8215
8216 static dw_die_ref
8217 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
8218 {
8219 dw_die_ref decl;
8220 dw_die_ref new_decl;
8221 dw_die_ref orig_parent = NULL;
8222
8223 decl = get_AT_ref (die, DW_AT_specification);
8224 if (decl == NULL)
8225 decl = die;
8226 else
8227 {
8228 unsigned ix;
8229 dw_die_ref c;
8230 dw_attr_node *a;
8231
8232 /* The original DIE will be changed to a declaration, and must
8233 be moved to be a child of the original declaration DIE. */
8234 orig_parent = decl->die_parent;
8235
8236 /* Copy the type node pointer from the new DIE to the original
8237 declaration DIE so we can forward references later. */
8238 decl->comdat_type_p = true;
8239 decl->die_id.die_type_node = die->die_id.die_type_node;
8240
8241 remove_AT (die, DW_AT_specification);
8242
8243 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
8244 {
8245 if (a->dw_attr != DW_AT_name
8246 && a->dw_attr != DW_AT_declaration
8247 && a->dw_attr != DW_AT_external)
8248 add_dwarf_attr (die, a);
8249 }
8250
8251 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
8252 }
8253
8254 if (decl->die_parent != NULL
8255 && !is_unit_die (decl->die_parent))
8256 {
8257 new_decl = copy_ancestor_tree (unit, decl, NULL);
8258 if (new_decl != NULL)
8259 {
8260 remove_AT (new_decl, DW_AT_signature);
8261 add_AT_specification (die, new_decl);
8262 }
8263 }
8264
8265 return orig_parent;
8266 }
8267
8268 /* Generate the skeleton ancestor tree for the given NODE, then clone
8269 the DIE and add the clone into the tree. */
8270
8271 static void
8272 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
8273 {
8274 if (node->new_die != NULL)
8275 return;
8276
8277 node->new_die = clone_as_declaration (node->old_die);
8278
8279 if (node->parent != NULL)
8280 {
8281 generate_skeleton_ancestor_tree (node->parent);
8282 add_child_die (node->parent->new_die, node->new_die);
8283 }
8284 }
8285
8286 /* Generate a skeleton tree of DIEs containing any declarations that are
8287 found in the original tree. We traverse the tree looking for declaration
8288 DIEs, and construct the skeleton from the bottom up whenever we find one. */
8289
8290 static void
8291 generate_skeleton_bottom_up (skeleton_chain_node *parent)
8292 {
8293 skeleton_chain_node node;
8294 dw_die_ref c;
8295 dw_die_ref first;
8296 dw_die_ref prev = NULL;
8297 dw_die_ref next = NULL;
8298
8299 node.parent = parent;
8300
8301 first = c = parent->old_die->die_child;
8302 if (c)
8303 next = c->die_sib;
8304 if (c) do {
8305 if (prev == NULL || prev->die_sib == c)
8306 prev = c;
8307 c = next;
8308 next = (c == first ? NULL : c->die_sib);
8309 node.old_die = c;
8310 node.new_die = NULL;
8311 if (is_declaration_die (c))
8312 {
8313 if (is_template_instantiation (c))
8314 {
8315 /* Instantiated templates do not need to be cloned into the
8316 type unit. Just move the DIE and its children back to
8317 the skeleton tree (in the main CU). */
8318 remove_child_with_prev (c, prev);
8319 add_child_die (parent->new_die, c);
8320 c = prev;
8321 }
8322 else if (c->comdat_type_p)
8323 {
8324 /* This is the skeleton of a type broken out by an earlier
8325 break_out_comdat_types pass. Clone the existing DIE, but keep
8326 the children under the original (which is in the main CU). */
8327 dw_die_ref clone = clone_die (c);
8328
8329 replace_child (c, clone, prev);
8330 generate_skeleton_ancestor_tree (parent);
8331 add_child_die (parent->new_die, c);
8332 c = clone;
8333 continue;
8334 }
8335 else
8336 {
8337 /* Clone the existing DIE, move the original to the skeleton
8338 tree (which is in the main CU), and put the clone, with
8339 all the original's children, where the original came from
8340 (which is about to be moved to the type unit). */
8341 dw_die_ref clone = clone_die (c);
8342 move_all_children (c, clone);
8343
8344 /* If the original has a DW_AT_object_pointer attribute,
8345 it would now point to a child DIE just moved to the
8346 cloned tree, so we need to remove that attribute from
8347 the original. */
8348 remove_AT (c, DW_AT_object_pointer);
8349
8350 replace_child (c, clone, prev);
8351 generate_skeleton_ancestor_tree (parent);
8352 add_child_die (parent->new_die, c);
8353 node.old_die = clone;
8354 node.new_die = c;
8355 c = clone;
8356 }
8357 }
8358 generate_skeleton_bottom_up (&node);
8359 } while (next != NULL);
8360 }
8361
8362 /* Wrapper function for generate_skeleton_bottom_up. */
8363
8364 static dw_die_ref
8365 generate_skeleton (dw_die_ref die)
8366 {
8367 skeleton_chain_node node;
8368
8369 node.old_die = die;
8370 node.new_die = NULL;
8371 node.parent = NULL;
8372
8373 /* If this type definition is nested inside another type,
8374 and is not an instantiation of a template, always leave
8375 at least a declaration in its place. */
8376 if (die->die_parent != NULL
8377 && is_type_die (die->die_parent)
8378 && !is_template_instantiation (die))
8379 node.new_die = clone_as_declaration (die);
8380
8381 generate_skeleton_bottom_up (&node);
8382 return node.new_die;
8383 }
8384
8385 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8386 declaration. The original DIE is moved to a new type unit so that
8387 existing references to it follow it to the new location. If any of the
8388 original DIE's descendants is a declaration, we need to replace the
8389 original DIE with a skeleton tree and move the declarations back into the
8390 skeleton tree. */
8391
8392 static dw_die_ref
8393 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8394 dw_die_ref prev)
8395 {
8396 dw_die_ref skeleton, orig_parent;
8397
8398 /* Copy the declaration context to the type unit DIE. If the returned
8399 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8400 that DIE. */
8401 orig_parent = copy_declaration_context (unit, child);
8402
8403 skeleton = generate_skeleton (child);
8404 if (skeleton == NULL)
8405 remove_child_with_prev (child, prev);
8406 else
8407 {
8408 skeleton->comdat_type_p = true;
8409 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8410
8411 /* If the original DIE was a specification, we need to put
8412 the skeleton under the parent DIE of the declaration.
8413 This leaves the original declaration in the tree, but
8414 it will be pruned later since there are no longer any
8415 references to it. */
8416 if (orig_parent != NULL)
8417 {
8418 remove_child_with_prev (child, prev);
8419 add_child_die (orig_parent, skeleton);
8420 }
8421 else
8422 replace_child (child, skeleton, prev);
8423 }
8424
8425 return skeleton;
8426 }
8427
8428 static void
8429 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8430 comdat_type_node *type_node,
8431 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8432
8433 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
8434 procedure, put it under TYPE_NODE and return the copy. Continue looking for
8435 DWARF procedure references in the DW_AT_location attribute. */
8436
8437 static dw_die_ref
8438 copy_dwarf_procedure (dw_die_ref die,
8439 comdat_type_node *type_node,
8440 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8441 {
8442 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8443
8444 /* DWARF procedures are not supposed to have children... */
8445 gcc_assert (die->die_child == NULL);
8446
8447 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8448 gcc_assert (vec_safe_length (die->die_attr) == 1
8449 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8450
8451 /* Do not copy DWARF procedures more than once. */
8452 bool existed;
8453 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8454 if (existed)
8455 return die_copy;
8456
8457 die_copy = clone_die (die);
8458 add_child_die (type_node->root_die, die_copy);
8459 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8460 return die_copy;
8461 }
8462
8463 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8464 procedures in DIE's attributes. */
8465
8466 static void
8467 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8468 comdat_type_node *type_node,
8469 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8470 {
8471 dw_attr_node *a;
8472 unsigned i;
8473
8474 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8475 {
8476 dw_loc_descr_ref loc;
8477
8478 if (a->dw_attr_val.val_class != dw_val_class_loc)
8479 continue;
8480
8481 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8482 {
8483 switch (loc->dw_loc_opc)
8484 {
8485 case DW_OP_call2:
8486 case DW_OP_call4:
8487 case DW_OP_call_ref:
8488 gcc_assert (loc->dw_loc_oprnd1.val_class
8489 == dw_val_class_die_ref);
8490 loc->dw_loc_oprnd1.v.val_die_ref.die
8491 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8492 type_node,
8493 copied_dwarf_procs);
8494
8495 default:
8496 break;
8497 }
8498 }
8499 }
8500 }
8501
8502 /* Copy DWARF procedures that are referenced by the DIE tree to TYPE_NODE and
8503 rewrite references to point to the copies.
8504
8505 References are looked for in DIE's attributes and recursively in all its
8506 children's attributes that are location descriptions. COPIED_DWARF_PROCS is
8507 a mapping from old DWARF procedures to their copies. It is used to avoid
8508 copying the same DWARF procedure twice under TYPE_NODE. */
8509
8510 static void
8511 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8512 comdat_type_node *type_node,
8513 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8514 {
8515 dw_die_ref c;
8516
8517 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8518 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8519 type_node,
8520 copied_dwarf_procs));
8521 }
8522
8523 /* Traverse the DIE and set up additional .debug_types or .debug_info
8524 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8525 section. */
8526
8527 static void
8528 break_out_comdat_types (dw_die_ref die)
8529 {
8530 dw_die_ref c;
8531 dw_die_ref first;
8532 dw_die_ref prev = NULL;
8533 dw_die_ref next = NULL;
8534 dw_die_ref unit = NULL;
8535
8536 first = c = die->die_child;
8537 if (c)
8538 next = c->die_sib;
8539 if (c) do {
8540 if (prev == NULL || prev->die_sib == c)
8541 prev = c;
8542 c = next;
8543 next = (c == first ? NULL : c->die_sib);
8544 if (should_move_die_to_comdat (c))
8545 {
8546 dw_die_ref replacement;
8547 comdat_type_node *type_node;
8548
8549 /* Break out nested types into their own type units. */
8550 break_out_comdat_types (c);
8551
8552 /* Create a new type unit DIE as the root for the new tree, and
8553 add it to the list of comdat types. */
8554 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8555 add_AT_unsigned (unit, DW_AT_language,
8556 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8557 type_node = ggc_cleared_alloc<comdat_type_node> ();
8558 type_node->root_die = unit;
8559 type_node->next = comdat_type_list;
8560 comdat_type_list = type_node;
8561
8562 /* Generate the type signature. */
8563 generate_type_signature (c, type_node);
8564
8565 /* Copy the declaration context, attributes, and children of the
8566 declaration into the new type unit DIE, then remove this DIE
8567 from the main CU (or replace it with a skeleton if necessary). */
8568 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8569 type_node->skeleton_die = replacement;
8570
8571 /* Add the DIE to the new compunit. */
8572 add_child_die (unit, c);
8573
8574 /* Types can reference DWARF procedures for type size or data location
8575 expressions. Calls in DWARF expressions cannot target procedures
8576 that are not in the same section. So we must copy DWARF procedures
8577 along with this type and then rewrite references to them. */
8578 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8579 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8580
8581 if (replacement != NULL)
8582 c = replacement;
8583 }
8584 else if (c->die_tag == DW_TAG_namespace
8585 || c->die_tag == DW_TAG_class_type
8586 || c->die_tag == DW_TAG_structure_type
8587 || c->die_tag == DW_TAG_union_type)
8588 {
8589 /* Look for nested types that can be broken out. */
8590 break_out_comdat_types (c);
8591 }
8592 } while (next != NULL);
8593 }
8594
8595 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8596 Enter all the cloned children into the hash table decl_table. */
8597
8598 static dw_die_ref
8599 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8600 {
8601 dw_die_ref c;
8602 dw_die_ref clone;
8603 struct decl_table_entry *entry;
8604 decl_table_entry **slot;
8605
8606 if (die->die_tag == DW_TAG_subprogram)
8607 clone = clone_as_declaration (die);
8608 else
8609 clone = clone_die (die);
8610
8611 slot = decl_table->find_slot_with_hash (die,
8612 htab_hash_pointer (die), INSERT);
8613
8614 /* Assert that DIE isn't in the hash table yet. If it were already there,
8615 its ancestors would necessarily be there as well, and clone_tree_partial
8616 wouldn't have been called. */
8617 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8618
8619 entry = XCNEW (struct decl_table_entry);
8620 entry->orig = die;
8621 entry->copy = clone;
8622 *slot = entry;
8623
8624 if (die->die_tag != DW_TAG_subprogram)
8625 FOR_EACH_CHILD (die, c,
8626 add_child_die (clone, clone_tree_partial (c, decl_table)));
8627
8628 return clone;
8629 }
8630
8631 /* Walk the DIE and its children, looking for references to incomplete
8632 or trivial types that are unmarked (i.e., that are not in the current
8633 type_unit). */
8634
8635 static void
8636 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8637 {
8638 dw_die_ref c;
8639 dw_attr_node *a;
8640 unsigned ix;
8641
8642 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8643 {
8644 if (AT_class (a) == dw_val_class_die_ref)
8645 {
8646 dw_die_ref targ = AT_ref (a);
8647 decl_table_entry **slot;
8648 struct decl_table_entry *entry;
8649
8650 if (targ->die_mark != 0 || targ->comdat_type_p)
8651 continue;
8652
8653 slot = decl_table->find_slot_with_hash (targ,
8654 htab_hash_pointer (targ),
8655 INSERT);
8656
8657 if (*slot != HTAB_EMPTY_ENTRY)
8658 {
8659 /* TARG has already been copied, so we just need to
8660 modify the reference to point to the copy. */
8661 entry = *slot;
8662 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8663 }
8664 else
8665 {
8666 dw_die_ref parent = unit;
8667 dw_die_ref copy = clone_die (targ);
8668
8669 /* Record in DECL_TABLE that TARG has been copied.
8670 Need to do this now, before the recursive call,
8671 because DECL_TABLE may be expanded and SLOT
8672 would no longer be a valid pointer. */
8673 entry = XCNEW (struct decl_table_entry);
8674 entry->orig = targ;
8675 entry->copy = copy;
8676 *slot = entry;
8677
8678 /* If TARG is not a declaration DIE, we need to copy its
8679 children. */
8680 if (!is_declaration_die (targ))
8681 {
8682 FOR_EACH_CHILD (
8683 targ, c,
8684 add_child_die (copy,
8685 clone_tree_partial (c, decl_table)));
8686 }
8687
8688 /* Make sure the cloned tree is marked as part of the
8689 type unit. */
8690 mark_dies (copy);
8691
8692 /* If TARG has surrounding context, copy its ancestor tree
8693 into the new type unit. */
8694 if (targ->die_parent != NULL
8695 && !is_unit_die (targ->die_parent))
8696 parent = copy_ancestor_tree (unit, targ->die_parent,
8697 decl_table);
8698
8699 add_child_die (parent, copy);
8700 a->dw_attr_val.v.val_die_ref.die = copy;
8701
8702 /* Make sure the newly-copied DIE is walked. If it was
8703 installed in a previously-added context, it won't
8704 get visited otherwise. */
8705 if (parent != unit)
8706 {
8707 /* Find the highest point of the newly-added tree,
8708 mark each node along the way, and walk from there. */
8709 parent->die_mark = 1;
8710 while (parent->die_parent
8711 && parent->die_parent->die_mark == 0)
8712 {
8713 parent = parent->die_parent;
8714 parent->die_mark = 1;
8715 }
8716 copy_decls_walk (unit, parent, decl_table);
8717 }
8718 }
8719 }
8720 }
8721
8722 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8723 }
8724
8725 /* Copy declarations for "unworthy" types into the new comdat section.
8726 Incomplete types, modified types, and certain other types aren't broken
8727 out into comdat sections of their own, so they don't have a signature,
8728 and we need to copy the declaration into the same section so that we
8729 don't have an external reference. */
8730
8731 static void
8732 copy_decls_for_unworthy_types (dw_die_ref unit)
8733 {
8734 mark_dies (unit);
8735 decl_hash_type decl_table (10);
8736 copy_decls_walk (unit, unit, &decl_table);
8737 unmark_dies (unit);
8738 }
8739
8740 /* Traverse the DIE and add a sibling attribute if it may have the
8741 effect of speeding up access to siblings. To save some space,
8742 avoid generating sibling attributes for DIEs without children. */
8743
8744 static void
8745 add_sibling_attributes (dw_die_ref die)
8746 {
8747 dw_die_ref c;
8748
8749 if (! die->die_child)
8750 return;
8751
8752 if (die->die_parent && die != die->die_parent->die_child)
8753 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8754
8755 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8756 }
8757
8758 /* Output all location lists for the DIE and its children. */
8759
8760 static void
8761 output_location_lists (dw_die_ref die)
8762 {
8763 dw_die_ref c;
8764 dw_attr_node *a;
8765 unsigned ix;
8766
8767 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8768 if (AT_class (a) == dw_val_class_loc_list)
8769 output_loc_list (AT_loc_list (a));
8770
8771 FOR_EACH_CHILD (die, c, output_location_lists (c));
8772 }
8773
8774 /* During assign_location_list_indexes and output_loclists_offsets this is
8775 the current index; afterwards it is the number of assigned indexes (i.e.
8776 how large the .debug_loclists* offset table should be). */
8777 static unsigned int loc_list_idx;
8778
8779 /* Output all location list offsets for the DIE and its children. */
8780
8781 static void
8782 output_loclists_offsets (dw_die_ref die)
8783 {
8784 dw_die_ref c;
8785 dw_attr_node *a;
8786 unsigned ix;
8787
8788 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8789 if (AT_class (a) == dw_val_class_loc_list)
8790 {
8791 dw_loc_list_ref l = AT_loc_list (a);
8792 if (l->offset_emitted)
8793 continue;
8794 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8795 loc_section_label, NULL);
8796 gcc_assert (l->hash == loc_list_idx);
8797 loc_list_idx++;
8798 l->offset_emitted = true;
8799 }
8800
8801 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8802 }
8803
8804 /* Recursively set indexes of location lists. */
8805
8806 static void
8807 assign_location_list_indexes (dw_die_ref die)
8808 {
8809 dw_die_ref c;
8810 dw_attr_node *a;
8811 unsigned ix;
8812
8813 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8814 if (AT_class (a) == dw_val_class_loc_list)
8815 {
8816 dw_loc_list_ref list = AT_loc_list (a);
8817 if (!list->num_assigned)
8818 {
8819 list->num_assigned = true;
8820 list->hash = loc_list_idx++;
8821 }
8822 }
8823
8824 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8825 }
8826
8827 /* We want to limit the number of external references, because they are
8828 larger than local references: a relocation takes multiple words, and
8829 even a sig8 reference is always eight bytes, whereas a local reference
8830 can be as small as one byte (though GCC usually uses the 4-byte DW_FORM_ref4).
8831 So if we encounter multiple external references to the same type DIE, we
8832 make a local typedef stub for it and redirect all references there.
8833
8834 This is the element of the hash table for keeping track of these
8835 references. */
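/* For example, each DW_FORM_ref_sig8 use costs eight bytes, so once a type
   is referenced a few times it is cheaper to emit one local stub DIE that
   carries the signature and reach it with small DW_FORM_ref4 references. */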
8836
8837 struct external_ref
8838 {
8839 dw_die_ref type;
8840 dw_die_ref stub;
8841 unsigned n_refs;
8842 };
8843
8844 /* Hashtable helpers. */
8845
8846 struct external_ref_hasher : free_ptr_hash <external_ref>
8847 {
8848 static inline hashval_t hash (const external_ref *);
8849 static inline bool equal (const external_ref *, const external_ref *);
8850 };
8851
8852 inline hashval_t
8853 external_ref_hasher::hash (const external_ref *r)
8854 {
8855 dw_die_ref die = r->type;
8856 hashval_t h = 0;
8857
8858 /* We can't use the address of the DIE for hashing, because
8859 that will make the order of the stub DIEs non-deterministic. */
8860 if (! die->comdat_type_p)
8861 /* We have a symbol; use it to compute a hash. */
8862 h = htab_hash_string (die->die_id.die_symbol);
8863 else
8864 {
8865 /* We have a type signature; use a subset of the bits as the hash.
8866 The 8-byte signature is at least as large as hashval_t. */
8867 comdat_type_node *type_node = die->die_id.die_type_node;
8868 memcpy (&h, type_node->signature, sizeof (h));
8869 }
8870 return h;
8871 }
8872
8873 inline bool
8874 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8875 {
8876 return r1->type == r2->type;
8877 }
8878
8879 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8880
8881 /* Return a pointer to the external_ref for references to DIE. */
8882
8883 static struct external_ref *
8884 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8885 {
8886 struct external_ref ref, *ref_p;
8887 external_ref **slot;
8888
8889 ref.type = die;
8890 slot = map->find_slot (&ref, INSERT);
8891 if (*slot != HTAB_EMPTY_ENTRY)
8892 return *slot;
8893
8894 ref_p = XCNEW (struct external_ref);
8895 ref_p->type = die;
8896 *slot = ref_p;
8897 return ref_p;
8898 }
8899
8900 /* Subroutine of optimize_external_refs, below.
8901
8902 If we see a type skeleton, record it as our stub. If we see external
8903 references, remember how many we've seen. */
8904
8905 static void
8906 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8907 {
8908 dw_die_ref c;
8909 dw_attr_node *a;
8910 unsigned ix;
8911 struct external_ref *ref_p;
8912
8913 if (is_type_die (die)
8914 && (c = get_AT_ref (die, DW_AT_signature)))
8915 {
8916 /* This is a local skeleton; use it for local references. */
8917 ref_p = lookup_external_ref (map, c);
8918 ref_p->stub = die;
8919 }
8920
8921 /* Scan the DIE references, and remember any that refer to DIEs from
8922 other CUs (i.e. those which are not marked). */
8923 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8924 if (AT_class (a) == dw_val_class_die_ref
8925 && (c = AT_ref (a))->die_mark == 0
8926 && is_type_die (c))
8927 {
8928 ref_p = lookup_external_ref (map, c);
8929 ref_p->n_refs++;
8930 }
8931
8932 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8933 }
8934
8935 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8936 points to an external_ref, DATA is the CU we're processing. If we don't
8937 already have a local stub, and we have multiple refs, build a stub. */
8938
8939 int
8940 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8941 {
8942 struct external_ref *ref_p = *slot;
8943
8944 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
8945 {
8946 /* We have multiple references to this type, so build a small stub.
8947 Both of these forms are a bit dodgy from the perspective of the
8948 DWARF standard, since technically they should have names. */
8949 dw_die_ref cu = data;
8950 dw_die_ref type = ref_p->type;
8951 dw_die_ref stub = NULL;
8952
8953 if (type->comdat_type_p)
8954 {
8955 /* If we refer to this type via sig8, use AT_signature. */
8956 stub = new_die (type->die_tag, cu, NULL_TREE);
8957 add_AT_die_ref (stub, DW_AT_signature, type);
8958 }
8959 else
8960 {
8961 /* Otherwise, use a typedef with no name. */
8962 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
8963 add_AT_die_ref (stub, DW_AT_type, type);
8964 }
8965
8966 stub->die_mark++;
8967 ref_p->stub = stub;
8968 }
8969 return 1;
8970 }
8971
8972 /* DIE is a unit; look through all the DIE references to see if there are
8973 any external references to types, and if so, create local stubs for
8974 them which will be applied in build_abbrev_table. This is useful because
8975 references to local DIEs are smaller. */
8976
8977 static external_ref_hash_type *
8978 optimize_external_refs (dw_die_ref die)
8979 {
8980 external_ref_hash_type *map = new external_ref_hash_type (10);
8981 optimize_external_refs_1 (die, map);
8982 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
8983 return map;
8984 }
8985
8986 /* The following 3 variables are temporaries that are computed only during the
8987 build_abbrev_table call and used and released during the following
8988 optimize_abbrev_table call. */
8989
8990 /* First abbrev_id that can be optimized based on usage. */
8991 static unsigned int abbrev_opt_start;
8992
8993 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
8994 abbrev_id smaller than this, because they must be already sized
8995 during build_abbrev_table). */
8996 static unsigned int abbrev_opt_base_type_end;
8997
8998 /* Vector of usage counts during build_abbrev_table. Indexed by
8999 abbrev_id - abbrev_opt_start. */
9000 static vec<unsigned int> abbrev_usage_count;
9001
9002 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
9003 static vec<dw_die_ref> sorted_abbrev_dies;
9004
9005 /* The format of each DIE (and its attribute value pairs) is encoded in an
9006 abbreviation table. This routine builds the abbreviation table and assigns
9007 a unique abbreviation id for each abbreviation entry. The children of each
9008 die are visited recursively. */
9009
9010 static void
9011 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
9012 {
9013 unsigned int abbrev_id = 0;
9014 dw_die_ref c;
9015 dw_attr_node *a;
9016 unsigned ix;
9017 dw_die_ref abbrev;
9018
9019 /* Scan the DIE references, and replace any that refer to
9020 DIEs from other CUs (i.e. those which are not marked) with
9021 the local stubs we built in optimize_external_refs. */
9022 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9023 if (AT_class (a) == dw_val_class_die_ref
9024 && (c = AT_ref (a))->die_mark == 0)
9025 {
9026 struct external_ref *ref_p;
9027 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
9028
9029 if (is_type_die (c)
9030 && (ref_p = lookup_external_ref (extern_map, c))
9031 && ref_p->stub && ref_p->stub != die)
9032 change_AT_die_ref (a, ref_p->stub);
9033 else
9034 /* We aren't changing this reference, so mark it external. */
9035 set_AT_ref_external (a, 1);
9036 }
9037
9038 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9039 {
9040 dw_attr_node *die_a, *abbrev_a;
9041 unsigned ix;
9042 bool ok = true;
9043
9044 if (abbrev_id == 0)
9045 continue;
9046 if (abbrev->die_tag != die->die_tag)
9047 continue;
9048 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
9049 continue;
9050
9051 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
9052 continue;
9053
9054 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
9055 {
9056 abbrev_a = &(*abbrev->die_attr)[ix];
9057 if ((abbrev_a->dw_attr != die_a->dw_attr)
9058 || (value_format (abbrev_a) != value_format (die_a)))
9059 {
9060 ok = false;
9061 break;
9062 }
9063 }
9064 if (ok)
9065 break;
9066 }
9067
9068 if (abbrev_id >= vec_safe_length (abbrev_die_table))
9069 {
9070 vec_safe_push (abbrev_die_table, die);
9071 if (abbrev_opt_start)
9072 abbrev_usage_count.safe_push (0);
9073 }
9074 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
9075 {
9076 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
9077 sorted_abbrev_dies.safe_push (die);
9078 }
9079
9080 die->die_abbrev = abbrev_id;
9081 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
9082 }
9083
9084 /* Callback function for sorted_abbrev_dies vector sorting. We sort
9085 by die_abbrev's usage count, from the most commonly used
9086 abbreviation to the least. */
9087
9088 static int
9089 die_abbrev_cmp (const void *p1, const void *p2)
9090 {
9091 dw_die_ref die1 = *(const dw_die_ref *) p1;
9092 dw_die_ref die2 = *(const dw_die_ref *) p2;
9093
9094 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
9095 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
9096
9097 if (die1->die_abbrev >= abbrev_opt_base_type_end
9098 && die2->die_abbrev >= abbrev_opt_base_type_end)
9099 {
9100 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9101 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9102 return -1;
9103 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9104 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9105 return 1;
9106 }
9107
9108 /* Stabilize the sort. */
9109 if (die1->die_abbrev < die2->die_abbrev)
9110 return -1;
9111 if (die1->die_abbrev > die2->die_abbrev)
9112 return 1;
9113
9114 return 0;
9115 }
9116
9117 /* Convert dw_val_class_const, dw_val_class_unsigned_const and
9118 dw_val_class_file class attributes of the DIEs between
9119 sorted_abbrev_dies[first_id] and sorted_abbrev_dies[end - 1] into the
9120 corresponding dw_val_class_*_implicit classes. */
9121
9122 static void
9123 optimize_implicit_const (unsigned int first_id, unsigned int end,
9124 vec<bool> &implicit_consts)
9125 {
9126 /* It never makes sense if there is just one DIE using the abbreviation. */
9127 if (end < first_id + 2)
9128 return;
9129
9130 dw_attr_node *a;
9131 unsigned ix, i;
9132 dw_die_ref die = sorted_abbrev_dies[first_id];
9133 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9134 if (implicit_consts[ix])
9135 {
9136 enum dw_val_class new_class = dw_val_class_none;
9137 switch (AT_class (a))
9138 {
9139 case dw_val_class_unsigned_const:
9140 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
9141 continue;
9142
9143 /* The .debug_abbrev section will grow by
9144 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
9145 in all the DIEs using that abbreviation. */
9146 if (constant_size (AT_unsigned (a)) * (end - first_id)
9147 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
9148 continue;
9149
9150 new_class = dw_val_class_unsigned_const_implicit;
9151 break;
9152
9153 case dw_val_class_const:
9154 new_class = dw_val_class_const_implicit;
9155 break;
9156
9157 case dw_val_class_file:
9158 new_class = dw_val_class_file_implicit;
9159 break;
9160
9161 default:
9162 continue;
9163 }
9164 for (i = first_id; i < end; i++)
9165 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
9166 = new_class;
9167 }
9168 }
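/* Illustrative sketch, not part of GCC: a standalone program showing the
   size tradeoff checked in optimize_implicit_const.  Moving an unsigned
   constant into .debug_abbrev as DW_FORM_implicit_const costs one sleb128
   there and saves constant_size (value) bytes in every DIE sharing the
   abbreviation.  The helpers below are simplified re-implementations, and
   the value and DIE count are made up.  */

#include <stdio.h>

static unsigned
sleb128_size (long long v)
{
  unsigned size = 0;
  int more;
  do
    {
      unsigned char byte = v & 0x7f;
      v >>= 7;
      more = !((v == 0 && !(byte & 0x40)) || (v == -1 && (byte & 0x40)));
      size++;
    }
  while (more);
  return size;
}

static unsigned
const_size (unsigned long long v)   /* 1, 2, 4 or 8, like constant_size.  */
{
  if (v <= 0xff) return 1;
  if (v <= 0xffff) return 2;
  if (v <= 0xffffffffULL) return 4;
  return 8;
}

int
main (void)
{
  unsigned long long value = 300;   /* hypothetical attribute value */
  unsigned ndies = 5;               /* DIEs sharing the abbreviation */

  unsigned plain = const_size (value) * ndies;           /* data2 in each DIE */
  unsigned implicit = sleb128_size ((long long) value);  /* once, in .debug_abbrev */
  printf ("plain: %u bytes, implicit_const: %u byte(s) -> %s\n",
	  plain, implicit, plain > implicit ? "convert" : "keep");
  return 0;
}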
9169
9170 /* Attempt to optimize the abbreviation table for abbreviations with
9171 ids from abbrev_opt_start onwards. */
9172
9173 static void
9174 optimize_abbrev_table (void)
9175 {
9176 if (abbrev_opt_start
9177 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
9178 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
9179 {
9180 auto_vec<bool, 32> implicit_consts;
9181 sorted_abbrev_dies.qsort (die_abbrev_cmp);
9182
9183 unsigned int abbrev_id = abbrev_opt_start - 1;
9184 unsigned int first_id = ~0U;
9185 unsigned int last_abbrev_id = 0;
9186 unsigned int i;
9187 dw_die_ref die;
9188 if (abbrev_opt_base_type_end > abbrev_opt_start)
9189 abbrev_id = abbrev_opt_base_type_end - 1;
9190 /* Reassign abbreviation ids from abbrev_opt_start above, so that
9191 most commonly used abbreviations come first. */
9192 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
9193 {
9194 dw_attr_node *a;
9195 unsigned ix;
9196
9197 /* If calc_base_type_die_sizes has been called, the CU and
9198 base types after it can't be optimized, because we've already
9199 calculated their DIE offsets. We've sorted them first. */
9200 if (die->die_abbrev < abbrev_opt_base_type_end)
9201 continue;
9202 if (die->die_abbrev != last_abbrev_id)
9203 {
9204 last_abbrev_id = die->die_abbrev;
9205 if (dwarf_version >= 5 && first_id != ~0U)
9206 optimize_implicit_const (first_id, i, implicit_consts);
9207 abbrev_id++;
9208 (*abbrev_die_table)[abbrev_id] = die;
9209 if (dwarf_version >= 5)
9210 {
9211 first_id = i;
9212 implicit_consts.truncate (0);
9213
9214 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9215 switch (AT_class (a))
9216 {
9217 case dw_val_class_const:
9218 case dw_val_class_unsigned_const:
9219 case dw_val_class_file:
9220 implicit_consts.safe_push (true);
9221 break;
9222 default:
9223 implicit_consts.safe_push (false);
9224 break;
9225 }
9226 }
9227 }
9228 else if (dwarf_version >= 5)
9229 {
9230 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9231 if (!implicit_consts[ix])
9232 continue;
9233 else
9234 {
9235 dw_attr_node *other_a
9236 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
9237 if (!dw_val_equal_p (&a->dw_attr_val,
9238 &other_a->dw_attr_val))
9239 implicit_consts[ix] = false;
9240 }
9241 }
9242 die->die_abbrev = abbrev_id;
9243 }
9244 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
9245 if (dwarf_version >= 5 && first_id != ~0U)
9246 optimize_implicit_const (first_id, i, implicit_consts);
9247 }
9248
9249 abbrev_opt_start = 0;
9250 abbrev_opt_base_type_end = 0;
9251 abbrev_usage_count.release ();
9252 sorted_abbrev_dies.release ();
9253 }
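/* Illustrative sketch, not part of GCC: a standalone program showing why
   the reordering above pays off.  Every DIE starts with its abbreviation
   code as a uleb128, so codes 1..127 cost one byte and 128..16383 cost two;
   giving the smallest codes to the most heavily used abbreviations keeps the
   common case at one byte.  The usage counts and codes are made up.  */

#include <stdio.h>

static unsigned
uleb128_size (unsigned long long v)
{
  unsigned size = 0;
  do
    {
      v >>= 7;
      size++;
    }
  while (v);
  return size;
}

int
main (void)
{
  unsigned long long hot_uses = 10000, cold_uses = 3;

  /* Hot abbreviation stuck with code 200 versus moved down to code 5.  */
  unsigned long long before
    = hot_uses * uleb128_size (200) + cold_uses * uleb128_size (5);
  unsigned long long after
    = hot_uses * uleb128_size (5) + cold_uses * uleb128_size (200);
  printf ("abbrev code bytes before: %llu, after: %llu\n", before, after);
  return 0;
}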
9254 \f
9255 /* Return the power-of-two number of bytes necessary to represent VALUE. */
9256
9257 static int
9258 constant_size (unsigned HOST_WIDE_INT value)
9259 {
9260 int log;
9261
9262 if (value == 0)
9263 log = 0;
9264 else
9265 log = floor_log2 (value);
9266
9267 log = log / 8;
9268 log = 1 << (floor_log2 (log) + 1);
9269
9270 return log;
9271 }
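/* Illustrative sketch, not part of GCC: a standalone check of what
   constant_size computes for a few values.  The byte length is rounded up to
   a power of two, so the result is always 1, 2, 4 or 8 and matches
   DW_FORM_data1/2/4/8.  The simplified comparison chain below gives the same
   answers as the floor_log2 arithmetic above.  */

#include <stdio.h>

int
main (void)
{
  unsigned long long v[] = { 0, 0xff, 0x100, 0xffff, 0x10000,
			     0xffffffffULL, 0x100000000ULL };
  for (unsigned i = 0; i < sizeof (v) / sizeof (*v); i++)
    {
      int bytes;
      if (v[i] <= 0xff) bytes = 1;
      else if (v[i] <= 0xffff) bytes = 2;
      else if (v[i] <= 0xffffffffULL) bytes = 4;
      else bytes = 8;
      printf ("constant_size (%#llx) == %d\n", v[i], bytes);
    }
  return 0;
}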
9272
9273 /* Return the size of a DIE as it is represented in the
9274 .debug_info section. */
9275
9276 static unsigned long
9277 size_of_die (dw_die_ref die)
9278 {
9279 unsigned long size = 0;
9280 dw_attr_node *a;
9281 unsigned ix;
9282 enum dwarf_form form;
9283
9284 size += size_of_uleb128 (die->die_abbrev);
9285 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9286 {
9287 switch (AT_class (a))
9288 {
9289 case dw_val_class_addr:
9290 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9291 {
9292 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9293 size += size_of_uleb128 (AT_index (a));
9294 }
9295 else
9296 size += DWARF2_ADDR_SIZE;
9297 break;
9298 case dw_val_class_offset:
9299 size += DWARF_OFFSET_SIZE;
9300 break;
9301 case dw_val_class_loc:
9302 {
9303 unsigned long lsize = size_of_locs (AT_loc (a));
9304
9305 /* Block length. */
9306 if (dwarf_version >= 4)
9307 size += size_of_uleb128 (lsize);
9308 else
9309 size += constant_size (lsize);
9310 size += lsize;
9311 }
9312 break;
9313 case dw_val_class_loc_list:
9314 case dw_val_class_view_list:
9315 if (dwarf_split_debug_info && dwarf_version >= 5)
9316 {
9317 gcc_assert (AT_loc_list (a)->num_assigned);
9318 size += size_of_uleb128 (AT_loc_list (a)->hash);
9319 }
9320 else
9321 size += DWARF_OFFSET_SIZE;
9322 break;
9323 case dw_val_class_range_list:
9324 if (value_format (a) == DW_FORM_rnglistx)
9325 {
9326 gcc_assert (rnglist_idx);
9327 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9328 size += size_of_uleb128 (r->idx);
9329 }
9330 else
9331 size += DWARF_OFFSET_SIZE;
9332 break;
9333 case dw_val_class_const:
9334 size += size_of_sleb128 (AT_int (a));
9335 break;
9336 case dw_val_class_unsigned_const:
9337 {
9338 int csize = constant_size (AT_unsigned (a));
9339 if (dwarf_version == 3
9340 && a->dw_attr == DW_AT_data_member_location
9341 && csize >= 4)
9342 size += size_of_uleb128 (AT_unsigned (a));
9343 else
9344 size += csize;
9345 }
9346 break;
9347 case dw_val_class_symview:
9348 if (symview_upper_bound <= 0xff)
9349 size += 1;
9350 else if (symview_upper_bound <= 0xffff)
9351 size += 2;
9352 else if (symview_upper_bound <= 0xffffffff)
9353 size += 4;
9354 else
9355 size += 8;
9356 break;
9357 case dw_val_class_const_implicit:
9358 case dw_val_class_unsigned_const_implicit:
9359 case dw_val_class_file_implicit:
9360 /* These occupy no size in the DIE, just an extra sleb128 in
9361 .debug_abbrev. */
9362 break;
9363 case dw_val_class_const_double:
9364 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9365 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9366 size++; /* block */
9367 break;
9368 case dw_val_class_wide_int:
9369 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9370 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9371 if (get_full_len (*a->dw_attr_val.v.val_wide)
9372 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9373 size++; /* block */
9374 break;
9375 case dw_val_class_vec:
9376 size += constant_size (a->dw_attr_val.v.val_vec.length
9377 * a->dw_attr_val.v.val_vec.elt_size)
9378 + a->dw_attr_val.v.val_vec.length
9379 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9380 break;
9381 case dw_val_class_flag:
9382 if (dwarf_version >= 4)
9383 /* Currently all add_AT_flag calls pass in 1 as last argument,
9384 so DW_FORM_flag_present can be used. If that ever changes,
9385 we'll need to use DW_FORM_flag and have some optimization
9386 in build_abbrev_table that will change those to
9387 DW_FORM_flag_present if it is set to 1 in all DIEs using
9388 the same abbrev entry. */
9389 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9390 else
9391 size += 1;
9392 break;
9393 case dw_val_class_die_ref:
9394 if (AT_ref_external (a))
9395 {
9396 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
9397 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9398 is sized by target address length, whereas in DWARF3
9399 it's always sized as an offset. */
9400 if (use_debug_types)
9401 size += DWARF_TYPE_SIGNATURE_SIZE;
9402 else if (dwarf_version == 2)
9403 size += DWARF2_ADDR_SIZE;
9404 else
9405 size += DWARF_OFFSET_SIZE;
9406 }
9407 else
9408 size += DWARF_OFFSET_SIZE;
9409 break;
9410 case dw_val_class_fde_ref:
9411 size += DWARF_OFFSET_SIZE;
9412 break;
9413 case dw_val_class_lbl_id:
9414 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9415 {
9416 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9417 size += size_of_uleb128 (AT_index (a));
9418 }
9419 else
9420 size += DWARF2_ADDR_SIZE;
9421 break;
9422 case dw_val_class_lineptr:
9423 case dw_val_class_macptr:
9424 case dw_val_class_loclistsptr:
9425 size += DWARF_OFFSET_SIZE;
9426 break;
9427 case dw_val_class_str:
9428 form = AT_string_form (a);
9429 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9430 size += DWARF_OFFSET_SIZE;
9431 else if (form == dwarf_FORM (DW_FORM_strx))
9432 size += size_of_uleb128 (AT_index (a));
9433 else
9434 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9435 break;
9436 case dw_val_class_file:
9437 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9438 break;
9439 case dw_val_class_data8:
9440 size += 8;
9441 break;
9442 case dw_val_class_vms_delta:
9443 size += DWARF_OFFSET_SIZE;
9444 break;
9445 case dw_val_class_high_pc:
9446 size += DWARF2_ADDR_SIZE;
9447 break;
9448 case dw_val_class_discr_value:
9449 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9450 break;
9451 case dw_val_class_discr_list:
9452 {
9453 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9454
9455 /* This is a block, so we have the block length and then its
9456 data. */
9457 size += constant_size (block_size) + block_size;
9458 }
9459 break;
9460 default:
9461 gcc_unreachable ();
9462 }
9463 }
9464
9465 return size;
9466 }
9467
9468 /* Size the debugging information associated with a given DIE. Visits the
9469 DIE's children recursively. Updates the global variable next_die_offset, on
9470 each time through. Uses the current value of next_die_offset to update the
9471 die_offset field in each DIE. */
9472
9473 static void
9474 calc_die_sizes (dw_die_ref die)
9475 {
9476 dw_die_ref c;
9477
9478 gcc_assert (die->die_offset == 0
9479 || (unsigned long int) die->die_offset == next_die_offset);
9480 die->die_offset = next_die_offset;
9481 next_die_offset += size_of_die (die);
9482
9483 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9484
9485 if (die->die_child != NULL)
9486 /* Count the null byte used to terminate sibling lists. */
9487 next_die_offset += 1;
9488 }
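/* Illustrative sketch, not part of GCC: a standalone program mimicking the
   offset assignment calc_die_sizes performs -- a pre-order walk that gives
   each DIE the running offset, adds its size, and adds one terminator byte
   after the children of any DIE that has children.  The DIE type, names and
   sizes below are made up.  */

#include <stdio.h>

struct toy_die
{
  const char *name;
  unsigned long size;		/* what size_of_die would return */
  unsigned long offset;		/* filled in by the walk */
  struct toy_die *child;	/* first child */
  struct toy_die *sibling;	/* next sibling */
};

static unsigned long next_offset = 11;	/* e.g. just past the CU header */

static void
assign_offsets (struct toy_die *die)
{
  die->offset = next_offset;
  next_offset += die->size;
  for (struct toy_die *c = die->child; c; c = c->sibling)
    assign_offsets (c);
  if (die->child)
    next_offset += 1;		/* null byte terminating the sibling list */
}

int
main (void)
{
  struct toy_die ty = { "base_type", 7, 0, NULL, NULL };
  struct toy_die var = { "variable", 12, 0, NULL, &ty };
  struct toy_die cu = { "compile_unit", 20, 0, &var, NULL };

  assign_offsets (&cu);
  printf ("%s at %lu, %s at %lu, %s at %lu, next offset %lu\n",
	  cu.name, cu.offset, var.name, var.offset, ty.name, ty.offset,
	  next_offset);
  return 0;
}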
9489
9490 /* Size just the base type children at the start of the CU.
9491 This is needed because build_abbrev_table needs to size locs
9492 and sizing of type based stack ops needs to know die_offset
9493 values for the base types. */
9494
9495 static void
9496 calc_base_type_die_sizes (void)
9497 {
9498 unsigned long die_offset = (dwarf_split_debug_info
9499 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9500 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9501 unsigned int i;
9502 dw_die_ref base_type;
9503 #if ENABLE_ASSERT_CHECKING
9504 dw_die_ref prev = comp_unit_die ()->die_child;
9505 #endif
9506
9507 die_offset += size_of_die (comp_unit_die ());
9508 for (i = 0; base_types.iterate (i, &base_type); i++)
9509 {
9510 #if ENABLE_ASSERT_CHECKING
9511 gcc_assert (base_type->die_offset == 0
9512 && prev->die_sib == base_type
9513 && base_type->die_child == NULL
9514 && base_type->die_abbrev);
9515 prev = base_type;
9516 #endif
9517 if (abbrev_opt_start
9518 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9519 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9520 base_type->die_offset = die_offset;
9521 die_offset += size_of_die (base_type);
9522 }
9523 }
9524
9525 /* Set the marks for a die and its children. We do this so
9526 that we know whether or not a reference needs to use FORM_ref_addr; only
9527 DIEs in the same CU will be marked. We used to clear out the offset
9528 and use that as the flag, but ran into ordering problems. */
9529
9530 static void
9531 mark_dies (dw_die_ref die)
9532 {
9533 dw_die_ref c;
9534
9535 gcc_assert (!die->die_mark);
9536
9537 die->die_mark = 1;
9538 FOR_EACH_CHILD (die, c, mark_dies (c));
9539 }
9540
9541 /* Clear the marks for a die and its children. */
9542
9543 static void
9544 unmark_dies (dw_die_ref die)
9545 {
9546 dw_die_ref c;
9547
9548 if (! use_debug_types)
9549 gcc_assert (die->die_mark);
9550
9551 die->die_mark = 0;
9552 FOR_EACH_CHILD (die, c, unmark_dies (c));
9553 }
9554
9555 /* Clear the marks for a die, its children and referred dies. */
9556
9557 static void
9558 unmark_all_dies (dw_die_ref die)
9559 {
9560 dw_die_ref c;
9561 dw_attr_node *a;
9562 unsigned ix;
9563
9564 if (!die->die_mark)
9565 return;
9566 die->die_mark = 0;
9567
9568 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9569
9570 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9571 if (AT_class (a) == dw_val_class_die_ref)
9572 unmark_all_dies (AT_ref (a));
9573 }
9574
9575 /* Calculate whether the entry should appear in the final output file. It may
9576 come from a type that has been pruned. */
9577
9578 static bool
9579 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9580 {
9581 /* By limiting gnu pubnames to definitions only, gold can generate a
9582 gdb index without entries for declarations, which don't include
9583 enough information to be useful. */
9584 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9585 return false;
9586
9587 if (table == pubname_table)
9588 {
9589 /* Enumerator names are part of the pubname table, but the
9590 parent DW_TAG_enumeration_type die may have been pruned.
9591 Don't output them if that is the case. */
9592 if (p->die->die_tag == DW_TAG_enumerator &&
9593 (p->die->die_parent == NULL
9594 || !p->die->die_parent->die_perennial_p))
9595 return false;
9596
9597 /* Everything else in the pubname table is included. */
9598 return true;
9599 }
9600
9601 /* The pubtypes table shouldn't include types that have been
9602 pruned. */
9603 return (p->die->die_offset != 0
9604 || !flag_eliminate_unused_debug_types);
9605 }
9606
9607 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9608 generated for the compilation unit. */
9609
9610 static unsigned long
9611 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9612 {
9613 unsigned long size;
9614 unsigned i;
9615 pubname_entry *p;
9616 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9617
9618 size = DWARF_PUBNAMES_HEADER_SIZE;
9619 FOR_EACH_VEC_ELT (*names, i, p)
9620 if (include_pubname_in_output (names, p))
9621 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9622
9623 size += DWARF_OFFSET_SIZE;
9624 return size;
9625 }
9626
9627 /* Return the size of the information in the .debug_aranges section. */
9628
9629 static unsigned long
9630 size_of_aranges (void)
9631 {
9632 unsigned long size;
9633
9634 size = DWARF_ARANGES_HEADER_SIZE;
9635
9636 /* Count the address/length pair for this compilation unit. */
9637 if (text_section_used)
9638 size += 2 * DWARF2_ADDR_SIZE;
9639 if (cold_text_section_used)
9640 size += 2 * DWARF2_ADDR_SIZE;
9641 if (have_multiple_function_sections)
9642 {
9643 unsigned fde_idx;
9644 dw_fde_ref fde;
9645
9646 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9647 {
9648 if (DECL_IGNORED_P (fde->decl))
9649 continue;
9650 if (!fde->in_std_section)
9651 size += 2 * DWARF2_ADDR_SIZE;
9652 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9653 size += 2 * DWARF2_ADDR_SIZE;
9654 }
9655 }
9656
9657 /* Count the two zero words used to terminate the address range table. */
9658 size += 2 * DWARF2_ADDR_SIZE;
9659 return size;
9660 }
9661 \f
9662 /* Select the encoding of an attribute value. */
9663
9664 static enum dwarf_form
9665 value_format (dw_attr_node *a)
9666 {
9667 switch (AT_class (a))
9668 {
9669 case dw_val_class_addr:
9670 /* Only very few attributes allow DW_FORM_addr. */
9671 switch (a->dw_attr)
9672 {
9673 case DW_AT_low_pc:
9674 case DW_AT_high_pc:
9675 case DW_AT_entry_pc:
9676 case DW_AT_trampoline:
9677 return (AT_index (a) == NOT_INDEXED
9678 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9679 default:
9680 break;
9681 }
9682 switch (DWARF2_ADDR_SIZE)
9683 {
9684 case 1:
9685 return DW_FORM_data1;
9686 case 2:
9687 return DW_FORM_data2;
9688 case 4:
9689 return DW_FORM_data4;
9690 case 8:
9691 return DW_FORM_data8;
9692 default:
9693 gcc_unreachable ();
9694 }
9695 case dw_val_class_loc_list:
9696 case dw_val_class_view_list:
9697 if (dwarf_split_debug_info
9698 && dwarf_version >= 5
9699 && AT_loc_list (a)->num_assigned)
9700 return DW_FORM_loclistx;
9701 /* FALLTHRU */
9702 case dw_val_class_range_list:
9703 /* For range lists in DWARF 5, use DW_FORM_rnglistx from .debug_info.dwo
9704 but in .debug_info use DW_FORM_sec_offset, which is shorter if we
9705 care about sizes of .debug* sections in shared libraries and
9706 executables and don't take into account relocations that affect just
9707 relocatable objects - for DW_FORM_rnglistx we'd have to emit offset
9708 table in the .debug_rnglists section. */
9709 if (dwarf_split_debug_info
9710 && dwarf_version >= 5
9711 && AT_class (a) == dw_val_class_range_list
9712 && rnglist_idx
9713 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9714 return DW_FORM_rnglistx;
9715 if (dwarf_version >= 4)
9716 return DW_FORM_sec_offset;
9717 /* FALLTHRU */
9718 case dw_val_class_vms_delta:
9719 case dw_val_class_offset:
9720 switch (DWARF_OFFSET_SIZE)
9721 {
9722 case 4:
9723 return DW_FORM_data4;
9724 case 8:
9725 return DW_FORM_data8;
9726 default:
9727 gcc_unreachable ();
9728 }
9729 case dw_val_class_loc:
9730 if (dwarf_version >= 4)
9731 return DW_FORM_exprloc;
9732 switch (constant_size (size_of_locs (AT_loc (a))))
9733 {
9734 case 1:
9735 return DW_FORM_block1;
9736 case 2:
9737 return DW_FORM_block2;
9738 case 4:
9739 return DW_FORM_block4;
9740 default:
9741 gcc_unreachable ();
9742 }
9743 case dw_val_class_const:
9744 return DW_FORM_sdata;
9745 case dw_val_class_unsigned_const:
9746 switch (constant_size (AT_unsigned (a)))
9747 {
9748 case 1:
9749 return DW_FORM_data1;
9750 case 2:
9751 return DW_FORM_data2;
9752 case 4:
9753 /* In DWARF3 DW_AT_data_member_location with
9754 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9755 constant, so we need to use DW_FORM_udata if we need
9756 a large constant. */
9757 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9758 return DW_FORM_udata;
9759 return DW_FORM_data4;
9760 case 8:
9761 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9762 return DW_FORM_udata;
9763 return DW_FORM_data8;
9764 default:
9765 gcc_unreachable ();
9766 }
9767 case dw_val_class_const_implicit:
9768 case dw_val_class_unsigned_const_implicit:
9769 case dw_val_class_file_implicit:
9770 return DW_FORM_implicit_const;
9771 case dw_val_class_const_double:
9772 switch (HOST_BITS_PER_WIDE_INT)
9773 {
9774 case 8:
9775 return DW_FORM_data2;
9776 case 16:
9777 return DW_FORM_data4;
9778 case 32:
9779 return DW_FORM_data8;
9780 case 64:
9781 if (dwarf_version >= 5)
9782 return DW_FORM_data16;
9783 /* FALLTHRU */
9784 default:
9785 return DW_FORM_block1;
9786 }
9787 case dw_val_class_wide_int:
9788 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9789 {
9790 case 8:
9791 return DW_FORM_data1;
9792 case 16:
9793 return DW_FORM_data2;
9794 case 32:
9795 return DW_FORM_data4;
9796 case 64:
9797 return DW_FORM_data8;
9798 case 128:
9799 if (dwarf_version >= 5)
9800 return DW_FORM_data16;
9801 /* FALLTHRU */
9802 default:
9803 return DW_FORM_block1;
9804 }
9805 case dw_val_class_symview:
9806 /* ??? We might use uleb128, but then we'd have to compute
9807 .debug_info offsets in the assembler. */
9808 if (symview_upper_bound <= 0xff)
9809 return DW_FORM_data1;
9810 else if (symview_upper_bound <= 0xffff)
9811 return DW_FORM_data2;
9812 else if (symview_upper_bound <= 0xffffffff)
9813 return DW_FORM_data4;
9814 else
9815 return DW_FORM_data8;
9816 case dw_val_class_vec:
9817 switch (constant_size (a->dw_attr_val.v.val_vec.length
9818 * a->dw_attr_val.v.val_vec.elt_size))
9819 {
9820 case 1:
9821 return DW_FORM_block1;
9822 case 2:
9823 return DW_FORM_block2;
9824 case 4:
9825 return DW_FORM_block4;
9826 default:
9827 gcc_unreachable ();
9828 }
9829 case dw_val_class_flag:
9830 if (dwarf_version >= 4)
9831 {
9832 /* Currently all add_AT_flag calls pass in 1 as last argument,
9833 so DW_FORM_flag_present can be used. If that ever changes,
9834 we'll need to use DW_FORM_flag and have some optimization
9835 in build_abbrev_table that will change those to
9836 DW_FORM_flag_present if it is set to 1 in all DIEs using
9837 the same abbrev entry. */
9838 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9839 return DW_FORM_flag_present;
9840 }
9841 return DW_FORM_flag;
9842 case dw_val_class_die_ref:
9843 if (AT_ref_external (a))
9844 return use_debug_types ? DW_FORM_ref_sig8 : DW_FORM_ref_addr;
9845 else
9846 return DW_FORM_ref;
9847 case dw_val_class_fde_ref:
9848 return DW_FORM_data;
9849 case dw_val_class_lbl_id:
9850 return (AT_index (a) == NOT_INDEXED
9851 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9852 case dw_val_class_lineptr:
9853 case dw_val_class_macptr:
9854 case dw_val_class_loclistsptr:
9855 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
9856 case dw_val_class_str:
9857 return AT_string_form (a);
9858 case dw_val_class_file:
9859 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9860 {
9861 case 1:
9862 return DW_FORM_data1;
9863 case 2:
9864 return DW_FORM_data2;
9865 case 4:
9866 return DW_FORM_data4;
9867 default:
9868 gcc_unreachable ();
9869 }
9870
9871 case dw_val_class_data8:
9872 return DW_FORM_data8;
9873
9874 case dw_val_class_high_pc:
9875 switch (DWARF2_ADDR_SIZE)
9876 {
9877 case 1:
9878 return DW_FORM_data1;
9879 case 2:
9880 return DW_FORM_data2;
9881 case 4:
9882 return DW_FORM_data4;
9883 case 8:
9884 return DW_FORM_data8;
9885 default:
9886 gcc_unreachable ();
9887 }
9888
9889 case dw_val_class_discr_value:
9890 return (a->dw_attr_val.v.val_discr_value.pos
9891 ? DW_FORM_udata
9892 : DW_FORM_sdata);
9893 case dw_val_class_discr_list:
9894 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9895 {
9896 case 1:
9897 return DW_FORM_block1;
9898 case 2:
9899 return DW_FORM_block2;
9900 case 4:
9901 return DW_FORM_block4;
9902 default:
9903 gcc_unreachable ();
9904 }
9905
9906 default:
9907 gcc_unreachable ();
9908 }
9909 }
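/* Illustrative sketch, not part of GCC: a standalone program mirroring the
   dw_val_class_unsigned_const case of value_format, including the DWARF 3
   quirk where a 4- or 8-byte DW_AT_data_member_location would be read as a
   loclistptr and therefore has to be emitted as DW_FORM_udata.  The enum and
   the sample values are made up; only the selection logic follows the code
   above.  */

#include <stdio.h>

enum toy_form { FORM_data1, FORM_data2, FORM_data4, FORM_data8, FORM_udata };

static enum toy_form
unsigned_const_form (unsigned long long value, int dwarf_version,
		     int is_data_member_location)
{
  int csize;
  if (value <= 0xff) csize = 1;
  else if (value <= 0xffff) csize = 2;
  else if (value <= 0xffffffffULL) csize = 4;
  else csize = 8;

  if (dwarf_version == 3 && is_data_member_location && csize >= 4)
    return FORM_udata;
  switch (csize)
    {
    case 1: return FORM_data1;
    case 2: return FORM_data2;
    case 4: return FORM_data4;
    default: return FORM_data8;
    }
}

int
main (void)
{
  printf ("%d %d %d\n",
	  unsigned_const_form (40, 3, 1),	/* small offset: data1 */
	  unsigned_const_form (70000, 3, 1),	/* large offset, DWARF 3: udata */
	  unsigned_const_form (70000, 4, 1));	/* DWARF 4: data4 is fine */
  return 0;
}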
9910
9911 /* Output the encoding of an attribute value. */
9912
9913 static void
9914 output_value_format (dw_attr_node *a)
9915 {
9916 enum dwarf_form form = value_format (a);
9917
9918 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9919 }
9920
9921 /* Given a die and id, produce the appropriate abbreviations. */
9922
9923 static void
9924 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9925 {
9926 unsigned ix;
9927 dw_attr_node *a_attr;
9928
9929 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9930 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
9931 dwarf_tag_name (abbrev->die_tag));
9932
9933 if (abbrev->die_child != NULL)
9934 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
9935 else
9936 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
9937
9938 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
9939 {
9940 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
9941 dwarf_attr_name (a_attr->dw_attr));
9942 output_value_format (a_attr);
9943 if (value_format (a_attr) == DW_FORM_implicit_const)
9944 {
9945 if (AT_class (a_attr) == dw_val_class_file_implicit)
9946 {
9947 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
9948 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
9949 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
9950 }
9951 else
9952 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
9953 }
9954 }
9955
9956 dw2_asm_output_data (1, 0, NULL);
9957 dw2_asm_output_data (1, 0, NULL);
9958 }
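/* Illustrative sketch, not part of GCC: a standalone program printing the
   byte layout of one .debug_abbrev entry as output_die_abbrevs emits it --
   abbrev code, tag, children flag, (attribute, form) uleb128 pairs, closed
   by a (0, 0) pair.  The DW_TAG/DW_AT/DW_FORM numbers are the standard DWARF
   encodings, but the entry itself is a made-up example.  */

#include <stdio.h>

static void
put_uleb128 (unsigned long long v)
{
  do
    {
      unsigned char byte = v & 0x7f;
      v >>= 7;
      if (v)
	byte |= 0x80;
      printf ("%02x ", byte);
    }
  while (v);
}

int
main (void)
{
  put_uleb128 (1);				/* abbrev code 1 */
  put_uleb128 (0x34);				/* DW_TAG_variable */
  printf ("%02x ", 0);				/* DW_children_no */
  put_uleb128 (0x03); put_uleb128 (0x0e);	/* DW_AT_name, DW_FORM_strp */
  put_uleb128 (0x02); put_uleb128 (0x18);	/* DW_AT_location, DW_FORM_exprloc */
  put_uleb128 (0); put_uleb128 (0);		/* attribute list terminator */
  printf ("\n");
  return 0;
}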
9959
9960
9961 /* Output the .debug_abbrev section which defines the DIE abbreviation
9962 table. */
9963
9964 static void
9965 output_abbrev_section (void)
9966 {
9967 unsigned int abbrev_id;
9968 dw_die_ref abbrev;
9969
9970 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9971 if (abbrev_id != 0)
9972 output_die_abbrevs (abbrev_id, abbrev);
9973
9974 /* Terminate the table. */
9975 dw2_asm_output_data (1, 0, NULL);
9976 }
9977
9978 /* Return a new location list, given the begin and end range, and the
9979 expression. */
9980
9981 static inline dw_loc_list_ref
9982 new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin,
9983 const char *end, var_loc_view vend,
9984 const char *section)
9985 {
9986 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
9987
9988 retlist->begin = begin;
9989 retlist->begin_entry = NULL;
9990 retlist->end = end;
9991 retlist->expr = expr;
9992 retlist->section = section;
9993 retlist->vbegin = vbegin;
9994 retlist->vend = vend;
9995
9996 return retlist;
9997 }
9998
9999 /* Return true iff there's any nonzero view number in the loc list.
10000
10001 ??? When views are not enabled, we'll often extend a single range
10002 to the entire function, so that we emit a single location
10003 expression rather than a location list. With views, even with a
10004 single range, we'll output a list if start or end have a nonzero
10005 view. If we change this, we may want to stop splitting a single
10006 range in dw_loc_list just because of a nonzero view, even if it
10007 straddles across hot/cold partitions. */
10008
10009 static bool
10010 loc_list_has_views (dw_loc_list_ref list)
10011 {
10012 if (!debug_variable_location_views)
10013 return false;
10014
10015 for (dw_loc_list_ref loc = list;
10016 loc != NULL; loc = loc->dw_loc_next)
10017 if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend))
10018 return true;
10019
10020 return false;
10021 }
10022
10023 /* Generate a new internal symbol for this location list node, if it
10024 hasn't got one yet. */
10025
10026 static inline void
10027 gen_llsym (dw_loc_list_ref list)
10028 {
10029 gcc_assert (!list->ll_symbol);
10030 list->ll_symbol = gen_internal_sym ("LLST");
10031
10032 if (!loc_list_has_views (list))
10033 return;
10034
10035 if (dwarf2out_locviews_in_attribute ())
10036 {
10037 /* Use the same label_num for the view list. */
10038 label_num--;
10039 list->vl_symbol = gen_internal_sym ("LVUS");
10040 }
10041 else
10042 list->vl_symbol = list->ll_symbol;
10043 }
10044
10045 /* Generate a symbol for the list, but only if we really want to emit
10046 it as a list. */
10047
10048 static inline void
10049 maybe_gen_llsym (dw_loc_list_ref list)
10050 {
10051 if (!list || (!list->dw_loc_next && !loc_list_has_views (list)))
10052 return;
10053
10054 gen_llsym (list);
10055 }
10056
10057 /* Determine whether or not to skip loc_list entry CURR. If SIZEP is
10058 NULL, don't consider size of the location expression. If we're not
10059 to skip it, and SIZEP is non-null, store the size of CURR->expr's
10060 representation in *SIZEP. */
10061
10062 static bool
10063 skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = NULL)
10064 {
10065 /* Don't output an entry that starts and ends at the same address. */
10066 if (strcmp (curr->begin, curr->end) == 0
10067 && curr->vbegin == curr->vend && !curr->force)
10068 return true;
10069
10070 if (!sizep)
10071 return false;
10072
10073 unsigned long size = size_of_locs (curr->expr);
10074
10075 /* If the expression is too large, drop it on the floor. We could
10076 perhaps put it into DW_TAG_dwarf_procedure and refer to that
10077 in the expression, but expressions of 64KB or more for a single value
10078 in a single range are unlikely to be very useful. */
10079 if (dwarf_version < 5 && size > 0xffff)
10080 return true;
10081
10082 *sizep = size;
10083
10084 return false;
10085 }
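/* Illustrative sketch, not part of GCC: a standalone program with the two
   skip conditions above in isolation.  Entries whose range is empty carry no
   information, and before DWARF 5 the expression length field in .debug_loc
   is only two bytes, so expressions of 64KB or more cannot be represented
   and are dropped.  Views and the force flag are left out of this
   simplification; the labels and sizes are made up.  */

#include <stdio.h>
#include <string.h>

static int
skip_entry (const char *begin, const char *end, unsigned long expr_size,
	    int dwarf_version)
{
  if (strcmp (begin, end) == 0)
    return 1;				/* empty range */
  if (dwarf_version < 5 && expr_size > 0xffff)
    return 1;				/* does not fit the 2-byte length */
  return 0;
}

int
main (void)
{
  printf ("%d %d %d\n",
	  skip_entry (".LVL1", ".LVL1", 3, 4),	    /* empty range: skip */
	  skip_entry (".LVL1", ".LVL2", 70000, 4),  /* too big for DWARF 4 */
	  skip_entry (".LVL1", ".LVL2", 70000, 5)); /* fine with uleb128 */
  return 0;
}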
10086
10087 /* Output a view pair loclist entry for CURR, if it requires one. */
10088
10089 static void
10090 dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr)
10091 {
10092 if (!dwarf2out_locviews_in_loclist ())
10093 return;
10094
10095 if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend))
10096 return;
10097
10098 #ifdef DW_LLE_view_pair
10099 dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair");
10100
10101 if (dwarf2out_as_locview_support)
10102 {
10103 if (ZERO_VIEW_P (curr->vbegin))
10104 dw2_asm_output_data_uleb128 (0, "Location view begin");
10105 else
10106 {
10107 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10108 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10109 dw2_asm_output_symname_uleb128 (label, "Location view begin");
10110 }
10111
10112 if (ZERO_VIEW_P (curr->vend))
10113 dw2_asm_output_data_uleb128 (0, "Location view end");
10114 else
10115 {
10116 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10117 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10118 dw2_asm_output_symname_uleb128 (label, "Location view end");
10119 }
10120 }
10121 else
10122 {
10123 dw2_asm_output_data_uleb128 (curr->vbegin, "Location view begin");
10124 dw2_asm_output_data_uleb128 (curr->vend, "Location view end");
10125 }
10126 #endif /* DW_LLE_view_pair */
10127
10128 return;
10129 }
10130
10131 /* Output the location list given to us. */
10132
10133 static void
10134 output_loc_list (dw_loc_list_ref list_head)
10135 {
10136 int vcount = 0, lcount = 0;
10137
10138 if (list_head->emitted)
10139 return;
10140 list_head->emitted = true;
10141
10142 if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ())
10143 {
10144 ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol);
10145
10146 for (dw_loc_list_ref curr = list_head; curr != NULL;
10147 curr = curr->dw_loc_next)
10148 {
10149 unsigned long size;
10150
10151 if (skip_loc_list_entry (curr, &size))
10152 continue;
10153
10154 vcount++;
10155
10156 /* ?? dwarf_split_debug_info? */
10157 if (dwarf2out_as_locview_support)
10158 {
10159 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10160
10161 if (!ZERO_VIEW_P (curr->vbegin))
10162 {
10163 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10164 dw2_asm_output_symname_uleb128 (label,
10165 "View list begin (%s)",
10166 list_head->vl_symbol);
10167 }
10168 else
10169 dw2_asm_output_data_uleb128 (0,
10170 "View list begin (%s)",
10171 list_head->vl_symbol);
10172
10173 if (!ZERO_VIEW_P (curr->vend))
10174 {
10175 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10176 dw2_asm_output_symname_uleb128 (label,
10177 "View list end (%s)",
10178 list_head->vl_symbol);
10179 }
10180 else
10181 dw2_asm_output_data_uleb128 (0,
10182 "View list end (%s)",
10183 list_head->vl_symbol);
10184 }
10185 else
10186 {
10187 dw2_asm_output_data_uleb128 (curr->vbegin,
10188 "View list begin (%s)",
10189 list_head->vl_symbol);
10190 dw2_asm_output_data_uleb128 (curr->vend,
10191 "View list end (%s)",
10192 list_head->vl_symbol);
10193 }
10194 }
10195 }
10196
10197 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
10198
10199 const char *last_section = NULL;
10200 const char *base_label = NULL;
10201
10202 /* Walk the location list, and output each range + expression. */
10203 for (dw_loc_list_ref curr = list_head; curr != NULL;
10204 curr = curr->dw_loc_next)
10205 {
10206 unsigned long size;
10207
10208 /* Skip this entry? If we skip it here, we must skip it in the
10209 view list above as well. */
10210 if (skip_loc_list_entry (curr, &size))
10211 continue;
10212
10213 lcount++;
10214
10215 if (dwarf_version >= 5)
10216 {
10217 if (dwarf_split_debug_info)
10218 {
10219 dwarf2out_maybe_output_loclist_view_pair (curr);
10220 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has
10221 uleb128 index into .debug_addr and uleb128 length. */
10222 dw2_asm_output_data (1, DW_LLE_startx_length,
10223 "DW_LLE_startx_length (%s)",
10224 list_head->ll_symbol);
10225 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10226 "Location list range start index "
10227 "(%s)", curr->begin);
10228 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
10229 For that case we probably need to emit DW_LLE_startx_endx,
10230 but we'd need 2 .debug_addr entries rather than just one. */
10231 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10232 "Location list length (%s)",
10233 list_head->ll_symbol);
10234 }
10235 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
10236 {
10237 dwarf2out_maybe_output_loclist_view_pair (curr);
10238 /* If all code is in .text section, the base address is
10239 already provided by the CU attributes. Use
10240 DW_LLE_offset_pair where both addresses are uleb128 encoded
10241 offsets against that base. */
10242 dw2_asm_output_data (1, DW_LLE_offset_pair,
10243 "DW_LLE_offset_pair (%s)",
10244 list_head->ll_symbol);
10245 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
10246 "Location list begin address (%s)",
10247 list_head->ll_symbol);
10248 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
10249 "Location list end address (%s)",
10250 list_head->ll_symbol);
10251 }
10252 else if (HAVE_AS_LEB128)
10253 {
10254 /* Otherwise, find out how many consecutive entries could share
10255 the same base entry. If just one, emit DW_LLE_start_length,
10256 otherwise emit DW_LLE_base_address for the base address
10257 followed by a series of DW_LLE_offset_pair. */
10258 if (last_section == NULL || curr->section != last_section)
10259 {
10260 dw_loc_list_ref curr2;
10261 for (curr2 = curr->dw_loc_next; curr2 != NULL;
10262 curr2 = curr2->dw_loc_next)
10263 {
10264 if (strcmp (curr2->begin, curr2->end) == 0
10265 && !curr2->force)
10266 continue;
10267 break;
10268 }
10269 if (curr2 == NULL || curr->section != curr2->section)
10270 last_section = NULL;
10271 else
10272 {
10273 last_section = curr->section;
10274 base_label = curr->begin;
10275 dw2_asm_output_data (1, DW_LLE_base_address,
10276 "DW_LLE_base_address (%s)",
10277 list_head->ll_symbol);
10278 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
10279 "Base address (%s)",
10280 list_head->ll_symbol);
10281 }
10282 }
10283 /* Only one entry with the same base address. Use
10284 DW_LLE_start_length with absolute address and uleb128
10285 length. */
10286 if (last_section == NULL)
10287 {
10288 dwarf2out_maybe_output_loclist_view_pair (curr);
10289 dw2_asm_output_data (1, DW_LLE_start_length,
10290 "DW_LLE_start_length (%s)",
10291 list_head->ll_symbol);
10292 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10293 "Location list begin address (%s)",
10294 list_head->ll_symbol);
10295 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10296 "Location list length "
10297 "(%s)", list_head->ll_symbol);
10298 }
10299 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
10300 DW_LLE_base_address. */
10301 else
10302 {
10303 dwarf2out_maybe_output_loclist_view_pair (curr);
10304 dw2_asm_output_data (1, DW_LLE_offset_pair,
10305 "DW_LLE_offset_pair (%s)",
10306 list_head->ll_symbol);
10307 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
10308 "Location list begin address "
10309 "(%s)", list_head->ll_symbol);
10310 dw2_asm_output_delta_uleb128 (curr->end, base_label,
10311 "Location list end address "
10312 "(%s)", list_head->ll_symbol);
10313 }
10314 }
10315 /* The assembler does not support .uleb128 directive. Emit
10316 DW_LLE_start_end with a pair of absolute addresses. */
10317 else
10318 {
10319 dwarf2out_maybe_output_loclist_view_pair (curr);
10320 dw2_asm_output_data (1, DW_LLE_start_end,
10321 "DW_LLE_start_end (%s)",
10322 list_head->ll_symbol);
10323 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10324 "Location list begin address (%s)",
10325 list_head->ll_symbol);
10326 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10327 "Location list end address (%s)",
10328 list_head->ll_symbol);
10329 }
10330 }
10331 else if (dwarf_split_debug_info)
10332 {
10333 /* For -gsplit-dwarf -gdwarf-{2,3,4} emit index into .debug_addr
10334 and 4 byte length. */
10335 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
10336 "Location list start/length entry (%s)",
10337 list_head->ll_symbol);
10338 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10339 "Location list range start index (%s)",
10340 curr->begin);
10341 /* The length field is 4 bytes. If we ever need to support
10342 an 8-byte length, we can add a new DW_LLE code or fall back
10343 to DW_LLE_GNU_start_end_entry. */
10344 dw2_asm_output_delta (4, curr->end, curr->begin,
10345 "Location list range length (%s)",
10346 list_head->ll_symbol);
10347 }
10348 else if (!have_multiple_function_sections)
10349 {
10350 /* Pair of relative addresses against start of text section. */
10351 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
10352 "Location list begin address (%s)",
10353 list_head->ll_symbol);
10354 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
10355 "Location list end address (%s)",
10356 list_head->ll_symbol);
10357 }
10358 else
10359 {
10360 /* Pair of absolute addresses. */
10361 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10362 "Location list begin address (%s)",
10363 list_head->ll_symbol);
10364 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10365 "Location list end address (%s)",
10366 list_head->ll_symbol);
10367 }
10368
10369 /* Output the block length for this list of location operations. */
10370 if (dwarf_version >= 5)
10371 dw2_asm_output_data_uleb128 (size, "Location expression size");
10372 else
10373 {
10374 gcc_assert (size <= 0xffff);
10375 dw2_asm_output_data (2, size, "Location expression size");
10376 }
10377
10378 output_loc_sequence (curr->expr, -1);
10379 }
10380
10381 /* And finally list termination. */
10382 if (dwarf_version >= 5)
10383 dw2_asm_output_data (1, DW_LLE_end_of_list,
10384 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
10385 else if (dwarf_split_debug_info)
10386 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
10387 "Location list terminator (%s)",
10388 list_head->ll_symbol);
10389 else
10390 {
10391 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10392 "Location list terminator begin (%s)",
10393 list_head->ll_symbol);
10394 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10395 "Location list terminator end (%s)",
10396 list_head->ll_symbol);
10397 }
10398
10399 gcc_assert (!list_head->vl_symbol
10400 || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0));
10401 }
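/* Illustrative sketch, not part of GCC: a standalone program printing the
   byte stream output_loc_list produces for a single-range DWARF 5 entry when
   all code is in one section and the assembler supports .uleb128, i.e.
   DW_LLE_offset_pair, two uleb128 offsets, the expression size and bytes,
   and DW_LLE_end_of_list.  The offsets and the one-byte expression are made
   up; the DW_LLE_* and DW_OP_* values are the standard DWARF 5 encodings.  */

#include <stdio.h>

static void
put_uleb128 (unsigned long long v)
{
  do
    {
      unsigned char byte = v & 0x7f;
      v >>= 7;
      if (v)
	byte |= 0x80;
      printf ("%02x ", byte);
    }
  while (v);
}

int
main (void)
{
  printf ("%02x ", 0x04);	/* DW_LLE_offset_pair */
  put_uleb128 (0x10);		/* range start, offset from the CU base */
  put_uleb128 (0x2a);		/* range end, offset from the CU base */
  put_uleb128 (1);		/* location expression size */
  printf ("%02x ", 0x55);	/* DW_OP_reg5: value lives in register 5 */
  printf ("%02x\n", 0x00);	/* DW_LLE_end_of_list */
  return 0;
}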
10402
10403 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
10404 section. Emit a relocated reference if val_entry is NULL, otherwise,
10405 emit an indirect reference. */
10406
10407 static void
10408 output_range_list_offset (dw_attr_node *a)
10409 {
10410 const char *name = dwarf_attr_name (a->dw_attr);
10411
10412 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
10413 {
10414 if (dwarf_version >= 5)
10415 {
10416 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10417 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
10418 debug_ranges_section, "%s", name);
10419 }
10420 else
10421 {
10422 char *p = strchr (ranges_section_label, '\0');
10423 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
10424 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
10425 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
10426 debug_ranges_section, "%s", name);
10427 *p = '\0';
10428 }
10429 }
10430 else if (dwarf_version >= 5)
10431 {
10432 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10433 gcc_assert (rnglist_idx);
10434 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
10435 }
10436 else
10437 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10438 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
10439 "%s (offset from %s)", name, ranges_section_label);
10440 }
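/* Illustrative sketch, not part of GCC: a standalone program doing the
   pre-DWARF-5 offset arithmetic used above.  Each .debug_ranges table entry
   is a begin/end address pair, so entry number N starts N * 2 * address-size
   bytes into the section.  The 8-byte address size is an assumption for the
   example.  */

#include <stdio.h>

int
main (void)
{
  unsigned addr_size = 8;	/* assumed DWARF2_ADDR_SIZE */
  for (unsigned n = 0; n < 3; n++)
    printf ("ranges table entry %u starts at offset %u\n",
	    n, n * 2 * addr_size);
  return 0;
}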
10441
10442 /* Output the offset into the debug_loc section. */
10443
10444 static void
10445 output_loc_list_offset (dw_attr_node *a)
10446 {
10447 char *sym = AT_loc_list (a)->ll_symbol;
10448
10449 gcc_assert (sym);
10450 if (!dwarf_split_debug_info)
10451 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10452 "%s", dwarf_attr_name (a->dw_attr));
10453 else if (dwarf_version >= 5)
10454 {
10455 gcc_assert (AT_loc_list (a)->num_assigned);
10456 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
10457 dwarf_attr_name (a->dw_attr),
10458 sym);
10459 }
10460 else
10461 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10462 "%s", dwarf_attr_name (a->dw_attr));
10463 }
10464
10465 /* Output the offset into the debug_loc section. */
10466
10467 static void
10468 output_view_list_offset (dw_attr_node *a)
10469 {
10470 char *sym = (*AT_loc_list_ptr (a))->vl_symbol;
10471
10472 gcc_assert (sym);
10473 if (dwarf_split_debug_info)
10474 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10475 "%s", dwarf_attr_name (a->dw_attr));
10476 else
10477 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10478 "%s", dwarf_attr_name (a->dw_attr));
10479 }
10480
10481 /* Output an attribute's index or value appropriately. */
10482
10483 static void
10484 output_attr_index_or_value (dw_attr_node *a)
10485 {
10486 const char *name = dwarf_attr_name (a->dw_attr);
10487
10488 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
10489 {
10490 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
10491 return;
10492 }
10493 switch (AT_class (a))
10494 {
10495 case dw_val_class_addr:
10496 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
10497 break;
10498 case dw_val_class_high_pc:
10499 case dw_val_class_lbl_id:
10500 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
10501 break;
10502 default:
10503 gcc_unreachable ();
10504 }
10505 }
10506
10507 /* Output a type signature. */
10508
10509 static inline void
10510 output_signature (const char *sig, const char *name)
10511 {
10512 int i;
10513
10514 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10515 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
10516 }
10517
10518 /* Output a discriminant value. */
10519
10520 static inline void
10521 output_discr_value (dw_discr_value *discr_value, const char *name)
10522 {
10523 if (discr_value->pos)
10524 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
10525 else
10526 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
10527 }
10528
10529 /* Output the DIE and its attributes. Called recursively to generate
10530 the definitions of each child DIE. */
10531
10532 static void
10533 output_die (dw_die_ref die)
10534 {
10535 dw_attr_node *a;
10536 dw_die_ref c;
10537 unsigned long size;
10538 unsigned ix;
10539
10540 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10541 (unsigned long)die->die_offset,
10542 dwarf_tag_name (die->die_tag));
10543
10544 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10545 {
10546 const char *name = dwarf_attr_name (a->dw_attr);
10547
10548 switch (AT_class (a))
10549 {
10550 case dw_val_class_addr:
10551 output_attr_index_or_value (a);
10552 break;
10553
10554 case dw_val_class_offset:
10555 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10556 "%s", name);
10557 break;
10558
10559 case dw_val_class_range_list:
10560 output_range_list_offset (a);
10561 break;
10562
10563 case dw_val_class_loc:
10564 size = size_of_locs (AT_loc (a));
10565
10566 /* Output the block length for this list of location operations. */
10567 if (dwarf_version >= 4)
10568 dw2_asm_output_data_uleb128 (size, "%s", name);
10569 else
10570 dw2_asm_output_data (constant_size (size), size, "%s", name);
10571
10572 output_loc_sequence (AT_loc (a), -1);
10573 break;
10574
10575 case dw_val_class_const:
10576 /* ??? It would be slightly more efficient to use a scheme like the
10577 one used for unsigned constants below, but gdb 4.x does not sign
10578 extend. Gdb 5.x does sign extend. */
10579 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10580 break;
10581
10582 case dw_val_class_unsigned_const:
10583 {
10584 int csize = constant_size (AT_unsigned (a));
10585 if (dwarf_version == 3
10586 && a->dw_attr == DW_AT_data_member_location
10587 && csize >= 4)
10588 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10589 else
10590 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10591 }
10592 break;
10593
10594 case dw_val_class_symview:
10595 {
10596 int vsize;
10597 if (symview_upper_bound <= 0xff)
10598 vsize = 1;
10599 else if (symview_upper_bound <= 0xffff)
10600 vsize = 2;
10601 else if (symview_upper_bound <= 0xffffffff)
10602 vsize = 4;
10603 else
10604 vsize = 8;
10605 dw2_asm_output_addr (vsize, a->dw_attr_val.v.val_symbolic_view,
10606 "%s", name);
10607 }
10608 break;
10609
10610 case dw_val_class_const_implicit:
10611 if (flag_debug_asm)
10612 fprintf (asm_out_file, "\t\t\t%s %s ("
10613 HOST_WIDE_INT_PRINT_DEC ")\n",
10614 ASM_COMMENT_START, name, AT_int (a));
10615 break;
10616
10617 case dw_val_class_unsigned_const_implicit:
10618 if (flag_debug_asm)
10619 fprintf (asm_out_file, "\t\t\t%s %s ("
10620 HOST_WIDE_INT_PRINT_HEX ")\n",
10621 ASM_COMMENT_START, name, AT_unsigned (a));
10622 break;
10623
10624 case dw_val_class_const_double:
10625 {
10626 unsigned HOST_WIDE_INT first, second;
10627
10628 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10629 dw2_asm_output_data (1,
10630 HOST_BITS_PER_DOUBLE_INT
10631 / HOST_BITS_PER_CHAR,
10632 NULL);
10633
10634 if (WORDS_BIG_ENDIAN)
10635 {
10636 first = a->dw_attr_val.v.val_double.high;
10637 second = a->dw_attr_val.v.val_double.low;
10638 }
10639 else
10640 {
10641 first = a->dw_attr_val.v.val_double.low;
10642 second = a->dw_attr_val.v.val_double.high;
10643 }
10644
10645 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10646 first, "%s", name);
10647 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10648 second, NULL);
10649 }
10650 break;
10651
10652 case dw_val_class_wide_int:
10653 {
10654 int i;
10655 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10656 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10657 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10658 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10659 * l, NULL);
10660
10661 if (WORDS_BIG_ENDIAN)
10662 for (i = len - 1; i >= 0; --i)
10663 {
10664 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10665 "%s", name);
10666 name = "";
10667 }
10668 else
10669 for (i = 0; i < len; ++i)
10670 {
10671 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10672 "%s", name);
10673 name = "";
10674 }
10675 }
10676 break;
10677
10678 case dw_val_class_vec:
10679 {
10680 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10681 unsigned int len = a->dw_attr_val.v.val_vec.length;
10682 unsigned int i;
10683 unsigned char *p;
10684
10685 dw2_asm_output_data (constant_size (len * elt_size),
10686 len * elt_size, "%s", name);
10687 if (elt_size > sizeof (HOST_WIDE_INT))
10688 {
10689 elt_size /= 2;
10690 len *= 2;
10691 }
10692 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10693 i < len;
10694 i++, p += elt_size)
10695 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10696 "fp or vector constant word %u", i);
10697 break;
10698 }
10699
10700 case dw_val_class_flag:
10701 if (dwarf_version >= 4)
10702 {
10703 /* Currently all add_AT_flag calls pass in 1 as last argument,
10704 so DW_FORM_flag_present can be used. If that ever changes,
10705 we'll need to use DW_FORM_flag and have some optimization
10706 in build_abbrev_table that will change those to
10707 DW_FORM_flag_present if it is set to 1 in all DIEs using
10708 the same abbrev entry. */
10709 gcc_assert (AT_flag (a) == 1);
10710 if (flag_debug_asm)
10711 fprintf (asm_out_file, "\t\t\t%s %s\n",
10712 ASM_COMMENT_START, name);
10713 break;
10714 }
10715 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10716 break;
10717
10718 case dw_val_class_loc_list:
10719 output_loc_list_offset (a);
10720 break;
10721
10722 case dw_val_class_view_list:
10723 output_view_list_offset (a);
10724 break;
10725
10726 case dw_val_class_die_ref:
10727 if (AT_ref_external (a))
10728 {
10729 if (AT_ref (a)->comdat_type_p)
10730 {
10731 comdat_type_node *type_node
10732 = AT_ref (a)->die_id.die_type_node;
10733
10734 gcc_assert (type_node);
10735 output_signature (type_node->signature, name);
10736 }
10737 else
10738 {
10739 const char *sym = AT_ref (a)->die_id.die_symbol;
10740 int size;
10741
10742 gcc_assert (sym);
10743 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10744 length, whereas in DWARF3 it's always sized as an
10745 offset. */
10746 if (dwarf_version == 2)
10747 size = DWARF2_ADDR_SIZE;
10748 else
10749 size = DWARF_OFFSET_SIZE;
10750 /* ??? We cannot unconditionally output die_offset if
10751 non-zero - others might create references to those
10752 DIEs via symbols.
10753 And we do not clear its DIE offset after outputting it
10754 (the label refers to the actual DIE, not to the DWARF CU
10755 header; only in the latter case would emitting label + offset
10756 be the correct thing to do).
10757 ??? This is the reason for the with_offset flag. */
10758 if (AT_ref (a)->with_offset)
10759 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10760 debug_info_section, "%s", name);
10761 else
10762 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10763 name);
10764 }
10765 }
10766 else
10767 {
10768 gcc_assert (AT_ref (a)->die_offset);
10769 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10770 "%s", name);
10771 }
10772 break;
10773
10774 case dw_val_class_fde_ref:
10775 {
10776 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10777
10778 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10779 a->dw_attr_val.v.val_fde_index * 2);
10780 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10781 "%s", name);
10782 }
10783 break;
10784
10785 case dw_val_class_vms_delta:
10786 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10787 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10788 AT_vms_delta2 (a), AT_vms_delta1 (a),
10789 "%s", name);
10790 #else
10791 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10792 AT_vms_delta2 (a), AT_vms_delta1 (a),
10793 "%s", name);
10794 #endif
10795 break;
10796
10797 case dw_val_class_lbl_id:
10798 output_attr_index_or_value (a);
10799 break;
10800
10801 case dw_val_class_lineptr:
10802 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10803 debug_line_section, "%s", name);
10804 break;
10805
10806 case dw_val_class_macptr:
10807 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10808 debug_macinfo_section, "%s", name);
10809 break;
10810
10811 case dw_val_class_loclistsptr:
10812 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10813 debug_loc_section, "%s", name);
10814 break;
10815
10816 case dw_val_class_str:
10817 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10818 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10819 a->dw_attr_val.v.val_str->label,
10820 debug_str_section,
10821 "%s: \"%s\"", name, AT_string (a));
10822 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10823 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10824 a->dw_attr_val.v.val_str->label,
10825 debug_line_str_section,
10826 "%s: \"%s\"", name, AT_string (a));
10827 else if (a->dw_attr_val.v.val_str->form == dwarf_FORM (DW_FORM_strx))
10828 dw2_asm_output_data_uleb128 (AT_index (a),
10829 "%s: \"%s\"", name, AT_string (a));
10830 else
10831 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10832 break;
10833
10834 case dw_val_class_file:
10835 {
10836 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10837
10838 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10839 a->dw_attr_val.v.val_file->filename);
10840 break;
10841 }
10842
10843 case dw_val_class_file_implicit:
10844 if (flag_debug_asm)
10845 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10846 ASM_COMMENT_START, name,
10847 maybe_emit_file (a->dw_attr_val.v.val_file),
10848 a->dw_attr_val.v.val_file->filename);
10849 break;
10850
10851 case dw_val_class_data8:
10852 {
10853 int i;
10854
10855 for (i = 0; i < 8; i++)
10856 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10857 i == 0 ? "%s" : NULL, name);
10858 break;
10859 }
10860
10861 case dw_val_class_high_pc:
10862 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10863 get_AT_low_pc (die), "DW_AT_high_pc");
10864 break;
10865
10866 case dw_val_class_discr_value:
10867 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10868 break;
10869
10870 case dw_val_class_discr_list:
10871 {
10872 dw_discr_list_ref list = AT_discr_list (a);
10873 const int size = size_of_discr_list (list);
10874
10875 /* This is a block, so output its length first. */
10876 dw2_asm_output_data (constant_size (size), size,
10877 "%s: block size", name);
10878
10879 for (; list != NULL; list = list->dw_discr_next)
10880 {
10881 /* One byte for the discriminant value descriptor, and then as
10882 many LEB128 numbers as required. */
10883 if (list->dw_discr_range)
10884 dw2_asm_output_data (1, DW_DSC_range,
10885 "%s: DW_DSC_range", name);
10886 else
10887 dw2_asm_output_data (1, DW_DSC_label,
10888 "%s: DW_DSC_label", name);
10889
10890 output_discr_value (&list->dw_discr_lower_bound, name);
10891 if (list->dw_discr_range)
10892 output_discr_value (&list->dw_discr_upper_bound, name);
10893 }
10894 break;
10895 }
10896
10897 default:
10898 gcc_unreachable ();
10899 }
10900 }
10901
10902 FOR_EACH_CHILD (die, c, output_die (c));
10903
10904 /* Add null byte to terminate sibling list. */
10905 if (die->die_child != NULL)
10906 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10907 (unsigned long) die->die_offset);
10908 }
10909
10910 /* Output the dwarf version number. */
10911
10912 static void
10913 output_dwarf_version ()
10914 {
10915 /* ??? For now, if -gdwarf-6 is specified, we output version 5 with
10916 views in loclist. That will change eventually. */
10917 if (dwarf_version == 6)
10918 {
10919 static bool once;
10920 if (!once)
10921 {
10922 warning (0,
10923 "-gdwarf-6 is output as version 5 with incompatibilities");
10924 once = true;
10925 }
10926 dw2_asm_output_data (2, 5, "DWARF version number");
10927 }
10928 else
10929 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10930 }
10931
10932 /* Output the compilation unit that appears at the beginning of the
10933 .debug_info section, and precedes the DIE descriptions. */
10934
10935 static void
10936 output_compilation_unit_header (enum dwarf_unit_type ut)
10937 {
10938 if (!XCOFF_DEBUGGING_INFO)
10939 {
10940 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10941 dw2_asm_output_data (4, 0xffffffff,
10942 "Initial length escape value indicating 64-bit DWARF extension");
10943 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10944 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
10945 "Length of Compilation Unit Info");
10946 }
10947
10948 output_dwarf_version ();
10949 if (dwarf_version >= 5)
10950 {
10951 const char *name;
10952 switch (ut)
10953 {
10954 case DW_UT_compile: name = "DW_UT_compile"; break;
10955 case DW_UT_type: name = "DW_UT_type"; break;
10956 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
10957 case DW_UT_split_type: name = "DW_UT_split_type"; break;
10958 default: gcc_unreachable ();
10959 }
10960 dw2_asm_output_data (1, ut, "%s", name);
10961 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10962 }
10963 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
10964 debug_abbrev_section,
10965 "Offset Into Abbrev. Section");
10966 if (dwarf_version < 5)
10967 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10968 }
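/* For reference, with 32-bit DWARF the header emitted above is
   unit_length (4), version (2), debug_abbrev_offset (4), address_size (1)
   for DWARF 2-4, and unit_length (4), version (2), unit_type (1),
   address_size (1), debug_abbrev_offset (4) for DWARF 5; 64-bit DWARF
   prepends the 0xffffffff escape and widens the offset fields to 8 bytes.  */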
10969
10970 /* Output the compilation unit DIE and its children. */
10971
10972 static void
10973 output_comp_unit (dw_die_ref die, int output_if_empty,
10974 const unsigned char *dwo_id)
10975 {
10976 const char *secname, *oldsym;
10977 char *tmp;
10978
10979 /* Unless we are outputting the main CU, we may throw away empty ones. */
10980 if (!output_if_empty && die->die_child == NULL)
10981 return;
10982
10983 /* Even if there are no children of this DIE, we must output the information
10984 about the compilation unit. Otherwise, on an empty translation unit, we
10985 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
10986 will then complain when examining the file. First mark all the DIEs in
10987 this CU so we know which get local refs. */
10988 mark_dies (die);
10989
10990 external_ref_hash_type *extern_map = optimize_external_refs (die);
10991
10992 /* For now, optimize only the main CU; to optimize the rest
10993 we'd need to see all of them earlier. Leave the rest for post-linking
10994 tools like DWZ. */
10995 if (die == comp_unit_die ())
10996 abbrev_opt_start = vec_safe_length (abbrev_die_table);
10997
10998 build_abbrev_table (die, extern_map);
10999
11000 optimize_abbrev_table ();
11001
11002 delete extern_map;
11003
11004 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11005 next_die_offset = (dwo_id
11006 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11007 : DWARF_COMPILE_UNIT_HEADER_SIZE);
11008 calc_die_sizes (die);
11009
11010 oldsym = die->die_id.die_symbol;
11011 if (oldsym && die->comdat_type_p)
11012 {
11013 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
11014
11015 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
11016 secname = tmp;
11017 die->die_id.die_symbol = NULL;
11018 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11019 }
11020 else
11021 {
11022 switch_to_section (debug_info_section);
11023 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
11024 info_section_emitted = true;
11025 }
11026
11027 /* For LTO cross-unit DIE refs we want a symbol at the start of the
11028 debuginfo section, not on the CU DIE. */
11029 if ((flag_generate_lto || flag_generate_offload) && oldsym)
11030 {
11031 /* ??? No way to get visibility assembled without a decl. */
11032 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
11033 get_identifier (oldsym), char_type_node);
11034 TREE_PUBLIC (decl) = true;
11035 TREE_STATIC (decl) = true;
11036 DECL_ARTIFICIAL (decl) = true;
11037 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
11038 DECL_VISIBILITY_SPECIFIED (decl) = true;
11039 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
11040 #ifdef ASM_WEAKEN_LABEL
11041 /* We prefer a .weak because that handles duplicates from duplicate
11042 archive members in a graceful way. */
11043 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
11044 #else
11045 targetm.asm_out.globalize_label (asm_out_file, oldsym);
11046 #endif
11047 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
11048 }
11049
11050 /* Output debugging information. */
11051 output_compilation_unit_header (dwo_id
11052 ? DW_UT_split_compile : DW_UT_compile);
11053 if (dwarf_version >= 5)
11054 {
11055 if (dwo_id != NULL)
11056 for (int i = 0; i < 8; i++)
11057 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11058 }
11059 output_die (die);
11060
11061 /* Leave the marks on the main CU, so we can check them in
11062 output_pubnames. */
11063 if (oldsym)
11064 {
11065 unmark_dies (die);
11066 die->die_id.die_symbol = oldsym;
11067 }
11068 }
11069
11070 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
11071 and .debug_pubtypes. This is configured per-target, but can be
11072 overridden by the -gpubnames or -gno-pubnames options. */
11073
11074 static inline bool
11075 want_pubnames (void)
11076 {
11077 if (debug_info_level <= DINFO_LEVEL_TERSE)
11078 return false;
11079 if (debug_generate_pub_sections != -1)
11080 return debug_generate_pub_sections;
11081 return targetm.want_debug_pub_sections;
11082 }
11083
11084 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
11085
11086 static void
11087 add_AT_pubnames (dw_die_ref die)
11088 {
11089 if (want_pubnames ())
11090 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
11091 }
11092
11093 /* Add a string attribute value to a skeleton DIE. */
11094
11095 static inline void
11096 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
11097 const char *str)
11098 {
11099 dw_attr_node attr;
11100 struct indirect_string_node *node;
11101
11102 if (! skeleton_debug_str_hash)
11103 skeleton_debug_str_hash
11104 = hash_table<indirect_string_hasher>::create_ggc (10);
11105
11106 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
11107 find_string_form (node);
11108 if (node->form == dwarf_FORM (DW_FORM_strx))
11109 node->form = DW_FORM_strp;
11110
11111 attr.dw_attr = attr_kind;
11112 attr.dw_attr_val.val_class = dw_val_class_str;
11113 attr.dw_attr_val.val_entry = NULL;
11114 attr.dw_attr_val.v.val_str = node;
11115 add_dwarf_attr (die, &attr);
11116 }
11117
11118 /* Helper function to generate top-level dies for skeleton debug_info and
11119 debug_types. */
11120
11121 static void
11122 add_top_level_skeleton_die_attrs (dw_die_ref die)
11123 {
11124 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
11125 const char *comp_dir = comp_dir_string ();
11126
11127 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
11128 if (comp_dir != NULL)
11129 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
11130 add_AT_pubnames (die);
11131 add_AT_lineptr (die, dwarf_AT (DW_AT_addr_base), debug_addr_section_label);
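  /* Together with the DWO id emitted in the skeleton unit header, these
     attributes are what a consumer needs to locate the .dwo file and
     match it against this skeleton unit.  */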
11132 }
11133
11134 /* Output skeleton debug sections that point to the dwo file. */
11135
11136 static void
11137 output_skeleton_debug_sections (dw_die_ref comp_unit,
11138 const unsigned char *dwo_id)
11139 {
11140 /* These attributes will be found in the full debug_info section. */
11141 remove_AT (comp_unit, DW_AT_producer);
11142 remove_AT (comp_unit, DW_AT_language);
11143
11144 switch_to_section (debug_skeleton_info_section);
11145 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
11146
11147 /* Produce the skeleton compilation-unit header. This one differs enough from
11148 a normal CU header that it's better not to call
11149 output_compilation_unit_header. */
11150 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11151 dw2_asm_output_data (4, 0xffffffff,
11152 "Initial length escape value indicating 64-bit "
11153 "DWARF extension");
11154
11155 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11156 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11157 - DWARF_INITIAL_LENGTH_SIZE
11158 + size_of_die (comp_unit),
11159 "Length of Compilation Unit Info");
11160 output_dwarf_version ();
11161 if (dwarf_version >= 5)
11162 {
11163 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
11164 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11165 }
11166 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
11167 debug_skeleton_abbrev_section,
11168 "Offset Into Abbrev. Section");
11169 if (dwarf_version < 5)
11170 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11171 else
11172 for (int i = 0; i < 8; i++)
11173 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11174
11175 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
11176 output_die (comp_unit);
11177
11178 /* Build the skeleton debug_abbrev section. */
11179 switch_to_section (debug_skeleton_abbrev_section);
11180 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
11181
11182 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
11183
11184 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
11185 }
11186
11187 /* Output a comdat type unit DIE and its children. */
11188
11189 static void
11190 output_comdat_type_unit (comdat_type_node *node)
11191 {
11192 const char *secname;
11193 char *tmp;
11194 int i;
11195 #if defined (OBJECT_FORMAT_ELF)
11196 tree comdat_key;
11197 #endif
11198
11199 /* First mark all the DIEs in this CU so we know which get local refs. */
11200 mark_dies (node->root_die);
11201
11202 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
11203
11204 build_abbrev_table (node->root_die, extern_map);
11205
11206 delete extern_map;
11207 extern_map = NULL;
11208
11209 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11210 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
11211 calc_die_sizes (node->root_die);
11212
11213 #if defined (OBJECT_FORMAT_ELF)
11214 if (dwarf_version >= 5)
11215 {
11216 if (!dwarf_split_debug_info)
11217 secname = ".debug_info";
11218 else
11219 secname = ".debug_info.dwo";
11220 }
11221 else if (!dwarf_split_debug_info)
11222 secname = ".debug_types";
11223 else
11224 secname = ".debug_types.dwo";
11225
11226 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11227 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
11228 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11229 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
11230 comdat_key = get_identifier (tmp);
11231 targetm.asm_out.named_section (secname,
11232 SECTION_DEBUG | SECTION_LINKONCE,
11233 comdat_key);
11234 #else
11235 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11236 sprintf (tmp, (dwarf_version >= 5
11237 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
11238 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11239 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
11240 secname = tmp;
11241 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11242 #endif
11243
11244 /* Output debugging information. */
11245 output_compilation_unit_header (dwarf_split_debug_info
11246 ? DW_UT_split_type : DW_UT_type);
11247 output_signature (node->signature, "Type Signature");
11248 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
11249 "Offset to Type DIE");
11250 output_die (node->root_die);
11251
11252 unmark_dies (node->root_die);
11253 }
11254
11255 /* Return the DWARF2/3 pubname associated with a decl. */
11256
11257 static const char *
11258 dwarf2_name (tree decl, int scope)
11259 {
11260 if (DECL_NAMELESS (decl))
11261 return NULL;
11262 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
11263 }
11264
11265 /* Add a new entry to .debug_pubnames if appropriate. */
11266
11267 static void
11268 add_pubname_string (const char *str, dw_die_ref die)
11269 {
11270 pubname_entry e;
11271
11272 e.die = die;
11273 e.name = xstrdup (str);
11274 vec_safe_push (pubname_table, e);
11275 }
11276
11277 static void
11278 add_pubname (tree decl, dw_die_ref die)
11279 {
11280 if (!want_pubnames ())
11281 return;
11282
11283 /* Don't add items to the table when we expect that the consumer will have
11284 just read the enclosing die. For example, if the consumer is looking at a
11285 class_member, it will either be inside the class already, or will have just
11286 looked up the class to find the member. Either way, searching the class is
11287 faster than searching the index. */
11288 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
11289 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11290 {
11291 const char *name = dwarf2_name (decl, 1);
11292
11293 if (name)
11294 add_pubname_string (name, die);
11295 }
11296 }
11297
11298 /* Add an enumerator to the pubnames section. */
11299
11300 static void
11301 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
11302 {
11303 pubname_entry e;
11304
11305 gcc_assert (scope_name);
11306 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
11307 e.die = die;
11308 vec_safe_push (pubname_table, e);
11309 }
11310
11311 /* Add a new entry to .debug_pubtypes if appropriate. */
11312
11313 static void
11314 add_pubtype (tree decl, dw_die_ref die)
11315 {
11316 pubname_entry e;
11317
11318 if (!want_pubnames ())
11319 return;
11320
11321 if ((TREE_PUBLIC (decl)
11322 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11323 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
11324 {
11325 tree scope = NULL;
11326 const char *scope_name = "";
11327 const char *sep = is_cxx () ? "::" : ".";
11328 const char *name;
11329
11330 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
11331 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
11332 {
11333 scope_name = lang_hooks.dwarf_name (scope, 1);
11334 if (scope_name != NULL && scope_name[0] != '\0')
11335 scope_name = concat (scope_name, sep, NULL);
11336 else
11337 scope_name = "";
11338 }
11339
11340 if (TYPE_P (decl))
11341 name = type_tag (decl);
11342 else
11343 name = lang_hooks.dwarf_name (decl, 1);
11344
11345 /* If we don't have a name for the type, there's no point in adding
11346 it to the table. */
11347 if (name != NULL && name[0] != '\0')
11348 {
11349 e.die = die;
11350 e.name = concat (scope_name, name, NULL);
11351 vec_safe_push (pubtype_table, e);
11352 }
11353
11354 /* Although it might be more consistent to add the pubinfo for the
11355 enumerators as their dies are created, they should only be added if the
11356 enum type meets the criteria above. So rather than re-check the parent
11357 enum type whenever an enumerator die is created, just output them all
11358 here. This isn't protected by the name conditional because anonymous
11359 enums don't have names. */
11360 if (die->die_tag == DW_TAG_enumeration_type)
11361 {
11362 dw_die_ref c;
11363
11364 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
11365 }
11366 }
11367 }
11368
11369 /* Output a single entry in the pubnames table. */
11370
11371 static void
11372 output_pubname (dw_offset die_offset, pubname_entry *entry)
11373 {
11374 dw_die_ref die = entry->die;
11375 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
11376
11377 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
11378
11379 if (debug_generate_pub_sections == 2)
11380 {
11381 /* This logic follows gdb's method for determining the value of the flag
11382 byte. */
11383 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
11384 switch (die->die_tag)
11385 {
11386 case DW_TAG_typedef:
11387 case DW_TAG_base_type:
11388 case DW_TAG_subrange_type:
11389 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11390 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11391 break;
11392 case DW_TAG_enumerator:
11393 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11394 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11395 if (!is_cxx ())
11396 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11397 break;
11398 case DW_TAG_subprogram:
11399 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11400 GDB_INDEX_SYMBOL_KIND_FUNCTION);
11401 if (!is_ada ())
11402 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11403 break;
11404 case DW_TAG_constant:
11405 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11406 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11407 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11408 break;
11409 case DW_TAG_variable:
11410 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11411 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11412 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11413 break;
11414 case DW_TAG_namespace:
11415 case DW_TAG_imported_declaration:
11416 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11417 break;
11418 case DW_TAG_class_type:
11419 case DW_TAG_interface_type:
11420 case DW_TAG_structure_type:
11421 case DW_TAG_union_type:
11422 case DW_TAG_enumeration_type:
11423 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11424 if (!is_cxx ())
11425 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11426 break;
11427 default:
11428 /* An unusual tag. Leave the flag-byte empty. */
11429 break;
11430 }
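      /* Only the kind/static bits above the CU index survive the shift
         below; a consumer building a .gdb_index from these extended
         pubnames (e.g. gold --gdb-index) recombines this byte with the
         CU index.  */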
11431 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
11432 "GDB-index flags");
11433 }
11434
11435 dw2_asm_output_nstring (entry->name, -1, "external name");
11436 }
11437
11438
11439 /* Output the public names table used to speed up access to externally
11440 visible names; or the public types table used to find type definitions. */
11441
11442 static void
11443 output_pubnames (vec<pubname_entry, va_gc> *names)
11444 {
11445 unsigned i;
11446 unsigned long pubnames_length = size_of_pubnames (names);
11447 pubname_entry *pub;
11448
11449 if (!XCOFF_DEBUGGING_INFO)
11450 {
11451 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11452 dw2_asm_output_data (4, 0xffffffff,
11453 "Initial length escape value indicating 64-bit DWARF extension");
11454 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
11455 "Pub Info Length");
11456 }
11457
11458 /* Version number for pubnames/pubtypes is independent of dwarf version. */
11459 dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version");
11460
11461 if (dwarf_split_debug_info)
11462 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11463 debug_skeleton_info_section,
11464 "Offset of Compilation Unit Info");
11465 else
11466 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11467 debug_info_section,
11468 "Offset of Compilation Unit Info");
11469 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
11470 "Compilation Unit Length");
11471
11472 FOR_EACH_VEC_ELT (*names, i, pub)
11473 {
11474 if (include_pubname_in_output (names, pub))
11475 {
11476 dw_offset die_offset = pub->die->die_offset;
11477
11478 /* We shouldn't see pubnames for DIEs outside of the main CU. */
11479 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
11480 gcc_assert (pub->die->die_mark);
11481
11482 /* If we're putting types in their own .debug_types sections,
11483 the .debug_pubtypes table will still point to the compile
11484 unit (not the type unit), so we want to use the offset of
11485 the skeleton DIE (if there is one). */
11486 if (pub->die->comdat_type_p && names == pubtype_table)
11487 {
11488 comdat_type_node *type_node = pub->die->die_id.die_type_node;
11489
11490 if (type_node != NULL)
11491 die_offset = (type_node->skeleton_die != NULL
11492 ? type_node->skeleton_die->die_offset
11493 : comp_unit_die ()->die_offset);
11494 }
11495
11496 output_pubname (die_offset, pub);
11497 }
11498 }
11499
11500 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
11501 }
11502
11503 /* Output public names and types tables if necessary. */
11504
11505 static void
11506 output_pubtables (void)
11507 {
11508 if (!want_pubnames () || !info_section_emitted)
11509 return;
11510
11511 switch_to_section (debug_pubnames_section);
11512 output_pubnames (pubname_table);
11513 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
11514 It shouldn't hurt to emit it always, since pure DWARF2 consumers
11515 simply won't look for the section. */
11516 switch_to_section (debug_pubtypes_section);
11517 output_pubnames (pubtype_table);
11518 }
11519
11520
11521 /* Output the information that goes into the .debug_aranges table.
11522 Namely, define the beginning and ending address range of the
11523 text section generated for this compilation unit. */
11524
11525 static void
11526 output_aranges (void)
11527 {
11528 unsigned i;
11529 unsigned long aranges_length = size_of_aranges ();
11530
11531 if (!XCOFF_DEBUGGING_INFO)
11532 {
11533 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11534 dw2_asm_output_data (4, 0xffffffff,
11535 "Initial length escape value indicating 64-bit DWARF extension");
11536 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
11537 "Length of Address Ranges Info");
11538 }
11539
11540 /* Version number for aranges is still 2, even up to DWARF5. */
11541 dw2_asm_output_data (2, 2, "DWARF aranges version");
11542 if (dwarf_split_debug_info)
11543 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11544 debug_skeleton_info_section,
11545 "Offset of Compilation Unit Info");
11546 else
11547 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11548 debug_info_section,
11549 "Offset of Compilation Unit Info");
11550 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
11551 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
11552
11553 /* We need to align to twice the pointer size here. */
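  /* For example, with 32-bit DWARF (4-byte offsets) and 8-byte addresses
     the 12-byte header gets 4 bytes of padding, so that each
     address/length pair below starts on a 16-byte boundary.  */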
11554 if (DWARF_ARANGES_PAD_SIZE)
11555 {
11556 /* Pad using 2-byte words so that the padding is correct for any
11557 pointer size. */
11558 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
11559 2 * DWARF2_ADDR_SIZE);
11560 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
11561 dw2_asm_output_data (2, 0, NULL);
11562 }
11563
11564 /* These entries must not be output if the sections were
11565 not used; otherwise the length will be 0 and
11566 the address may end up as 0 if the section is discarded by ld
11567 --gc-sections, leaving an invalid (0, 0) entry that can be
11568 confused with the terminator. */
11569 if (text_section_used)
11570 {
11571 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
11572 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
11573 text_section_label, "Length");
11574 }
11575 if (cold_text_section_used)
11576 {
11577 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11578 "Address");
11579 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11580 cold_text_section_label, "Length");
11581 }
11582
11583 if (have_multiple_function_sections)
11584 {
11585 unsigned fde_idx;
11586 dw_fde_ref fde;
11587
11588 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11589 {
11590 if (DECL_IGNORED_P (fde->decl))
11591 continue;
11592 if (!fde->in_std_section)
11593 {
11594 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11595 "Address");
11596 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11597 fde->dw_fde_begin, "Length");
11598 }
11599 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11600 {
11601 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11602 "Address");
11603 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11604 fde->dw_fde_second_begin, "Length");
11605 }
11606 }
11607 }
11608
11609 /* Output the terminator words. */
11610 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11611 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11612 }
11613
11614 /* Add a new entry to .debug_ranges. Return its index into
11615 the ranges_table vector. */
11616
11617 static unsigned int
11618 add_ranges_num (int num, bool maybe_new_sec)
11619 {
11620 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11621 vec_safe_push (ranges_table, r);
11622 return vec_safe_length (ranges_table) - 1;
11623 }
11624
11625 /* Add a new entry to .debug_ranges corresponding to a block, or a
11626 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11627 this entry might be in a different section from the previous range. */
11628
11629 static unsigned int
11630 add_ranges (const_tree block, bool maybe_new_sec)
11631 {
11632 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11633 }
11634
11635 /* Note that (*ranges_table)[offset] is either the head of a rnglist
11636 chain, or a middle entry of a chain that will be directly referred to. */
11637
11638 static void
11639 note_rnglist_head (unsigned int offset)
11640 {
11641 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11642 return;
11643 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11644 }
11645
11646 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11647 When using dwarf_split_debug_info, address attributes in dies destined
11648 for the final executable should be direct references--setting the
11649 parameter force_direct ensures this behavior. */
11650
11651 static void
11652 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11653 bool *added, bool force_direct)
11654 {
11655 unsigned int in_use = vec_safe_length (ranges_by_label);
11656 unsigned int offset;
11657 dw_ranges_by_label rbl = { begin, end };
11658 vec_safe_push (ranges_by_label, rbl);
11659 offset = add_ranges_num (-(int)in_use - 1, true);
11660 if (!*added)
11661 {
11662 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11663 *added = true;
11664 note_rnglist_head (offset);
11665 }
11666 }
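/* In ranges_table, a positive num is a BLOCK_NUMBER, zero terminates a
   range list, and a negative num encodes -(index into ranges_by_label) - 1;
   output_ranges and output_rnglists decode it accordingly.  */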
11667
11668 /* Emit .debug_ranges section. */
11669
11670 static void
11671 output_ranges (void)
11672 {
11673 unsigned i;
11674 static const char *const start_fmt = "Offset %#x";
11675 const char *fmt = start_fmt;
11676 dw_ranges *r;
11677
11678 switch_to_section (debug_ranges_section);
11679 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11680 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11681 {
11682 int block_num = r->num;
11683
11684 if (block_num > 0)
11685 {
11686 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11687 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11688
11689 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11690 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11691
11692 /* If all code is in the text section, then the compilation
11693 unit base address defaults to DW_AT_low_pc, which is the
11694 base of the text section. */
11695 if (!have_multiple_function_sections)
11696 {
11697 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11698 text_section_label,
11699 fmt, i * 2 * DWARF2_ADDR_SIZE);
11700 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11701 text_section_label, NULL);
11702 }
11703
11704 /* Otherwise, the compilation unit base address is zero,
11705 which allows us to use absolute addresses, and not worry
11706 about whether the target supports cross-section
11707 arithmetic. */
11708 else
11709 {
11710 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11711 fmt, i * 2 * DWARF2_ADDR_SIZE);
11712 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11713 }
11714
11715 fmt = NULL;
11716 }
11717
11718 /* Negative block_num stands for an index into ranges_by_label. */
11719 else if (block_num < 0)
11720 {
11721 int lab_idx = - block_num - 1;
11722
11723 if (!have_multiple_function_sections)
11724 {
11725 gcc_unreachable ();
11726 #if 0
11727 /* If we ever use add_ranges_by_labels () for a single
11728 function section, all we have to do is to take out
11729 the #if 0 above. */
11730 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11731 (*ranges_by_label)[lab_idx].begin,
11732 text_section_label,
11733 fmt, i * 2 * DWARF2_ADDR_SIZE);
11734 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11735 (*ranges_by_label)[lab_idx].end,
11736 text_section_label, NULL);
11737 #endif
11738 }
11739 else
11740 {
11741 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11742 (*ranges_by_label)[lab_idx].begin,
11743 fmt, i * 2 * DWARF2_ADDR_SIZE);
11744 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11745 (*ranges_by_label)[lab_idx].end,
11746 NULL);
11747 }
11748 }
11749 else
11750 {
11751 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11752 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11753 fmt = start_fmt;
11754 }
11755 }
11756 }
11757
11758 /* Non-zero if .debug_line_str should be used for .debug_line section
11759 strings or strings that are likely shareable with those. */
11760 #define DWARF5_USE_DEBUG_LINE_STR \
11761 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11762 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11763 /* FIXME: there is no .debug_line_str.dwo section, \
11764 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11765 && !dwarf_split_debug_info)
11766
11767 /* Assign .debug_rnglists indexes. */
11768
11769 static void
11770 index_rnglists (void)
11771 {
11772 unsigned i;
11773 dw_ranges *r;
11774
11775 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11776 if (r->label)
11777 r->idx = rnglist_idx++;
11778 }
11779
11780 /* Emit .debug_rnglists section. */
11781
11782 static void
11783 output_rnglists (unsigned generation)
11784 {
11785 unsigned i;
11786 dw_ranges *r;
11787 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11788 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11789 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11790
11791 switch_to_section (debug_ranges_section);
11792 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11793 /* There are up to 4 unique ranges labels per generation.
11794 See also init_sections_and_labels. */
11795 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11796 2 + generation * 4);
11797 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11798 3 + generation * 4);
11799 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11800 dw2_asm_output_data (4, 0xffffffff,
11801 "Initial length escape value indicating "
11802 "64-bit DWARF extension");
11803 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11804 "Length of Range Lists");
11805 ASM_OUTPUT_LABEL (asm_out_file, l1);
11806 output_dwarf_version ();
11807 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11808 dw2_asm_output_data (1, 0, "Segment Size");
11809 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11810 about relocation sizes and primarily care about the size of .debug*
11811 sections in linked shared libraries and executables, then
11812 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11813 into it are usually larger than just DW_FORM_sec_offset offsets
11814 into the .debug_rnglists section. */
11815 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11816 "Offset Entry Count");
11817 if (dwarf_split_debug_info)
11818 {
11819 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11820 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11821 if (r->label)
11822 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11823 ranges_base_label, NULL);
11824 }
11825
11826 const char *lab = "";
11827 unsigned int len = vec_safe_length (ranges_table);
11828 const char *base = NULL;
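  /* The loop below picks the most compact DW_RLE_* encoding available:
     offset pairs relative to the text section or to a locally established
     base address when the assembler supports LEB128 deltas,
     DW_RLE_start_length as the fallback in that case, and DW_RLE_start_end
     with two absolute addresses when it does not.  */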
11829 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11830 {
11831 int block_num = r->num;
11832
11833 if (r->label)
11834 {
11835 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11836 lab = r->label;
11837 }
11838 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11839 base = NULL;
11840 if (block_num > 0)
11841 {
11842 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11843 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11844
11845 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11846 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11847
11848 if (HAVE_AS_LEB128)
11849 {
11850 /* If all code is in the text section, then the compilation
11851 unit base address defaults to DW_AT_low_pc, which is the
11852 base of the text section. */
11853 if (!have_multiple_function_sections)
11854 {
11855 dw2_asm_output_data (1, DW_RLE_offset_pair,
11856 "DW_RLE_offset_pair (%s)", lab);
11857 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11858 "Range begin address (%s)", lab);
11859 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11860 "Range end address (%s)", lab);
11861 continue;
11862 }
11863 if (base == NULL)
11864 {
11865 dw_ranges *r2 = NULL;
11866 if (i < len - 1)
11867 r2 = &(*ranges_table)[i + 1];
11868 if (r2
11869 && r2->num != 0
11870 && r2->label == NULL
11871 && !r2->maybe_new_sec)
11872 {
11873 dw2_asm_output_data (1, DW_RLE_base_address,
11874 "DW_RLE_base_address (%s)", lab);
11875 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11876 "Base address (%s)", lab);
11877 strcpy (basebuf, blabel);
11878 base = basebuf;
11879 }
11880 }
11881 if (base)
11882 {
11883 dw2_asm_output_data (1, DW_RLE_offset_pair,
11884 "DW_RLE_offset_pair (%s)", lab);
11885 dw2_asm_output_delta_uleb128 (blabel, base,
11886 "Range begin address (%s)", lab);
11887 dw2_asm_output_delta_uleb128 (elabel, base,
11888 "Range end address (%s)", lab);
11889 continue;
11890 }
11891 dw2_asm_output_data (1, DW_RLE_start_length,
11892 "DW_RLE_start_length (%s)", lab);
11893 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11894 "Range begin address (%s)", lab);
11895 dw2_asm_output_delta_uleb128 (elabel, blabel,
11896 "Range length (%s)", lab);
11897 }
11898 else
11899 {
11900 dw2_asm_output_data (1, DW_RLE_start_end,
11901 "DW_RLE_start_end (%s)", lab);
11902 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11903 "Range begin address (%s)", lab);
11904 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11905 "Range end address (%s)", lab);
11906 }
11907 }
11908
11909 /* Negative block_num stands for an index into ranges_by_label. */
11910 else if (block_num < 0)
11911 {
11912 int lab_idx = - block_num - 1;
11913 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11914 const char *elabel = (*ranges_by_label)[lab_idx].end;
11915
11916 if (!have_multiple_function_sections)
11917 gcc_unreachable ();
11918 if (HAVE_AS_LEB128)
11919 {
11920 dw2_asm_output_data (1, DW_RLE_start_length,
11921 "DW_RLE_start_length (%s)", lab);
11922 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11923 "Range begin address (%s)", lab);
11924 dw2_asm_output_delta_uleb128 (elabel, blabel,
11925 "Range length (%s)", lab);
11926 }
11927 else
11928 {
11929 dw2_asm_output_data (1, DW_RLE_start_end,
11930 "DW_RLE_start_end (%s)", lab);
11931 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11932 "Range begin address (%s)", lab);
11933 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11934 "Range end address (%s)", lab);
11935 }
11936 }
11937 else
11938 dw2_asm_output_data (1, DW_RLE_end_of_list,
11939 "DW_RLE_end_of_list (%s)", lab);
11940 }
11941 ASM_OUTPUT_LABEL (asm_out_file, l2);
11942 }
11943
11944 /* Data structure containing information about input files. */
11945 struct file_info
11946 {
11947 const char *path; /* Complete file name. */
11948 const char *fname; /* File name part. */
11949 int length; /* Length of entire string. */
11950 struct dwarf_file_data * file_idx; /* Index in input file table. */
11951 int dir_idx; /* Index in directory table. */
11952 };
11953
11954 /* Data structure containing information about directories with source
11955 files. */
11956 struct dir_info
11957 {
11958 const char *path; /* Path including directory name. */
11959 int length; /* Path length. */
11960 int prefix; /* Index of directory entry which is a prefix. */
11961 int count; /* Number of files in this directory. */
11962 int dir_idx; /* Index of directory used as base. */
11963 };
11964
11965 /* Callback function for file_info comparison. We sort by looking at
11966 the directories in the path. */
11967
11968 static int
11969 file_info_cmp (const void *p1, const void *p2)
11970 {
11971 const struct file_info *const s1 = (const struct file_info *) p1;
11972 const struct file_info *const s2 = (const struct file_info *) p2;
11973 const unsigned char *cp1;
11974 const unsigned char *cp2;
11975
11976 /* Take care of file names without directories. We need to make sure that
11977 we return consistent values to qsort since some implementations get confused if
11978 we return the same value when identical operands are passed in opposite
11979 orders. So if neither has a directory, return 0 and otherwise return
11980 1 or -1 depending on which one has the directory. We want the one with
11981 the directory to sort after the one without, so all no directory files
11982 are at the start (normally only the compilation unit file). */
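  /* The effect is that, for example, "x.c" sorts before "dir/sub/x.c",
     which in turn sorts before "dir/x.c": entries without a directory
     come first, and longer directory prefixes come ahead of the
     directories they extend.  */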
11983 if ((s1->path == s1->fname || s2->path == s2->fname))
11984 return (s2->path == s2->fname) - (s1->path == s1->fname);
11985
11986 cp1 = (const unsigned char *) s1->path;
11987 cp2 = (const unsigned char *) s2->path;
11988
11989 while (1)
11990 {
11991 ++cp1;
11992 ++cp2;
11993 /* Reached the end of the first path? If so, handle like above,
11994 but now we want longer directory prefixes before shorter ones. */
11995 if ((cp1 == (const unsigned char *) s1->fname)
11996 || (cp2 == (const unsigned char *) s2->fname))
11997 return ((cp1 == (const unsigned char *) s1->fname)
11998 - (cp2 == (const unsigned char *) s2->fname));
11999
12000 /* Character of current path component the same? */
12001 else if (*cp1 != *cp2)
12002 return *cp1 - *cp2;
12003 }
12004 }
12005
12006 struct file_name_acquire_data
12007 {
12008 struct file_info *files;
12009 int used_files;
12010 int max_files;
12011 };
12012
12013 /* Traversal function for the hash table. */
12014
12015 int
12016 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
12017 {
12018 struct dwarf_file_data *d = *slot;
12019 struct file_info *fi;
12020 const char *f;
12021
12022 gcc_assert (fnad->max_files >= d->emitted_number);
12023
12024 if (! d->emitted_number)
12025 return 1;
12026
12027 gcc_assert (fnad->max_files != fnad->used_files);
12028
12029 fi = fnad->files + fnad->used_files++;
12030
12031 /* Skip all leading "./". */
12032 f = d->filename;
12033 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
12034 f += 2;
12035
12036 /* Create a new array entry. */
12037 fi->path = f;
12038 fi->length = strlen (f);
12039 fi->file_idx = d;
12040
12041 /* Search for the file name part. */
12042 f = strrchr (f, DIR_SEPARATOR);
12043 #if defined (DIR_SEPARATOR_2)
12044 {
12045 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
12046
12047 if (g != NULL)
12048 {
12049 if (f == NULL || f < g)
12050 f = g;
12051 }
12052 }
12053 #endif
12054
12055 fi->fname = f == NULL ? fi->path : f + 1;
12056 return 1;
12057 }
12058
12059 /* Helper function for output_file_names. Emit a FORM encoded
12060 string STR, with assembly comment start ENTRY_KIND and
12061 index IDX. */
12062
12063 static void
12064 output_line_string (enum dwarf_form form, const char *str,
12065 const char *entry_kind, unsigned int idx)
12066 {
12067 switch (form)
12068 {
12069 case DW_FORM_string:
12070 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
12071 break;
12072 case DW_FORM_line_strp:
12073 if (!debug_line_str_hash)
12074 debug_line_str_hash
12075 = hash_table<indirect_string_hasher>::create_ggc (10);
12076
12077 struct indirect_string_node *node;
12078 node = find_AT_string_in_table (str, debug_line_str_hash);
12079 set_indirect_string (node);
12080 node->form = form;
12081 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
12082 debug_line_str_section, "%s: %#x: \"%s\"",
12083 entry_kind, 0, node->str);
12084 break;
12085 default:
12086 gcc_unreachable ();
12087 }
12088 }
12089
12090 /* Output the directory table and the file name table. We try to minimize
12091 the total amount of memory needed. A heuristic is used to avoid large
12092 slowdowns with many input files. */
12093
12094 static void
12095 output_file_names (void)
12096 {
12097 struct file_name_acquire_data fnad;
12098 int numfiles;
12099 struct file_info *files;
12100 struct dir_info *dirs;
12101 int *saved;
12102 int *savehere;
12103 int *backmap;
12104 int ndirs;
12105 int idx_offset;
12106 int i;
12107
12108 if (!last_emitted_file)
12109 {
12110 if (dwarf_version >= 5)
12111 {
12112 dw2_asm_output_data (1, 0, "Directory entry format count");
12113 dw2_asm_output_data_uleb128 (0, "Directories count");
12114 dw2_asm_output_data (1, 0, "File name entry format count");
12115 dw2_asm_output_data_uleb128 (0, "File names count");
12116 }
12117 else
12118 {
12119 dw2_asm_output_data (1, 0, "End directory table");
12120 dw2_asm_output_data (1, 0, "End file name table");
12121 }
12122 return;
12123 }
12124
12125 numfiles = last_emitted_file->emitted_number;
12126
12127 /* Allocate the various arrays we need. */
12128 files = XALLOCAVEC (struct file_info, numfiles);
12129 dirs = XALLOCAVEC (struct dir_info, numfiles);
12130
12131 fnad.files = files;
12132 fnad.used_files = 0;
12133 fnad.max_files = numfiles;
12134 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
12135 gcc_assert (fnad.used_files == fnad.max_files);
12136
12137 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
12138
12139 /* Find all the different directories used. */
12140 dirs[0].path = files[0].path;
12141 dirs[0].length = files[0].fname - files[0].path;
12142 dirs[0].prefix = -1;
12143 dirs[0].count = 1;
12144 dirs[0].dir_idx = 0;
12145 files[0].dir_idx = 0;
12146 ndirs = 1;
12147
12148 for (i = 1; i < numfiles; i++)
12149 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
12150 && memcmp (dirs[ndirs - 1].path, files[i].path,
12151 dirs[ndirs - 1].length) == 0)
12152 {
12153 /* Same directory as last entry. */
12154 files[i].dir_idx = ndirs - 1;
12155 ++dirs[ndirs - 1].count;
12156 }
12157 else
12158 {
12159 int j;
12160
12161 /* This is a new directory. */
12162 dirs[ndirs].path = files[i].path;
12163 dirs[ndirs].length = files[i].fname - files[i].path;
12164 dirs[ndirs].count = 1;
12165 dirs[ndirs].dir_idx = ndirs;
12166 files[i].dir_idx = ndirs;
12167
12168 /* Search for a prefix. */
12169 dirs[ndirs].prefix = -1;
12170 for (j = 0; j < ndirs; j++)
12171 if (dirs[j].length < dirs[ndirs].length
12172 && dirs[j].length > 1
12173 && (dirs[ndirs].prefix == -1
12174 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
12175 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
12176 dirs[ndirs].prefix = j;
12177
12178 ++ndirs;
12179 }
12180
12181 /* Now to the actual work. We have to find a subset of the directories which
12182 allows expressing the file names using references to the directory table
12183 with the fewest characters. We do not do an exhaustive search
12184 where we would have to check every combination of every single
12185 possible prefix. Instead we use a heuristic which provides nearly optimal
12186 results in most cases and is never far off. */
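  /* As a concrete example of the check `total > dirs[i].length + 1' below:
     adding a 13-character directory such as "/usr/include/" to the table
     only pays off if the additional characters saved across the file
     entries it covers exceed 14, roughly the cost of emitting the
     directory string itself.  */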
12187 saved = XALLOCAVEC (int, ndirs);
12188 savehere = XALLOCAVEC (int, ndirs);
12189
12190 memset (saved, '\0', ndirs * sizeof (saved[0]));
12191 for (i = 0; i < ndirs; i++)
12192 {
12193 int j;
12194 int total;
12195
12196 /* We can always save some space for the current directory. But this
12197 does not mean it will be enough to justify adding the directory. */
12198 savehere[i] = dirs[i].length;
12199 total = (savehere[i] - saved[i]) * dirs[i].count;
12200
12201 for (j = i + 1; j < ndirs; j++)
12202 {
12203 savehere[j] = 0;
12204 if (saved[j] < dirs[i].length)
12205 {
12206 /* Determine whether the dirs[i] path is a prefix of the
12207 dirs[j] path. */
12208 int k;
12209
12210 k = dirs[j].prefix;
12211 while (k != -1 && k != (int) i)
12212 k = dirs[k].prefix;
12213
12214 if (k == (int) i)
12215 {
12216 /* Yes it is. We can possibly save some memory by
12217 writing the filenames in dirs[j] relative to
12218 dirs[i]. */
12219 savehere[j] = dirs[i].length;
12220 total += (savehere[j] - saved[j]) * dirs[j].count;
12221 }
12222 }
12223 }
12224
12225 /* Check whether we can save enough to justify adding the dirs[i]
12226 directory. */
12227 if (total > dirs[i].length + 1)
12228 {
12229 /* It's worthwhile adding. */
12230 for (j = i; j < ndirs; j++)
12231 if (savehere[j] > 0)
12232 {
12233 /* Remember how much we saved for this directory so far. */
12234 saved[j] = savehere[j];
12235
12236 /* Remember the prefix directory. */
12237 dirs[j].dir_idx = i;
12238 }
12239 }
12240 }
12241
12242 /* Emit the directory name table. */
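  /* If the first (shortest) directory is non-empty, table index 0 is
     reserved for the compilation directory and every other directory
     index is shifted up by one; otherwise the empty dirs[0] simply maps
     to index 0 (the compilation directory).  */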
12243 idx_offset = dirs[0].length > 0 ? 1 : 0;
12244 enum dwarf_form str_form = DW_FORM_string;
12245 enum dwarf_form idx_form = DW_FORM_udata;
12246 if (dwarf_version >= 5)
12247 {
12248 const char *comp_dir = comp_dir_string ();
12249 if (comp_dir == NULL)
12250 comp_dir = "";
12251 dw2_asm_output_data (1, 1, "Directory entry format count");
12252 if (DWARF5_USE_DEBUG_LINE_STR)
12253 str_form = DW_FORM_line_strp;
12254 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12255 dw2_asm_output_data_uleb128 (str_form, "%s",
12256 get_DW_FORM_name (str_form));
12257 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
12258 if (str_form == DW_FORM_string)
12259 {
12260 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
12261 for (i = 1 - idx_offset; i < ndirs; i++)
12262 dw2_asm_output_nstring (dirs[i].path,
12263 dirs[i].length
12264 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12265 "Directory Entry: %#x", i + idx_offset);
12266 }
12267 else
12268 {
12269 output_line_string (str_form, comp_dir, "Directory Entry", 0);
12270 for (i = 1 - idx_offset; i < ndirs; i++)
12271 {
12272 const char *str
12273 = ggc_alloc_string (dirs[i].path,
12274 dirs[i].length
12275 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
12276 output_line_string (str_form, str, "Directory Entry",
12277 (unsigned) i + idx_offset);
12278 }
12279 }
12280 }
12281 else
12282 {
12283 for (i = 1 - idx_offset; i < ndirs; i++)
12284 dw2_asm_output_nstring (dirs[i].path,
12285 dirs[i].length
12286 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12287 "Directory Entry: %#x", i + idx_offset);
12288
12289 dw2_asm_output_data (1, 0, "End directory table");
12290 }
12291
12292 /* We have to emit them in the order of emitted_number since that's
12293 used in the debug info generation. To do this efficiently we
12294 generate a back-mapping of the indices first. */
12295 backmap = XALLOCAVEC (int, numfiles);
12296 for (i = 0; i < numfiles; i++)
12297 backmap[files[i].file_idx->emitted_number - 1] = i;
12298
12299 if (dwarf_version >= 5)
12300 {
12301 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
12302 if (filename0 == NULL)
12303 filename0 = "";
12304 /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
12305 DW_FORM_data2. Choose one based on the number of directories
12306 and how much space they would occupy in each encoding.
12307 If we have at most 256 directories, all indexes fit into
12308 a single byte, so DW_FORM_data1 is most compact (with at most
12309 128 directories DW_FORM_udata would be equally compact, but no
12310 shorter, and slower to decode). */
12311 if (ndirs + idx_offset <= 256)
12312 idx_form = DW_FORM_data1;
12313 /* If there are more than 65536 directories, we have to use
12314 DW_FORM_udata, since DW_FORM_data2 can't refer to them.
12315 Otherwise, compute how much space the indexes would occupy if
12316 they all used DW_FORM_udata - sum - and compare that to the size
12317 of the DW_FORM_data2 encoding, and pick the more efficient one. */
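      /* For example, with 300 directories a DW_FORM_udata index costs one
         byte below 128 and two bytes from 128 on, while DW_FORM_data2 is
         a flat two bytes per file entry, so the comparison below only
         switches to DW_FORM_data2 when the uleb128 total would be at
         least as large.  */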
12318 else if (ndirs + idx_offset <= 65536)
12319 {
12320 unsigned HOST_WIDE_INT sum = 1;
12321 for (i = 0; i < numfiles; i++)
12322 {
12323 int file_idx = backmap[i];
12324 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12325 sum += size_of_uleb128 (dir_idx);
12326 }
12327 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
12328 idx_form = DW_FORM_data2;
12329 }
12330 #ifdef VMS_DEBUGGING_INFO
12331 dw2_asm_output_data (1, 4, "File name entry format count");
12332 #else
12333 dw2_asm_output_data (1, 2, "File name entry format count");
12334 #endif
12335 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12336 dw2_asm_output_data_uleb128 (str_form, "%s",
12337 get_DW_FORM_name (str_form));
12338 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
12339 "DW_LNCT_directory_index");
12340 dw2_asm_output_data_uleb128 (idx_form, "%s",
12341 get_DW_FORM_name (idx_form));
12342 #ifdef VMS_DEBUGGING_INFO
12343 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
12344 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12345 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
12346 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12347 #endif
12348 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
12349
12350 output_line_string (str_form, filename0, "File Entry", 0);
12351
12352 /* Include directory index. */
12353 if (idx_form != DW_FORM_udata)
12354 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12355 0, NULL);
12356 else
12357 dw2_asm_output_data_uleb128 (0, NULL);
12358
12359 #ifdef VMS_DEBUGGING_INFO
12360 dw2_asm_output_data_uleb128 (0, NULL);
12361 dw2_asm_output_data_uleb128 (0, NULL);
12362 #endif
12363 }
12364
12365 /* Now write all the file names. */
12366 for (i = 0; i < numfiles; i++)
12367 {
12368 int file_idx = backmap[i];
12369 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12370
12371 #ifdef VMS_DEBUGGING_INFO
12372 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
12373
12374 /* Setting these fields can lead to debugger miscomparisons,
12375 but VMS Debug requires them to be set correctly. */
12376
12377 int ver;
12378 long long cdt;
12379 long siz;
12380 int maxfilelen = (strlen (files[file_idx].path)
12381 + dirs[dir_idx].length
12382 + MAX_VMS_VERSION_LEN + 1);
12383 char *filebuf = XALLOCAVEC (char, maxfilelen);
12384
12385 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
12386 snprintf (filebuf, maxfilelen, "%s;%d",
12387 files[file_idx].path + dirs[dir_idx].length, ver);
12388
12389 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
12390
12391 /* Include directory index. */
12392 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12393 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12394 dir_idx + idx_offset, NULL);
12395 else
12396 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12397
12398 /* Modification time. */
12399 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12400 &cdt, 0, 0, 0) == 0)
12401 ? cdt : 0, NULL);
12402
12403 /* File length in bytes. */
12404 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12405 0, &siz, 0, 0) == 0)
12406 ? siz : 0, NULL);
12407 #else
12408 output_line_string (str_form,
12409 files[file_idx].path + dirs[dir_idx].length,
12410 "File Entry", (unsigned) i + 1);
12411
12412 /* Include directory index. */
12413 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12414 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12415 dir_idx + idx_offset, NULL);
12416 else
12417 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12418
12419 if (dwarf_version >= 5)
12420 continue;
12421
12422 /* Modification time. */
12423 dw2_asm_output_data_uleb128 (0, NULL);
12424
12425 /* File length in bytes. */
12426 dw2_asm_output_data_uleb128 (0, NULL);
12427 #endif /* VMS_DEBUGGING_INFO */
12428 }
12429
12430 if (dwarf_version < 5)
12431 dw2_asm_output_data (1, 0, "End file name table");
12432 }
12433
12434
12435 /* Output one line number table into the .debug_line section. */
12436
12437 static void
12438 output_one_line_info_table (dw_line_info_table *table)
12439 {
12440 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
12441 unsigned int current_line = 1;
12442 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
12443 dw_line_info_entry *ent, *prev_addr;
12444 size_t i;
12445 unsigned int view;
12446
12447 view = 0;
12448
12449 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
12450 {
12451 switch (ent->opcode)
12452 {
12453 case LI_set_address:
12454 /* ??? Unfortunately, we have little choice here currently, and
12455 must always use the most general form. GCC does not know the
12456 address delta itself, so we can't use DW_LNS_advance_pc. Many
12457 ports do have length attributes which will give an upper bound
12458 on the address range. We could perhaps use length attributes
12459 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
12460 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12461
12462 view = 0;
12463
12464 /* This can handle any delta. This takes
12465 4+DWARF2_ADDR_SIZE bytes. */
12466 dw2_asm_output_data (1, 0, "set address %s%s", line_label,
12467 debug_variable_location_views
12468 ? ", reset view to 0" : "");
12469 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12470 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12471 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
12472
12473 prev_addr = ent;
12474 break;
12475
12476 case LI_adv_address:
12477 {
12478 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12479 char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
12480 ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);
12481
12482 view++;
12483
12484 dw2_asm_output_data (1, DW_LNS_fixed_advance_pc, "fixed advance PC, increment view to %i", view);
12485 dw2_asm_output_delta (2, line_label, prev_label,
12486 "from %s to %s", prev_label, line_label);
12487
12488 prev_addr = ent;
12489 break;
12490 }
12491
12492 case LI_set_line:
12493 if (ent->val == current_line)
12494 {
12495 /* We still need to start a new row, so output a copy insn. */
12496 dw2_asm_output_data (1, DW_LNS_copy,
12497 "copy line %u", current_line);
12498 }
12499 else
12500 {
12501 int line_offset = ent->val - current_line;
12502 int line_delta = line_offset - DWARF_LINE_BASE;
12503
12504 current_line = ent->val;
12505 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
12506 {
12507 /* This can handle deltas from -10 to 234, using the current
12508 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
12509 This takes 1 byte. */
12510 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
12511 "line %u", current_line);
12512 }
12513 else
12514 {
12515 /* This can handle any delta. This takes at least 4 bytes,
12516 depending on the value being encoded. */
12517 dw2_asm_output_data (1, DW_LNS_advance_line,
12518 "advance to line %u", current_line);
12519 dw2_asm_output_data_sleb128 (line_offset, NULL);
12520 dw2_asm_output_data (1, DW_LNS_copy, NULL);
12521 }
12522 }
12523 break;
12524
12525 case LI_set_file:
12526 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
12527 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12528 break;
12529
12530 case LI_set_column:
12531 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
12532 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12533 break;
12534
12535 case LI_negate_stmt:
12536 current_is_stmt = !current_is_stmt;
12537 dw2_asm_output_data (1, DW_LNS_negate_stmt,
12538 "is_stmt %d", current_is_stmt);
12539 break;
12540
12541 case LI_set_prologue_end:
12542 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
12543 "set prologue end");
12544 break;
12545
12546 case LI_set_epilogue_begin:
12547 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
12548 "set epilogue begin");
12549 break;
12550
12551 case LI_set_discriminator:
12552 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
12553 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
12554 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
12555 dw2_asm_output_data_uleb128 (ent->val, NULL);
12556 break;
12557 }
12558 }
12559
12560 /* Emit debug info for the address of the end of the table. */
12561 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
12562 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12563 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12564 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
12565
12566 dw2_asm_output_data (1, 0, "end sequence");
12567 dw2_asm_output_data_uleb128 (1, NULL);
12568 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
12569 }
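
/* An illustrative sketch, assuming the usual definitions
   DWARF_LINE_BASE == -10 and DWARF_LINE_OPCODE_BASE == 13: an
   LI_set_line entry that moves from line 7 to line 10 has
   line_offset = 3 and line_delta = 3 - (-10) = 13, so the function
   above emits the single byte 13 + 13 = 26.  A consumer decodes it as
       adjusted = 26 - opcode_base = 13
       line    += line_base + adjusted % line_range = -10 + 13 = +3
       address += (adjusted / line_range) * min_insn_length = 0
   i.e. advance the line by 3, append a row, and leave the address
   alone; GCC always advances the address separately, with
   DW_LNS_fixed_advance_pc.  */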
12570
12571 /* Output the source line number correspondence information. This
12572 information goes into the .debug_line section. */
12573
12574 static void
12575 output_line_info (bool prologue_only)
12576 {
12577 static unsigned int generation;
12578 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
12579 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
12580 bool saw_one = false;
12581 int opc;
12582
12583 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
12584 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
12585 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
12586 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
12587
12588 if (!XCOFF_DEBUGGING_INFO)
12589 {
12590 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
12591 dw2_asm_output_data (4, 0xffffffff,
12592 "Initial length escape value indicating 64-bit DWARF extension");
12593 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
12594 "Length of Source Line Info");
12595 }
12596
12597 ASM_OUTPUT_LABEL (asm_out_file, l1);
12598
12599 output_dwarf_version ();
12600 if (dwarf_version >= 5)
12601 {
12602 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12603 dw2_asm_output_data (1, 0, "Segment Size");
12604 }
12605 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
12606 ASM_OUTPUT_LABEL (asm_out_file, p1);
12607
12608 /* Define the architecture-dependent minimum instruction length (in bytes).
12609 In this implementation of DWARF, this field is used for information
12610 purposes only. Since GCC generates assembly language, we have no
12611 a priori knowledge of how many instruction bytes are generated for each
12612 source line, and therefore can use only the DW_LNE_set_address and
12613 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12614 this as '1', which is "correct enough" for all architectures,
12615 and don't let the target override. */
12616 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12617
12618 if (dwarf_version >= 4)
12619 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12620 "Maximum Operations Per Instruction");
12621 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12622 "Default is_stmt_start flag");
12623 dw2_asm_output_data (1, DWARF_LINE_BASE,
12624 "Line Base Value (Special Opcodes)");
12625 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12626 "Line Range Value (Special Opcodes)");
12627 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12628 "Special Opcode Base");
12629
12630 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12631 {
12632 int n_op_args;
12633 switch (opc)
12634 {
12635 case DW_LNS_advance_pc:
12636 case DW_LNS_advance_line:
12637 case DW_LNS_set_file:
12638 case DW_LNS_set_column:
12639 case DW_LNS_fixed_advance_pc:
12640 case DW_LNS_set_isa:
12641 n_op_args = 1;
12642 break;
12643 default:
12644 n_op_args = 0;
12645 break;
12646 }
12647
12648 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12649 opc, n_op_args);
12650 }
12651
12652 /* Write out the information about the files we use. */
12653 output_file_names ();
12654 ASM_OUTPUT_LABEL (asm_out_file, p2);
12655 if (prologue_only)
12656 {
12657 /* Output the marker for the end of the line number info. */
12658 ASM_OUTPUT_LABEL (asm_out_file, l2);
12659 return;
12660 }
12661
12662 if (separate_line_info)
12663 {
12664 dw_line_info_table *table;
12665 size_t i;
12666
12667 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12668 if (table->in_use)
12669 {
12670 output_one_line_info_table (table);
12671 saw_one = true;
12672 }
12673 }
12674 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12675 {
12676 output_one_line_info_table (cold_text_section_line_info);
12677 saw_one = true;
12678 }
12679
12680 /* ??? Some Darwin linkers crash on a .debug_line section with no
12681 sequences. Further, merely a DW_LNE_end_sequence entry is not
12682 sufficient -- the address column must also be initialized.
12683 Make sure to output at least one set_address/end_sequence pair,
12684 choosing .text since that section is always present. */
12685 if (text_section_line_info->in_use || !saw_one)
12686 output_one_line_info_table (text_section_line_info);
12687
12688 /* Output the marker for the end of the line number info. */
12689 ASM_OUTPUT_LABEL (asm_out_file, l2);
12690 }
12691 \f
12692 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12693
12694 static inline bool
12695 need_endianity_attribute_p (bool reverse)
12696 {
12697 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12698 }
12699
12700 /* Given a pointer to a tree node for some base type, return a pointer to
12701 a DIE that describes the given type. REVERSE is true if the type is
12702 to be interpreted in the reverse storage order wrt the target order.
12703
12704 This routine must only be called for GCC type nodes that correspond to
12705 Dwarf base (fundamental) types. */
12706
12707 static dw_die_ref
12708 base_type_die (tree type, bool reverse)
12709 {
12710 dw_die_ref base_type_result;
12711 enum dwarf_type encoding;
12712 bool fpt_used = false;
12713 struct fixed_point_type_info fpt_info;
12714 tree type_bias = NULL_TREE;
12715
12716 /* If this is a subtype that should not be emitted as a subrange type,
12717 use the base type. See subrange_type_for_debug_p. */
12718 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12719 type = TREE_TYPE (type);
12720
12721 switch (TREE_CODE (type))
12722 {
12723 case INTEGER_TYPE:
12724 if ((dwarf_version >= 4 || !dwarf_strict)
12725 && TYPE_NAME (type)
12726 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12727 && DECL_IS_BUILTIN (TYPE_NAME (type))
12728 && DECL_NAME (TYPE_NAME (type)))
12729 {
12730 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12731 if (strcmp (name, "char16_t") == 0
12732 || strcmp (name, "char32_t") == 0)
12733 {
12734 encoding = DW_ATE_UTF;
12735 break;
12736 }
12737 }
12738 if ((dwarf_version >= 3 || !dwarf_strict)
12739 && lang_hooks.types.get_fixed_point_type_info)
12740 {
12741 memset (&fpt_info, 0, sizeof (fpt_info));
12742 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12743 {
12744 fpt_used = true;
12745 encoding = ((TYPE_UNSIGNED (type))
12746 ? DW_ATE_unsigned_fixed
12747 : DW_ATE_signed_fixed);
12748 break;
12749 }
12750 }
12751 if (TYPE_STRING_FLAG (type))
12752 {
12753 if (TYPE_UNSIGNED (type))
12754 encoding = DW_ATE_unsigned_char;
12755 else
12756 encoding = DW_ATE_signed_char;
12757 }
12758 else if (TYPE_UNSIGNED (type))
12759 encoding = DW_ATE_unsigned;
12760 else
12761 encoding = DW_ATE_signed;
12762
12763 if (!dwarf_strict
12764 && lang_hooks.types.get_type_bias)
12765 type_bias = lang_hooks.types.get_type_bias (type);
12766 break;
12767
12768 case REAL_TYPE:
12769 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12770 {
12771 if (dwarf_version >= 3 || !dwarf_strict)
12772 encoding = DW_ATE_decimal_float;
12773 else
12774 encoding = DW_ATE_lo_user;
12775 }
12776 else
12777 encoding = DW_ATE_float;
12778 break;
12779
12780 case FIXED_POINT_TYPE:
12781 if (!(dwarf_version >= 3 || !dwarf_strict))
12782 encoding = DW_ATE_lo_user;
12783 else if (TYPE_UNSIGNED (type))
12784 encoding = DW_ATE_unsigned_fixed;
12785 else
12786 encoding = DW_ATE_signed_fixed;
12787 break;
12788
12789 /* Dwarf2 doesn't know anything about complex ints, so use
12790 a user defined type for it. */
12791 case COMPLEX_TYPE:
12792 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12793 encoding = DW_ATE_complex_float;
12794 else
12795 encoding = DW_ATE_lo_user;
12796 break;
12797
12798 case BOOLEAN_TYPE:
12799 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12800 encoding = DW_ATE_boolean;
12801 break;
12802
12803 default:
12804 /* No other TREE_CODEs are Dwarf fundamental types. */
12805 gcc_unreachable ();
12806 }
12807
12808 base_type_result = new_die_raw (DW_TAG_base_type);
12809
12810 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12811 int_size_in_bytes (type));
12812 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12813
12814 if (need_endianity_attribute_p (reverse))
12815 add_AT_unsigned (base_type_result, DW_AT_endianity,
12816 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12817
12818 add_alignment_attribute (base_type_result, type);
12819
12820 if (fpt_used)
12821 {
12822 switch (fpt_info.scale_factor_kind)
12823 {
12824 case fixed_point_scale_factor_binary:
12825 add_AT_int (base_type_result, DW_AT_binary_scale,
12826 fpt_info.scale_factor.binary);
12827 break;
12828
12829 case fixed_point_scale_factor_decimal:
12830 add_AT_int (base_type_result, DW_AT_decimal_scale,
12831 fpt_info.scale_factor.decimal);
12832 break;
12833
12834 case fixed_point_scale_factor_arbitrary:
12835 /* Arbitrary scale factors cannot be described in standard DWARF,
12836 yet. */
12837 if (!dwarf_strict)
12838 {
12839 /* Describe the scale factor as a rational constant. */
12840 const dw_die_ref scale_factor
12841 = new_die (DW_TAG_constant, comp_unit_die (), type);
12842
12843 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12844 fpt_info.scale_factor.arbitrary.numerator);
12845 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12846 fpt_info.scale_factor.arbitrary.denominator);
12847
12848 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12849 }
12850 break;
12851
12852 default:
12853 gcc_unreachable ();
12854 }
12855 }
12856
12857 if (type_bias)
12858 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12859 dw_scalar_form_constant
12860 | dw_scalar_form_exprloc
12861 | dw_scalar_form_reference,
12862 NULL);
12863
12864 return base_type_result;
12865 }
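
/* An illustrative sketch, assuming a target with 32-bit int: for plain
   `unsigned int' the function above yields a DIE of the form

       DW_TAG_base_type
         DW_AT_byte_size: 4
         DW_AT_encoding:  DW_ATE_unsigned

   The DW_AT_name attribute is attached later, by modified_type_die.
   For a fixed-point type with 16 fractional binary digits, the lang
   hook path would instead pick DW_ATE_signed_fixed or
   DW_ATE_unsigned_fixed and add DW_AT_binary_scale: -16.  */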
12866
12867 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12868 named 'auto' in its type: return true for it, false otherwise. */
12869
12870 static inline bool
12871 is_cxx_auto (tree type)
12872 {
12873 if (is_cxx ())
12874 {
12875 tree name = TYPE_IDENTIFIER (type);
12876 if (name == get_identifier ("auto")
12877 || name == get_identifier ("decltype(auto)"))
12878 return true;
12879 }
12880 return false;
12881 }
12882
12883 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12884 given input type is a Dwarf "fundamental" type. Otherwise return zero. */
12885
12886 static inline int
12887 is_base_type (tree type)
12888 {
12889 switch (TREE_CODE (type))
12890 {
12891 case INTEGER_TYPE:
12892 case REAL_TYPE:
12893 case FIXED_POINT_TYPE:
12894 case COMPLEX_TYPE:
12895 case BOOLEAN_TYPE:
12896 return 1;
12897
12898 case VOID_TYPE:
12899 case ARRAY_TYPE:
12900 case RECORD_TYPE:
12901 case UNION_TYPE:
12902 case QUAL_UNION_TYPE:
12903 case ENUMERAL_TYPE:
12904 case FUNCTION_TYPE:
12905 case METHOD_TYPE:
12906 case POINTER_TYPE:
12907 case REFERENCE_TYPE:
12908 case NULLPTR_TYPE:
12909 case OFFSET_TYPE:
12910 case LANG_TYPE:
12911 case VECTOR_TYPE:
12912 return 0;
12913
12914 default:
12915 if (is_cxx_auto (type))
12916 return 0;
12917 gcc_unreachable ();
12918 }
12919
12920 return 0;
12921 }
12922
12923 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
12924 node, return the size in bits for the type if it is a constant, or else
12925 return the alignment for the type if the type's size is not constant, or
12926 else return BITS_PER_WORD if the type actually turns out to be an
12927 ERROR_MARK node. */
12928
12929 static inline unsigned HOST_WIDE_INT
12930 simple_type_size_in_bits (const_tree type)
12931 {
12932 if (TREE_CODE (type) == ERROR_MARK)
12933 return BITS_PER_WORD;
12934 else if (TYPE_SIZE (type) == NULL_TREE)
12935 return 0;
12936 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
12937 return tree_to_uhwi (TYPE_SIZE (type));
12938 else
12939 return TYPE_ALIGN (type);
12940 }
12941
12942 /* Similarly, but return an offset_int instead of UHWI. */
12943
12944 static inline offset_int
12945 offset_int_type_size_in_bits (const_tree type)
12946 {
12947 if (TREE_CODE (type) == ERROR_MARK)
12948 return BITS_PER_WORD;
12949 else if (TYPE_SIZE (type) == NULL_TREE)
12950 return 0;
12951 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
12952 return wi::to_offset (TYPE_SIZE (type));
12953 else
12954 return TYPE_ALIGN (type);
12955 }
12956
12957 /* Given a pointer to a tree node for a subrange type, return a pointer
12958 to a DIE that describes the given type. */
12959
12960 static dw_die_ref
12961 subrange_type_die (tree type, tree low, tree high, tree bias,
12962 dw_die_ref context_die)
12963 {
12964 dw_die_ref subrange_die;
12965 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
12966
12967 if (context_die == NULL)
12968 context_die = comp_unit_die ();
12969
12970 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
12971
12972 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
12973 {
12974 /* The size of the subrange type and its base type do not match,
12975 so we need to generate a size attribute for the subrange type. */
12976 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
12977 }
12978
12979 add_alignment_attribute (subrange_die, type);
12980
12981 if (low)
12982 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
12983 if (high)
12984 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
12985 if (bias && !dwarf_strict)
12986 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
12987 dw_scalar_form_constant
12988 | dw_scalar_form_exprloc
12989 | dw_scalar_form_reference,
12990 NULL);
12991
12992 return subrange_die;
12993 }
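
/* An illustrative sketch: for an Ada declaration such as
   `subtype Small is Integer range 1 .. 10', the function above yields
   roughly

       DW_TAG_subrange_type
         DW_AT_lower_bound: 1
         DW_AT_upper_bound: 10

   The DW_AT_type reference to the base integer type is added by the
   caller (modified_type_die), and DW_AT_byte_size is present only when
   the subrange's size differs from that of its base type.  */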
12994
12995 /* Returns the (const and/or volatile) cv_qualifiers associated with
12996 the decl node. This will normally be augmented with the
12997 cv_qualifiers of the underlying type in add_type_attribute. */
12998
12999 static int
13000 decl_quals (const_tree decl)
13001 {
13002 return ((TREE_READONLY (decl)
13003 /* The C++ front-end correctly marks reference-typed
13004 variables as readonly, but from a language (and debug
13005 info) standpoint they are not const-qualified. */
13006 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
13007 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
13008 | (TREE_THIS_VOLATILE (decl)
13009 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
13010 }
13011
13012 /* Determine the TYPE whose qualifiers match the largest strict subset
13013 of the given TYPE_QUALS, and return its qualifiers. Ignore all
13014 qualifiers outside QUAL_MASK. */
13015
13016 static int
13017 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
13018 {
13019 tree t;
13020 int best_rank = 0, best_qual = 0, max_rank;
13021
13022 type_quals &= qual_mask;
13023 max_rank = popcount_hwi (type_quals) - 1;
13024
13025 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
13026 t = TYPE_NEXT_VARIANT (t))
13027 {
13028 int q = TYPE_QUALS (t) & qual_mask;
13029
13030 if ((q & type_quals) == q && q != type_quals
13031 && check_base_type (t, type))
13032 {
13033 int rank = popcount_hwi (q);
13034
13035 if (rank > best_rank)
13036 {
13037 best_rank = rank;
13038 best_qual = q;
13039 }
13040 }
13041 }
13042
13043 return best_qual;
13044 }
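
/* An illustrative sketch: suppose TYPE is the `const volatile T'
   variant and the variant list of T also contains an unqualified T, a
   `const T' and a `const volatile __restrict T'.  With
   TYPE_QUALS == const|volatile, only the unqualified and `const'
   variants pass the strict-subset test (the restrict variant carries a
   qualifier outside TYPE_QUALS), and `const' wins with rank 1, so the
   function returns TYPE_QUAL_CONST.  modified_type_die can then emit
   just a DW_TAG_volatile_type wrapper around the existing `const T'
   DIE.  */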
13045
13046 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
13047 static const dwarf_qual_info_t dwarf_qual_info[] =
13048 {
13049 { TYPE_QUAL_CONST, DW_TAG_const_type },
13050 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
13051 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
13052 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
13053 };
13054 static const unsigned int dwarf_qual_info_size
13055 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
13056
13057 /* If DIE is a qualified DIE of some base DIE with the same parent,
13058 return the base DIE, otherwise return NULL. Set MASK to the
13059 qualifiers added compared to the returned DIE. */
13060
13061 static dw_die_ref
13062 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
13063 {
13064 unsigned int i;
13065 for (i = 0; i < dwarf_qual_info_size; i++)
13066 if (die->die_tag == dwarf_qual_info[i].t)
13067 break;
13068 if (i == dwarf_qual_info_size)
13069 return NULL;
13070 if (vec_safe_length (die->die_attr) != 1)
13071 return NULL;
13072 dw_die_ref type = get_AT_ref (die, DW_AT_type);
13073 if (type == NULL || type->die_parent != die->die_parent)
13074 return NULL;
13075 *mask |= dwarf_qual_info[i].q;
13076 if (depth)
13077 {
13078 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
13079 if (ret)
13080 return ret;
13081 }
13082 return type;
13083 }
13084
13085 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
13086 entry that chains the modifiers specified by CV_QUALS in front of the
13087 given type. REVERSE is true if the type is to be interpreted in the
13088 reverse storage order wrt the target order. */
13089
13090 static dw_die_ref
13091 modified_type_die (tree type, int cv_quals, bool reverse,
13092 dw_die_ref context_die)
13093 {
13094 enum tree_code code = TREE_CODE (type);
13095 dw_die_ref mod_type_die;
13096 dw_die_ref sub_die = NULL;
13097 tree item_type = NULL;
13098 tree qualified_type;
13099 tree name, low, high;
13100 dw_die_ref mod_scope;
13101 /* Only these cv-qualifiers are currently handled. */
13102 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
13103 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC |
13104 ENCODE_QUAL_ADDR_SPACE(~0U));
13105 const bool reverse_base_type
13106 = need_endianity_attribute_p (reverse) && is_base_type (type);
13107
13108 if (code == ERROR_MARK)
13109 return NULL;
13110
13111 if (lang_hooks.types.get_debug_type)
13112 {
13113 tree debug_type = lang_hooks.types.get_debug_type (type);
13114
13115 if (debug_type != NULL_TREE && debug_type != type)
13116 return modified_type_die (debug_type, cv_quals, reverse, context_die);
13117 }
13118
13119 cv_quals &= cv_qual_mask;
13120
13121 /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
13122 tag modifier (and not an attribute), which old consumers won't be
13123 able to handle. */
13124 if (dwarf_version < 3)
13125 cv_quals &= ~TYPE_QUAL_RESTRICT;
13126
13127 /* Likewise for DW_TAG_atomic_type for DWARFv5. */
13128 if (dwarf_version < 5)
13129 cv_quals &= ~TYPE_QUAL_ATOMIC;
13130
13131 /* See if we already have the appropriately qualified variant of
13132 this type. */
13133 qualified_type = get_qualified_type (type, cv_quals);
13134
13135 if (qualified_type == sizetype)
13136 {
13137 /* Try not to expose the internal sizetype type's name. */
13138 if (TYPE_NAME (qualified_type)
13139 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
13140 {
13141 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
13142
13143 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
13144 && (TYPE_PRECISION (t)
13145 == TYPE_PRECISION (qualified_type))
13146 && (TYPE_UNSIGNED (t)
13147 == TYPE_UNSIGNED (qualified_type)));
13148 qualified_type = t;
13149 }
13150 else if (qualified_type == sizetype
13151 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
13152 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
13153 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
13154 qualified_type = size_type_node;
13155 if (type == sizetype)
13156 type = qualified_type;
13157 }
13158
13159 /* If we do, then we can just use its DIE, if it exists. */
13160 if (qualified_type)
13161 {
13162 mod_type_die = lookup_type_die (qualified_type);
13163
13164 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
13165 dealt with specially: the DIE with the attribute, if it exists, is
13166 placed immediately after the regular DIE for the same base type. */
13167 if (mod_type_die
13168 && (!reverse_base_type
13169 || ((mod_type_die = mod_type_die->die_sib) != NULL
13170 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
13171 return mod_type_die;
13172 }
13173
13174 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
13175
13176 /* Handle C typedef types. */
13177 if (name
13178 && TREE_CODE (name) == TYPE_DECL
13179 && DECL_ORIGINAL_TYPE (name)
13180 && !DECL_ARTIFICIAL (name))
13181 {
13182 tree dtype = TREE_TYPE (name);
13183
13184 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
13185 if (qualified_type == dtype && !reverse_base_type)
13186 {
13187 tree origin = decl_ultimate_origin (name);
13188
13189 /* Typedef variants that have an abstract origin don't get their own
13190 type DIE (see gen_typedef_die), so fall back on the ultimate
13191 abstract origin instead. */
13192 if (origin != NULL && origin != name)
13193 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
13194 context_die);
13195
13196 /* For a named type, use the typedef. */
13197 gen_type_die (qualified_type, context_die);
13198 return lookup_type_die (qualified_type);
13199 }
13200 else
13201 {
13202 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
13203 dquals &= cv_qual_mask;
13204 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
13205 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
13206 /* cv-unqualified version of named type. Just use
13207 the unnamed type to which it refers. */
13208 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
13209 reverse, context_die);
13210 /* Else cv-qualified version of named type; fall through. */
13211 }
13212 }
13213
13214 mod_scope = scope_die_for (type, context_die);
13215
13216 if (cv_quals)
13217 {
13218 int sub_quals = 0, first_quals = 0;
13219 unsigned i;
13220 dw_die_ref first = NULL, last = NULL;
13221
13222 /* Determine a lesser qualified type that most closely matches
13223 this one. Then generate DW_TAG_* entries for the remaining
13224 qualifiers. */
13225 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
13226 cv_qual_mask);
13227 if (sub_quals && use_debug_types)
13228 {
13229 bool needed = false;
13230 /* If emitting type units, make sure the order of qualifiers
13231 is canonical. Thus, start from unqualified type if
13232 an earlier qualifier is missing in sub_quals, but some later
13233 one is present there. */
13234 for (i = 0; i < dwarf_qual_info_size; i++)
13235 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13236 needed = true;
13237 else if (needed && (dwarf_qual_info[i].q & cv_quals))
13238 {
13239 sub_quals = 0;
13240 break;
13241 }
13242 }
13243 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
13244 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
13245 {
13246 /* As not all intermediate qualified DIEs have corresponding
13247 tree types, ensure that qualified DIEs in the same scope
13248 as their DW_AT_type are emitted after their DW_AT_type,
13249 only with other qualified DIEs for the same type possibly
13250 in between them. Determine the range of such qualified
13251 DIEs now (first being the base type, last being corresponding
13252 last qualified DIE for it). */
13253 unsigned int count = 0;
13254 first = qualified_die_p (mod_type_die, &first_quals,
13255 dwarf_qual_info_size);
13256 if (first == NULL)
13257 first = mod_type_die;
13258 gcc_assert ((first_quals & ~sub_quals) == 0);
13259 for (count = 0, last = first;
13260 count < (1U << dwarf_qual_info_size);
13261 count++, last = last->die_sib)
13262 {
13263 int quals = 0;
13264 if (last == mod_scope->die_child)
13265 break;
13266 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
13267 != first)
13268 break;
13269 }
13270 }
13271
13272 for (i = 0; i < dwarf_qual_info_size; i++)
13273 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13274 {
13275 dw_die_ref d;
13276 if (first && first != last)
13277 {
13278 for (d = first->die_sib; ; d = d->die_sib)
13279 {
13280 int quals = 0;
13281 qualified_die_p (d, &quals, dwarf_qual_info_size);
13282 if (quals == (first_quals | dwarf_qual_info[i].q))
13283 break;
13284 if (d == last)
13285 {
13286 d = NULL;
13287 break;
13288 }
13289 }
13290 if (d)
13291 {
13292 mod_type_die = d;
13293 continue;
13294 }
13295 }
13296 if (first)
13297 {
13298 d = new_die_raw (dwarf_qual_info[i].t);
13299 add_child_die_after (mod_scope, d, last);
13300 last = d;
13301 }
13302 else
13303 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
13304 if (mod_type_die)
13305 add_AT_die_ref (d, DW_AT_type, mod_type_die);
13306 mod_type_die = d;
13307 first_quals |= dwarf_qual_info[i].q;
13308 }
13309 }
13310 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
13311 {
13312 dwarf_tag tag = DW_TAG_pointer_type;
13313 if (code == REFERENCE_TYPE)
13314 {
13315 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
13316 tag = DW_TAG_rvalue_reference_type;
13317 else
13318 tag = DW_TAG_reference_type;
13319 }
13320 mod_type_die = new_die (tag, mod_scope, type);
13321
13322 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
13323 simple_type_size_in_bits (type) / BITS_PER_UNIT);
13324 add_alignment_attribute (mod_type_die, type);
13325 item_type = TREE_TYPE (type);
13326
13327 addr_space_t as = TYPE_ADDR_SPACE (item_type);
13328 if (!ADDR_SPACE_GENERIC_P (as))
13329 {
13330 int action = targetm.addr_space.debug (as);
13331 if (action >= 0)
13332 {
13333 /* Positive values indicate an address_class. */
13334 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
13335 }
13336 else
13337 {
13338 /* Negative values indicate an (inverted) segment base reg. */
13339 dw_loc_descr_ref d
13340 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
13341 add_AT_loc (mod_type_die, DW_AT_segment, d);
13342 }
13343 }
13344 }
13345 else if (code == INTEGER_TYPE
13346 && TREE_TYPE (type) != NULL_TREE
13347 && subrange_type_for_debug_p (type, &low, &high))
13348 {
13349 tree bias = NULL_TREE;
13350 if (lang_hooks.types.get_type_bias)
13351 bias = lang_hooks.types.get_type_bias (type);
13352 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
13353 item_type = TREE_TYPE (type);
13354 }
13355 else if (is_base_type (type))
13356 {
13357 mod_type_die = base_type_die (type, reverse);
13358
13359 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
13360 if (reverse_base_type)
13361 {
13362 dw_die_ref after_die
13363 = modified_type_die (type, cv_quals, false, context_die);
13364 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
13365 }
13366 else
13367 add_child_die (comp_unit_die (), mod_type_die);
13368
13369 add_pubtype (type, mod_type_die);
13370 }
13371 else
13372 {
13373 gen_type_die (type, context_die);
13374
13375 /* We have to get the type_main_variant here (and pass that to the
13376 `lookup_type_die' routine) because the ..._TYPE node we have
13377 might simply be a *copy* of some original type node (where the
13378 copy was created to help us keep track of typedef names) and
13379 that copy might have a different TYPE_UID from the original
13380 ..._TYPE node. */
13381 if (TREE_CODE (type) == FUNCTION_TYPE
13382 || TREE_CODE (type) == METHOD_TYPE)
13383 {
13384 /* For function/method types, can't just use type_main_variant here,
13385 because that can have different ref-qualifiers for C++,
13386 but try to canonicalize. */
13387 tree main = TYPE_MAIN_VARIANT (type);
13388 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
13389 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
13390 && check_base_type (t, main)
13391 && check_lang_type (t, type))
13392 return lookup_type_die (t);
13393 return lookup_type_die (type);
13394 }
13395 else if (TREE_CODE (type) != VECTOR_TYPE
13396 && TREE_CODE (type) != ARRAY_TYPE)
13397 return lookup_type_die (type_main_variant (type));
13398 else
13399 /* Vectors have the debugging information in the type,
13400 not the main variant. */
13401 return lookup_type_die (type);
13402 }
13403
13404 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
13405 don't output a DW_TAG_typedef, since there isn't one in the
13406 user's program; just attach a DW_AT_name to the type.
13407 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
13408 if the base type already has the same name. */
13409 if (name
13410 && ((TREE_CODE (name) != TYPE_DECL
13411 && (qualified_type == TYPE_MAIN_VARIANT (type)
13412 || (cv_quals == TYPE_UNQUALIFIED)))
13413 || (TREE_CODE (name) == TYPE_DECL
13414 && TREE_TYPE (name) == qualified_type
13415 && DECL_NAME (name))))
13416 {
13417 if (TREE_CODE (name) == TYPE_DECL)
13418 /* Could just call add_name_and_src_coords_attributes here,
13419 but since this is a builtin type it doesn't have any
13420 useful source coordinates anyway. */
13421 name = DECL_NAME (name);
13422 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
13423 }
13424 /* This probably indicates a bug. */
13425 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
13426 {
13427 name = TYPE_IDENTIFIER (type);
13428 add_name_attribute (mod_type_die,
13429 name ? IDENTIFIER_POINTER (name) : "__unknown__");
13430 }
13431
13432 if (qualified_type && !reverse_base_type)
13433 equate_type_number_to_die (qualified_type, mod_type_die);
13434
13435 if (item_type)
13436 /* We must do this after the equate_type_number_to_die call, in case
13437 this is a recursive type. This ensures that the modified_type_die
13438 recursion will terminate even if the type is recursive. Recursive
13439 types are possible in Ada. */
13440 sub_die = modified_type_die (item_type,
13441 TYPE_QUALS_NO_ADDR_SPACE (item_type),
13442 reverse,
13443 context_die);
13444
13445 if (sub_die != NULL)
13446 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
13447
13448 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
13449 if (TYPE_ARTIFICIAL (type))
13450 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
13451
13452 return mod_type_die;
13453 }
13454
13455 /* Generate DIEs for the generic parameters of T.
13456 T must be either a generic type or a generic function.
13457 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
13458
13459 static void
13460 gen_generic_params_dies (tree t)
13461 {
13462 tree parms, args;
13463 int parms_num, i;
13464 dw_die_ref die = NULL;
13465 int non_default;
13466
13467 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
13468 return;
13469
13470 if (TYPE_P (t))
13471 die = lookup_type_die (t);
13472 else if (DECL_P (t))
13473 die = lookup_decl_die (t);
13474
13475 gcc_assert (die);
13476
13477 parms = lang_hooks.get_innermost_generic_parms (t);
13478 if (!parms)
13479 /* T has no generic parameters. It means T is neither a generic type
13480 nor a generic function. End of story. */
13481 return;
13482
13483 parms_num = TREE_VEC_LENGTH (parms);
13484 args = lang_hooks.get_innermost_generic_args (t);
13485 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
13486 non_default = int_cst_value (TREE_CHAIN (args));
13487 else
13488 non_default = TREE_VEC_LENGTH (args);
13489 for (i = 0; i < parms_num; i++)
13490 {
13491 tree parm, arg, arg_pack_elems;
13492 dw_die_ref parm_die;
13493
13494 parm = TREE_VEC_ELT (parms, i);
13495 arg = TREE_VEC_ELT (args, i);
13496 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
13497 gcc_assert (parm && TREE_VALUE (parm) && arg);
13498
13499 if (parm && TREE_VALUE (parm) && arg)
13500 {
13501 /* If PARM represents a template parameter pack,
13502 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
13503 by DW_TAG_template_*_parameter DIEs for the argument
13504 pack elements of ARG. Note that ARG would then be
13505 an argument pack. */
13506 if (arg_pack_elems)
13507 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
13508 arg_pack_elems,
13509 die);
13510 else
13511 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
13512 true /* emit name */, die);
13513 if (i >= non_default)
13514 add_AT_flag (parm_die, DW_AT_default_value, 1);
13515 }
13516 }
13517 }
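
/* An illustrative sketch: for a C++ instantiation such as S<int, 3> of
   `template <typename T, int N> struct S', the loop above typically
   hangs these children off the DIE for S<int, 3>:

       DW_TAG_template_type_param
         DW_AT_name: "T"
         DW_AT_type: -> int
       DW_TAG_template_value_param
         DW_AT_name: "N"
         DW_AT_type: -> int
         DW_AT_const_value: 3   (emitted later, once cgraph is ready)

   Trailing arguments that came from default template arguments also
   get DW_AT_default_value: 1.  */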
13518
13519 /* Create and return a DIE for PARM which should be
13520 the representation of a generic type parameter.
13521 For instance, in the C++ front end, PARM would be a template parameter.
13522 ARG is the argument to PARM.
13523 EMIT_NAME_P: if true, the DIE will have a DW_AT_name attribute set to the
13524 name of the PARM.
13525 PARENT_DIE is the parent DIE which the new created DIE should be added to,
13526 as a child node. */
13527
13528 static dw_die_ref
13529 generic_parameter_die (tree parm, tree arg,
13530 bool emit_name_p,
13531 dw_die_ref parent_die)
13532 {
13533 dw_die_ref tmpl_die = NULL;
13534 const char *name = NULL;
13535
13536 if (!parm || !DECL_NAME (parm) || !arg)
13537 return NULL;
13538
13539 /* We support non-type generic parameters and arguments,
13540 type generic parameters and arguments, as well as
13541 generic generic parameters (a.k.a. template template parameters in C++)
13542 and arguments. */
13543 if (TREE_CODE (parm) == PARM_DECL)
13544 /* PARM is a non-type generic parameter. */
13545 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
13546 else if (TREE_CODE (parm) == TYPE_DECL)
13547 /* PARM is a type generic parameter. */
13548 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
13549 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13550 /* PARM is a generic generic parameter.
13551 Its DIE is a GNU extension. It shall have a
13552 DW_AT_name attribute to represent the name of the template template
13553 parameter, and a DW_AT_GNU_template_name attribute to represent the
13554 name of the template template argument. */
13555 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
13556 parent_die, parm);
13557 else
13558 gcc_unreachable ();
13559
13560 if (tmpl_die)
13561 {
13562 tree tmpl_type;
13563
13564 /* If PARM is a generic parameter pack, it means we are
13565 emitting debug info for a template argument pack element.
13566 In other terms, ARG is a template argument pack element.
13567 In that case, we don't emit any DW_AT_name attribute for
13568 the die. */
13569 if (emit_name_p)
13570 {
13571 name = IDENTIFIER_POINTER (DECL_NAME (parm));
13572 gcc_assert (name);
13573 add_AT_string (tmpl_die, DW_AT_name, name);
13574 }
13575
13576 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13577 {
13578 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
13579 TMPL_DIE should have a child DW_AT_type attribute that is set
13580 to the type of the argument to PARM, which is ARG.
13581 If PARM is a type generic parameter, TMPL_DIE should have a
13582 child DW_AT_type that is set to ARG. */
13583 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
13584 add_type_attribute (tmpl_die, tmpl_type,
13585 (TREE_THIS_VOLATILE (tmpl_type)
13586 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
13587 false, parent_die);
13588 }
13589 else
13590 {
13591 /* So TMPL_DIE is a DIE representing a generic generic template
13592 parameter, a.k.a. a template template parameter in C++, and ARG
13593 is a template. */
13594
13595 /* The DW_AT_GNU_template_name attribute of the DIE must be set
13596 to the name of the argument. */
13597 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
13598 if (name)
13599 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
13600 }
13601
13602 if (TREE_CODE (parm) == PARM_DECL)
13603 /* So PARM is a non-type generic parameter.
13604 DWARF3 5.6.8 says we must set a DW_AT_const_value child
13605 attribute of TMPL_DIE whose value represents the value
13606 of ARG.
13607 We must be careful here:
13608 the value of ARG might reference some function decls.
13609 We might currently be emitting debug info for a generic
13610 type; since types are emitted before function decls, we don't
13611 know yet whether the function decls referenced by ARG will
13612 actually be emitted after the cgraph computations.
13613 So we must defer the generation of the DW_AT_const_value to
13614 after cgraph is ready. */
13615 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13616 }
13617
13618 return tmpl_die;
13619 }
13620
13621 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE
13622 representing PARM_PACK, which must be a template parameter pack.
13623 The returned DIE will be a child DIE of PARENT_DIE. */
13624
13625 static dw_die_ref
13626 template_parameter_pack_die (tree parm_pack,
13627 tree parm_pack_args,
13628 dw_die_ref parent_die)
13629 {
13630 dw_die_ref die;
13631 int j;
13632
13633 gcc_assert (parent_die && parm_pack);
13634
13635 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13636 add_name_and_src_coords_attributes (die, parm_pack);
13637 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13638 generic_parameter_die (parm_pack,
13639 TREE_VEC_ELT (parm_pack_args, j),
13640 false /* Don't emit DW_AT_name */,
13641 die);
13642 return die;
13643 }
13644
13645 /* Return the DBX register number described by a given RTL node. */
13646
13647 static unsigned int
13648 dbx_reg_number (const_rtx rtl)
13649 {
13650 unsigned regno = REGNO (rtl);
13651
13652 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13653
13654 #ifdef LEAF_REG_REMAP
13655 if (crtl->uses_only_leaf_regs)
13656 {
13657 int leaf_reg = LEAF_REG_REMAP (regno);
13658 if (leaf_reg != -1)
13659 regno = (unsigned) leaf_reg;
13660 }
13661 #endif
13662
13663 regno = DBX_REGISTER_NUMBER (regno);
13664 gcc_assert (regno != INVALID_REGNUM);
13665 return regno;
13666 }
13667
13668 /* Optionally add a DW_OP_piece term to a location description expression.
13669 DW_OP_piece is only added if the location description expression doesn't
13670 already end with DW_OP_piece. */
13671
13672 static void
13673 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13674 {
13675 dw_loc_descr_ref loc;
13676
13677 if (*list_head != NULL)
13678 {
13679 /* Find the end of the chain. */
13680 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13681 ;
13682
13683 if (loc->dw_loc_opc != DW_OP_piece)
13684 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13685 }
13686 }
13687
13688 /* Return a location descriptor that designates a machine register or
13689 zero if there is none. */
13690
13691 static dw_loc_descr_ref
13692 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13693 {
13694 rtx regs;
13695
13696 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13697 return 0;
13698
13699 /* We only use "frame base" when we're sure we're talking about the
13700 post-prologue local stack frame. We do this by *not* running
13701 register elimination until this point, and recognizing the special
13702 argument pointer and soft frame pointer rtx's.
13703 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13704 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13705 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13706 {
13707 dw_loc_descr_ref result = NULL;
13708
13709 if (dwarf_version >= 4 || !dwarf_strict)
13710 {
13711 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13712 initialized);
13713 if (result)
13714 add_loc_descr (&result,
13715 new_loc_descr (DW_OP_stack_value, 0, 0));
13716 }
13717 return result;
13718 }
13719
13720 regs = targetm.dwarf_register_span (rtl);
13721
13722 if (REG_NREGS (rtl) > 1 || regs)
13723 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13724 else
13725 {
13726 unsigned int dbx_regnum = dbx_reg_number (rtl);
13727 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13728 return 0;
13729 return one_reg_loc_descriptor (dbx_regnum, initialized);
13730 }
13731 }
13732
13733 /* Return a location descriptor that designates a machine register for
13734 a given hard register number. */
13735
13736 static dw_loc_descr_ref
13737 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13738 {
13739 dw_loc_descr_ref reg_loc_descr;
13740
13741 if (regno <= 31)
13742 reg_loc_descr
13743 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13744 else
13745 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13746
13747 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13748 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13749
13750 return reg_loc_descr;
13751 }
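
/* An illustrative sketch: register 3 becomes the one-byte opcode
   DW_OP_reg3, while register 40 needs DW_OP_regx with a uleb128
   operand, i.e. two bytes; uninitialized variables additionally get a
   trailing DW_OP_GNU_uninit.  */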
13752
13753 /* Given an RTL of a register, return a location descriptor that
13754 designates a value that spans more than one register. */
13755
13756 static dw_loc_descr_ref
13757 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13758 enum var_init_status initialized)
13759 {
13760 int size, i;
13761 dw_loc_descr_ref loc_result = NULL;
13762
13763 /* Simple, contiguous registers. */
13764 if (regs == NULL_RTX)
13765 {
13766 unsigned reg = REGNO (rtl);
13767 int nregs;
13768
13769 #ifdef LEAF_REG_REMAP
13770 if (crtl->uses_only_leaf_regs)
13771 {
13772 int leaf_reg = LEAF_REG_REMAP (reg);
13773 if (leaf_reg != -1)
13774 reg = (unsigned) leaf_reg;
13775 }
13776 #endif
13777
13778 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13779 nregs = REG_NREGS (rtl);
13780
13781 /* At present we only track constant-sized pieces. */
13782 if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (&size))
13783 return NULL;
13784 size /= nregs;
13785
13786 loc_result = NULL;
13787 while (nregs--)
13788 {
13789 dw_loc_descr_ref t;
13790
13791 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13792 VAR_INIT_STATUS_INITIALIZED);
13793 add_loc_descr (&loc_result, t);
13794 add_loc_descr_op_piece (&loc_result, size);
13795 ++reg;
13796 }
13797 return loc_result;
13798 }
13799
13800 /* Now onto stupid register sets in non-contiguous locations. */
13801
13802 gcc_assert (GET_CODE (regs) == PARALLEL);
13803
13804 /* At present we only track constant-sized pieces. */
13805 if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (&size))
13806 return NULL;
13807 loc_result = NULL;
13808
13809 for (i = 0; i < XVECLEN (regs, 0); ++i)
13810 {
13811 dw_loc_descr_ref t;
13812
13813 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13814 VAR_INIT_STATUS_INITIALIZED);
13815 add_loc_descr (&loc_result, t);
13816 add_loc_descr_op_piece (&loc_result, size);
13817 }
13818
13819 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13820 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13821 return loc_result;
13822 }
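
/* An illustrative sketch, assuming DBX_REGISTER_NUMBER maps hard
   registers 4 and 5 to DWARF registers 4 and 5: for a 16-byte value
   living in those two consecutive 8-byte registers, the simple path
   above produces

       DW_OP_reg4  DW_OP_piece 8  DW_OP_reg5  DW_OP_piece 8

   i.e. one register location per piece, each covering size/nregs
   bytes.  The PARALLEL path does the same, but takes the registers
   (and hence the piece order) from the target's dwarf_register_span
   hook.  */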
13823
13824 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13825
13826 /* Return a location descriptor that designates a constant i,
13827 as a compound operation from constant (i >> shift), constant shift
13828 and DW_OP_shl. */
13829
13830 static dw_loc_descr_ref
13831 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13832 {
13833 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13834 add_loc_descr (&ret, int_loc_descriptor (shift));
13835 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13836 return ret;
13837 }
13838
13839 /* Return a location descriptor that designates constant POLY_I. */
13840
13841 static dw_loc_descr_ref
13842 int_loc_descriptor (poly_int64 poly_i)
13843 {
13844 enum dwarf_location_atom op;
13845
13846 HOST_WIDE_INT i;
13847 if (!poly_i.is_constant (&i))
13848 {
13849 /* Create location descriptions for the non-constant part and
13850 add any constant offset at the end. */
13851 dw_loc_descr_ref ret = NULL;
13852 HOST_WIDE_INT constant = poly_i.coeffs[0];
13853 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
13854 {
13855 HOST_WIDE_INT coeff = poly_i.coeffs[j];
13856 if (coeff != 0)
13857 {
13858 dw_loc_descr_ref start = ret;
13859 unsigned int factor;
13860 int bias;
13861 unsigned int regno = targetm.dwarf_poly_indeterminate_value
13862 (j, &factor, &bias);
13863
13864 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
13865 add COEFF * (REGNO / FACTOR) now and subtract
13866 COEFF * BIAS from the final constant part. */
13867 constant -= coeff * bias;
13868 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
13869 if (coeff % factor == 0)
13870 coeff /= factor;
13871 else
13872 {
13873 int amount = exact_log2 (factor);
13874 gcc_assert (amount >= 0);
13875 add_loc_descr (&ret, int_loc_descriptor (amount));
13876 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13877 }
13878 if (coeff != 1)
13879 {
13880 add_loc_descr (&ret, int_loc_descriptor (coeff));
13881 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13882 }
13883 if (start)
13884 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
13885 }
13886 }
13887 loc_descr_plus_const (&ret, constant);
13888 return ret;
13889 }
13890
13891 /* Pick the smallest representation of a constant, rather than just
13892 defaulting to the LEB encoding. */
13893 if (i >= 0)
13894 {
13895 int clz = clz_hwi (i);
13896 int ctz = ctz_hwi (i);
13897 if (i <= 31)
13898 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13899 else if (i <= 0xff)
13900 op = DW_OP_const1u;
13901 else if (i <= 0xffff)
13902 op = DW_OP_const2u;
13903 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13904 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13905 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13906 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13907 while DW_OP_const4u is 5 bytes. */
13908 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13909 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13910 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13911 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13912 while DW_OP_const4u is 5 bytes. */
13913 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13914
13915 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13916 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13917 <= 4)
13918 {
13919 /* As i >= 2**31, the double cast above will yield a negative number.
13920 Since wrapping is defined in DWARF expressions we can output big
13921 positive integers as small negative ones, regardless of the size
13922 of host wide ints.
13923
13924 Here, since the evaluator will handle 32-bit values and since i >=
13925 2**31, we know it's going to be interpreted as a negative literal:
13926 store it this way if we can do better than 5 bytes this way. */
13927 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
13928 }
13929 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13930 op = DW_OP_const4u;
13931
13932 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
13933 least 6 bytes: see if we can do better before falling back to it. */
13934 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13935 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
13936 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
13937 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13938 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
13939 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
13940 >= HOST_BITS_PER_WIDE_INT)
13941 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
13942 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
13943 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
13944 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
13945 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
13946 && size_of_uleb128 (i) > 6)
13947 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
13948 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
13949 else
13950 op = DW_OP_constu;
13951 }
13952 else
13953 {
13954 if (i >= -0x80)
13955 op = DW_OP_const1s;
13956 else if (i >= -0x8000)
13957 op = DW_OP_const2s;
13958 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
13959 {
13960 if (size_of_int_loc_descriptor (i) < 5)
13961 {
13962 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13963 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13964 return ret;
13965 }
13966 op = DW_OP_const4s;
13967 }
13968 else
13969 {
13970 if (size_of_int_loc_descriptor (i)
13971 < (unsigned long) 1 + size_of_sleb128 (i))
13972 {
13973 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13974 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13975 return ret;
13976 }
13977 op = DW_OP_consts;
13978 }
13979 }
13980
13981 return new_loc_descr (op, i, 0);
13982 }
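
/* An illustrative sketch, assuming a 64-bit HOST_WIDE_INT: for
   i = 0x1f00000000 (that is, 31 << 32) we have clz == 27 and
   ctz == 32, so the shift form above is chosen and
   int_shift_loc_descriptor (i, 32) yields

       DW_OP_lit31  DW_OP_const1u 32  DW_OP_shl

   which is 4 bytes, whereas the generic DW_OP_constu encoding would
   need 1 + size_of_uleb128 (0x1f00000000) = 7 bytes.  */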
13983
13984 /* Likewise, for unsigned constants. */
13985
13986 static dw_loc_descr_ref
13987 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
13988 {
13989 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
13990 const unsigned HOST_WIDE_INT max_uint
13991 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
13992
13993 /* If possible, use the clever signed constants handling. */
13994 if (i <= max_int)
13995 return int_loc_descriptor ((HOST_WIDE_INT) i);
13996
13997 /* Here, we are left with positive numbers that cannot be represented as
13998 HOST_WIDE_INT, i.e.:
13999 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
14000
14001 Using a DW_OP_const4u/const8u/constu operation to encode them consumes a lot
14002 of bytes, whereas it may be better to output a negative integer: thanks to integer
14003 wrapping, we know that:
14004 x = x - 2 ** (8 * DWARF2_ADDR_SIZE)
14005 = x - 2 * (max (HOST_WIDE_INT) + 1)
14006 So numbers close to max (unsigned HOST_WIDE_INT) could be represented as
14007 small negative integers. Let's try that in cases it will clearly improve
14008 the encoding: there is no gain turning DW_OP_const4u into
14009 DW_OP_const4s. */
14010 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
14011 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
14012 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
14013 {
14014 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
14015
14016 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
14017 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
14018 const HOST_WIDE_INT second_shift
14019 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
14020
14021 /* So we finally have:
14022 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
14023 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
14024 return int_loc_descriptor (second_shift);
14025 }
14026
14027 /* Last chance: fallback to a simple constant operation. */
14028 return new_loc_descr
14029 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14030 ? DW_OP_const4u
14031 : DW_OP_const8u,
14032 i, 0);
14033 }
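
/* An illustrative sketch, assuming DWARF2_ADDR_SIZE == 8 and a 64-bit
   HOST_WIDE_INT: for i = 0xfffffffffffffff0 the wrapping trick above
   computes second_shift = -16, so the value is emitted as
   DW_OP_const1s -16 (2 bytes); a DWARF evaluator working modulo 2**64
   reads it back as 0xfffffffffffffff0.  The DW_OP_const8u fallback
   would have taken 9 bytes.  */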
14034
14035 /* Generate and return a location description that computes the unsigned
14036 comparison of the two stack top entries (a OP b where b is the top-most
14037 entry and a is the second one). The KIND of comparison can be LT_EXPR,
14038 LE_EXPR, GT_EXPR or GE_EXPR. */
14039
14040 static dw_loc_descr_ref
14041 uint_comparison_loc_list (enum tree_code kind)
14042 {
14043 enum dwarf_location_atom op, flip_op;
14044 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
14045
14046 switch (kind)
14047 {
14048 case LT_EXPR:
14049 op = DW_OP_lt;
14050 break;
14051 case LE_EXPR:
14052 op = DW_OP_le;
14053 break;
14054 case GT_EXPR:
14055 op = DW_OP_gt;
14056 break;
14057 case GE_EXPR:
14058 op = DW_OP_ge;
14059 break;
14060 default:
14061 gcc_unreachable ();
14062 }
14063
14064 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14065 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
14066
14067 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
14068 possible to perform unsigned comparisons: we just have to distinguish
14069 three cases:
14070
14071 1. when a and b have the same sign (as signed integers); then we should
14072 return: a OP(signed) b;
14073
14074 2. when a is a negative signed integer while b is a positive one, then a
14075 is a greater unsigned integer than b; likewise when a and b's roles
14076 are flipped.
14077
14078 So first, compare the sign of the two operands. */
14079 ret = new_loc_descr (DW_OP_over, 0, 0);
14080 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14081 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
14082 /* If they have different signs (i.e. they have different sign bits), then
14083 the stack top value now has the sign bit set and thus it's smaller than
14084 zero. */
14085 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
14086 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
14087 add_loc_descr (&ret, bra_node);
14088
14089 /* We are in case 1. At this point, we know both operands have the same
14090 sign, so it's safe to use the built-in signed comparison. */
14091 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14092 add_loc_descr (&ret, jmp_node);
14093
14094 /* We are in case 2. Here, we know both operands do not have the same sign,
14095 so we have to flip the signed comparison. */
14096 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
14097 tmp = new_loc_descr (flip_op, 0, 0);
14098 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14099 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
14100 add_loc_descr (&ret, tmp);
14101
14102 /* This dummy operation is necessary to make the two branches join. */
14103 tmp = new_loc_descr (DW_OP_nop, 0, 0);
14104 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14105 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
14106 add_loc_descr (&ret, tmp);
14107
14108 return ret;
14109 }
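
/* An illustrative sketch: evaluating the expression built above for a
   LT_EXPR comparison with a = 0x80000001 and b = 1 on a 32-bit target.
   DW_OP_over DW_OP_over DW_OP_xor leaves a ^ b = 0x80000000 on top;
   its sign bit is set, so the DW_OP_lit0 DW_OP_lt test is true and
   DW_OP_bra branches to the flipped signed comparison DW_OP_gt, which
   computes a > b (signed) == false.  That is indeed the value of
   a < b as an unsigned comparison, since 0x80000001 >= 1.  */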
14110
14111 /* Likewise, but takes the location description lists (might be destructive on
14112 them). Return NULL if either is NULL or if concatenation fails. */
14113
14114 static dw_loc_list_ref
14115 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
14116 enum tree_code kind)
14117 {
14118 if (left == NULL || right == NULL)
14119 return NULL;
14120
14121 add_loc_list (&left, right);
14122 if (left == NULL)
14123 return NULL;
14124
14125 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
14126 return left;
14127 }
14128
14129 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
14130 without actually allocating it. */
14131
14132 static unsigned long
14133 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
14134 {
14135 return size_of_int_loc_descriptor (i >> shift)
14136 + size_of_int_loc_descriptor (shift)
14137 + 1;
14138 }
14139
14140 /* Return size_of_locs (int_loc_descriptor (i)) without
14141 actually allocating it. */
14142
14143 static unsigned long
14144 size_of_int_loc_descriptor (HOST_WIDE_INT i)
14145 {
14146 unsigned long s;
14147
14148 if (i >= 0)
14149 {
14150 int clz, ctz;
14151 if (i <= 31)
14152 return 1;
14153 else if (i <= 0xff)
14154 return 2;
14155 else if (i <= 0xffff)
14156 return 3;
14157 clz = clz_hwi (i);
14158 ctz = ctz_hwi (i);
14159 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14160 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14161 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14162 - clz - 5);
14163 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14164 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14165 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14166 - clz - 8);
14167 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14168 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14169 <= 4)
14170 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14171 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14172 return 5;
14173 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
14174 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14175 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14176 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14177 - clz - 8);
14178 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14179 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
14180 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14181 - clz - 16);
14182 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14183 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14184 && s > 6)
14185 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14186 - clz - 32);
14187 else
14188 return 1 + s;
14189 }
14190 else
14191 {
14192 if (i >= -0x80)
14193 return 2;
14194 else if (i >= -0x8000)
14195 return 3;
14196 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14197 {
14198 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14199 {
14200 s = size_of_int_loc_descriptor (-i) + 1;
14201 if (s < 5)
14202 return s;
14203 }
14204 return 5;
14205 }
14206 else
14207 {
14208 unsigned long r = 1 + size_of_sleb128 (i);
14209 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14210 {
14211 s = size_of_int_loc_descriptor (-i) + 1;
14212 if (s < r)
14213 return s;
14214 }
14215 return r;
14216 }
14217 }
14218 }
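
/* For instance, size_of_int_loc_descriptor (5) is 1 (DW_OP_lit5),
   size_of_int_loc_descriptor (0x1234) is 3 (DW_OP_const2u <0x1234>) and
   size_of_int_loc_descriptor (-1) is 2 (DW_OP_const1s <-1>), matching the
   descriptors int_loc_descriptor would build for those values. */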
14219
14220 /* Return a location description representing the "address" of integer value I.
14221 This can appear only as a toplevel expression. */
14222
14223 static dw_loc_descr_ref
14224 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
14225 {
14226 int litsize;
14227 dw_loc_descr_ref loc_result = NULL;
14228
14229 if (!(dwarf_version >= 4 || !dwarf_strict))
14230 return NULL;
14231
14232 litsize = size_of_int_loc_descriptor (i);
14233 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
14234 is more compact. For DW_OP_stack_value we need:
14235 litsize + 1 (DW_OP_stack_value)
14236 and for DW_OP_implicit_value:
14237 1 (DW_OP_implicit_value) + 1 (length) + size. */
14238 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
14239 {
14240 loc_result = int_loc_descriptor (i);
14241 add_loc_descr (&loc_result,
14242 new_loc_descr (DW_OP_stack_value, 0, 0));
14243 return loc_result;
14244 }
14245
14246 loc_result = new_loc_descr (DW_OP_implicit_value,
14247 size, 0);
14248 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
14249 loc_result->dw_loc_oprnd2.v.val_int = i;
14250 return loc_result;
14251 }
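
/* E.g. with DWARF2_ADDR_SIZE == 8, SIZE == 4 and I == 7, litsize is 1
   (DW_OP_lit7), so the DW_OP_stack_value form costs 2 bytes against 6 bytes
   for DW_OP_implicit_value and the result is DW_OP_lit7 DW_OP_stack_value. */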
14252
14253 /* Return a location descriptor that designates a base+offset location. */
14254
14255 static dw_loc_descr_ref
14256 based_loc_descr (rtx reg, poly_int64 offset,
14257 enum var_init_status initialized)
14258 {
14259 unsigned int regno;
14260 dw_loc_descr_ref result;
14261 dw_fde_ref fde = cfun->fde;
14262
14263 /* We only use "frame base" when we're sure we're talking about the
14264 post-prologue local stack frame. We do this by *not* running
14265 register elimination until this point, and recognizing the special
14266 argument pointer and soft frame pointer rtx's. */
14267 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
14268 {
14269 rtx elim = (ira_use_lra_p
14270 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
14271 : eliminate_regs (reg, VOIDmode, NULL_RTX));
14272
14273 if (elim != reg)
14274 {
14275 /* Allow hard frame pointer here even if frame pointer
14276 isn't used since hard frame pointer is encoded with
14277 DW_OP_fbreg which uses the DW_AT_frame_base attribute,
14278 not hard frame pointer directly. */
14279 elim = strip_offset_and_add (elim, &offset);
14280 gcc_assert (elim == hard_frame_pointer_rtx
14281 || elim == stack_pointer_rtx);
14282
14283 /* If drap register is used to align stack, use frame
14284 pointer + offset to access stack variables. If stack
14285 is aligned without drap, use stack pointer + offset to
14286 access stack variables. */
14287 if (crtl->stack_realign_tried
14288 && reg == frame_pointer_rtx)
14289 {
14290 int base_reg
14291 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
14292 ? HARD_FRAME_POINTER_REGNUM
14293 : REGNO (elim));
14294 return new_reg_loc_descr (base_reg, offset);
14295 }
14296
14297 gcc_assert (frame_pointer_fb_offset_valid);
14298 offset += frame_pointer_fb_offset;
14299 HOST_WIDE_INT const_offset;
14300 if (offset.is_constant (&const_offset))
14301 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14302 else
14303 {
14304 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
14305 loc_descr_plus_const (&ret, offset);
14306 return ret;
14307 }
14308 }
14309 }
14310
14311 regno = REGNO (reg);
14312 #ifdef LEAF_REG_REMAP
14313 if (crtl->uses_only_leaf_regs)
14314 {
14315 int leaf_reg = LEAF_REG_REMAP (regno);
14316 if (leaf_reg != -1)
14317 regno = (unsigned) leaf_reg;
14318 }
14319 #endif
14320 regno = DWARF_FRAME_REGNUM (regno);
14321
14322 HOST_WIDE_INT const_offset;
14323 if (!optimize && fde
14324 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
14325 && offset.is_constant (&const_offset))
14326 {
14327 /* Use cfa+offset to represent the location of arguments passed
14328 on the stack when drap is used to align stack.
14329 Only do this when not optimizing; for optimized code var-tracking
14330 is supposed to track where the arguments live and the register
14331 used as vdrap or drap in some spot might be used for something
14332 else in another part of the routine. */
14333 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14334 }
14335
14336 result = new_reg_loc_descr (regno, offset);
14337
14338 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14339 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14340
14341 return result;
14342 }
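
/* Typically this yields DW_OP_fbreg <offset> for frame-based accesses (the
   offset being interpreted relative to the DW_AT_frame_base of the enclosing
   subprogram) and a DW_OP_breg<n>/DW_OP_bregx descriptor from
   new_reg_loc_descr for other base registers. */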
14343
14344 /* Return true if this RTL expression describes a base+offset calculation. */
14345
14346 static inline int
14347 is_based_loc (const_rtx rtl)
14348 {
14349 return (GET_CODE (rtl) == PLUS
14350 && ((REG_P (XEXP (rtl, 0))
14351 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
14352 && CONST_INT_P (XEXP (rtl, 1)))));
14353 }
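
/* E.g. (plus (reg:DI 6) (const_int 16)) is a based location as long as
   register 6 is a hard register, whereas a PLUS of two registers or a PLUS
   involving a pseudo register is not. */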
14354
14355 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
14356 failed. */
14357
14358 static dw_loc_descr_ref
14359 tls_mem_loc_descriptor (rtx mem)
14360 {
14361 tree base;
14362 dw_loc_descr_ref loc_result;
14363
14364 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
14365 return NULL;
14366
14367 base = get_base_address (MEM_EXPR (mem));
14368 if (base == NULL
14369 || !VAR_P (base)
14370 || !DECL_THREAD_LOCAL_P (base))
14371 return NULL;
14372
14373 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
14374 if (loc_result == NULL)
14375 return NULL;
14376
14377 if (maybe_ne (MEM_OFFSET (mem), 0))
14378 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
14379
14380 return loc_result;
14381 }
14382
14383 /* Output debug info about the reason why we failed to expand an expression
14384 as a DWARF expression. */
14385
14386 static void
14387 expansion_failed (tree expr, rtx rtl, char const *reason)
14388 {
14389 if (dump_file && (dump_flags & TDF_DETAILS))
14390 {
14391 fprintf (dump_file, "Failed to expand as dwarf: ");
14392 if (expr)
14393 print_generic_expr (dump_file, expr, dump_flags);
14394 if (rtl)
14395 {
14396 fprintf (dump_file, "\n");
14397 print_rtl (dump_file, rtl);
14398 }
14399 fprintf (dump_file, "\nReason: %s\n", reason);
14400 }
14401 }
14402
14403 /* Helper function for const_ok_for_output. */
14404
14405 static bool
14406 const_ok_for_output_1 (rtx rtl)
14407 {
14408 if (targetm.const_not_ok_for_debug_p (rtl))
14409 {
14410 if (GET_CODE (rtl) != UNSPEC)
14411 {
14412 expansion_failed (NULL_TREE, rtl,
14413 "Expression rejected for debug by the backend.\n");
14414 return false;
14415 }
14416
14417 /* If delegitimize_address couldn't do anything with the UNSPEC, and
14418 the target hook doesn't explicitly allow it in debug info, assume
14419 we can't express it in the debug info. */
14420 /* Don't complain about TLS UNSPECs, those are just too hard to
14421 delegitimize. Note this could be a non-decl SYMBOL_REF such as
14422 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
14423 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
14424 if (flag_checking
14425 && (XVECLEN (rtl, 0) == 0
14426 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
14427 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
14428 inform (current_function_decl
14429 ? DECL_SOURCE_LOCATION (current_function_decl)
14430 : UNKNOWN_LOCATION,
14431 #if NUM_UNSPEC_VALUES > 0
14432 "non-delegitimized UNSPEC %s (%d) found in variable location",
14433 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
14434 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
14435 XINT (rtl, 1));
14436 #else
14437 "non-delegitimized UNSPEC %d found in variable location",
14438 XINT (rtl, 1));
14439 #endif
14440 expansion_failed (NULL_TREE, rtl,
14441 "UNSPEC hasn't been delegitimized.\n");
14442 return false;
14443 }
14444
14445 if (CONST_POLY_INT_P (rtl))
14446 return false;
14447
14448 /* FIXME: Refer to PR60655. It is possible for simplification
14449 of rtl expressions in var tracking to produce such expressions.
14450 We should really identify / validate expressions
14451 enclosed in CONST that can be handled by assemblers on various
14452 targets and only handle legitimate cases here. */
14453 switch (GET_CODE (rtl))
14454 {
14455 case SYMBOL_REF:
14456 break;
14457 case NOT:
14458 case NEG:
14459 return false;
14460 case PLUS:
14461 {
14462 /* Make sure SYMBOL_REFs/UNSPECs appear in at most one of the
14463 operands. */
14464 subrtx_var_iterator::array_type array;
14465 bool first = false;
14466 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14467 if (SYMBOL_REF_P (*iter)
14468 || LABEL_P (*iter)
14469 || GET_CODE (*iter) == UNSPEC)
14470 {
14471 first = true;
14472 break;
14473 }
14474 if (!first)
14475 return true;
14476 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 1), ALL)
14477 if (SYMBOL_REF_P (*iter)
14478 || LABEL_P (*iter)
14479 || GET_CODE (*iter) == UNSPEC)
14480 return false;
14481 return true;
14482 }
14483 case MINUS:
14484 {
14485 /* Disallow negation of SYMBOL_REFs or UNSPECs when they
14486 appear in the second operand of MINUS. */
14487 subrtx_var_iterator::array_type array;
14488 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 1), ALL)
14489 if (SYMBOL_REF_P (*iter)
14490 || LABEL_P (*iter)
14491 || GET_CODE (*iter) == UNSPEC)
14492 return false;
14493 return true;
14494 }
14495 default:
14496 return true;
14497 }
14498
14499 if (CONSTANT_POOL_ADDRESS_P (rtl))
14500 {
14501 bool marked;
14502 get_pool_constant_mark (rtl, &marked);
14503 /* If all references to this pool constant were optimized away,
14504 it was not output and thus we can't represent it. */
14505 if (!marked)
14506 {
14507 expansion_failed (NULL_TREE, rtl,
14508 "Constant was removed from constant pool.\n");
14509 return false;
14510 }
14511 }
14512
14513 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14514 return false;
14515
14516 /* Avoid references to external symbols in debug info, on several targets
14517 the linker might even refuse to link when linking a shared library,
14518 and in many other cases the relocations for .debug_info/.debug_loc are
14519 dropped, so the address becomes zero anyway. Hidden symbols, guaranteed
14520 to be defined within the same shared library or executable are fine. */
14521 if (SYMBOL_REF_EXTERNAL_P (rtl))
14522 {
14523 tree decl = SYMBOL_REF_DECL (rtl);
14524
14525 if (decl == NULL || !targetm.binds_local_p (decl))
14526 {
14527 expansion_failed (NULL_TREE, rtl,
14528 "Symbol not defined in current TU.\n");
14529 return false;
14530 }
14531 }
14532
14533 return true;
14534 }
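
/* For example, a TLS SYMBOL_REF, an external SYMBOL_REF that does not bind
   locally, or a MINUS whose second operand contains a SYMBOL_REF all cause
   this to return false, while a locally bound, non-TLS SYMBOL_REF is
   accepted. */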
14535
14536 /* Return true if constant RTL can be emitted in DW_OP_addr or
14537 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
14538 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
14539
14540 static bool
14541 const_ok_for_output (rtx rtl)
14542 {
14543 if (GET_CODE (rtl) == SYMBOL_REF)
14544 return const_ok_for_output_1 (rtl);
14545
14546 if (GET_CODE (rtl) == CONST)
14547 {
14548 subrtx_var_iterator::array_type array;
14549 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14550 if (!const_ok_for_output_1 (*iter))
14551 return false;
14552 return true;
14553 }
14554
14555 return true;
14556 }
14557
14558 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
14559 if possible, NULL otherwise. */
14560
14561 static dw_die_ref
14562 base_type_for_mode (machine_mode mode, bool unsignedp)
14563 {
14564 dw_die_ref type_die;
14565 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
14566
14567 if (type == NULL)
14568 return NULL;
14569 switch (TREE_CODE (type))
14570 {
14571 case INTEGER_TYPE:
14572 case REAL_TYPE:
14573 break;
14574 default:
14575 return NULL;
14576 }
14577 type_die = lookup_type_die (type);
14578 if (!type_die)
14579 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
14580 comp_unit_die ());
14581 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
14582 return NULL;
14583 return type_die;
14584 }
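
/* On typical targets base_type_for_mode (SImode, true) returns the
   DW_TAG_base_type DIE for a 32-bit unsigned integer type, while vector and
   complex modes return NULL because type_for_mode does not hand back an
   INTEGER_TYPE or REAL_TYPE for them. */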
14585
14586 /* For OP descriptor assumed to be in unsigned MODE, convert it to an unsigned
14587 type matching MODE, or, if MODE is narrower than or as wide as
14588 DWARF2_ADDR_SIZE, untyped. Return NULL if the conversion is not
14589 possible. */
14590
14591 static dw_loc_descr_ref
14592 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
14593 {
14594 machine_mode outer_mode = mode;
14595 dw_die_ref type_die;
14596 dw_loc_descr_ref cvt;
14597
14598 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14599 {
14600 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
14601 return op;
14602 }
14603 type_die = base_type_for_mode (outer_mode, 1);
14604 if (type_die == NULL)
14605 return NULL;
14606 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14607 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14608 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14609 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14610 add_loc_descr (&op, cvt);
14611 return op;
14612 }
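
/* For modes no wider than DWARF2_ADDR_SIZE this just appends an operand-less
   DW_OP_convert, i.e. a conversion to the generic untyped type; for a wider
   mode such as TImode on a 64-bit target it appends DW_OP_convert with a
   reference to the matching unsigned base type DIE. */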
14613
14614 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
14615
14616 static dw_loc_descr_ref
14617 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
14618 dw_loc_descr_ref op1)
14619 {
14620 dw_loc_descr_ref ret = op0;
14621 add_loc_descr (&ret, op1);
14622 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14623 if (STORE_FLAG_VALUE != 1)
14624 {
14625 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
14626 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14627 }
14628 return ret;
14629 }
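
/* On the usual STORE_FLAG_VALUE == 1 targets the result is simply
   "<op0> <op1> <op>"; a target with STORE_FLAG_VALUE == -1 would additionally
   get DW_OP_const1s <-1> DW_OP_mul appended. */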
14630
14631 /* Subroutine of scompare_loc_descriptor for the case in which we're
14632 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14633 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
14634
14635 static dw_loc_descr_ref
14636 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14637 scalar_int_mode op_mode,
14638 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14639 {
14640 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14641 dw_loc_descr_ref cvt;
14642
14643 if (type_die == NULL)
14644 return NULL;
14645 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14646 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14647 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14648 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14649 add_loc_descr (&op0, cvt);
14650 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14651 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14652 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14653 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14654 add_loc_descr (&op1, cvt);
14655 return compare_loc_descriptor (op, op0, op1);
14656 }
14657
14658 /* Subroutine of scompare_loc_descriptor for the case in which we're
14659 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14660 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
14661
14662 static dw_loc_descr_ref
14663 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14664 scalar_int_mode op_mode,
14665 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14666 {
14667 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14668 /* For eq/ne, if the operands are known to be zero-extended,
14669 there is no need to do the fancy shifting up. */
14670 if (op == DW_OP_eq || op == DW_OP_ne)
14671 {
14672 dw_loc_descr_ref last0, last1;
14673 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14674 ;
14675 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14676 ;
14677 /* deref_size zero extends, and for constants we can check
14678 whether they are zero extended or not. */
14679 if (((last0->dw_loc_opc == DW_OP_deref_size
14680 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14681 || (CONST_INT_P (XEXP (rtl, 0))
14682 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14683 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14684 && ((last1->dw_loc_opc == DW_OP_deref_size
14685 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14686 || (CONST_INT_P (XEXP (rtl, 1))
14687 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14688 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14689 return compare_loc_descriptor (op, op0, op1);
14690
14691 /* EQ/NE comparison against constant in narrower type than
14692 DWARF2_ADDR_SIZE can be performed either as
14693 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14694 DW_OP_{eq,ne}
14695 or
14696 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14697 DW_OP_{eq,ne}. Pick whatever is shorter. */
14698 if (CONST_INT_P (XEXP (rtl, 1))
14699 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14700 && (size_of_int_loc_descriptor (shift) + 1
14701 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14702 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14703 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14704 & GET_MODE_MASK (op_mode))))
14705 {
14706 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14707 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14708 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14709 & GET_MODE_MASK (op_mode));
14710 return compare_loc_descriptor (op, op0, op1);
14711 }
14712 }
14713 add_loc_descr (&op0, int_loc_descriptor (shift));
14714 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14715 if (CONST_INT_P (XEXP (rtl, 1)))
14716 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14717 else
14718 {
14719 add_loc_descr (&op1, int_loc_descriptor (shift));
14720 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14721 }
14722 return compare_loc_descriptor (op, op0, op1);
14723 }
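
/* For instance, a signed comparison of two HImode values with
   DWARF2_ADDR_SIZE == 8 uses SHIFT == 48: both operands are shifted left by
   48 bits so that their sign bits become the sign bit of the address-sized
   stack entries, after which the plain signed comparison gives the right
   result. */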
14724
14725 /* Return location descriptor for signed comparison OP RTL. */
14726
14727 static dw_loc_descr_ref
14728 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14729 machine_mode mem_mode)
14730 {
14731 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14732 dw_loc_descr_ref op0, op1;
14733
14734 if (op_mode == VOIDmode)
14735 op_mode = GET_MODE (XEXP (rtl, 1));
14736 if (op_mode == VOIDmode)
14737 return NULL;
14738
14739 scalar_int_mode int_op_mode;
14740 if (dwarf_strict
14741 && dwarf_version < 5
14742 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14743 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14744 return NULL;
14745
14746 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14747 VAR_INIT_STATUS_INITIALIZED);
14748 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14749 VAR_INIT_STATUS_INITIALIZED);
14750
14751 if (op0 == NULL || op1 == NULL)
14752 return NULL;
14753
14754 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14755 {
14756 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14757 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14758
14759 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14760 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14761 }
14762 return compare_loc_descriptor (op, op0, op1);
14763 }
14764
14765 /* Return location descriptor for unsigned comparison OP RTL. */
14766
14767 static dw_loc_descr_ref
14768 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14769 machine_mode mem_mode)
14770 {
14771 dw_loc_descr_ref op0, op1;
14772
14773 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14774 if (test_op_mode == VOIDmode)
14775 test_op_mode = GET_MODE (XEXP (rtl, 1));
14776
14777 scalar_int_mode op_mode;
14778 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14779 return NULL;
14780
14781 if (dwarf_strict
14782 && dwarf_version < 5
14783 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14784 return NULL;
14785
14786 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14787 VAR_INIT_STATUS_INITIALIZED);
14788 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14789 VAR_INIT_STATUS_INITIALIZED);
14790
14791 if (op0 == NULL || op1 == NULL)
14792 return NULL;
14793
14794 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14795 {
14796 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14797 dw_loc_descr_ref last0, last1;
14798 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14799 ;
14800 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14801 ;
14802 if (CONST_INT_P (XEXP (rtl, 0)))
14803 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14804 /* deref_size zero extends, so no need to mask it again. */
14805 else if (last0->dw_loc_opc != DW_OP_deref_size
14806 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14807 {
14808 add_loc_descr (&op0, int_loc_descriptor (mask));
14809 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14810 }
14811 if (CONST_INT_P (XEXP (rtl, 1)))
14812 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14813 /* deref_size zero extends, so no need to mask it again. */
14814 else if (last1->dw_loc_opc != DW_OP_deref_size
14815 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14816 {
14817 add_loc_descr (&op1, int_loc_descriptor (mask));
14818 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14819 }
14820 }
14821 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14822 {
14823 HOST_WIDE_INT bias = 1;
14824 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14825 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14826 if (CONST_INT_P (XEXP (rtl, 1)))
14827 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14828 + INTVAL (XEXP (rtl, 1)));
14829 else
14830 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14831 bias, 0));
14832 }
14833 return compare_loc_descriptor (op, op0, op1);
14834 }
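
/* In the DWARF2_ADDR_SIZE-sized case the bias flips the sign bit of both
   operands, so the signed comparison orders the original values as unsigned
   ones: e.g. an all-ones operand becomes the maximum signed value and
   correctly compares above any small positive operand. */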
14835
14836 /* Return location descriptor for {U,S}{MIN,MAX}. */
14837
14838 static dw_loc_descr_ref
14839 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14840 machine_mode mem_mode)
14841 {
14842 enum dwarf_location_atom op;
14843 dw_loc_descr_ref op0, op1, ret;
14844 dw_loc_descr_ref bra_node, drop_node;
14845
14846 scalar_int_mode int_mode;
14847 if (dwarf_strict
14848 && dwarf_version < 5
14849 && (!is_a <scalar_int_mode> (mode, &int_mode)
14850 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14851 return NULL;
14852
14853 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14854 VAR_INIT_STATUS_INITIALIZED);
14855 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14856 VAR_INIT_STATUS_INITIALIZED);
14857
14858 if (op0 == NULL || op1 == NULL)
14859 return NULL;
14860
14861 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14862 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14863 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
14864 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14865 {
14866 /* Checked by the caller. */
14867 int_mode = as_a <scalar_int_mode> (mode);
14868 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14869 {
14870 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
14871 add_loc_descr (&op0, int_loc_descriptor (mask));
14872 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14873 add_loc_descr (&op1, int_loc_descriptor (mask));
14874 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14875 }
14876 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14877 {
14878 HOST_WIDE_INT bias = 1;
14879 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14880 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14881 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14882 }
14883 }
14884 else if (is_a <scalar_int_mode> (mode, &int_mode)
14885 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14886 {
14887 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
14888 add_loc_descr (&op0, int_loc_descriptor (shift));
14889 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14890 add_loc_descr (&op1, int_loc_descriptor (shift));
14891 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14892 }
14893 else if (is_a <scalar_int_mode> (mode, &int_mode)
14894 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14895 {
14896 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
14897 dw_loc_descr_ref cvt;
14898 if (type_die == NULL)
14899 return NULL;
14900 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14901 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14902 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14903 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14904 add_loc_descr (&op0, cvt);
14905 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14906 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14907 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14908 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14909 add_loc_descr (&op1, cvt);
14910 }
14911
14912 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14913 op = DW_OP_lt;
14914 else
14915 op = DW_OP_gt;
14916 ret = op0;
14917 add_loc_descr (&ret, op1);
14918 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14919 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14920 add_loc_descr (&ret, bra_node);
14921 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14922 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
14923 add_loc_descr (&ret, drop_node);
14924 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14925 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
14926 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
14927 && is_a <scalar_int_mode> (mode, &int_mode)
14928 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14929 ret = convert_descriptor_to_mode (int_mode, ret);
14930 return ret;
14931 }
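
/* The core of the emitted sequence is roughly
     <a> DW_OP_dup <b> DW_OP_swap DW_OP_over DW_OP_{lt,gt} DW_OP_bra <L>
     DW_OP_swap L: DW_OP_drop
   i.e. copies of both operands stay on the stack, the comparison decides
   whether to swap them, and dropping the top entry leaves the desired
   minimum or maximum behind. */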
14932
14933 /* Helper function for mem_loc_descriptor. Perform binary operation OP,
14934 but convert the arguments to TYPE_DIE first and convert the result
14935 back to an unsigned value of MODE afterwards. */
14936
14937 static dw_loc_descr_ref
14938 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
14939 scalar_int_mode mode, machine_mode mem_mode)
14940 {
14941 dw_loc_descr_ref cvt, op0, op1;
14942
14943 if (type_die == NULL)
14944 return NULL;
14945 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14946 VAR_INIT_STATUS_INITIALIZED);
14947 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14948 VAR_INIT_STATUS_INITIALIZED);
14949 if (op0 == NULL || op1 == NULL)
14950 return NULL;
14951 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14952 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14953 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14954 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14955 add_loc_descr (&op0, cvt);
14956 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14957 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14958 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14959 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14960 add_loc_descr (&op1, cvt);
14961 add_loc_descr (&op0, op1);
14962 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
14963 return convert_descriptor_to_mode (mode, op0);
14964 }
14965
14966 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
14967 const0 is DW_OP_lit0 or corresponding typed constant,
14968 const1 is DW_OP_lit1 or corresponding typed constant
14969 and constMSB is constant with just the MSB bit set
14970 for the mode):
14971 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14972 L1: const0 DW_OP_swap
14973 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
14974 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14975 L3: DW_OP_drop
14976 L4: DW_OP_nop
14977
14978 CTZ is similar:
14979 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14980 L1: const0 DW_OP_swap
14981 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14982 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14983 L3: DW_OP_drop
14984 L4: DW_OP_nop
14985
14986 FFS is similar:
14987 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
14988 L1: const1 DW_OP_swap
14989 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14990 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14991 L3: DW_OP_drop
14992 L4: DW_OP_nop */
14993
14994 static dw_loc_descr_ref
14995 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
14996 machine_mode mem_mode)
14997 {
14998 dw_loc_descr_ref op0, ret, tmp;
14999 HOST_WIDE_INT valv;
15000 dw_loc_descr_ref l1jump, l1label;
15001 dw_loc_descr_ref l2jump, l2label;
15002 dw_loc_descr_ref l3jump, l3label;
15003 dw_loc_descr_ref l4jump, l4label;
15004 rtx msb;
15005
15006 if (GET_MODE (XEXP (rtl, 0)) != mode)
15007 return NULL;
15008
15009 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15010 VAR_INIT_STATUS_INITIALIZED);
15011 if (op0 == NULL)
15012 return NULL;
15013 ret = op0;
15014 if (GET_CODE (rtl) == CLZ)
15015 {
15016 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15017 valv = GET_MODE_BITSIZE (mode);
15018 }
15019 else if (GET_CODE (rtl) == FFS)
15020 valv = 0;
15021 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15022 valv = GET_MODE_BITSIZE (mode);
15023 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15024 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
15025 add_loc_descr (&ret, l1jump);
15026 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15027 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
15028 VAR_INIT_STATUS_INITIALIZED);
15029 if (tmp == NULL)
15030 return NULL;
15031 add_loc_descr (&ret, tmp);
15032 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
15033 add_loc_descr (&ret, l4jump);
15034 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
15035 ? const1_rtx : const0_rtx,
15036 mode, mem_mode,
15037 VAR_INIT_STATUS_INITIALIZED);
15038 if (l1label == NULL)
15039 return NULL;
15040 add_loc_descr (&ret, l1label);
15041 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15042 l2label = new_loc_descr (DW_OP_dup, 0, 0);
15043 add_loc_descr (&ret, l2label);
15044 if (GET_CODE (rtl) != CLZ)
15045 msb = const1_rtx;
15046 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
15047 msb = GEN_INT (HOST_WIDE_INT_1U
15048 << (GET_MODE_BITSIZE (mode) - 1));
15049 else
15050 msb = immed_wide_int_const
15051 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
15052 GET_MODE_PRECISION (mode)), mode);
15053 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
15054 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15055 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
15056 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
15057 else
15058 tmp = mem_loc_descriptor (msb, mode, mem_mode,
15059 VAR_INIT_STATUS_INITIALIZED);
15060 if (tmp == NULL)
15061 return NULL;
15062 add_loc_descr (&ret, tmp);
15063 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15064 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
15065 add_loc_descr (&ret, l3jump);
15066 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15067 VAR_INIT_STATUS_INITIALIZED);
15068 if (tmp == NULL)
15069 return NULL;
15070 add_loc_descr (&ret, tmp);
15071 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
15072 ? DW_OP_shl : DW_OP_shr, 0, 0));
15073 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15074 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
15075 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15076 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
15077 add_loc_descr (&ret, l2jump);
15078 l3label = new_loc_descr (DW_OP_drop, 0, 0);
15079 add_loc_descr (&ret, l3label);
15080 l4label = new_loc_descr (DW_OP_nop, 0, 0);
15081 add_loc_descr (&ret, l4label);
15082 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15083 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15084 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15085 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15086 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15087 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
15088 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15089 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
15090 return ret;
15091 }
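
/* E.g. for a CLZ of an SImode value 1 the loop above keeps shifting the value
   left and bumping the counter until the MSB mask matches, leaving 31 on the
   stack; that matches __builtin_clz (1) on a 32-bit operand. */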
15092
15093 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
15094 const1 is DW_OP_lit1 or corresponding typed constant):
15095 const0 DW_OP_swap
15096 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15097 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15098 L2: DW_OP_drop
15099
15100 PARITY is similar:
15101 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15102 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15103 L2: DW_OP_drop */
15104
15105 static dw_loc_descr_ref
15106 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
15107 machine_mode mem_mode)
15108 {
15109 dw_loc_descr_ref op0, ret, tmp;
15110 dw_loc_descr_ref l1jump, l1label;
15111 dw_loc_descr_ref l2jump, l2label;
15112
15113 if (GET_MODE (XEXP (rtl, 0)) != mode)
15114 return NULL;
15115
15116 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15117 VAR_INIT_STATUS_INITIALIZED);
15118 if (op0 == NULL)
15119 return NULL;
15120 ret = op0;
15121 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15122 VAR_INIT_STATUS_INITIALIZED);
15123 if (tmp == NULL)
15124 return NULL;
15125 add_loc_descr (&ret, tmp);
15126 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15127 l1label = new_loc_descr (DW_OP_dup, 0, 0);
15128 add_loc_descr (&ret, l1label);
15129 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15130 add_loc_descr (&ret, l2jump);
15131 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15132 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15133 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15134 VAR_INIT_STATUS_INITIALIZED);
15135 if (tmp == NULL)
15136 return NULL;
15137 add_loc_descr (&ret, tmp);
15138 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15139 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
15140 ? DW_OP_plus : DW_OP_xor, 0, 0));
15141 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15142 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15143 VAR_INIT_STATUS_INITIALIZED);
15144 add_loc_descr (&ret, tmp);
15145 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15146 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15147 add_loc_descr (&ret, l1jump);
15148 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15149 add_loc_descr (&ret, l2label);
15150 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15151 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15152 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15153 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15154 return ret;
15155 }
15156
15157 /* BSWAP (constS is initial shift count, either 56 or 24):
15158 constS const0
15159 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
15160 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
15161 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
15162 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
15163 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
15164
15165 static dw_loc_descr_ref
15166 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
15167 machine_mode mem_mode)
15168 {
15169 dw_loc_descr_ref op0, ret, tmp;
15170 dw_loc_descr_ref l1jump, l1label;
15171 dw_loc_descr_ref l2jump, l2label;
15172
15173 if (BITS_PER_UNIT != 8
15174 || (GET_MODE_BITSIZE (mode) != 32
15175 && GET_MODE_BITSIZE (mode) != 64))
15176 return NULL;
15177
15178 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15179 VAR_INIT_STATUS_INITIALIZED);
15180 if (op0 == NULL)
15181 return NULL;
15182
15183 ret = op0;
15184 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15185 mode, mem_mode,
15186 VAR_INIT_STATUS_INITIALIZED);
15187 if (tmp == NULL)
15188 return NULL;
15189 add_loc_descr (&ret, tmp);
15190 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15191 VAR_INIT_STATUS_INITIALIZED);
15192 if (tmp == NULL)
15193 return NULL;
15194 add_loc_descr (&ret, tmp);
15195 l1label = new_loc_descr (DW_OP_pick, 2, 0);
15196 add_loc_descr (&ret, l1label);
15197 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15198 mode, mem_mode,
15199 VAR_INIT_STATUS_INITIALIZED);
15200 add_loc_descr (&ret, tmp);
15201 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
15202 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15203 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15204 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
15205 VAR_INIT_STATUS_INITIALIZED);
15206 if (tmp == NULL)
15207 return NULL;
15208 add_loc_descr (&ret, tmp);
15209 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15210 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
15211 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15212 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15213 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15214 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15215 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15216 VAR_INIT_STATUS_INITIALIZED);
15217 add_loc_descr (&ret, tmp);
15218 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
15219 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15220 add_loc_descr (&ret, l2jump);
15221 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
15222 VAR_INIT_STATUS_INITIALIZED);
15223 add_loc_descr (&ret, tmp);
15224 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15225 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15226 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15227 add_loc_descr (&ret, l1jump);
15228 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15229 add_loc_descr (&ret, l2label);
15230 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15231 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15232 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15233 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15234 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15235 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15236 return ret;
15237 }
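
/* E.g. for a DImode BSWAP the initial shift count is 56 and the loop extracts
   one byte per iteration, stepping the count down by 8 until it reaches 0, so
   the eight bytes are reassembled in reverse order. */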
15238
15239 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
15240 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15241 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
15242 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
15243
15244 ROTATERT is similar:
15245 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
15246 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15247 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
15248
15249 static dw_loc_descr_ref
15250 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
15251 machine_mode mem_mode)
15252 {
15253 rtx rtlop1 = XEXP (rtl, 1);
15254 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
15255 int i;
15256
15257 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
15258 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
15259 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15260 VAR_INIT_STATUS_INITIALIZED);
15261 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
15262 VAR_INIT_STATUS_INITIALIZED);
15263 if (op0 == NULL || op1 == NULL)
15264 return NULL;
15265 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
15266 for (i = 0; i < 2; i++)
15267 {
15268 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
15269 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
15270 mode, mem_mode,
15271 VAR_INIT_STATUS_INITIALIZED);
15272 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
15273 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15274 ? DW_OP_const4u
15275 : HOST_BITS_PER_WIDE_INT == 64
15276 ? DW_OP_const8u : DW_OP_constu,
15277 GET_MODE_MASK (mode), 0);
15278 else
15279 mask[i] = NULL;
15280 if (mask[i] == NULL)
15281 return NULL;
15282 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
15283 }
15284 ret = op0;
15285 add_loc_descr (&ret, op1);
15286 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15287 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15288 if (GET_CODE (rtl) == ROTATERT)
15289 {
15290 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15291 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15292 GET_MODE_BITSIZE (mode), 0));
15293 }
15294 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15295 if (mask[0] != NULL)
15296 add_loc_descr (&ret, mask[0]);
15297 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15298 if (mask[1] != NULL)
15299 {
15300 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15301 add_loc_descr (&ret, mask[1]);
15302 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15303 }
15304 if (GET_CODE (rtl) == ROTATE)
15305 {
15306 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15307 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15308 GET_MODE_BITSIZE (mode), 0));
15309 }
15310 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15311 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15312 return ret;
15313 }
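
/* E.g. a QImode ROTATE of X by 3 essentially comes out as
   ((X << 3) & 0xff) | ((X & 0xff) >> 5), with the masking steps dropped when
   the mode is already DWARF2_ADDR_SIZE wide. */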
15314
15315 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
15316 for DEBUG_PARAMETER_REF RTL. */
15317
15318 static dw_loc_descr_ref
15319 parameter_ref_descriptor (rtx rtl)
15320 {
15321 dw_loc_descr_ref ret;
15322 dw_die_ref ref;
15323
15324 if (dwarf_strict)
15325 return NULL;
15326 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
15327 /* With LTO during LTRANS we get the late DIE that refers to the early
15328 DIE, thus we add another indirection here. This seems to confuse
15329 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
15330 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
15331 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
15332 if (ref)
15333 {
15334 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15335 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15336 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15337 }
15338 else
15339 {
15340 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15341 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
15342 }
15343 return ret;
15344 }
15345
15346 /* The following routine converts the RTL for a variable or parameter
15347 (resident in memory) into an equivalent Dwarf representation of a
15348 mechanism for getting the address of that same variable onto the top of a
15349 hypothetical "address evaluation" stack.
15350
15351 When creating memory location descriptors, we are effectively transforming
15352 the RTL for a memory-resident object into its Dwarf postfix expression
15353 equivalent. This routine recursively descends an RTL tree, turning
15354 it into Dwarf postfix code as it goes.
15355
15356 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
15357
15358 MEM_MODE is the mode of the memory reference, needed to handle some
15359 autoincrement addressing modes.
15360
15361 Return 0 if we can't represent the location. */
15362
15363 dw_loc_descr_ref
15364 mem_loc_descriptor (rtx rtl, machine_mode mode,
15365 machine_mode mem_mode,
15366 enum var_init_status initialized)
15367 {
15368 dw_loc_descr_ref mem_loc_result = NULL;
15369 enum dwarf_location_atom op;
15370 dw_loc_descr_ref op0, op1;
15371 rtx inner = NULL_RTX;
15372 poly_int64 offset;
15373
15374 if (mode == VOIDmode)
15375 mode = GET_MODE (rtl);
15376
15377 /* Note that for a dynamically sized array, the location we will generate a
15378 description of here will be the lowest numbered location which is
15379 actually within the array. That's *not* necessarily the same as the
15380 zeroth element of the array. */
15381
15382 rtl = targetm.delegitimize_address (rtl);
15383
15384 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
15385 return NULL;
15386
15387 scalar_int_mode int_mode, inner_mode, op1_mode;
15388 switch (GET_CODE (rtl))
15389 {
15390 case POST_INC:
15391 case POST_DEC:
15392 case POST_MODIFY:
15393 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
15394
15395 case SUBREG:
15396 /* The case of a subreg may arise when we have a local (register)
15397 variable or a formal (register) parameter which doesn't quite fill
15398 up an entire register. For now, just assume that it is
15399 legitimate to make the Dwarf info refer to the whole register which
15400 contains the given subreg. */
15401 if (!subreg_lowpart_p (rtl))
15402 break;
15403 inner = SUBREG_REG (rtl);
15404 /* FALLTHRU */
15405 case TRUNCATE:
15406 if (inner == NULL_RTX)
15407 inner = XEXP (rtl, 0);
15408 if (is_a <scalar_int_mode> (mode, &int_mode)
15409 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15410 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15411 #ifdef POINTERS_EXTEND_UNSIGNED
15412 || (int_mode == Pmode && mem_mode != VOIDmode)
15413 #endif
15414 )
15415 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
15416 {
15417 mem_loc_result = mem_loc_descriptor (inner,
15418 inner_mode,
15419 mem_mode, initialized);
15420 break;
15421 }
15422 if (dwarf_strict && dwarf_version < 5)
15423 break;
15424 if (is_a <scalar_int_mode> (mode, &int_mode)
15425 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15426 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
15427 : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15428 {
15429 dw_die_ref type_die;
15430 dw_loc_descr_ref cvt;
15431
15432 mem_loc_result = mem_loc_descriptor (inner,
15433 GET_MODE (inner),
15434 mem_mode, initialized);
15435 if (mem_loc_result == NULL)
15436 break;
15437 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15438 if (type_die == NULL)
15439 {
15440 mem_loc_result = NULL;
15441 break;
15442 }
15443 if (maybe_ne (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15444 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15445 else
15446 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
15447 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15448 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15449 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15450 add_loc_descr (&mem_loc_result, cvt);
15451 if (is_a <scalar_int_mode> (mode, &int_mode)
15452 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15453 {
15454 /* Convert it to untyped afterwards. */
15455 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15456 add_loc_descr (&mem_loc_result, cvt);
15457 }
15458 }
15459 break;
15460
15461 case REG:
15462 if (!is_a <scalar_int_mode> (mode, &int_mode)
15463 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15464 && rtl != arg_pointer_rtx
15465 && rtl != frame_pointer_rtx
15466 #ifdef POINTERS_EXTEND_UNSIGNED
15467 && (int_mode != Pmode || mem_mode == VOIDmode)
15468 #endif
15469 ))
15470 {
15471 dw_die_ref type_die;
15472 unsigned int dbx_regnum;
15473
15474 if (dwarf_strict && dwarf_version < 5)
15475 break;
15476 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
15477 break;
15478 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15479 if (type_die == NULL)
15480 break;
15481
15482 dbx_regnum = dbx_reg_number (rtl);
15483 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15484 break;
15485 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
15486 dbx_regnum, 0);
15487 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15488 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15489 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
15490 break;
15491 }
15492 /* Whenever a register number forms a part of the description of the
15493 method for calculating the (dynamic) address of a memory resident
15494 object, DWARF rules require the register number be referred to as
15495 a "base register". This distinction is not based in any way upon
15496 what category of register the hardware believes the given register
15497 belongs to. This is strictly DWARF terminology we're dealing with
15498 here. Note that in cases where the location of a memory-resident
15499 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
15500 OP_CONST (0)) the actual DWARF location descriptor that we generate
15501 may just be OP_BASEREG (basereg). This may look deceptively like
15502 the object in question was allocated to a register (rather than in
15503 memory) so DWARF consumers need to be aware of the subtle
15504 distinction between OP_REG and OP_BASEREG. */
15505 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
15506 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
15507 else if (stack_realign_drap
15508 && crtl->drap_reg
15509 && crtl->args.internal_arg_pointer == rtl
15510 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
15511 {
15512 /* If RTL is internal_arg_pointer, which has been optimized
15513 out, use DRAP instead. */
15514 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
15515 VAR_INIT_STATUS_INITIALIZED);
15516 }
15517 break;
15518
15519 case SIGN_EXTEND:
15520 case ZERO_EXTEND:
15521 if (!is_a <scalar_int_mode> (mode, &int_mode)
15522 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
15523 break;
15524 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15525 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15526 if (op0 == 0)
15527 break;
15528 else if (GET_CODE (rtl) == ZERO_EXTEND
15529 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15530 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
15531 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
15532 to expand zero extend as two shifts instead of
15533 masking. */
15534 && GET_MODE_SIZE (inner_mode) <= 4)
15535 {
15536 mem_loc_result = op0;
15537 add_loc_descr (&mem_loc_result,
15538 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
15539 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
15540 }
15541 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15542 {
15543 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
15544 shift *= BITS_PER_UNIT;
15545 if (GET_CODE (rtl) == SIGN_EXTEND)
15546 op = DW_OP_shra;
15547 else
15548 op = DW_OP_shr;
15549 mem_loc_result = op0;
15550 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15551 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15552 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15553 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15554 }
15555 else if (!dwarf_strict || dwarf_version >= 5)
15556 {
15557 dw_die_ref type_die1, type_die2;
15558 dw_loc_descr_ref cvt;
15559
15560 type_die1 = base_type_for_mode (inner_mode,
15561 GET_CODE (rtl) == ZERO_EXTEND);
15562 if (type_die1 == NULL)
15563 break;
15564 type_die2 = base_type_for_mode (int_mode, 1);
15565 if (type_die2 == NULL)
15566 break;
15567 mem_loc_result = op0;
15568 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15569 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15570 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
15571 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15572 add_loc_descr (&mem_loc_result, cvt);
15573 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15574 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15575 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
15576 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15577 add_loc_descr (&mem_loc_result, cvt);
15578 }
15579 break;
15580
15581 case MEM:
15582 {
15583 rtx new_rtl = avoid_constant_pool_reference (rtl);
15584 if (new_rtl != rtl)
15585 {
15586 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
15587 initialized);
15588 if (mem_loc_result != NULL)
15589 return mem_loc_result;
15590 }
15591 }
15592 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
15593 get_address_mode (rtl), mode,
15594 VAR_INIT_STATUS_INITIALIZED);
15595 if (mem_loc_result == NULL)
15596 mem_loc_result = tls_mem_loc_descriptor (rtl);
15597 if (mem_loc_result != NULL)
15598 {
15599 if (!is_a <scalar_int_mode> (mode, &int_mode)
15600 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15601 {
15602 dw_die_ref type_die;
15603 dw_loc_descr_ref deref;
15604 HOST_WIDE_INT size;
15605
15606 if (dwarf_strict && dwarf_version < 5)
15607 return NULL;
15608 if (!GET_MODE_SIZE (mode).is_constant (&size))
15609 return NULL;
15610 type_die
15611 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15612 if (type_die == NULL)
15613 return NULL;
15614 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type), size, 0);
15615 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15616 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15617 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
15618 add_loc_descr (&mem_loc_result, deref);
15619 }
15620 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15621 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
15622 else
15623 add_loc_descr (&mem_loc_result,
15624 new_loc_descr (DW_OP_deref_size,
15625 GET_MODE_SIZE (int_mode), 0));
15626 }
15627 break;
15628
15629 case LO_SUM:
15630 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15631
15632 case LABEL_REF:
15633 /* Some ports can transform a symbol ref into a label ref, because
15634 the symbol ref is too far away and has to be dumped into a constant
15635 pool. */
15636 case CONST:
15637 case SYMBOL_REF:
15638 case UNSPEC:
15639 if (!is_a <scalar_int_mode> (mode, &int_mode)
15640 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15641 #ifdef POINTERS_EXTEND_UNSIGNED
15642 && (int_mode != Pmode || mem_mode == VOIDmode)
15643 #endif
15644 ))
15645 break;
15646
15647 if (GET_CODE (rtl) == UNSPEC)
15648 {
15649 /* If delegitimize_address couldn't do anything with the UNSPEC, we
15650 can't express it in the debug info. This can happen e.g. with some
15651 TLS UNSPECs. Allow UNSPECs formerly from CONST that the backend
15652 approves. */
15653 bool not_ok = false;
15654 subrtx_var_iterator::array_type array;
15655 FOR_EACH_SUBRTX_VAR (iter, array, rtl, ALL)
15656 if (*iter != rtl && !CONSTANT_P (*iter))
15657 {
15658 not_ok = true;
15659 break;
15660 }
15661
15662 if (not_ok)
15663 break;
15664
15665 FOR_EACH_SUBRTX_VAR (iter, array, rtl, ALL)
15666 if (!const_ok_for_output_1 (*iter))
15667 {
15668 not_ok = true;
15669 break;
15670 }
15671
15672 if (not_ok)
15673 break;
15674
15675 rtl = gen_rtx_CONST (GET_MODE (rtl), rtl);
15676 goto symref;
15677 }
15678
15679 if (GET_CODE (rtl) == SYMBOL_REF
15680 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15681 {
15682 dw_loc_descr_ref temp;
15683
15684 /* If this is not defined, we have no way to emit the data. */
15685 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15686 break;
15687
15688 temp = new_addr_loc_descr (rtl, dtprel_true);
15689
15690 /* We check for DWARF 5 here because gdb did not implement
15691 DW_OP_form_tls_address until after 7.12. */
15692 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15693 ? DW_OP_form_tls_address
15694 : DW_OP_GNU_push_tls_address),
15695 0, 0);
15696 add_loc_descr (&mem_loc_result, temp);
15697
15698 break;
15699 }
15700
15701 if (!const_ok_for_output (rtl))
15702 {
15703 if (GET_CODE (rtl) == CONST)
15704 switch (GET_CODE (XEXP (rtl, 0)))
15705 {
15706 case NOT:
15707 op = DW_OP_not;
15708 goto try_const_unop;
15709 case NEG:
15710 op = DW_OP_neg;
15711 goto try_const_unop;
15712 try_const_unop:
15713 rtx arg;
15714 arg = XEXP (XEXP (rtl, 0), 0);
15715 if (!CONSTANT_P (arg))
15716 arg = gen_rtx_CONST (int_mode, arg);
15717 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15718 initialized);
15719 if (op0)
15720 {
15721 mem_loc_result = op0;
15722 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15723 }
15724 break;
15725 default:
15726 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15727 mem_mode, initialized);
15728 break;
15729 }
15730 break;
15731 }
15732
15733 symref:
15734 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15735 vec_safe_push (used_rtx_array, rtl);
15736 break;
15737
15738 case CONCAT:
15739 case CONCATN:
15740 case VAR_LOCATION:
15741 case DEBUG_IMPLICIT_PTR:
15742 expansion_failed (NULL_TREE, rtl,
15743 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15744 return 0;
15745
15746 case ENTRY_VALUE:
15747 if (dwarf_strict && dwarf_version < 5)
15748 return NULL;
15749 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15750 {
15751 if (!is_a <scalar_int_mode> (mode, &int_mode)
15752 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15753 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15754 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15755 else
15756 {
15757 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15758 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15759 return NULL;
15760 op0 = one_reg_loc_descriptor (dbx_regnum,
15761 VAR_INIT_STATUS_INITIALIZED);
15762 }
15763 }
15764 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15765 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15766 {
15767 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15768 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15769 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15770 return NULL;
15771 }
15772 else
15773 gcc_unreachable ();
15774 if (op0 == NULL)
15775 return NULL;
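/* Wrap the computed location in DW_OP_entry_value: the nested expression
   describes where the value lived when the current function was entered,
   which lets a consumer recover it even if it has since been clobbered.  */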
15776 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15777 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15778 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15779 break;
15780
15781 case DEBUG_PARAMETER_REF:
15782 mem_loc_result = parameter_ref_descriptor (rtl);
15783 break;
15784
15785 case PRE_MODIFY:
15786 /* Extract the PLUS expression nested inside and fall into
15787 PLUS code below. */
15788 rtl = XEXP (rtl, 1);
15789 goto plus;
15790
15791 case PRE_INC:
15792 case PRE_DEC:
15793 /* Turn these into a PLUS expression and fall into the PLUS code
15794 below. */
15795 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15796 gen_int_mode (GET_CODE (rtl) == PRE_INC
15797 ? GET_MODE_UNIT_SIZE (mem_mode)
15798 : -GET_MODE_UNIT_SIZE (mem_mode),
15799 mode));
15800
15801 /* fall through */
15802
15803 case PLUS:
15804 plus:
15805 if (is_based_loc (rtl)
15806 && is_a <scalar_int_mode> (mode, &int_mode)
15807 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15808 || XEXP (rtl, 0) == arg_pointer_rtx
15809 || XEXP (rtl, 0) == frame_pointer_rtx))
15810 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15811 INTVAL (XEXP (rtl, 1)),
15812 VAR_INIT_STATUS_INITIALIZED);
15813 else
15814 {
15815 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15816 VAR_INIT_STATUS_INITIALIZED);
15817 if (mem_loc_result == 0)
15818 break;
15819
15820 if (CONST_INT_P (XEXP (rtl, 1))
15821 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15822 <= DWARF2_ADDR_SIZE))
15823 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15824 else
15825 {
15826 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15827 VAR_INIT_STATUS_INITIALIZED);
15828 if (op1 == 0)
15829 return NULL;
15830 add_loc_descr (&mem_loc_result, op1);
15831 add_loc_descr (&mem_loc_result,
15832 new_loc_descr (DW_OP_plus, 0, 0));
15833 }
15834 }
15835 break;
15836
15837 /* If a pseudo-reg is optimized away, it is possible for it to
15838 be replaced with a MEM containing a multiply or shift. */
15839 case MINUS:
15840 op = DW_OP_minus;
15841 goto do_binop;
15842
15843 case MULT:
15844 op = DW_OP_mul;
15845 goto do_binop;
15846
15847 case DIV:
15848 if ((!dwarf_strict || dwarf_version >= 5)
15849 && is_a <scalar_int_mode> (mode, &int_mode)
15850 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15851 {
15852 mem_loc_result = typed_binop (DW_OP_div, rtl,
15853 base_type_for_mode (mode, 0),
15854 int_mode, mem_mode);
15855 break;
15856 }
15857 op = DW_OP_div;
15858 goto do_binop;
15859
15860 case UMOD:
15861 op = DW_OP_mod;
15862 goto do_binop;
15863
15864 case ASHIFT:
15865 op = DW_OP_shl;
15866 goto do_shift;
15867
15868 case ASHIFTRT:
15869 op = DW_OP_shra;
15870 goto do_shift;
15871
15872 case LSHIFTRT:
15873 op = DW_OP_shr;
15874 goto do_shift;
15875
15876 do_shift:
15877 if (!is_a <scalar_int_mode> (mode, &int_mode))
15878 break;
15879 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
15880 VAR_INIT_STATUS_INITIALIZED);
15881 {
15882 rtx rtlop1 = XEXP (rtl, 1);
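/* Describe the shift count in the same integer mode as the shifted value;
   a narrower count is zero-extended so both stack operands have the same
   width.  */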
15883 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
15884 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
15885 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
15886 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
15887 VAR_INIT_STATUS_INITIALIZED);
15888 }
15889
15890 if (op0 == 0 || op1 == 0)
15891 break;
15892
15893 mem_loc_result = op0;
15894 add_loc_descr (&mem_loc_result, op1);
15895 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15896 break;
15897
15898 case AND:
15899 op = DW_OP_and;
15900 goto do_binop;
15901
15902 case IOR:
15903 op = DW_OP_or;
15904 goto do_binop;
15905
15906 case XOR:
15907 op = DW_OP_xor;
15908 goto do_binop;
15909
15910 do_binop:
15911 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15912 VAR_INIT_STATUS_INITIALIZED);
15913 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15914 VAR_INIT_STATUS_INITIALIZED);
15915
15916 if (op0 == 0 || op1 == 0)
15917 break;
15918
15919 mem_loc_result = op0;
15920 add_loc_descr (&mem_loc_result, op1);
15921 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15922 break;
15923
15924 case MOD:
15925 if ((!dwarf_strict || dwarf_version >= 5)
15926 && is_a <scalar_int_mode> (mode, &int_mode)
15927 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15928 {
15929 mem_loc_result = typed_binop (DW_OP_mod, rtl,
15930 base_type_for_mode (mode, 0),
15931 int_mode, mem_mode);
15932 break;
15933 }
15934
15935 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15936 VAR_INIT_STATUS_INITIALIZED);
15937 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15938 VAR_INIT_STATUS_INITIALIZED);
15939
15940 if (op0 == 0 || op1 == 0)
15941 break;
15942
15943 mem_loc_result = op0;
15944 add_loc_descr (&mem_loc_result, op1);
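/* DW_OP_mod is an unsigned modulo, so synthesize the signed remainder:
   with OP0 below OP1 on the stack, the sequence over over div mul minus
   computes op0 - (op0 / op1) * op1 using the signed DW_OP_div.  */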
15945 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15946 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15947 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
15948 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
15949 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
15950 break;
15951
15952 case UDIV:
15953 if ((!dwarf_strict || dwarf_version >= 5)
15954 && is_a <scalar_int_mode> (mode, &int_mode))
15955 {
15956 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15957 {
15958 op = DW_OP_div;
15959 goto do_binop;
15960 }
15961 mem_loc_result = typed_binop (DW_OP_div, rtl,
15962 base_type_for_mode (int_mode, 1),
15963 int_mode, mem_mode);
15964 }
15965 break;
15966
15967 case NOT:
15968 op = DW_OP_not;
15969 goto do_unop;
15970
15971 case ABS:
15972 op = DW_OP_abs;
15973 goto do_unop;
15974
15975 case NEG:
15976 op = DW_OP_neg;
15977 goto do_unop;
15978
15979 do_unop:
15980 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15981 VAR_INIT_STATUS_INITIALIZED);
15982
15983 if (op0 == 0)
15984 break;
15985
15986 mem_loc_result = op0;
15987 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15988 break;
15989
15990 case CONST_INT:
15991 if (!is_a <scalar_int_mode> (mode, &int_mode)
15992 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15993 #ifdef POINTERS_EXTEND_UNSIGNED
15994 || (int_mode == Pmode
15995 && mem_mode != VOIDmode
15996 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
15997 #endif
15998 )
15999 {
16000 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
16001 break;
16002 }
16003 if ((!dwarf_strict || dwarf_version >= 5)
16004 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
16005 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
16006 {
16007 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
16008 scalar_int_mode amode;
16009 if (type_die == NULL)
16010 return NULL;
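/* For a non-negative value that fits in the DWARF address size, an
   ordinary literal followed by DW_OP_convert to the wide base type can be
   smaller than DW_OP_const_type, whose constant operand always occupies
   GET_MODE_SIZE bytes; use the literal form only when it is shorter.  */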
16011 if (INTVAL (rtl) >= 0
16012 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
16013 .exists (&amode))
16014 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
16015 /* const DW_OP_convert <XXX> vs.
16016 DW_OP_const_type <XXX, 1, const>. */
16017 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
16018 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
16019 {
16020 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
16021 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16022 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16023 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16024 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
16025 add_loc_descr (&mem_loc_result, op0);
16026 return mem_loc_result;
16027 }
16028 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
16029 INTVAL (rtl));
16030 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16031 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16032 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16033 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
16034 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
16035 else
16036 {
16037 mem_loc_result->dw_loc_oprnd2.val_class
16038 = dw_val_class_const_double;
16039 mem_loc_result->dw_loc_oprnd2.v.val_double
16040 = double_int::from_shwi (INTVAL (rtl));
16041 }
16042 }
16043 break;
16044
16045 case CONST_DOUBLE:
16046 if (!dwarf_strict || dwarf_version >= 5)
16047 {
16048 dw_die_ref type_die;
16049
16050 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
16051 CONST_DOUBLE rtx could represent either a large integer
16052 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
16053 the value is always a floating point constant.
16054
16055 When it is an integer, a CONST_DOUBLE is used whenever
16056 the constant requires 2 HWIs to be adequately represented.
16057 We output CONST_DOUBLEs as blocks. */
16058 if (mode == VOIDmode
16059 || (GET_MODE (rtl) == VOIDmode
16060 && maybe_ne (GET_MODE_BITSIZE (mode),
16061 HOST_BITS_PER_DOUBLE_INT)))
16062 break;
16063 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16064 if (type_die == NULL)
16065 return NULL;
16066 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16067 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16068 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16069 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16070 #if TARGET_SUPPORTS_WIDE_INT == 0
16071 if (!SCALAR_FLOAT_MODE_P (mode))
16072 {
16073 mem_loc_result->dw_loc_oprnd2.val_class
16074 = dw_val_class_const_double;
16075 mem_loc_result->dw_loc_oprnd2.v.val_double
16076 = rtx_to_double_int (rtl);
16077 }
16078 else
16079 #endif
16080 {
16081 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
16082 unsigned int length = GET_MODE_SIZE (float_mode);
16083 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16084
16085 insert_float (rtl, array);
16086 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16087 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16088 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16089 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16090 }
16091 }
16092 break;
16093
16094 case CONST_WIDE_INT:
16095 if (!dwarf_strict || dwarf_version >= 5)
16096 {
16097 dw_die_ref type_die;
16098
16099 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16100 if (type_die == NULL)
16101 return NULL;
16102 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16103 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16104 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16105 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16106 mem_loc_result->dw_loc_oprnd2.val_class
16107 = dw_val_class_wide_int;
16108 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16109 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
16110 }
16111 break;
16112
16113 case CONST_POLY_INT:
16114 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
16115 break;
16116
16117 case EQ:
16118 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
16119 break;
16120
16121 case GE:
16122 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16123 break;
16124
16125 case GT:
16126 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16127 break;
16128
16129 case LE:
16130 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16131 break;
16132
16133 case LT:
16134 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16135 break;
16136
16137 case NE:
16138 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
16139 break;
16140
16141 case GEU:
16142 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16143 break;
16144
16145 case GTU:
16146 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16147 break;
16148
16149 case LEU:
16150 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16151 break;
16152
16153 case LTU:
16154 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16155 break;
16156
16157 case UMIN:
16158 case UMAX:
16159 if (!SCALAR_INT_MODE_P (mode))
16160 break;
16161 /* FALLTHRU */
16162 case SMIN:
16163 case SMAX:
16164 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
16165 break;
16166
16167 case ZERO_EXTRACT:
16168 case SIGN_EXTRACT:
16169 if (CONST_INT_P (XEXP (rtl, 1))
16170 && CONST_INT_P (XEXP (rtl, 2))
16171 && is_a <scalar_int_mode> (mode, &int_mode)
16172 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
16173 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16174 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
16175 && ((unsigned) INTVAL (XEXP (rtl, 1))
16176 + (unsigned) INTVAL (XEXP (rtl, 2))
16177 <= GET_MODE_BITSIZE (int_mode)))
16178 {
16179 int shift, size;
16180 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
16181 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16182 if (op0 == 0)
16183 break;
16184 if (GET_CODE (rtl) == SIGN_EXTRACT)
16185 op = DW_OP_shra;
16186 else
16187 op = DW_OP_shr;
16188 mem_loc_result = op0;
16189 size = INTVAL (XEXP (rtl, 1));
16190 shift = INTVAL (XEXP (rtl, 2));
16191 if (BITS_BIG_ENDIAN)
16192 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
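/* Extract the field by shifting it up to the most significant end of the
   stack value and then back down: DW_OP_shr zero-extends the result for
   ZERO_EXTRACT, DW_OP_shra sign-extends it for SIGN_EXTRACT.  */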
16193 if (shift + size != (int) DWARF2_ADDR_SIZE)
16194 {
16195 add_loc_descr (&mem_loc_result,
16196 int_loc_descriptor (DWARF2_ADDR_SIZE
16197 - shift - size));
16198 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
16199 }
16200 if (size != (int) DWARF2_ADDR_SIZE)
16201 {
16202 add_loc_descr (&mem_loc_result,
16203 int_loc_descriptor (DWARF2_ADDR_SIZE - size));
16204 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16205 }
16206 }
16207 break;
16208
16209 case IF_THEN_ELSE:
16210 {
16211 dw_loc_descr_ref op2, bra_node, drop_node;
16212 op0 = mem_loc_descriptor (XEXP (rtl, 0),
16213 GET_MODE (XEXP (rtl, 0)) == VOIDmode
16214 ? word_mode : GET_MODE (XEXP (rtl, 0)),
16215 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16216 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16217 VAR_INIT_STATUS_INITIALIZED);
16218 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
16219 VAR_INIT_STATUS_INITIALIZED);
16220 if (op0 == NULL || op1 == NULL || op2 == NULL)
16221 break;
16222
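/* Emit <then> <else> <cond> DW_OP_bra DW_OP_swap DW_OP_drop: DW_OP_bra pops
   the condition and, if it is nonzero, branches to the final DW_OP_drop,
   which discards the else-value and leaves the then-value; otherwise the
   swap/drop pair discards the then-value and leaves the else-value.  */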
16223 mem_loc_result = op1;
16224 add_loc_descr (&mem_loc_result, op2);
16225 add_loc_descr (&mem_loc_result, op0);
16226 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16227 add_loc_descr (&mem_loc_result, bra_node);
16228 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
16229 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
16230 add_loc_descr (&mem_loc_result, drop_node);
16231 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16232 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
16233 }
16234 break;
16235
16236 case FLOAT_EXTEND:
16237 case FLOAT_TRUNCATE:
16238 case FLOAT:
16239 case UNSIGNED_FLOAT:
16240 case FIX:
16241 case UNSIGNED_FIX:
16242 if (!dwarf_strict || dwarf_version >= 5)
16243 {
16244 dw_die_ref type_die;
16245 dw_loc_descr_ref cvt;
16246
16247 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
16248 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16249 if (op0 == NULL)
16250 break;
16251 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
16252 && (GET_CODE (rtl) == FLOAT
16253 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
16254 {
16255 type_die = base_type_for_mode (int_mode,
16256 GET_CODE (rtl) == UNSIGNED_FLOAT);
16257 if (type_die == NULL)
16258 break;
16259 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16260 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16261 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16262 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16263 add_loc_descr (&op0, cvt);
16264 }
16265 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
16266 if (type_die == NULL)
16267 break;
16268 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16269 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16270 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16271 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16272 add_loc_descr (&op0, cvt);
16273 if (is_a <scalar_int_mode> (mode, &int_mode)
16274 && (GET_CODE (rtl) == FIX
16275 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
16276 {
16277 op0 = convert_descriptor_to_mode (int_mode, op0);
16278 if (op0 == NULL)
16279 break;
16280 }
16281 mem_loc_result = op0;
16282 }
16283 break;
16284
16285 case CLZ:
16286 case CTZ:
16287 case FFS:
16288 if (is_a <scalar_int_mode> (mode, &int_mode))
16289 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
16290 break;
16291
16292 case POPCOUNT:
16293 case PARITY:
16294 if (is_a <scalar_int_mode> (mode, &int_mode))
16295 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
16296 break;
16297
16298 case BSWAP:
16299 if (is_a <scalar_int_mode> (mode, &int_mode))
16300 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
16301 break;
16302
16303 case ROTATE:
16304 case ROTATERT:
16305 if (is_a <scalar_int_mode> (mode, &int_mode))
16306 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
16307 break;
16308
16309 case COMPARE:
16310 /* In theory, we could implement the above. */
16311 /* DWARF cannot represent the unsigned compare operations
16312 natively. */
16313 case SS_MULT:
16314 case US_MULT:
16315 case SS_DIV:
16316 case US_DIV:
16317 case SS_PLUS:
16318 case US_PLUS:
16319 case SS_MINUS:
16320 case US_MINUS:
16321 case SS_NEG:
16322 case US_NEG:
16323 case SS_ABS:
16324 case SS_ASHIFT:
16325 case US_ASHIFT:
16326 case SS_TRUNCATE:
16327 case US_TRUNCATE:
16328 case UNORDERED:
16329 case ORDERED:
16330 case UNEQ:
16331 case UNGE:
16332 case UNGT:
16333 case UNLE:
16334 case UNLT:
16335 case LTGT:
16336 case FRACT_CONVERT:
16337 case UNSIGNED_FRACT_CONVERT:
16338 case SAT_FRACT:
16339 case UNSIGNED_SAT_FRACT:
16340 case SQRT:
16341 case ASM_OPERANDS:
16342 case VEC_MERGE:
16343 case VEC_SELECT:
16344 case VEC_CONCAT:
16345 case VEC_DUPLICATE:
16346 case VEC_SERIES:
16347 case HIGH:
16348 case FMA:
16349 case STRICT_LOW_PART:
16350 case CONST_VECTOR:
16351 case CONST_FIXED:
16352 case CLRSB:
16353 case CLOBBER:
16354 case CLOBBER_HIGH:
16355 break;
16356
16357 case CONST_STRING:
16358 resolve_one_addr (&rtl);
16359 goto symref;
16360
16361 /* RTL sequences inside PARALLEL record a series of DWARF operations for
16362 the expression. An UNSPEC rtx represents a raw DWARF operation;
16363 new_loc_descr is called for it to build the operation directly.
16364 Otherwise mem_loc_descriptor is called recursively. */
16365 case PARALLEL:
16366 {
16367 int index = 0;
16368 dw_loc_descr_ref exp_result = NULL;
16369
16370 for (; index < XVECLEN (rtl, 0); index++)
16371 {
16372 rtx elem = XVECEXP (rtl, 0, index);
16373 if (GET_CODE (elem) == UNSPEC)
16374 {
16375 /* Each DWARF operation UNSPEC contains two operands; if
16376 one operand is not used for the operation, const0_rtx is
16377 passed. */
16378 gcc_assert (XVECLEN (elem, 0) == 2);
16379
16380 HOST_WIDE_INT dw_op = XINT (elem, 1);
16381 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
16382 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
16383 exp_result
16384 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
16385 oprnd2);
16386 }
16387 else
16388 exp_result
16389 = mem_loc_descriptor (elem, mode, mem_mode,
16390 VAR_INIT_STATUS_INITIALIZED);
16391
16392 if (!mem_loc_result)
16393 mem_loc_result = exp_result;
16394 else
16395 add_loc_descr (&mem_loc_result, exp_result);
16396 }
16397
16398 break;
16399 }
16400
16401 default:
16402 if (flag_checking)
16403 {
16404 print_rtl (stderr, rtl);
16405 gcc_unreachable ();
16406 }
16407 break;
16408 }
16409
16410 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16411 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16412
16413 return mem_loc_result;
16414 }
16415
16416 /* Return a descriptor that describes the concatenation of two locations.
16417 This is typically a complex variable. */
16418
16419 static dw_loc_descr_ref
16420 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
16421 {
16422 /* At present we only track constant-sized pieces. */
16423 unsigned int size0, size1;
16424 if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (&size0)
16425 || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (&size1))
16426 return 0;
16427
16428 dw_loc_descr_ref cc_loc_result = NULL;
16429 dw_loc_descr_ref x0_ref
16430 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16431 dw_loc_descr_ref x1_ref
16432 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16433
16434 if (x0_ref == 0 || x1_ref == 0)
16435 return 0;
16436
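/* Compose the two locations: each one is followed by a DW_OP_piece giving
   the number of bytes it contributes to the value.  */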
16437 cc_loc_result = x0_ref;
16438 add_loc_descr_op_piece (&cc_loc_result, size0);
16439
16440 add_loc_descr (&cc_loc_result, x1_ref);
16441 add_loc_descr_op_piece (&cc_loc_result, size1);
16442
16443 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
16444 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16445
16446 return cc_loc_result;
16447 }
16448
16449 /* Return a descriptor that describes the concatenation of N
16450 locations. */
16451
16452 static dw_loc_descr_ref
16453 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
16454 {
16455 unsigned int i;
16456 dw_loc_descr_ref cc_loc_result = NULL;
16457 unsigned int n = XVECLEN (concatn, 0);
16458 unsigned int size;
16459
16460 for (i = 0; i < n; ++i)
16461 {
16462 dw_loc_descr_ref ref;
16463 rtx x = XVECEXP (concatn, 0, i);
16464
16465 /* At present we only track constant-sized pieces. */
16466 if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (&size))
16467 return NULL;
16468
16469 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16470 if (ref == NULL)
16471 return NULL;
16472
16473 add_loc_descr (&cc_loc_result, ref);
16474 add_loc_descr_op_piece (&cc_loc_result, size);
16475 }
16476
16477 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16478 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16479
16480 return cc_loc_result;
16481 }
16482
16483 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
16484 for DEBUG_IMPLICIT_PTR RTL. */
16485
16486 static dw_loc_descr_ref
16487 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
16488 {
16489 dw_loc_descr_ref ret;
16490 dw_die_ref ref;
16491
16492 if (dwarf_strict && dwarf_version < 5)
16493 return NULL;
16494 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
16495 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
16496 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
16497 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
16498 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
16499 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
16500 if (ref)
16501 {
16502 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16503 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
16504 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
16505 }
16506 else
16507 {
16508 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
16509 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
16510 }
16511 return ret;
16512 }
16513
16514 /* Output a proper Dwarf location descriptor for a variable or parameter
16515 which is either allocated in a register or in a memory location. For a
16516 register, we just generate an OP_REG and the register number. For a
16517 memory location we provide a Dwarf postfix expression describing how to
16518 generate the (dynamic) address of the object onto the address stack.
16519
16520 MODE is the mode of the decl if this loc_descriptor is going to be used in
16521 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
16522 allowed, VOIDmode otherwise.
16523
16524 If we don't know how to describe it, return 0. */
16525
16526 static dw_loc_descr_ref
16527 loc_descriptor (rtx rtl, machine_mode mode,
16528 enum var_init_status initialized)
16529 {
16530 dw_loc_descr_ref loc_result = NULL;
16531 scalar_int_mode int_mode;
16532
16533 switch (GET_CODE (rtl))
16534 {
16535 case SUBREG:
16536 /* The case of a subreg may arise when we have a local (register)
16537 variable or a formal (register) parameter which doesn't quite fill
16538 up an entire register. For now, just assume that it is
16539 legitimate to make the Dwarf info refer to the whole register which
16540 contains the given subreg. */
16541 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
16542 loc_result = loc_descriptor (SUBREG_REG (rtl),
16543 GET_MODE (SUBREG_REG (rtl)), initialized);
16544 else
16545 goto do_default;
16546 break;
16547
16548 case REG:
16549 loc_result = reg_loc_descriptor (rtl, initialized);
16550 break;
16551
16552 case MEM:
16553 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16554 GET_MODE (rtl), initialized);
16555 if (loc_result == NULL)
16556 loc_result = tls_mem_loc_descriptor (rtl);
16557 if (loc_result == NULL)
16558 {
16559 rtx new_rtl = avoid_constant_pool_reference (rtl);
16560 if (new_rtl != rtl)
16561 loc_result = loc_descriptor (new_rtl, mode, initialized);
16562 }
16563 break;
16564
16565 case CONCAT:
16566 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
16567 initialized);
16568 break;
16569
16570 case CONCATN:
16571 loc_result = concatn_loc_descriptor (rtl, initialized);
16572 break;
16573
16574 case VAR_LOCATION:
16575 /* Single part. */
16576 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
16577 {
16578 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
16579 if (GET_CODE (loc) == EXPR_LIST)
16580 loc = XEXP (loc, 0);
16581 loc_result = loc_descriptor (loc, mode, initialized);
16582 break;
16583 }
16584
16585 rtl = XEXP (rtl, 1);
16586 /* FALLTHRU */
16587
16588 case PARALLEL:
16589 {
16590 rtvec par_elems = XVEC (rtl, 0);
16591 int num_elem = GET_NUM_ELEM (par_elems);
16592 machine_mode mode;
16593 int i, size;
16594
16595 /* Create the first one, so we have something to add to. */
16596 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
16597 VOIDmode, initialized);
16598 if (loc_result == NULL)
16599 return NULL;
16600 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
16601 /* At present we only track constant-sized pieces. */
16602 if (!GET_MODE_SIZE (mode).is_constant (&size))
16603 return NULL;
16604 add_loc_descr_op_piece (&loc_result, size);
16605 for (i = 1; i < num_elem; i++)
16606 {
16607 dw_loc_descr_ref temp;
16608
16609 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
16610 VOIDmode, initialized);
16611 if (temp == NULL)
16612 return NULL;
16613 add_loc_descr (&loc_result, temp);
16614 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
16615 /* At present we only track constant-sized pieces. */
16616 if (!GET_MODE_SIZE (mode).is_constant (&size))
16617 return NULL;
16618 add_loc_descr_op_piece (&loc_result, size);
16619 }
16620 }
16621 break;
16622
16623 case CONST_INT:
16624 if (mode != VOIDmode && mode != BLKmode)
16625 {
16626 int_mode = as_a <scalar_int_mode> (mode);
16627 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
16628 INTVAL (rtl));
16629 }
16630 break;
16631
16632 case CONST_DOUBLE:
16633 if (mode == VOIDmode)
16634 mode = GET_MODE (rtl);
16635
16636 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16637 {
16638 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16639
16640 /* Note that a CONST_DOUBLE rtx could represent either an integer
16641 or a floating-point constant. A CONST_DOUBLE is used whenever
16642 the constant requires more than one word in order to be
16643 adequately represented. We output CONST_DOUBLEs as blocks. */
16644 scalar_mode smode = as_a <scalar_mode> (mode);
16645 loc_result = new_loc_descr (DW_OP_implicit_value,
16646 GET_MODE_SIZE (smode), 0);
16647 #if TARGET_SUPPORTS_WIDE_INT == 0
16648 if (!SCALAR_FLOAT_MODE_P (smode))
16649 {
16650 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
16651 loc_result->dw_loc_oprnd2.v.val_double
16652 = rtx_to_double_int (rtl);
16653 }
16654 else
16655 #endif
16656 {
16657 unsigned int length = GET_MODE_SIZE (smode);
16658 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16659
16660 insert_float (rtl, array);
16661 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16662 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16663 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16664 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16665 }
16666 }
16667 break;
16668
16669 case CONST_WIDE_INT:
16670 if (mode == VOIDmode)
16671 mode = GET_MODE (rtl);
16672
16673 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16674 {
16675 int_mode = as_a <scalar_int_mode> (mode);
16676 loc_result = new_loc_descr (DW_OP_implicit_value,
16677 GET_MODE_SIZE (int_mode), 0);
16678 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16679 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16680 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16681 }
16682 break;
16683
16684 case CONST_VECTOR:
16685 if (mode == VOIDmode)
16686 mode = GET_MODE (rtl);
16687
16688 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16689 {
16690 unsigned int length;
16691 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
16692 return NULL;
16693
16694 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16695 unsigned char *array
16696 = ggc_vec_alloc<unsigned char> (length * elt_size);
16697 unsigned int i;
16698 unsigned char *p;
16699 machine_mode imode = GET_MODE_INNER (mode);
16700
16701 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16702 switch (GET_MODE_CLASS (mode))
16703 {
16704 case MODE_VECTOR_INT:
16705 for (i = 0, p = array; i < length; i++, p += elt_size)
16706 {
16707 rtx elt = CONST_VECTOR_ELT (rtl, i);
16708 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16709 }
16710 break;
16711
16712 case MODE_VECTOR_FLOAT:
16713 for (i = 0, p = array; i < length; i++, p += elt_size)
16714 {
16715 rtx elt = CONST_VECTOR_ELT (rtl, i);
16716 insert_float (elt, p);
16717 }
16718 break;
16719
16720 default:
16721 gcc_unreachable ();
16722 }
16723
16724 loc_result = new_loc_descr (DW_OP_implicit_value,
16725 length * elt_size, 0);
16726 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16727 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16728 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16729 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16730 }
16731 break;
16732
16733 case CONST:
16734 if (mode == VOIDmode
16735 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16736 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16737 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16738 {
16739 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16740 break;
16741 }
16742 /* FALLTHROUGH */
16743 case SYMBOL_REF:
16744 if (!const_ok_for_output (rtl))
16745 break;
16746 /* FALLTHROUGH */
16747 case LABEL_REF:
16748 if (is_a <scalar_int_mode> (mode, &int_mode)
16749 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16750 && (dwarf_version >= 4 || !dwarf_strict))
16751 {
16752 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16753 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16754 vec_safe_push (used_rtx_array, rtl);
16755 }
16756 break;
16757
16758 case DEBUG_IMPLICIT_PTR:
16759 loc_result = implicit_ptr_descriptor (rtl, 0);
16760 break;
16761
16762 case PLUS:
16763 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16764 && CONST_INT_P (XEXP (rtl, 1)))
16765 {
16766 loc_result
16767 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16768 break;
16769 }
16770 /* FALLTHRU */
16771 do_default:
16772 default:
16773 if ((is_a <scalar_int_mode> (mode, &int_mode)
16774 && GET_MODE (rtl) == int_mode
16775 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16776 && dwarf_version >= 4)
16777 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16778 {
16779 /* Value expression. */
16780 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16781 if (loc_result)
16782 add_loc_descr (&loc_result,
16783 new_loc_descr (DW_OP_stack_value, 0, 0));
16784 }
16785 break;
16786 }
16787
16788 return loc_result;
16789 }
16790
16791 /* We need to figure out what section we should use as the base for the
16792 address ranges where a given location is valid.
16793 1. If this particular DECL has a section associated with it, use that.
16794 2. If this function has a section associated with it, use that.
16795 3. Otherwise, use the text section.
16796 XXX: If you split a variable across multiple sections, we won't notice. */
16797
16798 static const char *
16799 secname_for_decl (const_tree decl)
16800 {
16801 const char *secname;
16802
16803 if (VAR_OR_FUNCTION_DECL_P (decl)
16804 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16805 && DECL_SECTION_NAME (decl))
16806 secname = DECL_SECTION_NAME (decl);
16807 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16808 {
16809 if (in_cold_section_p)
16810 {
16811 section *sec = current_function_section ();
16812 if (sec->common.flags & SECTION_NAMED)
16813 return sec->named.name;
16814 }
16815 secname = DECL_SECTION_NAME (current_function_decl);
16816 }
16817 else if (cfun && in_cold_section_p)
16818 secname = crtl->subsections.cold_section_label;
16819 else
16820 secname = text_section_label;
16821
16822 return secname;
16823 }
16824
16825 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16826
16827 static bool
16828 decl_by_reference_p (tree decl)
16829 {
16830 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16831 || VAR_P (decl))
16832 && DECL_BY_REFERENCE (decl));
16833 }
16834
16835 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16836 for VARLOC. */
16837
16838 static dw_loc_descr_ref
16839 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16840 enum var_init_status initialized)
16841 {
16842 int have_address = 0;
16843 dw_loc_descr_ref descr;
16844 machine_mode mode;
16845
16846 if (want_address != 2)
16847 {
16848 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
16849 /* Single part. */
16850 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16851 {
16852 varloc = PAT_VAR_LOCATION_LOC (varloc);
16853 if (GET_CODE (varloc) == EXPR_LIST)
16854 varloc = XEXP (varloc, 0);
16855 mode = GET_MODE (varloc);
16856 if (MEM_P (varloc))
16857 {
16858 rtx addr = XEXP (varloc, 0);
16859 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
16860 mode, initialized);
16861 if (descr)
16862 have_address = 1;
16863 else
16864 {
16865 rtx x = avoid_constant_pool_reference (varloc);
16866 if (x != varloc)
16867 descr = mem_loc_descriptor (x, mode, VOIDmode,
16868 initialized);
16869 }
16870 }
16871 else
16872 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
16873 }
16874 else
16875 return 0;
16876 }
16877 else
16878 {
16879 if (GET_CODE (varloc) == VAR_LOCATION)
16880 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
16881 else
16882 mode = DECL_MODE (loc);
16883 descr = loc_descriptor (varloc, mode, initialized);
16884 have_address = 1;
16885 }
16886
16887 if (!descr)
16888 return 0;
16889
16890 if (want_address == 2 && !have_address
16891 && (dwarf_version >= 4 || !dwarf_strict))
16892 {
16893 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16894 {
16895 expansion_failed (loc, NULL_RTX,
16896 "DWARF address size mismatch");
16897 return 0;
16898 }
16899 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
16900 have_address = 1;
16901 }
16902 /* Show if we can't fill the request for an address. */
16903 if (want_address && !have_address)
16904 {
16905 expansion_failed (loc, NULL_RTX,
16906 "Want address and only have value");
16907 return 0;
16908 }
16909
16910 /* If we've got an address and don't want one, dereference. */
16911 if (!want_address && have_address)
16912 {
16913 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16914 enum dwarf_location_atom op;
16915
16916 if (size > DWARF2_ADDR_SIZE || size == -1)
16917 {
16918 expansion_failed (loc, NULL_RTX,
16919 "DWARF address size mismatch");
16920 return 0;
16921 }
16922 else if (size == DWARF2_ADDR_SIZE)
16923 op = DW_OP_deref;
16924 else
16925 op = DW_OP_deref_size;
16926
16927 add_loc_descr (&descr, new_loc_descr (op, size, 0));
16928 }
16929
16930 return descr;
16931 }
16932
16933 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
16934 if it is not possible. */
16935
16936 static dw_loc_descr_ref
16937 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
16938 {
16939 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
16940 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
16941 else if (dwarf_version >= 3 || !dwarf_strict)
16942 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
16943 else
16944 return NULL;
16945 }
16946
16947 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16948 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
16949
16950 static dw_loc_descr_ref
16951 dw_sra_loc_expr (tree decl, rtx loc)
16952 {
16953 rtx p;
16954 unsigned HOST_WIDE_INT padsize = 0;
16955 dw_loc_descr_ref descr, *descr_tail;
16956 unsigned HOST_WIDE_INT decl_size;
16957 rtx varloc;
16958 enum var_init_status initialized;
16959
16960 if (DECL_SIZE (decl) == NULL
16961 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
16962 return NULL;
16963
16964 decl_size = tree_to_uhwi (DECL_SIZE (decl));
16965 descr = NULL;
16966 descr_tail = &descr;
16967
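/* Walk the EXPR_LIST of pieces.  Each piece either contributes its own
   location followed by a DW_OP_piece / DW_OP_bit_piece of its size, or, if
   it has no usable location, its size is accumulated in PADSIZE and later
   emitted as a piece with an empty expression (bits optimized out).  */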
16968 for (p = loc; p; p = XEXP (p, 1))
16969 {
16970 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
16971 rtx loc_note = *decl_piece_varloc_ptr (p);
16972 dw_loc_descr_ref cur_descr;
16973 dw_loc_descr_ref *tail, last = NULL;
16974 unsigned HOST_WIDE_INT opsize = 0;
16975
16976 if (loc_note == NULL_RTX
16977 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
16978 {
16979 padsize += bitsize;
16980 continue;
16981 }
16982 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
16983 varloc = NOTE_VAR_LOCATION (loc_note);
16984 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
16985 if (cur_descr == NULL)
16986 {
16987 padsize += bitsize;
16988 continue;
16989 }
16990
16991 /* Check that cur_descr either doesn't use
16992 DW_OP_*piece operations, or their sum is equal
16993 to bitsize. Otherwise we can't embed it. */
16994 for (tail = &cur_descr; *tail != NULL;
16995 tail = &(*tail)->dw_loc_next)
16996 if ((*tail)->dw_loc_opc == DW_OP_piece)
16997 {
16998 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
16999 * BITS_PER_UNIT;
17000 last = *tail;
17001 }
17002 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
17003 {
17004 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
17005 last = *tail;
17006 }
17007
17008 if (last != NULL && opsize != bitsize)
17009 {
17010 padsize += bitsize;
17011 /* Discard the current piece of the descriptor and release any
17012 addr_table entries it uses. */
17013 remove_loc_list_addr_table_entries (cur_descr);
17014 continue;
17015 }
17016
17017 /* If there is a hole, add a DW_OP_*piece after an empty DWARF
17018 expression, which means that those bits are optimized out. */
17019 if (padsize)
17020 {
17021 if (padsize > decl_size)
17022 {
17023 remove_loc_list_addr_table_entries (cur_descr);
17024 goto discard_descr;
17025 }
17026 decl_size -= padsize;
17027 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
17028 if (*descr_tail == NULL)
17029 {
17030 remove_loc_list_addr_table_entries (cur_descr);
17031 goto discard_descr;
17032 }
17033 descr_tail = &(*descr_tail)->dw_loc_next;
17034 padsize = 0;
17035 }
17036 *descr_tail = cur_descr;
17037 descr_tail = tail;
17038 if (bitsize > decl_size)
17039 goto discard_descr;
17040 decl_size -= bitsize;
17041 if (last == NULL)
17042 {
17043 HOST_WIDE_INT offset = 0;
17044 if (GET_CODE (varloc) == VAR_LOCATION
17045 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
17046 {
17047 varloc = PAT_VAR_LOCATION_LOC (varloc);
17048 if (GET_CODE (varloc) == EXPR_LIST)
17049 varloc = XEXP (varloc, 0);
17050 }
17051 do
17052 {
17053 if (GET_CODE (varloc) == CONST
17054 || GET_CODE (varloc) == SIGN_EXTEND
17055 || GET_CODE (varloc) == ZERO_EXTEND)
17056 varloc = XEXP (varloc, 0);
17057 else if (GET_CODE (varloc) == SUBREG)
17058 varloc = SUBREG_REG (varloc);
17059 else
17060 break;
17061 }
17062 while (1);
17063 /* The DW_OP_bit_piece offset should be zero for register
17064 or implicit location descriptions and empty location
17065 descriptions, but for memory addresses it needs a big-endian
17066 adjustment. */
17067 if (MEM_P (varloc))
17068 {
17069 unsigned HOST_WIDE_INT memsize;
17070 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
17071 goto discard_descr;
17072 memsize *= BITS_PER_UNIT;
17073 if (memsize != bitsize)
17074 {
17075 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
17076 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
17077 goto discard_descr;
17078 if (memsize < bitsize)
17079 goto discard_descr;
17080 if (BITS_BIG_ENDIAN)
17081 offset = memsize - bitsize;
17082 }
17083 }
17084
17085 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
17086 if (*descr_tail == NULL)
17087 goto discard_descr;
17088 descr_tail = &(*descr_tail)->dw_loc_next;
17089 }
17090 }
17091
17092 /* If there were any non-empty expressions, add padding until the end of
17093 the decl. */
17094 if (descr != NULL && decl_size != 0)
17095 {
17096 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
17097 if (*descr_tail == NULL)
17098 goto discard_descr;
17099 }
17100 return descr;
17101
17102 discard_descr:
17103 /* Discard the descriptor and release any addr_table entries it uses. */
17104 remove_loc_list_addr_table_entries (descr);
17105 return NULL;
17106 }
17107
17108 /* Return the dwarf representation of the location list LOC_LIST of
17109 DECL. WANT_ADDRESS has the same meaning as in loc_list_from_tree
17110 function. */
17111
17112 static dw_loc_list_ref
17113 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
17114 {
17115 const char *endname, *secname;
17116 var_loc_view endview;
17117 rtx varloc;
17118 enum var_init_status initialized;
17119 struct var_loc_node *node;
17120 dw_loc_descr_ref descr;
17121 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
17122 dw_loc_list_ref list = NULL;
17123 dw_loc_list_ref *listp = &list;
17124
17125 /* Now that we know what section we are using for a base,
17126 actually construct the list of locations.
17127 The first location information is what is passed to the
17128 function that creates the location list, and the remaining
17129 locations just get added on to that list.
17130 Note that we only know the start address for a location
17131 (i.e. location changes), so to build the range, we use
17132 the range [current location start, next location start].
17133 This means we have to special case the last node, and generate
17134 a range of [last location start, end of function label]. */
17135
17136 if (cfun && crtl->has_bb_partition)
17137 {
17138 bool save_in_cold_section_p = in_cold_section_p;
17139 in_cold_section_p = first_function_block_is_cold;
17140 if (loc_list->last_before_switch == NULL)
17141 in_cold_section_p = !in_cold_section_p;
17142 secname = secname_for_decl (decl);
17143 in_cold_section_p = save_in_cold_section_p;
17144 }
17145 else
17146 secname = secname_for_decl (decl);
17147
17148 for (node = loc_list->first; node; node = node->next)
17149 {
17150 bool range_across_switch = false;
17151 if (GET_CODE (node->loc) == EXPR_LIST
17152 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
17153 {
17154 if (GET_CODE (node->loc) == EXPR_LIST)
17155 {
17156 descr = NULL;
17157 /* This requires DW_OP_{,bit_}piece, which is not usable
17158 inside DWARF expressions. */
17159 if (want_address == 2)
17160 descr = dw_sra_loc_expr (decl, node->loc);
17161 }
17162 else
17163 {
17164 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17165 varloc = NOTE_VAR_LOCATION (node->loc);
17166 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
17167 }
17168 if (descr)
17169 {
17170 /* If section switch happens in between node->label
17171 and node->next->label (or end of function) and
17172 we can't emit it as a single entry list,
17173 emit two ranges, first one ending at the end
17174 of first partition and second one starting at the
17175 beginning of second partition. */
17176 if (node == loc_list->last_before_switch
17177 && (node != loc_list->first || loc_list->first->next
17178 /* If we are to emit a view number, we will emit
17179 a loclist rather than a single location
17180 expression for the entire function (see
17181 loc_list_has_views), so we have to split the
17182 range that straddles across partitions. */
17183 || !ZERO_VIEW_P (node->view))
17184 && current_function_decl)
17185 {
17186 endname = cfun->fde->dw_fde_end;
17187 endview = 0;
17188 range_across_switch = true;
17189 }
17190 /* The variable has a location between NODE->LABEL and
17191 NODE->NEXT->LABEL. */
17192 else if (node->next)
17193 endname = node->next->label, endview = node->next->view;
17194 /* If the variable has a location at the last label
17195 it keeps its location until the end of function. */
17196 else if (!current_function_decl)
17197 endname = text_end_label, endview = 0;
17198 else
17199 {
17200 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
17201 current_function_funcdef_no);
17202 endname = ggc_strdup (label_id);
17203 endview = 0;
17204 }
17205
17206 *listp = new_loc_list (descr, node->label, node->view,
17207 endname, endview, secname);
17208 if (TREE_CODE (decl) == PARM_DECL
17209 && node == loc_list->first
17210 && NOTE_P (node->loc)
17211 && strcmp (node->label, endname) == 0)
17212 (*listp)->force = true;
17213 listp = &(*listp)->dw_loc_next;
17214 }
17215 }
17216
17217 if (cfun
17218 && crtl->has_bb_partition
17219 && node == loc_list->last_before_switch)
17220 {
17221 bool save_in_cold_section_p = in_cold_section_p;
17222 in_cold_section_p = !first_function_block_is_cold;
17223 secname = secname_for_decl (decl);
17224 in_cold_section_p = save_in_cold_section_p;
17225 }
17226
17227 if (range_across_switch)
17228 {
17229 if (GET_CODE (node->loc) == EXPR_LIST)
17230 descr = dw_sra_loc_expr (decl, node->loc);
17231 else
17232 {
17233 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17234 varloc = NOTE_VAR_LOCATION (node->loc);
17235 descr = dw_loc_list_1 (decl, varloc, want_address,
17236 initialized);
17237 }
17238 gcc_assert (descr);
17239 /* The variable has a location between NODE->LABEL and
17240 NODE->NEXT->LABEL. */
17241 if (node->next)
17242 endname = node->next->label, endview = node->next->view;
17243 else
17244 endname = cfun->fde->dw_fde_second_end, endview = 0;
17245 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin, 0,
17246 endname, endview, secname);
17247 listp = &(*listp)->dw_loc_next;
17248 }
17249 }
17250
17251 /* Try to avoid the overhead of a location list by emitting a location
17252 expression instead, but only if we didn't have more than one
17253 location entry in the first place. If some entries were not
17254 representable, we don't want to pretend that a single entry that was
17255 representable applies to the entire scope in which the variable is
17256 available. */
17257 if (list && loc_list->first->next)
17258 gen_llsym (list);
17259 else
17260 maybe_gen_llsym (list);
17261
17262 return list;
17263 }
17264
17265 /* Return true if the loc_list has only a single element and thus can be
17266 represented as a location description. */
17267
17268 static bool
17269 single_element_loc_list_p (dw_loc_list_ref list)
17270 {
17271 gcc_assert (!list->dw_loc_next || list->ll_symbol);
17272 return !list->ll_symbol;
17273 }
17274
17275 /* Duplicate a single element of a location list. */
17276
17277 static inline dw_loc_descr_ref
17278 copy_loc_descr (dw_loc_descr_ref ref)
17279 {
17280 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
17281 memcpy (copy, ref, sizeof (dw_loc_descr_node));
17282 return copy;
17283 }
17284
17285 /* To each location in list LIST append loc descr REF. */
17286
17287 static void
17288 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17289 {
17290 dw_loc_descr_ref copy;
17291 add_loc_descr (&list->expr, ref);
17292 list = list->dw_loc_next;
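/* The first location receives REF itself; every remaining location gets a
   deep copy of the whole REF chain so that no descriptors are shared
   between the lists.  */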
17293 while (list)
17294 {
17295 copy = copy_loc_descr (ref);
17296 add_loc_descr (&list->expr, copy);
17297 while (copy->dw_loc_next)
17298 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17299 list = list->dw_loc_next;
17300 }
17301 }
17302
17303 /* To each location in list LIST prepend loc descr REF. */
17304
17305 static void
17306 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17307 {
17308 dw_loc_descr_ref copy;
17309 dw_loc_descr_ref ref_end = list->expr;
17310 add_loc_descr (&ref, list->expr);
17311 list->expr = ref;
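/* REF now heads a chain that ends where the first location's original
   expression (REF_END) begins.  For each remaining location, copy that
   prefix up to REF_END and splice the location's own expression after the
   copy.  */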
17312 list = list->dw_loc_next;
17313 while (list)
17314 {
17315 dw_loc_descr_ref end = list->expr;
17316 list->expr = copy = copy_loc_descr (ref);
17317 while (copy->dw_loc_next != ref_end)
17318 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17319 copy->dw_loc_next = end;
17320 list = list->dw_loc_next;
17321 }
17322 }
17323
17324 /* Given two lists RET and LIST
17325 produce location list that is result of adding expression in LIST
17326 to expression in RET on each position in program.
17327 Might be destructive on both RET and LIST.
17328
17329 TODO: We handle only simple cases of RET or LIST having at most one
17330 element. General case would involve sorting the lists in program order
17331 and merging them that will need some additional work.
17332 Adding that will improve quality of debug info especially for SRA-ed
17333 structures. */
17334
17335 static void
17336 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
17337 {
17338 if (!list)
17339 return;
17340 if (!*ret)
17341 {
17342 *ret = list;
17343 return;
17344 }
17345 if (!list->dw_loc_next)
17346 {
17347 add_loc_descr_to_each (*ret, list->expr);
17348 return;
17349 }
17350 if (!(*ret)->dw_loc_next)
17351 {
17352 prepend_loc_descr_to_each (list, (*ret)->expr);
17353 *ret = list;
17354 return;
17355 }
17356 expansion_failed (NULL_TREE, NULL_RTX,
17357 "Don't know how to merge two non-trivial"
17358 " location lists.\n");
17359 *ret = NULL;
17360 return;
17361 }
17362
17363 /* LOC is a constant expression. Try our luck: look it up in the constant
17364 pool and return a loc_descr for its address. */
17365
17366 static dw_loc_descr_ref
17367 cst_pool_loc_descr (tree loc)
17368 {
17369 /* Get an RTL for this, if something has been emitted. */
17370 rtx rtl = lookup_constant_def (loc);
17371
17372 if (!rtl || !MEM_P (rtl))
17373 {
17374 gcc_assert (!rtl);
17375 return 0;
17376 }
17377 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
17378
17379 /* TODO: We might get more coverage if we were actually delaying expansion
17380 of all expressions until the end of compilation, when constant pools are
17381 fully populated. */
17382 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
17383 {
17384 expansion_failed (loc, NULL_RTX,
17385 "CST value in contant pool but not marked.");
17386 return 0;
17387 }
17388 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
17389 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
17390 }
17391
17392 /* Return a dw_loc_list representing the address of addr_expr LOC
17393 by looking for an inner INDIRECT_REF expression and turning
17394 it into simple arithmetic.
17395
17396 See loc_list_from_tree for the meaning of CONTEXT. */
17397
17398 static dw_loc_list_ref
17399 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
17400 loc_descr_context *context)
17401 {
17402 tree obj, offset;
17403 poly_int64 bitsize, bitpos, bytepos;
17404 machine_mode mode;
17405 int unsignedp, reversep, volatilep = 0;
17406 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17407
17408 obj = get_inner_reference (TREE_OPERAND (loc, 0),
17409 &bitsize, &bitpos, &offset, &mode,
17410 &unsignedp, &reversep, &volatilep);
17411 STRIP_NOPS (obj);
17412 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
17413 {
17414 expansion_failed (loc, NULL_RTX, "bitfield access");
17415 return 0;
17416 }
17417 if (!INDIRECT_REF_P (obj))
17418 {
17419 expansion_failed (obj,
17420 NULL_RTX, "no indirect ref in inner reference");
17421 return 0;
17422 }
17423 if (!offset && known_eq (bitpos, 0))
17424 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
17425 context);
17426 else if (toplev
17427 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
17428 && (dwarf_version >= 4 || !dwarf_strict))
17429 {
17430 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
17431 if (!list_ret)
17432 return 0;
17433 if (offset)
17434 {
17435 /* Variable offset. */
17436 list_ret1 = loc_list_from_tree (offset, 0, context);
17437 if (list_ret1 == 0)
17438 return 0;
17439 add_loc_list (&list_ret, list_ret1);
17440 if (!list_ret)
17441 return 0;
17442 add_loc_descr_to_each (list_ret,
17443 new_loc_descr (DW_OP_plus, 0, 0));
17444 }
17445 HOST_WIDE_INT value;
17446 if (bytepos.is_constant (&value) && value > 0)
17447 add_loc_descr_to_each (list_ret,
17448 new_loc_descr (DW_OP_plus_uconst, value, 0));
17449 else if (maybe_ne (bytepos, 0))
17450 loc_list_plus_const (list_ret, bytepos);
17451 add_loc_descr_to_each (list_ret,
17452 new_loc_descr (DW_OP_stack_value, 0, 0));
17453 }
17454 return list_ret;
17455 }
17456
17457 /* Set LOC to the next operation that is not a DW_OP_nop operation. If
17458 all operations from LOC are nops, move to the last one. Insert in NOPS all
17459 operations that are skipped. */
17460
17461 static void
17462 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
17463 hash_set<dw_loc_descr_ref> &nops)
17464 {
17465 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
17466 {
17467 nops.add (loc);
17468 loc = loc->dw_loc_next;
17469 }
17470 }
17471
17472 /* Helper for loc_descr_without_nops: free the location description operation
17473 P. */
17474
17475 bool
17476 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
17477 {
17478 ggc_free (loc);
17479 return true;
17480 }
17481
17482 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
17483 finishes LOC. */
17484
17485 static void
17486 loc_descr_without_nops (dw_loc_descr_ref &loc)
17487 {
17488 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
17489 return;
17490
17491 /* Set of all DW_OP_nop operations we remove. */
17492 hash_set<dw_loc_descr_ref> nops;
17493
17494 /* First, strip all prefix NOP operations in order to keep the head of the
17495 operations list. */
17496 loc_descr_to_next_no_nop (loc, nops);
17497
17498 for (dw_loc_descr_ref cur = loc; cur != NULL;)
17499 {
17500 /* For control flow operations: strip "prefix" nops in destination
17501 labels. */
17502 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
17503 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
17504 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
17505 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
17506
17507 /* Do the same for the operations that follow, then move to the next
17508 iteration. */
17509 if (cur->dw_loc_next != NULL)
17510 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
17511 cur = cur->dw_loc_next;
17512 }
17513
17514 nops.traverse<void *, free_loc_descr> (NULL);
17515 }
17516
17517
17518 struct dwarf_procedure_info;
17519
17520 /* Helper structure for location descriptions generation. */
17521 struct loc_descr_context
17522 {
17523 /* The type that is implicitly referenced by DW_OP_push_object_address, or
17524 NULL_TREE if DW_OP_push_object_address is invalid for this location
17525 description. This is used when processing PLACEHOLDER_EXPR nodes. */
17526 tree context_type;
17527 /* The ..._DECL node that should be translated as a
17528 DW_OP_push_object_address operation. */
17529 tree base_decl;
17530 /* Information about the DWARF procedure we are currently generating. NULL if
17531 we are not generating a DWARF procedure. */
17532 struct dwarf_procedure_info *dpi;
17533 /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
17534 by consumer. Used for DW_TAG_generic_subrange attributes. */
17535 bool placeholder_arg;
17536 /* True if PLACEHOLDER_EXPR has been seen. */
17537 bool placeholder_seen;
17538 };
17539
17540 /* DWARF procedures generation
17541
17542 DWARF expressions (aka. location descriptions) are used to encode varying
17543 quantities such as sizes or offsets. Such computations can have redundant parts
17544 that can be factorized in order to reduce the size of the output debug
17545 information. This is the whole point of DWARF procedures.
17546
17547 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
17548 already factorized into functions ("size functions") in order to handle very
17549 big and complex types. Such functions are quite simple: they have integral
17550 arguments, they return an integral result and their body contains only a
17551 return statement with arithmetic expressions. This is the only kind of
17552 function we are interested in translating into DWARF procedures, here.
17553
17554 DWARF expressions and DWARF procedures are executed using a stack, so we have
17555 to define some calling convention for them to interact. Let's say that:
17556
17557 - Before calling a DWARF procedure, DWARF expressions must push on the stack
17558 all arguments in reverse order (right-to-left) so that when the DWARF
17559 procedure execution starts, the first argument is the top of the stack.
17560
17561 - Then, when returning, the DWARF procedure must have consumed all arguments
17562 on the stack, must have pushed the result and touched nothing else.
17563
17564 - Each integral argument and the result are integral values that can be
17565 held in a single stack slot.
17566
17567 - We call "frame offset" the number of stack slots that are "under DWARF
17568 procedure control": it includes the arguments slots, the temporaries and
17569 the result slot. Thus, it is equal to the number of arguments when the
17570 procedure execution starts and must be equal to one (the result) when it
17571 returns. */
17572
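/* A minimal sketch of this convention (illustration only; the opcodes the
   compiler actually emits may differ).  Take a hypothetical size function

     static unsigned long sz (unsigned long a, unsigned long b)
     { return a * b + 8; }

   The caller pushes B, then A, so A is on top of the stack when the
   procedure starts and the frame offset is 2.  One plausible translation
   of the body is

     DW_OP_dup            (copy of A)
     DW_OP_pick 2         (copy of B)
     DW_OP_mul
     DW_OP_plus_uconst 8
     DW_OP_swap  DW_OP_drop
     DW_OP_swap  DW_OP_drop

   where the two swap/drop pairs are the epilogue that pops both incoming
   arguments, leaving only the result and a frame offset of 1.  */
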
17573 /* Helper structure used when generating operations for a DWARF procedure. */
17574 struct dwarf_procedure_info
17575 {
17576 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
17577 currently translated. */
17578 tree fndecl;
17579 /* The number of arguments FNDECL takes. */
17580 unsigned args_count;
17581 };
17582
17583 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
17584 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
17585 equate it to this DIE. */
17586
17587 static dw_die_ref
17588 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
17589 dw_die_ref parent_die)
17590 {
17591 dw_die_ref dwarf_proc_die;
17592
17593 if ((dwarf_version < 3 && dwarf_strict)
17594 || location == NULL)
17595 return NULL;
17596
17597 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
17598 if (fndecl)
17599 equate_decl_number_to_die (fndecl, dwarf_proc_die);
17600 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
17601 return dwarf_proc_die;
17602 }
17603
17604 /* Return whether TYPE is a supported type as a DWARF procedure argument
17605 type or return type (we handle only scalar types and pointer types that
17606 aren't wider than the DWARF expression evaluation stack). */
17607
17608 static bool
17609 is_handled_procedure_type (tree type)
17610 {
17611 return ((INTEGRAL_TYPE_P (type)
17612 || TREE_CODE (type) == OFFSET_TYPE
17613 || TREE_CODE (type) == POINTER_TYPE)
17614 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
17615 }
17616
17617 /* Helper for resolve_args_picking: do the same but stop when coming across
17618 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
17619 offset *before* evaluating the corresponding operation. */
17620
17621 static bool
17622 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17623 struct dwarf_procedure_info *dpi,
17624 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
17625 {
17626 /* The "frame_offset" identifier is already used to name a macro... */
17627 unsigned frame_offset_ = initial_frame_offset;
17628 dw_loc_descr_ref l;
17629
17630 for (l = loc; l != NULL;)
17631 {
17632 bool existed;
17633 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
17634
17635 /* If we already met this node, there is nothing to compute anymore. */
17636 if (existed)
17637 {
17638 /* Make sure that the stack size is consistent wherever the execution
17639 flow comes from. */
17640 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
17641 break;
17642 }
17643 l_frame_offset = frame_offset_;
17644
17645 /* If needed, relocate the picking offset with respect to the frame
17646 offset. */
17647 if (l->frame_offset_rel)
17648 {
17649 unsigned HOST_WIDE_INT off;
17650 switch (l->dw_loc_opc)
17651 {
17652 case DW_OP_pick:
17653 off = l->dw_loc_oprnd1.v.val_unsigned;
17654 break;
17655 case DW_OP_dup:
17656 off = 0;
17657 break;
17658 case DW_OP_over:
17659 off = 1;
17660 break;
17661 default:
17662 gcc_unreachable ();
17663 }
17664 /* frame_offset_ is the size of the current stack frame, including
17665 incoming arguments. Besides, the arguments are pushed
17666 right-to-left. Thus, in order to access the Nth argument from
17667 this operation node, the picking has to skip temporaries *plus*
17668 one stack slot per argument (0 for the first one, 1 for the second
17669 one, etc.).
17670
17671 The targeted argument number (N) is already set as the operand,
17672 and the number of temporaries can be computed with:
17673 frame_offset_ - dpi->args_count. */
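	  /* Worked example (hypothetical numbers, illustration only): with
	     dpi->args_count == 2 and frame_offset_ == 3 (two arguments plus
	     one temporary already pushed), a pick targeting the second
	     argument (N == 1) becomes off = 1 + (3 - 2) = 2, i.e.
	     DW_OP_pick 2.  */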
17674 off += frame_offset_ - dpi->args_count;
17675
17676 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
17677 if (off > 255)
17678 return false;
17679
17680 if (off == 0)
17681 {
17682 l->dw_loc_opc = DW_OP_dup;
17683 l->dw_loc_oprnd1.v.val_unsigned = 0;
17684 }
17685 else if (off == 1)
17686 {
17687 l->dw_loc_opc = DW_OP_over;
17688 l->dw_loc_oprnd1.v.val_unsigned = 0;
17689 }
17690 else
17691 {
17692 l->dw_loc_opc = DW_OP_pick;
17693 l->dw_loc_oprnd1.v.val_unsigned = off;
17694 }
17695 }
17696
17697 /* Update frame_offset according to the effect the current operation has
17698 on the stack. */
17699 switch (l->dw_loc_opc)
17700 {
17701 case DW_OP_deref:
17702 case DW_OP_swap:
17703 case DW_OP_rot:
17704 case DW_OP_abs:
17705 case DW_OP_neg:
17706 case DW_OP_not:
17707 case DW_OP_plus_uconst:
17708 case DW_OP_skip:
17709 case DW_OP_reg0:
17710 case DW_OP_reg1:
17711 case DW_OP_reg2:
17712 case DW_OP_reg3:
17713 case DW_OP_reg4:
17714 case DW_OP_reg5:
17715 case DW_OP_reg6:
17716 case DW_OP_reg7:
17717 case DW_OP_reg8:
17718 case DW_OP_reg9:
17719 case DW_OP_reg10:
17720 case DW_OP_reg11:
17721 case DW_OP_reg12:
17722 case DW_OP_reg13:
17723 case DW_OP_reg14:
17724 case DW_OP_reg15:
17725 case DW_OP_reg16:
17726 case DW_OP_reg17:
17727 case DW_OP_reg18:
17728 case DW_OP_reg19:
17729 case DW_OP_reg20:
17730 case DW_OP_reg21:
17731 case DW_OP_reg22:
17732 case DW_OP_reg23:
17733 case DW_OP_reg24:
17734 case DW_OP_reg25:
17735 case DW_OP_reg26:
17736 case DW_OP_reg27:
17737 case DW_OP_reg28:
17738 case DW_OP_reg29:
17739 case DW_OP_reg30:
17740 case DW_OP_reg31:
17741 case DW_OP_bregx:
17742 case DW_OP_piece:
17743 case DW_OP_deref_size:
17744 case DW_OP_nop:
17745 case DW_OP_bit_piece:
17746 case DW_OP_implicit_value:
17747 case DW_OP_stack_value:
17748 break;
17749
17750 case DW_OP_addr:
17751 case DW_OP_const1u:
17752 case DW_OP_const1s:
17753 case DW_OP_const2u:
17754 case DW_OP_const2s:
17755 case DW_OP_const4u:
17756 case DW_OP_const4s:
17757 case DW_OP_const8u:
17758 case DW_OP_const8s:
17759 case DW_OP_constu:
17760 case DW_OP_consts:
17761 case DW_OP_dup:
17762 case DW_OP_over:
17763 case DW_OP_pick:
17764 case DW_OP_lit0:
17765 case DW_OP_lit1:
17766 case DW_OP_lit2:
17767 case DW_OP_lit3:
17768 case DW_OP_lit4:
17769 case DW_OP_lit5:
17770 case DW_OP_lit6:
17771 case DW_OP_lit7:
17772 case DW_OP_lit8:
17773 case DW_OP_lit9:
17774 case DW_OP_lit10:
17775 case DW_OP_lit11:
17776 case DW_OP_lit12:
17777 case DW_OP_lit13:
17778 case DW_OP_lit14:
17779 case DW_OP_lit15:
17780 case DW_OP_lit16:
17781 case DW_OP_lit17:
17782 case DW_OP_lit18:
17783 case DW_OP_lit19:
17784 case DW_OP_lit20:
17785 case DW_OP_lit21:
17786 case DW_OP_lit22:
17787 case DW_OP_lit23:
17788 case DW_OP_lit24:
17789 case DW_OP_lit25:
17790 case DW_OP_lit26:
17791 case DW_OP_lit27:
17792 case DW_OP_lit28:
17793 case DW_OP_lit29:
17794 case DW_OP_lit30:
17795 case DW_OP_lit31:
17796 case DW_OP_breg0:
17797 case DW_OP_breg1:
17798 case DW_OP_breg2:
17799 case DW_OP_breg3:
17800 case DW_OP_breg4:
17801 case DW_OP_breg5:
17802 case DW_OP_breg6:
17803 case DW_OP_breg7:
17804 case DW_OP_breg8:
17805 case DW_OP_breg9:
17806 case DW_OP_breg10:
17807 case DW_OP_breg11:
17808 case DW_OP_breg12:
17809 case DW_OP_breg13:
17810 case DW_OP_breg14:
17811 case DW_OP_breg15:
17812 case DW_OP_breg16:
17813 case DW_OP_breg17:
17814 case DW_OP_breg18:
17815 case DW_OP_breg19:
17816 case DW_OP_breg20:
17817 case DW_OP_breg21:
17818 case DW_OP_breg22:
17819 case DW_OP_breg23:
17820 case DW_OP_breg24:
17821 case DW_OP_breg25:
17822 case DW_OP_breg26:
17823 case DW_OP_breg27:
17824 case DW_OP_breg28:
17825 case DW_OP_breg29:
17826 case DW_OP_breg30:
17827 case DW_OP_breg31:
17828 case DW_OP_fbreg:
17829 case DW_OP_push_object_address:
17830 case DW_OP_call_frame_cfa:
17831 case DW_OP_GNU_variable_value:
17832 ++frame_offset_;
17833 break;
17834
17835 case DW_OP_drop:
17836 case DW_OP_xderef:
17837 case DW_OP_and:
17838 case DW_OP_div:
17839 case DW_OP_minus:
17840 case DW_OP_mod:
17841 case DW_OP_mul:
17842 case DW_OP_or:
17843 case DW_OP_plus:
17844 case DW_OP_shl:
17845 case DW_OP_shr:
17846 case DW_OP_shra:
17847 case DW_OP_xor:
17848 case DW_OP_bra:
17849 case DW_OP_eq:
17850 case DW_OP_ge:
17851 case DW_OP_gt:
17852 case DW_OP_le:
17853 case DW_OP_lt:
17854 case DW_OP_ne:
17855 case DW_OP_regx:
17856 case DW_OP_xderef_size:
17857 --frame_offset_;
17858 break;
17859
17860 case DW_OP_call2:
17861 case DW_OP_call4:
17862 case DW_OP_call_ref:
17863 {
17864 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
17865 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
17866
17867 if (stack_usage == NULL)
17868 return false;
17869 frame_offset_ += *stack_usage;
17870 break;
17871 }
17872
17873 case DW_OP_implicit_pointer:
17874 case DW_OP_entry_value:
17875 case DW_OP_const_type:
17876 case DW_OP_regval_type:
17877 case DW_OP_deref_type:
17878 case DW_OP_convert:
17879 case DW_OP_reinterpret:
17880 case DW_OP_form_tls_address:
17881 case DW_OP_GNU_push_tls_address:
17882 case DW_OP_GNU_uninit:
17883 case DW_OP_GNU_encoded_addr:
17884 case DW_OP_GNU_implicit_pointer:
17885 case DW_OP_GNU_entry_value:
17886 case DW_OP_GNU_const_type:
17887 case DW_OP_GNU_regval_type:
17888 case DW_OP_GNU_deref_type:
17889 case DW_OP_GNU_convert:
17890 case DW_OP_GNU_reinterpret:
17891 case DW_OP_GNU_parameter_ref:
17892 /* loc_list_from_tree will probably not output these operations for
17893 size functions, so assume they will not appear here. */
17894 /* Fall through... */
17895
17896 default:
17897 gcc_unreachable ();
17898 }
17899
17900 /* Now, follow the control flow (except subroutine calls). */
17901 switch (l->dw_loc_opc)
17902 {
17903 case DW_OP_bra:
17904 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
17905 frame_offsets))
17906 return false;
17907 /* Fall through. */
17908
17909 case DW_OP_skip:
17910 l = l->dw_loc_oprnd1.v.val_loc;
17911 break;
17912
17913 case DW_OP_stack_value:
17914 return true;
17915
17916 default:
17917 l = l->dw_loc_next;
17918 break;
17919 }
17920 }
17921
17922 return true;
17923 }
17924
17925 /* Make a DFS over operations reachable through LOC (i.e. follow branch
17926 operations) in order to resolve the operand of DW_OP_pick operations that
17927 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
17928 offset *before* LOC is executed. Return whether all relocations were
17929 successful. */
17930
17931 static bool
17932 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17933 struct dwarf_procedure_info *dpi)
17934 {
17935 /* Associate to all visited operations the frame offset *before* evaluating
17936 this operation. */
17937 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
17938
17939 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
17940 frame_offsets);
17941 }
17942
17943 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
17944 Return NULL if it is not possible. */
17945
17946 static dw_die_ref
17947 function_to_dwarf_procedure (tree fndecl)
17948 {
17949 struct loc_descr_context ctx;
17950 struct dwarf_procedure_info dpi;
17951 dw_die_ref dwarf_proc_die;
17952 tree tree_body = DECL_SAVED_TREE (fndecl);
17953 dw_loc_descr_ref loc_body, epilogue;
17954
17955 tree cursor;
17956 unsigned i;
17957
17958 /* Do not generate multiple DWARF procedures for the same function
17959 declaration. */
17960 dwarf_proc_die = lookup_decl_die (fndecl);
17961 if (dwarf_proc_die != NULL)
17962 return dwarf_proc_die;
17963
17964 /* DWARF procedures are available starting with the DWARFv3 standard. */
17965 if (dwarf_version < 3 && dwarf_strict)
17966 return NULL;
17967
17968 /* We handle only functions for which we still have a body, that return a
17969 supported type and that take arguments with supported types. Note that
17970 there is no point translating functions that return nothing. */
17971 if (tree_body == NULL_TREE
17972 || DECL_RESULT (fndecl) == NULL_TREE
17973 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
17974 return NULL;
17975
17976 for (cursor = DECL_ARGUMENTS (fndecl);
17977 cursor != NULL_TREE;
17978 cursor = TREE_CHAIN (cursor))
17979 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
17980 return NULL;
17981
17982 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
17983 if (TREE_CODE (tree_body) != RETURN_EXPR)
17984 return NULL;
17985 tree_body = TREE_OPERAND (tree_body, 0);
17986 if (TREE_CODE (tree_body) != MODIFY_EXPR
17987 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
17988 return NULL;
17989 tree_body = TREE_OPERAND (tree_body, 1);
17990
17991 /* Try to translate the body expression itself. Note that this will probably
17992 cause an infinite recursion if its call graph has a cycle. This is very
17993 unlikely for size functions, however, so don't bother with such things at
17994 the moment. */
17995 ctx.context_type = NULL_TREE;
17996 ctx.base_decl = NULL_TREE;
17997 ctx.dpi = &dpi;
17998 ctx.placeholder_arg = false;
17999 ctx.placeholder_seen = false;
18000 dpi.fndecl = fndecl;
18001 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
18002 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
18003 if (!loc_body)
18004 return NULL;
18005
18006 /* After evaluating all operands in "loc_body", we should still have on the
18007 stack all arguments plus the desired function result (top of the stack).
18008 Generate code in order to keep only the result in our stack frame. */
18009 epilogue = NULL;
18010 for (i = 0; i < dpi.args_count; ++i)
18011 {
18012 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
18013 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
18014 op_couple->dw_loc_next->dw_loc_next = epilogue;
18015 epilogue = op_couple;
18016 }
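  /* For instance (illustration), with dpi.args_count == 2 the loop above
     builds DW_OP_swap, DW_OP_drop, DW_OP_swap, DW_OP_drop, which pops both
     arguments and leaves only the result on the stack.  */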
18017 add_loc_descr (&loc_body, epilogue);
18018 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
18019 return NULL;
18020
18021 /* Trailing nops from loc_descriptor_from_tree (if any) cannot be removed
18022 because they are considered useful. Now that there is an epilogue, they
18023 are not useful anymore, so give it another try. */
18024 loc_descr_without_nops (loc_body);
18025
18026 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
18027 a DW_TAG_dwarf_procedure, so we may have a conflict, here. It's unlikely,
18028 though, given that size functions do not come from source, so they should
18029 not have a dedicated DW_TAG_subprogram DIE. */
18030 dwarf_proc_die
18031 = new_dwarf_proc_die (loc_body, fndecl,
18032 get_context_die (DECL_CONTEXT (fndecl)));
18033
18034 /* The called DWARF procedure consumes one stack slot per argument and
18035 returns one stack slot. */
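  /* For example, a procedure with two arguments is recorded with a net
     stack usage of 1 - 2 = -1; resolve_args_picking_1 adds this value to
     frame_offset_ at DW_OP_call* sites.  */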
18036 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
18037
18038 return dwarf_proc_die;
18039 }
18040
18041
18042 /* Generate Dwarf location list representing LOC.
18043 If WANT_ADDRESS is false, an expression computing the value of LOC will be
18044 returned. If WANT_ADDRESS is 1, an expression computing the address of LOC
18045 will be returned. If WANT_ADDRESS is 2, an expression computing an address
18046 usable in a location will be returned (i.e. DW_OP_reg can be used
18047 to refer to register values).
18048
18049 CONTEXT provides information to customize the location descriptions
18050 generation. Its context_type field specifies what type is implicitly
18051 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
18052 will not be generated.
18053
18054 Its DPI field determines whether we are generating a DWARF expression for a
18055 DWARF procedure, so PARM_DECL references are processed specifically.
18056
18057 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
18058 and dpi fields were null. */
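/* As an illustration (assumed scenario, not from the sources): for a
   variable V living in memory, WANT_ADDRESS == 1 yields an expression for
   the address of V, WANT_ADDRESS == 0 yields its value (typically the same
   expression with a DW_OP_deref appended at the end of this function), and
   WANT_ADDRESS == 2 additionally allows register locations such as
   DW_OP_reg<n>.  */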
18059
18060 static dw_loc_list_ref
18061 loc_list_from_tree_1 (tree loc, int want_address,
18062 struct loc_descr_context *context)
18063 {
18064 dw_loc_descr_ref ret = NULL, ret1 = NULL;
18065 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
18066 int have_address = 0;
18067 enum dwarf_location_atom op;
18068
18069 /* ??? Most of the time we do not take proper care of sign/zero
18070 extending the values. Hopefully this won't be a real
18071 problem... */
18072
18073 if (context != NULL
18074 && context->base_decl == loc
18075 && want_address == 0)
18076 {
18077 if (dwarf_version >= 3 || !dwarf_strict)
18078 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
18079 NULL, 0, NULL, 0, NULL);
18080 else
18081 return NULL;
18082 }
18083
18084 switch (TREE_CODE (loc))
18085 {
18086 case ERROR_MARK:
18087 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
18088 return 0;
18089
18090 case PLACEHOLDER_EXPR:
18091 /* This case involves extracting fields from an object to determine the
18092 position of other fields. It is supposed to appear only as the first
18093 operand of COMPONENT_REF nodes and to reference precisely the type
18094 that the context allows. */
18095 if (context != NULL
18096 && TREE_TYPE (loc) == context->context_type
18097 && want_address >= 1)
18098 {
18099 if (dwarf_version >= 3 || !dwarf_strict)
18100 {
18101 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
18102 have_address = 1;
18103 break;
18104 }
18105 else
18106 return NULL;
18107 }
18108 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
18109 the single argument passed by consumer. */
18110 else if (context != NULL
18111 && context->placeholder_arg
18112 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
18113 && want_address == 0)
18114 {
18115 ret = new_loc_descr (DW_OP_pick, 0, 0);
18116 ret->frame_offset_rel = 1;
18117 context->placeholder_seen = true;
18118 break;
18119 }
18120 else
18121 expansion_failed (loc, NULL_RTX,
18122 "PLACEHOLDER_EXPR for an unexpected type");
18123 break;
18124
18125 case CALL_EXPR:
18126 {
18127 const int nargs = call_expr_nargs (loc);
18128 tree callee = get_callee_fndecl (loc);
18129 int i;
18130 dw_die_ref dwarf_proc;
18131
18132 if (callee == NULL_TREE)
18133 goto call_expansion_failed;
18134
18135 /* We handle only functions that return an integer. */
18136 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
18137 goto call_expansion_failed;
18138
18139 dwarf_proc = function_to_dwarf_procedure (callee);
18140 if (dwarf_proc == NULL)
18141 goto call_expansion_failed;
18142
18143 /* Evaluate arguments right-to-left so that the first argument will
18144 be the top-most one on the stack. */
18145 for (i = nargs - 1; i >= 0; --i)
18146 {
18147 dw_loc_descr_ref loc_descr
18148 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
18149 context);
18150
18151 if (loc_descr == NULL)
18152 goto call_expansion_failed;
18153
18154 add_loc_descr (&ret, loc_descr);
18155 }
18156
18157 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
18158 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18159 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
18160 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
18161 add_loc_descr (&ret, ret1);
18162 break;
18163
18164 call_expansion_failed:
18165 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
18166 /* There are no opcodes for these operations. */
18167 return 0;
18168 }
18169
18170 case PREINCREMENT_EXPR:
18171 case PREDECREMENT_EXPR:
18172 case POSTINCREMENT_EXPR:
18173 case POSTDECREMENT_EXPR:
18174 expansion_failed (loc, NULL_RTX, "PRE/POST INCREMENT/DECREMENT");
18175 /* There are no opcodes for these operations. */
18176 return 0;
18177
18178 case ADDR_EXPR:
18179 /* If we already want an address, see if there is INDIRECT_REF inside
18180 e.g. for &this->field. */
18181 if (want_address)
18182 {
18183 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
18184 (loc, want_address == 2, context);
18185 if (list_ret)
18186 have_address = 1;
18187 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
18188 && (ret = cst_pool_loc_descr (loc)))
18189 have_address = 1;
18190 }
18191 /* Otherwise, process the argument and look for the address. */
18192 if (!list_ret && !ret)
18193 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
18194 else
18195 {
18196 if (want_address)
18197 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
18198 return NULL;
18199 }
18200 break;
18201
18202 case VAR_DECL:
18203 if (DECL_THREAD_LOCAL_P (loc))
18204 {
18205 rtx rtl;
18206 enum dwarf_location_atom tls_op;
18207 enum dtprel_bool dtprel = dtprel_false;
18208
18209 if (targetm.have_tls)
18210 {
18211 /* If this is not defined, we have no way to emit the
18212 data. */
18213 if (!targetm.asm_out.output_dwarf_dtprel)
18214 return 0;
18215
18216 /* The way DW_OP_GNU_push_tls_address is specified, we
18217 can only look up addresses of objects in the current
18218 module. We used DW_OP_addr as first op, but that's
18219 wrong, because DW_OP_addr is relocated by the debug
18220 info consumer, while DW_OP_GNU_push_tls_address
18221 operand shouldn't be. */
18222 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
18223 return 0;
18224 dtprel = dtprel_true;
18225 /* We check for DWARF 5 here because gdb did not implement
18226 DW_OP_form_tls_address until after 7.12. */
18227 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
18228 : DW_OP_GNU_push_tls_address);
18229 }
18230 else
18231 {
18232 if (!targetm.emutls.debug_form_tls_address
18233 || !(dwarf_version >= 3 || !dwarf_strict))
18234 return 0;
18235 /* We stuffed the control variable into the DECL_VALUE_EXPR
18236 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
18237 no longer appear in gimple code. We used the control
18238 variable specifically so that we could pick it up here. */
18239 loc = DECL_VALUE_EXPR (loc);
18240 tls_op = DW_OP_form_tls_address;
18241 }
18242
18243 rtl = rtl_for_decl_location (loc);
18244 if (rtl == NULL_RTX)
18245 return 0;
18246
18247 if (!MEM_P (rtl))
18248 return 0;
18249 rtl = XEXP (rtl, 0);
18250 if (! CONSTANT_P (rtl))
18251 return 0;
18252
18253 ret = new_addr_loc_descr (rtl, dtprel);
18254 ret1 = new_loc_descr (tls_op, 0, 0);
18255 add_loc_descr (&ret, ret1);
18256
18257 have_address = 1;
18258 break;
18259 }
18260 /* FALLTHRU */
18261
18262 case PARM_DECL:
18263 if (context != NULL && context->dpi != NULL
18264 && DECL_CONTEXT (loc) == context->dpi->fndecl)
18265 {
18266 /* We are generating code for a DWARF procedure and we want to access
18267 one of its arguments: find the appropriate argument offset and let
18268 the resolve_args_picking pass compute the offset that complies
18269 with the stack frame size. */
18270 unsigned i = 0;
18271 tree cursor;
18272
18273 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
18274 cursor != NULL_TREE && cursor != loc;
18275 cursor = TREE_CHAIN (cursor), ++i)
18276 ;
18277 /* If we are translating a DWARF procedure, all referenced parameters
18278 must belong to the current function. */
18279 gcc_assert (cursor != NULL_TREE);
18280
18281 ret = new_loc_descr (DW_OP_pick, i, 0);
18282 ret->frame_offset_rel = 1;
18283 break;
18284 }
18285 /* FALLTHRU */
18286
18287 case RESULT_DECL:
18288 if (DECL_HAS_VALUE_EXPR_P (loc))
18289 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
18290 want_address, context);
18291 /* FALLTHRU */
18292
18293 case FUNCTION_DECL:
18294 {
18295 rtx rtl;
18296 var_loc_list *loc_list = lookup_decl_loc (loc);
18297
18298 if (loc_list && loc_list->first)
18299 {
18300 list_ret = dw_loc_list (loc_list, loc, want_address);
18301 have_address = want_address != 0;
18302 break;
18303 }
18304 rtl = rtl_for_decl_location (loc);
18305 if (rtl == NULL_RTX)
18306 {
18307 if (TREE_CODE (loc) != FUNCTION_DECL
18308 && early_dwarf
18309 && current_function_decl
18310 && want_address != 1
18311 && ! DECL_IGNORED_P (loc)
18312 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
18313 || POINTER_TYPE_P (TREE_TYPE (loc)))
18314 && DECL_CONTEXT (loc) == current_function_decl
18315 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
18316 <= DWARF2_ADDR_SIZE))
18317 {
18318 dw_die_ref ref = lookup_decl_die (loc);
18319 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
18320 if (ref)
18321 {
18322 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18323 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
18324 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
18325 }
18326 else
18327 {
18328 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
18329 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
18330 }
18331 break;
18332 }
18333 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
18334 return 0;
18335 }
18336 else if (CONST_INT_P (rtl))
18337 {
18338 HOST_WIDE_INT val = INTVAL (rtl);
18339 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18340 val &= GET_MODE_MASK (DECL_MODE (loc));
18341 ret = int_loc_descriptor (val);
18342 }
18343 else if (GET_CODE (rtl) == CONST_STRING)
18344 {
18345 expansion_failed (loc, NULL_RTX, "CONST_STRING");
18346 return 0;
18347 }
18348 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
18349 ret = new_addr_loc_descr (rtl, dtprel_false);
18350 else
18351 {
18352 machine_mode mode, mem_mode;
18353
18354 /* Certain constructs can only be represented at top-level. */
18355 if (want_address == 2)
18356 {
18357 ret = loc_descriptor (rtl, VOIDmode,
18358 VAR_INIT_STATUS_INITIALIZED);
18359 have_address = 1;
18360 }
18361 else
18362 {
18363 mode = GET_MODE (rtl);
18364 mem_mode = VOIDmode;
18365 if (MEM_P (rtl))
18366 {
18367 mem_mode = mode;
18368 mode = get_address_mode (rtl);
18369 rtl = XEXP (rtl, 0);
18370 have_address = 1;
18371 }
18372 ret = mem_loc_descriptor (rtl, mode, mem_mode,
18373 VAR_INIT_STATUS_INITIALIZED);
18374 }
18375 if (!ret)
18376 expansion_failed (loc, rtl,
18377 "failed to produce loc descriptor for rtl");
18378 }
18379 }
18380 break;
18381
18382 case MEM_REF:
18383 if (!integer_zerop (TREE_OPERAND (loc, 1)))
18384 {
18385 have_address = 1;
18386 goto do_plus;
18387 }
18388 /* Fallthru. */
18389 case INDIRECT_REF:
18390 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18391 have_address = 1;
18392 break;
18393
18394 case TARGET_MEM_REF:
18395 case SSA_NAME:
18396 case DEBUG_EXPR_DECL:
18397 return NULL;
18398
18399 case COMPOUND_EXPR:
18400 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
18401 context);
18402
18403 CASE_CONVERT:
18404 case VIEW_CONVERT_EXPR:
18405 case SAVE_EXPR:
18406 case MODIFY_EXPR:
18407 case NON_LVALUE_EXPR:
18408 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
18409 context);
18410
18411 case COMPONENT_REF:
18412 case BIT_FIELD_REF:
18413 case ARRAY_REF:
18414 case ARRAY_RANGE_REF:
18415 case REALPART_EXPR:
18416 case IMAGPART_EXPR:
18417 {
18418 tree obj, offset;
18419 poly_int64 bitsize, bitpos, bytepos;
18420 machine_mode mode;
18421 int unsignedp, reversep, volatilep = 0;
18422
18423 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
18424 &unsignedp, &reversep, &volatilep);
18425
18426 gcc_assert (obj != loc);
18427
18428 list_ret = loc_list_from_tree_1 (obj,
18429 want_address == 2
18430 && known_eq (bitpos, 0)
18431 && !offset ? 2 : 1,
18432 context);
18433 /* TODO: We can extract the value of a small expression via shifting even
18434 for nonzero bitpos. */
18435 if (list_ret == 0)
18436 return 0;
18437 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
18438 || !multiple_p (bitsize, BITS_PER_UNIT))
18439 {
18440 expansion_failed (loc, NULL_RTX,
18441 "bitfield access");
18442 return 0;
18443 }
18444
18445 if (offset != NULL_TREE)
18446 {
18447 /* Variable offset. */
18448 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
18449 if (list_ret1 == 0)
18450 return 0;
18451 add_loc_list (&list_ret, list_ret1);
18452 if (!list_ret)
18453 return 0;
18454 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
18455 }
18456
18457 HOST_WIDE_INT value;
18458 if (bytepos.is_constant (&value) && value > 0)
18459 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
18460 value, 0));
18461 else if (maybe_ne (bytepos, 0))
18462 loc_list_plus_const (list_ret, bytepos);
18463
18464 have_address = 1;
18465 break;
18466 }
18467
18468 case INTEGER_CST:
18469 if ((want_address || !tree_fits_shwi_p (loc))
18470 && (ret = cst_pool_loc_descr (loc)))
18471 have_address = 1;
18472 else if (want_address == 2
18473 && tree_fits_shwi_p (loc)
18474 && (ret = address_of_int_loc_descriptor
18475 (int_size_in_bytes (TREE_TYPE (loc)),
18476 tree_to_shwi (loc))))
18477 have_address = 1;
18478 else if (tree_fits_shwi_p (loc))
18479 ret = int_loc_descriptor (tree_to_shwi (loc));
18480 else if (tree_fits_uhwi_p (loc))
18481 ret = uint_loc_descriptor (tree_to_uhwi (loc));
18482 else
18483 {
18484 expansion_failed (loc, NULL_RTX,
18485 "Integer operand is not host integer");
18486 return 0;
18487 }
18488 break;
18489
18490 case CONSTRUCTOR:
18491 case REAL_CST:
18492 case STRING_CST:
18493 case COMPLEX_CST:
18494 if ((ret = cst_pool_loc_descr (loc)))
18495 have_address = 1;
18496 else if (TREE_CODE (loc) == CONSTRUCTOR)
18497 {
18498 tree type = TREE_TYPE (loc);
18499 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
18500 unsigned HOST_WIDE_INT offset = 0;
18501 unsigned HOST_WIDE_INT cnt;
18502 constructor_elt *ce;
18503
18504 if (TREE_CODE (type) == RECORD_TYPE)
18505 {
18506 /* This is very limited, but it's enough to output
18507 pointers to member functions, as long as the
18508 referenced function is defined in the current
18509 translation unit. */
18510 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
18511 {
18512 tree val = ce->value;
18513
18514 tree field = ce->index;
18515
18516 if (val)
18517 STRIP_NOPS (val);
18518
18519 if (!field || DECL_BIT_FIELD (field))
18520 {
18521 expansion_failed (loc, NULL_RTX,
18522 "bitfield in record type constructor");
18523 size = offset = (unsigned HOST_WIDE_INT)-1;
18524 ret = NULL;
18525 break;
18526 }
18527
18528 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
18529 unsigned HOST_WIDE_INT pos = int_byte_position (field);
18530 gcc_assert (pos + fieldsize <= size);
18531 if (pos < offset)
18532 {
18533 expansion_failed (loc, NULL_RTX,
18534 "out-of-order fields in record constructor");
18535 size = offset = (unsigned HOST_WIDE_INT)-1;
18536 ret = NULL;
18537 break;
18538 }
18539 if (pos > offset)
18540 {
18541 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
18542 add_loc_descr (&ret, ret1);
18543 offset = pos;
18544 }
18545 if (val && fieldsize != 0)
18546 {
18547 ret1 = loc_descriptor_from_tree (val, want_address, context);
18548 if (!ret1)
18549 {
18550 expansion_failed (loc, NULL_RTX,
18551 "unsupported expression in field");
18552 size = offset = (unsigned HOST_WIDE_INT)-1;
18553 ret = NULL;
18554 break;
18555 }
18556 add_loc_descr (&ret, ret1);
18557 }
18558 if (fieldsize)
18559 {
18560 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
18561 add_loc_descr (&ret, ret1);
18562 offset = pos + fieldsize;
18563 }
18564 }
18565
18566 if (offset != size)
18567 {
18568 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
18569 add_loc_descr (&ret, ret1);
18570 offset = size;
18571 }
18572
18573 have_address = !!want_address;
18574 }
18575 else
18576 expansion_failed (loc, NULL_RTX,
18577 "constructor of non-record type");
18578 }
18579 else
18580 /* We can construct small constants here using int_loc_descriptor. */
18581 expansion_failed (loc, NULL_RTX,
18582 "constructor or constant not in constant pool");
18583 break;
18584
18585 case TRUTH_AND_EXPR:
18586 case TRUTH_ANDIF_EXPR:
18587 case BIT_AND_EXPR:
18588 op = DW_OP_and;
18589 goto do_binop;
18590
18591 case TRUTH_XOR_EXPR:
18592 case BIT_XOR_EXPR:
18593 op = DW_OP_xor;
18594 goto do_binop;
18595
18596 case TRUTH_OR_EXPR:
18597 case TRUTH_ORIF_EXPR:
18598 case BIT_IOR_EXPR:
18599 op = DW_OP_or;
18600 goto do_binop;
18601
18602 case FLOOR_DIV_EXPR:
18603 case CEIL_DIV_EXPR:
18604 case ROUND_DIV_EXPR:
18605 case TRUNC_DIV_EXPR:
18606 case EXACT_DIV_EXPR:
18607 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18608 return 0;
18609 op = DW_OP_div;
18610 goto do_binop;
18611
18612 case MINUS_EXPR:
18613 op = DW_OP_minus;
18614 goto do_binop;
18615
18616 case FLOOR_MOD_EXPR:
18617 case CEIL_MOD_EXPR:
18618 case ROUND_MOD_EXPR:
18619 case TRUNC_MOD_EXPR:
18620 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18621 {
18622 op = DW_OP_mod;
18623 goto do_binop;
18624 }
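      /* For signed operands, the sequence below leaves
	 op0 - (op0 / op1) * op1 on the stack, i.e. the truncating modulus,
	 computed with the signed DW_OP_div rather than DW_OP_mod.  */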
18625 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18626 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18627 if (list_ret == 0 || list_ret1 == 0)
18628 return 0;
18629
18630 add_loc_list (&list_ret, list_ret1);
18631 if (list_ret == 0)
18632 return 0;
18633 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18634 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18635 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
18636 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
18637 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
18638 break;
18639
18640 case MULT_EXPR:
18641 op = DW_OP_mul;
18642 goto do_binop;
18643
18644 case LSHIFT_EXPR:
18645 op = DW_OP_shl;
18646 goto do_binop;
18647
18648 case RSHIFT_EXPR:
18649 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
18650 goto do_binop;
18651
18652 case POINTER_PLUS_EXPR:
18653 case PLUS_EXPR:
18654 do_plus:
18655 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
18656 {
18657 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
18658 smarter to encode their opposite. The DW_OP_plus_uconst operation
18659 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
18660 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
18661 bytes, Y being the size of the operation that pushes the opposite
18662 of the addend. So let's choose the smallest representation. */
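	  /* Hypothetical illustration: on a 64-bit target, an addend of
	     0xfffffffffffffff8 (-8 once sign-extended to the DWARF address
	     size) would need a 10-byte ULEB128 after DW_OP_plus_uconst,
	     whereas pushing 8 with DW_OP_lit8 and applying DW_OP_minus
	     takes only 2 bytes.  */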
18663 const tree tree_addend = TREE_OPERAND (loc, 1);
18664 offset_int wi_addend;
18665 HOST_WIDE_INT shwi_addend;
18666 dw_loc_descr_ref loc_naddend;
18667
18668 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18669 if (list_ret == 0)
18670 return 0;
18671
18672 /* Try to get the literal to push. It is the opposite of the addend,
18673 so as we rely on wrapping during DWARF evaluation, first decode
18674 the literal as a "DWARF-sized" signed number. */
18675 wi_addend = wi::to_offset (tree_addend);
18676 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
18677 shwi_addend = wi_addend.to_shwi ();
18678 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
18679 ? int_loc_descriptor (-shwi_addend)
18680 : NULL;
18681
18682 if (loc_naddend != NULL
18683 && ((unsigned) size_of_uleb128 (shwi_addend)
18684 > size_of_loc_descr (loc_naddend)))
18685 {
18686 add_loc_descr_to_each (list_ret, loc_naddend);
18687 add_loc_descr_to_each (list_ret,
18688 new_loc_descr (DW_OP_minus, 0, 0));
18689 }
18690 else
18691 {
18692 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
18693 {
18694 loc_naddend = loc_cur;
18695 loc_cur = loc_cur->dw_loc_next;
18696 ggc_free (loc_naddend);
18697 }
18698 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
18699 }
18700 break;
18701 }
18702
18703 op = DW_OP_plus;
18704 goto do_binop;
18705
18706 case LE_EXPR:
18707 op = DW_OP_le;
18708 goto do_comp_binop;
18709
18710 case GE_EXPR:
18711 op = DW_OP_ge;
18712 goto do_comp_binop;
18713
18714 case LT_EXPR:
18715 op = DW_OP_lt;
18716 goto do_comp_binop;
18717
18718 case GT_EXPR:
18719 op = DW_OP_gt;
18720 goto do_comp_binop;
18721
18722 do_comp_binop:
18723 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18724 {
18725 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18726 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18727 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18728 TREE_CODE (loc));
18729 break;
18730 }
18731 else
18732 goto do_binop;
18733
18734 case EQ_EXPR:
18735 op = DW_OP_eq;
18736 goto do_binop;
18737
18738 case NE_EXPR:
18739 op = DW_OP_ne;
18740 goto do_binop;
18741
18742 do_binop:
18743 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18744 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18745 if (list_ret == 0 || list_ret1 == 0)
18746 return 0;
18747
18748 add_loc_list (&list_ret, list_ret1);
18749 if (list_ret == 0)
18750 return 0;
18751 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18752 break;
18753
18754 case TRUTH_NOT_EXPR:
18755 case BIT_NOT_EXPR:
18756 op = DW_OP_not;
18757 goto do_unop;
18758
18759 case ABS_EXPR:
18760 op = DW_OP_abs;
18761 goto do_unop;
18762
18763 case NEGATE_EXPR:
18764 op = DW_OP_neg;
18765 goto do_unop;
18766
18767 do_unop:
18768 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18769 if (list_ret == 0)
18770 return 0;
18771
18772 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18773 break;
18774
18775 case MIN_EXPR:
18776 case MAX_EXPR:
18777 {
18778 const enum tree_code code =
18779 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18780
18781 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18782 build2 (code, integer_type_node,
18783 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18784 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18785 }
18786
18787 /* fall through */
18788
18789 case COND_EXPR:
18790 {
18791 dw_loc_descr_ref lhs
18792 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18793 dw_loc_list_ref rhs
18794 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18795 dw_loc_descr_ref bra_node, jump_node, tmp;
18796
18797 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18798 if (list_ret == 0 || lhs == 0 || rhs == 0)
18799 return 0;
18800
18801 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18802 add_loc_descr_to_each (list_ret, bra_node);
18803
18804 add_loc_list (&list_ret, rhs);
18805 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18806 add_loc_descr_to_each (list_ret, jump_node);
18807
18808 add_loc_descr_to_each (list_ret, lhs);
18809 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18810 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18811
18812 /* ??? Need a node to point the skip at. Use a nop. */
18813 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18814 add_loc_descr_to_each (list_ret, tmp);
18815 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18816 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
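	/* The resulting layout is: <condition>; DW_OP_bra (to the
	   then-value); <else-value>; DW_OP_skip (to the final nop);
	   <then-value>; DW_OP_nop.  */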
18817 }
18818 break;
18819
18820 case FIX_TRUNC_EXPR:
18821 return 0;
18822
18823 default:
18824 /* Leave front-end specific codes as simply unknown. This comes
18825 up, for instance, with the C STMT_EXPR. */
18826 if ((unsigned int) TREE_CODE (loc)
18827 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
18828 {
18829 expansion_failed (loc, NULL_RTX,
18830 "language specific tree node");
18831 return 0;
18832 }
18833
18834 /* Otherwise this is a generic code; we should just list all of
18835 these explicitly. We forgot one. */
18836 if (flag_checking)
18837 gcc_unreachable ();
18838
18839 /* In a release build, we want to degrade gracefully: better to
18840 generate incomplete debugging information than to crash. */
18841 return NULL;
18842 }
18843
18844 if (!ret && !list_ret)
18845 return 0;
18846
18847 if (want_address == 2 && !have_address
18848 && (dwarf_version >= 4 || !dwarf_strict))
18849 {
18850 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
18851 {
18852 expansion_failed (loc, NULL_RTX,
18853 "DWARF address size mismatch");
18854 return 0;
18855 }
18856 if (ret)
18857 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
18858 else
18859 add_loc_descr_to_each (list_ret,
18860 new_loc_descr (DW_OP_stack_value, 0, 0));
18861 have_address = 1;
18862 }
18863 /* Show if we can't fill the request for an address. */
18864 if (want_address && !have_address)
18865 {
18866 expansion_failed (loc, NULL_RTX,
18867 "Want address and only have value");
18868 return 0;
18869 }
18870
18871 gcc_assert (!ret || !list_ret);
18872
18873 /* If we've got an address and don't want one, dereference. */
18874 if (!want_address && have_address)
18875 {
18876 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
18877
18878 if (size > DWARF2_ADDR_SIZE || size == -1)
18879 {
18880 expansion_failed (loc, NULL_RTX,
18881 "DWARF address size mismatch");
18882 return 0;
18883 }
18884 else if (size == DWARF2_ADDR_SIZE)
18885 op = DW_OP_deref;
18886 else
18887 op = DW_OP_deref_size;
18888
18889 if (ret)
18890 add_loc_descr (&ret, new_loc_descr (op, size, 0));
18891 else
18892 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
18893 }
18894 if (ret)
18895 list_ret = new_loc_list (ret, NULL, 0, NULL, 0, NULL);
18896
18897 return list_ret;
18898 }
18899
18900 /* Likewise, but strip useless DW_OP_nop operations in the resulting
18901 expressions. */
18902
18903 static dw_loc_list_ref
18904 loc_list_from_tree (tree loc, int want_address,
18905 struct loc_descr_context *context)
18906 {
18907 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
18908
18909 for (dw_loc_list_ref loc_cur = result;
18910 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
18911 loc_descr_without_nops (loc_cur->expr);
18912 return result;
18913 }
18914
18915 /* Same as above but return only a single location expression. */
18916 static dw_loc_descr_ref
18917 loc_descriptor_from_tree (tree loc, int want_address,
18918 struct loc_descr_context *context)
18919 {
18920 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
18921 if (!ret)
18922 return NULL;
18923 if (ret->dw_loc_next)
18924 {
18925 expansion_failed (loc, NULL_RTX,
18926 "Location list where only loc descriptor needed");
18927 return NULL;
18928 }
18929 return ret->expr;
18930 }
18931
18932 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
18933 pointer to the declared type for the relevant field variable, or return
18934 `integer_type_node' if the given node turns out to be an
18935 ERROR_MARK node. */
18936
18937 static inline tree
18938 field_type (const_tree decl)
18939 {
18940 tree type;
18941
18942 if (TREE_CODE (decl) == ERROR_MARK)
18943 return integer_type_node;
18944
18945 type = DECL_BIT_FIELD_TYPE (decl);
18946 if (type == NULL_TREE)
18947 type = TREE_TYPE (decl);
18948
18949 return type;
18950 }
18951
18952 /* Given a pointer to a tree node, return the alignment in bits for
18953 it, or else return BITS_PER_WORD if the node actually turns out to
18954 be an ERROR_MARK node. */
18955
18956 static inline unsigned
18957 simple_type_align_in_bits (const_tree type)
18958 {
18959 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
18960 }
18961
18962 static inline unsigned
18963 simple_decl_align_in_bits (const_tree decl)
18964 {
18965 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
18966 }
18967
18968 /* Return the result of rounding T up to ALIGN. */
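/* For example, round_up_to_align (5, 4) and round_up_to_align (8, 4) both
   yield 8.  */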
18969
18970 static inline offset_int
18971 round_up_to_align (const offset_int &t, unsigned int align)
18972 {
18973 return wi::udiv_trunc (t + align - 1, align) * align;
18974 }
18975
18976 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
18977 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
18978 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
18979 if we fail to return the size in one of these two forms. */
18980
18981 static dw_loc_descr_ref
18982 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
18983 {
18984 tree tree_size;
18985 struct loc_descr_context ctx;
18986
18987 /* Prefer returning a constant integer, if possible. */
18988 *cst_size = int_size_in_bytes (type);
18989 if (*cst_size != -1)
18990 return NULL;
18991
18992 ctx.context_type = const_cast<tree> (type);
18993 ctx.base_decl = NULL_TREE;
18994 ctx.dpi = NULL;
18995 ctx.placeholder_arg = false;
18996 ctx.placeholder_seen = false;
18997
18998 type = TYPE_MAIN_VARIANT (type);
18999 tree_size = TYPE_SIZE_UNIT (type);
19000 return ((tree_size != NULL_TREE)
19001 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
19002 : NULL);
19003 }
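
/* A caller might use type_byte_size along these lines (sketch only, not a
   copy of the actual call sites):

     HOST_WIDE_INT cst_size;
     dw_loc_descr_ref size_expr = type_byte_size (type, &cst_size);
     if (size_expr != NULL)
       ... attach SIZE_EXPR as a DWARF expression ...;
     else if (cst_size != -1)
       ... attach CST_SIZE as a constant ...;
     else
       ... the size could not be represented ...;  */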
19004
19005 /* Helper structure for RECORD_TYPE processing. */
19006 struct vlr_context
19007 {
19008 /* Root RECORD_TYPE. It is needed to generate data member location
19009 descriptions in variable-length records (VLR), but also to cope with
19010 variants, which are composed of nested structures multiplexed with
19011 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
19012 function processing a FIELD_DECL, it is required to be non-null. */
19013 tree struct_type;
19014 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
19015 QUAL_UNION_TYPE), this holds an expression that computes the offset for
19016 this variant part as part of the root record (in storage units). For
19017 regular records, it must be NULL_TREE. */
19018 tree variant_part_offset;
19019 };
19020
19021 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
19022 addressed byte of the "containing object" for the given FIELD_DECL. If
19023 possible, return a native constant through CST_OFFSET (in which case NULL is
19024 returned); otherwise return a DWARF expression that computes the offset.
19025
19026 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
19027 that offset is, either because the argument turns out to be a pointer to an
19028 ERROR_MARK node, or because the offset expression is too complex for us.
19029
19030 CTX is required: see the comment for VLR_CONTEXT. */
19031
19032 static dw_loc_descr_ref
19033 field_byte_offset (const_tree decl, struct vlr_context *ctx,
19034 HOST_WIDE_INT *cst_offset)
19035 {
19036 tree tree_result;
19037 dw_loc_list_ref loc_result;
19038
19039 *cst_offset = 0;
19040
19041 if (TREE_CODE (decl) == ERROR_MARK)
19042 return NULL;
19043 else
19044 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
19045
19046 /* We cannot handle variable bit offsets at the moment, so abort if that is the
19047 case. */
19048 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
19049 return NULL;
19050
19051 /* We used to handle only constant offsets in all cases. Now, we handle
19052 dynamic byte offsets properly only when PCC bitfield type doesn't
19053 matter. */
19054 if (PCC_BITFIELD_TYPE_MATTERS
19055 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
19056 {
19057 offset_int object_offset_in_bits;
19058 offset_int object_offset_in_bytes;
19059 offset_int bitpos_int;
19060 tree type;
19061 tree field_size_tree;
19062 offset_int deepest_bitpos;
19063 offset_int field_size_in_bits;
19064 unsigned int type_align_in_bits;
19065 unsigned int decl_align_in_bits;
19066 offset_int type_size_in_bits;
19067
19068 bitpos_int = wi::to_offset (bit_position (decl));
19069 type = field_type (decl);
19070 type_size_in_bits = offset_int_type_size_in_bits (type);
19071 type_align_in_bits = simple_type_align_in_bits (type);
19072
19073 field_size_tree = DECL_SIZE (decl);
19074
19075 /* The size could be unspecified if there was an error, or for
19076 a flexible array member. */
19077 if (!field_size_tree)
19078 field_size_tree = bitsize_zero_node;
19079
19080 /* If the size of the field is not constant, use the type size. */
19081 if (TREE_CODE (field_size_tree) == INTEGER_CST)
19082 field_size_in_bits = wi::to_offset (field_size_tree);
19083 else
19084 field_size_in_bits = type_size_in_bits;
19085
19086 decl_align_in_bits = simple_decl_align_in_bits (decl);
19087
19088 /* The GCC front-end doesn't make any attempt to keep track of the
19089 starting bit offset (relative to the start of the containing
19090 structure type) of the hypothetical "containing object" for a
19091 bit-field. Thus, when computing the byte offset value for the
19092 start of the "containing object" of a bit-field, we must deduce
19093 this information on our own. This can be rather tricky to do in
19094 some cases. For example, handling the following structure type
19095 definition when compiling for an i386/i486 target (which only
19096 aligns long long's to 32-bit boundaries) can be very tricky:
19097
19098 struct S { int field1; long long field2:31; };
19099
19100 Fortunately, there is a simple rule-of-thumb which can be used
19101 in such cases. When compiling for an i386/i486, GCC will
19102 allocate 8 bytes for the structure shown above. It decides to
19103 do this based upon one simple rule for bit-field allocation.
19104 GCC allocates each "containing object" for each bit-field at
19105 the first (i.e. lowest addressed) legitimate alignment boundary
19106 (based upon the required minimum alignment for the declared
19107 type of the field) which it can possibly use, subject to the
19108 condition that there is still enough available space remaining
19109 in the containing object (when allocated at the selected point)
19110 to fully accommodate all of the bits of the bit-field itself.
19111
19112 This simple rule makes it obvious why GCC allocates 8 bytes for
19113 each object of the structure type shown above. When looking
19114 for a place to allocate the "containing object" for `field2',
19115 the compiler simply tries to allocate a 64-bit "containing
19116 object" at each successive 32-bit boundary (starting at zero)
19117 until it finds a place to allocate that 64-bit field such that
19118 at least 31 contiguous (and previously unallocated) bits remain
19119 within that selected 64-bit field. (As it turns out, for the
19120 example above, the compiler finds it is OK to allocate the
19121 "containing object" 64-bit field at bit-offset zero within the
19122 structure type.)
19123
19124 Here we attempt to work backwards from the limited set of facts
19125 we're given, and we try to deduce from those facts, where GCC
19126 must have believed that the containing object started (within
19127 the structure type). The value we deduce is then used (by the
19128 callers of this routine) to generate DW_AT_location and
19129 DW_AT_bit_offset attributes for fields (both bit-fields and, in
19130 the case of DW_AT_location, regular fields as well). */
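      /* A smaller worked illustration (hypothetical layout, assuming a
	 32-bit int aligned to 32 bits): for struct { int a; int b:8; },
	 bitpos_int is 32 and field_size_in_bits is 8, so deepest_bitpos is
	 40; subtracting the 32-bit type size gives 8, which rounds up to
	 the 32-bit type alignment, so the containing object for B starts
	 at bit 32, i.e. byte offset 4.  */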
19131
19132 /* Figure out the bit-distance from the start of the structure to
19133 the "deepest" bit of the bit-field. */
19134 deepest_bitpos = bitpos_int + field_size_in_bits;
19135
19136 /* This is the tricky part. Use some fancy footwork to deduce
19137 where the lowest addressed bit of the containing object must
19138 be. */
19139 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19140
19141 /* Round up to type_align by default. This works best for
19142 bitfields. */
19143 object_offset_in_bits
19144 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
19145
19146 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
19147 {
19148 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19149
19150 /* Round up to decl_align instead. */
19151 object_offset_in_bits
19152 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
19153 }
19154
19155 object_offset_in_bytes
19156 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
19157 if (ctx->variant_part_offset == NULL_TREE)
19158 {
19159 *cst_offset = object_offset_in_bytes.to_shwi ();
19160 return NULL;
19161 }
19162 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
19163 }
19164 else
19165 tree_result = byte_position (decl);
19166
19167 if (ctx->variant_part_offset != NULL_TREE)
19168 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
19169 ctx->variant_part_offset, tree_result);
19170
19171 /* If the byte offset is a constant, it's simpler to handle a native
19172 constant rather than a DWARF expression. */
19173 if (TREE_CODE (tree_result) == INTEGER_CST)
19174 {
19175 *cst_offset = wi::to_offset (tree_result).to_shwi ();
19176 return NULL;
19177 }
19178 struct loc_descr_context loc_ctx = {
19179 ctx->struct_type, /* context_type */
19180 NULL_TREE, /* base_decl */
19181 NULL, /* dpi */
19182 false, /* placeholder_arg */
19183 false /* placeholder_seen */
19184 };
19185 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
19186
19187 /* We want a DWARF expression: abort if we only have a location list with
19188 multiple elements. */
19189 if (!loc_result || !single_element_loc_list_p (loc_result))
19190 return NULL;
19191 else
19192 return loc_result->expr;
19193 }
19194 \f
19195 /* The following routines define various Dwarf attributes and any data
19196 associated with them. */
19197
19198 /* Add a location description attribute value to a DIE.
19199
19200 This emits location attributes suitable for whole variables and
19201 whole parameters. Note that the location attributes for struct fields are
19202 generated by the routine `data_member_location_attribute' below. */
19203
19204 static inline void
19205 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
19206 dw_loc_list_ref descr)
19207 {
19208 bool check_no_locviews = true;
19209 if (descr == 0)
19210 return;
19211 if (single_element_loc_list_p (descr))
19212 add_AT_loc (die, attr_kind, descr->expr);
19213 else
19214 {
19215 add_AT_loc_list (die, attr_kind, descr);
19216 gcc_assert (descr->ll_symbol);
19217 if (attr_kind == DW_AT_location && descr->vl_symbol
19218 && dwarf2out_locviews_in_attribute ())
19219 {
19220 add_AT_view_list (die, DW_AT_GNU_locviews);
19221 check_no_locviews = false;
19222 }
19223 }
19224
19225 if (check_no_locviews)
19226 gcc_assert (!get_AT (die, DW_AT_GNU_locviews));
19227 }
19228
19229 /* Add DW_AT_accessibility attribute to DIE if needed. */
19230
19231 static void
19232 add_accessibility_attribute (dw_die_ref die, tree decl)
19233 {
19234 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
19235 children, otherwise the default is DW_ACCESS_public. In DWARF2
19236 the default has always been DW_ACCESS_public. */
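/* For example, a protected member always gets DW_ACCESS_protected here,
   while a private member of a class is left implicit for DWARF 3+ because
   private is already the default in that context.  */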
19237 if (TREE_PROTECTED (decl))
19238 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
19239 else if (TREE_PRIVATE (decl))
19240 {
19241 if (dwarf_version == 2
19242 || die->die_parent == NULL
19243 || die->die_parent->die_tag != DW_TAG_class_type)
19244 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
19245 }
19246 else if (dwarf_version > 2
19247 && die->die_parent
19248 && die->die_parent->die_tag == DW_TAG_class_type)
19249 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
19250 }
19251
19252 /* Attach the specialized form of location attribute used for data members of
19253 struct and union types. In the special case of a FIELD_DECL node which
19254 represents a bit-field, the "offset" part of this special location
19255 descriptor must indicate the distance in bytes from the lowest-addressed
19256 byte of the containing struct or union type to the lowest-addressed byte of
19257 the "containing object" for the bit-field. (See the `field_byte_offset'
19258 function above).
19259
19260 For any given bit-field, the "containing object" is a hypothetical object
19261 (of some integral or enum type) within which the given bit-field lives. The
19262 type of this hypothetical "containing object" is always the same as the
19263 declared type of the individual bit-field itself (for GCC anyway... the
19264 DWARF spec doesn't actually mandate this). Note that it is the size (in
19265 bytes) of the hypothetical "containing object" which will be given in the
19266 DW_AT_byte_size attribute for this bit-field. (See the
19267 `byte_size_attribute' function below.) It is also used when calculating the
19268 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
19269 function below.)
19270
19271 CTX is required: see the comment for VLR_CONTEXT. */
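/* As a rough example: for struct S { int f : 3; int g : 3; } on a typical
   target, both bit-fields share a hypothetical 4-byte int "containing
   object" starting at byte 0 of S; that byte offset is what goes into
   DW_AT_data_member_location, while DW_AT_byte_size (4) and
   DW_AT_bit_offset describe the bits within it.  */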
19272
19273 static void
19274 add_data_member_location_attribute (dw_die_ref die,
19275 tree decl,
19276 struct vlr_context *ctx)
19277 {
19278 HOST_WIDE_INT offset;
19279 dw_loc_descr_ref loc_descr = 0;
19280
19281 if (TREE_CODE (decl) == TREE_BINFO)
19282 {
19283 /* We're working on the TAG_inheritance for a base class. */
19284 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
19285 {
19286 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
19287 aren't at a fixed offset from all (sub)objects of the same
19288 type. We need to extract the appropriate offset from our
19289 vtable. The following dwarf expression means
19290
19291 BaseAddr = ObAddr + *((*ObAddr) - Offset)
19292
19293 This is specific to the V3 ABI, of course. */
19294
19295 dw_loc_descr_ref tmp;
19296
19297 /* Make a copy of the object address. */
19298 tmp = new_loc_descr (DW_OP_dup, 0, 0);
19299 add_loc_descr (&loc_descr, tmp);
19300
19301 /* Extract the vtable address. */
19302 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19303 add_loc_descr (&loc_descr, tmp);
19304
19305 /* Calculate the address of the offset. */
19306 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
19307 gcc_assert (offset < 0);
19308
19309 tmp = int_loc_descriptor (-offset);
19310 add_loc_descr (&loc_descr, tmp);
19311 tmp = new_loc_descr (DW_OP_minus, 0, 0);
19312 add_loc_descr (&loc_descr, tmp);
19313
19314 /* Extract the offset. */
19315 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19316 add_loc_descr (&loc_descr, tmp);
19317
19318 /* Add it to the object address. */
19319 tmp = new_loc_descr (DW_OP_plus, 0, 0);
19320 add_loc_descr (&loc_descr, tmp);
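/* The stack program built above is, roughly:
   DW_OP_dup; DW_OP_deref; <push -offset>; DW_OP_minus; DW_OP_deref;
   DW_OP_plus -- i.e. with ObAddr on the stack it leaves
   ObAddr + *((*ObAddr) - Offset), as described above.  */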
19321 }
19322 else
19323 offset = tree_to_shwi (BINFO_OFFSET (decl));
19324 }
19325 else
19326 {
19327 loc_descr = field_byte_offset (decl, ctx, &offset);
19328
19329 /* If loc_descr is available then we know the field offset is dynamic.
19330 However, GDB does not handle dynamic field offsets very well at the
19331 moment. */
19332 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
19333 {
19334 loc_descr = NULL;
19335 offset = 0;
19336 }
19337
19338 /* Data member location evaluation starts with the base address on the
19339 stack. Compute the field offset and add it to this base address. */
19340 else if (loc_descr != NULL)
19341 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
19342 }
19343
19344 if (! loc_descr)
19345 {
19346 /* While DW_AT_data_bit_offset was already added in DWARF4,
19347 GDB, for example, only added support for it in November 2016. For DWARF5
19348 we need newer debug info consumers anyway. We might change this
19349 to dwarf_version >= 4 once most consumers have caught up. */
19350 if (dwarf_version >= 5
19351 && TREE_CODE (decl) == FIELD_DECL
19352 && DECL_BIT_FIELD_TYPE (decl))
19353 {
19354 tree off = bit_position (decl);
19355 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
19356 {
19357 remove_AT (die, DW_AT_byte_size);
19358 remove_AT (die, DW_AT_bit_offset);
19359 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
19360 return;
19361 }
19362 }
19363 if (dwarf_version > 2)
19364 {
19365 /* Don't need to output a location expression, just the constant. */
19366 if (offset < 0)
19367 add_AT_int (die, DW_AT_data_member_location, offset);
19368 else
19369 add_AT_unsigned (die, DW_AT_data_member_location, offset);
19370 return;
19371 }
19372 else
19373 {
19374 enum dwarf_location_atom op;
19375
19376 /* The DWARF2 standard says that we should assume that the structure
19377 address is already on the stack, so we can specify a structure
19378 field address by using DW_OP_plus_uconst. */
19379 op = DW_OP_plus_uconst;
19380 loc_descr = new_loc_descr (op, offset, 0);
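/* So for a field at byte offset 8, for example, the attribute is simply
   DW_OP_plus_uconst 8, applied to the structure address assumed to be
   on the stack.  */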
19381 }
19382 }
19383
19384 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
19385 }
19386
19387 /* Writes integer values to dw_vec_const array. */
19388
19389 static void
19390 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
19391 {
19392 while (size != 0)
19393 {
19394 *dest++ = val & 0xff;
19395 val >>= 8;
19396 --size;
19397 }
19398 }
19399
19400 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
19401
19402 static HOST_WIDE_INT
19403 extract_int (const unsigned char *src, unsigned int size)
19404 {
19405 HOST_WIDE_INT val = 0;
19406
19407 src += size;
19408 while (size != 0)
19409 {
19410 val <<= 8;
19411 val |= *--src & 0xff;
19412 --size;
19413 }
19414 return val;
19415 }
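/* For example, insert_int (0x0102, 2, buf) stores buf[0] = 0x02 and
   buf[1] = 0x01 (least significant byte first), and extract_int (buf, 2)
   recovers 0x0102.  */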
19416
19417 /* Writes wide_int values to dw_vec_const array. */
19418
19419 static void
19420 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
19421 {
19422 int i;
19423
19424 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
19425 {
19426 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
19427 return;
19428 }
19429
19430 /* We'd have to extend this code to support odd sizes. */
19431 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
19432
19433 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
19434
19435 if (WORDS_BIG_ENDIAN)
19436 for (i = n - 1; i >= 0; i--)
19437 {
19438 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19439 dest += sizeof (HOST_WIDE_INT);
19440 }
19441 else
19442 for (i = 0; i < n; i++)
19443 {
19444 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19445 dest += sizeof (HOST_WIDE_INT);
19446 }
19447 }
19448
19449 /* Writes floating point values to dw_vec_const array. */
19450
19451 static void
19452 insert_float (const_rtx rtl, unsigned char *array)
19453 {
19454 long val[4];
19455 int i;
19456 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19457
19458 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
19459
19460 /* real_to_target puts 32-bit pieces in each long. Pack them. */
19461 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
19462 {
19463 insert_int (val[i], 4, array);
19464 array += 4;
19465 }
19466 }
19467
19468 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
19469 does not have a "location" either in memory or in a register. These
19470 things can arise in GNU C when a constant is passed as an actual parameter
19471 to an inlined function. They can also arise in C++ where declared
19472 constants do not necessarily get memory "homes". */
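/* For example, if a call f (42) is inlined and the formal parameter ends up
   with no runtime location at all, its DIE may carry DW_AT_const_value 42
   instead of a DW_AT_location.  */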
19473
19474 static bool
19475 add_const_value_attribute (dw_die_ref die, rtx rtl)
19476 {
19477 switch (GET_CODE (rtl))
19478 {
19479 case CONST_INT:
19480 {
19481 HOST_WIDE_INT val = INTVAL (rtl);
19482
19483 if (val < 0)
19484 add_AT_int (die, DW_AT_const_value, val);
19485 else
19486 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
19487 }
19488 return true;
19489
19490 case CONST_WIDE_INT:
19491 {
19492 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
19493 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
19494 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
19495 wide_int w = wi::zext (w1, prec);
19496 add_AT_wide (die, DW_AT_const_value, w);
19497 }
19498 return true;
19499
19500 case CONST_DOUBLE:
19501 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
19502 floating-point constant. A CONST_DOUBLE is used whenever the
19503 constant requires more than one word in order to be adequately
19504 represented. */
19505 if (TARGET_SUPPORTS_WIDE_INT == 0
19506 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
19507 add_AT_double (die, DW_AT_const_value,
19508 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
19509 else
19510 {
19511 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19512 unsigned int length = GET_MODE_SIZE (mode);
19513 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
19514
19515 insert_float (rtl, array);
19516 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
19517 }
19518 return true;
19519
19520 case CONST_VECTOR:
19521 {
19522 unsigned int length;
19523 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
19524 return false;
19525
19526 machine_mode mode = GET_MODE (rtl);
19527 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
19528 unsigned char *array
19529 = ggc_vec_alloc<unsigned char> (length * elt_size);
19530 unsigned int i;
19531 unsigned char *p;
19532 machine_mode imode = GET_MODE_INNER (mode);
19533
19534 switch (GET_MODE_CLASS (mode))
19535 {
19536 case MODE_VECTOR_INT:
19537 for (i = 0, p = array; i < length; i++, p += elt_size)
19538 {
19539 rtx elt = CONST_VECTOR_ELT (rtl, i);
19540 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
19541 }
19542 break;
19543
19544 case MODE_VECTOR_FLOAT:
19545 for (i = 0, p = array; i < length; i++, p += elt_size)
19546 {
19547 rtx elt = CONST_VECTOR_ELT (rtl, i);
19548 insert_float (elt, p);
19549 }
19550 break;
19551
19552 default:
19553 gcc_unreachable ();
19554 }
19555
19556 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
19557 }
19558 return true;
19559
19560 case CONST_STRING:
19561 if (dwarf_version >= 4 || !dwarf_strict)
19562 {
19563 dw_loc_descr_ref loc_result;
19564 resolve_one_addr (&rtl);
19565 rtl_addr:
19566 loc_result = new_addr_loc_descr (rtl, dtprel_false);
19567 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
19568 add_AT_loc (die, DW_AT_location, loc_result);
19569 vec_safe_push (used_rtx_array, rtl);
19570 return true;
19571 }
19572 return false;
19573
19574 case CONST:
19575 if (CONSTANT_P (XEXP (rtl, 0)))
19576 return add_const_value_attribute (die, XEXP (rtl, 0));
19577 /* FALLTHROUGH */
19578 case SYMBOL_REF:
19579 if (!const_ok_for_output (rtl))
19580 return false;
19581 /* FALLTHROUGH */
19582 case LABEL_REF:
19583 if (dwarf_version >= 4 || !dwarf_strict)
19584 goto rtl_addr;
19585 return false;
19586
19587 case PLUS:
19588 /* In cases where an inlined instance of an inline function is passed
19589 the address of an `auto' variable (which is local to the caller) we
19590 can get a situation where the DECL_RTL of the artificial local
19591 variable (for the inlining) which acts as a stand-in for the
19592 corresponding formal parameter (of the inline function) will look
19593 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
19594 exactly a compile-time constant expression, but it isn't the address
19595 of the (artificial) local variable either. Rather, it represents the
19596 *value* which the artificial local variable always has during its
19597 lifetime. We currently have no way to represent such quasi-constant
19598 values in Dwarf, so for now we just punt and generate nothing. */
19599 return false;
19600
19601 case HIGH:
19602 case CONST_FIXED:
19603 return false;
19604
19605 case MEM:
19606 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
19607 && MEM_READONLY_P (rtl)
19608 && GET_MODE (rtl) == BLKmode)
19609 {
19610 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
19611 return true;
19612 }
19613 return false;
19614
19615 default:
19616 /* No other kinds of rtx should be possible here. */
19617 gcc_unreachable ();
19618 }
19619 return false;
19620 }
19621
19622 /* Determine whether the evaluation of EXPR references any variables
19623 or functions which aren't otherwise used (and therefore may not be
19624 output). */
19625 static tree
19626 reference_to_unused (tree * tp, int * walk_subtrees,
19627 void * data ATTRIBUTE_UNUSED)
19628 {
19629 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
19630 *walk_subtrees = 0;
19631
19632 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
19633 && ! TREE_ASM_WRITTEN (*tp))
19634 return *tp;
19635 /* ??? The C++ FE emits debug information for using decls, so
19636 putting gcc_unreachable here falls over. See PR31899. For now
19637 be conservative. */
19638 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
19639 return *tp;
19640 else if (VAR_P (*tp))
19641 {
19642 varpool_node *node = varpool_node::get (*tp);
19643 if (!node || !node->definition)
19644 return *tp;
19645 }
19646 else if (TREE_CODE (*tp) == FUNCTION_DECL
19647 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
19648 {
19649 /* The call graph machinery must have finished analyzing,
19650 optimizing and gimplifying the CU by now.
19651 So if *TP has no call graph node associated
19652 to it, it means *TP will not be emitted. */
19653 if (!cgraph_node::get (*tp))
19654 return *tp;
19655 }
19656 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
19657 return *tp;
19658
19659 return NULL_TREE;
19660 }
19661
19662 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
19663 for use in a later add_const_value_attribute call. */
19664
19665 static rtx
19666 rtl_for_decl_init (tree init, tree type)
19667 {
19668 rtx rtl = NULL_RTX;
19669
19670 STRIP_NOPS (init);
19671
19672 /* If a variable is initialized with a string constant without embedded
19673 zeros, build CONST_STRING. */
19674 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
19675 {
19676 tree enttype = TREE_TYPE (type);
19677 tree domain = TYPE_DOMAIN (type);
19678 scalar_int_mode mode;
19679
19680 if (is_int_mode (TYPE_MODE (enttype), &mode)
19681 && GET_MODE_SIZE (mode) == 1
19682 && domain
19683 && TYPE_MAX_VALUE (domain)
19684 && TREE_CODE (TYPE_MAX_VALUE (domain)) == INTEGER_CST
19685 && integer_zerop (TYPE_MIN_VALUE (domain))
19686 && compare_tree_int (TYPE_MAX_VALUE (domain),
19687 TREE_STRING_LENGTH (init) - 1) == 0
19688 && ((size_t) TREE_STRING_LENGTH (init)
19689 == strlen (TREE_STRING_POINTER (init)) + 1))
19690 {
19691 rtl = gen_rtx_CONST_STRING (VOIDmode,
19692 ggc_strdup (TREE_STRING_POINTER (init)));
19693 rtl = gen_rtx_MEM (BLKmode, rtl);
19694 MEM_READONLY_P (rtl) = 1;
19695 }
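/* For example, static const char msg[] = "abc" passes the checks above
   and becomes a read-only BLKmode MEM wrapping a CONST_STRING, which
   add_const_value_attribute later emits as a DW_AT_const_value string.  */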
19696 }
19697 /* Other aggregates, and complex values, could be represented using
19698 CONCAT: FIXME! */
19699 else if (AGGREGATE_TYPE_P (type)
19700 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
19701 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
19702 || TREE_CODE (type) == COMPLEX_TYPE)
19703 ;
19704 /* Vectors only work if their mode is supported by the target.
19705 FIXME: generic vectors ought to work too. */
19706 else if (TREE_CODE (type) == VECTOR_TYPE
19707 && !VECTOR_MODE_P (TYPE_MODE (type)))
19708 ;
19709 /* If the initializer is something that we know will expand into an
19710 immediate RTL constant, expand it now. We must be careful not to
19711 reference variables which won't be output. */
19712 else if (initializer_constant_valid_p (init, type)
19713 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19714 {
19715 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19716 possible. */
19717 if (TREE_CODE (type) == VECTOR_TYPE)
19718 switch (TREE_CODE (init))
19719 {
19720 case VECTOR_CST:
19721 break;
19722 case CONSTRUCTOR:
19723 if (TREE_CONSTANT (init))
19724 {
19725 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19726 bool constant_p = true;
19727 tree value;
19728 unsigned HOST_WIDE_INT ix;
19729
19730 /* Even when ctor is constant, it might contain non-*_CST
19731 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19732 belong in VECTOR_CST nodes. */
19733 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19734 if (!CONSTANT_CLASS_P (value))
19735 {
19736 constant_p = false;
19737 break;
19738 }
19739
19740 if (constant_p)
19741 {
19742 init = build_vector_from_ctor (type, elts);
19743 break;
19744 }
19745 }
19746 /* FALLTHRU */
19747
19748 default:
19749 return NULL;
19750 }
19751
19752 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19753
19754 /* If expand_expr returns a MEM, it wasn't immediate. */
19755 gcc_assert (!rtl || !MEM_P (rtl));
19756 }
19757
19758 return rtl;
19759 }
19760
19761 /* Generate RTL for the variable DECL to represent its location. */
19762
19763 static rtx
19764 rtl_for_decl_location (tree decl)
19765 {
19766 rtx rtl;
19767
19768 /* Here we have to decide where we are going to say the parameter "lives"
19769 (as far as the debugger is concerned). We only have a couple of
19770 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
19771
19772 DECL_RTL normally indicates where the parameter lives during most of the
19773 activation of the function. If optimization is enabled however, this
19774 could be either NULL or else a pseudo-reg. Both of those cases indicate
19775 that the parameter doesn't really live anywhere (as far as the code
19776 generation parts of GCC are concerned) during most of the function's
19777 activation. That will happen (for example) if the parameter is never
19778 referenced within the function.
19779
19780 We could just generate a location descriptor here for all non-NULL
19781 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19782 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19783 where DECL_RTL is NULL or is a pseudo-reg.
19784
19785 Note however that we can only get away with using DECL_INCOMING_RTL as
19786 a backup substitute for DECL_RTL in certain limited cases. In cases
19787 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19788 we can be sure that the parameter was passed using the same type as it is
19789 declared to have within the function, and that its DECL_INCOMING_RTL
19790 points us to a place where a value of that type is passed.
19791
19792 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19793 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19794 because in these cases DECL_INCOMING_RTL points us to a value of some
19795 type which is *different* from the type of the parameter itself. Thus,
19796 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19797 such cases, the debugger would end up (for example) trying to fetch a
19798 `float' from a place which actually contains the first part of a
19799 `double'. That would lead to really incorrect and confusing
19800 output at debug-time.
19801
19802 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19803 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19804 are a couple of exceptions however. On little-endian machines we can
19805 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19806 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19807 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19808 when (on a little-endian machine) a non-prototyped function has a
19809 parameter declared to be of type `short' or `char'. In such cases,
19810 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
19811 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
19812 passed `int' value. If the debugger then uses that address to fetch
19813 a `short' or a `char' (on a little-endian machine) the result will be
19814 the correct data, so we allow for such exceptional cases below.
19815
19816 Note that our goal here is to describe the place where the given formal
19817 parameter lives during most of the function's activation (i.e. between the
19818 end of the prologue and the start of the epilogue). We'll do that as best
19819 as we can. Note however that if the given formal parameter is modified
19820 sometime during the execution of the function, then a stack backtrace (at
19821 debug-time) will show the function as having been called with the *new*
19822 value rather than the value which was originally passed in. This happens
19823 rarely enough that it is not a major problem, but it *is* a problem, and
19824 I'd like to fix it.
19825
19826 A future version of dwarf2out.c may generate two additional attributes for
19827 any given DW_TAG_formal_parameter DIE which will describe the "passed
19828 type" and the "passed location" for the given formal parameter in addition
19829 to the attributes we now generate to indicate the "declared type" and the
19830 "active location" for each parameter. This additional set of attributes
19831 could be used by debuggers for stack backtraces. Separately, note that
19832 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
19833 This happens (for example) for inlined-instances of inline function formal
19834 parameters which are never referenced. This really shouldn't be
19835 happening. All PARM_DECL nodes should get valid non-NULL
19836 DECL_INCOMING_RTL values. FIXME. */
19837
19838 /* Use DECL_RTL as the "location" unless we find something better. */
19839 rtl = DECL_RTL_IF_SET (decl);
19840
19841 /* When generating abstract instances, ignore everything except
19842 constants, symbols living in memory, and symbols living in
19843 fixed registers. */
19844 if (! reload_completed)
19845 {
19846 if (rtl
19847 && (CONSTANT_P (rtl)
19848 || (MEM_P (rtl)
19849 && CONSTANT_P (XEXP (rtl, 0)))
19850 || (REG_P (rtl)
19851 && VAR_P (decl)
19852 && TREE_STATIC (decl))))
19853 {
19854 rtl = targetm.delegitimize_address (rtl);
19855 return rtl;
19856 }
19857 rtl = NULL_RTX;
19858 }
19859 else if (TREE_CODE (decl) == PARM_DECL)
19860 {
19861 if (rtl == NULL_RTX
19862 || is_pseudo_reg (rtl)
19863 || (MEM_P (rtl)
19864 && is_pseudo_reg (XEXP (rtl, 0))
19865 && DECL_INCOMING_RTL (decl)
19866 && MEM_P (DECL_INCOMING_RTL (decl))
19867 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
19868 {
19869 tree declared_type = TREE_TYPE (decl);
19870 tree passed_type = DECL_ARG_TYPE (decl);
19871 machine_mode dmode = TYPE_MODE (declared_type);
19872 machine_mode pmode = TYPE_MODE (passed_type);
19873
19874 /* This decl represents a formal parameter which was optimized out.
19875 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
19876 all cases where (rtl == NULL_RTX) just below. */
19877 if (dmode == pmode)
19878 rtl = DECL_INCOMING_RTL (decl);
19879 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
19880 && SCALAR_INT_MODE_P (dmode)
19881 && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
19882 && DECL_INCOMING_RTL (decl))
19883 {
19884 rtx inc = DECL_INCOMING_RTL (decl);
19885 if (REG_P (inc))
19886 rtl = inc;
19887 else if (MEM_P (inc))
19888 {
19889 if (BYTES_BIG_ENDIAN)
19890 rtl = adjust_address_nv (inc, dmode,
19891 GET_MODE_SIZE (pmode)
19892 - GET_MODE_SIZE (dmode));
19893 else
19894 rtl = inc;
19895 }
19896 }
19897 }
19898
19899 /* If the parm was passed in registers, but lives on the stack, then
19900 make a big endian correction if the mode of the type of the
19901 parameter is not the same as the mode of the rtl. */
19902 /* ??? This is the same series of checks that are made in dbxout.c before
19903 we reach the big endian correction code there. It isn't clear if all
19904 of these checks are necessary here, but keeping them all is the safe
19905 thing to do. */
19906 else if (MEM_P (rtl)
19907 && XEXP (rtl, 0) != const0_rtx
19908 && ! CONSTANT_P (XEXP (rtl, 0))
19909 /* Not passed in memory. */
19910 && !MEM_P (DECL_INCOMING_RTL (decl))
19911 /* Not passed by invisible reference. */
19912 && (!REG_P (XEXP (rtl, 0))
19913 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
19914 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
19915 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
19916 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
19917 #endif
19918 )
19919 /* Big endian correction check. */
19920 && BYTES_BIG_ENDIAN
19921 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
19922 && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
19923 UNITS_PER_WORD))
19924 {
19925 machine_mode addr_mode = get_address_mode (rtl);
19926 poly_int64 offset = (UNITS_PER_WORD
19927 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
19928
19929 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19930 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19931 }
19932 }
19933 else if (VAR_P (decl)
19934 && rtl
19935 && MEM_P (rtl)
19936 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
19937 {
19938 machine_mode addr_mode = get_address_mode (rtl);
19939 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
19940 GET_MODE (rtl));
19941
19942 /* If a variable is declared "register" yet is smaller than
19943 a register, then if we store the variable to memory, it
19944 looks like we're storing a register-sized value, when in
19945 fact we are not. We need to adjust the offset of the
19946 storage location to reflect the actual value's bytes,
19947 else gdb will not be able to display it. */
19948 if (maybe_ne (offset, 0))
19949 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19950 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19951 }
19952
19953 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
19954 and will have been substituted directly into all expressions that use it.
19955 C does not have such a concept, but C++ and other languages do. */
19956 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
19957 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
19958
19959 if (rtl)
19960 rtl = targetm.delegitimize_address (rtl);
19961
19962 /* If we don't look past the constant pool, we risk emitting a
19963 reference to a constant pool entry that isn't referenced from
19964 code, and thus is not emitted. */
19965 if (rtl)
19966 rtl = avoid_constant_pool_reference (rtl);
19967
19968 /* Try harder to get a rtl. If this symbol ends up not being emitted
19969 in the current CU, resolve_addr will remove the expression referencing
19970 it. */
19971 if (rtl == NULL_RTX
19972 && !(early_dwarf && (flag_generate_lto || flag_generate_offload))
19973 && VAR_P (decl)
19974 && !DECL_EXTERNAL (decl)
19975 && TREE_STATIC (decl)
19976 && DECL_NAME (decl)
19977 && !DECL_HARD_REGISTER (decl)
19978 && DECL_MODE (decl) != VOIDmode)
19979 {
19980 rtl = make_decl_rtl_for_debug (decl);
19981 if (!MEM_P (rtl)
19982 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
19983 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
19984 rtl = NULL_RTX;
19985 }
19986
19987 return rtl;
19988 }
19989
19990 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
19991 returned. If so, the decl for the COMMON block is returned, and the
19992 value is the offset into the common block for the symbol. */
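/* For example, for Fortran code like
       INTEGER I, J
       COMMON /BLK/ I, J
   the decl for J has a DECL_VALUE_EXPR that is a COMPONENT_REF into the
   variable representing BLK; for J this function would return that variable
   and set *VALUE to 4 (assuming 4-byte INTEGERs).  */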
19993
19994 static tree
19995 fortran_common (tree decl, HOST_WIDE_INT *value)
19996 {
19997 tree val_expr, cvar;
19998 machine_mode mode;
19999 poly_int64 bitsize, bitpos;
20000 tree offset;
20001 HOST_WIDE_INT cbitpos;
20002 int unsignedp, reversep, volatilep = 0;
20003
20004 /* If the decl isn't a VAR_DECL, or if it isn't static, or if
20005 it does not have a value (the offset into the common area), or if it
20006 is thread local (as opposed to global) then it isn't common, and shouldn't
20007 be handled as such. */
20008 if (!VAR_P (decl)
20009 || !TREE_STATIC (decl)
20010 || !DECL_HAS_VALUE_EXPR_P (decl)
20011 || !is_fortran ())
20012 return NULL_TREE;
20013
20014 val_expr = DECL_VALUE_EXPR (decl);
20015 if (TREE_CODE (val_expr) != COMPONENT_REF)
20016 return NULL_TREE;
20017
20018 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
20019 &unsignedp, &reversep, &volatilep);
20020
20021 if (cvar == NULL_TREE
20022 || !VAR_P (cvar)
20023 || DECL_ARTIFICIAL (cvar)
20024 || !TREE_PUBLIC (cvar)
20025 /* We don't expect to have to cope with variable offsets,
20026 since at present all static data must have a constant size. */
20027 || !bitpos.is_constant (&cbitpos))
20028 return NULL_TREE;
20029
20030 *value = 0;
20031 if (offset != NULL)
20032 {
20033 if (!tree_fits_shwi_p (offset))
20034 return NULL_TREE;
20035 *value = tree_to_shwi (offset);
20036 }
20037 if (cbitpos != 0)
20038 *value += cbitpos / BITS_PER_UNIT;
20039
20040 return cvar;
20041 }
20042
20043 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
20044 data attribute for a variable or a parameter. We generate the
20045 DW_AT_const_value attribute only in those cases where the given variable
20046 or parameter does not have a true "location" either in memory or in a
20047 register. This can happen (for example) when a constant is passed as an
20048 actual argument in a call to an inline function. (It's possible that
20049 these things can crop up in other ways also.) Note that one type of
20050 constant value which can be passed into an inlined function is a constant
20051 pointer. This can happen for example if an actual argument in an inlined
20052 function call evaluates to a compile-time constant address.
20053
20054 CACHE_P is true if it is worth caching the location list for DECL,
20055 so that future calls can reuse it rather than regenerate it from scratch.
20056 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
20057 since we will need to refer to them each time the function is inlined. */
20058
20059 static bool
20060 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
20061 {
20062 rtx rtl;
20063 dw_loc_list_ref list;
20064 var_loc_list *loc_list;
20065 cached_dw_loc_list *cache;
20066
20067 if (early_dwarf)
20068 return false;
20069
20070 if (TREE_CODE (decl) == ERROR_MARK)
20071 return false;
20072
20073 if (get_AT (die, DW_AT_location)
20074 || get_AT (die, DW_AT_const_value))
20075 return true;
20076
20077 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
20078 || TREE_CODE (decl) == RESULT_DECL);
20079
20080 /* Try to get some constant RTL for this decl, and use that as the value of
20081 the location. */
20082
20083 rtl = rtl_for_decl_location (decl);
20084 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20085 && add_const_value_attribute (die, rtl))
20086 return true;
20087
20088 /* See if we have a single-element location list that is equivalent to
20089 a constant value. In that case it is better to use add_const_value_attribute
20090 rather than expanding the equivalent constant value. */
20091 loc_list = lookup_decl_loc (decl);
20092 if (loc_list
20093 && loc_list->first
20094 && loc_list->first->next == NULL
20095 && NOTE_P (loc_list->first->loc)
20096 && NOTE_VAR_LOCATION (loc_list->first->loc)
20097 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
20098 {
20099 struct var_loc_node *node;
20100
20101 node = loc_list->first;
20102 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
20103 if (GET_CODE (rtl) == EXPR_LIST)
20104 rtl = XEXP (rtl, 0);
20105 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20106 && add_const_value_attribute (die, rtl))
20107 return true;
20108 }
20109 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
20110 list several times. See if we've already cached the contents. */
20111 list = NULL;
20112 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
20113 cache_p = false;
20114 if (cache_p)
20115 {
20116 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
20117 if (cache)
20118 list = cache->loc_list;
20119 }
20120 if (list == NULL)
20121 {
20122 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
20123 NULL);
20124 /* It is usually worth caching this result if the decl is from
20125 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
20126 if (cache_p && list && list->dw_loc_next)
20127 {
20128 cached_dw_loc_list **slot
20129 = cached_dw_loc_list_table->find_slot_with_hash (decl,
20130 DECL_UID (decl),
20131 INSERT);
20132 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
20133 cache->decl_id = DECL_UID (decl);
20134 cache->loc_list = list;
20135 *slot = cache;
20136 }
20137 }
20138 if (list)
20139 {
20140 add_AT_location_description (die, DW_AT_location, list);
20141 return true;
20142 }
20143 /* None of that worked, so it must not really have a location;
20144 try adding a constant value attribute from the DECL_INITIAL. */
20145 return tree_add_const_value_attribute_for_decl (die, decl);
20146 }
20147
20148 /* Helper function for tree_add_const_value_attribute. Natively encode
20149 initializer INIT into an array. Return true if successful. */
20150
20151 static bool
20152 native_encode_initializer (tree init, unsigned char *array, int size)
20153 {
20154 tree type;
20155
20156 if (init == NULL_TREE)
20157 return false;
20158
20159 STRIP_NOPS (init);
20160 switch (TREE_CODE (init))
20161 {
20162 case STRING_CST:
20163 type = TREE_TYPE (init);
20164 if (TREE_CODE (type) == ARRAY_TYPE)
20165 {
20166 tree enttype = TREE_TYPE (type);
20167 scalar_int_mode mode;
20168
20169 if (!is_int_mode (TYPE_MODE (enttype), &mode)
20170 || GET_MODE_SIZE (mode) != 1)
20171 return false;
20172 if (int_size_in_bytes (type) != size)
20173 return false;
20174 if (size > TREE_STRING_LENGTH (init))
20175 {
20176 memcpy (array, TREE_STRING_POINTER (init),
20177 TREE_STRING_LENGTH (init));
20178 memset (array + TREE_STRING_LENGTH (init),
20179 '\0', size - TREE_STRING_LENGTH (init));
20180 }
20181 else
20182 memcpy (array, TREE_STRING_POINTER (init), size);
20183 return true;
20184 }
20185 return false;
20186 case CONSTRUCTOR:
20187 type = TREE_TYPE (init);
20188 if (int_size_in_bytes (type) != size)
20189 return false;
20190 if (TREE_CODE (type) == ARRAY_TYPE)
20191 {
20192 HOST_WIDE_INT min_index;
20193 unsigned HOST_WIDE_INT cnt;
20194 int curpos = 0, fieldsize;
20195 constructor_elt *ce;
20196
20197 if (TYPE_DOMAIN (type) == NULL_TREE
20198 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
20199 return false;
20200
20201 fieldsize = int_size_in_bytes (TREE_TYPE (type));
20202 if (fieldsize <= 0)
20203 return false;
20204
20205 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
20206 memset (array, '\0', size);
20207 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20208 {
20209 tree val = ce->value;
20210 tree index = ce->index;
20211 int pos = curpos;
20212 if (index && TREE_CODE (index) == RANGE_EXPR)
20213 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
20214 * fieldsize;
20215 else if (index)
20216 pos = (tree_to_shwi (index) - min_index) * fieldsize;
20217
20218 if (val)
20219 {
20220 STRIP_NOPS (val);
20221 if (!native_encode_initializer (val, array + pos, fieldsize))
20222 return false;
20223 }
20224 curpos = pos + fieldsize;
20225 if (index && TREE_CODE (index) == RANGE_EXPR)
20226 {
20227 int count = tree_to_shwi (TREE_OPERAND (index, 1))
20228 - tree_to_shwi (TREE_OPERAND (index, 0));
20229 while (count-- > 0)
20230 {
20231 if (val)
20232 memcpy (array + curpos, array + pos, fieldsize);
20233 curpos += fieldsize;
20234 }
20235 }
20236 gcc_assert (curpos <= size);
20237 }
20238 return true;
20239 }
20240 else if (TREE_CODE (type) == RECORD_TYPE
20241 || TREE_CODE (type) == UNION_TYPE)
20242 {
20243 tree field = NULL_TREE;
20244 unsigned HOST_WIDE_INT cnt;
20245 constructor_elt *ce;
20246
20247 if (int_size_in_bytes (type) != size)
20248 return false;
20249
20250 if (TREE_CODE (type) == RECORD_TYPE)
20251 field = TYPE_FIELDS (type);
20252
20253 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20254 {
20255 tree val = ce->value;
20256 int pos, fieldsize;
20257
20258 if (ce->index != 0)
20259 field = ce->index;
20260
20261 if (val)
20262 STRIP_NOPS (val);
20263
20264 if (field == NULL_TREE || DECL_BIT_FIELD (field))
20265 return false;
20266
20267 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
20268 && TYPE_DOMAIN (TREE_TYPE (field))
20269 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
20270 return false;
20271 else if (DECL_SIZE_UNIT (field) == NULL_TREE
20272 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
20273 return false;
20274 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
20275 pos = int_byte_position (field);
20276 gcc_assert (pos + fieldsize <= size);
20277 if (val && fieldsize != 0
20278 && !native_encode_initializer (val, array + pos, fieldsize))
20279 return false;
20280 }
20281 return true;
20282 }
20283 return false;
20284 case VIEW_CONVERT_EXPR:
20285 case NON_LVALUE_EXPR:
20286 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
20287 default:
20288 return native_encode_expr (init, array, size) == size;
20289 }
20290 }
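/* For example, an initializer { 3, 4 } for int[2] fills the 8-byte array
   with 03 00 00 00 04 00 00 00 on a little-endian target; the caller
   (tree_add_const_value_attribute) then emits it as a DW_AT_const_value
   block.  */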
20291
20292 /* Attach a DW_AT_const_value attribute to DIE. The value of the
20293 attribute is the const value T. */
20294
20295 static bool
20296 tree_add_const_value_attribute (dw_die_ref die, tree t)
20297 {
20298 tree init;
20299 tree type = TREE_TYPE (t);
20300 rtx rtl;
20301
20302 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
20303 return false;
20304
20305 init = t;
20306 gcc_assert (!DECL_P (init));
20307
20308 if (TREE_CODE (init) == INTEGER_CST)
20309 {
20310 if (tree_fits_uhwi_p (init))
20311 {
20312 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
20313 return true;
20314 }
20315 if (tree_fits_shwi_p (init))
20316 {
20317 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
20318 return true;
20319 }
20320 }
20321 if (! early_dwarf)
20322 {
20323 rtl = rtl_for_decl_init (init, type);
20324 if (rtl)
20325 return add_const_value_attribute (die, rtl);
20326 }
20327 /* If the host and target are sane, try harder. */
20328 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
20329 && initializer_constant_valid_p (init, type))
20330 {
20331 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
20332 if (size > 0 && (int) size == size)
20333 {
20334 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
20335
20336 if (native_encode_initializer (init, array, size))
20337 {
20338 add_AT_vec (die, DW_AT_const_value, size, 1, array);
20339 return true;
20340 }
20341 ggc_free (array);
20342 }
20343 }
20344 return false;
20345 }
20346
20347 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
20348 attribute is the const value of T, where T is an integral constant
20349 variable with static storage duration
20350 (so it can't be a PARM_DECL or a RESULT_DECL). */
20351
20352 static bool
20353 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
20354 {
20355
20356 if (!decl
20357 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
20358 || (VAR_P (decl) && !TREE_STATIC (decl)))
20359 return false;
20360
20361 if (TREE_READONLY (decl)
20362 && ! TREE_THIS_VOLATILE (decl)
20363 && DECL_INITIAL (decl))
20364 /* OK */;
20365 else
20366 return false;
20367
20368 /* Don't add DW_AT_const_value if abstract origin already has one. */
20369 if (get_AT (var_die, DW_AT_const_value))
20370 return false;
20371
20372 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
20373 }
20374
20375 /* Convert the CFI instructions for the current function into a
20376 location list. This is used for DW_AT_frame_base when we are targeting
20377 a dwarf2 consumer that does not support the dwarf3
20378 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
20379 expressions. */
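/* The result is a list with one entry per range over which the CFA is
   constant.  On a typical target it might look roughly like:
     [func_begin, after_prologue)  DW_OP_breg<sp> <off1>
     [after_prologue, func_end)    DW_OP_breg<fp> <off2>
   where the registers and offsets depend on the prologue actually
   emitted.  */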
20380
20381 static dw_loc_list_ref
20382 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
20383 {
20384 int ix;
20385 dw_fde_ref fde;
20386 dw_loc_list_ref list, *list_tail;
20387 dw_cfi_ref cfi;
20388 dw_cfa_location last_cfa, next_cfa;
20389 const char *start_label, *last_label, *section;
20390 dw_cfa_location remember;
20391
20392 fde = cfun->fde;
20393 gcc_assert (fde != NULL);
20394
20395 section = secname_for_decl (current_function_decl);
20396 list_tail = &list;
20397 list = NULL;
20398
20399 memset (&next_cfa, 0, sizeof (next_cfa));
20400 next_cfa.reg = INVALID_REGNUM;
20401 remember = next_cfa;
20402
20403 start_label = fde->dw_fde_begin;
20404
20405 /* ??? Bald assumption that the CIE opcode list does not contain
20406 advance opcodes. */
20407 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
20408 lookup_cfa_1 (cfi, &next_cfa, &remember);
20409
20410 last_cfa = next_cfa;
20411 last_label = start_label;
20412
20413 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
20414 {
20415 /* If the first partition contained no CFI adjustments, the
20416 CIE opcodes apply to the whole first partition. */
20417 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20418 fde->dw_fde_begin, 0, fde->dw_fde_end, 0, section);
20419 list_tail = &(*list_tail)->dw_loc_next;
20420 start_label = last_label = fde->dw_fde_second_begin;
20421 }
20422
20423 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
20424 {
20425 switch (cfi->dw_cfi_opc)
20426 {
20427 case DW_CFA_set_loc:
20428 case DW_CFA_advance_loc1:
20429 case DW_CFA_advance_loc2:
20430 case DW_CFA_advance_loc4:
20431 if (!cfa_equal_p (&last_cfa, &next_cfa))
20432 {
20433 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20434 start_label, 0, last_label, 0, section);
20435
20436 list_tail = &(*list_tail)->dw_loc_next;
20437 last_cfa = next_cfa;
20438 start_label = last_label;
20439 }
20440 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
20441 break;
20442
20443 case DW_CFA_advance_loc:
20444 /* The encoding is complex enough that we should never emit this. */
20445 gcc_unreachable ();
20446
20447 default:
20448 lookup_cfa_1 (cfi, &next_cfa, &remember);
20449 break;
20450 }
20451 if (ix + 1 == fde->dw_fde_switch_cfi_index)
20452 {
20453 if (!cfa_equal_p (&last_cfa, &next_cfa))
20454 {
20455 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20456 start_label, 0, last_label, 0, section);
20457
20458 list_tail = &(*list_tail)->dw_loc_next;
20459 last_cfa = next_cfa;
20460 start_label = last_label;
20461 }
20462 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20463 start_label, 0, fde->dw_fde_end, 0, section);
20464 list_tail = &(*list_tail)->dw_loc_next;
20465 start_label = last_label = fde->dw_fde_second_begin;
20466 }
20467 }
20468
20469 if (!cfa_equal_p (&last_cfa, &next_cfa))
20470 {
20471 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20472 start_label, 0, last_label, 0, section);
20473 list_tail = &(*list_tail)->dw_loc_next;
20474 start_label = last_label;
20475 }
20476
20477 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
20478 start_label, 0,
20479 fde->dw_fde_second_begin
20480 ? fde->dw_fde_second_end : fde->dw_fde_end, 0,
20481 section);
20482
20483 maybe_gen_llsym (list);
20484
20485 return list;
20486 }
20487
20488 /* Compute a displacement from the "steady-state frame pointer" to the
20489 frame base (often the same as the CFA), and store it in
20490 frame_pointer_fb_offset. OFFSET is added to the displacement
20491 before the latter is negated. */
20492
20493 static void
20494 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
20495 {
20496 rtx reg, elim;
20497
20498 #ifdef FRAME_POINTER_CFA_OFFSET
20499 reg = frame_pointer_rtx;
20500 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
20501 #else
20502 reg = arg_pointer_rtx;
20503 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
20504 #endif
20505
20506 elim = (ira_use_lra_p
20507 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
20508 : eliminate_regs (reg, VOIDmode, NULL_RTX));
20509 elim = strip_offset_and_add (elim, &offset);
20510
20511 frame_pointer_fb_offset = -offset;
20512
20513 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
20514 in which to eliminate. This is because its stack pointer isn't
20515 directly accessible as a register within the ISA. To work around
20516 this, assume that while we cannot provide a proper value for
20517 frame_pointer_fb_offset, we won't need one either. We can use
20518 hard frame pointer in debug info even if frame pointer isn't used
20519 since hard frame pointer in debug info is encoded with DW_OP_fbreg
20520 which uses the DW_AT_frame_base attribute, not hard frame pointer
20521 directly. */
20522 frame_pointer_fb_offset_valid
20523 = (elim == hard_frame_pointer_rtx || elim == stack_pointer_rtx);
20524 }
20525
20526 /* Generate a DW_AT_name attribute given some string value to be included as
20527 the value of the attribute. */
20528
20529 static void
20530 add_name_attribute (dw_die_ref die, const char *name_string)
20531 {
20532 if (name_string != NULL && *name_string != 0)
20533 {
20534 if (demangle_name_func)
20535 name_string = (*demangle_name_func) (name_string);
20536
20537 add_AT_string (die, DW_AT_name, name_string);
20538 }
20539 }
20540
20541 /* Generate a DW_AT_description attribute given some string value to be included
20542 as the value of the attribute. */
20543
20544 static void
20545 add_desc_attribute (dw_die_ref die, const char *name_string)
20546 {
20547 if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict))
20548 return;
20549
20550 if (name_string == NULL || *name_string == 0)
20551 return;
20552
20553 if (demangle_name_func)
20554 name_string = (*demangle_name_func) (name_string);
20555
20556 add_AT_string (die, DW_AT_description, name_string);
20557 }
20558
20559 /* Generate a DW_AT_description attribute given some decl to be included
20560 as the value of the attribute. */
20561
20562 static void
20563 add_desc_attribute (dw_die_ref die, tree decl)
20564 {
20565 tree decl_name;
20566
20567 if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict))
20568 return;
20569
20570 if (decl == NULL_TREE || !DECL_P (decl))
20571 return;
20572 decl_name = DECL_NAME (decl);
20573
20574 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
20575 {
20576 const char *name = dwarf2_name (decl, 0);
20577 add_desc_attribute (die, name ? name : IDENTIFIER_POINTER (decl_name));
20578 }
20579 else
20580 {
20581 char *desc = print_generic_expr_to_str (decl);
20582 add_desc_attribute (die, desc);
20583 free (desc);
20584 }
20585 }
20586
20587 /* Retrieve the descriptive type of TYPE, if any, make sure it has a
20588 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
20589 of TYPE accordingly.
20590
20591 ??? This is a temporary measure until we're able to generate
20592 regular DWARF for the complex Ada type system. */
20593
20594 static void
20595 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
20596 dw_die_ref context_die)
20597 {
20598 tree dtype;
20599 dw_die_ref dtype_die;
20600
20601 if (!lang_hooks.types.descriptive_type)
20602 return;
20603
20604 dtype = lang_hooks.types.descriptive_type (type);
20605 if (!dtype)
20606 return;
20607
20608 dtype_die = lookup_type_die (dtype);
20609 if (!dtype_die)
20610 {
20611 gen_type_die (dtype, context_die);
20612 dtype_die = lookup_type_die (dtype);
20613 gcc_assert (dtype_die);
20614 }
20615
20616 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
20617 }
20618
20619 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
20620
20621 static const char *
20622 comp_dir_string (void)
20623 {
20624 const char *wd;
20625 char *wd1;
20626 static const char *cached_wd = NULL;
20627
20628 if (cached_wd != NULL)
20629 return cached_wd;
20630
20631 wd = get_src_pwd ();
20632 if (wd == NULL)
20633 return NULL;
20634
20635 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
20636 {
20637 int wdlen;
20638
20639 wdlen = strlen (wd);
20640 wd1 = ggc_vec_alloc<char> (wdlen + 2);
20641 strcpy (wd1, wd);
20642 wd1 [wdlen] = DIR_SEPARATOR;
20643 wd1 [wdlen + 1] = 0;
20644 wd = wd1;
20645 }
20646
20647 cached_wd = remap_debug_filename (wd);
20648 return cached_wd;
20649 }
20650
20651 /* Generate a DW_AT_comp_dir attribute for DIE. */
20652
20653 static void
20654 add_comp_dir_attribute (dw_die_ref die)
20655 {
20656 const char * wd = comp_dir_string ();
20657 if (wd != NULL)
20658 add_AT_string (die, DW_AT_comp_dir, wd);
20659 }
20660
20661 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
20662 pointer computation, ...), output a representation for that bound according
20663 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
20664 loc_list_from_tree for the meaning of CONTEXT. */
20665
20666 static void
20667 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
20668 int forms, struct loc_descr_context *context)
20669 {
20670 dw_die_ref context_die, decl_die = NULL;
20671 dw_loc_list_ref list;
20672 bool strip_conversions = true;
20673 bool placeholder_seen = false;
20674
20675 while (strip_conversions)
20676 switch (TREE_CODE (value))
20677 {
20678 case ERROR_MARK:
20679 case SAVE_EXPR:
20680 return;
20681
20682 CASE_CONVERT:
20683 case VIEW_CONVERT_EXPR:
20684 value = TREE_OPERAND (value, 0);
20685 break;
20686
20687 default:
20688 strip_conversions = false;
20689 break;
20690 }
20691
20692 /* If possible and permitted, output the attribute as a constant. */
20693 if ((forms & dw_scalar_form_constant) != 0
20694 && TREE_CODE (value) == INTEGER_CST)
20695 {
20696 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
20697
20698 /* If HOST_WIDE_INT is big enough then represent the bound as
20699 a constant value. We need to choose a form based on
20700 whether the type is signed or unsigned. We cannot just
20701 call add_AT_unsigned if the value itself is positive
20702 (add_AT_unsigned might add the unsigned value encoded as
20703 DW_FORM_data[1248]). Some DWARF consumers will look up the
20704 bounds type and then sign extend any unsigned values found
20705 for signed types. This is needed only for
20706 DW_AT_{lower,upper}_bound, since for most other attributes,
20707 consumers will treat DW_FORM_data[1248] as unsigned values,
20708 regardless of the underlying type. */
20709 if (prec <= HOST_BITS_PER_WIDE_INT
20710 || tree_fits_uhwi_p (value))
20711 {
20712 if (TYPE_UNSIGNED (TREE_TYPE (value)))
20713 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
20714 else
20715 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
20716 }
20717 else
20718 /* Otherwise represent the bound as an unsigned value with
20719 the precision of its type. The precision and signedness
20720 of the type will be necessary to re-interpret it
20721 unambiguously. */
20722 add_AT_wide (die, attr, wi::to_wide (value));
20723 return;
20724 }
20725
20726 /* Otherwise, if it's possible and permitted too, output a reference to
20727 another DIE. */
20728 if ((forms & dw_scalar_form_reference) != 0)
20729 {
20730 tree decl = NULL_TREE;
20731
20732 /* Some type attributes reference an outer type. For instance, the upper
20733 bound of an array may reference an embedding record (this happens in
20734 Ada). */
20735 if (TREE_CODE (value) == COMPONENT_REF
20736 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
20737 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
20738 decl = TREE_OPERAND (value, 1);
20739
20740 else if (VAR_P (value)
20741 || TREE_CODE (value) == PARM_DECL
20742 || TREE_CODE (value) == RESULT_DECL)
20743 decl = value;
20744
20745 if (decl != NULL_TREE)
20746 {
20747 decl_die = lookup_decl_die (decl);
20748
20749 /* ??? Can this happen, or should the variable have been bound
20750 first? Probably it can, since I imagine that we try to create
20751 the types of parameters in the order in which they exist in
20752 the list, and won't have created a forward reference to a
20753 later parameter. */
20754 if (decl_die != NULL)
20755 {
20756 if (get_AT (decl_die, DW_AT_location)
20757 || get_AT (decl_die, DW_AT_const_value))
20758 {
20759 add_AT_die_ref (die, attr, decl_die);
20760 return;
20761 }
20762 }
20763 }
20764 }
20765
20766 /* Last chance: try to create a stack operation procedure to evaluate the
20767 value. Do nothing if even that is not possible or permitted. */
20768 if ((forms & dw_scalar_form_exprloc) == 0)
20769 return;
20770
20771 list = loc_list_from_tree (value, 2, context);
20772 if (context && context->placeholder_arg)
20773 {
20774 placeholder_seen = context->placeholder_seen;
20775 context->placeholder_seen = false;
20776 }
20777 if (list == NULL || single_element_loc_list_p (list))
20778 {
20779 /* If this attribute is neither a reference nor a constant, it is
20780 a DWARF expression rather than a location description. For that,
20781 loc_list_from_tree (value, 0, &context) is needed. */
20782 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20783 if (list2 && single_element_loc_list_p (list2))
20784 {
20785 if (placeholder_seen)
20786 {
20787 struct dwarf_procedure_info dpi;
20788 dpi.fndecl = NULL_TREE;
20789 dpi.args_count = 1;
20790 if (!resolve_args_picking (list2->expr, 1, &dpi))
20791 return;
20792 }
20793 add_AT_loc (die, attr, list2->expr);
20794 return;
20795 }
20796 }
20797
20798 /* If that failed to give a single element location list, fall back to
20799 outputting this as a reference... still if permitted. */
20800 if (list == NULL
20801 || (forms & dw_scalar_form_reference) == 0
20802 || placeholder_seen)
20803 return;
20804
20805 if (!decl_die)
20806 {
20807 if (current_function_decl == 0)
20808 context_die = comp_unit_die ();
20809 else
20810 context_die = lookup_decl_die (current_function_decl);
20811
20812 decl_die = new_die (DW_TAG_variable, context_die, value);
20813 add_AT_flag (decl_die, DW_AT_artificial, 1);
20814 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20815 context_die);
20816 }
20817
20818 add_AT_location_description (decl_die, DW_AT_location, list);
20819 add_AT_die_ref (die, attr, decl_die);
20820 }
20821
20822 /* Return the default for DW_AT_lower_bound, or -1 if there is not any
20823 default. */
20824
20825 static int
20826 lower_bound_default (void)
20827 {
20828 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20829 {
20830 case DW_LANG_C:
20831 case DW_LANG_C89:
20832 case DW_LANG_C99:
20833 case DW_LANG_C11:
20834 case DW_LANG_C_plus_plus:
20835 case DW_LANG_C_plus_plus_11:
20836 case DW_LANG_C_plus_plus_14:
20837 case DW_LANG_ObjC:
20838 case DW_LANG_ObjC_plus_plus:
20839 return 0;
20840 case DW_LANG_Fortran77:
20841 case DW_LANG_Fortran90:
20842 case DW_LANG_Fortran95:
20843 case DW_LANG_Fortran03:
20844 case DW_LANG_Fortran08:
20845 return 1;
20846 case DW_LANG_UPC:
20847 case DW_LANG_D:
20848 case DW_LANG_Python:
20849 return dwarf_version >= 4 ? 0 : -1;
20850 case DW_LANG_Ada95:
20851 case DW_LANG_Ada83:
20852 case DW_LANG_Cobol74:
20853 case DW_LANG_Cobol85:
20854 case DW_LANG_Modula2:
20855 case DW_LANG_PLI:
20856 return dwarf_version >= 4 ? 1 : -1;
20857 default:
20858 return -1;
20859 }
20860 }
20861
20862 /* Given a tree node describing an array bound (either lower or upper) output
20863 a representation for that bound. */
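/* For example, for int a[5] in C the lower bound 0 matches the language
   default (see lower_bound_default) and is omitted entirely; only
   DW_AT_upper_bound (4) is emitted.  */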
20864
20865 static void
20866 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20867 tree bound, struct loc_descr_context *context)
20868 {
20869 int dflt;
20870
20871 while (1)
20872 switch (TREE_CODE (bound))
20873 {
20874 /* Strip all conversions. */
20875 CASE_CONVERT:
20876 case VIEW_CONVERT_EXPR:
20877 bound = TREE_OPERAND (bound, 0);
20878 break;
20879
20880 /* All fixed bounds are represented by INTEGER_CST nodes. Lower bounds
20881 are even omitted when they are the default. */
20882 case INTEGER_CST:
20883 /* If the value for this bound is the default one, we can even omit the
20884 attribute. */
20885 if (bound_attr == DW_AT_lower_bound
20886 && tree_fits_shwi_p (bound)
20887 && (dflt = lower_bound_default ()) != -1
20888 && tree_to_shwi (bound) == dflt)
20889 return;
20890
20891 /* FALLTHRU */
20892
20893 default:
20894 /* Because of the complex interactions there can be with other GNAT
20895 encodings, GDB isn't ready yet to handle a proper DWARF description
20896 for self-referential subrange bounds: let the GNAT encodings do the
20897 magic in such a case. */
20898 if (is_ada ()
20899 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20900 && contains_placeholder_p (bound))
20901 return;
20902
20903 add_scalar_info (subrange_die, bound_attr, bound,
20904 dw_scalar_form_constant
20905 | dw_scalar_form_exprloc
20906 | dw_scalar_form_reference,
20907 context);
20908 return;
20909 }
20910 }
20911
20912 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
20913 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
20914 Note that the block of subscript information for an array type also
20915 includes information about the element type of the given array type.
20916
20917 This function reuses previously set type and bound information if
20918 available. */
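/* Illustrative example: for the C declaration `int a[2][3]' with
   COLLAPSE_P true, a single DW_TAG_array_type DIE receives two
   DW_TAG_subrange_type children, one per dimension, with
   DW_AT_upper_bound 1 and 2 respectively (the lower bound 0 is omitted
   because it is the C default, see lower_bound_default above).  With
   COLLAPSE_P false, as used for Ada, only the outermost dimension is
   described here and the element type remains the inner array type.  */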
20919
20920 static void
20921 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
20922 {
20923 unsigned dimension_number;
20924 tree lower, upper;
20925 dw_die_ref child = type_die->die_child;
20926
20927 for (dimension_number = 0;
20928 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
20929 type = TREE_TYPE (type), dimension_number++)
20930 {
20931 tree domain = TYPE_DOMAIN (type);
20932
20933 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
20934 break;
20935
20936 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
20937 and (in GNU C only) variable bounds. Handle all three forms
20938 here. */
20939
20940 /* Find and reuse a previously generated DW_TAG_subrange_type if
20941 available.
20942
20943 For multi-dimensional arrays, as we iterate through the
20944 various dimensions in the enclosing for loop above, we also
20945 iterate through the DIE children and pick at each
20946 DW_TAG_subrange_type previously generated (if available).
20947 Each child DW_TAG_subrange_type DIE describes the range of
20948 the current dimension. At this point we should have as many
20949 DW_TAG_subrange_type's as we have dimensions in the
20950 array. */
20951 dw_die_ref subrange_die = NULL;
20952 if (child)
20953 while (1)
20954 {
20955 child = child->die_sib;
20956 if (child->die_tag == DW_TAG_subrange_type)
20957 subrange_die = child;
20958 if (child == type_die->die_child)
20959 {
20960 /* If we wrapped around, stop looking next time. */
20961 child = NULL;
20962 break;
20963 }
20964 if (child->die_tag == DW_TAG_subrange_type)
20965 break;
20966 }
20967 if (!subrange_die)
20968 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
20969
20970 if (domain)
20971 {
20972 /* We have an array type with specified bounds. */
20973 lower = TYPE_MIN_VALUE (domain);
20974 upper = TYPE_MAX_VALUE (domain);
20975
20976 /* Define the index type. */
20977 if (TREE_TYPE (domain)
20978 && !get_AT (subrange_die, DW_AT_type))
20979 {
20980 /* ??? This is probably an Ada unnamed subrange type. Ignore the
20981 TREE_TYPE field. We can't emit debug info for this
20982 because it is an unnamed integral type. */
20983 if (TREE_CODE (domain) == INTEGER_TYPE
20984 && TYPE_NAME (domain) == NULL_TREE
20985 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
20986 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
20987 ;
20988 else
20989 add_type_attribute (subrange_die, TREE_TYPE (domain),
20990 TYPE_UNQUALIFIED, false, type_die);
20991 }
20992
20993 /* ??? If upper is NULL, the array has unspecified length,
20994 but it does have a lower bound. This happens with Fortran
20995 dimension arr(N:*)
20996 Since the debugger is definitely going to need to know N
20997 to produce useful results, go ahead and output the lower
20998 bound solo, and hope the debugger can cope. */
20999
21000 if (!get_AT (subrange_die, DW_AT_lower_bound))
21001 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
21002 if (!get_AT (subrange_die, DW_AT_upper_bound)
21003 && !get_AT (subrange_die, DW_AT_count))
21004 {
21005 if (upper)
21006 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
21007 else if ((is_c () || is_cxx ()) && COMPLETE_TYPE_P (type))
21008 /* Zero-length array. */
21009 add_bound_info (subrange_die, DW_AT_count,
21010 build_int_cst (TREE_TYPE (lower), 0), NULL);
21011 }
21012 }
21013
21014 /* Otherwise we have an array type with an unspecified length. The
21015 DWARF-2 spec does not say how to handle this; let's just leave out the
21016 bounds. */
21017 }
21018 }
21019
21020 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
21021
21022 static void
21023 add_byte_size_attribute (dw_die_ref die, tree tree_node)
21024 {
21025 dw_die_ref decl_die;
21026 HOST_WIDE_INT size;
21027 dw_loc_descr_ref size_expr = NULL;
21028
21029 switch (TREE_CODE (tree_node))
21030 {
21031 case ERROR_MARK:
21032 size = 0;
21033 break;
21034 case ENUMERAL_TYPE:
21035 case RECORD_TYPE:
21036 case UNION_TYPE:
21037 case QUAL_UNION_TYPE:
21038 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
21039 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
21040 {
21041 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
21042 return;
21043 }
21044 size_expr = type_byte_size (tree_node, &size);
21045 break;
21046 case FIELD_DECL:
21047 /* For a data member of a struct or union, the DW_AT_byte_size is
21048 generally given as the number of bytes normally allocated for an
21049 object of the *declared* type of the member itself. This is true
21050 even for bit-fields. */
21051 size = int_size_in_bytes (field_type (tree_node));
21052 break;
21053 default:
21054 gcc_unreachable ();
21055 }
21056
21057 /* Support for dynamically-sized objects was introduced by DWARFv3.
21058 At the moment, GDB does not handle variable byte sizes very well,
21059 though. */
21060 if ((dwarf_version >= 3 || !dwarf_strict)
21061 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
21062 && size_expr != NULL)
21063 add_AT_loc (die, DW_AT_byte_size, size_expr);
21064
21065 /* Note that `size' might be -1 when we get to this point. If it is, that
21066 indicates that the byte size of the entity in question is variable and
21067 that we could not generate a DWARF expression that computes it. */
21068 if (size >= 0)
21069 add_AT_unsigned (die, DW_AT_byte_size, size);
21070 }
21071
21072 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
21073 alignment. */
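/* For example, `int x __attribute__ ((aligned (16)));' has
   DECL_USER_ALIGN set, so the variable's DIE gets DW_AT_alignment 16,
   whereas a declaration using only the type's natural alignment gets no
   such attribute.  The attribute is also skipped entirely for strict
   DWARF older than version 5, per the check below.  */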
21074
21075 static void
21076 add_alignment_attribute (dw_die_ref die, tree tree_node)
21077 {
21078 if (dwarf_version < 5 && dwarf_strict)
21079 return;
21080
21081 unsigned align;
21082
21083 if (DECL_P (tree_node))
21084 {
21085 if (!DECL_USER_ALIGN (tree_node))
21086 return;
21087
21088 align = DECL_ALIGN_UNIT (tree_node);
21089 }
21090 else if (TYPE_P (tree_node))
21091 {
21092 if (!TYPE_USER_ALIGN (tree_node))
21093 return;
21094
21095 align = TYPE_ALIGN_UNIT (tree_node);
21096 }
21097 else
21098 gcc_unreachable ();
21099
21100 add_AT_unsigned (die, DW_AT_alignment, align);
21101 }
21102
21103 /* For a FIELD_DECL node which represents a bit-field, output an attribute
21104 which specifies the distance in bits from the highest order bit of the
21105 "containing object" for the bit-field to the highest order bit of the
21106 bit-field itself.
21107
21108 For any given bit-field, the "containing object" is a hypothetical object
21109 (of some integral or enum type) within which the given bit-field lives. The
21110 type of this hypothetical "containing object" is always the same as the
21111 declared type of the individual bit-field itself. The determination of the
21112 exact location of the "containing object" for a bit-field is rather
21113 complicated. It's handled by the `field_byte_offset' function (above).
21114
21115 CTX is required: see the comment for VLR_CONTEXT.
21116
21117 Note that it is the size (in bytes) of the hypothetical "containing object"
21118 which will be given in the DW_AT_byte_size attribute for this bit-field.
21119 (See `add_byte_size_attribute' above). */
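/* Worked example (illustrative, assuming the containing object starts at
   byte offset 0 and `int' is 32 bits): for `struct s { int a : 3; int b : 5; };'
   the field `b' sits at bit position 3.  On a little-endian target the
   computation below yields (0 + 32) - (3 + 5) = 24, i.e. DW_AT_bit_offset 24,
   the distance from the most significant bit of the containing int to the
   most significant bit of `b'.  On a big-endian target with the same numeric
   bit position the result would simply be 3 - 0 = 3.  */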
21120
21121 static inline void
21122 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
21123 {
21124 HOST_WIDE_INT object_offset_in_bytes;
21125 tree original_type = DECL_BIT_FIELD_TYPE (decl);
21126 HOST_WIDE_INT bitpos_int;
21127 HOST_WIDE_INT highest_order_object_bit_offset;
21128 HOST_WIDE_INT highest_order_field_bit_offset;
21129 HOST_WIDE_INT bit_offset;
21130
21131 field_byte_offset (decl, ctx, &object_offset_in_bytes);
21132
21133 /* Must be a field and a bit field. */
21134 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
21135
21136 /* We can't yet handle bit-fields whose offsets are variable, so if we
21137 encounter such things, just return without generating any attribute
21138 whatsoever. Likewise for variable or too large size. */
21139 if (! tree_fits_shwi_p (bit_position (decl))
21140 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
21141 return;
21142
21143 bitpos_int = int_bit_position (decl);
21144
21145 /* Note that the bit offset is always the distance (in bits) from the
21146 highest-order bit of the "containing object" to the highest-order bit of
21147 the bit-field itself. Since the "high-order end" of any object or field
21148 is different on big-endian and little-endian machines, the computation
21149 below must take account of these differences. */
21150 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
21151 highest_order_field_bit_offset = bitpos_int;
21152
21153 if (! BYTES_BIG_ENDIAN)
21154 {
21155 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
21156 highest_order_object_bit_offset +=
21157 simple_type_size_in_bits (original_type);
21158 }
21159
21160 bit_offset
21161 = (! BYTES_BIG_ENDIAN
21162 ? highest_order_object_bit_offset - highest_order_field_bit_offset
21163 : highest_order_field_bit_offset - highest_order_object_bit_offset);
21164
21165 if (bit_offset < 0)
21166 add_AT_int (die, DW_AT_bit_offset, bit_offset);
21167 else
21168 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
21169 }
21170
21171 /* For a FIELD_DECL node which represents a bit field, output an attribute
21172 which specifies the length in bits of the given field. */
21173
21174 static inline void
21175 add_bit_size_attribute (dw_die_ref die, tree decl)
21176 {
21177 /* Must be a field and a bit field. */
21178 gcc_assert (TREE_CODE (decl) == FIELD_DECL
21179 && DECL_BIT_FIELD_TYPE (decl));
21180
21181 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
21182 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
21183 }
21184
21185 /* If the compiled language is ANSI C, add a DW_AT_prototyped attribute
21186 when argument types are given for the parameters of a function. */
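/* For example, in a C99 translation unit `int f (void);' satisfies
   prototype_p and its subprogram DIE gets DW_AT_prototyped 1, while an
   old-style `int g ();' declaration does not; for languages outside the
   C family the switch below adds nothing.  */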
21187
21188 static inline void
21189 add_prototyped_attribute (dw_die_ref die, tree func_type)
21190 {
21191 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
21192 {
21193 case DW_LANG_C:
21194 case DW_LANG_C89:
21195 case DW_LANG_C99:
21196 case DW_LANG_C11:
21197 case DW_LANG_ObjC:
21198 if (prototype_p (func_type))
21199 add_AT_flag (die, DW_AT_prototyped, 1);
21200 break;
21201 default:
21202 break;
21203 }
21204 }
21205
21206 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
21207 by looking in the type declaration, the object declaration equate table or
21208 the block mapping. */
21209
21210 static inline void
21211 add_abstract_origin_attribute (dw_die_ref die, tree origin)
21212 {
21213 dw_die_ref origin_die = NULL;
21214
21215 /* For late LTO debug output we want to refer directly to the abstract
21216 DIE in the early debug rather to the possibly existing concrete
21217 instance and avoid creating that just for this purpose. */
21218 sym_off_pair *desc;
21219 if (in_lto_p
21220 && external_die_map
21221 && (desc = external_die_map->get (origin)))
21222 {
21223 add_AT_external_die_ref (die, DW_AT_abstract_origin,
21224 desc->sym, desc->off);
21225 return;
21226 }
21227
21228 if (DECL_P (origin))
21229 origin_die = lookup_decl_die (origin);
21230 else if (TYPE_P (origin))
21231 origin_die = lookup_type_die (origin);
21232 else if (TREE_CODE (origin) == BLOCK)
21233 origin_die = lookup_block_die (origin);
21234
21235 /* XXX: Functions that are never lowered don't always have correct block
21236 trees (in the case of Java they simply have no block tree; other languages
21237 can have the same problem). For these functions, there is nothing we can really do to
21238 output correct debug info for inlined functions in all cases. Rather
21239 than die, we'll just produce deficient debug info now, in that we will
21240 have variables without a proper abstract origin. In the future, when all
21241 functions are lowered, we should re-add a gcc_assert (origin_die)
21242 here. */
21243
21244 if (origin_die)
21245 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
21246 }
21247
21248 /* We do not currently support the pure_virtual attribute. */
21249
21250 static inline void
21251 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
21252 {
21253 if (DECL_VINDEX (func_decl))
21254 {
21255 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
21256
21257 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
21258 add_AT_loc (die, DW_AT_vtable_elem_location,
21259 new_loc_descr (DW_OP_constu,
21260 tree_to_shwi (DECL_VINDEX (func_decl)),
21261 0));
21262
21263 /* GNU extension: Record what type this method came from originally. */
21264 if (debug_info_level > DINFO_LEVEL_TERSE
21265 && DECL_CONTEXT (func_decl))
21266 add_AT_die_ref (die, DW_AT_containing_type,
21267 lookup_type_die (DECL_CONTEXT (func_decl)));
21268 }
21269 }
21270 \f
21271 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
21272 given decl. This used to be a vendor extension until after DWARF 4
21273 standardized it. */
21274
21275 static void
21276 add_linkage_attr (dw_die_ref die, tree decl)
21277 {
21278 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21279
21280 /* Mimic what assemble_name_raw does with a leading '*'. */
21281 if (name[0] == '*')
21282 name = &name[1];
21283
21284 if (dwarf_version >= 4)
21285 add_AT_string (die, DW_AT_linkage_name, name);
21286 else
21287 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
21288 }
21289
21290 /* Add source coordinate attributes for the given decl. */
21291
21292 static void
21293 add_src_coords_attributes (dw_die_ref die, tree decl)
21294 {
21295 expanded_location s;
21296
21297 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
21298 return;
21299 s = expand_location (DECL_SOURCE_LOCATION (decl));
21300 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
21301 add_AT_unsigned (die, DW_AT_decl_line, s.line);
21302 if (debug_column_info && s.column)
21303 add_AT_unsigned (die, DW_AT_decl_column, s.column);
21304 }
21305
21306 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
21307
21308 static void
21309 add_linkage_name_raw (dw_die_ref die, tree decl)
21310 {
21311 /* Defer until we have an assembler name set. */
21312 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
21313 {
21314 limbo_die_node *asm_name;
21315
21316 asm_name = ggc_cleared_alloc<limbo_die_node> ();
21317 asm_name->die = die;
21318 asm_name->created_for = decl;
21319 asm_name->next = deferred_asm_name;
21320 deferred_asm_name = asm_name;
21321 }
21322 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
21323 add_linkage_attr (die, decl);
21324 }
21325
21326 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
21327
21328 static void
21329 add_linkage_name (dw_die_ref die, tree decl)
21330 {
21331 if (debug_info_level > DINFO_LEVEL_NONE
21332 && VAR_OR_FUNCTION_DECL_P (decl)
21333 && TREE_PUBLIC (decl)
21334 && !(VAR_P (decl) && DECL_REGISTER (decl))
21335 && die->die_tag != DW_TAG_member)
21336 add_linkage_name_raw (die, decl);
21337 }
21338
21339 /* Add a DW_AT_name attribute and source coordinate attribute for the
21340 given decl, but only if it actually has a name. */
21341
21342 static void
21343 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
21344 bool no_linkage_name)
21345 {
21346 tree decl_name;
21347
21348 decl_name = DECL_NAME (decl);
21349 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
21350 {
21351 const char *name = dwarf2_name (decl, 0);
21352 if (name)
21353 add_name_attribute (die, name);
21354 else
21355 add_desc_attribute (die, decl);
21356
21357 if (! DECL_ARTIFICIAL (decl))
21358 add_src_coords_attributes (die, decl);
21359
21360 if (!no_linkage_name)
21361 add_linkage_name (die, decl);
21362 }
21363 else
21364 add_desc_attribute (die, decl);
21365
21366 #ifdef VMS_DEBUGGING_INFO
21367 /* Get the function's name, as described by its RTL. This may be different
21368 from the DECL_NAME name used in the source file. */
21369 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
21370 {
21371 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
21372 XEXP (DECL_RTL (decl), 0), false);
21373 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
21374 }
21375 #endif /* VMS_DEBUGGING_INFO */
21376 }
21377
21378 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
21379
21380 static void
21381 add_discr_value (dw_die_ref die, dw_discr_value *value)
21382 {
21383 dw_attr_node attr;
21384
21385 attr.dw_attr = DW_AT_discr_value;
21386 attr.dw_attr_val.val_class = dw_val_class_discr_value;
21387 attr.dw_attr_val.val_entry = NULL;
21388 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
21389 if (value->pos)
21390 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
21391 else
21392 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
21393 add_dwarf_attr (die, &attr);
21394 }
21395
21396 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
21397
21398 static void
21399 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
21400 {
21401 dw_attr_node attr;
21402
21403 attr.dw_attr = DW_AT_discr_list;
21404 attr.dw_attr_val.val_class = dw_val_class_discr_list;
21405 attr.dw_attr_val.val_entry = NULL;
21406 attr.dw_attr_val.v.val_discr_list = discr_list;
21407 add_dwarf_attr (die, &attr);
21408 }
21409
21410 static inline dw_discr_list_ref
21411 AT_discr_list (dw_attr_node *attr)
21412 {
21413 return attr->dw_attr_val.v.val_discr_list;
21414 }
21415
21416 #ifdef VMS_DEBUGGING_INFO
21417 /* Output the debug main pointer DIE for VMS. */
21418
21419 void
21420 dwarf2out_vms_debug_main_pointer (void)
21421 {
21422 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21423 dw_die_ref die;
21424
21425 /* Allocate the VMS debug main subprogram die. */
21426 die = new_die_raw (DW_TAG_subprogram);
21427 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
21428 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
21429 current_function_funcdef_no);
21430 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21431
21432 /* Make it the first child of comp_unit_die (). */
21433 die->die_parent = comp_unit_die ();
21434 if (comp_unit_die ()->die_child)
21435 {
21436 die->die_sib = comp_unit_die ()->die_child->die_sib;
21437 comp_unit_die ()->die_child->die_sib = die;
21438 }
21439 else
21440 {
21441 die->die_sib = die;
21442 comp_unit_die ()->die_child = die;
21443 }
21444 }
21445 #endif /* VMS_DEBUGGING_INFO */
21446
21447 /* walk_tree helper function for uses_local_type, below. */
21448
21449 static tree
21450 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
21451 {
21452 if (!TYPE_P (*tp))
21453 *walk_subtrees = 0;
21454 else
21455 {
21456 tree name = TYPE_NAME (*tp);
21457 if (name && DECL_P (name) && decl_function_context (name))
21458 return *tp;
21459 }
21460 return NULL_TREE;
21461 }
21462
21463 /* If TYPE involves a function-local type (including a local typedef to a
21464 non-local type), returns that type; otherwise returns NULL_TREE. */
21465
21466 static tree
21467 uses_local_type (tree type)
21468 {
21469 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
21470 return used;
21471 }
21472
21473 /* Return the DIE for the scope that immediately contains this type.
21474 Non-named types that do not involve a function-local type get global
21475 scope. Named types nested in namespaces or other types get their
21476 containing scope. All other types (i.e. function-local named types) get
21477 the current active scope. */
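/* Illustrative examples: for `namespace N { struct S { struct T { }; }; }'
   the scope DIE for T is the DIE of S (its containing type); for a named
   struct declared inside a function body the containing scope is the
   FUNCTION_DECL, so the current CONTEXT_DIE is used; and an unnamed
   file-scope type that does not use any function-local type is placed
   under comp_unit_die ().  */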
21478
21479 static dw_die_ref
21480 scope_die_for (tree t, dw_die_ref context_die)
21481 {
21482 dw_die_ref scope_die = NULL;
21483 tree containing_scope;
21484
21485 /* Non-types always go in the current scope. */
21486 gcc_assert (TYPE_P (t));
21487
21488 /* Use the scope of the typedef, rather than the scope of the type
21489 it refers to. */
21490 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
21491 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
21492 else
21493 containing_scope = TYPE_CONTEXT (t);
21494
21495 /* Use the containing namespace if there is one. */
21496 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
21497 {
21498 if (context_die == lookup_decl_die (containing_scope))
21499 /* OK */;
21500 else if (debug_info_level > DINFO_LEVEL_TERSE)
21501 context_die = get_context_die (containing_scope);
21502 else
21503 containing_scope = NULL_TREE;
21504 }
21505
21506 /* Ignore function type "scopes" from the C frontend. They mean that
21507 a tagged type is local to a parmlist of a function declarator, but
21508 that isn't useful to DWARF. */
21509 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
21510 containing_scope = NULL_TREE;
21511
21512 if (SCOPE_FILE_SCOPE_P (containing_scope))
21513 {
21514 /* If T uses a local type keep it local as well, to avoid references
21515 to function-local DIEs from outside the function. */
21516 if (current_function_decl && uses_local_type (t))
21517 scope_die = context_die;
21518 else
21519 scope_die = comp_unit_die ();
21520 }
21521 else if (TYPE_P (containing_scope))
21522 {
21523 /* For types, we can just look up the appropriate DIE. */
21524 if (debug_info_level > DINFO_LEVEL_TERSE)
21525 scope_die = get_context_die (containing_scope);
21526 else
21527 {
21528 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
21529 if (scope_die == NULL)
21530 scope_die = comp_unit_die ();
21531 }
21532 }
21533 else
21534 scope_die = context_die;
21535
21536 return scope_die;
21537 }
21538
21539 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
21540
21541 static inline int
21542 local_scope_p (dw_die_ref context_die)
21543 {
21544 for (; context_die; context_die = context_die->die_parent)
21545 if (context_die->die_tag == DW_TAG_inlined_subroutine
21546 || context_die->die_tag == DW_TAG_subprogram)
21547 return 1;
21548
21549 return 0;
21550 }
21551
21552 /* Returns nonzero if CONTEXT_DIE is a class. */
21553
21554 static inline int
21555 class_scope_p (dw_die_ref context_die)
21556 {
21557 return (context_die
21558 && (context_die->die_tag == DW_TAG_structure_type
21559 || context_die->die_tag == DW_TAG_class_type
21560 || context_die->die_tag == DW_TAG_interface_type
21561 || context_die->die_tag == DW_TAG_union_type));
21562 }
21563
21564 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
21565 whether or not to treat a DIE in this context as a declaration. */
21566
21567 static inline int
21568 class_or_namespace_scope_p (dw_die_ref context_die)
21569 {
21570 return (class_scope_p (context_die)
21571 || (context_die && context_die->die_tag == DW_TAG_namespace));
21572 }
21573
21574 /* Many forms of DIEs require a "type description" attribute. This
21575 routine locates the proper "type descriptor" die for the type given
21576 by 'type' plus any additional qualifiers given by 'cv_quals', and
21577 adds a DW_AT_type attribute below the given die. */
21578
21579 static void
21580 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
21581 bool reverse, dw_die_ref context_die)
21582 {
21583 enum tree_code code = TREE_CODE (type);
21584 dw_die_ref type_die = NULL;
21585
21586 /* ??? If this type is an unnamed subrange type of an integral, floating-point
21587 or fixed-point type, use the inner type. This is because we have no
21588 support for unnamed types in base_type_die. This can happen if this is
21589 an Ada subrange type. The correct solution is to emit a subrange type DIE. */
21590 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
21591 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
21592 type = TREE_TYPE (type), code = TREE_CODE (type);
21593
21594 if (code == ERROR_MARK
21595 /* Handle a special case. For functions whose return type is void, we
21596 generate *no* type attribute. (Note that no object may have type
21597 `void', so this only applies to function return types). */
21598 || code == VOID_TYPE)
21599 return;
21600
21601 type_die = modified_type_die (type,
21602 cv_quals | TYPE_QUALS (type),
21603 reverse,
21604 context_die);
21605
21606 if (type_die != NULL)
21607 add_AT_die_ref (object_die, DW_AT_type, type_die);
21608 }
21609
21610 /* Given an object die, add the calling convention attribute for the
21611 function call type. */
21612 static void
21613 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
21614 {
21615 enum dwarf_calling_convention value = DW_CC_normal;
21616
21617 value = ((enum dwarf_calling_convention)
21618 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
21619
21620 if (is_fortran ()
21621 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
21622 {
21623 /* DWARF 2 doesn't provide a way to identify a program's source-level
21624 entry point. DW_AT_calling_convention attributes are only meant
21625 to describe functions' calling conventions. However, lacking a
21626 better way to signal the Fortran main program, we used this for
21627 a long time, following existing custom. Now, DWARF 4 has
21628 DW_AT_main_subprogram, which we add below, but some tools still
21629 rely on the old way, which we thus keep. */
21630 value = DW_CC_program;
21631
21632 if (dwarf_version >= 4 || !dwarf_strict)
21633 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
21634 }
21635
21636 /* Only add the attribute if the backend requests it and the
21637 value is not DW_CC_normal. */
21638 if (value && (value != DW_CC_normal))
21639 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
21640 }
21641
21642 /* Given a tree pointer to a struct, class, union, or enum type node, return
21643 a pointer to the (string) tag name for the given type, or zero if the type
21644 was declared without a tag. */
21645
21646 static const char *
21647 type_tag (const_tree type)
21648 {
21649 const char *name = 0;
21650
21651 if (TYPE_NAME (type) != 0)
21652 {
21653 tree t = 0;
21654
21655 /* Find the IDENTIFIER_NODE for the type name. */
21656 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
21657 && !TYPE_NAMELESS (type))
21658 t = TYPE_NAME (type);
21659
21660 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
21661 a TYPE_DECL node, regardless of whether or not a `typedef' was
21662 involved. */
21663 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
21664 && ! DECL_IGNORED_P (TYPE_NAME (type)))
21665 {
21666 /* We want to be extra verbose. Don't call dwarf_name if
21667 DECL_NAME isn't set. The default hook for decl_printable_name
21668 doesn't like that, and in this context it's correct to return
21669 0, instead of "<anonymous>" or the like. */
21670 if (DECL_NAME (TYPE_NAME (type))
21671 && !DECL_NAMELESS (TYPE_NAME (type)))
21672 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
21673 }
21674
21675 /* Now get the name as a string, or invent one. */
21676 if (!name && t != 0)
21677 name = IDENTIFIER_POINTER (t);
21678 }
21679
21680 return (name == 0 || *name == '\0') ? 0 : name;
21681 }
21682
21683 /* Return the type associated with a data member, make a special check
21684 for bit field types. */
21685
21686 static inline tree
21687 member_declared_type (const_tree member)
21688 {
21689 return (DECL_BIT_FIELD_TYPE (member)
21690 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
21691 }
21692
21693 /* Get the decl's label, as described by its RTL. This may be different
21694 from the DECL_NAME name used in the source file. */
21695
21696 #if 0
21697 static const char *
21698 decl_start_label (tree decl)
21699 {
21700 rtx x;
21701 const char *fnname;
21702
21703 x = DECL_RTL (decl);
21704 gcc_assert (MEM_P (x));
21705
21706 x = XEXP (x, 0);
21707 gcc_assert (GET_CODE (x) == SYMBOL_REF);
21708
21709 fnname = XSTR (x, 0);
21710 return fnname;
21711 }
21712 #endif
21713 \f
21714 /* For variable-length arrays that have been previously generated, but
21715 may be incomplete due to missing subscript info, fill the subscript
21716 info. Return TRUE if this is one of those cases. */
21717 static bool
21718 fill_variable_array_bounds (tree type)
21719 {
21720 if (TREE_ASM_WRITTEN (type)
21721 && TREE_CODE (type) == ARRAY_TYPE
21722 && variably_modified_type_p (type, NULL))
21723 {
21724 dw_die_ref array_die = lookup_type_die (type);
21725 if (!array_die)
21726 return false;
21727 add_subscript_info (array_die, type, !is_ada ());
21728 return true;
21729 }
21730 return false;
21731 }
21732
21733 /* These routines generate the internal representation of the DIE's for
21734 the compilation unit. Debugging information is collected by walking
21735 the declaration trees passed in from dwarf2out_decl(). */
21736
21737 static void
21738 gen_array_type_die (tree type, dw_die_ref context_die)
21739 {
21740 dw_die_ref array_die;
21741
21742 /* GNU compilers represent multidimensional array types as sequences of one
21743 dimensional array types whose element types are themselves array types.
21744 We sometimes squish that down to a single array_type DIE with multiple
21745 subscripts in the DWARF debugging info. The draft DWARF specification
21746 says that we are allowed to do this kind of compression in C, because
21747 there is no difference between an array of arrays and a multidimensional
21748 array. We don't do this for Ada, to remain as close as possible to the
21749 actual representation, which is especially important given the language's
21750 flexibility with respect to arrays of variable size. */
21751
21752 bool collapse_nested_arrays = !is_ada ();
21753
21754 if (fill_variable_array_bounds (type))
21755 return;
21756
21757 dw_die_ref scope_die = scope_die_for (type, context_die);
21758 tree element_type;
21759
21760 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21761 DW_TAG_string_type doesn't have DW_AT_type attribute). */
21762 if (TYPE_STRING_FLAG (type)
21763 && TREE_CODE (type) == ARRAY_TYPE
21764 && is_fortran ()
21765 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21766 {
21767 HOST_WIDE_INT size;
21768
21769 array_die = new_die (DW_TAG_string_type, scope_die, type);
21770 add_name_attribute (array_die, type_tag (type));
21771 equate_type_number_to_die (type, array_die);
21772 size = int_size_in_bytes (type);
21773 if (size >= 0)
21774 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21775 /* ??? We can't annotate types late, but for LTO we may not
21776 generate a location early either (gfortran.dg/save_6.f90). */
21777 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21778 && TYPE_DOMAIN (type) != NULL_TREE
21779 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21780 {
21781 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21782 tree rszdecl = szdecl;
21783
21784 size = int_size_in_bytes (TREE_TYPE (szdecl));
21785 if (!DECL_P (szdecl))
21786 {
21787 if (TREE_CODE (szdecl) == INDIRECT_REF
21788 && DECL_P (TREE_OPERAND (szdecl, 0)))
21789 {
21790 rszdecl = TREE_OPERAND (szdecl, 0);
21791 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21792 != DWARF2_ADDR_SIZE)
21793 size = 0;
21794 }
21795 else
21796 size = 0;
21797 }
21798 if (size > 0)
21799 {
21800 dw_loc_list_ref loc
21801 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21802 NULL);
21803 if (loc)
21804 {
21805 add_AT_location_description (array_die, DW_AT_string_length,
21806 loc);
21807 if (size != DWARF2_ADDR_SIZE)
21808 add_AT_unsigned (array_die, dwarf_version >= 5
21809 ? DW_AT_string_length_byte_size
21810 : DW_AT_byte_size, size);
21811 }
21812 }
21813 }
21814 return;
21815 }
21816
21817 array_die = new_die (DW_TAG_array_type, scope_die, type);
21818 add_name_attribute (array_die, type_tag (type));
21819 equate_type_number_to_die (type, array_die);
21820
21821 if (TREE_CODE (type) == VECTOR_TYPE)
21822 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21823
21824 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21825 if (is_fortran ()
21826 && TREE_CODE (type) == ARRAY_TYPE
21827 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21828 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21829 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21830
21831 #if 0
21832 /* We default the array ordering. Debuggers will probably do the right
21833 things even if DW_AT_ordering is not present. It's not even an issue
21834 until we start to get into multidimensional arrays anyway. If a debugger
21835 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21836 then we'll have to put the DW_AT_ordering attribute back in. (But if
21837 and when we find out that we need to put these in, we will only do so
21838 for multidimensional arrays.) */
21839 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21840 #endif
21841
21842 if (TREE_CODE (type) == VECTOR_TYPE)
21843 {
21844 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21845 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21846 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21847 add_bound_info (subrange_die, DW_AT_upper_bound,
21848 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21849 }
21850 else
21851 add_subscript_info (array_die, type, collapse_nested_arrays);
21852
21853 /* Add representation of the type of the elements of this array type and
21854 emit the corresponding DIE if we haven't done it already. */
21855 element_type = TREE_TYPE (type);
21856 if (collapse_nested_arrays)
21857 while (TREE_CODE (element_type) == ARRAY_TYPE)
21858 {
21859 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21860 break;
21861 element_type = TREE_TYPE (element_type);
21862 }
21863
21864 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21865 TREE_CODE (type) == ARRAY_TYPE
21866 && TYPE_REVERSE_STORAGE_ORDER (type),
21867 context_die);
21868
21869 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21870 if (TYPE_ARTIFICIAL (type))
21871 add_AT_flag (array_die, DW_AT_artificial, 1);
21872
21873 if (get_AT (array_die, DW_AT_name))
21874 add_pubtype (type, array_die);
21875
21876 add_alignment_attribute (array_die, type);
21877 }
21878
21879 /* This routine generates a DIE for an array with a hidden descriptor;
21880 the details are filled into *info by a langhook. */
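/* Illustrative sketch: for a Fortran `REAL, ALLOCATABLE :: a(:,:)' the
   language hook fills *info from the array descriptor, typically giving
   a DW_AT_data_location expression that fetches the base address, a
   DW_AT_allocated test of that address, and per-dimension bounds and
   strides; the loop below then emits one DW_TAG_subrange_type (or
   DW_TAG_generic_subrange for assumed-rank arrays under DWARF 5) per
   dimension.  */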
21881
21882 static void
21883 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21884 dw_die_ref context_die)
21885 {
21886 const dw_die_ref scope_die = scope_die_for (type, context_die);
21887 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
21888 struct loc_descr_context context = { type, info->base_decl, NULL,
21889 false, false };
21890 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21891 int dim;
21892
21893 add_name_attribute (array_die, type_tag (type));
21894 equate_type_number_to_die (type, array_die);
21895
21896 if (info->ndimensions > 1)
21897 switch (info->ordering)
21898 {
21899 case array_descr_ordering_row_major:
21900 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21901 break;
21902 case array_descr_ordering_column_major:
21903 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21904 break;
21905 default:
21906 break;
21907 }
21908
21909 if (dwarf_version >= 3 || !dwarf_strict)
21910 {
21911 if (info->data_location)
21912 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
21913 dw_scalar_form_exprloc, &context);
21914 if (info->associated)
21915 add_scalar_info (array_die, DW_AT_associated, info->associated,
21916 dw_scalar_form_constant
21917 | dw_scalar_form_exprloc
21918 | dw_scalar_form_reference, &context);
21919 if (info->allocated)
21920 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
21921 dw_scalar_form_constant
21922 | dw_scalar_form_exprloc
21923 | dw_scalar_form_reference, &context);
21924 if (info->stride)
21925 {
21926 const enum dwarf_attribute attr
21927 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
21928 const int forms
21929 = (info->stride_in_bits)
21930 ? dw_scalar_form_constant
21931 : (dw_scalar_form_constant
21932 | dw_scalar_form_exprloc
21933 | dw_scalar_form_reference);
21934
21935 add_scalar_info (array_die, attr, info->stride, forms, &context);
21936 }
21937 }
21938 if (dwarf_version >= 5)
21939 {
21940 if (info->rank)
21941 {
21942 add_scalar_info (array_die, DW_AT_rank, info->rank,
21943 dw_scalar_form_constant
21944 | dw_scalar_form_exprloc, &context);
21945 subrange_tag = DW_TAG_generic_subrange;
21946 context.placeholder_arg = true;
21947 }
21948 }
21949
21950 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21951
21952 for (dim = 0; dim < info->ndimensions; dim++)
21953 {
21954 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
21955
21956 if (info->dimen[dim].bounds_type)
21957 add_type_attribute (subrange_die,
21958 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
21959 false, context_die);
21960 if (info->dimen[dim].lower_bound)
21961 add_bound_info (subrange_die, DW_AT_lower_bound,
21962 info->dimen[dim].lower_bound, &context);
21963 if (info->dimen[dim].upper_bound)
21964 add_bound_info (subrange_die, DW_AT_upper_bound,
21965 info->dimen[dim].upper_bound, &context);
21966 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
21967 add_scalar_info (subrange_die, DW_AT_byte_stride,
21968 info->dimen[dim].stride,
21969 dw_scalar_form_constant
21970 | dw_scalar_form_exprloc
21971 | dw_scalar_form_reference,
21972 &context);
21973 }
21974
21975 gen_type_die (info->element_type, context_die);
21976 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
21977 TREE_CODE (type) == ARRAY_TYPE
21978 && TYPE_REVERSE_STORAGE_ORDER (type),
21979 context_die);
21980
21981 if (get_AT (array_die, DW_AT_name))
21982 add_pubtype (type, array_die);
21983
21984 add_alignment_attribute (array_die, type);
21985 }
21986
21987 #if 0
21988 static void
21989 gen_entry_point_die (tree decl, dw_die_ref context_die)
21990 {
21991 tree origin = decl_ultimate_origin (decl);
21992 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
21993
21994 if (origin != NULL)
21995 add_abstract_origin_attribute (decl_die, origin);
21996 else
21997 {
21998 add_name_and_src_coords_attributes (decl_die, decl);
21999 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
22000 TYPE_UNQUALIFIED, false, context_die);
22001 }
22002
22003 if (DECL_ABSTRACT_P (decl))
22004 equate_decl_number_to_die (decl, decl_die);
22005 else
22006 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
22007 }
22008 #endif
22009
22010 /* Walk through the list of incomplete types again, trying once more to
22011 emit full debugging info for them. */
22012
22013 static void
22014 retry_incomplete_types (void)
22015 {
22016 set_early_dwarf s;
22017 int i;
22018
22019 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
22020 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
22021 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
22022 vec_safe_truncate (incomplete_types, 0);
22023 }
22024
22025 /* Determine what tag to use for a record type. */
22026
22027 static enum dwarf_tag
22028 record_type_tag (tree type)
22029 {
22030 if (! lang_hooks.types.classify_record)
22031 return DW_TAG_structure_type;
22032
22033 switch (lang_hooks.types.classify_record (type))
22034 {
22035 case RECORD_IS_STRUCT:
22036 return DW_TAG_structure_type;
22037
22038 case RECORD_IS_CLASS:
22039 return DW_TAG_class_type;
22040
22041 case RECORD_IS_INTERFACE:
22042 if (dwarf_version >= 3 || !dwarf_strict)
22043 return DW_TAG_interface_type;
22044 return DW_TAG_structure_type;
22045
22046 default:
22047 gcc_unreachable ();
22048 }
22049 }
22050
22051 /* Generate a DIE to represent an enumeration type. Note that these DIEs
22052 include all of the information about the enumeration values also. Each
22053 enumerated type name/value is listed as a child of the enumerated type
22054 DIE. */
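/* For example, `enum color { RED = 1, BLUE = 2 };' produces a
   DW_TAG_enumeration_type DIE with DW_AT_name "color", DW_AT_byte_size
   and (unless -gstrict-dwarf) DW_AT_encoding, plus two DW_TAG_enumerator
   children named "RED" and "BLUE" whose DW_AT_const_value attributes are
   1 and 2.  */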
22055
22056 static dw_die_ref
22057 gen_enumeration_type_die (tree type, dw_die_ref context_die)
22058 {
22059 dw_die_ref type_die = lookup_type_die (type);
22060 dw_die_ref orig_type_die = type_die;
22061
22062 if (type_die == NULL)
22063 {
22064 type_die = new_die (DW_TAG_enumeration_type,
22065 scope_die_for (type, context_die), type);
22066 equate_type_number_to_die (type, type_die);
22067 add_name_attribute (type_die, type_tag (type));
22068 if ((dwarf_version >= 4 || !dwarf_strict)
22069 && ENUM_IS_SCOPED (type))
22070 add_AT_flag (type_die, DW_AT_enum_class, 1);
22071 if (ENUM_IS_OPAQUE (type) && TYPE_SIZE (type))
22072 add_AT_flag (type_die, DW_AT_declaration, 1);
22073 if (!dwarf_strict)
22074 add_AT_unsigned (type_die, DW_AT_encoding,
22075 TYPE_UNSIGNED (type)
22076 ? DW_ATE_unsigned
22077 : DW_ATE_signed);
22078 }
22079 else if (! TYPE_SIZE (type) || ENUM_IS_OPAQUE (type))
22080 return type_die;
22081 else
22082 remove_AT (type_die, DW_AT_declaration);
22083
22084 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
22085 given enum type is incomplete, do not generate the DW_AT_byte_size
22086 attribute or the DW_AT_element_list attribute. */
22087 if (TYPE_SIZE (type))
22088 {
22089 tree link;
22090
22091 if (!ENUM_IS_OPAQUE (type))
22092 TREE_ASM_WRITTEN (type) = 1;
22093 if (!orig_type_die || !get_AT (type_die, DW_AT_byte_size))
22094 add_byte_size_attribute (type_die, type);
22095 if (!orig_type_die || !get_AT (type_die, DW_AT_alignment))
22096 add_alignment_attribute (type_die, type);
22097 if ((dwarf_version >= 3 || !dwarf_strict)
22098 && (!orig_type_die || !get_AT (type_die, DW_AT_type)))
22099 {
22100 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
22101 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
22102 context_die);
22103 }
22104 if (TYPE_STUB_DECL (type) != NULL_TREE)
22105 {
22106 if (!orig_type_die || !get_AT (type_die, DW_AT_decl_file))
22107 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
22108 if (!orig_type_die || !get_AT (type_die, DW_AT_accessibility))
22109 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
22110 }
22111
22112 /* If the first reference to this type was as the return type of an
22113 inline function, then it may not have a parent. Fix this now. */
22114 if (type_die->die_parent == NULL)
22115 add_child_die (scope_die_for (type, context_die), type_die);
22116
22117 for (link = TYPE_VALUES (type);
22118 link != NULL; link = TREE_CHAIN (link))
22119 {
22120 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
22121 tree value = TREE_VALUE (link);
22122
22123 gcc_assert (!ENUM_IS_OPAQUE (type));
22124 add_name_attribute (enum_die,
22125 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
22126
22127 if (TREE_CODE (value) == CONST_DECL)
22128 value = DECL_INITIAL (value);
22129
22130 if (simple_type_size_in_bits (TREE_TYPE (value))
22131 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
22132 {
22133 /* For constant forms created by add_AT_unsigned, DWARF
22134 consumers (GDB, elfutils, etc.) always zero-extend
22135 the value. Only when the actual value is negative
22136 do we need to use add_AT_int to generate a constant
22137 form that can represent negative values. */
22138 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
22139 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
22140 add_AT_unsigned (enum_die, DW_AT_const_value,
22141 (unsigned HOST_WIDE_INT) val);
22142 else
22143 add_AT_int (enum_die, DW_AT_const_value, val);
22144 }
22145 else
22146 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
22147 that here. TODO: This should be re-worked to use correct
22148 signed/unsigned double tags for all cases. */
22149 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
22150 }
22151
22152 add_gnat_descriptive_type_attribute (type_die, type, context_die);
22153 if (TYPE_ARTIFICIAL (type)
22154 && (!orig_type_die || !get_AT (type_die, DW_AT_artificial)))
22155 add_AT_flag (type_die, DW_AT_artificial, 1);
22156 }
22157 else
22158 add_AT_flag (type_die, DW_AT_declaration, 1);
22159
22160 add_pubtype (type, type_die);
22161
22162 return type_die;
22163 }
22164
22165 /* Generate a DIE to represent either a real live formal parameter decl or to
22166 represent just the type of some formal parameter position in some function
22167 type.
22168
22169 Note that this routine is a bit unusual because its argument may be a
22170 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
22171 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
22172 node. If it's the former then this function is being called to output a
22173 DIE to represent a formal parameter object (or some inlining thereof). If
22174 it's the latter, then this function is only being called to output a
22175 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
22176 argument type of some subprogram type.
22177 If EMIT_NAME_P is true, name and source coordinate attributes
22178 are emitted. */
22179
22180 static dw_die_ref
22181 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
22182 dw_die_ref context_die)
22183 {
22184 tree node_or_origin = node ? node : origin;
22185 tree ultimate_origin;
22186 dw_die_ref parm_die = NULL;
22187
22188 if (DECL_P (node_or_origin))
22189 {
22190 parm_die = lookup_decl_die (node);
22191
22192 /* If the contexts differ, we may not be talking about the same
22193 thing.
22194 ??? When in LTO the DIE parent is the "abstract" copy and the
22195 context_die is the specification "copy". But this whole block
22196 should eventually no longer be needed. */
22197 if (parm_die && parm_die->die_parent != context_die && !in_lto_p)
22198 {
22199 if (!DECL_ABSTRACT_P (node))
22200 {
22201 /* This can happen when creating an inlined instance, in
22202 which case we need to create a new DIE that will get
22203 annotated with DW_AT_abstract_origin. */
22204 parm_die = NULL;
22205 }
22206 else
22207 gcc_unreachable ();
22208 }
22209
22210 if (parm_die && parm_die->die_parent == NULL)
22211 {
22212 /* Check that parm_die already has the right attributes that
22213 we would have added below. If any attributes are
22214 missing, fall through to add them. */
22215 if (! DECL_ABSTRACT_P (node_or_origin)
22216 && !get_AT (parm_die, DW_AT_location)
22217 && !get_AT (parm_die, DW_AT_const_value))
22218 /* We are missing location info, and are about to add it. */
22219 ;
22220 else
22221 {
22222 add_child_die (context_die, parm_die);
22223 return parm_die;
22224 }
22225 }
22226 }
22227
22228 /* If we have a previously generated DIE, use it, unless this is a
22229 concrete instance (origin != NULL), in which case we need a new
22230 DIE with a corresponding DW_AT_abstract_origin. */
22231 bool reusing_die;
22232 if (parm_die && origin == NULL)
22233 reusing_die = true;
22234 else
22235 {
22236 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
22237 reusing_die = false;
22238 }
22239
22240 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
22241 {
22242 case tcc_declaration:
22243 ultimate_origin = decl_ultimate_origin (node_or_origin);
22244 if (node || ultimate_origin)
22245 origin = ultimate_origin;
22246
22247 if (reusing_die)
22248 goto add_location;
22249
22250 if (origin != NULL)
22251 add_abstract_origin_attribute (parm_die, origin);
22252 else if (emit_name_p)
22253 add_name_and_src_coords_attributes (parm_die, node);
22254 if (origin == NULL
22255 || (! DECL_ABSTRACT_P (node_or_origin)
22256 && variably_modified_type_p (TREE_TYPE (node_or_origin),
22257 decl_function_context
22258 (node_or_origin))))
22259 {
22260 tree type = TREE_TYPE (node_or_origin);
22261 if (decl_by_reference_p (node_or_origin))
22262 add_type_attribute (parm_die, TREE_TYPE (type),
22263 TYPE_UNQUALIFIED,
22264 false, context_die);
22265 else
22266 add_type_attribute (parm_die, type,
22267 decl_quals (node_or_origin),
22268 false, context_die);
22269 }
22270 if (origin == NULL && DECL_ARTIFICIAL (node))
22271 add_AT_flag (parm_die, DW_AT_artificial, 1);
22272 add_location:
22273 if (node && node != origin)
22274 equate_decl_number_to_die (node, parm_die);
22275 if (! DECL_ABSTRACT_P (node_or_origin))
22276 add_location_or_const_value_attribute (parm_die, node_or_origin,
22277 node == NULL);
22278
22279 break;
22280
22281 case tcc_type:
22282 /* We were called with some kind of a ..._TYPE node. */
22283 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
22284 context_die);
22285 break;
22286
22287 default:
22288 gcc_unreachable ();
22289 }
22290
22291 return parm_die;
22292 }
22293
22294 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
22295 children DW_TAG_formal_parameter DIEs representing the arguments of the
22296 parameter pack.
22297
22298 PARM_PACK must be a function parameter pack.
22299 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
22300 must point to the subsequent arguments of the function PACK_ARG belongs to.
22301 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
22302 If NEXT_ARG is non-NULL, *NEXT_ARG is set to the function argument
22303 following the last one for which a DIE was generated. */
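/* Illustrative example: for the C++ function template
   `template<typename... T> void f (T... args);' instantiated as
   f<int, char>, the two PARM_DECLs expanded from `args' become
   DW_TAG_formal_parameter children of a single
   DW_TAG_GNU_formal_parameter_pack DIE under the subprogram DIE, and
   *NEXT_ARG is left pointing past the last expanded argument.  */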
22304
22305 static dw_die_ref
22306 gen_formal_parameter_pack_die (tree parm_pack,
22307 tree pack_arg,
22308 dw_die_ref subr_die,
22309 tree *next_arg)
22310 {
22311 tree arg;
22312 dw_die_ref parm_pack_die;
22313
22314 gcc_assert (parm_pack
22315 && lang_hooks.function_parameter_pack_p (parm_pack)
22316 && subr_die);
22317
22318 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
22319 add_src_coords_attributes (parm_pack_die, parm_pack);
22320
22321 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
22322 {
22323 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
22324 parm_pack))
22325 break;
22326 gen_formal_parameter_die (arg, NULL,
22327 false /* Don't emit name attribute. */,
22328 parm_pack_die);
22329 }
22330 if (next_arg)
22331 *next_arg = arg;
22332 return parm_pack_die;
22333 }
22334
22335 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
22336 at the end of an (ANSI prototyped) formal parameter list. */
22337
22338 static void
22339 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
22340 {
22341 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
22342 }
22343
22344 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
22345 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
22346 parameters as specified in some function type specification (except for
22347 those which appear as part of a function *definition*). */
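/* Illustrative example: for the function type `int (int, char, ...)' the
   first pass below emits two nameless DW_TAG_formal_parameter DIEs (for
   int and char); because the argument list does not end in void, a
   DW_TAG_unspecified_parameters DIE is appended for the ellipsis.  For a
   METHOD_TYPE the first (this) parameter is additionally marked
   DW_AT_artificial and, when permitted, referenced by
   DW_AT_object_pointer.  */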
22348
22349 static void
22350 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
22351 {
22352 tree link;
22353 tree formal_type = NULL;
22354 tree first_parm_type;
22355 tree arg;
22356
22357 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
22358 {
22359 arg = DECL_ARGUMENTS (function_or_method_type);
22360 function_or_method_type = TREE_TYPE (function_or_method_type);
22361 }
22362 else
22363 arg = NULL_TREE;
22364
22365 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
22366
22367 /* Make our first pass over the list of formal parameter types and output a
22368 DW_TAG_formal_parameter DIE for each one. */
22369 for (link = first_parm_type; link; )
22370 {
22371 dw_die_ref parm_die;
22372
22373 formal_type = TREE_VALUE (link);
22374 if (formal_type == void_type_node)
22375 break;
22376
22377 /* Output a (nameless) DIE to represent the formal parameter itself. */
22378 parm_die = gen_formal_parameter_die (formal_type, NULL,
22379 true /* Emit name attribute. */,
22380 context_die);
22381 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
22382 && link == first_parm_type)
22383 {
22384 add_AT_flag (parm_die, DW_AT_artificial, 1);
22385 if (dwarf_version >= 3 || !dwarf_strict)
22386 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
22387 }
22388 else if (arg && DECL_ARTIFICIAL (arg))
22389 add_AT_flag (parm_die, DW_AT_artificial, 1);
22390
22391 link = TREE_CHAIN (link);
22392 if (arg)
22393 arg = DECL_CHAIN (arg);
22394 }
22395
22396 /* If this function type has an ellipsis, add a
22397 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
22398 if (formal_type != void_type_node)
22399 gen_unspecified_parameters_die (function_or_method_type, context_die);
22400
22401 /* Make our second (and final) pass over the list of formal parameter types
22402 and output DIEs to represent those types (as necessary). */
22403 for (link = TYPE_ARG_TYPES (function_or_method_type);
22404 link && TREE_VALUE (link);
22405 link = TREE_CHAIN (link))
22406 gen_type_die (TREE_VALUE (link), context_die);
22407 }
22408
22409 /* We want to generate the DIE for TYPE so that we can generate the
22410 die for MEMBER, which has been defined; we will need to refer back
22411 to the member declaration nested within TYPE. If we're trying to
22412 generate minimal debug info for TYPE, processing TYPE won't do the
22413 trick; we need to attach the member declaration by hand. */
22414
22415 static void
22416 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
22417 {
22418 gen_type_die (type, context_die);
22419
22420 /* If we're trying to avoid duplicate debug info, we may not have
22421 emitted the member decl for this function. Emit it now. */
22422 if (TYPE_STUB_DECL (type)
22423 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
22424 && ! lookup_decl_die (member))
22425 {
22426 dw_die_ref type_die;
22427 gcc_assert (!decl_ultimate_origin (member));
22428
22429 type_die = lookup_type_die_strip_naming_typedef (type);
22430 if (TREE_CODE (member) == FUNCTION_DECL)
22431 gen_subprogram_die (member, type_die);
22432 else if (TREE_CODE (member) == FIELD_DECL)
22433 {
22434 /* Ignore the nameless fields that are used to skip bits but handle
22435 C++ anonymous unions and structs. */
22436 if (DECL_NAME (member) != NULL_TREE
22437 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
22438 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
22439 {
22440 struct vlr_context vlr_ctx = {
22441 DECL_CONTEXT (member), /* struct_type */
22442 NULL_TREE /* variant_part_offset */
22443 };
22444 gen_type_die (member_declared_type (member), type_die);
22445 gen_field_die (member, &vlr_ctx, type_die);
22446 }
22447 }
22448 else
22449 gen_variable_die (member, NULL_TREE, type_die);
22450 }
22451 }
22452 \f
22453 /* Forward declare these functions, because they are mutually recursive
22454 with their set_block_* pairing functions. */
22455 static void set_decl_origin_self (tree);
22456
22457 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
22458 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
22459 that it points to the node itself, thus indicating that the node is its
22460 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
22461 the given node is NULL, recursively descend the decl/block tree which
22462 it is the root of, and for each other ..._DECL or BLOCK node contained
22463 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
22464 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
22465 values to point to themselves. */
22466
22467 static void
22468 set_block_origin_self (tree stmt)
22469 {
22470 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
22471 {
22472 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
22473
22474 {
22475 tree local_decl;
22476
22477 for (local_decl = BLOCK_VARS (stmt);
22478 local_decl != NULL_TREE;
22479 local_decl = DECL_CHAIN (local_decl))
22480 /* Do not recurse on nested functions since the inlining status
22481 of parent and child can be different as per the DWARF spec. */
22482 if (TREE_CODE (local_decl) != FUNCTION_DECL
22483 && !DECL_EXTERNAL (local_decl))
22484 set_decl_origin_self (local_decl);
22485 }
22486
22487 {
22488 tree subblock;
22489
22490 for (subblock = BLOCK_SUBBLOCKS (stmt);
22491 subblock != NULL_TREE;
22492 subblock = BLOCK_CHAIN (subblock))
22493 set_block_origin_self (subblock); /* Recurse. */
22494 }
22495 }
22496 }
22497
22498 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
22499 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
22500 node so that it points to the node itself, thus indicating that the
22501 node represents its own (abstract) origin. Additionally, if the
22502 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
22503 the decl/block tree of which the given node is the root, and for
22504 each other ..._DECL or BLOCK node contained therein whose
22505 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
22506 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
22507 point to themselves. */
22508
22509 static void
22510 set_decl_origin_self (tree decl)
22511 {
22512 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
22513 {
22514 DECL_ABSTRACT_ORIGIN (decl) = decl;
22515 if (TREE_CODE (decl) == FUNCTION_DECL)
22516 {
22517 tree arg;
22518
22519 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
22520 DECL_ABSTRACT_ORIGIN (arg) = arg;
22521 if (DECL_INITIAL (decl) != NULL_TREE
22522 && DECL_INITIAL (decl) != error_mark_node)
22523 set_block_origin_self (DECL_INITIAL (decl));
22524 }
22525 }
22526 }
22527 \f
22528 /* Mark the early DIE for DECL as the abstract instance. */
22529
22530 static void
22531 dwarf2out_abstract_function (tree decl)
22532 {
22533 dw_die_ref old_die;
22534
22535 /* Make sure we have the actual abstract inline, not a clone. */
22536 decl = DECL_ORIGIN (decl);
22537
22538 if (DECL_IGNORED_P (decl))
22539 return;
22540
22541 /* In LTO we're all set. We already created abstract instances
22542 early and we want to avoid creating a concrete instance of that
22543 if we don't output it. */
22544 if (in_lto_p)
22545 return;
22546
22547 old_die = lookup_decl_die (decl);
22548 gcc_assert (old_die != NULL);
22549 if (get_AT (old_die, DW_AT_inline))
22550 /* We've already generated the abstract instance. */
22551 return;
22552
22553 /* Go ahead and put DW_AT_inline on the DIE. */
22554 if (DECL_DECLARED_INLINE_P (decl))
22555 {
22556 if (cgraph_function_possibly_inlined_p (decl))
22557 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
22558 else
22559 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
22560 }
22561 else
22562 {
22563 if (cgraph_function_possibly_inlined_p (decl))
22564 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
22565 else
22566 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
22567 }
22568
22569 if (DECL_DECLARED_INLINE_P (decl)
22570 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
22571 add_AT_flag (old_die, DW_AT_artificial, 1);
22572
22573 set_decl_origin_self (decl);
22574 }
22575
22576 /* Helper function of premark_used_types() which gets called through
22577 hash_set::traverse.
22578
22579 Marks the DIE of the given TYPE as perennial, so it never gets
22580 marked as unused by prune_unused_types. */
22581
22582 bool
22583 premark_used_types_helper (tree const &type, void *)
22584 {
22585 dw_die_ref die;
22586
22587 die = lookup_type_die (type);
22588 if (die != NULL)
22589 die->die_perennial_p = 1;
22590 return true;
22591 }
22592
22593 /* Helper function of premark_types_used_by_global_vars which gets called
22594 through htab_traverse.
22595
22596 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22597 marked as unused by prune_unused_types. The DIE of the type is marked
22598 only if the global variable using the type will actually be emitted. */
22599
22600 int
22601 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
22602 void *)
22603 {
22604 struct types_used_by_vars_entry *entry;
22605 dw_die_ref die;
22606
22607 entry = (struct types_used_by_vars_entry *) *slot;
22608 gcc_assert (entry->type != NULL
22609 && entry->var_decl != NULL);
22610 die = lookup_type_die (entry->type);
22611 if (die)
22612 {
22613 /* Ask cgraph if the global variable really is to be emitted.
22614 If yes, then we'll keep the DIE of ENTRY->TYPE. */
22615 varpool_node *node = varpool_node::get (entry->var_decl);
22616 if (node && node->definition)
22617 {
22618 die->die_perennial_p = 1;
22619 /* Keep the parent DIEs as well. */
22620 while ((die = die->die_parent) && die->die_perennial_p == 0)
22621 die->die_perennial_p = 1;
22622 }
22623 }
22624 return 1;
22625 }
22626
22627 /* Mark all members of used_types_hash as perennial. */
22628
22629 static void
22630 premark_used_types (struct function *fun)
22631 {
22632 if (fun && fun->used_types_hash)
22633 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
22634 }
22635
22636 /* Mark all members of types_used_by_vars_entry as perennial. */
22637
22638 static void
22639 premark_types_used_by_global_vars (void)
22640 {
22641 if (types_used_by_vars_hash)
22642 types_used_by_vars_hash
22643 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
22644 }
22645
22646 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
22647 for CA_LOC call arg loc node. */
22648
22649 static dw_die_ref
22650 gen_call_site_die (tree decl, dw_die_ref subr_die,
22651 struct call_arg_loc_node *ca_loc)
22652 {
22653 dw_die_ref stmt_die = NULL, die;
22654 tree block = ca_loc->block;
22655
22656 while (block
22657 && block != DECL_INITIAL (decl)
22658 && TREE_CODE (block) == BLOCK)
22659 {
22660 stmt_die = lookup_block_die (block);
22661 if (stmt_die)
22662 break;
22663 block = BLOCK_SUPERCONTEXT (block);
22664 }
22665 if (stmt_die == NULL)
22666 stmt_die = subr_die;
22667 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
22668 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
22669 if (ca_loc->tail_call_p)
22670 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
22671 if (ca_loc->symbol_ref)
22672 {
22673 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
22674 if (tdie)
22675 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
22676 else
22677 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
22678 false);
22679 }
22680 return die;
22681 }
22682
22683 /* Generate a DIE to represent a declared function (either file-scope or
22684 block-local). */
22685
22686 static void
22687 gen_subprogram_die (tree decl, dw_die_ref context_die)
22688 {
22689 tree origin = decl_ultimate_origin (decl);
22690 dw_die_ref subr_die;
22691 dw_die_ref old_die = lookup_decl_die (decl);
22692
22693 /* This function gets called multiple times for different stages of
22694 the debug process. For example, for func() in this code:
22695
22696 namespace S
22697 {
22698 void func() { ... }
22699 }
22700
22701 ...we get called 4 times. Twice in early debug and twice in
22702 late debug:
22703
22704 Early debug
22705 -----------
22706
22707 1. Once while generating func() within the namespace. This is
22708 the declaration. The declaration bit below is set, as the
22709 context is the namespace.
22710
22711 A new DIE will be generated with DW_AT_declaration set.
22712
22713 2. Once for func() itself. This is the specification. The
22714 declaration bit below is clear as the context is the CU.
22715
22716 We will use the cached DIE from (1) to create a new DIE with
22717 DW_AT_specification pointing to the declaration in (1).
22718
22719 Late debug via rest_of_handle_final()
22720 -------------------------------------
22721
22722 3. Once generating func() within the namespace. This is also the
22723 declaration, as in (1), but this time we will early exit below
22724 as we have a cached DIE and a declaration needs no additional
22725 annotations (no locations), as the source declaration line
22726 info is enough.
22727
22728 4. Once for func() itself. As in (2), this is the specification,
22729 but this time we will re-use the cached DIE, and just annotate
22730 it with the location information that should now be available.
22731
22732 For something without namespaces, but with abstract instances, we
22733 are also called multiple times:
22734
22735 class Base
22736 {
22737 public:
22738 Base (); // constructor declaration (1)
22739 };
22740
22741 Base::Base () { } // constructor specification (2)
22742
22743 Early debug
22744 -----------
22745
22746 1. Once for the Base() constructor by virtue of it being a
22747 member of the Base class. This is done via
22748 rest_of_type_compilation.
22749
22750 This is a declaration, so a new DIE will be created with
22751 DW_AT_declaration.
22752
22753 2. Once for the Base() constructor definition, but this time
22754 while generating the abstract instance of the base
22755 constructor (__base_ctor) which is being generated via early
22756 debug of reachable functions.
22757
22758 Even though we have a cached version of the declaration (1),
22759 we will create a DW_AT_specification of the declaration DIE
22760 in (1).
22761
22762 3. Once for the __base_ctor itself, but this time, we generate
22763 a DW_AT_abstract_origin version of the DW_AT_specification in
22764 (2).
22765
22766 Late debug via rest_of_handle_final
22767 -----------------------------------
22768
22769 4. One final time for the __base_ctor (which will have a cached
22770 DIE with DW_AT_abstract_origin created in (3)). This time,
22771 we will just annotate the location information now
22772 available.
22773 */
22774 int declaration = (current_function_decl != decl
22775 || class_or_namespace_scope_p (context_die));
22776
22777 /* A declaration that has been previously dumped needs no
22778 additional information. */
22779 if (old_die && declaration)
22780 return;
22781
22782 /* Now that the C++ front end lazily declares artificial member fns, we
22783 might need to retrofit the declaration into its class. */
22784 if (!declaration && !origin && !old_die
22785 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22786 && !class_or_namespace_scope_p (context_die)
22787 && debug_info_level > DINFO_LEVEL_TERSE)
22788 old_die = force_decl_die (decl);
22789
22790 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
22791 if (origin != NULL)
22792 {
22793 gcc_assert (!declaration || local_scope_p (context_die));
22794
22795 /* Fixup die_parent for the abstract instance of a nested
22796 inline function. */
22797 if (old_die && old_die->die_parent == NULL)
22798 add_child_die (context_die, old_die);
22799
22800 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22801 {
22802 /* If we have a DW_AT_abstract_origin we have a working
22803 cached version. */
22804 subr_die = old_die;
22805 }
22806 else
22807 {
22808 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22809 add_abstract_origin_attribute (subr_die, origin);
22810 /* This is where the actual code for a cloned function is.
22811 Let's emit linkage name attribute for it. This helps
22812 debuggers to, e.g., set breakpoints into
22813 constructors/destructors when the user asks "break
22814 K::K". */
22815 add_linkage_name (subr_die, decl);
22816 }
22817 }
22818 /* A cached copy, possibly from early dwarf generation. Reuse as
22819 much as possible. */
22820 else if (old_die)
22821 {
22822 if (!get_AT_flag (old_die, DW_AT_declaration)
22823 /* We can have a normal definition following an inline one in the
22824 case of redefinition of GNU C extern inlines.
22825 It seems reasonable to use AT_specification in this case. */
22826 && !get_AT (old_die, DW_AT_inline))
22827 {
22828 /* Detect and ignore this case, where we are trying to output
22829 something we have already output. */
22830 if (get_AT (old_die, DW_AT_low_pc)
22831 || get_AT (old_die, DW_AT_ranges))
22832 return;
22833
22834 /* If we have no location information, this must be a
22835 partially generated DIE from early dwarf generation.
22836 Fall through and generate it. */
22837 }
22838
22839 /* If the definition comes from the same place as the declaration,
22840 maybe use the old DIE. We always want the DIE for this function
22841 that has the *_pc attributes to be under comp_unit_die so the
22842 debugger can find it. We also need to do this for abstract
22843 instances of inlines, since the spec requires the out-of-line copy
22844 to have the same parent. For local class methods, this doesn't
22845 apply; we just use the old DIE. */
22846 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22847 struct dwarf_file_data * file_index = lookup_filename (s.file);
22848 if (((is_unit_die (old_die->die_parent)
22849 /* This condition fixes the inconsistency/ICE with the
22850 following Fortran test (or some derivative thereof) while
22851 building libgfortran:
22852
22853 module some_m
22854 contains
22855 logical function funky (FLAG)
22856 funky = .true.
22857 end function
22858 end module
22859 */
22860 || (old_die->die_parent
22861 && old_die->die_parent->die_tag == DW_TAG_module)
22862 || local_scope_p (old_die->die_parent)
22863 || context_die == NULL)
22864 && (DECL_ARTIFICIAL (decl)
22865 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22866 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22867 == (unsigned) s.line)
22868 && (!debug_column_info
22869 || s.column == 0
22870 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22871 == (unsigned) s.column)))))
22872 /* With LTO if there's an abstract instance for
22873 the old DIE, this is a concrete instance and
22874 thus re-use the DIE. */
22875 || get_AT (old_die, DW_AT_abstract_origin))
22876 {
22877 subr_die = old_die;
22878
22879 /* Clear out the declaration attribute, but leave the
22880 parameters so they can be augmented with location
22881 information later. Unless this was a declaration, in
22882 which case, wipe out the nameless parameters and recreate
22883 them further down. */
22884 if (remove_AT (subr_die, DW_AT_declaration))
22885 {
22886
22887 remove_AT (subr_die, DW_AT_object_pointer);
22888 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
22889 }
22890 }
22891 /* Make a specification pointing to the previously built
22892 declaration. */
22893 else
22894 {
22895 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22896 add_AT_specification (subr_die, old_die);
22897 add_pubname (decl, subr_die);
22898 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
22899 add_AT_file (subr_die, DW_AT_decl_file, file_index);
22900 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
22901 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
22902 if (debug_column_info
22903 && s.column
22904 && (get_AT_unsigned (old_die, DW_AT_decl_column)
22905 != (unsigned) s.column))
22906 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
22907
22908 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
22909 emit the real type on the definition die. */
22910 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
22911 {
22912 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
22913 if (die == auto_die || die == decltype_auto_die)
22914 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22915 TYPE_UNQUALIFIED, false, context_die);
22916 }
22917
22918 /* When we process the method declaration, we haven't seen
22919 the out-of-class defaulted definition yet, so we have to
22920 recheck now. */
22921 if ((dwarf_version >= 5 || ! dwarf_strict)
22922 && !get_AT (subr_die, DW_AT_defaulted))
22923 {
22924 int defaulted
22925 = lang_hooks.decls.decl_dwarf_attribute (decl,
22926 DW_AT_defaulted);
22927 if (defaulted != -1)
22928 {
22929 /* Other values must have been handled before. */
22930 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
22931 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22932 }
22933 }
22934 }
22935 }
22936 /* Create a fresh DIE for anything else. */
22937 else
22938 {
22939 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22940
22941 if (TREE_PUBLIC (decl))
22942 add_AT_flag (subr_die, DW_AT_external, 1);
22943
22944 add_name_and_src_coords_attributes (subr_die, decl);
22945 add_pubname (decl, subr_die);
22946 if (debug_info_level > DINFO_LEVEL_TERSE)
22947 {
22948 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
22949 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22950 TYPE_UNQUALIFIED, false, context_die);
22951 }
22952
22953 add_pure_or_virtual_attribute (subr_die, decl);
22954 if (DECL_ARTIFICIAL (decl))
22955 add_AT_flag (subr_die, DW_AT_artificial, 1);
22956
22957 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
22958 add_AT_flag (subr_die, DW_AT_noreturn, 1);
22959
22960 add_alignment_attribute (subr_die, decl);
22961
22962 add_accessibility_attribute (subr_die, decl);
22963 }
22964
22965 /* Unless we have an existing non-declaration DIE, equate the new
22966 DIE. */
22967 if (!old_die || is_declaration_die (old_die))
22968 equate_decl_number_to_die (decl, subr_die);
22969
22970 if (declaration)
22971 {
22972 if (!old_die || !get_AT (old_die, DW_AT_inline))
22973 {
22974 add_AT_flag (subr_die, DW_AT_declaration, 1);
22975
22976 /* If this is an explicit function declaration then generate
22977 a DW_AT_explicit attribute. */
22978 if ((dwarf_version >= 3 || !dwarf_strict)
22979 && lang_hooks.decls.decl_dwarf_attribute (decl,
22980 DW_AT_explicit) == 1)
22981 add_AT_flag (subr_die, DW_AT_explicit, 1);
22982
22983 /* If this is a C++11 deleted special function member then generate
22984 a DW_AT_deleted attribute. */
22985 if ((dwarf_version >= 5 || !dwarf_strict)
22986 && lang_hooks.decls.decl_dwarf_attribute (decl,
22987 DW_AT_deleted) == 1)
22988 add_AT_flag (subr_die, DW_AT_deleted, 1);
22989
22990 /* If this is a C++11 defaulted special function member then
22991 generate a DW_AT_defaulted attribute. */
22992 if (dwarf_version >= 5 || !dwarf_strict)
22993 {
22994 int defaulted
22995 = lang_hooks.decls.decl_dwarf_attribute (decl,
22996 DW_AT_defaulted);
22997 if (defaulted != -1)
22998 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22999 }
23000
23001 /* If this is a C++11 non-static member function with & ref-qualifier
23002 then generate a DW_AT_reference attribute. */
23003 if ((dwarf_version >= 5 || !dwarf_strict)
23004 && lang_hooks.decls.decl_dwarf_attribute (decl,
23005 DW_AT_reference) == 1)
23006 add_AT_flag (subr_die, DW_AT_reference, 1);
23007
23008 /* If this is a C++11 non-static member function with &&
23009 ref-qualifier then generate a DW_AT_reference attribute. */
23010 if ((dwarf_version >= 5 || !dwarf_strict)
23011 && lang_hooks.decls.decl_dwarf_attribute (decl,
23012 DW_AT_rvalue_reference)
23013 == 1)
23014 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
23015 }
23016 }
23017 /* For non DECL_EXTERNALs, if range information is available, fill
23018 the DIE with it. */
23019 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
23020 {
23021 HOST_WIDE_INT cfa_fb_offset;
23022
23023 struct function *fun = DECL_STRUCT_FUNCTION (decl);
23024
23025 if (!crtl->has_bb_partition)
23026 {
23027 dw_fde_ref fde = fun->fde;
23028 if (fde->dw_fde_begin)
23029 {
23030 /* We have already generated the labels. */
23031 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23032 fde->dw_fde_end, false);
23033 }
23034 else
23035 {
23036 /* Create start/end labels and add the range. */
23037 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
23038 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
23039 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
23040 current_function_funcdef_no);
23041 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
23042 current_function_funcdef_no);
23043 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
23044 false);
23045 }
23046
23047 #if VMS_DEBUGGING_INFO
23048 /* HP OpenVMS Industry Standard 64: DWARF Extensions
23049 Section 2.3 Prologue and Epilogue Attributes:
23050 When a breakpoint is set on entry to a function, it is generally
23051 desirable for execution to be suspended, not on the very first
23052 instruction of the function, but rather at a point after the
23053 function's frame has been set up, after any language defined local
23054 declaration processing has been completed, and before execution of
23055 the first statement of the function begins. Debuggers generally
23056 cannot properly determine where this point is. Similarly for a
23057 breakpoint set on exit from a function. The prologue and epilogue
23058 attributes allow a compiler to communicate the location(s) to use. */
23059
23060 {
23061 if (fde->dw_fde_vms_end_prologue)
23062 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
23063 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
23064
23065 if (fde->dw_fde_vms_begin_epilogue)
23066 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
23067 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
23068 }
23069 #endif
23070
23071 }
23072 else
23073 {
23074 /* Generate pubnames entries for the split function code ranges. */
23075 dw_fde_ref fde = fun->fde;
23076
23077 if (fde->dw_fde_second_begin)
23078 {
23079 if (dwarf_version >= 3 || !dwarf_strict)
23080 {
23081 /* We should use ranges for non-contiguous code section
23082 addresses. Use the actual code range for the initial
23083 section, since the HOT/COLD labels might precede an
23084 alignment offset. */
23085 bool range_list_added = false;
23086 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
23087 fde->dw_fde_end, &range_list_added,
23088 false);
23089 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
23090 fde->dw_fde_second_end,
23091 &range_list_added, false);
23092 if (range_list_added)
23093 add_ranges (NULL);
23094 }
23095 else
23096 {
23097 /* There is no real support in DW2 for this, so we make
23098 a work-around. First, emit the pub name for the segment
23099 containing the function label. Then make and emit a
23100 simplified subprogram DIE for the second segment with the
23101 name prefixed by __second_sect_of_. We use the same
23102 linkage name for the second die so that gdb will find both
23103 sections when given "b foo". */
23104 const char *name = NULL;
23105 tree decl_name = DECL_NAME (decl);
23106 dw_die_ref seg_die;
23107
23108 /* Do the 'primary' section. */
23109 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23110 fde->dw_fde_end, false);
23111
23112 /* Build a minimal DIE for the secondary section. */
23113 seg_die = new_die (DW_TAG_subprogram,
23114 subr_die->die_parent, decl);
23115
23116 if (TREE_PUBLIC (decl))
23117 add_AT_flag (seg_die, DW_AT_external, 1);
23118
23119 if (decl_name != NULL
23120 && IDENTIFIER_POINTER (decl_name) != NULL)
23121 {
23122 name = dwarf2_name (decl, 1);
23123 if (! DECL_ARTIFICIAL (decl))
23124 add_src_coords_attributes (seg_die, decl);
23125
23126 add_linkage_name (seg_die, decl);
23127 }
23128 gcc_assert (name != NULL);
23129 add_pure_or_virtual_attribute (seg_die, decl);
23130 if (DECL_ARTIFICIAL (decl))
23131 add_AT_flag (seg_die, DW_AT_artificial, 1);
23132
23133 name = concat ("__second_sect_of_", name, NULL);
23134 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
23135 fde->dw_fde_second_end, false);
23136 add_name_attribute (seg_die, name);
23137 if (want_pubnames ())
23138 add_pubname_string (name, seg_die);
23139 }
23140 }
23141 else
23142 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
23143 false);
23144 }
23145
23146 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
23147
23148 /* We define the "frame base" as the function's CFA. This is more
23149 convenient for several reasons: (1) It's stable across the prologue
23150 and epilogue, which makes it better than just a frame pointer,
23151 (2) With dwarf3, there exists a one-byte encoding that allows us
23152 to reference the .debug_frame data by proxy, but failing that,
23153 (3) We can at least reuse the code inspection and interpretation
23154 code that determines the CFA position at various points in the
23155 function. */
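	  /* Roughly, this results in either
	       DW_AT_frame_base: DW_OP_call_frame_cfa
	     when DWARF 3+ and DWARF2-style unwind info are available, or
	     otherwise in a location expression/list derived from the CFA
	     tracking below.  */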
23156 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
23157 {
23158 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
23159 add_AT_loc (subr_die, DW_AT_frame_base, op);
23160 }
23161 else
23162 {
23163 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
23164 if (list->dw_loc_next)
23165 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
23166 else
23167 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
23168 }
23169
23170 /* Compute a displacement from the "steady-state frame pointer" to
23171 the CFA. The former is what all stack slots and argument slots
23172 will reference in the rtl; the latter is what we've told the
23173 debugger about. We'll need to adjust all frame_base references
23174 by this displacement. */
23175 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
23176
23177 if (fun->static_chain_decl)
23178 {
23179 /* DWARF requires here a location expression that computes the
23180 address of the enclosing subprogram's frame base. The machinery
23181 in tree-nested.c is supposed to store this specific address in the
23182 last field of the FRAME record. */
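	  /* A sketch of the case this handles (GNU C nested function,
	     purely illustrative):

	       void outer (void)
	       {
		 int x;
		 void nested (void) { x++; }
		 nested ();
	       }

	     X lives in the FRAME object that tree-nested.c builds for OUTER,
	     and the last field of that FRAME record holds the enclosing frame
	     base address that DW_AT_static_link must evaluate to.  */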
23183 const tree frame_type
23184 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
23185 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
23186
23187 tree fb_expr
23188 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
23189 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
23190 fb_expr, fb_decl, NULL_TREE);
23191
23192 add_AT_location_description (subr_die, DW_AT_static_link,
23193 loc_list_from_tree (fb_expr, 0, NULL));
23194 }
23195
23196 resolve_variable_values ();
23197 }
23198
23199 /* Generate child DIEs for template parameters. */
23200 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
23201 gen_generic_params_dies (decl);
23202
23203 /* Now output descriptions of the arguments for this function. This gets
23204 (unnecessarily?) complex because of the fact that the DECL_ARGUMENT list
23205 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
23206 `...' at the end of the formal parameter list. In order to find out if
23207 there was a trailing ellipsis or not, we must instead look at the type
23208 associated with the FUNCTION_DECL. This will be a node of type
23209 FUNCTION_TYPE. If the chain of type nodes hanging off of this
23210 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
23211 an ellipsis at the end. */
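  /* A sketch of that convention, for illustrative C declarations:

       int f (int, ...);    arg-type chain: int           (no trailing void,
							    so an ellipsis)
       int g (int);         arg-type chain: int, void      (prototyped, fixed)
       int h ();            arg-type chain: NULL           (unprototyped)  */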
23212
23213 /* In the case where we are describing a mere function declaration, all we
23214 need to do here (and all we *can* do here) is to describe the *types* of
23215 its formal parameters. */
23216 if (debug_info_level <= DINFO_LEVEL_TERSE)
23217 ;
23218 else if (declaration)
23219 gen_formal_types_die (decl, subr_die);
23220 else
23221 {
23222 /* Generate DIEs to represent all known formal parameters. */
23223 tree parm = DECL_ARGUMENTS (decl);
23224 tree generic_decl = early_dwarf
23225 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
23226 tree generic_decl_parm = generic_decl
23227 ? DECL_ARGUMENTS (generic_decl)
23228 : NULL;
23229
23230 /* Now we want to walk the list of parameters of the function and
23231 emit their relevant DIEs.
23232
23233 We consider the case of DECL being an instance of a generic function
23234 as well as it being a normal function.
23235
23236 If DECL is an instance of a generic function we walk the
23237 parameters of the generic function declaration _and_ the parameters of
23238 DECL itself. This is useful because we want to emit specific DIEs for
23239 function parameter packs and those are declared as part of the
23240 generic function declaration. In that particular case,
23241 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
23242 That DIE has children DIEs representing the set of arguments
23243 of the pack. Note that the set of pack arguments can be empty.
23244 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
23245 children DIE.
23246
23247 Otherwise, we just consider the parameters of DECL. */
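	 /* A sketch of the generic case (illustrative):

	      template<typename... T> void f (T... args);
	      template void f<int, long> (int, long);

	    Walking the generic declaration lets us emit one
	    DW_TAG_GNU_formal_parameter_pack DIE for ARGS, whose children
	    describe the two arguments of this particular instance.  */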
23248 while (generic_decl_parm || parm)
23249 {
23250 if (generic_decl_parm
23251 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
23252 gen_formal_parameter_pack_die (generic_decl_parm,
23253 parm, subr_die,
23254 &parm);
23255 else if (parm)
23256 {
23257 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
23258
23259 if (early_dwarf
23260 && parm == DECL_ARGUMENTS (decl)
23261 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
23262 && parm_die
23263 && (dwarf_version >= 3 || !dwarf_strict))
23264 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
23265
23266 parm = DECL_CHAIN (parm);
23267 }
23268 else if (parm)
23269 parm = DECL_CHAIN (parm);
23270
23271 if (generic_decl_parm)
23272 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
23273 }
23274
23275 /* Decide whether we need an unspecified_parameters DIE at the end.
23276 There are 2 more cases to do this for: 1) the ANSI ... declaration -
23277 this is detectable when the end of the arg list is not a
23278 void_type_node 2) an unprototyped function declaration (not a
23279 definition). This just means that we have no info about the
23280 parameters at all. */
23281 if (early_dwarf)
23282 {
23283 if (prototype_p (TREE_TYPE (decl)))
23284 {
23285 /* This is the prototyped case, check for.... */
23286 if (stdarg_p (TREE_TYPE (decl)))
23287 gen_unspecified_parameters_die (decl, subr_die);
23288 }
23289 else if (DECL_INITIAL (decl) == NULL_TREE)
23290 gen_unspecified_parameters_die (decl, subr_die);
23291 }
23292 }
23293
23294 if (subr_die != old_die)
23295 /* Add the calling convention attribute if requested. */
23296 add_calling_convention_attribute (subr_die, decl);
23297
23298 /* Output Dwarf info for all of the stuff within the body of the function
23299 (if it has one - it may be just a declaration).
23300
23301 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
23302 a function. This BLOCK actually represents the outermost binding contour
23303 for the function, i.e. the contour in which the function's formal
23304 parameters and labels get declared. Curiously, it appears that the front
23305 end doesn't actually put the PARM_DECL nodes for the current function onto
23306 the BLOCK_VARS list for this outer scope; instead, they are strung off
23307 the DECL_ARGUMENTS list for the function.
23308
23309 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
23310 the LABEL_DECL nodes for the function however, and we output DWARF info
23311 for those in decls_for_scope. Just within the `outer_scope' there will be
23312 a BLOCK node representing the function's outermost pair of curly braces,
23313 and any blocks used for the base and member initializers of a C++
23314 constructor function. */
23315 tree outer_scope = DECL_INITIAL (decl);
23316 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
23317 {
23318 int call_site_note_count = 0;
23319 int tail_call_site_note_count = 0;
23320
23321 /* Emit a DW_TAG_variable DIE for a named return value. */
23322 if (DECL_NAME (DECL_RESULT (decl)))
23323 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
23324
23325 /* The first time through decls_for_scope we will generate the
23326 DIEs for the locals. The second time, we fill in the
23327 location info. */
23328 decls_for_scope (outer_scope, subr_die);
23329
23330 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
23331 {
23332 struct call_arg_loc_node *ca_loc;
23333 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
23334 {
23335 dw_die_ref die = NULL;
23336 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
23337 rtx arg, next_arg;
23338 tree arg_decl = NULL_TREE;
23339
23340 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
23341 ? XEXP (ca_loc->call_arg_loc_note, 0)
23342 : NULL_RTX);
23343 arg; arg = next_arg)
23344 {
23345 dw_loc_descr_ref reg, val;
23346 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
23347 dw_die_ref cdie, tdie = NULL;
23348
23349 next_arg = XEXP (arg, 1);
23350 if (REG_P (XEXP (XEXP (arg, 0), 0))
23351 && next_arg
23352 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
23353 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
23354 && REGNO (XEXP (XEXP (arg, 0), 0))
23355 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
23356 next_arg = XEXP (next_arg, 1);
23357 if (mode == VOIDmode)
23358 {
23359 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
23360 if (mode == VOIDmode)
23361 mode = GET_MODE (XEXP (arg, 0));
23362 }
23363 if (mode == VOIDmode || mode == BLKmode)
23364 continue;
23365 /* Get dynamic information about call target only if we
23366 have no static information: we cannot generate both
23367 DW_AT_call_origin and DW_AT_call_target
23368 attributes. */
23369 if (ca_loc->symbol_ref == NULL_RTX)
23370 {
23371 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
23372 {
23373 tloc = XEXP (XEXP (arg, 0), 1);
23374 continue;
23375 }
23376 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
23377 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
23378 {
23379 tlocc = XEXP (XEXP (arg, 0), 1);
23380 continue;
23381 }
23382 }
23383 reg = NULL;
23384 if (REG_P (XEXP (XEXP (arg, 0), 0)))
23385 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
23386 VAR_INIT_STATUS_INITIALIZED);
23387 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
23388 {
23389 rtx mem = XEXP (XEXP (arg, 0), 0);
23390 reg = mem_loc_descriptor (XEXP (mem, 0),
23391 get_address_mode (mem),
23392 GET_MODE (mem),
23393 VAR_INIT_STATUS_INITIALIZED);
23394 }
23395 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
23396 == DEBUG_PARAMETER_REF)
23397 {
23398 tree tdecl
23399 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
23400 tdie = lookup_decl_die (tdecl);
23401 if (tdie == NULL)
23402 continue;
23403 arg_decl = tdecl;
23404 }
23405 else
23406 continue;
23407 if (reg == NULL
23408 && GET_CODE (XEXP (XEXP (arg, 0), 0))
23409 != DEBUG_PARAMETER_REF)
23410 continue;
23411 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
23412 VOIDmode,
23413 VAR_INIT_STATUS_INITIALIZED);
23414 if (val == NULL)
23415 continue;
23416 if (die == NULL)
23417 die = gen_call_site_die (decl, subr_die, ca_loc);
23418 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
23419 NULL_TREE);
23420 add_desc_attribute (cdie, arg_decl);
23421 if (reg != NULL)
23422 add_AT_loc (cdie, DW_AT_location, reg);
23423 else if (tdie != NULL)
23424 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
23425 tdie);
23426 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
23427 if (next_arg != XEXP (arg, 1))
23428 {
23429 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
23430 if (mode == VOIDmode)
23431 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
23432 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
23433 0), 1),
23434 mode, VOIDmode,
23435 VAR_INIT_STATUS_INITIALIZED);
23436 if (val != NULL)
23437 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
23438 val);
23439 }
23440 }
23441 if (die == NULL
23442 && (ca_loc->symbol_ref || tloc))
23443 die = gen_call_site_die (decl, subr_die, ca_loc);
23444 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
23445 {
23446 dw_loc_descr_ref tval = NULL;
23447
23448 if (tloc != NULL_RTX)
23449 tval = mem_loc_descriptor (tloc,
23450 GET_MODE (tloc) == VOIDmode
23451 ? Pmode : GET_MODE (tloc),
23452 VOIDmode,
23453 VAR_INIT_STATUS_INITIALIZED);
23454 if (tval)
23455 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
23456 else if (tlocc != NULL_RTX)
23457 {
23458 tval = mem_loc_descriptor (tlocc,
23459 GET_MODE (tlocc) == VOIDmode
23460 ? Pmode : GET_MODE (tlocc),
23461 VOIDmode,
23462 VAR_INIT_STATUS_INITIALIZED);
23463 if (tval)
23464 add_AT_loc (die,
23465 dwarf_AT (DW_AT_call_target_clobbered),
23466 tval);
23467 }
23468 }
23469 if (die != NULL)
23470 {
23471 call_site_note_count++;
23472 if (ca_loc->tail_call_p)
23473 tail_call_site_note_count++;
23474 }
23475 }
23476 }
23477 call_arg_locations = NULL;
23478 call_arg_loc_last = NULL;
23479 if (tail_call_site_count >= 0
23480 && tail_call_site_count == tail_call_site_note_count
23481 && (!dwarf_strict || dwarf_version >= 5))
23482 {
23483 if (call_site_count >= 0
23484 && call_site_count == call_site_note_count)
23485 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
23486 else
23487 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
23488 }
23489 call_site_count = -1;
23490 tail_call_site_count = -1;
23491 }
23492
23493 /* Mark used types after we have created DIEs for the functions scopes. */
23494 premark_used_types (DECL_STRUCT_FUNCTION (decl));
23495 }
23496
23497 /* Returns a hash value for X (which really is a die_struct). */
23498
23499 hashval_t
23500 block_die_hasher::hash (die_struct *d)
23501 {
23502 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
23503 }
23504
23505 /* Return nonzero if decl_id and die_parent of die_struct X is the same
23506 as decl_id and die_parent of die_struct Y. */
23507
23508 bool
23509 block_die_hasher::equal (die_struct *x, die_struct *y)
23510 {
23511 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
23512 }
23513
23514 /* Hold information about markers for inlined entry points. */
23515 struct GTY ((for_user)) inline_entry_data
23516 {
23517 /* The block that's the inlined_function_outer_scope for an inlined
23518 function. */
23519 tree block;
23520
23521 /* The label at the inlined entry point. */
23522 const char *label_pfx;
23523 unsigned int label_num;
23524
23525 /* The view number to be used as the inlined entry point. */
23526 var_loc_view view;
23527 };
23528
23529 struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
23530 {
23531 typedef tree compare_type;
23532 static inline hashval_t hash (const inline_entry_data *);
23533 static inline bool equal (const inline_entry_data *, const_tree);
23534 };
23535
23536 /* Hash table routines for inline_entry_data. */
23537
23538 inline hashval_t
23539 inline_entry_data_hasher::hash (const inline_entry_data *data)
23540 {
23541 return htab_hash_pointer (data->block);
23542 }
23543
23544 inline bool
23545 inline_entry_data_hasher::equal (const inline_entry_data *data,
23546 const_tree block)
23547 {
23548 return data->block == block;
23549 }
23550
23551 /* Inlined entry points pending DIE creation in this compilation unit. */
23552
23553 static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table;
23554
23555
23556 /* Return TRUE if DECL, which may have been previously generated as
23557 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
23558 true if decl (or its origin) is either an extern declaration or a
23559 class/namespace scoped declaration.
23560
23561 The declare_in_namespace support causes us to get two DIEs for one
23562 variable, both of which are declarations. We want to avoid
23563 considering one to be a specification, so we must test for
23564 DECLARATION and DW_AT_declaration. */
23565 static inline bool
23566 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
23567 {
23568 return (old_die && TREE_STATIC (decl) && !declaration
23569 && get_AT_flag (old_die, DW_AT_declaration) == 1);
23570 }
23571
23572 /* Return true if DECL is a local static. */
23573
23574 static inline bool
23575 local_function_static (tree decl)
23576 {
23577 gcc_assert (VAR_P (decl));
23578 return TREE_STATIC (decl)
23579 && DECL_CONTEXT (decl)
23580 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
23581 }
23582
23583 /* Generate a DIE to represent a declared data object.
23584 Either DECL or ORIGIN must be non-null. */
23585
23586 static void
23587 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
23588 {
23589 HOST_WIDE_INT off = 0;
23590 tree com_decl;
23591 tree decl_or_origin = decl ? decl : origin;
23592 tree ultimate_origin;
23593 dw_die_ref var_die;
23594 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
23595 bool declaration = (DECL_EXTERNAL (decl_or_origin)
23596 || class_or_namespace_scope_p (context_die));
23597 bool specialization_p = false;
23598 bool no_linkage_name = false;
23599
23600 /* While C++ inline static data members have definitions inside the
23601 class, force the first DIE to be a declaration, then let gen_member_die
23602 reparent it to the class context and call gen_variable_die again
23603 to create the outside of the class DIE for the definition. */
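  /* An illustrative C++17 sketch:

       struct S { static inline int x = 42; };

     The in-class definition of X is first given a declaration DIE here;
     gen_member_die then reparents it into S's DIE and gen_variable_die is
     called again for the out-of-class definition DIE.  */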
23604 if (!declaration
23605 && old_die == NULL
23606 && decl
23607 && DECL_CONTEXT (decl)
23608 && TYPE_P (DECL_CONTEXT (decl))
23609 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
23610 {
23611 declaration = true;
23612 if (dwarf_version < 5)
23613 no_linkage_name = true;
23614 }
23615
23616 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23617 if (decl || ultimate_origin)
23618 origin = ultimate_origin;
23619 com_decl = fortran_common (decl_or_origin, &off);
23620
23621 /* Symbol in common gets emitted as a child of the common block, in the form
23622 of a data member. */
23623 if (com_decl)
23624 {
23625 dw_die_ref com_die;
23626 dw_loc_list_ref loc = NULL;
23627 die_node com_die_arg;
23628
23629 var_die = lookup_decl_die (decl_or_origin);
23630 if (var_die)
23631 {
23632 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
23633 {
23634 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
23635 if (loc)
23636 {
23637 if (off)
23638 {
23639 /* Optimize the common case. */
23640 if (single_element_loc_list_p (loc)
23641 && loc->expr->dw_loc_opc == DW_OP_addr
23642 && loc->expr->dw_loc_next == NULL
23643 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
23644 == SYMBOL_REF)
23645 {
23646 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23647 loc->expr->dw_loc_oprnd1.v.val_addr
23648 = plus_constant (GET_MODE (x), x , off);
23649 }
23650 else
23651 loc_list_plus_const (loc, off);
23652 }
23653 add_AT_location_description (var_die, DW_AT_location, loc);
23654 remove_AT (var_die, DW_AT_declaration);
23655 }
23656 }
23657 return;
23658 }
23659
23660 if (common_block_die_table == NULL)
23661 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
23662
23663 com_die_arg.decl_id = DECL_UID (com_decl);
23664 com_die_arg.die_parent = context_die;
23665 com_die = common_block_die_table->find (&com_die_arg);
23666 if (! early_dwarf)
23667 loc = loc_list_from_tree (com_decl, 2, NULL);
23668 if (com_die == NULL)
23669 {
23670 const char *cnam
23671 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
23672 die_node **slot;
23673
23674 com_die = new_die (DW_TAG_common_block, context_die, decl);
23675 add_name_and_src_coords_attributes (com_die, com_decl);
23676 if (loc)
23677 {
23678 add_AT_location_description (com_die, DW_AT_location, loc);
23679 /* Avoid sharing the same loc descriptor between
23680 DW_TAG_common_block and DW_TAG_variable. */
23681 loc = loc_list_from_tree (com_decl, 2, NULL);
23682 }
23683 else if (DECL_EXTERNAL (decl_or_origin))
23684 add_AT_flag (com_die, DW_AT_declaration, 1);
23685 if (want_pubnames ())
23686 add_pubname_string (cnam, com_die); /* ??? needed? */
23687 com_die->decl_id = DECL_UID (com_decl);
23688 slot = common_block_die_table->find_slot (com_die, INSERT);
23689 *slot = com_die;
23690 }
23691 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
23692 {
23693 add_AT_location_description (com_die, DW_AT_location, loc);
23694 loc = loc_list_from_tree (com_decl, 2, NULL);
23695 remove_AT (com_die, DW_AT_declaration);
23696 }
23697 var_die = new_die (DW_TAG_variable, com_die, decl);
23698 add_name_and_src_coords_attributes (var_die, decl_or_origin);
23699 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
23700 decl_quals (decl_or_origin), false,
23701 context_die);
23702 add_alignment_attribute (var_die, decl);
23703 add_AT_flag (var_die, DW_AT_external, 1);
23704 if (loc)
23705 {
23706 if (off)
23707 {
23708 /* Optimize the common case. */
23709 if (single_element_loc_list_p (loc)
23710 && loc->expr->dw_loc_opc == DW_OP_addr
23711 && loc->expr->dw_loc_next == NULL
23712 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
23713 {
23714 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23715 loc->expr->dw_loc_oprnd1.v.val_addr
23716 = plus_constant (GET_MODE (x), x, off);
23717 }
23718 else
23719 loc_list_plus_const (loc, off);
23720 }
23721 add_AT_location_description (var_die, DW_AT_location, loc);
23722 }
23723 else if (DECL_EXTERNAL (decl_or_origin))
23724 add_AT_flag (var_die, DW_AT_declaration, 1);
23725 if (decl)
23726 equate_decl_number_to_die (decl, var_die);
23727 return;
23728 }
23729
23730 if (old_die)
23731 {
23732 if (declaration)
23733 {
23734 /* A declaration that has been previously dumped needs no
23735 further annotations, since it doesn't need location on
23736 the second pass. */
23737 return;
23738 }
23739 else if (decl_will_get_specification_p (old_die, decl, declaration)
23740 && !get_AT (old_die, DW_AT_specification))
23741 {
23742 /* Fall-thru so we can make a new variable die along with a
23743 DW_AT_specification. */
23744 }
23745 else if (origin && old_die->die_parent != context_die)
23746 {
23747 /* If we will be creating an inlined instance, we need a
23748 new DIE that will get annotated with
23749 DW_AT_abstract_origin. */
23750 gcc_assert (!DECL_ABSTRACT_P (decl));
23751 }
23752 else
23753 {
23754 /* If a DIE was dumped early, it still needs location info.
23755 Skip to where we fill the location bits. */
23756 var_die = old_die;
23757
23758 /* ??? In LTRANS we cannot annotate early created variably
23759 modified type DIEs without copying them and adjusting all
23760 references to them. Thus we dump them again. Also add a
23761 reference to them but beware of -g0 compile and -g link
23762 in which case the reference will be already present. */
23763 tree type = TREE_TYPE (decl_or_origin);
23764 if (in_lto_p
23765 && ! get_AT (var_die, DW_AT_type)
23766 && variably_modified_type_p
23767 (type, decl_function_context (decl_or_origin)))
23768 {
23769 if (decl_by_reference_p (decl_or_origin))
23770 add_type_attribute (var_die, TREE_TYPE (type),
23771 TYPE_UNQUALIFIED, false, context_die);
23772 else
23773 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
23774 false, context_die);
23775 }
23776
23777 goto gen_variable_die_location;
23778 }
23779 }
23780
23781 /* For static data members, the declaration in the class is supposed
23782 to have DW_TAG_member tag in DWARF{3,4} and we emit it for compatibility
23783 also in DWARF2; the specification should still be DW_TAG_variable
23784 referencing the DW_TAG_member DIE. */
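  /* E.g. (a sketch):

       struct A { static int i; };    in-class declaration: DW_TAG_member
				      (DW_TAG_variable from DWARF 5 on)
       int A::i;                      definition: DW_TAG_variable carrying
				      DW_AT_specification back to it.  */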
23785 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
23786 var_die = new_die (DW_TAG_member, context_die, decl);
23787 else
23788 var_die = new_die (DW_TAG_variable, context_die, decl);
23789
23790 if (origin != NULL)
23791 add_abstract_origin_attribute (var_die, origin);
23792
23793 /* Loop unrolling can create multiple blocks that refer to the same
23794 static variable, so we must test for the DW_AT_declaration flag.
23795
23796 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
23797 copy decls and set the DECL_ABSTRACT_P flag on them instead of
23798 sharing them.
23799
23800 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
23801 else if (decl_will_get_specification_p (old_die, decl, declaration))
23802 {
23803 /* This is a definition of a C++ class level static. */
23804 add_AT_specification (var_die, old_die);
23805 specialization_p = true;
23806 if (DECL_NAME (decl))
23807 {
23808 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23809 struct dwarf_file_data * file_index = lookup_filename (s.file);
23810
23811 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23812 add_AT_file (var_die, DW_AT_decl_file, file_index);
23813
23814 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23815 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
23816
23817 if (debug_column_info
23818 && s.column
23819 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23820 != (unsigned) s.column))
23821 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
23822
23823 if (old_die->die_tag == DW_TAG_member)
23824 add_linkage_name (var_die, decl);
23825 }
23826 }
23827 else
23828 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
23829
23830 if ((origin == NULL && !specialization_p)
23831 || (origin != NULL
23832 && !DECL_ABSTRACT_P (decl_or_origin)
23833 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
23834 decl_function_context
23835 (decl_or_origin))))
23836 {
23837 tree type = TREE_TYPE (decl_or_origin);
23838
23839 if (decl_by_reference_p (decl_or_origin))
23840 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23841 context_die);
23842 else
23843 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
23844 context_die);
23845 }
23846
23847 if (origin == NULL && !specialization_p)
23848 {
23849 if (TREE_PUBLIC (decl))
23850 add_AT_flag (var_die, DW_AT_external, 1);
23851
23852 if (DECL_ARTIFICIAL (decl))
23853 add_AT_flag (var_die, DW_AT_artificial, 1);
23854
23855 add_alignment_attribute (var_die, decl);
23856
23857 add_accessibility_attribute (var_die, decl);
23858 }
23859
23860 if (declaration)
23861 add_AT_flag (var_die, DW_AT_declaration, 1);
23862
23863 if (decl && (DECL_ABSTRACT_P (decl)
23864 || !old_die || is_declaration_die (old_die)))
23865 equate_decl_number_to_die (decl, var_die);
23866
23867 gen_variable_die_location:
23868 if (! declaration
23869 && (! DECL_ABSTRACT_P (decl_or_origin)
23870 /* Local static vars are shared between all clones/inlines,
23871 so emit DW_AT_location on the abstract DIE if DECL_RTL is
23872 already set. */
23873 || (VAR_P (decl_or_origin)
23874 && TREE_STATIC (decl_or_origin)
23875 && DECL_RTL_SET_P (decl_or_origin))))
23876 {
23877 if (early_dwarf)
23878 add_pubname (decl_or_origin, var_die);
23879 else
23880 add_location_or_const_value_attribute (var_die, decl_or_origin,
23881 decl == NULL);
23882 }
23883 else
23884 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
23885
23886 if ((dwarf_version >= 4 || !dwarf_strict)
23887 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23888 DW_AT_const_expr) == 1
23889 && !get_AT (var_die, DW_AT_const_expr)
23890 && !specialization_p)
23891 add_AT_flag (var_die, DW_AT_const_expr, 1);
23892
23893 if (!dwarf_strict)
23894 {
23895 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23896 DW_AT_inline);
23897 if (inl != -1
23898 && !get_AT (var_die, DW_AT_inline)
23899 && !specialization_p)
23900 add_AT_unsigned (var_die, DW_AT_inline, inl);
23901 }
23902 }
23903
23904 /* Generate a DIE to represent a named constant. */
23905
23906 static void
23907 gen_const_die (tree decl, dw_die_ref context_die)
23908 {
23909 dw_die_ref const_die;
23910 tree type = TREE_TYPE (decl);
23911
23912 const_die = lookup_decl_die (decl);
23913 if (const_die)
23914 return;
23915
23916 const_die = new_die (DW_TAG_constant, context_die, decl);
23917 equate_decl_number_to_die (decl, const_die);
23918 add_name_and_src_coords_attributes (const_die, decl);
23919 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
23920 if (TREE_PUBLIC (decl))
23921 add_AT_flag (const_die, DW_AT_external, 1);
23922 if (DECL_ARTIFICIAL (decl))
23923 add_AT_flag (const_die, DW_AT_artificial, 1);
23924 tree_add_const_value_attribute_for_decl (const_die, decl);
23925 }
23926
23927 /* Generate a DIE to represent a label identifier. */
23928
23929 static void
23930 gen_label_die (tree decl, dw_die_ref context_die)
23931 {
23932 tree origin = decl_ultimate_origin (decl);
23933 dw_die_ref lbl_die = lookup_decl_die (decl);
23934 rtx insn;
23935 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23936
23937 if (!lbl_die)
23938 {
23939 lbl_die = new_die (DW_TAG_label, context_die, decl);
23940 equate_decl_number_to_die (decl, lbl_die);
23941
23942 if (origin != NULL)
23943 add_abstract_origin_attribute (lbl_die, origin);
23944 else
23945 add_name_and_src_coords_attributes (lbl_die, decl);
23946 }
23947
23948 if (DECL_ABSTRACT_P (decl))
23949 equate_decl_number_to_die (decl, lbl_die);
23950 else if (! early_dwarf)
23951 {
23952 insn = DECL_RTL_IF_SET (decl);
23953
23954 /* Deleted labels are programmer-specified labels which have been
23955 eliminated because of various optimizations. We still emit them
23956 here so that it is possible to put breakpoints on them. */
23957 if (insn
23958 && (LABEL_P (insn)
23959 || ((NOTE_P (insn)
23960 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
23961 {
23962 /* When optimization is enabled (via -O) some parts of the compiler
23963 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
23964 represent source-level labels which were explicitly declared by
23965 the user. This really shouldn't be happening though, so catch
23966 it if it ever does happen. */
23967 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
23968
23969 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
23970 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23971 }
23972 else if (insn
23973 && NOTE_P (insn)
23974 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
23975 && CODE_LABEL_NUMBER (insn) != -1)
23976 {
23977 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
23978 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23979 }
23980 }
23981 }
23982
23983 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
23984 attributes to the DIE for a block STMT, to describe where the inlined
23985 function was called from. This is similar to add_src_coords_attributes. */
23986
23987 static inline void
23988 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
23989 {
23990 /* We can end up with BUILTINS_LOCATION here. */
23991 if (RESERVED_LOCATION_P (BLOCK_SOURCE_LOCATION (stmt)))
23992 return;
23993
23994 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
23995
23996 if (dwarf_version >= 3 || !dwarf_strict)
23997 {
23998 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
23999 add_AT_unsigned (die, DW_AT_call_line, s.line);
24000 if (debug_column_info && s.column)
24001 add_AT_unsigned (die, DW_AT_call_column, s.column);
24002 }
24003 }
24004
24005
24006 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
24007 Add low_pc and high_pc attributes to the DIE for a block STMT. */
24008
24009 static inline void
24010 add_high_low_attributes (tree stmt, dw_die_ref die)
24011 {
24012 char label[MAX_ARTIFICIAL_LABEL_BYTES];
24013
24014 if (inline_entry_data **iedp
24015 = !inline_entry_data_table ? NULL
24016 : inline_entry_data_table->find_slot_with_hash (stmt,
24017 htab_hash_pointer (stmt),
24018 NO_INSERT))
24019 {
24020 inline_entry_data *ied = *iedp;
24021 gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS);
24022 gcc_assert (debug_inline_points);
24023 gcc_assert (inlined_function_outer_scope_p (stmt));
24024
24025 ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num);
24026 add_AT_lbl_id (die, DW_AT_entry_pc, label);
24027
24028 if (debug_variable_location_views && !ZERO_VIEW_P (ied->view)
24029 && !dwarf_strict)
24030 {
24031 if (!output_asm_line_debug_info ())
24032 add_AT_unsigned (die, DW_AT_GNU_entry_view, ied->view);
24033 else
24034 {
24035 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", ied->view);
24036 /* FIXME: this will resolve to a small number. Could we
24037 possibly emit smaller data? Ideally we'd emit a
24038 uleb128, but that would make the size of DIEs
24039 impossible for the compiler to compute, since it's
24040 the assembler that computes the value of the view
24041 label in this case. Ideally, we'd have a single form
24042 encompassing both the address and the view, and
24043 indirecting them through a table might make things
24044 easier, but even that would be more wasteful,
24045 space-wise, than what we have now. */
24046 add_AT_symview (die, DW_AT_GNU_entry_view, label);
24047 }
24048 }
24049
24050 inline_entry_data_table->clear_slot (iedp);
24051 }
24052
24053 if (BLOCK_FRAGMENT_CHAIN (stmt)
24054 && (dwarf_version >= 3 || !dwarf_strict))
24055 {
24056 tree chain, superblock = NULL_TREE;
24057 dw_die_ref pdie;
24058 dw_attr_node *attr = NULL;
24059
24060 if (!debug_inline_points && inlined_function_outer_scope_p (stmt))
24061 {
24062 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24063 BLOCK_NUMBER (stmt));
24064 add_AT_lbl_id (die, DW_AT_entry_pc, label);
24065 }
24066
24067 /* Optimize duplicate .debug_ranges lists or even tails of
24068 lists. If this BLOCK has the same ranges as its supercontext,
24069 look up the DW_AT_ranges attribute in the supercontext (and
24070 recursively so), verify that the ranges_table contains the
24071 right values and use it instead of adding a new .debug_ranges entry. */
24072 for (chain = stmt, pdie = die;
24073 BLOCK_SAME_RANGE (chain);
24074 chain = BLOCK_SUPERCONTEXT (chain))
24075 {
24076 dw_attr_node *new_attr;
24077
24078 pdie = pdie->die_parent;
24079 if (pdie == NULL)
24080 break;
24081 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
24082 break;
24083 new_attr = get_AT (pdie, DW_AT_ranges);
24084 if (new_attr == NULL
24085 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
24086 break;
24087 attr = new_attr;
24088 superblock = BLOCK_SUPERCONTEXT (chain);
24089 }
24090 if (attr != NULL
24091 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
24092 == (int)BLOCK_NUMBER (superblock))
24093 && BLOCK_FRAGMENT_CHAIN (superblock))
24094 {
24095 unsigned long off = attr->dw_attr_val.v.val_offset;
24096 unsigned long supercnt = 0, thiscnt = 0;
24097 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
24098 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24099 {
24100 ++supercnt;
24101 gcc_checking_assert ((*ranges_table)[off + supercnt].num
24102 == (int)BLOCK_NUMBER (chain));
24103 }
24104 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
24105 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
24106 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24107 ++thiscnt;
24108 gcc_assert (supercnt >= thiscnt);
24109 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
24110 false);
24111 note_rnglist_head (off + supercnt - thiscnt);
24112 return;
24113 }
24114
24115 unsigned int offset = add_ranges (stmt, true);
24116 add_AT_range_list (die, DW_AT_ranges, offset, false);
24117 note_rnglist_head (offset);
24118
24119 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
24120 chain = BLOCK_FRAGMENT_CHAIN (stmt);
24121 do
24122 {
24123 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
24124 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
24125 chain = BLOCK_FRAGMENT_CHAIN (chain);
24126 }
24127 while (chain);
24128 add_ranges (NULL);
24129 }
24130 else
24131 {
24132 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
24133 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24134 BLOCK_NUMBER (stmt));
24135 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
24136 BLOCK_NUMBER (stmt));
24137 add_AT_low_high_pc (die, label, label_high, false);
24138 }
24139 }
24140
24141 /* Generate a DIE for a lexical block. */
24142
24143 static void
24144 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
24145 {
24146 dw_die_ref old_die = lookup_block_die (stmt);
24147 dw_die_ref stmt_die = NULL;
24148 if (!old_die)
24149 {
24150 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24151 equate_block_to_die (stmt, stmt_die);
24152 }
24153
24154 if (BLOCK_ABSTRACT_ORIGIN (stmt))
24155 {
24156 /* If this is an inlined or concrete instance, create a new lexical
24157 DIE for anything below to attach DW_AT_abstract_origin to. */
24158 if (old_die)
24159 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24160
24161 tree origin = block_ultimate_origin (stmt);
24162 if (origin != NULL_TREE && (origin != stmt || old_die))
24163 add_abstract_origin_attribute (stmt_die, origin);
24164
24165 old_die = NULL;
24166 }
24167
24168 if (old_die)
24169 stmt_die = old_die;
24170
24171 /* A non-abstract block whose blocks have already been reordered
24172 should have the instruction range for this block. If so, set the
24173 high/low attributes. */
24174 if (!early_dwarf && TREE_ASM_WRITTEN (stmt))
24175 {
24176 gcc_assert (stmt_die);
24177 add_high_low_attributes (stmt, stmt_die);
24178 }
24179
24180 decls_for_scope (stmt, stmt_die);
24181 }
24182
24183 /* Generate a DIE for an inlined subprogram. */
24184
24185 static void
24186 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
24187 {
24188 tree decl = block_ultimate_origin (stmt);
24189
24190 /* Make sure any inlined functions are known to be inlineable. */
24191 gcc_checking_assert (DECL_ABSTRACT_P (decl)
24192 || cgraph_function_possibly_inlined_p (decl));
24193
24194 dw_die_ref subr_die = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
24195
24196 if (call_arg_locations || debug_inline_points)
24197 equate_block_to_die (stmt, subr_die);
24198 add_abstract_origin_attribute (subr_die, decl);
24199 if (TREE_ASM_WRITTEN (stmt))
24200 add_high_low_attributes (stmt, subr_die);
24201 add_call_src_coords_attributes (stmt, subr_die);
24202
24203 /* The inliner creates an extra BLOCK for the parameter setup;
24204 we want to merge that with the actual outermost BLOCK of the
24205 inlined function to avoid duplicate locals in consumers.
24206 Do that by doing the recursion to subblocks on the single subblock
24207 of STMT. */
24208 bool unwrap_one = false;
24209 if (BLOCK_SUBBLOCKS (stmt) && !BLOCK_CHAIN (BLOCK_SUBBLOCKS (stmt)))
24210 {
24211 tree origin = block_ultimate_origin (BLOCK_SUBBLOCKS (stmt));
24212 if (origin
24213 && TREE_CODE (origin) == BLOCK
24214 && BLOCK_SUPERCONTEXT (origin) == decl)
24215 unwrap_one = true;
24216 }
24217 decls_for_scope (stmt, subr_die, !unwrap_one);
24218 if (unwrap_one)
24219 decls_for_scope (BLOCK_SUBBLOCKS (stmt), subr_die);
24220 }
24221
24222 /* Generate a DIE for a field in a record, or structure. CTX is required: see
24223 the comment for VLR_CONTEXT. */
24224
24225 static void
24226 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
24227 {
24228 dw_die_ref decl_die;
24229
24230 if (TREE_TYPE (decl) == error_mark_node)
24231 return;
24232
24233 decl_die = new_die (DW_TAG_member, context_die, decl);
24234 add_name_and_src_coords_attributes (decl_die, decl);
24235 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
24236 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
24237 context_die);
24238
24239 if (DECL_BIT_FIELD_TYPE (decl))
24240 {
24241 add_byte_size_attribute (decl_die, decl);
24242 add_bit_size_attribute (decl_die, decl);
24243 add_bit_offset_attribute (decl_die, decl, ctx);
24244 }
24245
24246 add_alignment_attribute (decl_die, decl);
24247
24248 /* If we have a variant part offset, we are processing a field from inside
24249 a variant part, so its context is never the QUAL_UNION_TYPE which is how
24250 we represent variant parts in trees (gen_variant_part handles those). */
24251 gcc_assert (ctx->variant_part_offset == NULL_TREE
24252 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
24253 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
24254 add_data_member_location_attribute (decl_die, decl, ctx);
24255
24256 if (DECL_ARTIFICIAL (decl))
24257 add_AT_flag (decl_die, DW_AT_artificial, 1);
24258
24259 add_accessibility_attribute (decl_die, decl);
24260
24261 /* Equate decl number to die, so that we can look up this decl later on. */
24262 equate_decl_number_to_die (decl, decl_die);
24263 }
24264
24265 /* Generate a DIE for a pointer to a member type. TYPE can be an
24266 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
24267 pointer to member function. */
24268
24269 static void
24270 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
24271 {
24272 if (lookup_type_die (type))
24273 return;
24274
24275 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
24276 scope_die_for (type, context_die), type);
24277
24278 equate_type_number_to_die (type, ptr_die);
24279 add_AT_die_ref (ptr_die, DW_AT_containing_type,
24280 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
24281 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
24282 context_die);
24283 add_alignment_attribute (ptr_die, type);
24284
24285 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
24286 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
24287 {
24288 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
24289 add_AT_loc (ptr_die, DW_AT_use_location, op);
24290 }
24291 }
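/* Illustrative example (assumed, not from the original sources): for the
   C++ pointer-to-data-member

     struct C { int m; };
     int C::*pm = &C::m;

   the DIE built above is a DW_TAG_ptr_to_member_type whose
   DW_AT_containing_type refers to C and whose DW_AT_type refers to int.
   Since the pointee is neither a function nor a method type, it also gets
   a DW_AT_use_location expression consisting of a single DW_OP_plus,
   which a debugger evaluates with the object address and the
   pointer-to-member value (the member's byte offset) on the stack to
   obtain the member's address.  */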
24292
24293 static char *producer_string;
24294
24295 /* Return a heap-allocated producer string, including command-line options
24296 if -grecord-gcc-switches is in effect. */
24297
24298 static char *
24299 gen_producer_string (void)
24300 {
24301 size_t j;
24302 auto_vec<const char *> switches;
24303 const char *language_string = lang_hooks.name;
24304 char *producer, *tail;
24305 const char *p;
24306 size_t len = dwarf_record_gcc_switches ? 0 : 3;
24307 size_t plen = strlen (language_string) + 1 + strlen (version_string);
24308
24309 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
24310 switch (save_decoded_options[j].opt_index)
24311 {
24312 case OPT_o:
24313 case OPT_d:
24314 case OPT_dumpbase:
24315 case OPT_dumpdir:
24316 case OPT_auxbase:
24317 case OPT_auxbase_strip:
24318 case OPT_quiet:
24319 case OPT_version:
24320 case OPT_v:
24321 case OPT_w:
24322 case OPT_L:
24323 case OPT_D:
24324 case OPT_I:
24325 case OPT_U:
24326 case OPT_SPECIAL_unknown:
24327 case OPT_SPECIAL_ignore:
24328 case OPT_SPECIAL_deprecated:
24329 case OPT_SPECIAL_program_name:
24330 case OPT_SPECIAL_input_file:
24331 case OPT_grecord_gcc_switches:
24332 case OPT__output_pch_:
24333 case OPT_fdiagnostics_show_location_:
24334 case OPT_fdiagnostics_show_option:
24335 case OPT_fdiagnostics_show_caret:
24336 case OPT_fdiagnostics_show_labels:
24337 case OPT_fdiagnostics_show_line_numbers:
24338 case OPT_fdiagnostics_color_:
24339 case OPT_fdiagnostics_format_:
24340 case OPT_fverbose_asm:
24341 case OPT____:
24342 case OPT__sysroot_:
24343 case OPT_nostdinc:
24344 case OPT_nostdinc__:
24345 case OPT_fpreprocessed:
24346 case OPT_fltrans_output_list_:
24347 case OPT_fresolution_:
24348 case OPT_fdebug_prefix_map_:
24349 case OPT_fmacro_prefix_map_:
24350 case OPT_ffile_prefix_map_:
24351 case OPT_fcompare_debug:
24352 case OPT_fchecking:
24353 case OPT_fchecking_:
24354 /* Ignore these. */
24355 continue;
24356 default:
24357 if (cl_options[save_decoded_options[j].opt_index].flags
24358 & CL_NO_DWARF_RECORD)
24359 continue;
24360 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
24361 == '-');
24362 switch (save_decoded_options[j].canonical_option[0][1])
24363 {
24364 case 'M':
24365 case 'i':
24366 case 'W':
24367 continue;
24368 case 'f':
24369 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
24370 "dump", 4) == 0)
24371 continue;
24372 break;
24373 default:
24374 break;
24375 }
24376 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
24377 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
24378 break;
24379 }
24380
24381 producer = XNEWVEC (char, plen + 1 + len + 1);
24382 tail = producer;
24383 sprintf (tail, "%s %s", language_string, version_string);
24384 tail += plen;
24385
24386 FOR_EACH_VEC_ELT (switches, j, p)
24387 {
24388 len = strlen (p);
24389 *tail = ' ';
24390 memcpy (tail + 1, p, len);
24391 tail += len + 1;
24392 }
24393
24394 *tail = '\0';
24395 return producer;
24396 }
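/* As an illustration (the exact strings below are assumed): with
   -grecord-gcc-switches the resulting DW_AT_producer value looks like

     "GNU C17 9.2.0 -march=x86-64 -O2 -g"

   i.e. the language name, the compiler version string and the recorded
   switches separated by single spaces; without switch recording only the
   "<language> <version>" prefix is produced.  */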
24397
24398 /* Given a C and/or C++ language/version string return the "highest".
24399 C++ is assumed to be "higher" than C in this case. Used for merging
24400 LTO translation unit languages. */
24401 static const char *
24402 highest_c_language (const char *lang1, const char *lang2)
24403 {
24404 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
24405 return "GNU C++17";
24406 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
24407 return "GNU C++14";
24408 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
24409 return "GNU C++11";
24410 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
24411 return "GNU C++98";
24412
24413 if (strcmp ("GNU C2X", lang1) == 0 || strcmp ("GNU C2X", lang2) == 0)
24414 return "GNU C2X";
24415 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
24416 return "GNU C17";
24417 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
24418 return "GNU C11";
24419 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
24420 return "GNU C99";
24421 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
24422 return "GNU C89";
24423
24424 gcc_unreachable ();
24425 }
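/* For instance, merging an LTO unit compiled as "GNU C11" with one
   compiled as "GNU C++14" yields "GNU C++14", since any C++ dialect is
   considered "higher" than any C dialect here.  */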
24426
24427
24428 /* Generate the DIE for the compilation unit. */
24429
24430 static dw_die_ref
24431 gen_compile_unit_die (const char *filename)
24432 {
24433 dw_die_ref die;
24434 const char *language_string = lang_hooks.name;
24435 int language;
24436
24437 die = new_die (DW_TAG_compile_unit, NULL, NULL);
24438
24439 if (filename)
24440 {
24441 add_name_attribute (die, filename);
24442 /* Don't add cwd for <built-in>. */
24443 if (filename[0] != '<')
24444 add_comp_dir_attribute (die);
24445 }
24446
24447 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
24448
24449 /* If our producer is LTO, try to figure out a common language to use
24450 from the global list of translation units. */
24451 if (strcmp (language_string, "GNU GIMPLE") == 0)
24452 {
24453 unsigned i;
24454 tree t;
24455 const char *common_lang = NULL;
24456
24457 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
24458 {
24459 if (!TRANSLATION_UNIT_LANGUAGE (t))
24460 continue;
24461 if (!common_lang)
24462 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
24463 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
24464 ;
24465 else if (strncmp (common_lang, "GNU C", 5) == 0
24466 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
24467 /* Mixing C and C++ is ok, use C++ in that case. */
24468 common_lang = highest_c_language (common_lang,
24469 TRANSLATION_UNIT_LANGUAGE (t));
24470 else
24471 {
24472 /* Fall back to C. */
24473 common_lang = NULL;
24474 break;
24475 }
24476 }
24477
24478 if (common_lang)
24479 language_string = common_lang;
24480 }
24481
24482 language = DW_LANG_C;
24483 if (strncmp (language_string, "GNU C", 5) == 0
24484 && ISDIGIT (language_string[5]))
24485 {
24486 language = DW_LANG_C89;
24487 if (dwarf_version >= 3 || !dwarf_strict)
24488 {
24489 if (strcmp (language_string, "GNU C89") != 0)
24490 language = DW_LANG_C99;
24491
24492 if (dwarf_version >= 5 /* || !dwarf_strict */)
24493 if (strcmp (language_string, "GNU C11") == 0
24494 || strcmp (language_string, "GNU C17") == 0
24495 || strcmp (language_string, "GNU C2X") == 0)
24496 language = DW_LANG_C11;
24497 }
24498 }
24499 else if (strncmp (language_string, "GNU C++", 7) == 0)
24500 {
24501 language = DW_LANG_C_plus_plus;
24502 if (dwarf_version >= 5 /* || !dwarf_strict */)
24503 {
24504 if (strcmp (language_string, "GNU C++11") == 0)
24505 language = DW_LANG_C_plus_plus_11;
24506 else if (strcmp (language_string, "GNU C++14") == 0)
24507 language = DW_LANG_C_plus_plus_14;
24508 else if (strcmp (language_string, "GNU C++17") == 0)
24509 /* For now. */
24510 language = DW_LANG_C_plus_plus_14;
24511 }
24512 }
24513 else if (strcmp (language_string, "GNU F77") == 0)
24514 language = DW_LANG_Fortran77;
24515 else if (dwarf_version >= 3 || !dwarf_strict)
24516 {
24517 if (strcmp (language_string, "GNU Ada") == 0)
24518 language = DW_LANG_Ada95;
24519 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24520 {
24521 language = DW_LANG_Fortran95;
24522 if (dwarf_version >= 5 /* || !dwarf_strict */)
24523 {
24524 if (strcmp (language_string, "GNU Fortran2003") == 0)
24525 language = DW_LANG_Fortran03;
24526 else if (strcmp (language_string, "GNU Fortran2008") == 0)
24527 language = DW_LANG_Fortran08;
24528 }
24529 }
24530 else if (strcmp (language_string, "GNU Objective-C") == 0)
24531 language = DW_LANG_ObjC;
24532 else if (strcmp (language_string, "GNU Objective-C++") == 0)
24533 language = DW_LANG_ObjC_plus_plus;
24534 else if (strcmp (language_string, "GNU D") == 0)
24535 language = DW_LANG_D;
24536 else if (dwarf_version >= 5 || !dwarf_strict)
24537 {
24538 if (strcmp (language_string, "GNU Go") == 0)
24539 language = DW_LANG_Go;
24540 }
24541 }
24542 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
24543 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24544 language = DW_LANG_Fortran90;
24545 /* Likewise for Ada. */
24546 else if (strcmp (language_string, "GNU Ada") == 0)
24547 language = DW_LANG_Ada83;
24548
24549 add_AT_unsigned (die, DW_AT_language, language);
24550
24551 switch (language)
24552 {
24553 case DW_LANG_Fortran77:
24554 case DW_LANG_Fortran90:
24555 case DW_LANG_Fortran95:
24556 case DW_LANG_Fortran03:
24557 case DW_LANG_Fortran08:
24558 /* Fortran has case-insensitive identifiers and the front-end
24559 lowercases everything. */
24560 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
24561 break;
24562 default:
24563 /* The default DW_ID_case_sensitive doesn't need to be specified. */
24564 break;
24565 }
24566 return die;
24567 }
24568
24569 /* Generate the DIE for a base class. */
24570
24571 static void
24572 gen_inheritance_die (tree binfo, tree access, tree type,
24573 dw_die_ref context_die)
24574 {
24575 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
24576 struct vlr_context ctx = { type, NULL };
24577
24578 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
24579 context_die);
24580 add_data_member_location_attribute (die, binfo, &ctx);
24581
24582 if (BINFO_VIRTUAL_P (binfo))
24583 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
24584
24585 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
24586 children, otherwise the default is DW_ACCESS_public. In DWARF2
24587 the default has always been DW_ACCESS_private. */
24588 if (access == access_public_node)
24589 {
24590 if (dwarf_version == 2
24591 || context_die->die_tag == DW_TAG_class_type)
24592 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
24593 }
24594 else if (access == access_protected_node)
24595 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
24596 else if (dwarf_version > 2
24597 && context_die->die_tag != DW_TAG_class_type)
24598 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
24599 }
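/* For instance (an assumed example), with -gdwarf-4 a base specified as
   "class D : public B" gets an explicit DW_ACCESS_public on its
   DW_TAG_inheritance DIE, because the default inside DW_TAG_class_type
   children is private, whereas "struct D : B" needs no accessibility
   attribute since public is already the default for structure children
   in DWARF 3 and later.  */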
24600
24601 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
24602 structure. */
24603
24604 static bool
24605 is_variant_part (tree decl)
24606 {
24607 return (TREE_CODE (decl) == FIELD_DECL
24608 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
24609 }
24610
24611 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
24612 return the FIELD_DECL. Return NULL_TREE otherwise. */
24613
24614 static tree
24615 analyze_discr_in_predicate (tree operand, tree struct_type)
24616 {
24617 while (CONVERT_EXPR_P (operand))
24618 operand = TREE_OPERAND (operand, 0);
24619
24620 /* Match field access to members of struct_type only. */
24621 if (TREE_CODE (operand) == COMPONENT_REF
24622 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
24623 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
24624 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
24625 return TREE_OPERAND (operand, 1);
24626 else
24627 return NULL_TREE;
24628 }
24629
24630 /* Check that SRC is a constant integer that can be represented as a native
24631 integer constant (either signed or unsigned). If so, store it into DEST and
24632 return true. Return false otherwise. */
24633
24634 static bool
24635 get_discr_value (tree src, dw_discr_value *dest)
24636 {
24637 tree discr_type = TREE_TYPE (src);
24638
24639 if (lang_hooks.types.get_debug_type)
24640 {
24641 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
24642 if (debug_type != NULL)
24643 discr_type = debug_type;
24644 }
24645
24646 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
24647 return false;
24648
24649 /* Signedness can vary between the original type and the debug type. This
24650 can happen for character types in Ada for instance: the character type
24651 used for code generation can be signed, to be compatible with the C one,
24652 but from a debugger point of view, it must be unsigned. */
24653 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
24654 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
24655
24656 if (is_orig_unsigned != is_debug_unsigned)
24657 src = fold_convert (discr_type, src);
24658
24659 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
24660 return false;
24661
24662 dest->pos = is_debug_unsigned;
24663 if (is_debug_unsigned)
24664 dest->v.uval = tree_to_uhwi (src);
24665 else
24666 dest->v.sval = tree_to_shwi (src);
24667
24668 return true;
24669 }
24670
24671 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
24672 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
24673 store NULL_TREE in DISCR_DECL. Otherwise:
24674
24675 - store the discriminant field in STRUCT_TYPE that controls the variant
24676 part to *DISCR_DECL
24677
24678 - put in *DISCR_LISTS_P an array where for each variant, the item
24679 represents the corresponding matching list of discriminant values.
24680
24681 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
24682 the above array.
24683
24684 Note that when the array is allocated (i.e. when the analysis is
24685 successful), it is up to the caller to free the array. */
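/* As a hypothetical illustration of the predicate shapes recognized below,
   a variant whose DECL_QUALIFIER is

     disc == 1 || (disc >= 3 && disc <= 5)

   (with "disc" a field of STRUCT_TYPE accessed through a PLACEHOLDER_EXPR)
   yields a discriminant list containing the single value 1 and the range
   3..5, while a DECL_QUALIFIER of boolean_true_node marks the default
   variant and produces no list at all.  */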
24686
24687 static void
24688 analyze_variants_discr (tree variant_part_decl,
24689 tree struct_type,
24690 tree *discr_decl,
24691 dw_discr_list_ref **discr_lists_p,
24692 unsigned *discr_lists_length)
24693 {
24694 tree variant_part_type = TREE_TYPE (variant_part_decl);
24695 tree variant;
24696 dw_discr_list_ref *discr_lists;
24697 unsigned i;
24698
24699 /* Compute how many variants there are in this variant part. */
24700 *discr_lists_length = 0;
24701 for (variant = TYPE_FIELDS (variant_part_type);
24702 variant != NULL_TREE;
24703 variant = DECL_CHAIN (variant))
24704 ++*discr_lists_length;
24705
24706 *discr_decl = NULL_TREE;
24707 *discr_lists_p
24708 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
24709 sizeof (**discr_lists_p));
24710 discr_lists = *discr_lists_p;
24711
24712 /* And then analyze all variants to extract discriminant information for all
24713 of them. This analysis is conservative: as soon as we detect something we
24714 do not support, abort everything and pretend we found nothing. */
24715 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
24716 variant != NULL_TREE;
24717 variant = DECL_CHAIN (variant), ++i)
24718 {
24719 tree match_expr = DECL_QUALIFIER (variant);
24720
24721 /* Now, try to analyze the predicate and deduce a discriminant for
24722 it. */
24723 if (match_expr == boolean_true_node)
24724 /* Typically happens for the default variant: it matches all cases that
24725 previous variants rejected. Don't output any matching value for
24726 this one. */
24727 continue;
24728
24729 /* The following loop tries to iterate over each discriminant
24730 possibility: single values or ranges. */
24731 while (match_expr != NULL_TREE)
24732 {
24733 tree next_round_match_expr;
24734 tree candidate_discr = NULL_TREE;
24735 dw_discr_list_ref new_node = NULL;
24736
24737 /* Possibilities are matched one after the other by nested
24738 TRUTH_ORIF_EXPR expressions. Process the current possibility and
24739 continue with the rest at next iteration. */
24740 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
24741 {
24742 next_round_match_expr = TREE_OPERAND (match_expr, 0);
24743 match_expr = TREE_OPERAND (match_expr, 1);
24744 }
24745 else
24746 next_round_match_expr = NULL_TREE;
24747
24748 if (match_expr == boolean_false_node)
24749 /* This sub-expression matches nothing: just wait for the next
24750 one. */
24751 ;
24752
24753 else if (TREE_CODE (match_expr) == EQ_EXPR)
24754 {
24755 /* We are matching: <discr_field> == <integer_cst>
24756 This sub-expression matches a single value. */
24757 tree integer_cst = TREE_OPERAND (match_expr, 1);
24758
24759 candidate_discr
24760 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
24761 struct_type);
24762
24763 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24764 if (!get_discr_value (integer_cst,
24765 &new_node->dw_discr_lower_bound))
24766 goto abort;
24767 new_node->dw_discr_range = false;
24768 }
24769
24770 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
24771 {
24772 /* We are matching:
24773 <discr_field> > <integer_cst>
24774 && <discr_field> < <integer_cst>.
24775 This sub-expression matches the range of values between the
24776 two matched integer constants. Note that comparisons can be
24777 inclusive or exclusive. */
24778 tree candidate_discr_1, candidate_discr_2;
24779 tree lower_cst, upper_cst;
24780 bool lower_cst_included, upper_cst_included;
24781 tree lower_op = TREE_OPERAND (match_expr, 0);
24782 tree upper_op = TREE_OPERAND (match_expr, 1);
24783
24784 /* When the comparison is exclusive, the integer constant is not
24785 the discriminant range bound we are looking for: we will have
24786 to increment or decrement it. */
24787 if (TREE_CODE (lower_op) == GE_EXPR)
24788 lower_cst_included = true;
24789 else if (TREE_CODE (lower_op) == GT_EXPR)
24790 lower_cst_included = false;
24791 else
24792 goto abort;
24793
24794 if (TREE_CODE (upper_op) == LE_EXPR)
24795 upper_cst_included = true;
24796 else if (TREE_CODE (upper_op) == LT_EXPR)
24797 upper_cst_included = false;
24798 else
24799 goto abort;
24800
24801 /* Extract the discriminant from the first operand and check that it
24802 is consistent with the same analysis in the second
24803 operand. */
24804 candidate_discr_1
24805 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
24806 struct_type);
24807 candidate_discr_2
24808 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
24809 struct_type);
24810 if (candidate_discr_1 == candidate_discr_2)
24811 candidate_discr = candidate_discr_1;
24812 else
24813 goto abort;
24814
24815 /* Extract bounds from both. */
24816 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24817 lower_cst = TREE_OPERAND (lower_op, 1);
24818 upper_cst = TREE_OPERAND (upper_op, 1);
24819
24820 if (!lower_cst_included)
24821 lower_cst
24822 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
24823 build_int_cst (TREE_TYPE (lower_cst), 1));
24824 if (!upper_cst_included)
24825 upper_cst
24826 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
24827 build_int_cst (TREE_TYPE (upper_cst), 1));
24828
24829 if (!get_discr_value (lower_cst,
24830 &new_node->dw_discr_lower_bound)
24831 || !get_discr_value (upper_cst,
24832 &new_node->dw_discr_upper_bound))
24833 goto abort;
24834
24835 new_node->dw_discr_range = true;
24836 }
24837
24838 else if ((candidate_discr
24839 = analyze_discr_in_predicate (match_expr, struct_type))
24840 && TREE_TYPE (candidate_discr) == boolean_type_node)
24841 {
24842 /* We are matching: <discr_field> for a boolean discriminant.
24843 This sub-expression matches boolean_true_node. */
24844 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24845 if (!get_discr_value (boolean_true_node,
24846 &new_node->dw_discr_lower_bound))
24847 goto abort;
24848 new_node->dw_discr_range = false;
24849 }
24850
24851 else
24852 /* Unsupported sub-expression: we cannot determine the set of
24853 matching discriminant values. Abort everything. */
24854 goto abort;
24855
24856 /* If the discriminant info is not consistent with what we saw so
24857 far, consider the analysis failed and abort everything. */
24858 if (candidate_discr == NULL_TREE
24859 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24860 goto abort;
24861 else
24862 *discr_decl = candidate_discr;
24863
24864 if (new_node != NULL)
24865 {
24866 new_node->dw_discr_next = discr_lists[i];
24867 discr_lists[i] = new_node;
24868 }
24869 match_expr = next_round_match_expr;
24870 }
24871 }
24872
24873 /* If we reach this point, we could match everything we were interested
24874 in. */
24875 return;
24876
24877 abort:
24878 /* Clean up all data structures and return no result. */
24879 free (*discr_lists_p);
24880 *discr_lists_p = NULL;
24881 *discr_decl = NULL_TREE;
24882 }
24883
24884 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
24885 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
24886 under CONTEXT_DIE.
24887
24888 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
24889 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
24890 this type, which are record types, represent the available variants and each
24891 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
24892 values are inferred from these attributes.
24893
24894 In trees, the offsets for the fields inside these sub-records are relative
24895 to the variant part itself, whereas the corresponding DIEs should have
24896 offset attributes that are relative to the embedding record base address.
24897 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
24898 must be an expression that computes the offset of the variant part to
24899 describe in DWARF. */
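/* Sketch of the output shape (the names below are only an assumed
   example): for an Ada-style record with a discriminant D selecting
   between two components A and B, the QUAL_UNION_TYPE field is emitted as

     DW_TAG_variant_part   (DW_AT_discr referencing the DIE for D)
       DW_TAG_variant      (DW_AT_discr_value or DW_AT_discr_list)
         DW_TAG_member A
       DW_TAG_variant      (default variant: no discriminant attribute)
         DW_TAG_member B

   with each member's data member location rebased onto the embedding
   record via VARIANT_PART_OFFSET as described above.  */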
24900
24901 static void
24902 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
24903 dw_die_ref context_die)
24904 {
24905 const tree variant_part_type = TREE_TYPE (variant_part_decl);
24906 tree variant_part_offset = vlr_ctx->variant_part_offset;
24907 struct loc_descr_context ctx = {
24908 vlr_ctx->struct_type, /* context_type */
24909 NULL_TREE, /* base_decl */
24910 NULL, /* dpi */
24911 false, /* placeholder_arg */
24912 false /* placeholder_seen */
24913 };
24914
24915 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
24916 NULL_TREE if there is no such field. */
24917 tree discr_decl = NULL_TREE;
24918 dw_discr_list_ref *discr_lists;
24919 unsigned discr_lists_length = 0;
24920 unsigned i;
24921
24922 dw_die_ref dwarf_proc_die = NULL;
24923 dw_die_ref variant_part_die
24924 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
24925
24926 equate_decl_number_to_die (variant_part_decl, variant_part_die);
24927
24928 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
24929 &discr_decl, &discr_lists, &discr_lists_length);
24930
24931 if (discr_decl != NULL_TREE)
24932 {
24933 dw_die_ref discr_die = lookup_decl_die (discr_decl);
24934
24935 if (discr_die)
24936 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
24937 else
24938 /* We have no DIE for the discriminant, so just discard all
24939 discriminant information in the output. */
24940 discr_decl = NULL_TREE;
24941 }
24942
24943 /* If the offset for this variant part is more complex than a constant,
24944 create a DWARF procedure for it so that we will not have to generate DWARF
24945 expressions for it for each member. */
24946 if (TREE_CODE (variant_part_offset) != INTEGER_CST
24947 && (dwarf_version >= 3 || !dwarf_strict))
24948 {
24949 const tree dwarf_proc_fndecl
24950 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
24951 build_function_type (TREE_TYPE (variant_part_offset),
24952 NULL_TREE));
24953 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
24954 const dw_loc_descr_ref dwarf_proc_body
24955 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
24956
24957 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
24958 dwarf_proc_fndecl, context_die);
24959 if (dwarf_proc_die != NULL)
24960 variant_part_offset = dwarf_proc_call;
24961 }
24962
24963 /* Output DIEs for all variants. */
24964 i = 0;
24965 for (tree variant = TYPE_FIELDS (variant_part_type);
24966 variant != NULL_TREE;
24967 variant = DECL_CHAIN (variant), ++i)
24968 {
24969 tree variant_type = TREE_TYPE (variant);
24970 dw_die_ref variant_die;
24971
24972 /* All variants (i.e. members of a variant part) are supposed to be
24973 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
24974 under these records. */
24975 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
24976
24977 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
24978 equate_decl_number_to_die (variant, variant_die);
24979
24980 /* Output discriminant values this variant matches, if any. */
24981 if (discr_decl == NULL || discr_lists[i] == NULL)
24982 /* If we have no discriminant information, or no matching values for
24983 this variant (it is probably the default variant), don't output any
24984 discriminant value/list attribute, as the standard says. */
24985 ;
24986 else if (discr_lists[i]->dw_discr_next == NULL
24987 && !discr_lists[i]->dw_discr_range)
24988 /* If there is only one accepted value, don't bother outputting a
24989 list. */
24990 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
24991 else
24992 add_discr_list (variant_die, discr_lists[i]);
24993
24994 for (tree member = TYPE_FIELDS (variant_type);
24995 member != NULL_TREE;
24996 member = DECL_CHAIN (member))
24997 {
24998 struct vlr_context vlr_sub_ctx = {
24999 vlr_ctx->struct_type, /* struct_type */
25000 NULL /* variant_part_offset */
25001 };
25002 if (is_variant_part (member))
25003 {
25004 /* All offsets for fields inside variant parts are relative to
25005 the top-level embedding RECORD_TYPE's base address. On the
25006 other hand, offsets in GCC's types are relative to the
25007 nested-most variant part. So we have to sum offsets each time
25008 we recurse. */
25009
25010 vlr_sub_ctx.variant_part_offset
25011 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
25012 variant_part_offset, byte_position (member));
25013 gen_variant_part (member, &vlr_sub_ctx, variant_die);
25014 }
25015 else
25016 {
25017 vlr_sub_ctx.variant_part_offset = variant_part_offset;
25018 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
25019 }
25020 }
25021 }
25022
25023 free (discr_lists);
25024 }
25025
25026 /* Generate a DIE for a class member. */
25027
25028 static void
25029 gen_member_die (tree type, dw_die_ref context_die)
25030 {
25031 tree member;
25032 tree binfo = TYPE_BINFO (type);
25033
25034 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
25035
25036 /* If this is not an incomplete type, output descriptions of each of its
25037 members. Note that as we output the DIEs necessary to represent the
25038 members of this record or union type, we will also be trying to output
25039 DIEs to represent the *types* of those members. However the `type'
25040 function (above) will specifically avoid generating type DIEs for member
25041 types *within* the list of member DIEs for this (containing) type except
25042 for those types (of members) which are explicitly marked as also being
25043 members of this (containing) type themselves. The g++ front-end can
25044 force any given type to be treated as a member of some other (containing)
25045 type by setting the TYPE_CONTEXT of the given (member) type to point to
25046 the TREE node representing the appropriate (containing) type. */
25047
25048 /* First output info about the base classes. */
25049 if (binfo && early_dwarf)
25050 {
25051 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
25052 int i;
25053 tree base;
25054
25055 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
25056 gen_inheritance_die (base,
25057 (accesses ? (*accesses)[i] : access_public_node),
25058 type,
25059 context_die);
25060 }
25061
25062 /* Now output info about the data members and type members. */
25063 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
25064 {
25065 struct vlr_context vlr_ctx = { type, NULL_TREE };
25066 bool static_inline_p
25067 = (TREE_STATIC (member)
25068 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
25069 != -1));
25070
25071 /* Ignore clones. */
25072 if (DECL_ABSTRACT_ORIGIN (member))
25073 continue;
25074
25075 /* If we thought we were generating minimal debug info for TYPE
25076 and then changed our minds, some of the member declarations
25077 may have already been defined. Don't define them again, but
25078 do put them in the right order. */
25079
25080 if (dw_die_ref child = lookup_decl_die (member))
25081 {
25082 /* Handle inline static data members, which only have in-class
25083 declarations. */
25084 dw_die_ref ref = NULL;
25085 if (child->die_tag == DW_TAG_variable
25086 && child->die_parent == comp_unit_die ())
25087 {
25088 ref = get_AT_ref (child, DW_AT_specification);
25089 /* For C++17 inline static data members followed by redundant
25090 out of class redeclaration, we might get here with
25091 child being the DIE created for the out of class
25092 redeclaration and with its DW_AT_specification being
25093 the DIE created for in-class definition. We want to
25094 reparent the latter, and don't want to create another
25095 DIE with DW_AT_specification in that case, because
25096 we already have one. */
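/* Hypothetical example of that situation:

     struct S { static inline int x = 1; };
     int S::x;

   where the second line is the redundant out-of-class redeclaration
   whose DIE we may find here as CHILD.  */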
25097 if (ref
25098 && static_inline_p
25099 && ref->die_tag == DW_TAG_variable
25100 && ref->die_parent == comp_unit_die ()
25101 && get_AT (ref, DW_AT_specification) == NULL)
25102 {
25103 child = ref;
25104 ref = NULL;
25105 static_inline_p = false;
25106 }
25107 }
25108
25109 if (child->die_tag == DW_TAG_variable
25110 && child->die_parent == comp_unit_die ()
25111 && ref == NULL)
25112 {
25113 reparent_child (child, context_die);
25114 if (dwarf_version < 5)
25115 child->die_tag = DW_TAG_member;
25116 }
25117 else
25118 splice_child_die (context_die, child);
25119 }
25120
25121 /* Do not generate standard DWARF for variant parts if we are generating
25122 the corresponding GNAT encodings: DIEs generated for both would
25123 conflict in our mappings. */
25124 else if (is_variant_part (member)
25125 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
25126 {
25127 vlr_ctx.variant_part_offset = byte_position (member);
25128 gen_variant_part (member, &vlr_ctx, context_die);
25129 }
25130 else
25131 {
25132 vlr_ctx.variant_part_offset = NULL_TREE;
25133 gen_decl_die (member, NULL, &vlr_ctx, context_die);
25134 }
25135
25136 /* For C++ inline static data members emit immediately a DW_TAG_variable
25137 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
25138 DW_AT_specification. */
25139 if (static_inline_p)
25140 {
25141 int old_extern = DECL_EXTERNAL (member);
25142 DECL_EXTERNAL (member) = 0;
25143 gen_decl_die (member, NULL, NULL, comp_unit_die ());
25144 DECL_EXTERNAL (member) = old_extern;
25145 }
25146 }
25147 }
25148
25149 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
25150 is set, we pretend that the type was never defined, so we only get the
25151 member DIEs needed by later specification DIEs. */
25152
25153 static void
25154 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
25155 enum debug_info_usage usage)
25156 {
25157 if (TREE_ASM_WRITTEN (type))
25158 {
25159 /* Fill in the bound of variable-length fields in late dwarf if
25160 still incomplete. */
25161 if (!early_dwarf && variably_modified_type_p (type, NULL))
25162 for (tree member = TYPE_FIELDS (type);
25163 member;
25164 member = DECL_CHAIN (member))
25165 fill_variable_array_bounds (TREE_TYPE (member));
25166 return;
25167 }
25168
25169 dw_die_ref type_die = lookup_type_die (type);
25170 dw_die_ref scope_die = 0;
25171 int nested = 0;
25172 int complete = (TYPE_SIZE (type)
25173 && (! TYPE_STUB_DECL (type)
25174 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
25175 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
25176 complete = complete && should_emit_struct_debug (type, usage);
25177
25178 if (type_die && ! complete)
25179 return;
25180
25181 if (TYPE_CONTEXT (type) != NULL_TREE
25182 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25183 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
25184 nested = 1;
25185
25186 scope_die = scope_die_for (type, context_die);
25187
25188 /* Generate child DIEs for template parameters. */
25189 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
25190 schedule_generic_params_dies_gen (type);
25191
25192 if (! type_die || (nested && is_cu_die (scope_die)))
25193 /* First occurrence of type or toplevel definition of nested class. */
25194 {
25195 dw_die_ref old_die = type_die;
25196
25197 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
25198 ? record_type_tag (type) : DW_TAG_union_type,
25199 scope_die, type);
25200 equate_type_number_to_die (type, type_die);
25201 if (old_die)
25202 add_AT_specification (type_die, old_die);
25203 else
25204 add_name_attribute (type_die, type_tag (type));
25205 }
25206 else
25207 remove_AT (type_die, DW_AT_declaration);
25208
25209 /* If this type has been completed, then give it a byte_size attribute and
25210 then give a list of members. */
25211 if (complete && !ns_decl)
25212 {
25213 /* Prevent infinite recursion in cases where the type of some member of
25214 this type is expressed in terms of this type itself. */
25215 TREE_ASM_WRITTEN (type) = 1;
25216 add_byte_size_attribute (type_die, type);
25217 add_alignment_attribute (type_die, type);
25218 if (TYPE_STUB_DECL (type) != NULL_TREE)
25219 {
25220 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
25221 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
25222 }
25223
25224 /* If the first reference to this type was as the return type of an
25225 inline function, then it may not have a parent. Fix this now. */
25226 if (type_die->die_parent == NULL)
25227 add_child_die (scope_die, type_die);
25228
25229 gen_member_die (type, type_die);
25230
25231 add_gnat_descriptive_type_attribute (type_die, type, context_die);
25232 if (TYPE_ARTIFICIAL (type))
25233 add_AT_flag (type_die, DW_AT_artificial, 1);
25234
25235 /* GNU extension: Record what type our vtable lives in. */
25236 if (TYPE_VFIELD (type))
25237 {
25238 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
25239
25240 gen_type_die (vtype, context_die);
25241 add_AT_die_ref (type_die, DW_AT_containing_type,
25242 lookup_type_die (vtype));
25243 }
25244 }
25245 else
25246 {
25247 add_AT_flag (type_die, DW_AT_declaration, 1);
25248
25249 /* We don't need to do this for function-local types. */
25250 if (TYPE_STUB_DECL (type)
25251 && ! decl_function_context (TYPE_STUB_DECL (type)))
25252 vec_safe_push (incomplete_types, type);
25253 }
25254
25255 if (get_AT (type_die, DW_AT_name))
25256 add_pubtype (type, type_die);
25257 }
25258
25259 /* Generate a DIE for a subroutine _type_. */
25260
25261 static void
25262 gen_subroutine_type_die (tree type, dw_die_ref context_die)
25263 {
25264 tree return_type = TREE_TYPE (type);
25265 dw_die_ref subr_die
25266 = new_die (DW_TAG_subroutine_type,
25267 scope_die_for (type, context_die), type);
25268
25269 equate_type_number_to_die (type, subr_die);
25270 add_prototyped_attribute (subr_die, type);
25271 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
25272 context_die);
25273 add_alignment_attribute (subr_die, type);
25274 gen_formal_types_die (type, subr_die);
25275
25276 if (get_AT (subr_die, DW_AT_name))
25277 add_pubtype (type, subr_die);
25278 if ((dwarf_version >= 5 || !dwarf_strict)
25279 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
25280 add_AT_flag (subr_die, DW_AT_reference, 1);
25281 if ((dwarf_version >= 5 || !dwarf_strict)
25282 && lang_hooks.types.type_dwarf_attribute (type,
25283 DW_AT_rvalue_reference) != -1)
25284 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
25285 }
25286
25287 /* Generate a DIE for a type definition. */
25288
25289 static void
25290 gen_typedef_die (tree decl, dw_die_ref context_die)
25291 {
25292 dw_die_ref type_die;
25293 tree type;
25294
25295 if (TREE_ASM_WRITTEN (decl))
25296 {
25297 if (DECL_ORIGINAL_TYPE (decl))
25298 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
25299 return;
25300 }
25301
25302 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
25303 checks in process_scope_var and modified_type_die), this should be called
25304 only for original types. */
25305 gcc_assert (decl_ultimate_origin (decl) == NULL
25306 || decl_ultimate_origin (decl) == decl);
25307
25308 TREE_ASM_WRITTEN (decl) = 1;
25309 type_die = new_die (DW_TAG_typedef, context_die, decl);
25310
25311 add_name_and_src_coords_attributes (type_die, decl);
25312 if (DECL_ORIGINAL_TYPE (decl))
25313 {
25314 type = DECL_ORIGINAL_TYPE (decl);
25315 if (type == error_mark_node)
25316 return;
25317
25318 gcc_assert (type != TREE_TYPE (decl));
25319 equate_type_number_to_die (TREE_TYPE (decl), type_die);
25320 }
25321 else
25322 {
25323 type = TREE_TYPE (decl);
25324 if (type == error_mark_node)
25325 return;
25326
25327 if (is_naming_typedef_decl (TYPE_NAME (type)))
25328 {
25329 /* Here, we are in the case of decl being a typedef naming
25330 an anonymous type, e.g:
25331 typedef struct {...} foo;
25332 In that case TREE_TYPE (decl) is not a typedef variant
25333 type and TYPE_NAME of the anonymous type is set to the
25334 TYPE_DECL of the typedef. This construct is emitted by
25335 the C++ FE.
25336
25337 TYPE is the anonymous struct named by the typedef
25338 DECL. As we need the DW_AT_type attribute of the
25339 DW_TAG_typedef to point to the DIE of TYPE, let's
25340 generate that DIE right away. add_type_attribute
25341 called below will then pick (via lookup_type_die) that
25342 anonymous struct DIE. */
25343 if (!TREE_ASM_WRITTEN (type))
25344 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
25345
25346 /* This is a GNU Extension. We are adding a
25347 DW_AT_linkage_name attribute to the DIE of the
25348 anonymous struct TYPE. The value of that attribute
25349 is the name of the typedef decl naming the anonymous
25350 struct. This greatly eases the work of consumers of
25351 this debug info. */
25352 add_linkage_name_raw (lookup_type_die (type), decl);
25353 }
25354 }
25355
25356 add_type_attribute (type_die, type, decl_quals (decl), false,
25357 context_die);
25358
25359 if (is_naming_typedef_decl (decl))
25360 /* We want all subsequent calls to lookup_type_die with TYPE
25361 as argument to yield the DW_TAG_typedef we have just
25362 created. */
25363 equate_type_number_to_die (type, type_die);
25364
25365 add_alignment_attribute (type_die, TREE_TYPE (decl));
25366
25367 add_accessibility_attribute (type_die, decl);
25368
25369 if (DECL_ABSTRACT_P (decl))
25370 equate_decl_number_to_die (decl, type_die);
25371
25372 if (get_AT (type_die, DW_AT_name))
25373 add_pubtype (decl, type_die);
25374 }
25375
25376 /* Generate a DIE for a struct, class, enum or union type. */
25377
25378 static void
25379 gen_tagged_type_die (tree type,
25380 dw_die_ref context_die,
25381 enum debug_info_usage usage)
25382 {
25383 if (type == NULL_TREE
25384 || !is_tagged_type (type))
25385 return;
25386
25387 if (TREE_ASM_WRITTEN (type))
25388 ;
25389 /* If this is a nested type whose containing class hasn't been written
25390 out yet, writing it out will cover this one, too. This does not apply
25391 to instantiations of member class templates; they need to be added to
25392 the containing class as they are generated. FIXME: This hurts the
25393 idea of combining type decls from multiple TUs, since we can't predict
25394 what set of template instantiations we'll get. */
25395 else if (TYPE_CONTEXT (type)
25396 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25397 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
25398 {
25399 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
25400
25401 if (TREE_ASM_WRITTEN (type))
25402 return;
25403
25404 /* If that failed, attach ourselves to the stub. */
25405 context_die = lookup_type_die (TYPE_CONTEXT (type));
25406 }
25407 else if (TYPE_CONTEXT (type) != NULL_TREE
25408 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
25409 {
25410 /* If this type is local to a function that hasn't been written
25411 out yet, use a NULL context for now; it will be fixed up in
25412 decls_for_scope. */
25413 context_die = lookup_decl_die (TYPE_CONTEXT (type));
25414 /* A declaration DIE doesn't count; nested types need to go in the
25415 specification. */
25416 if (context_die && is_declaration_die (context_die))
25417 context_die = NULL;
25418 }
25419 else
25420 context_die = declare_in_namespace (type, context_die);
25421
25422 if (TREE_CODE (type) == ENUMERAL_TYPE)
25423 {
25424 /* This might have been written out by the call to
25425 declare_in_namespace. */
25426 if (!TREE_ASM_WRITTEN (type))
25427 gen_enumeration_type_die (type, context_die);
25428 }
25429 else
25430 gen_struct_or_union_type_die (type, context_die, usage);
25431
25432 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
25433 it up if it is ever completed. gen_*_type_die will set it for us
25434 when appropriate. */
25435 }
25436
25437 /* Generate a type description DIE. */
25438
25439 static void
25440 gen_type_die_with_usage (tree type, dw_die_ref context_die,
25441 enum debug_info_usage usage)
25442 {
25443 struct array_descr_info info;
25444
25445 if (type == NULL_TREE || type == error_mark_node)
25446 return;
25447
25448 if (flag_checking && type)
25449 verify_type (type);
25450
25451 if (TYPE_NAME (type) != NULL_TREE
25452 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
25453 && is_redundant_typedef (TYPE_NAME (type))
25454 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
25455 /* The DECL of this type is a typedef we don't want to emit debug
25456 info for, but we want debug info for its underlying type.
25457 This can happen, e.g., for the injected-class-name of a C++
25458 type. */
25459 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
25460
25461 /* If TYPE is a typedef type variant, let's generate debug info
25462 for the parent typedef which TYPE is a type of. */
25463 if (typedef_variant_p (type))
25464 {
25465 if (TREE_ASM_WRITTEN (type))
25466 return;
25467
25468 tree name = TYPE_NAME (type);
25469 tree origin = decl_ultimate_origin (name);
25470 if (origin != NULL && origin != name)
25471 {
25472 gen_decl_die (origin, NULL, NULL, context_die);
25473 return;
25474 }
25475
25476 /* Prevent broken recursion; we can't hand off to the same type. */
25477 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
25478
25479 /* Give typedefs the right scope. */
25480 context_die = scope_die_for (type, context_die);
25481
25482 TREE_ASM_WRITTEN (type) = 1;
25483
25484 gen_decl_die (name, NULL, NULL, context_die);
25485 return;
25486 }
25487
25488 /* If type is an anonymous tagged type named by a typedef, let's
25489 generate debug info for the typedef. */
25490 if (is_naming_typedef_decl (TYPE_NAME (type)))
25491 {
25492 /* Give typedefs the right scope. */
25493 context_die = scope_die_for (type, context_die);
25494
25495 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
25496 return;
25497 }
25498
25499 if (lang_hooks.types.get_debug_type)
25500 {
25501 tree debug_type = lang_hooks.types.get_debug_type (type);
25502
25503 if (debug_type != NULL_TREE && debug_type != type)
25504 {
25505 gen_type_die_with_usage (debug_type, context_die, usage);
25506 return;
25507 }
25508 }
25509
25510 /* We are going to output a DIE to represent the unqualified version
25511 of this type (i.e. without any const or volatile qualifiers) so
25512 get the main variant (i.e. the unqualified version) of this type
25513 now. (Vectors and arrays are special because the debugging info is in the
25514 cloned type itself. Similarly function/method types can contain extra
25515 ref-qualification). */
25516 if (TREE_CODE (type) == FUNCTION_TYPE
25517 || TREE_CODE (type) == METHOD_TYPE)
25518 {
25519 /* For function/method types, can't use type_main_variant here,
25520 because that can have different ref-qualifiers for C++,
25521 but try to canonicalize. */
25522 tree main = TYPE_MAIN_VARIANT (type);
25523 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
25524 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
25525 && check_base_type (t, main)
25526 && check_lang_type (t, type))
25527 {
25528 type = t;
25529 break;
25530 }
25531 }
25532 else if (TREE_CODE (type) != VECTOR_TYPE
25533 && TREE_CODE (type) != ARRAY_TYPE)
25534 type = type_main_variant (type);
25535
25536 /* If this is an array type with hidden descriptor, handle it first. */
25537 if (!TREE_ASM_WRITTEN (type)
25538 && lang_hooks.types.get_array_descr_info)
25539 {
25540 memset (&info, 0, sizeof (info));
25541 if (lang_hooks.types.get_array_descr_info (type, &info))
25542 {
25543 /* Fortran sometimes emits array types with no dimension. */
25544 gcc_assert (info.ndimensions >= 0
25545 && (info.ndimensions
25546 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
25547 gen_descr_array_type_die (type, &info, context_die);
25548 TREE_ASM_WRITTEN (type) = 1;
25549 return;
25550 }
25551 }
25552
25553 if (TREE_ASM_WRITTEN (type))
25554 {
25555 /* Variable-length types may be incomplete even if
25556 TREE_ASM_WRITTEN. For such types, fall through to
25557 gen_array_type_die() and possibly fill in
25558 DW_AT_{upper,lower}_bound attributes. */
25559 if ((TREE_CODE (type) != ARRAY_TYPE
25560 && TREE_CODE (type) != RECORD_TYPE
25561 && TREE_CODE (type) != UNION_TYPE
25562 && TREE_CODE (type) != QUAL_UNION_TYPE)
25563 || !variably_modified_type_p (type, NULL))
25564 return;
25565 }
25566
25567 switch (TREE_CODE (type))
25568 {
25569 case ERROR_MARK:
25570 break;
25571
25572 case POINTER_TYPE:
25573 case REFERENCE_TYPE:
25574 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
25575 ensures that the gen_type_die recursion will terminate even if the
25576 type is recursive. Recursive types are possible in Ada. */
25577 /* ??? We could perhaps do this for all types before the switch
25578 statement. */
25579 TREE_ASM_WRITTEN (type) = 1;
25580
25581 /* For these types, all that is required is that we output a DIE (or a
25582 set of DIEs) to represent the "basis" type. */
25583 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25584 DINFO_USAGE_IND_USE);
25585 break;
25586
25587 case OFFSET_TYPE:
25588 /* This code is used for C++ pointer-to-data-member types.
25589 Output a description of the relevant class type. */
25590 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
25591 DINFO_USAGE_IND_USE);
25592
25593 /* Output a description of the type of the object pointed to. */
25594 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25595 DINFO_USAGE_IND_USE);
25596
25597 /* Now output a DIE to represent this pointer-to-data-member type
25598 itself. */
25599 gen_ptr_to_mbr_type_die (type, context_die);
25600 break;
25601
25602 case FUNCTION_TYPE:
25603 /* Force out return type (in case it wasn't forced out already). */
25604 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25605 DINFO_USAGE_DIR_USE);
25606 gen_subroutine_type_die (type, context_die);
25607 break;
25608
25609 case METHOD_TYPE:
25610 /* Force out return type (in case it wasn't forced out already). */
25611 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25612 DINFO_USAGE_DIR_USE);
25613 gen_subroutine_type_die (type, context_die);
25614 break;
25615
25616 case ARRAY_TYPE:
25617 case VECTOR_TYPE:
25618 gen_array_type_die (type, context_die);
25619 break;
25620
25621 case ENUMERAL_TYPE:
25622 case RECORD_TYPE:
25623 case UNION_TYPE:
25624 case QUAL_UNION_TYPE:
25625 gen_tagged_type_die (type, context_die, usage);
25626 return;
25627
25628 case VOID_TYPE:
25629 case INTEGER_TYPE:
25630 case REAL_TYPE:
25631 case FIXED_POINT_TYPE:
25632 case COMPLEX_TYPE:
25633 case BOOLEAN_TYPE:
25634 /* No DIEs needed for fundamental types. */
25635 break;
25636
25637 case NULLPTR_TYPE:
25638 case LANG_TYPE:
25639 /* Just use DW_TAG_unspecified_type. */
25640 {
25641 dw_die_ref type_die = lookup_type_die (type);
25642 if (type_die == NULL)
25643 {
25644 tree name = TYPE_IDENTIFIER (type);
25645 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
25646 type);
25647 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
25648 equate_type_number_to_die (type, type_die);
25649 }
25650 }
25651 break;
25652
25653 default:
25654 if (is_cxx_auto (type))
25655 {
25656 tree name = TYPE_IDENTIFIER (type);
25657 dw_die_ref *die = (name == get_identifier ("auto")
25658 ? &auto_die : &decltype_auto_die);
25659 if (!*die)
25660 {
25661 *die = new_die (DW_TAG_unspecified_type,
25662 comp_unit_die (), NULL_TREE);
25663 add_name_attribute (*die, IDENTIFIER_POINTER (name));
25664 }
25665 equate_type_number_to_die (type, *die);
25666 break;
25667 }
25668 gcc_unreachable ();
25669 }
25670
25671 TREE_ASM_WRITTEN (type) = 1;
25672 }
25673
25674 static void
25675 gen_type_die (tree type, dw_die_ref context_die)
25676 {
25677 if (type != error_mark_node)
25678 {
25679 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
25680 if (flag_checking)
25681 {
25682 dw_die_ref die = lookup_type_die (type);
25683 if (die)
25684 check_die (die);
25685 }
25686 }
25687 }
25688
25689 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
25690 things which are local to the given block. */
25691
25692 static void
25693 gen_block_die (tree stmt, dw_die_ref context_die)
25694 {
25695 int must_output_die = 0;
25696 bool inlined_func;
25697
25698 /* Ignore blocks that are NULL. */
25699 if (stmt == NULL_TREE)
25700 return;
25701
25702 inlined_func = inlined_function_outer_scope_p (stmt);
25703
25704 /* If the block is one fragment of a non-contiguous block, do not
25705 process the variables, since they will have been done by the
25706 origin block. Do process subblocks. */
25707 if (BLOCK_FRAGMENT_ORIGIN (stmt))
25708 {
25709 tree sub;
25710
25711 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
25712 gen_block_die (sub, context_die);
25713
25714 return;
25715 }
25716
25717 /* Determine if we need to output any Dwarf DIEs at all to represent this
25718 block. */
25719 if (inlined_func)
25720 /* The outer scopes for inlinings *must* always be represented. We
25721 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
25722 must_output_die = 1;
25723 else if (lookup_block_die (stmt))
25724 /* If we already have a DIE then it was filled early. Meanwhile
25725 we might have pruned all BLOCK_VARS as optimized out, but we
25726 still want to generate high/low PC attributes, so output it. */
25727 must_output_die = 1;
25728 else if (TREE_USED (stmt)
25729 || TREE_ASM_WRITTEN (stmt))
25730 {
25731 /* Determine if this block directly contains any "significant"
25732 local declarations which we will need to output DIEs for. */
25733 if (debug_info_level > DINFO_LEVEL_TERSE)
25734 {
25735 /* We are not in terse mode so any local declaration that
25736 is not ignored for debug purposes counts as being a
25737 "significant" one. */
25738 if (BLOCK_NUM_NONLOCALIZED_VARS (stmt))
25739 must_output_die = 1;
25740 else
25741 for (tree var = BLOCK_VARS (stmt); var; var = DECL_CHAIN (var))
25742 if (!DECL_IGNORED_P (var))
25743 {
25744 must_output_die = 1;
25745 break;
25746 }
25747 }
25748 else if (!dwarf2out_ignore_block (stmt))
25749 must_output_die = 1;
25750 }
25751
25752 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
25753 DIE for any block which contains no significant local declarations at
25754 all. Rather, in such cases we just call `decls_for_scope' so that any
25755 needed Dwarf info for any sub-blocks will get properly generated. Note
25756 that in terse mode, our definition of what constitutes a "significant"
25757 local declaration gets restricted to include only inlined function
25758 instances and local (nested) function definitions. */
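/* A rough illustration (assumed source, not a real testcase):

     void f (void)
     {
       {
         { int i = g (); use (i); }
       }
     }

   The block introduced by the first pair of inner braces declares nothing,
   so no DW_TAG_lexical_block DIE is emitted for it and we only recurse
   through decls_for_scope; the innermost block, which declares the used
   local "i", does get its own DIE. */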
25759 if (must_output_die)
25760 {
25761 if (inlined_func)
25762 gen_inlined_subroutine_die (stmt, context_die);
25763 else
25764 gen_lexical_block_die (stmt, context_die);
25765 }
25766 else
25767 decls_for_scope (stmt, context_die);
25768 }
25769
25770 /* Process variable DECL (or variable with origin ORIGIN) within
25771 block STMT and add it to CONTEXT_DIE. */
25772 static void
25773 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
25774 {
25775 dw_die_ref die;
25776 tree decl_or_origin = decl ? decl : origin;
25777
25778 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
25779 die = lookup_decl_die (decl_or_origin);
25780 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
25781 {
25782 if (TYPE_DECL_IS_STUB (decl_or_origin))
25783 die = lookup_type_die (TREE_TYPE (decl_or_origin));
25784 else
25785 die = lookup_decl_die (decl_or_origin);
25786 /* Avoid re-creating the DIE late if it was optimized as unused early. */
25787 if (! die && ! early_dwarf)
25788 return;
25789 }
25790 else
25791 die = NULL;
25792
25793 /* Avoid creating DIEs for local typedefs and concrete static variables that
25794 will only be pruned later. */
25795 if ((origin || decl_ultimate_origin (decl))
25796 && (TREE_CODE (decl_or_origin) == TYPE_DECL
25797 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
25798 {
25799 origin = decl_ultimate_origin (decl_or_origin);
25800 if (decl && VAR_P (decl) && die != NULL)
25801 {
25802 die = lookup_decl_die (origin);
25803 if (die != NULL)
25804 equate_decl_number_to_die (decl, die);
25805 }
25806 return;
25807 }
25808
25809 if (die != NULL && die->die_parent == NULL)
25810 add_child_die (context_die, die);
25811 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
25812 {
25813 if (early_dwarf)
25814 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
25815 stmt, context_die);
25816 }
25817 else
25818 {
25819 if (decl && DECL_P (decl))
25820 {
25821 die = lookup_decl_die (decl);
25822
25823 /* Early created DIEs do not have a parent as the decls refer
25824 to the function as DECL_CONTEXT rather than the BLOCK. */
25825 if (die && die->die_parent == NULL)
25826 {
25827 gcc_assert (in_lto_p);
25828 add_child_die (context_die, die);
25829 }
25830 }
25831
25832 gen_decl_die (decl, origin, NULL, context_die);
25833 }
25834 }
25835
25836 /* Generate all of the decls declared within a given scope and (recursively)
25837 all of its sub-blocks. */
25838
25839 static void
25840 decls_for_scope (tree stmt, dw_die_ref context_die, bool recurse)
25841 {
25842 tree decl;
25843 unsigned int i;
25844 tree subblocks;
25845
25846 /* Ignore NULL blocks. */
25847 if (stmt == NULL_TREE)
25848 return;
25849
25850 /* Output the DIEs to represent all of the data objects and typedefs
25851 declared directly within this block but not within any nested
25852 sub-blocks. Also, nested function and tag DIEs have been
25853 generated with a parent of NULL; fix that up now. We don't
25854 have to do this if we're at -g1. */
25855 if (debug_info_level > DINFO_LEVEL_TERSE)
25856 {
25857 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25858 process_scope_var (stmt, decl, NULL_TREE, context_die);
25859 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
25860 origin - avoid doing this twice as we have no good way to see
25861 if we've done it once already. */
25862 if (! early_dwarf)
25863 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
25864 {
25865 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
25866 if (decl == current_function_decl)
25867 /* Ignore declarations of the current function: although they
25868 are declarations, gen_subprogram_die would treat them as
25869 definitions again, because they are equal to
25870 current_function_decl, and endlessly recurse. */;
25871 else if (TREE_CODE (decl) == FUNCTION_DECL)
25872 process_scope_var (stmt, decl, NULL_TREE, context_die);
25873 else
25874 process_scope_var (stmt, NULL_TREE, decl, context_die);
25875 }
25876 }
25877
25878 /* Even if we're at -g1, we need to process the subblocks in order to get
25879 inlined call information. */
25880
25881 /* Output the DIEs to represent all sub-blocks (and the items declared
25882 therein) of this block. */
25883 if (recurse)
25884 for (subblocks = BLOCK_SUBBLOCKS (stmt);
25885 subblocks != NULL;
25886 subblocks = BLOCK_CHAIN (subblocks))
25887 gen_block_die (subblocks, context_die);
25888 }
25889
25890 /* Is this a typedef we can avoid emitting? */
25891
25892 static bool
25893 is_redundant_typedef (const_tree decl)
25894 {
25895 if (TYPE_DECL_IS_STUB (decl))
25896 return true;
25897
25898 if (DECL_ARTIFICIAL (decl)
25899 && DECL_CONTEXT (decl)
25900 && is_tagged_type (DECL_CONTEXT (decl))
25901 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
25902 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
25903 /* Also ignore the artificial member typedef for the class name. */
25904 return true;
25905
25906 return false;
25907 }
25908
25909 /* Return TRUE if DECL is a typedef that names a type for linkage
25910 purposes. This kind of typedef is produced by the C++ FE for
25911 constructs like:
25912
25913 typedef struct {...} foo;
25914
25915 In that case, there is no typedef variant type produced for foo.
25916 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
25917 struct type. */
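/* By contrast (an assumed illustration), for

     typedef struct tag_s { int i; } alias_t;

   the struct is already named by its tag, the TYPE_DECL for "alias_t" has
   DECL_ORIGINAL_TYPE set and is an ordinary typedef, so it is not a naming
   typedef in the sense checked below. */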
25918
25919 static bool
25920 is_naming_typedef_decl (const_tree decl)
25921 {
25922 if (decl == NULL_TREE
25923 || TREE_CODE (decl) != TYPE_DECL
25924 || DECL_NAMELESS (decl)
25925 || !is_tagged_type (TREE_TYPE (decl))
25926 || DECL_IS_BUILTIN (decl)
25927 || is_redundant_typedef (decl)
25928 /* It looks like Ada produces TYPE_DECLs that are very similar
25929 to C++ naming typedefs but that have different
25930 semantics. Let's be specific to C++ for now. */
25931 || !is_cxx (decl))
25932 return FALSE;
25933
25934 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
25935 && TYPE_NAME (TREE_TYPE (decl)) == decl
25936 && (TYPE_STUB_DECL (TREE_TYPE (decl))
25937 != TYPE_NAME (TREE_TYPE (decl))));
25938 }
25939
25940 /* Looks up the DIE for a context. */
25941
25942 static inline dw_die_ref
25943 lookup_context_die (tree context)
25944 {
25945 if (context)
25946 {
25947 /* Find die that represents this context. */
25948 if (TYPE_P (context))
25949 {
25950 context = TYPE_MAIN_VARIANT (context);
25951 dw_die_ref ctx = lookup_type_die (context);
25952 if (!ctx)
25953 return NULL;
25954 return strip_naming_typedef (context, ctx);
25955 }
25956 else
25957 return lookup_decl_die (context);
25958 }
25959 return comp_unit_die ();
25960 }
25961
25962 /* Returns the DIE for a context. */
25963
25964 static inline dw_die_ref
25965 get_context_die (tree context)
25966 {
25967 if (context)
25968 {
25969 /* Find die that represents this context. */
25970 if (TYPE_P (context))
25971 {
25972 context = TYPE_MAIN_VARIANT (context);
25973 return strip_naming_typedef (context, force_type_die (context));
25974 }
25975 else
25976 return force_decl_die (context);
25977 }
25978 return comp_unit_die ();
25979 }
25980
25981 /* Returns the DIE for decl. A DIE will always be returned. */
25982
25983 static dw_die_ref
25984 force_decl_die (tree decl)
25985 {
25986 dw_die_ref decl_die;
25987 unsigned saved_external_flag;
25988 tree save_fn = NULL_TREE;
25989 decl_die = lookup_decl_die (decl);
25990 if (!decl_die)
25991 {
25992 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
25993
25994 decl_die = lookup_decl_die (decl);
25995 if (decl_die)
25996 return decl_die;
25997
25998 switch (TREE_CODE (decl))
25999 {
26000 case FUNCTION_DECL:
26001 /* Clear current_function_decl, so that gen_subprogram_die thinks
26002 that this is a declaration. At this point, we just want to force
26003 declaration die. */
26004 save_fn = current_function_decl;
26005 current_function_decl = NULL_TREE;
26006 gen_subprogram_die (decl, context_die);
26007 current_function_decl = save_fn;
26008 break;
26009
26010 case VAR_DECL:
26011 /* Set external flag to force declaration die. Restore it after
26012 gen_decl_die() call. */
26013 saved_external_flag = DECL_EXTERNAL (decl);
26014 DECL_EXTERNAL (decl) = 1;
26015 gen_decl_die (decl, NULL, NULL, context_die);
26016 DECL_EXTERNAL (decl) = saved_external_flag;
26017 break;
26018
26019 case NAMESPACE_DECL:
26020 if (dwarf_version >= 3 || !dwarf_strict)
26021 dwarf2out_decl (decl);
26022 else
26023 /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace. */
26024 decl_die = comp_unit_die ();
26025 break;
26026
26027 case TRANSLATION_UNIT_DECL:
26028 decl_die = comp_unit_die ();
26029 break;
26030
26031 default:
26032 gcc_unreachable ();
26033 }
26034
26035 /* We should be able to find the DIE now. */
26036 if (!decl_die)
26037 decl_die = lookup_decl_die (decl);
26038 gcc_assert (decl_die);
26039 }
26040
26041 return decl_die;
26042 }
26043
26044 /* Returns the DIE for TYPE, which must not be a base type. A DIE is
26045 always returned. */
26046
26047 static dw_die_ref
26048 force_type_die (tree type)
26049 {
26050 dw_die_ref type_die;
26051
26052 type_die = lookup_type_die (type);
26053 if (!type_die)
26054 {
26055 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
26056
26057 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
26058 false, context_die);
26059 gcc_assert (type_die);
26060 }
26061 return type_die;
26062 }
26063
26064 /* Force out any required namespaces to be able to output DECL,
26065 and return the new context_die for it, if it's changed. */
26066
26067 static dw_die_ref
26068 setup_namespace_context (tree thing, dw_die_ref context_die)
26069 {
26070 tree context = (DECL_P (thing)
26071 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
26072 if (context && TREE_CODE (context) == NAMESPACE_DECL)
26073 /* Force out the namespace. */
26074 context_die = force_decl_die (context);
26075
26076 return context_die;
26077 }
26078
26079 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
26080 type) within its namespace, if appropriate.
26081
26082 For compatibility with older debuggers, namespace DIEs only contain
26083 declarations; all definitions are emitted at CU scope, with
26084 DW_AT_specification pointing to the declaration (like with class
26085 members). */
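/* A rough sketch of the intended DIE shape (assumed example, not verbatim
   output):

     namespace N { int i; }

       DW_TAG_namespace "N"
         DW_TAG_variable "i"      (declaration, with DW_AT_declaration)
       DW_TAG_variable            (definition at CU scope)
         DW_AT_specification -> the declaration above  */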
26086
26087 static dw_die_ref
26088 declare_in_namespace (tree thing, dw_die_ref context_die)
26089 {
26090 dw_die_ref ns_context;
26091
26092 if (debug_info_level <= DINFO_LEVEL_TERSE)
26093 return context_die;
26094
26095 /* External declarations in the local scope only need to be emitted
26096 once, not once in the namespace and once in the scope.
26097
26098 This avoids declaring the `extern' below in the
26099 namespace DIE as well as in the innermost scope:
26100
26101 namespace S
26102 {
26103 int i=5;
26104 int foo()
26105 {
26106 int i=8;
26107 extern int i;
26108 return i;
26109 }
26110 }
26111 */
26112 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
26113 return context_die;
26114
26115 /* If this decl is from an inlined function, then don't try to emit it in its
26116 namespace, as we will get confused. It would have already been emitted
26117 when the abstract instance of the inline function was emitted anyway. */
26118 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
26119 return context_die;
26120
26121 ns_context = setup_namespace_context (thing, context_die);
26122
26123 if (ns_context != context_die)
26124 {
26125 if (is_fortran () || is_dlang ())
26126 return ns_context;
26127 if (DECL_P (thing))
26128 gen_decl_die (thing, NULL, NULL, ns_context);
26129 else
26130 gen_type_die (thing, ns_context);
26131 }
26132 return context_die;
26133 }
26134
26135 /* Generate a DIE for a namespace or namespace alias. */
26136
26137 static void
26138 gen_namespace_die (tree decl, dw_die_ref context_die)
26139 {
26140 dw_die_ref namespace_die;
26141
26142 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
26143 they are an alias of. */
26144 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
26145 {
26146 /* Output a real namespace or module. */
26147 context_die = setup_namespace_context (decl, comp_unit_die ());
26148 namespace_die = new_die (is_fortran () || is_dlang ()
26149 ? DW_TAG_module : DW_TAG_namespace,
26150 context_die, decl);
26151 /* For Fortran modules defined in a different CU, don't add src coords. */
26152 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
26153 {
26154 const char *name = dwarf2_name (decl, 0);
26155 if (name)
26156 add_name_attribute (namespace_die, name);
26157 }
26158 else
26159 add_name_and_src_coords_attributes (namespace_die, decl);
26160 if (DECL_EXTERNAL (decl))
26161 add_AT_flag (namespace_die, DW_AT_declaration, 1);
26162 equate_decl_number_to_die (decl, namespace_die);
26163 }
26164 else
26165 {
26166 /* Output a namespace alias. */
26167
26168 /* Force out the namespace we are an alias of, if necessary. */
26169 dw_die_ref origin_die
26170 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
26171
26172 if (DECL_FILE_SCOPE_P (decl)
26173 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
26174 context_die = setup_namespace_context (decl, comp_unit_die ());
26175 /* Now create the namespace alias DIE. */
26176 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
26177 add_name_and_src_coords_attributes (namespace_die, decl);
26178 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
26179 equate_decl_number_to_die (decl, namespace_die);
26180 }
26181 if ((dwarf_version >= 5 || !dwarf_strict)
26182 && lang_hooks.decls.decl_dwarf_attribute (decl,
26183 DW_AT_export_symbols) == 1)
26184 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
26185
26186 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
26187 if (want_pubnames ())
26188 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
26189 }
26190
26191 /* Generate Dwarf debug information for a decl described by DECL.
26192 The return value is currently only meaningful for PARM_DECLs;
26193 for all other decls it returns NULL.
26194
26195 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
26196 It can be NULL otherwise. */
26197
26198 static dw_die_ref
26199 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
26200 dw_die_ref context_die)
26201 {
26202 tree decl_or_origin = decl ? decl : origin;
26203 tree class_origin = NULL, ultimate_origin;
26204
26205 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
26206 return NULL;
26207
26208 switch (TREE_CODE (decl_or_origin))
26209 {
26210 case ERROR_MARK:
26211 break;
26212
26213 case CONST_DECL:
26214 if (!is_fortran () && !is_ada () && !is_dlang ())
26215 {
26216 /* The individual enumerators of an enum type get output when we output
26217 the Dwarf representation of the relevant enum type itself. */
26218 break;
26219 }
26220
26221 /* Emit its type. */
26222 gen_type_die (TREE_TYPE (decl), context_die);
26223
26224 /* And its containing namespace. */
26225 context_die = declare_in_namespace (decl, context_die);
26226
26227 gen_const_die (decl, context_die);
26228 break;
26229
26230 case FUNCTION_DECL:
26231 #if 0
26232 /* FIXME */
26233 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
26234 on local redeclarations of global functions. That seems broken. */
26235 if (current_function_decl != decl)
26236 /* This is only a declaration. */;
26237 #endif
26238
26239 /* We should have abstract copies already and should not generate
26240 stray type DIEs in late LTO dumping. */
26241 if (! early_dwarf)
26242 ;
26243
26244 /* If we're emitting a clone, emit info for the abstract instance. */
26245 else if (origin || DECL_ORIGIN (decl) != decl)
26246 dwarf2out_abstract_function (origin
26247 ? DECL_ORIGIN (origin)
26248 : DECL_ABSTRACT_ORIGIN (decl));
26249
26250 /* If we're emitting a possibly inlined function, emit it as an
26251 abstract instance. */
26252 else if (cgraph_function_possibly_inlined_p (decl)
26253 && ! DECL_ABSTRACT_P (decl)
26254 && ! class_or_namespace_scope_p (context_die)
26255 /* dwarf2out_abstract_function won't emit a die if this is just
26256 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
26257 that case, because that works only if we have a die. */
26258 && DECL_INITIAL (decl) != NULL_TREE)
26259 dwarf2out_abstract_function (decl);
26260
26261 /* Otherwise we're emitting the primary DIE for this decl. */
26262 else if (debug_info_level > DINFO_LEVEL_TERSE)
26263 {
26264 /* Before we describe the FUNCTION_DECL itself, make sure that we
26265 have its containing type. */
26266 if (!origin)
26267 origin = decl_class_context (decl);
26268 if (origin != NULL_TREE)
26269 gen_type_die (origin, context_die);
26270
26271 /* And its return type. */
26272 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
26273
26274 /* And its virtual context. */
26275 if (DECL_VINDEX (decl) != NULL_TREE)
26276 gen_type_die (DECL_CONTEXT (decl), context_die);
26277
26278 /* Make sure we have a member DIE for decl. */
26279 if (origin != NULL_TREE)
26280 gen_type_die_for_member (origin, decl, context_die);
26281
26282 /* And its containing namespace. */
26283 context_die = declare_in_namespace (decl, context_die);
26284 }
26285
26286 /* Now output a DIE to represent the function itself. */
26287 if (decl)
26288 gen_subprogram_die (decl, context_die);
26289 break;
26290
26291 case TYPE_DECL:
26292 /* If we are in terse mode, don't generate any DIEs to represent any
26293 actual typedefs. */
26294 if (debug_info_level <= DINFO_LEVEL_TERSE)
26295 break;
26296
26297 /* In the special case of a TYPE_DECL node representing the declaration
26298 of some type tag, if the given TYPE_DECL is marked as having been
26299 instantiated from some other (original) TYPE_DECL node (e.g. one which
26300 was generated within the original definition of an inline function) we
26301 used to generate a special (abbreviated) DW_TAG_structure_type,
26302 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
26303 should actually be referencing those DIEs, as variable DIEs with that
26304 type would already be emitted in the abstract origin, so they were always
26305 removed during unused type pruning. Don't add anything in this
26306 case. */
26307 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
26308 break;
26309
26310 if (is_redundant_typedef (decl))
26311 gen_type_die (TREE_TYPE (decl), context_die);
26312 else
26313 /* Output a DIE to represent the typedef itself. */
26314 gen_typedef_die (decl, context_die);
26315 break;
26316
26317 case LABEL_DECL:
26318 if (debug_info_level >= DINFO_LEVEL_NORMAL)
26319 gen_label_die (decl, context_die);
26320 break;
26321
26322 case VAR_DECL:
26323 case RESULT_DECL:
26324 /* If we are in terse mode, don't generate any DIEs to represent any
26325 variable declarations or definitions. */
26326 if (debug_info_level <= DINFO_LEVEL_TERSE)
26327 break;
26328
26329 /* Avoid generating stray type DIEs during late dwarf dumping.
26330 All types have been dumped early. */
26331 if (early_dwarf
26332 /* ??? But in LTRANS we cannot annotate early created variably
26333 modified type DIEs without copying them and adjusting all
26334 references to them. Dump them again as happens for inlining
26335 which copies both the decl and the types. */
26336 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26337 in VLA bound information for example. */
26338 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26339 current_function_decl)))
26340 {
26341 /* Output any DIEs that are needed to specify the type of this data
26342 object. */
26343 if (decl_by_reference_p (decl_or_origin))
26344 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26345 else
26346 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26347 }
26348
26349 if (early_dwarf)
26350 {
26351 /* And its containing type. */
26352 class_origin = decl_class_context (decl_or_origin);
26353 if (class_origin != NULL_TREE)
26354 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
26355
26356 /* And its containing namespace. */
26357 context_die = declare_in_namespace (decl_or_origin, context_die);
26358 }
26359
26360 /* Now output the DIE to represent the data object itself. This gets
26361 complicated because of the possibility that the VAR_DECL really
26362 represents an inlined instance of a formal parameter for an inline
26363 function. */
26364 ultimate_origin = decl_ultimate_origin (decl_or_origin);
26365 if (ultimate_origin != NULL_TREE
26366 && TREE_CODE (ultimate_origin) == PARM_DECL)
26367 gen_formal_parameter_die (decl, origin,
26368 true /* Emit name attribute. */,
26369 context_die);
26370 else
26371 gen_variable_die (decl, origin, context_die);
26372 break;
26373
26374 case FIELD_DECL:
26375 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
26376 /* Ignore the nameless fields that are used to skip bits but handle C++
26377 anonymous unions and structs. */
26378 if (DECL_NAME (decl) != NULL_TREE
26379 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
26380 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
26381 {
26382 gen_type_die (member_declared_type (decl), context_die);
26383 gen_field_die (decl, ctx, context_die);
26384 }
26385 break;
26386
26387 case PARM_DECL:
26388 /* Avoid generating stray type DIEs during late dwarf dumping.
26389 All types have been dumped early. */
26390 if (early_dwarf
26391 /* ??? But in LTRANS we cannot annotate early created variably
26392 modified type DIEs without copying them and adjusting all
26393 references to them. Dump them again as happens for inlining
26394 which copies both the decl and the types. */
26395 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26396 in VLA bound information for example. */
26397 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26398 current_function_decl)))
26399 {
26400 if (DECL_BY_REFERENCE (decl_or_origin))
26401 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26402 else
26403 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26404 }
26405 return gen_formal_parameter_die (decl, origin,
26406 true /* Emit name attribute. */,
26407 context_die);
26408
26409 case NAMESPACE_DECL:
26410 if (dwarf_version >= 3 || !dwarf_strict)
26411 gen_namespace_die (decl, context_die);
26412 break;
26413
26414 case IMPORTED_DECL:
26415 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
26416 DECL_CONTEXT (decl), context_die);
26417 break;
26418
26419 case NAMELIST_DECL:
26420 gen_namelist_decl (DECL_NAME (decl), context_die,
26421 NAMELIST_DECL_ASSOCIATED_DECL (decl));
26422 break;
26423
26424 default:
26425 /* Probably some frontend-internal decl. Assume we don't care. */
26426 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
26427 break;
26428 }
26429
26430 return NULL;
26431 }
26432 \f
26433 /* Output initial debug information for global DECL. Called at the
26434 end of the parsing process.
26435
26436 This is the initial debug generation process. As such, the DIEs
26437 generated may be incomplete. A later debug generation pass
26438 (dwarf2out_late_global_decl) will augment the information generated
26439 in this pass (e.g., with complete location info). */
26440
26441 static void
26442 dwarf2out_early_global_decl (tree decl)
26443 {
26444 set_early_dwarf s;
26445
26446 /* gen_decl_die() will set DECL_ABSTRACT because
26447 cgraph_function_possibly_inlined_p() returns true. This in
26448 turn will cause DW_AT_inline attributes to be set.
26449
26450 This happens because at early dwarf generation, there is no
26451 cgraph information, causing cgraph_function_possibly_inlined_p()
26452 to return true. Trick cgraph_function_possibly_inlined_p()
26453 while we generate dwarf early. */
26454 bool save = symtab->global_info_ready;
26455 symtab->global_info_ready = true;
26456
26457 /* We don't handle TYPE_DECLs. If required, they'll be reached via
26458 other DECLs and they can point to template types or other things
26459 that dwarf2out can't handle when done via dwarf2out_decl. */
26460 if (TREE_CODE (decl) != TYPE_DECL
26461 && TREE_CODE (decl) != PARM_DECL)
26462 {
26463 if (TREE_CODE (decl) == FUNCTION_DECL)
26464 {
26465 tree save_fndecl = current_function_decl;
26466
26467 /* For nested functions, make sure we have DIEs for the parents first
26468 so that all nested DIEs are generated at the proper scope in the
26469 first shot. */
26470 tree context = decl_function_context (decl);
26471 if (context != NULL)
26472 {
26473 dw_die_ref context_die = lookup_decl_die (context);
26474 current_function_decl = context;
26475
26476 /* Avoid emitting DIEs multiple times, but still process CONTEXT
26477 enough so that it lands in its own context. This avoids type
26478 pruning issues later on. */
26479 if (context_die == NULL || is_declaration_die (context_die))
26480 dwarf2out_early_global_decl (context);
26481 }
26482
26483 /* Emit an abstract origin of a function first. This happens
26484 with C++ constructor clones for example and makes
26485 dwarf2out_abstract_function happy which requires the early
26486 DIE of the abstract instance to be present. */
26487 tree origin = DECL_ABSTRACT_ORIGIN (decl);
26488 dw_die_ref origin_die;
26489 if (origin != NULL
26490 /* Do not emit the DIE multiple times but make sure to
26491 process it fully here in case we just saw a declaration. */
26492 && ((origin_die = lookup_decl_die (origin)) == NULL
26493 || is_declaration_die (origin_die)))
26494 {
26495 current_function_decl = origin;
26496 dwarf2out_decl (origin);
26497 }
26498
26499 /* Emit the DIE for decl but avoid doing that multiple times. */
26500 dw_die_ref old_die;
26501 if ((old_die = lookup_decl_die (decl)) == NULL
26502 || is_declaration_die (old_die))
26503 {
26504 current_function_decl = decl;
26505 dwarf2out_decl (decl);
26506 }
26507
26508 current_function_decl = save_fndecl;
26509 }
26510 else
26511 dwarf2out_decl (decl);
26512 }
26513 symtab->global_info_ready = save;
26514 }
26515
26516 /* Return whether EXPR is an expression with the following pattern:
26517 INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */
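/* For example (illustration only): a DECL_VALUE_EXPR of the form
   "*(int *) 0x1234", when represented as an INDIRECT_REF around a NOP_EXPR
   cast of an INTEGER_CST, is the kind of expression accepted here. */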
26518
26519 static bool
26520 is_trivial_indirect_ref (tree expr)
26521 {
26522 if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF)
26523 return false;
26524
26525 tree nop = TREE_OPERAND (expr, 0);
26526 if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR)
26527 return false;
26528
26529 tree int_cst = TREE_OPERAND (nop, 0);
26530 return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST;
26531 }
26532
26533 /* Output debug information for global decl DECL. Called from
26534 toplev.c after compilation proper has finished. */
26535
26536 static void
26537 dwarf2out_late_global_decl (tree decl)
26538 {
26539 /* Fill-in any location information we were unable to determine
26540 on the first pass. */
26541 if (VAR_P (decl))
26542 {
26543 dw_die_ref die = lookup_decl_die (decl);
26544
26545 /* We may have to generate early debug late for LTO in case debug
26546 was not enabled at compile-time or the target doesn't support
26547 the LTO early debug scheme. */
26548 if (! die && in_lto_p)
26549 {
26550 dwarf2out_decl (decl);
26551 die = lookup_decl_die (decl);
26552 }
26553
26554 if (die)
26555 {
26556 /* We get called via the symtab code invoking late_global_decl
26557 for symbols that are optimized out.
26558
26559 Do not add locations for those, except if they have a
26560 DECL_VALUE_EXPR, in which case they are relevant for debuggers.
26561 Still don't add a location if the DECL_VALUE_EXPR is not a trivial
26562 INDIRECT_REF expression, as this could generate relocations to
26563 text symbols in LTO object files, which is invalid. */
26564 varpool_node *node = varpool_node::get (decl);
26565 if ((! node || ! node->definition)
26566 && ! (DECL_HAS_VALUE_EXPR_P (decl)
26567 && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl))))
26568 tree_add_const_value_attribute_for_decl (die, decl);
26569 else
26570 add_location_or_const_value_attribute (die, decl, false);
26571 }
26572 }
26573 }
26574
26575 /* Output debug information for type decl DECL. Called from toplev.c
26576 and from language front ends (to record built-in types). */
26577 static void
26578 dwarf2out_type_decl (tree decl, int local)
26579 {
26580 if (!local)
26581 {
26582 set_early_dwarf s;
26583 dwarf2out_decl (decl);
26584 }
26585 }
26586
26587 /* Output debug information for imported module or decl DECL.
26588 NAME is non-NULL name in the lexical block if the decl has been renamed.
26589 LEXICAL_BLOCK is the lexical block (which TREE_CODE is a BLOCK)
26590 that DECL belongs to.
26591 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
26592 static void
26593 dwarf2out_imported_module_or_decl_1 (tree decl,
26594 tree name,
26595 tree lexical_block,
26596 dw_die_ref lexical_block_die)
26597 {
26598 expanded_location xloc;
26599 dw_die_ref imported_die = NULL;
26600 dw_die_ref at_import_die;
26601
26602 if (TREE_CODE (decl) == IMPORTED_DECL)
26603 {
26604 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
26605 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
26606 gcc_assert (decl);
26607 }
26608 else
26609 xloc = expand_location (input_location);
26610
26611 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
26612 {
26613 at_import_die = force_type_die (TREE_TYPE (decl));
26614 /* For namespace N { typedef void T; } using N::T; base_type_die
26615 returns NULL, but DW_TAG_imported_declaration requires
26616 the DW_AT_import attribute. Force creation of DW_TAG_typedef. */
26617 if (!at_import_die)
26618 {
26619 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
26620 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
26621 at_import_die = lookup_type_die (TREE_TYPE (decl));
26622 gcc_assert (at_import_die);
26623 }
26624 }
26625 else
26626 {
26627 at_import_die = lookup_decl_die (decl);
26628 if (!at_import_die)
26629 {
26630 /* If we're trying to avoid duplicate debug info, we may not have
26631 emitted the member decl for this field. Emit it now. */
26632 if (TREE_CODE (decl) == FIELD_DECL)
26633 {
26634 tree type = DECL_CONTEXT (decl);
26635
26636 if (TYPE_CONTEXT (type)
26637 && TYPE_P (TYPE_CONTEXT (type))
26638 && !should_emit_struct_debug (TYPE_CONTEXT (type),
26639 DINFO_USAGE_DIR_USE))
26640 return;
26641 gen_type_die_for_member (type, decl,
26642 get_context_die (TYPE_CONTEXT (type)));
26643 }
26644 if (TREE_CODE (decl) == NAMELIST_DECL)
26645 at_import_die = gen_namelist_decl (DECL_NAME (decl),
26646 get_context_die (DECL_CONTEXT (decl)),
26647 NULL_TREE);
26648 else
26649 at_import_die = force_decl_die (decl);
26650 }
26651 }
26652
26653 if (TREE_CODE (decl) == NAMESPACE_DECL)
26654 {
26655 if (dwarf_version >= 3 || !dwarf_strict)
26656 imported_die = new_die (DW_TAG_imported_module,
26657 lexical_block_die,
26658 lexical_block);
26659 else
26660 return;
26661 }
26662 else
26663 imported_die = new_die (DW_TAG_imported_declaration,
26664 lexical_block_die,
26665 lexical_block);
26666
26667 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
26668 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
26669 if (debug_column_info && xloc.column)
26670 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
26671 if (name)
26672 add_AT_string (imported_die, DW_AT_name,
26673 IDENTIFIER_POINTER (name));
26674 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
26675 }
26676
26677 /* Output debug information for imported module or decl DECL.
26678 NAME is non-NULL name in context if the decl has been renamed.
26679 CHILD is true if decl is one of the renamed decls as part of
26680 importing a whole module.
26681 IMPLICIT is set if this hook is called for an implicit import
26682 such as an inline namespace. */
26683
26684 static void
26685 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
26686 bool child, bool implicit)
26687 {
26688 /* dw_die_ref at_import_die; */
26689 dw_die_ref scope_die;
26690
26691 if (debug_info_level <= DINFO_LEVEL_TERSE)
26692 return;
26693
26694 gcc_assert (decl);
26695
26696 /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace
26697 should be enough; for DWARF4 and older, even if we emit
26698 DW_AT_export_symbols as an extension, add the implicit
26699 DW_TAG_imported_module anyway for consumers unaware of that attribute. */
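/* Illustrative sketch (assumed shape, not verbatim output): for

     inline namespace v1 { int f (); }

   DWARF 5 can mark the namespace itself with DW_AT_export_symbols on its
   DW_TAG_namespace DIE, whereas for DWARF 4 and older an explicit
   DW_TAG_imported_module referring to that namespace is emitted in the
   enclosing scope so its names remain visible to older consumers. */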
26700 if (implicit
26701 && dwarf_version >= 5
26702 && lang_hooks.decls.decl_dwarf_attribute (decl,
26703 DW_AT_export_symbols) == 1)
26704 return;
26705
26706 set_early_dwarf s;
26707
26708 /* To emit DW_TAG_imported_module or DW_TAG_imported_declaration, we need
26709 two DIEs: the DIE for the decl (to be referenced by DW_AT_import) and the
26710 scope DIE. First, get the DIE for the decl itself. */
26711
26712 /* Get the scope DIE for the decl's context. Use comp_unit_die for a global
26713 module or decl. If no DIE is found for a non-global, force a new one. */
26714 if (context
26715 && TYPE_P (context)
26716 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
26717 return;
26718
26719 scope_die = get_context_die (context);
26720
26721 if (child)
26722 {
26723 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
26724 there is nothing we can do here. */
26725 if (dwarf_version < 3 && dwarf_strict)
26726 return;
26727
26728 gcc_assert (scope_die->die_child);
26729 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
26730 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
26731 scope_die = scope_die->die_child;
26732 }
26733
26734 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
26735 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
26736 }
26737
26738 /* Output debug information for namelists. */
26739
26740 static dw_die_ref
26741 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
26742 {
26743 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
26744 tree value;
26745 unsigned i;
26746
26747 if (debug_info_level <= DINFO_LEVEL_TERSE)
26748 return NULL;
26749
26750 gcc_assert (scope_die != NULL);
26751 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
26752 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
26753
26754 /* If there are no item_decls, we have a nondefining namelist, e.g.
26755 with USE association; hence, set DW_AT_declaration. */
26756 if (item_decls == NULL_TREE)
26757 {
26758 add_AT_flag (nml_die, DW_AT_declaration, 1);
26759 return nml_die;
26760 }
26761
26762 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
26763 {
26764 nml_item_ref_die = lookup_decl_die (value);
26765 if (!nml_item_ref_die)
26766 nml_item_ref_die = force_decl_die (value);
26767
26768 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
26769 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
26770 }
26771 return nml_die;
26772 }
26773
26774
26775 /* Write the debugging output for DECL. */
26776
26777 static void
26778 dwarf2out_decl (tree decl)
26779 {
26780 dw_die_ref context_die = comp_unit_die ();
26781
26782 switch (TREE_CODE (decl))
26783 {
26784 case ERROR_MARK:
26785 return;
26786
26787 case FUNCTION_DECL:
26788 /* If we're a nested function, initially use a parent of NULL; if we're
26789 a plain function, this will be fixed up in decls_for_scope. If
26790 we're a method, it will be ignored, since we already have a DIE.
26791 Avoid doing this late though since clones of class methods may
26792 otherwise end up in limbo and create type DIEs late. */
26793 if (early_dwarf
26794 && decl_function_context (decl)
26795 /* But if we're in terse mode, we don't care about scope. */
26796 && debug_info_level > DINFO_LEVEL_TERSE)
26797 context_die = NULL;
26798 break;
26799
26800 case VAR_DECL:
26801 /* For local statics, look up the proper context DIE. */
26802 if (local_function_static (decl))
26803 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26804
26805 /* If we are in terse mode, don't generate any DIEs to represent any
26806 variable declarations or definitions. */
26807 if (debug_info_level <= DINFO_LEVEL_TERSE)
26808 return;
26809 break;
26810
26811 case CONST_DECL:
26812 if (debug_info_level <= DINFO_LEVEL_TERSE)
26813 return;
26814 if (!is_fortran () && !is_ada () && !is_dlang ())
26815 return;
26816 if (TREE_STATIC (decl) && decl_function_context (decl))
26817 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26818 break;
26819
26820 case NAMESPACE_DECL:
26821 case IMPORTED_DECL:
26822 if (debug_info_level <= DINFO_LEVEL_TERSE)
26823 return;
26824 if (lookup_decl_die (decl) != NULL)
26825 return;
26826 break;
26827
26828 case TYPE_DECL:
26829 /* Don't emit stubs for types unless they are needed by other DIEs. */
26830 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
26831 return;
26832
26833 /* Don't bother trying to generate any DIEs to represent any of the
26834 normal built-in types for the language we are compiling. */
26835 if (DECL_IS_BUILTIN (decl))
26836 return;
26837
26838 /* If we are in terse mode, don't generate any DIEs for types. */
26839 if (debug_info_level <= DINFO_LEVEL_TERSE)
26840 return;
26841
26842 /* If we're a function-scope tag, initially use a parent of NULL;
26843 this will be fixed up in decls_for_scope. */
26844 if (decl_function_context (decl))
26845 context_die = NULL;
26846
26847 break;
26848
26849 case NAMELIST_DECL:
26850 break;
26851
26852 default:
26853 return;
26854 }
26855
26856 gen_decl_die (decl, NULL, NULL, context_die);
26857
26858 if (flag_checking)
26859 {
26860 dw_die_ref die = lookup_decl_die (decl);
26861 if (die)
26862 check_die (die);
26863 }
26864 }
26865
26866 /* Write the debugging output for DECL. */
26867
26868 static void
26869 dwarf2out_function_decl (tree decl)
26870 {
26871 dwarf2out_decl (decl);
26872 call_arg_locations = NULL;
26873 call_arg_loc_last = NULL;
26874 call_site_count = -1;
26875 tail_call_site_count = -1;
26876 decl_loc_table->empty ();
26877 cached_dw_loc_list_table->empty ();
26878 }
26879
26880 /* Output a marker (i.e. a label) for the beginning of the generated code for
26881 a lexical block. */
26882
26883 static void
26884 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
26885 unsigned int blocknum)
26886 {
26887 switch_to_section (current_function_section ());
26888 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
26889 }
26890
26891 /* Output a marker (i.e. a label) for the end of the generated code for a
26892 lexical block. */
26893
26894 static void
26895 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
26896 {
26897 switch_to_section (current_function_section ());
26898 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
26899 }
26900
26901 /* Returns nonzero if it is appropriate not to emit any debugging
26902 information for BLOCK, because it doesn't contain any instructions.
26903
26904 Don't allow this for blocks with nested functions or local classes
26905 as we would end up with orphans, and in the presence of scheduling
26906 we may end up calling them anyway. */
26907
26908 static bool
26909 dwarf2out_ignore_block (const_tree block)
26910 {
26911 tree decl;
26912 unsigned int i;
26913
26914 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
26915 if (TREE_CODE (decl) == FUNCTION_DECL
26916 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26917 return 0;
26918 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
26919 {
26920 decl = BLOCK_NONLOCALIZED_VAR (block, i);
26921 if (TREE_CODE (decl) == FUNCTION_DECL
26922 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26923 return 0;
26924 }
26925
26926 return 1;
26927 }
26928
26929 /* Hash table routines for file_hash. */
26930
26931 bool
26932 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
26933 {
26934 return filename_cmp (p1->filename, p2) == 0;
26935 }
26936
26937 hashval_t
26938 dwarf_file_hasher::hash (dwarf_file_data *p)
26939 {
26940 return htab_hash_string (p->filename);
26941 }
26942
26943 /* Look up FILE_NAME (in the list of filenames that we know about here in
26944 dwarf2out.c) and return its "index". The index of each (known) filename is
26945 just a unique number which is associated with only that one filename. We
26946 need such numbers for the sake of generating labels (in the .debug_sfnames
26947 section) and references to those file numbers (in the .debug_srcinfo
26948 and .debug_macinfo sections). If the filename given as an argument is not
26949 found in our current list, add it to the list and assign it the next
26950 available unique index number. */
26951
26952 static struct dwarf_file_data *
26953 lookup_filename (const char *file_name)
26954 {
26955 struct dwarf_file_data * created;
26956
26957 if (!file_name)
26958 return NULL;
26959
26960 dwarf_file_data **slot
26961 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
26962 INSERT);
26963 if (*slot)
26964 return *slot;
26965
26966 created = ggc_alloc<dwarf_file_data> ();
26967 created->filename = file_name;
26968 created->emitted_number = 0;
26969 *slot = created;
26970 return created;
26971 }
26972
26973 /* If the assembler will construct the file table, then translate the compiler
26974 internal file table number into the assembler file table number, and emit
26975 a .file directive if we haven't already emitted one. The file table
26976 numbers are different because we prune debug info for unused variables and
26977 types, which may include filenames. */
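/* The directive written below looks roughly like (assuming a source file
   "foo.c" assigned assembler file number 1):

     .file 1 "foo.c"

   with the name possibly rewritten by remap_debug_filename. */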
26978
26979 static int
26980 maybe_emit_file (struct dwarf_file_data * fd)
26981 {
26982 if (! fd->emitted_number)
26983 {
26984 if (last_emitted_file)
26985 fd->emitted_number = last_emitted_file->emitted_number + 1;
26986 else
26987 fd->emitted_number = 1;
26988 last_emitted_file = fd;
26989
26990 if (output_asm_line_debug_info ())
26991 {
26992 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
26993 output_quoted_string (asm_out_file,
26994 remap_debug_filename (fd->filename));
26995 fputc ('\n', asm_out_file);
26996 }
26997 }
26998
26999 return fd->emitted_number;
27000 }
27001
27002 /* Schedule generation of a DW_AT_const_value attribute to DIE.
27003 That generation should happen after function debug info has been
27004 generated. The value of the attribute is the constant value of ARG. */
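/* For instance (an assumed illustration): given

     template <int N> struct A { };
     A<3> a;

   the template value parameter DIE for N inside the DIE for A<3> is the
   kind of DIE scheduled here, and the attribute added later would be
   DW_AT_const_value 3. */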
27005
27006 static void
27007 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
27008 {
27009 die_arg_entry entry;
27010
27011 if (!die || !arg)
27012 return;
27013
27014 gcc_assert (early_dwarf);
27015
27016 if (!tmpl_value_parm_die_table)
27017 vec_alloc (tmpl_value_parm_die_table, 32);
27018
27019 entry.die = die;
27020 entry.arg = arg;
27021 vec_safe_push (tmpl_value_parm_die_table, entry);
27022 }
27023
27024 /* Return TRUE if T is an instance of a generic type, FALSE
27025 otherwise. */
27026
27027 static bool
27028 generic_type_p (tree t)
27029 {
27030 if (t == NULL_TREE || !TYPE_P (t))
27031 return false;
27032 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
27033 }
27034
27035 /* Schedule the generation of the generic parameter DIEs for the
27036 instance of generic type T. The actual generation is done later
27037 by gen_scheduled_generic_parms_dies. */
27038
27039 static void
27040 schedule_generic_params_dies_gen (tree t)
27041 {
27042 if (!generic_type_p (t))
27043 return;
27044
27045 gcc_assert (early_dwarf);
27046
27047 if (!generic_type_instances)
27048 vec_alloc (generic_type_instances, 256);
27049
27050 vec_safe_push (generic_type_instances, t);
27051 }
27052
27053 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
27054 by append_entry_to_tmpl_value_parm_die_table. This function must
27055 be called after function DIEs have been generated. */
27056
27057 static void
27058 gen_remaining_tmpl_value_param_die_attribute (void)
27059 {
27060 if (tmpl_value_parm_die_table)
27061 {
27062 unsigned i, j;
27063 die_arg_entry *e;
27064
27065 /* We do this in two phases - first get the cases we can
27066 handle during early-finish, preserving those we cannot
27067 (containing symbolic constants where we don't yet know
27068 whether we are going to output the referenced symbols).
27069 For those we try again at late-finish. */
27070 j = 0;
27071 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
27072 {
27073 if (!e->die->removed
27074 && !tree_add_const_value_attribute (e->die, e->arg))
27075 {
27076 dw_loc_descr_ref loc = NULL;
27077 if (! early_dwarf
27078 && (dwarf_version >= 5 || !dwarf_strict))
27079 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
27080 if (loc)
27081 add_AT_loc (e->die, DW_AT_location, loc);
27082 else
27083 (*tmpl_value_parm_die_table)[j++] = *e;
27084 }
27085 }
27086 tmpl_value_parm_die_table->truncate (j);
27087 }
27088 }
27089
27090 /* Generate generic parameters DIEs for instances of generic types
27091 that have been previously scheduled by
27092 schedule_generic_params_dies_gen. This function must be called
27093 after all the types of the CU have been laid out. */
27094
27095 static void
27096 gen_scheduled_generic_parms_dies (void)
27097 {
27098 unsigned i;
27099 tree t;
27100
27101 if (!generic_type_instances)
27102 return;
27103
27104 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
27105 if (COMPLETE_TYPE_P (t))
27106 gen_generic_params_dies (t);
27107
27108 generic_type_instances = NULL;
27109 }
27110
27111
27112 /* Replace DW_AT_name for the decl with name. */
27113
27114 static void
27115 dwarf2out_set_name (tree decl, tree name)
27116 {
27117 dw_die_ref die;
27118 dw_attr_node *attr;
27119 const char *dname;
27120
27121 die = TYPE_SYMTAB_DIE (decl);
27122 if (!die)
27123 return;
27124
27125 dname = dwarf2_name (name, 0);
27126 if (!dname)
27127 return;
27128
27129 attr = get_AT (die, DW_AT_name);
27130 if (attr)
27131 {
27132 struct indirect_string_node *node;
27133
27134 node = find_AT_string (dname);
27135 /* Replace the string. */
27136 attr->dw_attr_val.v.val_str = node;
27137 }
27138
27139 else
27140 add_name_attribute (die, dname);
27141 }
27142
27143 /* True if before or during processing of the first function being emitted. */
27144 static bool in_first_function_p = true;
27145 /* True if loc_note during dwarf2out_var_location call might still be
27146 before first real instruction at address equal to .Ltext0. */
27147 static bool maybe_at_text_label_p = true;
27148 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
27149 static unsigned int first_loclabel_num_not_at_text_label;
27150
27151 /* Look ahead for a real insn, or for a begin stmt marker. */
27152
27153 static rtx_insn *
27154 dwarf2out_next_real_insn (rtx_insn *loc_note)
27155 {
27156 rtx_insn *next_real = NEXT_INSN (loc_note);
27157
27158 while (next_real)
27159 if (INSN_P (next_real))
27160 break;
27161 else
27162 next_real = NEXT_INSN (next_real);
27163
27164 return next_real;
27165 }
27166
27167 /* Called by the final INSN scan whenever we see a var location. We
27168 use it to drop labels in the right places, and throw the location in
27169 our lookup table. */
27170
27171 static void
27172 dwarf2out_var_location (rtx_insn *loc_note)
27173 {
27174 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
27175 struct var_loc_node *newloc;
27176 rtx_insn *next_real, *next_note;
27177 rtx_insn *call_insn = NULL;
27178 static const char *last_label;
27179 static const char *last_postcall_label;
27180 static bool last_in_cold_section_p;
27181 static rtx_insn *expected_next_loc_note;
27182 tree decl;
27183 bool var_loc_p;
27184 var_loc_view view = 0;
27185
27186 if (!NOTE_P (loc_note))
27187 {
27188 if (CALL_P (loc_note))
27189 {
27190 maybe_reset_location_view (loc_note, cur_line_info_table);
27191 call_site_count++;
27192 if (SIBLING_CALL_P (loc_note))
27193 tail_call_site_count++;
27194 if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
27195 {
27196 call_insn = loc_note;
27197 loc_note = NULL;
27198 var_loc_p = false;
27199
27200 next_real = dwarf2out_next_real_insn (call_insn);
27201 next_note = NULL;
27202 cached_next_real_insn = NULL;
27203 goto create_label;
27204 }
27205 if (optimize == 0 && !flag_var_tracking)
27206 {
27207 /* When the var-tracking pass is not running, there is no note
27208 for indirect calls whose target is compile-time known. In this
27209 case, process such calls specifically so that we generate call
27210 sites for them anyway. */
27211 rtx x = PATTERN (loc_note);
27212 if (GET_CODE (x) == PARALLEL)
27213 x = XVECEXP (x, 0, 0);
27214 if (GET_CODE (x) == SET)
27215 x = SET_SRC (x);
27216 if (GET_CODE (x) == CALL)
27217 x = XEXP (x, 0);
27218 if (!MEM_P (x)
27219 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
27220 || !SYMBOL_REF_DECL (XEXP (x, 0))
27221 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
27222 != FUNCTION_DECL))
27223 {
27224 call_insn = loc_note;
27225 loc_note = NULL;
27226 var_loc_p = false;
27227
27228 next_real = dwarf2out_next_real_insn (call_insn);
27229 next_note = NULL;
27230 cached_next_real_insn = NULL;
27231 goto create_label;
27232 }
27233 }
27234 }
27235 else if (!debug_variable_location_views)
27236 gcc_unreachable ();
27237 else
27238 maybe_reset_location_view (loc_note, cur_line_info_table);
27239
27240 return;
27241 }
27242
27243 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
27244 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
27245 return;
27246
27247 /* Optimize processing a large consecutive sequence of location
27248 notes so we don't spend too much time in next_real_insn. If the
27249 next insn is another location note, remember the next_real_insn
27250 calculation for next time. */
27251 next_real = cached_next_real_insn;
27252 if (next_real)
27253 {
27254 if (expected_next_loc_note != loc_note)
27255 next_real = NULL;
27256 }
27257
27258 next_note = NEXT_INSN (loc_note);
27259 if (! next_note
27260 || next_note->deleted ()
27261 || ! NOTE_P (next_note)
27262 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
27263 && NOTE_KIND (next_note) != NOTE_INSN_BEGIN_STMT
27264 && NOTE_KIND (next_note) != NOTE_INSN_INLINE_ENTRY))
27265 next_note = NULL;
27266
27267 if (! next_real)
27268 next_real = dwarf2out_next_real_insn (loc_note);
27269
27270 if (next_note)
27271 {
27272 expected_next_loc_note = next_note;
27273 cached_next_real_insn = next_real;
27274 }
27275 else
27276 cached_next_real_insn = NULL;
27277
27278 /* If there are no instructions which would be affected by this note,
27279 don't do anything. */
27280 if (var_loc_p
27281 && next_real == NULL_RTX
27282 && !NOTE_DURING_CALL_P (loc_note))
27283 return;
27284
27285 create_label:
27286
27287 if (next_real == NULL_RTX)
27288 next_real = get_last_insn ();
27289
27290 /* If there were any real insns between the note we processed last time
27291 and this note (or if it is the first note), clear
27292 last_{,postcall_}label so that they are not reused this time. */
27293 if (last_var_location_insn == NULL_RTX
27294 || last_var_location_insn != next_real
27295 || last_in_cold_section_p != in_cold_section_p)
27296 {
27297 last_label = NULL;
27298 last_postcall_label = NULL;
27299 }
27300
27301 if (var_loc_p)
27302 {
27303 const char *label
27304 = NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
27305 view = cur_line_info_table->view;
27306 decl = NOTE_VAR_LOCATION_DECL (loc_note);
27307 newloc = add_var_loc_to_decl (decl, loc_note, label, view);
27308 if (newloc == NULL)
27309 return;
27310 }
27311 else
27312 {
27313 decl = NULL_TREE;
27314 newloc = NULL;
27315 }
27316
27317 /* If there were no real insns between the note we processed last time
27318 and this note, use the label we emitted last time. Otherwise
27319 create a new label and emit it. */
27320 if (last_label == NULL)
27321 {
27322 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
27323 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
27324 loclabel_num++;
27325 last_label = ggc_strdup (loclabel);
27326 /* See if loclabel might be equal to .Ltext0. If yes,
27327 bump first_loclabel_num_not_at_text_label. */
27328 if (!have_multiple_function_sections
27329 && in_first_function_p
27330 && maybe_at_text_label_p)
27331 {
27332 static rtx_insn *last_start;
27333 rtx_insn *insn;
27334 for (insn = loc_note; insn; insn = previous_insn (insn))
27335 if (insn == last_start)
27336 break;
27337 else if (!NONDEBUG_INSN_P (insn))
27338 continue;
27339 else
27340 {
27341 rtx body = PATTERN (insn);
27342 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
27343 continue;
27344 /* Inline asm could occupy zero bytes. */
27345 else if (GET_CODE (body) == ASM_INPUT
27346 || asm_noperands (body) >= 0)
27347 continue;
27348 #ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h. */
27349 else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
27350 continue;
27351 #endif
27352 else
27353 {
27354 /* Assume insn has non-zero length. */
27355 maybe_at_text_label_p = false;
27356 break;
27357 }
27358 }
27359 if (maybe_at_text_label_p)
27360 {
27361 last_start = loc_note;
27362 first_loclabel_num_not_at_text_label = loclabel_num;
27363 }
27364 }
27365 }
27366
27367 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
27368 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
27369
27370 if (!var_loc_p)
27371 {
27372 struct call_arg_loc_node *ca_loc
27373 = ggc_cleared_alloc<call_arg_loc_node> ();
27374 rtx_insn *prev = call_insn;
27375
27376 ca_loc->call_arg_loc_note
27377 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
27378 ca_loc->next = NULL;
27379 ca_loc->label = last_label;
27380 gcc_assert (prev
27381 && (CALL_P (prev)
27382 || (NONJUMP_INSN_P (prev)
27383 && GET_CODE (PATTERN (prev)) == SEQUENCE
27384 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
27385 if (!CALL_P (prev))
27386 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
27387 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
27388
27389 /* Look for a SYMBOL_REF in the "prev" instruction. */
27390 rtx x = get_call_rtx_from (PATTERN (prev));
27391 if (x)
27392 {
27393 /* Try to get the call symbol, if any. */
27394 if (MEM_P (XEXP (x, 0)))
27395 x = XEXP (x, 0);
27396 /* First, look for a memory access to a symbol_ref. */
27397 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
27398 && SYMBOL_REF_DECL (XEXP (x, 0))
27399 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
27400 ca_loc->symbol_ref = XEXP (x, 0);
27401 /* Otherwise, look at a compile-time known user-level function
27402 declaration. */
27403 else if (MEM_P (x)
27404 && MEM_EXPR (x)
27405 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
27406 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
27407 }
27408
27409 ca_loc->block = insn_scope (prev);
27410 if (call_arg_locations)
27411 call_arg_loc_last->next = ca_loc;
27412 else
27413 call_arg_locations = ca_loc;
27414 call_arg_loc_last = ca_loc;
27415 }
27416 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
27417 {
27418 newloc->label = last_label;
27419 newloc->view = view;
27420 }
27421 else
27422 {
27423 if (!last_postcall_label)
27424 {
27425 sprintf (loclabel, "%s-1", last_label);
27426 last_postcall_label = ggc_strdup (loclabel);
27427 }
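/* E.g. if last_label is ".LVL5", this yields the assembler expression
   ".LVL5-1", an address inside the call insn (see the comment below). */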
27428 newloc->label = last_postcall_label;
27429 /* ??? This view is at last_label, not last_label-1, but we
27430 could only assume view at last_label-1 is zero if we could
27431 assume calls always have length greater than one. This is
27432 probably true in general, though there might be a rare
27433 exception to this rule, e.g. if a call insn is optimized out
27434 by target magic. Then, even the -1 in the label will be
27435 wrong, which might invalidate the range. Anyway, using view,
27436 though technically possibly incorrect, will work as far as
27437 ranges go: since L-1 is in the middle of the call insn,
27438 (L-1).0 and (L-1).V shouldn't make any difference, and having
27439 the loclist entry refer to the .loc entry might be useful, so
27440 leave it like this. */
27441 newloc->view = view;
27442 }
27443
27444 if (var_loc_p && flag_debug_asm)
27445 {
27446 const char *name, *sep, *patstr;
27447 if (decl && DECL_NAME (decl))
27448 name = IDENTIFIER_POINTER (DECL_NAME (decl));
27449 else
27450 name = "";
27451 if (NOTE_VAR_LOCATION_LOC (loc_note))
27452 {
27453 sep = " => ";
27454 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
27455 }
27456 else
27457 {
27458 sep = " ";
27459 patstr = "RESET";
27460 }
27461 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
27462 name, sep, patstr);
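/* In the generated assembly this reads e.g. "# DEBUG i => r4" or
   "# DEBUG i RESET", assuming '#' is the target's ASM_COMMENT_START. */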
27463 }
27464
27465 last_var_location_insn = next_real;
27466 last_in_cold_section_p = in_cold_section_p;
27467 }
27468
27469 /* Check whether BLOCK, a lexical block, is nested within OUTER, or is
27470 OUTER itself. If BOTHWAYS, check not only that BLOCK can reach
27471 OUTER through BLOCK_SUPERCONTEXT links, but also that there is a
27472 path from OUTER to BLOCK through BLOCK_SUBBLOCKs and
27473 BLOCK_FRAGMENT_ORIGIN links. */
27474 static bool
27475 block_within_block_p (tree block, tree outer, bool bothways)
27476 {
27477 if (block == outer)
27478 return true;
27479
27480 /* Quickly check that OUTER is up BLOCK's supercontext chain. */
27481 for (tree context = BLOCK_SUPERCONTEXT (block);
27482 context != outer;
27483 context = BLOCK_SUPERCONTEXT (context))
27484 if (!context || TREE_CODE (context) != BLOCK)
27485 return false;
27486
27487 if (!bothways)
27488 return true;
27489
27490 /* Now check that each block is actually referenced by its
27491 parent. */
27492 for (tree context = BLOCK_SUPERCONTEXT (block); ;
27493 context = BLOCK_SUPERCONTEXT (context))
27494 {
27495 if (BLOCK_FRAGMENT_ORIGIN (context))
27496 {
27497 gcc_assert (!BLOCK_SUBBLOCKS (context));
27498 context = BLOCK_FRAGMENT_ORIGIN (context);
27499 }
27500 for (tree sub = BLOCK_SUBBLOCKS (context);
27501 sub != block;
27502 sub = BLOCK_CHAIN (sub))
27503 if (!sub)
27504 return false;
27505 if (context == outer)
27506 return true;
27507 else
27508 block = context;
27509 }
27510 }
27511
27512 /* Called during final while assembling the marker of the entry point
27513 for an inlined function. */
27514
27515 static void
27516 dwarf2out_inline_entry (tree block)
27517 {
27518 gcc_assert (debug_inline_points);
27519
27520 /* If we can't represent it, don't bother. */
27521 if (!(dwarf_version >= 3 || !dwarf_strict))
27522 return;
27523
27524 gcc_assert (DECL_P (block_ultimate_origin (block)));
27525
27526 /* Sanity check the block tree. This would catch a case in which
27527 BLOCK got removed from the tree reachable from the outermost
27528 lexical block, but got retained in markers. It would still link
27529 back to its parents, but some ancestor would be missing a link
27530 down the path to the sub BLOCK. If the block got removed, its
27531 BLOCK_NUMBER will not be a usable value. */
27532 if (flag_checking)
27533 gcc_assert (block_within_block_p (block,
27534 DECL_INITIAL (current_function_decl),
27535 true));
27536
27537 gcc_assert (inlined_function_outer_scope_p (block));
27538 gcc_assert (!lookup_block_die (block));
27539
27540 if (BLOCK_FRAGMENT_ORIGIN (block))
27541 block = BLOCK_FRAGMENT_ORIGIN (block);
27542 /* Can the entry point ever not be at the beginning of an
27543 unfragmented lexical block? */
27544 else if (!(BLOCK_FRAGMENT_CHAIN (block)
27545 || (cur_line_info_table
27546 && !ZERO_VIEW_P (cur_line_info_table->view))))
27547 return;
27548
27549 if (!inline_entry_data_table)
27550 inline_entry_data_table
27551 = hash_table<inline_entry_data_hasher>::create_ggc (10);
27552
27553
27554 inline_entry_data **iedp
27555 = inline_entry_data_table->find_slot_with_hash (block,
27556 htab_hash_pointer (block),
27557 INSERT);
27558 if (*iedp)
27559 /* ??? Ideally, we'd record all entry points for the same inlined
27560 function (some may have been duplicated by e.g. unrolling), but
27561 we have no way to represent that ATM. */
27562 return;
27563
27564 inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
27565 ied->block = block;
27566 ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
27567 ied->label_num = BLOCK_NUMBER (block);
27568 if (cur_line_info_table)
27569 ied->view = cur_line_info_table->view;
27570
27571 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27572
27573 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_INLINE_ENTRY_LABEL,
27574 BLOCK_NUMBER (block));
27575 ASM_OUTPUT_LABEL (asm_out_file, label);
27576 }
27577
27578 /* Called from finalize_size_functions for size functions so that their body
27579 can be encoded in the debug info to describe the layout of variable-length
27580 structures. */
27581
27582 static void
27583 dwarf2out_size_function (tree decl)
27584 {
27585 function_to_dwarf_procedure (decl);
27586 }
27587
27588 /* Note in one location list that text section has changed. */
27589
27590 int
27591 var_location_switch_text_section_1 (var_loc_list **slot, void *)
27592 {
27593 var_loc_list *list = *slot;
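/* Remember where this decl's location list stood when the text section
   changed; presumably anything added after this point belongs to the
   new section. */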
27594 if (list->first)
27595 list->last_before_switch
27596 = list->last->next ? list->last->next : list->last;
27597 return 1;
27598 }
27599
27600 /* Note in all location lists that text section has changed. */
27601
27602 static void
27603 var_location_switch_text_section (void)
27604 {
27605 if (decl_loc_table == NULL)
27606 return;
27607
27608 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
27609 }
27610
27611 /* Create a new line number table. */
27612
27613 static dw_line_info_table *
27614 new_line_info_table (void)
27615 {
27616 dw_line_info_table *table;
27617
27618 table = ggc_cleared_alloc<dw_line_info_table> ();
27619 table->file_num = 1;
27620 table->line_num = 1;
27621 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
27622 FORCE_RESET_NEXT_VIEW (table->view);
27623 table->symviews_since_reset = 0;
27624
27625 return table;
27626 }
27627
27628 /* Look up the "current" table into which we emit line info, so
27629 that we don't have to do it for every source line. */
27630
27631 static void
27632 set_cur_line_info_table (section *sec)
27633 {
27634 dw_line_info_table *table;
27635
27636 if (sec == text_section)
27637 table = text_section_line_info;
27638 else if (sec == cold_text_section)
27639 {
27640 table = cold_text_section_line_info;
27641 if (!table)
27642 {
27643 cold_text_section_line_info = table = new_line_info_table ();
27644 table->end_label = cold_end_label;
27645 }
27646 }
27647 else
27648 {
27649 const char *end_label;
27650
27651 if (crtl->has_bb_partition)
27652 {
27653 if (in_cold_section_p)
27654 end_label = crtl->subsections.cold_section_end_label;
27655 else
27656 end_label = crtl->subsections.hot_section_end_label;
27657 }
27658 else
27659 {
27660 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27661 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
27662 current_function_funcdef_no);
27663 end_label = ggc_strdup (label);
27664 }
27665
27666 table = new_line_info_table ();
27667 table->end_label = end_label;
27668
27669 vec_safe_push (separate_line_info, table);
27670 }
27671
27672 if (output_asm_line_debug_info ())
27673 table->is_stmt = (cur_line_info_table
27674 ? cur_line_info_table->is_stmt
27675 : DWARF_LINE_DEFAULT_IS_STMT_START);
27676 cur_line_info_table = table;
27677 }
27678
27679
27680 /* We need to reset the locations at the beginning of each
27681 function. We can't do this in the end_function hook, because the
27682 declarations that use the locations won't have been output when
27683 that hook is called. Also compute have_multiple_function_sections here. */
27684
27685 static void
27686 dwarf2out_begin_function (tree fun)
27687 {
27688 section *sec = function_section (fun);
27689
27690 if (sec != text_section)
27691 have_multiple_function_sections = true;
27692
27693 if (crtl->has_bb_partition && !cold_text_section)
27694 {
27695 gcc_assert (current_function_decl == fun);
27696 cold_text_section = unlikely_text_section ();
27697 switch_to_section (cold_text_section);
27698 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
27699 switch_to_section (sec);
27700 }
27701
27702 dwarf2out_note_section_used ();
27703 call_site_count = 0;
27704 tail_call_site_count = 0;
27705
27706 set_cur_line_info_table (sec);
27707 FORCE_RESET_NEXT_VIEW (cur_line_info_table->view);
27708 }
27709
27710 /* Helper function of dwarf2out_end_function, called only after emitting
27711 the very first function into assembly. Check if some .debug_loc range
27712 might end with a .LVL* label that could be equal to .Ltext0.
27713 In that case we must force using absolute addresses in .debug_loc ranges,
27714 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
27715 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
27716 list terminator.
27717 Set have_multiple_function_sections to true in that case and
27718 terminate htab traversal. */
27719
27720 int
27721 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
27722 {
27723 var_loc_list *entry = *slot;
27724 struct var_loc_node *node;
27725
27726 node = entry->first;
27727 if (node && node->next && node->next->label)
27728 {
27729 unsigned int i;
27730 const char *label = node->next->label;
27731 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
27732
27733 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
27734 {
27735 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
27736 if (strcmp (label, loclabel) == 0)
27737 {
27738 have_multiple_function_sections = true;
27739 return 0;
27740 }
27741 }
27742 }
27743 return 1;
27744 }
27745
27746 /* Hook called after emitting a function into assembly.
27747 This does something only for the very first function emitted. */
27748
27749 static void
27750 dwarf2out_end_function (unsigned int)
27751 {
27752 if (in_first_function_p
27753 && !have_multiple_function_sections
27754 && first_loclabel_num_not_at_text_label
27755 && decl_loc_table)
27756 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
27757 in_first_function_p = false;
27758 maybe_at_text_label_p = false;
27759 }
27760
27761 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
27762 front-ends register a translation unit even before dwarf2out_init is
27763 called. */
27764 static tree main_translation_unit = NULL_TREE;
27765
27766 /* Hook called by front-ends after they have built their main translation
27767 unit. Associate comp_unit_die with UNIT. */
27768
27769 static void
27770 dwarf2out_register_main_translation_unit (tree unit)
27771 {
27772 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
27773 && main_translation_unit == NULL_TREE);
27774 main_translation_unit = unit;
27775 /* If dwarf2out_init has not been called yet, it will perform the association
27776 itself looking at main_translation_unit. */
27777 if (decl_die_table != NULL)
27778 equate_decl_number_to_die (unit, comp_unit_die ());
27779 }
27780
27781 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
27782
27783 static void
27784 push_dw_line_info_entry (dw_line_info_table *table,
27785 enum dw_line_info_opcode opcode, unsigned int val)
27786 {
27787 dw_line_info_entry e;
27788 e.opcode = opcode;
27789 e.val = val;
27790 vec_safe_push (table->entries, e);
27791 }
27792
27793 /* Output a label to mark the beginning of a source code line entry
27794 and record information relating to this source line, in
27795 'line_info_table' for later output of the .debug_line section. */
27796 /* ??? The discriminator parameter ought to be unsigned. */
27797
27798 static void
27799 dwarf2out_source_line (unsigned int line, unsigned int column,
27800 const char *filename,
27801 int discriminator, bool is_stmt)
27802 {
27803 unsigned int file_num;
27804 dw_line_info_table *table;
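/* Monotonically increasing id handed out for symbolic location view
   numbers (the .LVU labels emitted with .loc view below). */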
27805 static var_loc_view lvugid;
27806
27807 if (debug_info_level < DINFO_LEVEL_TERSE)
27808 return;
27809
27810 table = cur_line_info_table;
27811
27812 if (line == 0)
27813 {
27814 if (debug_variable_location_views
27815 && output_asm_line_debug_info ()
27816 && table && !RESETTING_VIEW_P (table->view))
27817 {
27818 /* If we're using the assembler to compute view numbers, we
27819 can't issue a .loc directive for line zero, so we can't
27820 get a view number at this point. We might attempt to
27821 compute it from the previous view, or equate it to a
27822 subsequent view (though it might not be there!), but
27823 since we're omitting the line number entry, we might as
27824 well omit the view number as well. That means pretending
27825 it's a view number zero, which might very well turn out
27826 to be correct. ??? Extend the assembler so that the
27827 compiler could emit e.g. ".locview .LVU#", to output a
27828 view without changing line number information. We'd then
27829 have to count it in symviews_since_reset; when it's omitted,
27830 it doesn't count. */
27831 if (!zero_view_p)
27832 zero_view_p = BITMAP_GGC_ALLOC ();
27833 bitmap_set_bit (zero_view_p, table->view);
27834 if (flag_debug_asm)
27835 {
27836 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27837 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27838 fprintf (asm_out_file, "\t%s line 0, omitted view ",
27839 ASM_COMMENT_START);
27840 assemble_name (asm_out_file, label);
27841 putc ('\n', asm_out_file);
27842 }
27843 table->view = ++lvugid;
27844 }
27845 return;
27846 }
27847
27848 /* The discriminator column was added in DWARF 4. Simplify the code
27849 below by removing the discriminator if we're not supposed to output it. */
27850 if (dwarf_version < 4 && dwarf_strict)
27851 discriminator = 0;
27852
27853 if (!debug_column_info)
27854 column = 0;
27855
27856 file_num = maybe_emit_file (lookup_filename (filename));
27857
27858 /* ??? TODO: Elide duplicate line number entries. Traditionally,
27859 the debugger has used the second (possibly duplicate) line number
27860 at the beginning of the function to mark the end of the prologue.
27861 We could eliminate any other duplicates within the function. For
27862 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
27863 that second line number entry. */
27864 /* Recall that this end-of-prologue indication is *not* the same thing
27865 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
27866 to which the hook corresponds, follows the last insn that was
27867 emitted by gen_prologue. What we need is to precede the first insn
27868 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
27869 insn that corresponds to something the user wrote. These may be
27870 very different locations once scheduling is enabled. */
27871
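/* Note the leading 0: this duplicate-elision check is deliberately
   disabled for now; see the TODO above. */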
27872 if (0 && file_num == table->file_num
27873 && line == table->line_num
27874 && column == table->column_num
27875 && discriminator == table->discrim_num
27876 && is_stmt == table->is_stmt)
27877 return;
27878
27879 switch_to_section (current_function_section ());
27880
27881 /* If requested, emit something human-readable. */
27882 if (flag_debug_asm)
27883 {
27884 if (debug_column_info)
27885 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
27886 filename, line, column);
27887 else
27888 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
27889 filename, line);
27890 }
27891
27892 if (output_asm_line_debug_info ())
27893 {
27894 /* Emit the .loc directive understood by GNU as. */
27895 /* "\t.loc %u %u 0 is_stmt %u discriminator %u",
27896 file_num, line, is_stmt, discriminator */
27897 fputs ("\t.loc ", asm_out_file);
27898 fprint_ul (asm_out_file, file_num);
27899 putc (' ', asm_out_file);
27900 fprint_ul (asm_out_file, line);
27901 putc (' ', asm_out_file);
27902 fprint_ul (asm_out_file, column);
27903
27904 if (is_stmt != table->is_stmt)
27905 {
27906 #if HAVE_GAS_LOC_STMT
27907 fputs (" is_stmt ", asm_out_file);
27908 putc (is_stmt ? '1' : '0', asm_out_file);
27909 #endif
27910 }
27911 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
27912 {
27913 gcc_assert (discriminator > 0);
27914 fputs (" discriminator ", asm_out_file);
27915 fprint_ul (asm_out_file, (unsigned long) discriminator);
27916 }
27917 if (debug_variable_location_views)
27918 {
27919 if (!RESETTING_VIEW_P (table->view))
27920 {
27921 table->symviews_since_reset++;
27922 if (table->symviews_since_reset > symview_upper_bound)
27923 symview_upper_bound = table->symviews_since_reset;
27924 /* When we're using the assembler to compute view
27925 numbers, we output symbolic labels after "view" in
27926 .loc directives, and the assembler will set them for
27927 us, so that we can refer to the view numbers in
27928 location lists. The only exceptions are when we know
27929 a view will be zero: "-0" is a forced reset, used
27930 e.g. in the beginning of functions, whereas "0" tells
27931 the assembler to check that there was a PC change
27932 since the previous view, in a way that implicitly
27933 resets the next view. */
27934 fputs (" view ", asm_out_file);
27935 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27936 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27937 assemble_name (asm_out_file, label);
27938 table->view = ++lvugid;
27939 }
27940 else
27941 {
27942 table->symviews_since_reset = 0;
27943 if (FORCE_RESETTING_VIEW_P (table->view))
27944 fputs (" view -0", asm_out_file);
27945 else
27946 fputs (" view 0", asm_out_file);
27947 /* Mark the present view as a zero view. Earlier debug
27948 binds may have already added its id to loclists to be
27949 emitted later, so we can't reuse the id for something
27950 else. However, it's good to know whether a view is
27951 known to be zero, because then we may be able to
27952 optimize out locviews that are all zeros, so take
27953 note of it in zero_view_p. */
27954 if (!zero_view_p)
27955 zero_view_p = BITMAP_GGC_ALLOC ();
27956 bitmap_set_bit (zero_view_p, lvugid);
27957 table->view = ++lvugid;
27958 }
27959 }
27960 putc ('\n', asm_out_file);
27961 }
27962 else
27963 {
27964 unsigned int label_num = ++line_info_label_num;
27965
27966 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
27967
27968 if (debug_variable_location_views && !RESETTING_VIEW_P (table->view))
27969 push_dw_line_info_entry (table, LI_adv_address, label_num);
27970 else
27971 push_dw_line_info_entry (table, LI_set_address, label_num);
27972 if (debug_variable_location_views)
27973 {
27974 bool resetting = FORCE_RESETTING_VIEW_P (table->view);
27975 if (resetting)
27976 table->view = 0;
27977
27978 if (flag_debug_asm)
27979 fprintf (asm_out_file, "\t%s view %s%d\n",
27980 ASM_COMMENT_START,
27981 resetting ? "-" : "",
27982 table->view);
27983
27984 table->view++;
27985 }
27986 if (file_num != table->file_num)
27987 push_dw_line_info_entry (table, LI_set_file, file_num);
27988 if (discriminator != table->discrim_num)
27989 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
27990 if (is_stmt != table->is_stmt)
27991 push_dw_line_info_entry (table, LI_negate_stmt, 0);
27992 push_dw_line_info_entry (table, LI_set_line, line);
27993 if (debug_column_info)
27994 push_dw_line_info_entry (table, LI_set_column, column);
27995 }
27996
27997 table->file_num = file_num;
27998 table->line_num = line;
27999 table->column_num = column;
28000 table->discrim_num = discriminator;
28001 table->is_stmt = is_stmt;
28002 table->in_use = true;
28003 }
28004
28005 /* Record the beginning of a new source file. */
28006
28007 static void
28008 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
28009 {
28010 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28011 {
28012 macinfo_entry e;
28013 e.code = DW_MACINFO_start_file;
28014 e.lineno = lineno;
28015 e.info = ggc_strdup (filename);
28016 vec_safe_push (macinfo_table, e);
28017 }
28018 }
28019
28020 /* Record the end of a source file. */
28021
28022 static void
28023 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
28024 {
28025 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28026 {
28027 macinfo_entry e;
28028 e.code = DW_MACINFO_end_file;
28029 e.lineno = lineno;
28030 e.info = NULL;
28031 vec_safe_push (macinfo_table, e);
28032 }
28033 }
28034
28035 /* Called from debug_define in toplev.c. The `buffer' parameter contains
28036 the tail part of the directive line, i.e. the part which is past the
28037 initial whitespace, #, whitespace, directive-name, whitespace part. */
28038
28039 static void
28040 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
28041 const char *buffer ATTRIBUTE_UNUSED)
28042 {
28043 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28044 {
28045 macinfo_entry e;
28046 /* Insert a dummy first entry to be able to optimize the whole
28047 predefined macro block using DW_MACRO_import. */
28048 if (macinfo_table->is_empty () && lineno <= 1)
28049 {
28050 e.code = 0;
28051 e.lineno = 0;
28052 e.info = NULL;
28053 vec_safe_push (macinfo_table, e);
28054 }
28055 e.code = DW_MACINFO_define;
28056 e.lineno = lineno;
28057 e.info = ggc_strdup (buffer);
28058 vec_safe_push (macinfo_table, e);
28059 }
28060 }
28061
28062 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
28063 the tail part of the directive line, i.e. the part which is past the
28064 initial whitespace, #, whitespace, directive-name, whitespace part. */
28065
28066 static void
28067 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
28068 const char *buffer ATTRIBUTE_UNUSED)
28069 {
28070 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28071 {
28072 macinfo_entry e;
28073 /* Insert a dummy first entry to be able to optimize the whole
28074 predefined macro block using DW_MACRO_import. */
28075 if (macinfo_table->is_empty () && lineno <= 1)
28076 {
28077 e.code = 0;
28078 e.lineno = 0;
28079 e.info = NULL;
28080 vec_safe_push (macinfo_table, e);
28081 }
28082 e.code = DW_MACINFO_undef;
28083 e.lineno = lineno;
28084 e.info = ggc_strdup (buffer);
28085 vec_safe_push (macinfo_table, e);
28086 }
28087 }
28088
28089 /* Helpers to manipulate the hash table of macinfo entries. */
28090
28091 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
28092 {
28093 static inline hashval_t hash (const macinfo_entry *);
28094 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
28095 };
28096
28097 inline hashval_t
28098 macinfo_entry_hasher::hash (const macinfo_entry *entry)
28099 {
28100 return htab_hash_string (entry->info);
28101 }
28102
28103 inline bool
28104 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
28105 const macinfo_entry *entry2)
28106 {
28107 return !strcmp (entry1->info, entry2->info);
28108 }
28109
28110 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
28111
28112 /* Output a single .debug_macinfo entry. */
28113
28114 static void
28115 output_macinfo_op (macinfo_entry *ref)
28116 {
28117 int file_num;
28118 size_t len;
28119 struct indirect_string_node *node;
28120 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28121 struct dwarf_file_data *fd;
28122
28123 switch (ref->code)
28124 {
28125 case DW_MACINFO_start_file:
28126 fd = lookup_filename (ref->info);
28127 file_num = maybe_emit_file (fd);
28128 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
28129 dw2_asm_output_data_uleb128 (ref->lineno,
28130 "Included from line number %lu",
28131 (unsigned long) ref->lineno);
28132 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
28133 break;
28134 case DW_MACINFO_end_file:
28135 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
28136 break;
28137 case DW_MACINFO_define:
28138 case DW_MACINFO_undef:
28139 len = strlen (ref->info) + 1;
28140 if (!dwarf_strict
28141 && len > DWARF_OFFSET_SIZE
28142 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28143 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28144 {
28145 ref->code = ref->code == DW_MACINFO_define
28146 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
28147 output_macinfo_op (ref);
28148 return;
28149 }
28150 dw2_asm_output_data (1, ref->code,
28151 ref->code == DW_MACINFO_define
28152 ? "Define macro" : "Undefine macro");
28153 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28154 (unsigned long) ref->lineno);
28155 dw2_asm_output_nstring (ref->info, -1, "The macro");
28156 break;
28157 case DW_MACRO_define_strp:
28158 case DW_MACRO_undef_strp:
28159 /* NB: dwarf2out_finish performs:
28160 1. save_macinfo_strings
28161 2. hash table traverse of index_string
28162 3. output_macinfo -> output_macinfo_op
28163 4. output_indirect_strings
28164 -> hash table traverse of output_index_string
28165
28166 When output_macinfo_op is called, all index strings have been
28167 added to hash table by save_macinfo_strings and we can't pass
28168 INSERT to find_slot_with_hash which may expand hash table, even
28169 if no insertion is needed, and change hash table traverse order
28170 between index_string and output_index_string. */
28171 node = find_AT_string (ref->info, NO_INSERT);
28172 gcc_assert (node
28173 && (node->form == DW_FORM_strp
28174 || node->form == dwarf_FORM (DW_FORM_strx)));
28175 dw2_asm_output_data (1, ref->code,
28176 ref->code == DW_MACRO_define_strp
28177 ? "Define macro strp"
28178 : "Undefine macro strp");
28179 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28180 (unsigned long) ref->lineno);
28181 if (node->form == DW_FORM_strp)
28182 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
28183 debug_str_section, "The macro: \"%s\"",
28184 ref->info);
28185 else
28186 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
28187 ref->info);
28188 break;
28189 case DW_MACRO_import:
28190 dw2_asm_output_data (1, ref->code, "Import");
28191 ASM_GENERATE_INTERNAL_LABEL (label,
28192 DEBUG_MACRO_SECTION_LABEL,
28193 ref->lineno + macinfo_label_base);
28194 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
28195 break;
28196 default:
28197 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
28198 ASM_COMMENT_START, (unsigned long) ref->code);
28199 break;
28200 }
28201 }
28202
28203 /* Attempt to make a sequence of define/undef macinfo ops shareable with
28204 the .debug_macinfo sections of other compilation units. IDX is the
28205 index of the first define/undef; return the number of ops that should
28206 be emitted in a comdat .debug_macinfo section and emit
28207 a DW_MACRO_import entry referencing it.
28208 If the define/undef entry should be emitted normally, return 0. */
28209
28210 static unsigned
28211 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
28212 macinfo_hash_type **macinfo_htab)
28213 {
28214 macinfo_entry *first, *second, *cur, *inc;
28215 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
28216 unsigned char checksum[16];
28217 struct md5_ctx ctx;
28218 char *grp_name, *tail;
28219 const char *base;
28220 unsigned int i, count, encoded_filename_len, linebuf_len;
28221 macinfo_entry **slot;
28222
28223 first = &(*macinfo_table)[idx];
28224 second = &(*macinfo_table)[idx + 1];
28225
28226 /* Optimize only if there are at least two consecutive define/undef ops,
28227 and either all of them are before first DW_MACINFO_start_file
28228 with lineno {0,1} (i.e. predefined macro block), or all of them are
28229 in some included header file. */
28230 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
28231 return 0;
28232 if (vec_safe_is_empty (files))
28233 {
28234 if (first->lineno > 1 || second->lineno > 1)
28235 return 0;
28236 }
28237 else if (first->lineno == 0)
28238 return 0;
28239
28240 /* Find the last define/undef entry that can be grouped together
28241 with first and at the same time compute md5 checksum of their
28242 codes, linenumbers and strings. */
28243 md5_init_ctx (&ctx);
28244 for (i = idx; macinfo_table->iterate (i, &cur); i++)
28245 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
28246 break;
28247 else if (vec_safe_is_empty (files) && cur->lineno > 1)
28248 break;
28249 else
28250 {
28251 unsigned char code = cur->code;
28252 md5_process_bytes (&code, 1, &ctx);
28253 checksum_uleb128 (cur->lineno, &ctx);
28254 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
28255 }
28256 md5_finish_ctx (&ctx, checksum);
28257 count = i - idx;
28258
28259 /* From the containing include filename (if any) pick up just
28260 usable characters from its basename. */
28261 if (vec_safe_is_empty (files))
28262 base = "";
28263 else
28264 base = lbasename (files->last ().info);
28265 for (encoded_filename_len = 0, i = 0; base[i]; i++)
28266 if (ISIDNUM (base[i]) || base[i] == '.')
28267 encoded_filename_len++;
28268 /* Count . at the end. */
28269 if (encoded_filename_len)
28270 encoded_filename_len++;
28271
28272 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
28273 linebuf_len = strlen (linebuf);
28274
28275 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
28276 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
28277 + 16 * 2 + 1);
28278 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
28279 tail = grp_name + 4;
28280 if (encoded_filename_len)
28281 {
28282 for (i = 0; base[i]; i++)
28283 if (ISIDNUM (base[i]) || base[i] == '.')
28284 *tail++ = base[i];
28285 *tail++ = '.';
28286 }
28287 memcpy (tail, linebuf, linebuf_len);
28288 tail += linebuf_len;
28289 *tail++ = '.';
28290 for (i = 0; i < 16; i++)
28291 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
28292
28293 /* Construct a macinfo_entry for DW_MACRO_import
28294 in the empty vector entry before the first define/undef. */
28295 inc = &(*macinfo_table)[idx - 1];
28296 inc->code = DW_MACRO_import;
28297 inc->lineno = 0;
28298 inc->info = ggc_strdup (grp_name);
28299 if (!*macinfo_htab)
28300 *macinfo_htab = new macinfo_hash_type (10);
28301 /* Avoid emitting duplicates. */
28302 slot = (*macinfo_htab)->find_slot (inc, INSERT);
28303 if (*slot != NULL)
28304 {
28305 inc->code = 0;
28306 inc->info = NULL;
28307 /* If such an entry has been used before, just emit
28308 a DW_MACRO_import op. */
28309 inc = *slot;
28310 output_macinfo_op (inc);
28311 /* And clear all macinfo_entry in the range to avoid emitting them
28312 in the second pass. */
28313 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
28314 {
28315 cur->code = 0;
28316 cur->info = NULL;
28317 }
28318 }
28319 else
28320 {
28321 *slot = inc;
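/* The lineno field is reused here to number the comdat section label
   that this import refers to; output_macinfo_op and the second pass in
   output_macinfo both add macinfo_label_base to it. */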
28322 inc->lineno = (*macinfo_htab)->elements ();
28323 output_macinfo_op (inc);
28324 }
28325 return count;
28326 }
28327
28328 /* Save any strings needed by the macinfo table in the debug str
28329 table. All strings must be collected into the table by the time
28330 index_string is called. */
28331
28332 static void
28333 save_macinfo_strings (void)
28334 {
28335 unsigned len;
28336 unsigned i;
28337 macinfo_entry *ref;
28338
28339 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
28340 {
28341 switch (ref->code)
28342 {
28343 /* Match the logic in output_macinfo_op to decide on
28344 indirect strings. */
28345 case DW_MACINFO_define:
28346 case DW_MACINFO_undef:
28347 len = strlen (ref->info) + 1;
28348 if (!dwarf_strict
28349 && len > DWARF_OFFSET_SIZE
28350 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28351 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28352 set_indirect_string (find_AT_string (ref->info));
28353 break;
28354 case DW_MACINFO_start_file:
28355 /* -gsplit-dwarf -g3 will also output filename as indirect
28356 string. */
28357 if (!dwarf_split_debug_info)
28358 break;
28359 /* Fall through. */
28360 case DW_MACRO_define_strp:
28361 case DW_MACRO_undef_strp:
28362 set_indirect_string (find_AT_string (ref->info));
28363 break;
28364 default:
28365 break;
28366 }
28367 }
28368 }
28369
28370 /* Output macinfo section(s). */
28371
28372 static void
28373 output_macinfo (const char *debug_line_label, bool early_lto_debug)
28374 {
28375 unsigned i;
28376 unsigned long length = vec_safe_length (macinfo_table);
28377 macinfo_entry *ref;
28378 vec<macinfo_entry, va_gc> *files = NULL;
28379 macinfo_hash_type *macinfo_htab = NULL;
28380 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
28381
28382 if (! length)
28383 return;
28384
28385 /* output_macinfo* uses these interchangeably. */
28386 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
28387 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
28388 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
28389 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
28390
28391 /* AIX Assembler inserts the length, so adjust the reference to match the
28392 offset expected by debuggers. */
28393 strcpy (dl_section_ref, debug_line_label);
28394 if (XCOFF_DEBUGGING_INFO)
28395 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
28396
28397 /* For .debug_macro emit the section header. */
28398 if (!dwarf_strict || dwarf_version >= 5)
28399 {
28400 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28401 "DWARF macro version number");
28402 if (DWARF_OFFSET_SIZE == 8)
28403 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
28404 else
28405 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
28406 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
28407 debug_line_section, NULL);
28408 }
28409
28410 /* The first loop emits the primary .debug_macinfo section, clearing
28411 each macinfo_entry after its op has been emitted. If a longer range
28412 of define/undef ops can be optimized using DW_MACRO_import, the
28413 DW_MACRO_import op is emitted and kept in the vector slot before the
28414 first define/undef of the range, while the define/undef ops themselves
28415 are not emitted here but kept for the second pass below. */
28416 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28417 {
28418 switch (ref->code)
28419 {
28420 case DW_MACINFO_start_file:
28421 vec_safe_push (files, *ref);
28422 break;
28423 case DW_MACINFO_end_file:
28424 if (!vec_safe_is_empty (files))
28425 files->pop ();
28426 break;
28427 case DW_MACINFO_define:
28428 case DW_MACINFO_undef:
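/* A range is only shareable when the preceding table slot is free
   (code 0), because optimize_macinfo_range stores the DW_MACRO_import
   op in that slot. */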
28429 if ((!dwarf_strict || dwarf_version >= 5)
28430 && HAVE_COMDAT_GROUP
28431 && vec_safe_length (files) != 1
28432 && i > 0
28433 && i + 1 < length
28434 && (*macinfo_table)[i - 1].code == 0)
28435 {
28436 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
28437 if (count)
28438 {
28439 i += count - 1;
28440 continue;
28441 }
28442 }
28443 break;
28444 case 0:
28445 /* A dummy entry may be inserted at the beginning to be able
28446 to optimize the whole block of predefined macros. */
28447 if (i == 0)
28448 continue;
28449 default:
28450 break;
28451 }
28452 output_macinfo_op (ref);
28453 ref->info = NULL;
28454 ref->code = 0;
28455 }
28456
28457 if (!macinfo_htab)
28458 return;
28459
28460 /* Save the number of transparent includes so we can adjust the
28461 label number for the fat LTO object DWARF. */
28462 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
28463
28464 delete macinfo_htab;
28465 macinfo_htab = NULL;
28466
28467 /* If any DW_MACRO_import ops were used, then at each such entry
28468 terminate the current chain, switch to a new comdat .debug_macinfo
28469 section and emit the define/undef entries within it. */
28470 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28471 switch (ref->code)
28472 {
28473 case 0:
28474 continue;
28475 case DW_MACRO_import:
28476 {
28477 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28478 tree comdat_key = get_identifier (ref->info);
28479 /* Terminate the previous .debug_macinfo section. */
28480 dw2_asm_output_data (1, 0, "End compilation unit");
28481 targetm.asm_out.named_section (debug_macinfo_section_name,
28482 SECTION_DEBUG
28483 | SECTION_LINKONCE
28484 | (early_lto_debug
28485 ? SECTION_EXCLUDE : 0),
28486 comdat_key);
28487 ASM_GENERATE_INTERNAL_LABEL (label,
28488 DEBUG_MACRO_SECTION_LABEL,
28489 ref->lineno + macinfo_label_base);
28490 ASM_OUTPUT_LABEL (asm_out_file, label);
28491 ref->code = 0;
28492 ref->info = NULL;
28493 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28494 "DWARF macro version number");
28495 if (DWARF_OFFSET_SIZE == 8)
28496 dw2_asm_output_data (1, 1, "Flags: 64-bit");
28497 else
28498 dw2_asm_output_data (1, 0, "Flags: 32-bit");
28499 }
28500 break;
28501 case DW_MACINFO_define:
28502 case DW_MACINFO_undef:
28503 output_macinfo_op (ref);
28504 ref->code = 0;
28505 ref->info = NULL;
28506 break;
28507 default:
28508 gcc_unreachable ();
28509 }
28510
28511 macinfo_label_base += macinfo_label_base_adj;
28512 }
28513
28514 /* Initialize the various sections and labels for dwarf output; if
28515 EARLY_LTO_DEBUG, set up the sections used for early LTO debug info.
28516 Returns the generation (zero-based number of times the function was called). */
28517
28518 static unsigned
28519 init_sections_and_labels (bool early_lto_debug)
28520 {
28521 /* As we may get called multiple times have a generation count for
28522 labels. */
28523 static unsigned generation = 0;
28524
28525 if (early_lto_debug)
28526 {
28527 if (!dwarf_split_debug_info)
28528 {
28529 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28530 SECTION_DEBUG | SECTION_EXCLUDE,
28531 NULL);
28532 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
28533 SECTION_DEBUG | SECTION_EXCLUDE,
28534 NULL);
28535 debug_macinfo_section_name
28536 = ((dwarf_strict && dwarf_version < 5)
28537 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
28538 debug_macinfo_section = get_section (debug_macinfo_section_name,
28539 SECTION_DEBUG
28540 | SECTION_EXCLUDE, NULL);
28541 }
28542 else
28543 {
28544 /* ??? Which of the following do we need early? */
28545 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
28546 SECTION_DEBUG | SECTION_EXCLUDE,
28547 NULL);
28548 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
28549 SECTION_DEBUG | SECTION_EXCLUDE,
28550 NULL);
28551 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28552 SECTION_DEBUG
28553 | SECTION_EXCLUDE, NULL);
28554 debug_skeleton_abbrev_section
28555 = get_section (DEBUG_LTO_ABBREV_SECTION,
28556 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28557 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28558 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28559 generation);
28560
28561 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28562 stay in the main .o, but the skeleton_line goes into the split
28563 off dwo. */
28564 debug_skeleton_line_section
28565 = get_section (DEBUG_LTO_LINE_SECTION,
28566 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28567 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28568 DEBUG_SKELETON_LINE_SECTION_LABEL,
28569 generation);
28570 debug_str_offsets_section
28571 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
28572 SECTION_DEBUG | SECTION_EXCLUDE,
28573 NULL);
28574 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28575 DEBUG_SKELETON_INFO_SECTION_LABEL,
28576 generation);
28577 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
28578 DEBUG_STR_DWO_SECTION_FLAGS,
28579 NULL);
28580 debug_macinfo_section_name
28581 = ((dwarf_strict && dwarf_version < 5)
28582 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
28583 debug_macinfo_section = get_section (debug_macinfo_section_name,
28584 SECTION_DEBUG | SECTION_EXCLUDE,
28585 NULL);
28586 }
28587 /* For macro info and the file table we have to refer to a
28588 debug_line section. */
28589 debug_line_section = get_section (DEBUG_LTO_LINE_SECTION,
28590 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28591 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28592 DEBUG_LINE_SECTION_LABEL, generation);
28593
28594 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
28595 DEBUG_STR_SECTION_FLAGS
28596 | SECTION_EXCLUDE, NULL);
28597 if (!dwarf_split_debug_info)
28598 debug_line_str_section
28599 = get_section (DEBUG_LTO_LINE_STR_SECTION,
28600 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
28601 }
28602 else
28603 {
28604 if (!dwarf_split_debug_info)
28605 {
28606 debug_info_section = get_section (DEBUG_INFO_SECTION,
28607 SECTION_DEBUG, NULL);
28608 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28609 SECTION_DEBUG, NULL);
28610 debug_loc_section = get_section (dwarf_version >= 5
28611 ? DEBUG_LOCLISTS_SECTION
28612 : DEBUG_LOC_SECTION,
28613 SECTION_DEBUG, NULL);
28614 debug_macinfo_section_name
28615 = ((dwarf_strict && dwarf_version < 5)
28616 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
28617 debug_macinfo_section = get_section (debug_macinfo_section_name,
28618 SECTION_DEBUG, NULL);
28619 }
28620 else
28621 {
28622 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
28623 SECTION_DEBUG | SECTION_EXCLUDE,
28624 NULL);
28625 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
28626 SECTION_DEBUG | SECTION_EXCLUDE,
28627 NULL);
28628 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
28629 SECTION_DEBUG, NULL);
28630 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
28631 SECTION_DEBUG, NULL);
28632 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28633 SECTION_DEBUG, NULL);
28634 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28635 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28636 generation);
28637
28638 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28639 stay in the main .o, but the skeleton_line goes into the
28640 split off dwo. */
28641 debug_skeleton_line_section
28642 = get_section (DEBUG_DWO_LINE_SECTION,
28643 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28644 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28645 DEBUG_SKELETON_LINE_SECTION_LABEL,
28646 generation);
28647 debug_str_offsets_section
28648 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
28649 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28650 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28651 DEBUG_SKELETON_INFO_SECTION_LABEL,
28652 generation);
28653 debug_loc_section = get_section (dwarf_version >= 5
28654 ? DEBUG_DWO_LOCLISTS_SECTION
28655 : DEBUG_DWO_LOC_SECTION,
28656 SECTION_DEBUG | SECTION_EXCLUDE,
28657 NULL);
28658 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
28659 DEBUG_STR_DWO_SECTION_FLAGS,
28660 NULL);
28661 debug_macinfo_section_name
28662 = ((dwarf_strict && dwarf_version < 5)
28663 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
28664 debug_macinfo_section = get_section (debug_macinfo_section_name,
28665 SECTION_DEBUG | SECTION_EXCLUDE,
28666 NULL);
28667 }
28668 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
28669 SECTION_DEBUG, NULL);
28670 debug_line_section = get_section (DEBUG_LINE_SECTION,
28671 SECTION_DEBUG, NULL);
28672 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
28673 SECTION_DEBUG, NULL);
28674 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
28675 SECTION_DEBUG, NULL);
28676 debug_str_section = get_section (DEBUG_STR_SECTION,
28677 DEBUG_STR_SECTION_FLAGS, NULL);
28678 if (!dwarf_split_debug_info && !output_asm_line_debug_info ())
28679 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
28680 DEBUG_STR_SECTION_FLAGS, NULL);
28681
28682 debug_ranges_section = get_section (dwarf_version >= 5
28683 ? DEBUG_RNGLISTS_SECTION
28684 : DEBUG_RANGES_SECTION,
28685 SECTION_DEBUG, NULL);
28686 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
28687 SECTION_DEBUG, NULL);
28688 }
28689
28690 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
28691 DEBUG_ABBREV_SECTION_LABEL, generation);
28692 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
28693 DEBUG_INFO_SECTION_LABEL, generation);
28694 info_section_emitted = false;
28695 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28696 DEBUG_LINE_SECTION_LABEL, generation);
28697 /* There are up to 4 unique ranges labels per generation.
28698 See also output_rnglists. */
28699 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
28700 DEBUG_RANGES_SECTION_LABEL, generation * 4);
28701 if (dwarf_version >= 5 && dwarf_split_debug_info)
28702 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
28703 DEBUG_RANGES_SECTION_LABEL,
28704 1 + generation * 4);
28705 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
28706 DEBUG_ADDR_SECTION_LABEL, generation);
28707 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
28708 (dwarf_strict && dwarf_version < 5)
28709 ? DEBUG_MACINFO_SECTION_LABEL
28710 : DEBUG_MACRO_SECTION_LABEL, generation);
28711 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
28712 generation);
28713
28714 ++generation;
28715 return generation - 1;
28716 }
28717
28718 /* Set up for Dwarf output at the start of compilation. */
28719
28720 static void
28721 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
28722 {
28723 /* Allocate the file_table. */
28724 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
28725
28726 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28727 /* Allocate the decl_die_table. */
28728 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
28729
28730 /* Allocate the decl_loc_table. */
28731 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
28732
28733 /* Allocate the cached_dw_loc_list_table. */
28734 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
28735
28736 /* Allocate the initial hunk of the abbrev_die_table. */
28737 vec_alloc (abbrev_die_table, 256);
28738 /* Zero-th entry is allocated, but unused. */
28739 abbrev_die_table->quick_push (NULL);
28740
28741 /* Allocate the dwarf_proc_stack_usage_map. */
28742 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
28743
28744 /* Allocate the pubtypes and pubnames vectors. */
28745 vec_alloc (pubname_table, 32);
28746 vec_alloc (pubtype_table, 32);
28747
28748 vec_alloc (incomplete_types, 64);
28749
28750 vec_alloc (used_rtx_array, 32);
28751
28752 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28753 vec_alloc (macinfo_table, 64);
28754 #endif
28755
28756 /* If front-ends already registered a main translation unit but we were not
28757 ready to perform the association, do this now. */
28758 if (main_translation_unit != NULL_TREE)
28759 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
28760 }
28761
28762 /* Called before compile () starts outputting functions, variables
28763 and toplevel asms into assembly. */
28764
28765 static void
28766 dwarf2out_assembly_start (void)
28767 {
28768 if (text_section_line_info)
28769 return;
28770
28771 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28772 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
28773 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
28774 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
28775 COLD_TEXT_SECTION_LABEL, 0);
28776 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
28777
28778 switch_to_section (text_section);
28779 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
28780 #endif
28781
28782 /* Make sure the line number table for .text always exists. */
28783 text_section_line_info = new_line_info_table ();
28784 text_section_line_info->end_label = text_end_label;
28785
28786 #ifdef DWARF2_LINENO_DEBUGGING_INFO
28787 cur_line_info_table = text_section_line_info;
28788 #endif
28789
28790 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
28791 && dwarf2out_do_cfi_asm ()
28792 && !dwarf2out_do_eh_frame ())
28793 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
28794 }
28795
28796 /* A helper function for dwarf2out_finish called through
28797 htab_traverse. Assign a string its index. All strings must be
28798 collected into the table by the time index_string is called,
28799 because the indexing code relies on htab_traverse to traverse nodes
28800 in the same order for each run. */
28801
28802 int
28803 index_string (indirect_string_node **h, unsigned int *index)
28804 {
28805 indirect_string_node *node = *h;
28806
28807 find_string_form (node);
28808 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28809 {
28810 gcc_assert (node->index == NO_INDEX_ASSIGNED);
28811 node->index = *index;
28812 *index += 1;
28813 }
28814 return 1;
28815 }
28816
28817 /* A helper function for output_indirect_strings called through
28818 htab_traverse. Output the offset to a string and update the
28819 current offset. */
28820
28821 int
28822 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
28823 {
28824 indirect_string_node *node = *h;
28825
28826 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28827 {
28828 /* Assert that this node has been assigned an index. */
28829 gcc_assert (node->index != NO_INDEX_ASSIGNED
28830 && node->index != NOT_INDEXED);
28831 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
28832 "indexed string 0x%x: %s", node->index, node->str);
28833 *offset += strlen (node->str) + 1;
28834 }
28835 return 1;
28836 }
28837
28838 /* A helper function for dwarf2out_finish called through
28839 htab_traverse. Output the indexed string. */
28840
28841 int
28842 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
28843 {
28844 struct indirect_string_node *node = *h;
28845
28846 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28847 {
28848 /* Assert that the strings are output in the same order as their
28849 indexes were assigned. */
28850 gcc_assert (*cur_idx == node->index);
28851 assemble_string (node->str, strlen (node->str) + 1);
28852 *cur_idx += 1;
28853 }
28854 return 1;
28855 }
28856
28857 /* A helper function for output_indirect_strings. Counts the number
28858 of index string offsets. Must match the logic of the functions
28859 output_index_string[_offset] above. */
28860 int
28861 count_index_strings (indirect_string_node **h, unsigned int *last_idx)
28862 {
28863 struct indirect_string_node *node = *h;
28864
28865 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28866 *last_idx += 1;
28867 return 1;
28868 }
28869
28870 /* A helper function for dwarf2out_finish called through
28871 htab_traverse. Emit one queued .debug_str string. */
28872
28873 int
28874 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
28875 {
28876 struct indirect_string_node *node = *h;
28877
28878 node->form = find_string_form (node);
28879 if (node->form == form && node->refcount > 0)
28880 {
28881 ASM_OUTPUT_LABEL (asm_out_file, node->label);
28882 assemble_string (node->str, strlen (node->str) + 1);
28883 }
28884
28885 return 1;
28886 }
28887
28888 /* Output the indexed string table. */
28889
28890 static void
28891 output_indirect_strings (void)
28892 {
28893 switch_to_section (debug_str_section);
28894 if (!dwarf_split_debug_info)
28895 debug_str_hash->traverse<enum dwarf_form,
28896 output_indirect_string> (DW_FORM_strp);
28897 else
28898 {
28899 unsigned int offset = 0;
28900 unsigned int cur_idx = 0;
28901
28902 if (skeleton_debug_str_hash)
28903 skeleton_debug_str_hash->traverse<enum dwarf_form,
28904 output_indirect_string> (DW_FORM_strp);
28905
28906 switch_to_section (debug_str_offsets_section);
28907 /* For DWARF5 the .debug_str_offsets[.dwo] section needs a unit
28908 header. Note that we don't need to generate a label to the
28909 actual index table following the header here, because this is
28910 for the split dwarf case only. In a .dwo file there is only
28911 one string offsets table (and one debug info section). But
28912 if we were to start using string offset tables for the main (or
28913 skeleton) unit, we would have to add a DW_AT_str_offsets_base
28914 pointing to the actual index after the header. Split dwarf
28915 units will never have a string offsets base attribute. When
28916 a split unit is moved into a .dwp file the string offsets can
28917 be found through the .debug_cu_index section table. */
28918 if (dwarf_version >= 5)
28919 {
28920 unsigned int last_idx = 0;
28921 unsigned long str_offsets_length;
28922
28923 debug_str_hash->traverse_noresize
28924 <unsigned int *, count_index_strings> (&last_idx);
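/* The unit length excludes the length field itself but covers the
   2-byte version, the 2-byte padding and the offsets, hence the
   extra 4 bytes. */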
28925 str_offsets_length = last_idx * DWARF_OFFSET_SIZE + 4;
28926 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
28927 dw2_asm_output_data (4, 0xffffffff,
28928 "Escape value for 64-bit DWARF extension");
28929 dw2_asm_output_data (DWARF_OFFSET_SIZE, str_offsets_length,
28930 "Length of string offsets unit");
28931 dw2_asm_output_data (2, 5, "DWARF string offsets version");
28932 dw2_asm_output_data (2, 0, "Header zero padding");
28933 }
28934 debug_str_hash->traverse_noresize
28935 <unsigned int *, output_index_string_offset> (&offset);
28936 switch_to_section (debug_str_dwo_section);
28937 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
28938 (&cur_idx);
28939 }
28940 }
28941
28942 /* Callback for htab_traverse to assign an index to an entry in the
28943 table, and to write that entry to the .debug_addr section. */
28944
28945 int
28946 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
28947 {
28948 addr_table_entry *entry = *slot;
28949
28950 if (entry->refcount == 0)
28951 {
28952 gcc_assert (entry->index == NO_INDEX_ASSIGNED
28953 || entry->index == NOT_INDEXED);
28954 return 1;
28955 }
28956
28957 gcc_assert (entry->index == *cur_index);
28958 (*cur_index)++;
28959
28960 switch (entry->kind)
28961 {
28962 case ate_kind_rtx:
28963 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
28964 "0x%x", entry->index);
28965 break;
28966 case ate_kind_rtx_dtprel:
28967 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
28968 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
28969 DWARF2_ADDR_SIZE,
28970 entry->addr.rtl);
28971 fputc ('\n', asm_out_file);
28972 break;
28973 case ate_kind_label:
28974 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
28975 "0x%x", entry->index);
28976 break;
28977 default:
28978 gcc_unreachable ();
28979 }
28980 return 1;
28981 }
28982
28983 /* A helper function for dwarf2out_finish. Counts the number
28984 of indexed addresses. Must match the logic of the function
28985 output_addr_table_entry above. */
28986 int
28987 count_index_addrs (addr_table_entry **slot, unsigned int *last_idx)
28988 {
28989 addr_table_entry *entry = *slot;
28990
28991 if (entry->refcount > 0)
28992 *last_idx += 1;
28993 return 1;
28994 }
28995
28996 /* Produce the .debug_addr section. */
28997
28998 static void
28999 output_addr_table (void)
29000 {
29001 unsigned int index = 0;
29002 if (addr_index_table == NULL || addr_index_table->size () == 0)
29003 return;
29004
29005 switch_to_section (debug_addr_section);
29006 addr_index_table
29007 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
29008 }
29009
29010 #if ENABLE_ASSERT_CHECKING
29011 /* Verify that all marks are clear. */
29012
29013 static void
29014 verify_marks_clear (dw_die_ref die)
29015 {
29016 dw_die_ref c;
29017
29018 gcc_assert (! die->die_mark);
29019 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
29020 }
29021 #endif /* ENABLE_ASSERT_CHECKING */
29022
29023 /* Clear the marks for a die and its children.
29024 Be cool if the mark isn't set. */
29025
29026 static void
29027 prune_unmark_dies (dw_die_ref die)
29028 {
29029 dw_die_ref c;
29030
29031 if (die->die_mark)
29032 die->die_mark = 0;
29033 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
29034 }
29035
29036 /* Given LOC that is referenced by a DIE we're marking as used, find all
29037 the DWARF procedures it references and mark them as used too. */
29038
29039 static void
29040 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
29041 {
29042 for (; loc != NULL; loc = loc->dw_loc_next)
29043 switch (loc->dw_loc_opc)
29044 {
29045 case DW_OP_implicit_pointer:
29046 case DW_OP_convert:
29047 case DW_OP_reinterpret:
29048 case DW_OP_GNU_implicit_pointer:
29049 case DW_OP_GNU_convert:
29050 case DW_OP_GNU_reinterpret:
29051 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
29052 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
29053 break;
29054 case DW_OP_GNU_variable_value:
29055 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29056 {
29057 dw_die_ref ref
29058 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29059 if (ref == NULL)
29060 break;
29061 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29062 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29063 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29064 }
29065 /* FALLTHRU */
29066 case DW_OP_call2:
29067 case DW_OP_call4:
29068 case DW_OP_call_ref:
29069 case DW_OP_const_type:
29070 case DW_OP_GNU_const_type:
29071 case DW_OP_GNU_parameter_ref:
29072 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
29073 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
29074 break;
29075 case DW_OP_regval_type:
29076 case DW_OP_deref_type:
29077 case DW_OP_GNU_regval_type:
29078 case DW_OP_GNU_deref_type:
29079 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
29080 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
29081 break;
29082 case DW_OP_entry_value:
29083 case DW_OP_GNU_entry_value:
29084 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
29085 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
29086 break;
29087 default:
29088 break;
29089 }
29090 }
29091
29092 /* Given DIE that we're marking as used, find any other dies
29093 it references as attributes and mark them as used. */
29094
29095 static void
29096 prune_unused_types_walk_attribs (dw_die_ref die)
29097 {
29098 dw_attr_node *a;
29099 unsigned ix;
29100
29101 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29102 {
29103 switch (AT_class (a))
29104 {
29105 /* Make sure DWARF procedures referenced by location descriptions will
29106 get emitted. */
29107 case dw_val_class_loc:
29108 prune_unused_types_walk_loc_descr (AT_loc (a));
29109 break;
29110 case dw_val_class_loc_list:
29111 for (dw_loc_list_ref list = AT_loc_list (a);
29112 list != NULL;
29113 list = list->dw_loc_next)
29114 prune_unused_types_walk_loc_descr (list->expr);
29115 break;
29116
29117 case dw_val_class_view_list:
29118 /* This points to a loc_list in another attribute, so it's
29119 already covered. */
29120 break;
29121
29122 case dw_val_class_die_ref:
29123 /* A reference to another DIE.
29124 Make sure that it will get emitted.
29125 If it was broken out into a comdat group, don't follow it. */
29126 if (! AT_ref (a)->comdat_type_p
29127 || a->dw_attr == DW_AT_specification)
29128 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
29129 break;
29130
29131 case dw_val_class_str:
29132 /* Set the string's refcount to 0 so that prune_unused_types_mark
29133 accounts properly for it. */
29134 a->dw_attr_val.v.val_str->refcount = 0;
29135 break;
29136
29137 default:
29138 break;
29139 }
29140 }
29141 }
29142
29143 /* Mark the children DIEs of DIE that describe its generic parameters and arguments. */
29144
29145 static void
29146 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
29147 {
29148 dw_die_ref c;
29149
29150 if (die == NULL || die->die_child == NULL)
29151 return;
29152 c = die->die_child;
29153 do
29154 {
29155 if (is_template_parameter (c))
29156 prune_unused_types_mark (c, 1);
29157 c = c->die_sib;
29158 } while (c && c != die->die_child);
29159 }
29160
29161 /* Mark DIE as being used. If DOKIDS is true, then walk down
29162 to DIE's children. */
29163
29164 static void
29165 prune_unused_types_mark (dw_die_ref die, int dokids)
29166 {
29167 dw_die_ref c;
29168
29169 if (die->die_mark == 0)
29170 {
29171 /* We haven't done this node yet. Mark it as used. */
29172 die->die_mark = 1;
29173 /* If this is the DIE of a generic type instantiation,
29174 mark the children DIEs that describe its generic parms and
29175 args. */
29176 prune_unused_types_mark_generic_parms_dies (die);
29177
29178 /* We also have to mark its parents as used.
29179 (But we don't want to mark our parent's kids due to this,
29180 unless it is a class.) */
29181 if (die->die_parent)
29182 prune_unused_types_mark (die->die_parent,
29183 class_scope_p (die->die_parent));
29184
29185 /* Mark any referenced nodes. */
29186 prune_unused_types_walk_attribs (die);
29187
29188 /* If this node is a specification,
29189 also mark the definition, if it exists. */
29190 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
29191 prune_unused_types_mark (die->die_definition, 1);
29192 }
29193
29194 if (dokids && die->die_mark != 2)
29195 {
29196 /* We need to walk the children, but haven't done so yet.
29197 Remember that we've walked the kids. */
29198 die->die_mark = 2;
29199
29200 /* If this is an array type, we need to make sure our
29201 kids get marked, even if they're types. If we're
29202 breaking out types into comdat sections, do this
29203 for all type definitions. */
29204 if (die->die_tag == DW_TAG_array_type
29205 || (use_debug_types
29206 && is_type_die (die) && ! is_declaration_die (die)))
29207 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
29208 else
29209 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29210 }
29211 }
29212
29213 /* For local classes, check whether any static member functions were emitted
29214 and, if so, mark them. */
29215
29216 static void
29217 prune_unused_types_walk_local_classes (dw_die_ref die)
29218 {
29219 dw_die_ref c;
29220
29221 if (die->die_mark == 2)
29222 return;
29223
29224 switch (die->die_tag)
29225 {
29226 case DW_TAG_structure_type:
29227 case DW_TAG_union_type:
29228 case DW_TAG_class_type:
29229 case DW_TAG_interface_type:
29230 break;
29231
29232 case DW_TAG_subprogram:
29233 if (!get_AT_flag (die, DW_AT_declaration)
29234 || die->die_definition != NULL)
29235 prune_unused_types_mark (die, 1);
29236 return;
29237
29238 default:
29239 return;
29240 }
29241
29242 /* Mark children. */
29243 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
29244 }
29245
29246 /* Walk the tree DIE and mark types that we actually use. */
29247
29248 static void
29249 prune_unused_types_walk (dw_die_ref die)
29250 {
29251 dw_die_ref c;
29252
29253 /* Don't do anything if this node is already marked and
29254 children have been marked as well. */
29255 if (die->die_mark == 2)
29256 return;
29257
29258 switch (die->die_tag)
29259 {
29260 case DW_TAG_structure_type:
29261 case DW_TAG_union_type:
29262 case DW_TAG_class_type:
29263 case DW_TAG_interface_type:
29264 if (die->die_perennial_p)
29265 break;
29266
29267 for (c = die->die_parent; c; c = c->die_parent)
29268 if (c->die_tag == DW_TAG_subprogram)
29269 break;
29270
29271 /* Finding used static member functions inside of classes
29272 is needed just for local classes, because for other classes
29273 static member function DIEs with DW_AT_specification
29274 are emitted outside of the DW_TAG_*_type. If we ever change
29275 it, we'd need to call this even for non-local classes. */
29276 if (c)
29277 prune_unused_types_walk_local_classes (die);
29278
29279 /* It's a type node --- don't mark it. */
29280 return;
29281
29282 case DW_TAG_const_type:
29283 case DW_TAG_packed_type:
29284 case DW_TAG_pointer_type:
29285 case DW_TAG_reference_type:
29286 case DW_TAG_rvalue_reference_type:
29287 case DW_TAG_volatile_type:
29288 case DW_TAG_typedef:
29289 case DW_TAG_array_type:
29290 case DW_TAG_friend:
29291 case DW_TAG_enumeration_type:
29292 case DW_TAG_subroutine_type:
29293 case DW_TAG_string_type:
29294 case DW_TAG_set_type:
29295 case DW_TAG_subrange_type:
29296 case DW_TAG_ptr_to_member_type:
29297 case DW_TAG_file_type:
29298 /* Type nodes are useful only when other DIEs reference them --- don't
29299 mark them. */
29300 /* FALLTHROUGH */
29301
29302 case DW_TAG_dwarf_procedure:
29303 /* Likewise for DWARF procedures. */
29304
29305 if (die->die_perennial_p)
29306 break;
29307
29308 return;
29309
29310 default:
29311 /* Mark everything else. */
29312 break;
29313 }
29314
29315 if (die->die_mark == 0)
29316 {
29317 die->die_mark = 1;
29318
29319 /* Now, mark any dies referenced from here. */
29320 prune_unused_types_walk_attribs (die);
29321 }
29322
29323 die->die_mark = 2;
29324
29325 /* Mark children. */
29326 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29327 }
29328
29329 /* Increment the string counts on strings referred to from DIE's
29330 attributes. */
29331
29332 static void
29333 prune_unused_types_update_strings (dw_die_ref die)
29334 {
29335 dw_attr_node *a;
29336 unsigned ix;
29337
29338 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29339 if (AT_class (a) == dw_val_class_str)
29340 {
29341 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
29342 s->refcount++;
29343 /* Avoid unnecessarily putting strings that are used less than
29344 twice in the hash table. */
29345 if (s->refcount
29346 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
29347 {
29348 indirect_string_node **slot
29349 = debug_str_hash->find_slot_with_hash (s->str,
29350 htab_hash_string (s->str),
29351 INSERT);
29352 gcc_assert (*slot == NULL);
29353 *slot = s;
29354 }
29355 }
29356 }
29357
29358 /* Mark DIE and its children as removed. */
29359
29360 static void
29361 mark_removed (dw_die_ref die)
29362 {
29363 dw_die_ref c;
29364 die->removed = true;
29365 FOR_EACH_CHILD (die, c, mark_removed (c));
29366 }
29367
29368 /* Remove from the tree DIE any dies that aren't marked. */
29369
29370 static void
29371 prune_unused_types_prune (dw_die_ref die)
29372 {
29373 dw_die_ref c;
29374
29375 gcc_assert (die->die_mark);
29376 prune_unused_types_update_strings (die);
29377
29378 if (! die->die_child)
29379 return;
29380
29381 c = die->die_child;
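/* DIE's children are kept on a circular sibling list; splice out every run of unmarked children and recurse into the marked ones that remain.  */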
29382 do {
29383 dw_die_ref prev = c, next;
29384 for (c = c->die_sib; ! c->die_mark; c = next)
29385 if (c == die->die_child)
29386 {
29387 /* No marked children between 'prev' and the end of the list. */
29388 if (prev == c)
29389 /* No marked children at all. */
29390 die->die_child = NULL;
29391 else
29392 {
29393 prev->die_sib = c->die_sib;
29394 die->die_child = prev;
29395 }
29396 c->die_sib = NULL;
29397 mark_removed (c);
29398 return;
29399 }
29400 else
29401 {
29402 next = c->die_sib;
29403 c->die_sib = NULL;
29404 mark_removed (c);
29405 }
29406
29407 if (c != prev->die_sib)
29408 prev->die_sib = c;
29409 prune_unused_types_prune (c);
29410 } while (c != die->die_child);
29411 }
29412
29413 /* Remove dies representing declarations that we never use. */
29414
29415 static void
29416 prune_unused_types (void)
29417 {
29418 unsigned int i;
29419 limbo_die_node *node;
29420 comdat_type_node *ctnode;
29421 pubname_entry *pub;
29422 dw_die_ref base_type;
29423
29424 #if ENABLE_ASSERT_CHECKING
29425 /* All the marks should already be clear. */
29426 verify_marks_clear (comp_unit_die ());
29427 for (node = limbo_die_list; node; node = node->next)
29428 verify_marks_clear (node->die);
29429 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29430 verify_marks_clear (ctnode->root_die);
29431 #endif /* ENABLE_ASSERT_CHECKING */
29432
29433 /* Mark types that are used in global variables. */
29434 premark_types_used_by_global_vars ();
29435
29436 /* Set the mark on nodes that are actually used. */
29437 prune_unused_types_walk (comp_unit_die ());
29438 for (node = limbo_die_list; node; node = node->next)
29439 prune_unused_types_walk (node->die);
29440 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29441 {
29442 prune_unused_types_walk (ctnode->root_die);
29443 prune_unused_types_mark (ctnode->type_die, 1);
29444 }
29445
29446 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
29447 are unusual in that they are pubnames that are the children of pubtypes.
29448 They should only be marked via their parent DW_TAG_enumeration_type die,
29449 not as roots in themselves. */
29450 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
29451 if (pub->die->die_tag != DW_TAG_enumerator)
29452 prune_unused_types_mark (pub->die, 1);
29453 for (i = 0; base_types.iterate (i, &base_type); i++)
29454 prune_unused_types_mark (base_type, 1);
29455
29456 /* For -fvar-tracking-assignments, also set the mark on nodes that could be
29457 referenced by DW_TAG_call_site DW_AT_call_origin (i.e. direct call
29458 callees). */
29459 cgraph_node *cnode;
29460 FOR_EACH_FUNCTION (cnode)
29461 if (cnode->referred_to_p (false))
29462 {
29463 dw_die_ref die = lookup_decl_die (cnode->decl);
29464 if (die == NULL || die->die_mark)
29465 continue;
29466 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
29467 if (e->caller != cnode
29468 && opt_for_fn (e->caller->decl, flag_var_tracking_assignments))
29469 {
29470 prune_unused_types_mark (die, 1);
29471 break;
29472 }
29473 }
29474
29475 if (debug_str_hash)
29476 debug_str_hash->empty ();
29477 if (skeleton_debug_str_hash)
29478 skeleton_debug_str_hash->empty ();
29479 prune_unused_types_prune (comp_unit_die ());
29480 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
29481 {
29482 node = *pnode;
29483 if (!node->die->die_mark)
29484 *pnode = node->next;
29485 else
29486 {
29487 prune_unused_types_prune (node->die);
29488 pnode = &node->next;
29489 }
29490 }
29491 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29492 prune_unused_types_prune (ctnode->root_die);
29493
29494 /* Leave the marks clear. */
29495 prune_unmark_dies (comp_unit_die ());
29496 for (node = limbo_die_list; node; node = node->next)
29497 prune_unmark_dies (node->die);
29498 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29499 prune_unmark_dies (ctnode->root_die);
29500 }
29501
29502 /* Helpers to manipulate hash table of comdat type units. */
29503
29504 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
29505 {
29506 static inline hashval_t hash (const comdat_type_node *);
29507 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
29508 };
29509
29510 inline hashval_t
29511 comdat_type_hasher::hash (const comdat_type_node *type_node)
29512 {
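/* The type signature is itself a hash, so its leading bytes already make a good hash value.  */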
29513 hashval_t h;
29514 memcpy (&h, type_node->signature, sizeof (h));
29515 return h;
29516 }
29517
29518 inline bool
29519 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
29520 const comdat_type_node *type_node_2)
29521 {
29522 return (! memcmp (type_node_1->signature, type_node_2->signature,
29523 DWARF_TYPE_SIGNATURE_SIZE));
29524 }
29525
29526 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to dw_die_ref
29527 to the location where it would have been added had we known its
29528 DECL_ASSEMBLER_NAME when we added the other attributes. This will
29529 probably improve the compactness of the debug info by removing equivalent
29530 abbrevs, and it hides any differences caused by deferring the
29531 computation of the assembler name, triggered by e.g. PCH. */
29532
29533 static inline void
29534 move_linkage_attr (dw_die_ref die)
29535 {
29536 unsigned ix = vec_safe_length (die->die_attr);
29537 dw_attr_node linkage = (*die->die_attr)[ix - 1];
29538
29539 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
29540 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
29541
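/* Scan backwards for the position just past DW_AT_name and the decl coordinates, which is where the linkage name belongs.  */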
29542 while (--ix > 0)
29543 {
29544 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
29545
29546 if (prev->dw_attr == DW_AT_decl_line
29547 || prev->dw_attr == DW_AT_decl_column
29548 || prev->dw_attr == DW_AT_name)
29549 break;
29550 }
29551
29552 if (ix != vec_safe_length (die->die_attr) - 1)
29553 {
29554 die->die_attr->pop ();
29555 die->die_attr->quick_insert (ix, linkage);
29556 }
29557 }
29558
29559 /* Helper function for resolve_addr. Mark DW_TAG_base_type nodes
29560 referenced from typed stack ops and count how often they are used. */
29561
29562 static void
29563 mark_base_types (dw_loc_descr_ref loc)
29564 {
29565 dw_die_ref base_type = NULL;
29566
29567 for (; loc; loc = loc->dw_loc_next)
29568 {
29569 switch (loc->dw_loc_opc)
29570 {
29571 case DW_OP_regval_type:
29572 case DW_OP_deref_type:
29573 case DW_OP_GNU_regval_type:
29574 case DW_OP_GNU_deref_type:
29575 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
29576 break;
29577 case DW_OP_convert:
29578 case DW_OP_reinterpret:
29579 case DW_OP_GNU_convert:
29580 case DW_OP_GNU_reinterpret:
29581 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
29582 continue;
29583 /* FALLTHRU */
29584 case DW_OP_const_type:
29585 case DW_OP_GNU_const_type:
29586 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
29587 break;
29588 case DW_OP_entry_value:
29589 case DW_OP_GNU_entry_value:
29590 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
29591 continue;
29592 default:
29593 continue;
29594 }
29595 gcc_assert (base_type->die_parent == comp_unit_die ());
29596 if (base_type->die_mark)
29597 base_type->die_mark++;
29598 else
29599 {
29600 base_types.safe_push (base_type);
29601 base_type->die_mark = 1;
29602 }
29603 }
29604 }
29605
29606 /* Comparison function for sorting marked base types. */
29607
29608 static int
29609 base_type_cmp (const void *x, const void *y)
29610 {
29611 dw_die_ref dx = *(const dw_die_ref *) x;
29612 dw_die_ref dy = *(const dw_die_ref *) y;
29613 unsigned int byte_size1, byte_size2;
29614 unsigned int encoding1, encoding2;
29615 unsigned int align1, align2;
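/* Order by decreasing usage count (die_mark) first; break ties by byte size, encoding and alignment so the result is stable.  */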
29616 if (dx->die_mark > dy->die_mark)
29617 return -1;
29618 if (dx->die_mark < dy->die_mark)
29619 return 1;
29620 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
29621 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
29622 if (byte_size1 < byte_size2)
29623 return 1;
29624 if (byte_size1 > byte_size2)
29625 return -1;
29626 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
29627 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
29628 if (encoding1 < encoding2)
29629 return 1;
29630 if (encoding1 > encoding2)
29631 return -1;
29632 align1 = get_AT_unsigned (dx, DW_AT_alignment);
29633 align2 = get_AT_unsigned (dy, DW_AT_alignment);
29634 if (align1 < align2)
29635 return 1;
29636 if (align1 > align2)
29637 return -1;
29638 return 0;
29639 }
29640
29641 /* Move base types marked by mark_base_types as early as possible
29642 in the CU, sorted by decreasing usage count both to make the
29643 uleb128 references as small as possible and to make sure they
29644 will have die_offset already computed by calc_die_sizes when
29645 the sizes of typed stack loc ops are computed. */
29646
29647 static void
29648 move_marked_base_types (void)
29649 {
29650 unsigned int i;
29651 dw_die_ref base_type, die, c;
29652
29653 if (base_types.is_empty ())
29654 return;
29655
29656 /* Sort by decreasing usage count; they will be added again in that
29657 order later on. */
29658 base_types.qsort (base_type_cmp);
29659 die = comp_unit_die ();
29660 c = die->die_child;
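/* First unlink all the marked base types from their current positions in the child list.  */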
29661 do
29662 {
29663 dw_die_ref prev = c;
29664 c = c->die_sib;
29665 while (c->die_mark)
29666 {
29667 remove_child_with_prev (c, prev);
29668 /* As base types got marked, there must be at least
29669 one node other than DW_TAG_base_type. */
29670 gcc_assert (die->die_child != NULL);
29671 c = prev->die_sib;
29672 }
29673 }
29674 while (c != die->die_child);
29675 gcc_assert (die->die_child);
29676 c = die->die_child;
29677 for (i = 0; base_types.iterate (i, &base_type); i++)
29678 {
29679 base_type->die_mark = 0;
29680 base_type->die_sib = c->die_sib;
29681 c->die_sib = base_type;
29682 c = base_type;
29683 }
29684 }
29685
29686 /* Helper function for resolve_addr. Attempt to resolve
29687 one CONST_STRING and return true if successful. Similarly, verify that
29688 SYMBOL_REFs refer to variables emitted in the current CU. */
29689
29690 static bool
29691 resolve_one_addr (rtx *addr)
29692 {
29693 rtx rtl = *addr;
29694
29695 if (GET_CODE (rtl) == CONST_STRING)
29696 {
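/* Rebuild the STRING_CST and look up its constant pool entry; fail if the literal wasn't actually emitted.  */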
29697 size_t len = strlen (XSTR (rtl, 0)) + 1;
29698 tree t = build_string (len, XSTR (rtl, 0));
29699 tree tlen = size_int (len - 1);
29700 TREE_TYPE (t)
29701 = build_array_type (char_type_node, build_index_type (tlen));
29702 rtl = lookup_constant_def (t);
29703 if (!rtl || !MEM_P (rtl))
29704 return false;
29705 rtl = XEXP (rtl, 0);
29706 if (GET_CODE (rtl) == SYMBOL_REF
29707 && SYMBOL_REF_DECL (rtl)
29708 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29709 return false;
29710 vec_safe_push (used_rtx_array, rtl);
29711 *addr = rtl;
29712 return true;
29713 }
29714
29715 if (GET_CODE (rtl) == SYMBOL_REF
29716 && SYMBOL_REF_DECL (rtl))
29717 {
29718 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
29719 {
29720 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
29721 return false;
29722 }
29723 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29724 return false;
29725 }
29726
29727 if (GET_CODE (rtl) == CONST)
29728 {
29729 subrtx_ptr_iterator::array_type array;
29730 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
29731 if (!resolve_one_addr (*iter))
29732 return false;
29733 }
29734
29735 return true;
29736 }
29737
29738 /* For STRING_CST T, return the SYMBOL_REF of its constant pool entry,
29739 if possible, and, if the string hasn't been seen yet, create a
29740 DW_TAG_dwarf_procedure that can be referenced from DW_OP_implicit_pointer. */
29741
29742 static rtx
29743 string_cst_pool_decl (tree t)
29744 {
29745 rtx rtl = output_constant_def (t, 1);
29746 unsigned char *array;
29747 dw_loc_descr_ref l;
29748 tree decl;
29749 size_t len;
29750 dw_die_ref ref;
29751
29752 if (!rtl || !MEM_P (rtl))
29753 return NULL_RTX;
29754 rtl = XEXP (rtl, 0);
29755 if (GET_CODE (rtl) != SYMBOL_REF
29756 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
29757 return NULL_RTX;
29758
29759 decl = SYMBOL_REF_DECL (rtl);
29760 if (!lookup_decl_die (decl))
29761 {
29762 len = TREE_STRING_LENGTH (t);
29763 vec_safe_push (used_rtx_array, rtl);
29764 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
29765 array = ggc_vec_alloc<unsigned char> (len);
29766 memcpy (array, TREE_STRING_POINTER (t), len);
29767 l = new_loc_descr (DW_OP_implicit_value, len, 0);
29768 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
29769 l->dw_loc_oprnd2.v.val_vec.length = len;
29770 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
29771 l->dw_loc_oprnd2.v.val_vec.array = array;
29772 add_AT_loc (ref, DW_AT_location, l);
29773 equate_decl_number_to_die (decl, ref);
29774 }
29775 return rtl;
29776 }
29777
29778 /* Helper function of resolve_addr_in_expr. LOC is
29779 a DW_OP_addr followed by DW_OP_stack_value, either at the start
29780 of exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
29781 resolved. Replace it (both DW_OP_addr and DW_OP_stack_value)
29782 with DW_OP_implicit_pointer if possible
29783 and return true; if unsuccessful, return false. */
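/* For example, an unresolvable DW_OP_addr <var> DW_OP_stack_value can become DW_OP_implicit_pointer <DIE of var> <offset> when var's DIE carries a DW_AT_location or DW_AT_const_value.  */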
29784
29785 static bool
29786 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
29787 {
29788 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
29789 HOST_WIDE_INT offset = 0;
29790 dw_die_ref ref = NULL;
29791 tree decl;
29792
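/* Strip a constant offset from the address first; it becomes the offset operand of DW_OP_implicit_pointer.  */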
29793 if (GET_CODE (rtl) == CONST
29794 && GET_CODE (XEXP (rtl, 0)) == PLUS
29795 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
29796 {
29797 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
29798 rtl = XEXP (XEXP (rtl, 0), 0);
29799 }
29800 if (GET_CODE (rtl) == CONST_STRING)
29801 {
29802 size_t len = strlen (XSTR (rtl, 0)) + 1;
29803 tree t = build_string (len, XSTR (rtl, 0));
29804 tree tlen = size_int (len - 1);
29805
29806 TREE_TYPE (t)
29807 = build_array_type (char_type_node, build_index_type (tlen));
29808 rtl = string_cst_pool_decl (t);
29809 if (!rtl)
29810 return false;
29811 }
29812 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
29813 {
29814 decl = SYMBOL_REF_DECL (rtl);
29815 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
29816 {
29817 ref = lookup_decl_die (decl);
29818 if (ref && (get_AT (ref, DW_AT_location)
29819 || get_AT (ref, DW_AT_const_value)))
29820 {
29821 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
29822 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29823 loc->dw_loc_oprnd1.val_entry = NULL;
29824 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29825 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29826 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29827 loc->dw_loc_oprnd2.v.val_int = offset;
29828 return true;
29829 }
29830 }
29831 }
29832 return false;
29833 }
29834
29835 /* Helper function for resolve_addr. Handle one location
29836 expression; return false if at least one CONST_STRING or SYMBOL_REF in
29837 the location list couldn't be resolved. */
29838
29839 static bool
29840 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
29841 {
29842 dw_loc_descr_ref keep = NULL;
29843 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
29844 switch (loc->dw_loc_opc)
29845 {
29846 case DW_OP_addr:
29847 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29848 {
29849 if ((prev == NULL
29850 || prev->dw_loc_opc == DW_OP_piece
29851 || prev->dw_loc_opc == DW_OP_bit_piece)
29852 && loc->dw_loc_next
29853 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
29854 && (!dwarf_strict || dwarf_version >= 5)
29855 && optimize_one_addr_into_implicit_ptr (loc))
29856 break;
29857 return false;
29858 }
29859 break;
29860 case DW_OP_GNU_addr_index:
29861 case DW_OP_addrx:
29862 case DW_OP_GNU_const_index:
29863 case DW_OP_constx:
29864 if ((loc->dw_loc_opc == DW_OP_GNU_addr_index
29865 || loc->dw_loc_opc == DW_OP_addrx)
29866 || ((loc->dw_loc_opc == DW_OP_GNU_const_index
29867 || loc->dw_loc_opc == DW_OP_constx)
29868 && loc->dtprel))
29869 {
29870 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
29871 if (!resolve_one_addr (&rtl))
29872 return false;
29873 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
29874 loc->dw_loc_oprnd1.val_entry
29875 = add_addr_table_entry (rtl, ate_kind_rtx);
29876 }
29877 break;
29878 case DW_OP_const4u:
29879 case DW_OP_const8u:
29880 if (loc->dtprel
29881 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29882 return false;
29883 break;
29884 case DW_OP_plus_uconst:
29885 if (size_of_loc_descr (loc)
29886 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
29887 + 1
29888 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
29889 {
29890 dw_loc_descr_ref repl
29891 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
29892 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
29893 add_loc_descr (&repl, loc->dw_loc_next);
29894 *loc = *repl;
29895 }
29896 break;
29897 case DW_OP_implicit_value:
29898 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
29899 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
29900 return false;
29901 break;
29902 case DW_OP_implicit_pointer:
29903 case DW_OP_GNU_implicit_pointer:
29904 case DW_OP_GNU_parameter_ref:
29905 case DW_OP_GNU_variable_value:
29906 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29907 {
29908 dw_die_ref ref
29909 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29910 if (ref == NULL)
29911 return false;
29912 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29913 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29914 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29915 }
29916 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
29917 {
29918 if (prev == NULL
29919 && loc->dw_loc_next == NULL
29920 && AT_class (a) == dw_val_class_loc)
29921 switch (a->dw_attr)
29922 {
29923 /* The following attributes allow both exprloc and reference,
29924 so if the whole expression is DW_OP_GNU_variable_value
29925 alone we can transform it into a reference. */
29926 case DW_AT_byte_size:
29927 case DW_AT_bit_size:
29928 case DW_AT_lower_bound:
29929 case DW_AT_upper_bound:
29930 case DW_AT_bit_stride:
29931 case DW_AT_count:
29932 case DW_AT_allocated:
29933 case DW_AT_associated:
29934 case DW_AT_byte_stride:
29935 a->dw_attr_val.val_class = dw_val_class_die_ref;
29936 a->dw_attr_val.val_entry = NULL;
29937 a->dw_attr_val.v.val_die_ref.die
29938 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29939 a->dw_attr_val.v.val_die_ref.external = 0;
29940 return true;
29941 default:
29942 break;
29943 }
29944 if (dwarf_strict)
29945 return false;
29946 }
29947 break;
29948 case DW_OP_const_type:
29949 case DW_OP_regval_type:
29950 case DW_OP_deref_type:
29951 case DW_OP_convert:
29952 case DW_OP_reinterpret:
29953 case DW_OP_GNU_const_type:
29954 case DW_OP_GNU_regval_type:
29955 case DW_OP_GNU_deref_type:
29956 case DW_OP_GNU_convert:
29957 case DW_OP_GNU_reinterpret:
29958 while (loc->dw_loc_next
29959 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
29960 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
29961 {
29962 dw_die_ref base1, base2;
29963 unsigned enc1, enc2, size1, size2;
29964 if (loc->dw_loc_opc == DW_OP_regval_type
29965 || loc->dw_loc_opc == DW_OP_deref_type
29966 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29967 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29968 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
29969 else if (loc->dw_loc_oprnd1.val_class
29970 == dw_val_class_unsigned_const)
29971 break;
29972 else
29973 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29974 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
29975 == dw_val_class_unsigned_const)
29976 break;
29977 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
29978 gcc_assert (base1->die_tag == DW_TAG_base_type
29979 && base2->die_tag == DW_TAG_base_type);
29980 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
29981 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
29982 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
29983 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
29984 if (size1 == size2
29985 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
29986 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
29987 && loc != keep)
29988 || enc1 == enc2))
29989 {
29990 /* Optimize away next DW_OP_convert after
29991 adjusting LOC's base type die reference. */
29992 if (loc->dw_loc_opc == DW_OP_regval_type
29993 || loc->dw_loc_opc == DW_OP_deref_type
29994 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29995 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29996 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
29997 else
29998 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
29999 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
30000 continue;
30001 }
30002 /* Don't change integer DW_OP_convert after e.g. floating
30003 point typed stack entry. */
30004 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
30005 keep = loc->dw_loc_next;
30006 break;
30007 }
30008 break;
30009 default:
30010 break;
30011 }
30012 return true;
30013 }
30014
30015 /* Helper function of resolve_addr. DIE had a DW_AT_location consisting of
30016 DW_OP_addr alone, whose operand referred to DECL, and that DW_OP_addr
30017 couldn't be resolved. resolve_addr has already removed the
30018 DW_AT_location attribute. This function attempts to add to DIE a new
30019 DW_AT_location attribute using DW_OP_implicit_pointer, or a
30020 DW_AT_const_value attribute, if possible. */
30021
30022 static void
30023 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
30024 {
30025 if (!VAR_P (decl)
30026 || lookup_decl_die (decl) != die
30027 || DECL_EXTERNAL (decl)
30028 || !TREE_STATIC (decl)
30029 || DECL_INITIAL (decl) == NULL_TREE
30030 || DECL_P (DECL_INITIAL (decl))
30031 || get_AT (die, DW_AT_const_value))
30032 return;
30033
30034 tree init = DECL_INITIAL (decl);
30035 HOST_WIDE_INT offset = 0;
30036 /* For variables that have been optimized away and thus
30037 don't have a memory location, see if we can emit
30038 DW_AT_const_value instead. */
30039 if (tree_add_const_value_attribute (die, init))
30040 return;
30041 if (dwarf_strict && dwarf_version < 5)
30042 return;
30043 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
30044 and ADDR_EXPR refers to a decl that has DW_AT_location or
30045 DW_AT_const_value (but isn't addressable, otherwise
30046 resolving the original DW_OP_addr wouldn't fail), see if
30047 we can add DW_OP_implicit_pointer. */
30048 STRIP_NOPS (init);
30049 if (TREE_CODE (init) == POINTER_PLUS_EXPR
30050 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
30051 {
30052 offset = tree_to_shwi (TREE_OPERAND (init, 1));
30053 init = TREE_OPERAND (init, 0);
30054 STRIP_NOPS (init);
30055 }
30056 if (TREE_CODE (init) != ADDR_EXPR)
30057 return;
30058 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
30059 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
30060 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
30061 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
30062 && TREE_OPERAND (init, 0) != decl))
30063 {
30064 dw_die_ref ref;
30065 dw_loc_descr_ref l;
30066
30067 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
30068 {
30069 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
30070 if (!rtl)
30071 return;
30072 decl = SYMBOL_REF_DECL (rtl);
30073 }
30074 else
30075 decl = TREE_OPERAND (init, 0);
30076 ref = lookup_decl_die (decl);
30077 if (ref == NULL
30078 || (!get_AT (ref, DW_AT_location)
30079 && !get_AT (ref, DW_AT_const_value)))
30080 return;
30081 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
30082 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30083 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
30084 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30085 add_AT_loc (die, DW_AT_location, l);
30086 }
30087 }
30088
30089 /* Return NULL if L is a valid DWARF expression, otherwise return the first
30090 op that is not valid in a DWARF expression. */
30091
30092 static dw_loc_descr_ref
30093 non_dwarf_expression (dw_loc_descr_ref l)
30094 {
30095 while (l)
30096 {
30097 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30098 return l;
30099 switch (l->dw_loc_opc)
30100 {
30101 case DW_OP_regx:
30102 case DW_OP_implicit_value:
30103 case DW_OP_stack_value:
30104 case DW_OP_implicit_pointer:
30105 case DW_OP_GNU_implicit_pointer:
30106 case DW_OP_GNU_parameter_ref:
30107 case DW_OP_piece:
30108 case DW_OP_bit_piece:
30109 return l;
30110 default:
30111 break;
30112 }
30113 l = l->dw_loc_next;
30114 }
30115 return NULL;
30116 }
30117
30118 /* Return an adjusted copy of EXPR:
30119 If it is an empty DWARF expression, return it.
30120 If it is a valid non-empty DWARF expression,
30121 return a copy of EXPR with DW_OP_deref appended to it.
30122 If it is a DWARF expression followed by DW_OP_reg{N,x}, return a
30123 copy of the DWARF expression with DW_OP_breg{N,x} <0> appended instead.
30124 If it is a DWARF expression followed by DW_OP_stack_value, return a
30125 copy of the DWARF expression with nothing appended.
30126 Otherwise, return NULL. */
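/* For example, DW_OP_fbreg <N> becomes DW_OP_fbreg <N> DW_OP_deref, while DW_OP_regx <R> becomes DW_OP_bregx <R> 0.  */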
30127
30128 static dw_loc_descr_ref
30129 copy_deref_exprloc (dw_loc_descr_ref expr)
30130 {
30131 dw_loc_descr_ref tail = NULL;
30132
30133 if (expr == NULL)
30134 return NULL;
30135
30136 dw_loc_descr_ref l = non_dwarf_expression (expr);
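/* Give up if anything follows the first non-expression op.  */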
30137 if (l && l->dw_loc_next)
30138 return NULL;
30139
30140 if (l)
30141 {
30142 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30143 tail = new_loc_descr ((enum dwarf_location_atom)
30144 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
30145 0, 0);
30146 else
30147 switch (l->dw_loc_opc)
30148 {
30149 case DW_OP_regx:
30150 tail = new_loc_descr (DW_OP_bregx,
30151 l->dw_loc_oprnd1.v.val_unsigned, 0);
30152 break;
30153 case DW_OP_stack_value:
30154 break;
30155 default:
30156 return NULL;
30157 }
30158 }
30159 else
30160 tail = new_loc_descr (DW_OP_deref, 0, 0);
30161
30162 dw_loc_descr_ref ret = NULL, *p = &ret;
30163 while (expr != l)
30164 {
30165 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
30166 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
30167 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
30168 p = &(*p)->dw_loc_next;
30169 expr = expr->dw_loc_next;
30170 }
30171 *p = tail;
30172 return ret;
30173 }
30174
30175 /* For a DW_AT_string_length attribute with a DW_OP_GNU_variable_value
30176 reference to a variable or argument, adjust it if needed and return:
30177 -1 if the DW_AT_string_length attribute and the
30178 DW_AT_{string_length_,}byte_size attribute (if present) should be removed,
30179 0 if the attribute should be kept, perhaps with minor modifications (no rescan needed),
30180 1 if the attribute has been successfully adjusted. */
30181
30182 static int
30183 optimize_string_length (dw_attr_node *a)
30184 {
30185 dw_loc_descr_ref l = AT_loc (a), lv;
30186 dw_die_ref die;
30187 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30188 {
30189 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
30190 die = lookup_decl_die (decl);
30191 if (die)
30192 {
30193 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30194 l->dw_loc_oprnd1.v.val_die_ref.die = die;
30195 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30196 }
30197 else
30198 return -1;
30199 }
30200 else
30201 die = l->dw_loc_oprnd1.v.val_die_ref.die;
30202
30203 /* DWARF5 allows reference class, so we can then reference the DIE.
30204 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
30205 if (l->dw_loc_next != NULL && dwarf_version >= 5)
30206 {
30207 a->dw_attr_val.val_class = dw_val_class_die_ref;
30208 a->dw_attr_val.val_entry = NULL;
30209 a->dw_attr_val.v.val_die_ref.die = die;
30210 a->dw_attr_val.v.val_die_ref.external = 0;
30211 return 0;
30212 }
30213
30214 dw_attr_node *av = get_AT (die, DW_AT_location);
30215 dw_loc_list_ref d;
30216 bool non_dwarf_expr = false;
30217
30218 if (av == NULL)
30219 return dwarf_strict ? -1 : 0;
30220 switch (AT_class (av))
30221 {
30222 case dw_val_class_loc_list:
30223 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30224 if (d->expr && non_dwarf_expression (d->expr))
30225 non_dwarf_expr = true;
30226 break;
30227 case dw_val_class_view_list:
30228 gcc_unreachable ();
30229 case dw_val_class_loc:
30230 lv = AT_loc (av);
30231 if (lv == NULL)
30232 return dwarf_strict ? -1 : 0;
30233 if (non_dwarf_expression (lv))
30234 non_dwarf_expr = true;
30235 break;
30236 default:
30237 return dwarf_strict ? -1 : 0;
30238 }
30239
30240 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
30241 into DW_OP_call4 or DW_OP_GNU_variable_value into
30242 DW_OP_call4 DW_OP_deref, do so. */
30243 if (!non_dwarf_expr
30244 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
30245 {
30246 l->dw_loc_opc = DW_OP_call4;
30247 if (l->dw_loc_next)
30248 l->dw_loc_next = NULL;
30249 else
30250 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
30251 return 0;
30252 }
30253
30254 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
30255 copy over the DW_AT_location attribute from die to a. */
30256 if (l->dw_loc_next != NULL)
30257 {
30258 a->dw_attr_val = av->dw_attr_val;
30259 return 1;
30260 }
30261
30262 dw_loc_list_ref list, *p;
30263 switch (AT_class (av))
30264 {
30265 case dw_val_class_loc_list:
30266 p = &list;
30267 list = NULL;
30268 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30269 {
30270 lv = copy_deref_exprloc (d->expr);
30271 if (lv)
30272 {
30273 *p = new_loc_list (lv, d->begin, d->vbegin, d->end, d->vend, d->section);
30274 p = &(*p)->dw_loc_next;
30275 }
30276 else if (!dwarf_strict && d->expr)
30277 return 0;
30278 }
30279 if (list == NULL)
30280 return dwarf_strict ? -1 : 0;
30281 a->dw_attr_val.val_class = dw_val_class_loc_list;
30282 gen_llsym (list);
30283 *AT_loc_list_ptr (a) = list;
30284 return 1;
30285 case dw_val_class_loc:
30286 lv = copy_deref_exprloc (AT_loc (av));
30287 if (lv == NULL)
30288 return dwarf_strict ? -1 : 0;
30289 a->dw_attr_val.v.val_loc = lv;
30290 return 1;
30291 default:
30292 gcc_unreachable ();
30293 }
30294 }
30295
30296 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
30297 an address in the .rodata section if the string literal is emitted there;
30298 if it isn't found in .rodata, remove the containing location list or
30299 replace DW_AT_const_value with DW_AT_location and an empty location
30300 expression. Similarly for SYMBOL_REFs, keep only those that refer
30301 to something that has been emitted in the current CU. */
30302
30303 static void
30304 resolve_addr (dw_die_ref die)
30305 {
30306 dw_die_ref c;
30307 dw_attr_node *a;
30308 dw_loc_list_ref *curr, *start, loc;
30309 unsigned ix;
30310 bool remove_AT_byte_size = false;
30311
30312 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30313 switch (AT_class (a))
30314 {
30315 case dw_val_class_loc_list:
30316 start = curr = AT_loc_list_ptr (a);
30317 loc = *curr;
30318 gcc_assert (loc);
30319 /* The same list can be referenced more than once. See if we have
30320 already recorded the result from a previous pass. */
30321 if (loc->replaced)
30322 *curr = loc->dw_loc_next;
30323 else if (!loc->resolved_addr)
30324 {
30325 /* As things stand, we do not expect or allow one die to
30326 reference a suffix of another die's location list chain.
30327 References must be identical or completely separate.
30328 There is therefore no need to cache the result of this
30329 pass on any list other than the first; doing so
30330 would lead to unnecessary writes. */
30331 while (*curr)
30332 {
30333 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
30334 if (!resolve_addr_in_expr (a, (*curr)->expr))
30335 {
30336 dw_loc_list_ref next = (*curr)->dw_loc_next;
30337 dw_loc_descr_ref l = (*curr)->expr;
30338
30339 if (next && (*curr)->ll_symbol)
30340 {
30341 gcc_assert (!next->ll_symbol);
30342 next->ll_symbol = (*curr)->ll_symbol;
30343 next->vl_symbol = (*curr)->vl_symbol;
30344 }
30345 if (dwarf_split_debug_info)
30346 remove_loc_list_addr_table_entries (l);
30347 *curr = next;
30348 }
30349 else
30350 {
30351 mark_base_types ((*curr)->expr);
30352 curr = &(*curr)->dw_loc_next;
30353 }
30354 }
30355 if (loc == *start)
30356 loc->resolved_addr = 1;
30357 else
30358 {
30359 loc->replaced = 1;
30360 loc->dw_loc_next = *start;
30361 }
30362 }
30363 if (!*start)
30364 {
30365 remove_AT (die, a->dw_attr);
30366 ix--;
30367 }
30368 break;
30369 case dw_val_class_view_list:
30370 {
30371 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30372 gcc_checking_assert (dwarf2out_locviews_in_attribute ());
30373 dw_val_node *llnode
30374 = view_list_to_loc_list_val_node (&a->dw_attr_val);
30375 /* If we no longer have a loclist, or it no longer needs
30376 views, drop this attribute. */
30377 if (!llnode || !llnode->v.val_loc_list->vl_symbol)
30378 {
30379 remove_AT (die, a->dw_attr);
30380 ix--;
30381 }
30382 break;
30383 }
30384 case dw_val_class_loc:
30385 {
30386 dw_loc_descr_ref l = AT_loc (a);
30387 /* DW_OP_GNU_variable_value DW_OP_stack_value or
30388 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
30389 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
30390 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
30391 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
30392 with DW_FORM_ref referencing the same DIE as
30393 DW_OP_GNU_variable_value used to reference. */
30394 if (a->dw_attr == DW_AT_string_length
30395 && l
30396 && l->dw_loc_opc == DW_OP_GNU_variable_value
30397 && (l->dw_loc_next == NULL
30398 || (l->dw_loc_next->dw_loc_next == NULL
30399 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
30400 {
30401 switch (optimize_string_length (a))
30402 {
30403 case -1:
30404 remove_AT (die, a->dw_attr);
30405 ix--;
30406 /* If we drop DW_AT_string_length, we need to drop also
30407 DW_AT_{string_length_,}byte_size. */
30408 remove_AT_byte_size = true;
30409 continue;
30410 default:
30411 break;
30412 case 1:
30413 /* Even if we keep the optimized DW_AT_string_length,
30414 it might have changed AT_class, so process it again. */
30415 ix--;
30416 continue;
30417 }
30418 }
30419 /* For -gdwarf-2 don't attempt to optimize
30420 DW_AT_data_member_location containing
30421 DW_OP_plus_uconst - older consumers might
30422 rely on it being that op instead of a more complex,
30423 but shorter, location description. */
30424 if ((dwarf_version > 2
30425 || a->dw_attr != DW_AT_data_member_location
30426 || l == NULL
30427 || l->dw_loc_opc != DW_OP_plus_uconst
30428 || l->dw_loc_next != NULL)
30429 && !resolve_addr_in_expr (a, l))
30430 {
30431 if (dwarf_split_debug_info)
30432 remove_loc_list_addr_table_entries (l);
30433 if (l != NULL
30434 && l->dw_loc_next == NULL
30435 && l->dw_loc_opc == DW_OP_addr
30436 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
30437 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
30438 && a->dw_attr == DW_AT_location)
30439 {
30440 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
30441 remove_AT (die, a->dw_attr);
30442 ix--;
30443 optimize_location_into_implicit_ptr (die, decl);
30444 break;
30445 }
30446 if (a->dw_attr == DW_AT_string_length)
30447 /* If we drop DW_AT_string_length, we need to drop also
30448 DW_AT_{string_length_,}byte_size. */
30449 remove_AT_byte_size = true;
30450 remove_AT (die, a->dw_attr);
30451 ix--;
30452 }
30453 else
30454 mark_base_types (l);
30455 }
30456 break;
30457 case dw_val_class_addr:
30458 if (a->dw_attr == DW_AT_const_value
30459 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
30460 {
30461 if (AT_index (a) != NOT_INDEXED)
30462 remove_addr_table_entry (a->dw_attr_val.val_entry);
30463 remove_AT (die, a->dw_attr);
30464 ix--;
30465 }
30466 if ((die->die_tag == DW_TAG_call_site
30467 && a->dw_attr == DW_AT_call_origin)
30468 || (die->die_tag == DW_TAG_GNU_call_site
30469 && a->dw_attr == DW_AT_abstract_origin))
30470 {
30471 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
30472 dw_die_ref tdie = lookup_decl_die (tdecl);
30473 dw_die_ref cdie;
30474 if (tdie == NULL
30475 && DECL_EXTERNAL (tdecl)
30476 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
30477 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
30478 {
30479 dw_die_ref pdie = cdie;
30480 /* Make sure we don't add these DIEs into type units.
30481 We could emit skeleton DIEs for context (namespaces,
30482 outer structs/classes) and a skeleton DIE for the
30483 innermost context with DW_AT_signature pointing to the
30484 type unit. See PR78835. */
30485 while (pdie && pdie->die_tag != DW_TAG_type_unit)
30486 pdie = pdie->die_parent;
30487 if (pdie == NULL)
30488 {
30489 /* Creating a full DIE for tdecl is overly expensive and,
30490 at this point, even wrong when in the LTO phase,
30491 as it can end up generating new type DIEs we didn't
30492 output, and thus optimize_external_refs will crash. */
30493 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
30494 add_AT_flag (tdie, DW_AT_external, 1);
30495 add_AT_flag (tdie, DW_AT_declaration, 1);
30496 add_linkage_attr (tdie, tdecl);
30497 add_name_and_src_coords_attributes (tdie, tdecl, true);
30498 equate_decl_number_to_die (tdecl, tdie);
30499 }
30500 }
30501 if (tdie)
30502 {
30503 a->dw_attr_val.val_class = dw_val_class_die_ref;
30504 a->dw_attr_val.v.val_die_ref.die = tdie;
30505 a->dw_attr_val.v.val_die_ref.external = 0;
30506 }
30507 else
30508 {
30509 if (AT_index (a) != NOT_INDEXED)
30510 remove_addr_table_entry (a->dw_attr_val.val_entry);
30511 remove_AT (die, a->dw_attr);
30512 ix--;
30513 }
30514 }
30515 break;
30516 default:
30517 break;
30518 }
30519
30520 if (remove_AT_byte_size)
30521 remove_AT (die, dwarf_version >= 5
30522 ? DW_AT_string_length_byte_size
30523 : DW_AT_byte_size);
30524
30525 FOR_EACH_CHILD (die, c, resolve_addr (c));
30526 }
30527 \f
30528 /* Helper routines for optimize_location_lists.
30529 This pass tries to share identical location lists in the .debug_loc
30530 section. */
30531
30532 /* Iteratively hash operands of LOC opcode into HSTATE. */
30533
30534 static void
30535 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
30536 {
30537 dw_val_ref val1 = &loc->dw_loc_oprnd1;
30538 dw_val_ref val2 = &loc->dw_loc_oprnd2;
30539
30540 switch (loc->dw_loc_opc)
30541 {
30542 case DW_OP_const4u:
30543 case DW_OP_const8u:
30544 if (loc->dtprel)
30545 goto hash_addr;
30546 /* FALLTHRU */
30547 case DW_OP_const1u:
30548 case DW_OP_const1s:
30549 case DW_OP_const2u:
30550 case DW_OP_const2s:
30551 case DW_OP_const4s:
30552 case DW_OP_const8s:
30553 case DW_OP_constu:
30554 case DW_OP_consts:
30555 case DW_OP_pick:
30556 case DW_OP_plus_uconst:
30557 case DW_OP_breg0:
30558 case DW_OP_breg1:
30559 case DW_OP_breg2:
30560 case DW_OP_breg3:
30561 case DW_OP_breg4:
30562 case DW_OP_breg5:
30563 case DW_OP_breg6:
30564 case DW_OP_breg7:
30565 case DW_OP_breg8:
30566 case DW_OP_breg9:
30567 case DW_OP_breg10:
30568 case DW_OP_breg11:
30569 case DW_OP_breg12:
30570 case DW_OP_breg13:
30571 case DW_OP_breg14:
30572 case DW_OP_breg15:
30573 case DW_OP_breg16:
30574 case DW_OP_breg17:
30575 case DW_OP_breg18:
30576 case DW_OP_breg19:
30577 case DW_OP_breg20:
30578 case DW_OP_breg21:
30579 case DW_OP_breg22:
30580 case DW_OP_breg23:
30581 case DW_OP_breg24:
30582 case DW_OP_breg25:
30583 case DW_OP_breg26:
30584 case DW_OP_breg27:
30585 case DW_OP_breg28:
30586 case DW_OP_breg29:
30587 case DW_OP_breg30:
30588 case DW_OP_breg31:
30589 case DW_OP_regx:
30590 case DW_OP_fbreg:
30591 case DW_OP_piece:
30592 case DW_OP_deref_size:
30593 case DW_OP_xderef_size:
30594 hstate.add_object (val1->v.val_int);
30595 break;
30596 case DW_OP_skip:
30597 case DW_OP_bra:
30598 {
30599 int offset;
30600
30601 gcc_assert (val1->val_class == dw_val_class_loc);
30602 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
30603 hstate.add_object (offset);
30604 }
30605 break;
30606 case DW_OP_implicit_value:
30607 hstate.add_object (val1->v.val_unsigned);
30608 switch (val2->val_class)
30609 {
30610 case dw_val_class_const:
30611 hstate.add_object (val2->v.val_int);
30612 break;
30613 case dw_val_class_vec:
30614 {
30615 unsigned int elt_size = val2->v.val_vec.elt_size;
30616 unsigned int len = val2->v.val_vec.length;
30617
30618 hstate.add_int (elt_size);
30619 hstate.add_int (len);
30620 hstate.add (val2->v.val_vec.array, len * elt_size);
30621 }
30622 break;
30623 case dw_val_class_const_double:
30624 hstate.add_object (val2->v.val_double.low);
30625 hstate.add_object (val2->v.val_double.high);
30626 break;
30627 case dw_val_class_wide_int:
30628 hstate.add (val2->v.val_wide->get_val (),
30629 get_full_len (*val2->v.val_wide)
30630 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30631 break;
30632 case dw_val_class_addr:
30633 inchash::add_rtx (val2->v.val_addr, hstate);
30634 break;
30635 default:
30636 gcc_unreachable ();
30637 }
30638 break;
30639 case DW_OP_bregx:
30640 case DW_OP_bit_piece:
30641 hstate.add_object (val1->v.val_int);
30642 hstate.add_object (val2->v.val_int);
30643 break;
30644 case DW_OP_addr:
30645 hash_addr:
30646 if (loc->dtprel)
30647 {
30648 unsigned char dtprel = 0xd1;
30649 hstate.add_object (dtprel);
30650 }
30651 inchash::add_rtx (val1->v.val_addr, hstate);
30652 break;
30653 case DW_OP_GNU_addr_index:
30654 case DW_OP_addrx:
30655 case DW_OP_GNU_const_index:
30656 case DW_OP_constx:
30657 {
30658 if (loc->dtprel)
30659 {
30660 unsigned char dtprel = 0xd1;
30661 hstate.add_object (dtprel);
30662 }
30663 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
30664 }
30665 break;
30666 case DW_OP_implicit_pointer:
30667 case DW_OP_GNU_implicit_pointer:
30668 hstate.add_int (val2->v.val_int);
30669 break;
30670 case DW_OP_entry_value:
30671 case DW_OP_GNU_entry_value:
30672 hstate.add_object (val1->v.val_loc);
30673 break;
30674 case DW_OP_regval_type:
30675 case DW_OP_deref_type:
30676 case DW_OP_GNU_regval_type:
30677 case DW_OP_GNU_deref_type:
30678 {
30679 unsigned int byte_size
30680 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
30681 unsigned int encoding
30682 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
30683 hstate.add_object (val1->v.val_int);
30684 hstate.add_object (byte_size);
30685 hstate.add_object (encoding);
30686 }
30687 break;
30688 case DW_OP_convert:
30689 case DW_OP_reinterpret:
30690 case DW_OP_GNU_convert:
30691 case DW_OP_GNU_reinterpret:
30692 if (val1->val_class == dw_val_class_unsigned_const)
30693 {
30694 hstate.add_object (val1->v.val_unsigned);
30695 break;
30696 }
30697 /* FALLTHRU */
30698 case DW_OP_const_type:
30699 case DW_OP_GNU_const_type:
30700 {
30701 unsigned int byte_size
30702 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
30703 unsigned int encoding
30704 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
30705 hstate.add_object (byte_size);
30706 hstate.add_object (encoding);
30707 if (loc->dw_loc_opc != DW_OP_const_type
30708 && loc->dw_loc_opc != DW_OP_GNU_const_type)
30709 break;
30710 hstate.add_object (val2->val_class);
30711 switch (val2->val_class)
30712 {
30713 case dw_val_class_const:
30714 hstate.add_object (val2->v.val_int);
30715 break;
30716 case dw_val_class_vec:
30717 {
30718 unsigned int elt_size = val2->v.val_vec.elt_size;
30719 unsigned int len = val2->v.val_vec.length;
30720
30721 hstate.add_object (elt_size);
30722 hstate.add_object (len);
30723 hstate.add (val2->v.val_vec.array, len * elt_size);
30724 }
30725 break;
30726 case dw_val_class_const_double:
30727 hstate.add_object (val2->v.val_double.low);
30728 hstate.add_object (val2->v.val_double.high);
30729 break;
30730 case dw_val_class_wide_int:
30731 hstate.add (val2->v.val_wide->get_val (),
30732 get_full_len (*val2->v.val_wide)
30733 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30734 break;
30735 default:
30736 gcc_unreachable ();
30737 }
30738 }
30739 break;
30740
30741 default:
30742 /* Other codes have no operands. */
30743 break;
30744 }
30745 }
30746
30747 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
30748
30749 static inline void
30750 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
30751 {
30752 dw_loc_descr_ref l;
30753 bool sizes_computed = false;
30754 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
30755 size_of_locs (loc);
30756
30757 for (l = loc; l != NULL; l = l->dw_loc_next)
30758 {
30759 enum dwarf_location_atom opc = l->dw_loc_opc;
30760 hstate.add_object (opc);
30761 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
30762 {
30763 size_of_locs (loc);
30764 sizes_computed = true;
30765 }
30766 hash_loc_operands (l, hstate);
30767 }
30768 }
30769
30770 /* Compute hash of the whole location list LIST_HEAD. */
30771
30772 static inline void
30773 hash_loc_list (dw_loc_list_ref list_head)
30774 {
30775 dw_loc_list_ref curr = list_head;
30776 inchash::hash hstate;
30777
30778 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
30779 {
30780 hstate.add (curr->begin, strlen (curr->begin) + 1);
30781 hstate.add (curr->end, strlen (curr->end) + 1);
30782 hstate.add_object (curr->vbegin);
30783 hstate.add_object (curr->vend);
30784 if (curr->section)
30785 hstate.add (curr->section, strlen (curr->section) + 1);
30786 hash_locs (curr->expr, hstate);
30787 }
30788 list_head->hash = hstate.end ();
30789 }
30790
30791 /* Return true if X and Y opcodes have the same operands. */
30792
30793 static inline bool
30794 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
30795 {
30796 dw_val_ref valx1 = &x->dw_loc_oprnd1;
30797 dw_val_ref valx2 = &x->dw_loc_oprnd2;
30798 dw_val_ref valy1 = &y->dw_loc_oprnd1;
30799 dw_val_ref valy2 = &y->dw_loc_oprnd2;
30800
30801 switch (x->dw_loc_opc)
30802 {
30803 case DW_OP_const4u:
30804 case DW_OP_const8u:
30805 if (x->dtprel)
30806 goto hash_addr;
30807 /* FALLTHRU */
30808 case DW_OP_const1u:
30809 case DW_OP_const1s:
30810 case DW_OP_const2u:
30811 case DW_OP_const2s:
30812 case DW_OP_const4s:
30813 case DW_OP_const8s:
30814 case DW_OP_constu:
30815 case DW_OP_consts:
30816 case DW_OP_pick:
30817 case DW_OP_plus_uconst:
30818 case DW_OP_breg0:
30819 case DW_OP_breg1:
30820 case DW_OP_breg2:
30821 case DW_OP_breg3:
30822 case DW_OP_breg4:
30823 case DW_OP_breg5:
30824 case DW_OP_breg6:
30825 case DW_OP_breg7:
30826 case DW_OP_breg8:
30827 case DW_OP_breg9:
30828 case DW_OP_breg10:
30829 case DW_OP_breg11:
30830 case DW_OP_breg12:
30831 case DW_OP_breg13:
30832 case DW_OP_breg14:
30833 case DW_OP_breg15:
30834 case DW_OP_breg16:
30835 case DW_OP_breg17:
30836 case DW_OP_breg18:
30837 case DW_OP_breg19:
30838 case DW_OP_breg20:
30839 case DW_OP_breg21:
30840 case DW_OP_breg22:
30841 case DW_OP_breg23:
30842 case DW_OP_breg24:
30843 case DW_OP_breg25:
30844 case DW_OP_breg26:
30845 case DW_OP_breg27:
30846 case DW_OP_breg28:
30847 case DW_OP_breg29:
30848 case DW_OP_breg30:
30849 case DW_OP_breg31:
30850 case DW_OP_regx:
30851 case DW_OP_fbreg:
30852 case DW_OP_piece:
30853 case DW_OP_deref_size:
30854 case DW_OP_xderef_size:
30855 return valx1->v.val_int == valy1->v.val_int;
30856 case DW_OP_skip:
30857 case DW_OP_bra:
30858 /* If splitting debug info, the use of DW_OP_GNU_addr_index
30859 can cause irrelevant differences in dw_loc_addr. */
30860 gcc_assert (valx1->val_class == dw_val_class_loc
30861 && valy1->val_class == dw_val_class_loc
30862 && (dwarf_split_debug_info
30863 || x->dw_loc_addr == y->dw_loc_addr));
30864 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
30865 case DW_OP_implicit_value:
30866 if (valx1->v.val_unsigned != valy1->v.val_unsigned
30867 || valx2->val_class != valy2->val_class)
30868 return false;
30869 switch (valx2->val_class)
30870 {
30871 case dw_val_class_const:
30872 return valx2->v.val_int == valy2->v.val_int;
30873 case dw_val_class_vec:
30874 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30875 && valx2->v.val_vec.length == valy2->v.val_vec.length
30876 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30877 valx2->v.val_vec.elt_size
30878 * valx2->v.val_vec.length) == 0;
30879 case dw_val_class_const_double:
30880 return valx2->v.val_double.low == valy2->v.val_double.low
30881 && valx2->v.val_double.high == valy2->v.val_double.high;
30882 case dw_val_class_wide_int:
30883 return *valx2->v.val_wide == *valy2->v.val_wide;
30884 case dw_val_class_addr:
30885 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
30886 default:
30887 gcc_unreachable ();
30888 }
30889 case DW_OP_bregx:
30890 case DW_OP_bit_piece:
30891 return valx1->v.val_int == valy1->v.val_int
30892 && valx2->v.val_int == valy2->v.val_int;
30893 case DW_OP_addr:
30894 hash_addr:
30895 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
30896 case DW_OP_GNU_addr_index:
30897 case DW_OP_addrx:
30898 case DW_OP_GNU_const_index:
30899 case DW_OP_constx:
30900 {
30901 rtx ax1 = valx1->val_entry->addr.rtl;
30902 rtx ay1 = valy1->val_entry->addr.rtl;
30903 return rtx_equal_p (ax1, ay1);
30904 }
30905 case DW_OP_implicit_pointer:
30906 case DW_OP_GNU_implicit_pointer:
30907 return valx1->val_class == dw_val_class_die_ref
30908 && valx1->val_class == valy1->val_class
30909 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
30910 && valx2->v.val_int == valy2->v.val_int;
30911 case DW_OP_entry_value:
30912 case DW_OP_GNU_entry_value:
30913 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
30914 case DW_OP_const_type:
30915 case DW_OP_GNU_const_type:
30916 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
30917 || valx2->val_class != valy2->val_class)
30918 return false;
30919 switch (valx2->val_class)
30920 {
30921 case dw_val_class_const:
30922 return valx2->v.val_int == valy2->v.val_int;
30923 case dw_val_class_vec:
30924 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30925 && valx2->v.val_vec.length == valy2->v.val_vec.length
30926 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30927 valx2->v.val_vec.elt_size
30928 * valx2->v.val_vec.length) == 0;
30929 case dw_val_class_const_double:
30930 return valx2->v.val_double.low == valy2->v.val_double.low
30931 && valx2->v.val_double.high == valy2->v.val_double.high;
30932 case dw_val_class_wide_int:
30933 return *valx2->v.val_wide == *valy2->v.val_wide;
30934 default:
30935 gcc_unreachable ();
30936 }
30937 case DW_OP_regval_type:
30938 case DW_OP_deref_type:
30939 case DW_OP_GNU_regval_type:
30940 case DW_OP_GNU_deref_type:
30941 return valx1->v.val_int == valy1->v.val_int
30942 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
30943 case DW_OP_convert:
30944 case DW_OP_reinterpret:
30945 case DW_OP_GNU_convert:
30946 case DW_OP_GNU_reinterpret:
30947 if (valx1->val_class != valy1->val_class)
30948 return false;
30949 if (valx1->val_class == dw_val_class_unsigned_const)
30950 return valx1->v.val_unsigned == valy1->v.val_unsigned;
30951 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30952 case DW_OP_GNU_parameter_ref:
30953 return valx1->val_class == dw_val_class_die_ref
30954 && valx1->val_class == valy1->val_class
30955 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30956 default:
30957 /* Other codes have no operands. */
30958 return true;
30959 }
30960 }
30961
30962 /* Return true if DWARF location expressions X and Y are the same. */
30963
30964 static inline bool
30965 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
30966 {
30967 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
30968 if (x->dw_loc_opc != y->dw_loc_opc
30969 || x->dtprel != y->dtprel
30970 || !compare_loc_operands (x, y))
30971 break;
30972 return x == NULL && y == NULL;
30973 }
30974
30975 /* Hashtable helpers. */
30976
30977 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
30978 {
30979 static inline hashval_t hash (const dw_loc_list_struct *);
30980 static inline bool equal (const dw_loc_list_struct *,
30981 const dw_loc_list_struct *);
30982 };
30983
30984 /* Return precomputed hash of location list X. */
30985
30986 inline hashval_t
30987 loc_list_hasher::hash (const dw_loc_list_struct *x)
30988 {
30989 return x->hash;
30990 }
30991
30992 /* Return true if location lists A and B are the same. */
30993
30994 inline bool
30995 loc_list_hasher::equal (const dw_loc_list_struct *a,
30996 const dw_loc_list_struct *b)
30997 {
30998 if (a == b)
30999     return true;
31000   if (a->hash != b->hash)
31001     return false;
31002 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
31003 if (strcmp (a->begin, b->begin) != 0
31004 || strcmp (a->end, b->end) != 0
31005 || (a->section == NULL) != (b->section == NULL)
31006 || (a->section && strcmp (a->section, b->section) != 0)
31007 || a->vbegin != b->vbegin || a->vend != b->vend
31008 || !compare_locs (a->expr, b->expr))
31009 break;
31010 return a == NULL && b == NULL;
31011 }
31012
31013 typedef hash_table<loc_list_hasher> loc_list_hash_type;
31014
31015
31016 /* Recursively optimize location lists referenced from DIE
31017 children and share them whenever possible. */
31018
31019 static void
31020 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
31021 {
31022 dw_die_ref c;
31023 dw_attr_node *a;
31024 unsigned ix;
31025 dw_loc_list_struct **slot;
31026 bool drop_locviews = false;
31027 bool has_locviews = false;
31028
31029 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31030 if (AT_class (a) == dw_val_class_loc_list)
31031 {
31032 dw_loc_list_ref list = AT_loc_list (a);
31033 	/* TODO: perform some optimizations here, before hashing
31034 	   it and storing it into the hash table.  */
31035 hash_loc_list (list);
31036 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
31037 if (*slot == NULL)
31038 {
31039 *slot = list;
31040 if (loc_list_has_views (list))
31041 gcc_assert (list->vl_symbol);
31042 else if (list->vl_symbol)
31043 {
31044 drop_locviews = true;
31045 list->vl_symbol = NULL;
31046 }
31047 }
31048 else
31049 {
31050 if (list->vl_symbol && !(*slot)->vl_symbol)
31051 drop_locviews = true;
31052 a->dw_attr_val.v.val_loc_list = *slot;
31053 }
31054 }
31055 else if (AT_class (a) == dw_val_class_view_list)
31056 {
31057 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
31058 has_locviews = true;
31059 }
31060
31061
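  /* If any location list on this DIE lost its location-view symbol above
     (it has no views, or it was unified with a canonical copy that has
     none), the DW_AT_GNU_locviews attribute would point at views that will
     not be emitted; drop it.  */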
31062 if (drop_locviews && has_locviews)
31063 remove_AT (die, DW_AT_GNU_locviews);
31064
31065 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
31066 }
31067
31068
31069 /* Recursively assign each location list a unique index into the debug_addr
31070 section. */
31071
31072 static void
31073 index_location_lists (dw_die_ref die)
31074 {
31075 dw_die_ref c;
31076 dw_attr_node *a;
31077 unsigned ix;
31078
31079 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31080 if (AT_class (a) == dw_val_class_loc_list)
31081 {
31082 dw_loc_list_ref list = AT_loc_list (a);
31083 dw_loc_list_ref curr;
31084 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
31085 {
31086 /* Don't index an entry that has already been indexed
31087 or won't be output. Make sure skip_loc_list_entry doesn't
31088 	     call size_of_locs, because that might cause a circular dependency:
31089 	     index_location_lists requires address table indexes to be
31090 	     computed, but it adds new indexes through add_addr_table_entry,
31091 	     while the address table index computation requires that no new
31092 	     additions be made to the hash table.  In the rare case of a
31093 	     DWARF[234] location expression >= 64KB, we'll just waste an
31094 	     unused address table entry for it.  */
31095 if (curr->begin_entry != NULL
31096 || skip_loc_list_entry (curr))
31097 continue;
31098
31099 curr->begin_entry
31100 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
31101 }
31102 }
31103
31104 FOR_EACH_CHILD (die, c, index_location_lists (c));
31105 }
31106
31107 /* Optimize location lists referenced from DIE
31108 children and share them whenever possible. */
31109
31110 static void
31111 optimize_location_lists (dw_die_ref die)
31112 {
31113 loc_list_hash_type htab (500);
31114 optimize_location_lists_1 (die, &htab);
31115 }
31116 \f
31117 /* Traverse the limbo die list, and add parent/child links. The only
31118 dies without parents that should be here are concrete instances of
31119 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
31120 For concrete instances, we can get the parent die from the abstract
31121 instance. */
31122
31123 static void
31124 flush_limbo_die_list (void)
31125 {
31126 limbo_die_node *node;
31127
31128 /* get_context_die calls force_decl_die, which can put new DIEs on the
31129 limbo list in LTO mode when nested functions are put in a different
31130 partition than that of their parent function. */
31131 while ((node = limbo_die_list))
31132 {
31133 dw_die_ref die = node->die;
31134 limbo_die_list = node->next;
31135
31136 if (die->die_parent == NULL)
31137 {
31138 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
31139
31140 if (origin && origin->die_parent)
31141 add_child_die (origin->die_parent, die);
31142 else if (is_cu_die (die))
31143 ;
31144 else if (seen_error ())
31145 /* It's OK to be confused by errors in the input. */
31146 add_child_die (comp_unit_die (), die);
31147 else
31148 {
31149 /* In certain situations, the lexical block containing a
31150 nested function can be optimized away, which results
31151 in the nested function die being orphaned. Likewise
31152 with the return type of that nested function. Force
31153 this to be a child of the containing function.
31154
31155 		 It may happen that even the containing function got fully inlined
31156 		 and optimized out.  In that case we are lost and attach the orphaned
31157 		 DIE to whatever enclosing context we can find.  This should not be
31158 		 a big issue as the function is likely unreachable too.  */
31159 gcc_assert (node->created_for);
31160
31161 if (DECL_P (node->created_for))
31162 origin = get_context_die (DECL_CONTEXT (node->created_for));
31163 else if (TYPE_P (node->created_for))
31164 origin = scope_die_for (node->created_for, comp_unit_die ());
31165 else
31166 origin = comp_unit_die ();
31167
31168 add_child_die (origin, die);
31169 }
31170 }
31171 }
31172 }
31173
31174 /* Reset DIEs so we can output them again. */
31175
31176 static void
31177 reset_dies (dw_die_ref die)
31178 {
31179 dw_die_ref c;
31180
31181 /* Remove stuff we re-generate. */
31182 die->die_mark = 0;
31183 die->die_offset = 0;
31184 die->die_abbrev = 0;
31185 remove_AT (die, DW_AT_sibling);
31186
31187 FOR_EACH_CHILD (die, c, reset_dies (c));
31188 }
31189
31190 /* Output stuff that dwarf requires at the end of every file,
31191 and generate the DWARF-2 debugging info. */
31192
31193 static void
31194 dwarf2out_finish (const char *filename)
31195 {
31196 comdat_type_node *ctnode;
31197 dw_die_ref main_comp_unit_die;
31198 unsigned char checksum[16];
31199 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31200
31201 /* Flush out any latecomers to the limbo party. */
31202 flush_limbo_die_list ();
31203
31204 if (inline_entry_data_table)
31205 gcc_assert (inline_entry_data_table->elements () == 0);
31206
31207 if (flag_checking)
31208 {
31209 verify_die (comp_unit_die ());
31210 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31211 verify_die (node->die);
31212 }
31213
31214 /* We shouldn't have any symbols with delayed asm names for
31215 DIEs generated after early finish. */
31216 gcc_assert (deferred_asm_name == NULL);
31217
31218 gen_remaining_tmpl_value_param_die_attribute ();
31219
31220 if (flag_generate_lto || flag_generate_offload)
31221 {
31222 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
31223
31224 /* Prune stuff so that dwarf2out_finish runs successfully
31225 for the fat part of the object. */
31226 reset_dies (comp_unit_die ());
31227 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31228 reset_dies (node->die);
31229
31230 hash_table<comdat_type_hasher> comdat_type_table (100);
31231 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31232 {
31233 comdat_type_node **slot
31234 = comdat_type_table.find_slot (ctnode, INSERT);
31235
31236 /* Don't reset types twice. */
31237 if (*slot != HTAB_EMPTY_ENTRY)
31238 continue;
31239
31240 /* Remove the pointer to the line table. */
31241 remove_AT (ctnode->root_die, DW_AT_stmt_list);
31242
31243 if (debug_info_level >= DINFO_LEVEL_TERSE)
31244 reset_dies (ctnode->root_die);
31245
31246 *slot = ctnode;
31247 }
31248
31249 /* Reset die CU symbol so we don't output it twice. */
31250 comp_unit_die ()->die_id.die_symbol = NULL;
31251
31252 /* Remove DW_AT_macro and DW_AT_stmt_list from the early output. */
31253 remove_AT (comp_unit_die (), DW_AT_stmt_list);
31254 if (have_macinfo)
31255 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
31256
31257 /* Remove indirect string decisions. */
31258 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
31259 if (debug_line_str_hash)
31260 {
31261 debug_line_str_hash->traverse<void *, reset_indirect_string> (NULL);
31262 debug_line_str_hash = NULL;
31263 }
31264 }
31265
31266 #if ENABLE_ASSERT_CHECKING
31267 {
31268 dw_die_ref die = comp_unit_die (), c;
31269 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
31270 }
31271 #endif
31272 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31273 resolve_addr (ctnode->root_die);
31274 resolve_addr (comp_unit_die ());
31275 move_marked_base_types ();
31276
31277 if (dump_file)
31278 {
31279 fprintf (dump_file, "DWARF for %s\n", filename);
31280 print_die (comp_unit_die (), dump_file);
31281 }
31282
31283 /* Initialize sections and labels used for actual assembler output. */
31284 unsigned generation = init_sections_and_labels (false);
31285
31286 /* Traverse the DIE's and add sibling attributes to those DIE's that
31287 have children. */
31288 add_sibling_attributes (comp_unit_die ());
31289 limbo_die_node *node;
31290 for (node = cu_die_list; node; node = node->next)
31291 add_sibling_attributes (node->die);
31292 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31293 add_sibling_attributes (ctnode->root_die);
31294
31295 /* When splitting DWARF info, we put some attributes in the
31296 skeleton compile_unit DIE that remains in the .o, while
31297 most attributes go in the DWO compile_unit_die. */
31298 if (dwarf_split_debug_info)
31299 {
31300 limbo_die_node *cu;
31301 main_comp_unit_die = gen_compile_unit_die (NULL);
31302 if (dwarf_version >= 5)
31303 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
31304 cu = limbo_die_list;
31305 gcc_assert (cu->die == main_comp_unit_die);
31306 limbo_die_list = limbo_die_list->next;
31307 cu->next = cu_die_list;
31308 cu_die_list = cu;
31309 }
31310 else
31311 main_comp_unit_die = comp_unit_die ();
31312
31313 /* Output a terminator label for the .text section. */
31314 switch_to_section (text_section);
31315 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
31316 if (cold_text_section)
31317 {
31318 switch_to_section (cold_text_section);
31319 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
31320 }
31321
31322 /* We can only use the low/high_pc attributes if all of the code was
31323 in .text. */
31324 if (!have_multiple_function_sections
31325 || (dwarf_version < 3 && dwarf_strict))
31326 {
31327 /* Don't add if the CU has no associated code. */
31328 if (text_section_used)
31329 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
31330 text_end_label, true);
31331 }
31332 else
31333 {
31334 unsigned fde_idx;
31335 dw_fde_ref fde;
31336 bool range_list_added = false;
31337
31338 if (text_section_used)
31339 add_ranges_by_labels (main_comp_unit_die, text_section_label,
31340 text_end_label, &range_list_added, true);
31341 if (cold_text_section_used)
31342 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
31343 cold_end_label, &range_list_added, true);
31344
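      /* Add a range for each function that was emitted outside of the
	 standard text and cold-text sections.  */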
31345 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
31346 {
31347 if (DECL_IGNORED_P (fde->decl))
31348 continue;
31349 if (!fde->in_std_section)
31350 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
31351 fde->dw_fde_end, &range_list_added,
31352 true);
31353 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
31354 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
31355 fde->dw_fde_second_end, &range_list_added,
31356 true);
31357 }
31358
31359 if (range_list_added)
31360 {
31361 /* We need to give .debug_loc and .debug_ranges an appropriate
31362 "base address". Use zero so that these addresses become
31363 absolute. Historically, we've emitted the unexpected
31364 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
31365 Emit both to give time for other tools to adapt. */
31366 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
31367 if (! dwarf_strict && dwarf_version < 4)
31368 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
31369
31370 add_ranges (NULL);
31371 }
31372 }
31373
31374 /* AIX Assembler inserts the length, so adjust the reference to match the
31375 offset expected by debuggers. */
31376 strcpy (dl_section_ref, debug_line_section_label);
31377 if (XCOFF_DEBUGGING_INFO)
31378 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31379
31380 if (debug_info_level >= DINFO_LEVEL_TERSE)
31381 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
31382 dl_section_ref);
31383
31384 if (have_macinfo)
31385 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31386 macinfo_section_label);
31387
31388 if (dwarf_split_debug_info)
31389 {
31390 if (have_location_lists)
31391 {
31392 /* Since we generate the loclists in the split DWARF .dwo
31393 file itself, we don't need to generate a loclists_base
31394 attribute for the split compile unit DIE. That attribute
31395 (and using relocatable sec_offset FORMs) isn't allowed
31396 for a split compile unit. Only if the .debug_loclists
31397 	     section was in the main file would we need to generate a
31398 loclists_base attribute here (for the full or skeleton
31399 unit DIE). */
31400
31401 /* optimize_location_lists calculates the size of the lists,
31402 so index them first, and assign indices to the entries.
31403 Although optimize_location_lists will remove entries from
31404 the table, it only does so for duplicates, and therefore
31405 only reduces ref_counts to 1. */
31406 index_location_lists (comp_unit_die ());
31407 }
31408
31409 if (addr_index_table != NULL)
31410 {
31411 unsigned int index = 0;
31412 addr_index_table
31413 ->traverse_noresize<unsigned int *, index_addr_table_entry>
31414 (&index);
31415 }
31416 }
31417
31418 loc_list_idx = 0;
31419 if (have_location_lists)
31420 {
31421 optimize_location_lists (comp_unit_die ());
31422 /* And finally assign indexes to the entries for -gsplit-dwarf. */
31423 if (dwarf_version >= 5 && dwarf_split_debug_info)
31424 assign_location_list_indexes (comp_unit_die ());
31425 }
31426
31427 save_macinfo_strings ();
31428
31429 if (dwarf_split_debug_info)
31430 {
31431 unsigned int index = 0;
31432
31433 /* Add attributes common to skeleton compile_units and
31434 type_units. Because these attributes include strings, it
31435 must be done before freezing the string table. Top-level
31436 skeleton die attrs are added when the skeleton type unit is
31437 created, so ensure it is created by this point. */
31438 add_top_level_skeleton_die_attrs (main_comp_unit_die);
31439 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31440 }
31441
31442 /* Output all of the compilation units. We put the main one last so that
31443 the offsets are available to output_pubnames. */
31444 for (node = cu_die_list; node; node = node->next)
31445 output_comp_unit (node->die, 0, NULL);
31446
31447 hash_table<comdat_type_hasher> comdat_type_table (100);
31448 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31449 {
31450 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31451
31452 /* Don't output duplicate types. */
31453 if (*slot != HTAB_EMPTY_ENTRY)
31454 continue;
31455
31456 /* Add a pointer to the line table for the main compilation unit
31457 so that the debugger can make sense of DW_AT_decl_file
31458 attributes. */
31459 if (debug_info_level >= DINFO_LEVEL_TERSE)
31460 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31461 (!dwarf_split_debug_info
31462 ? dl_section_ref
31463 : debug_skeleton_line_section_label));
31464
31465 output_comdat_type_unit (ctnode);
31466 *slot = ctnode;
31467 }
31468
31469 if (dwarf_split_debug_info)
31470 {
31471 int mark;
31472 struct md5_ctx ctx;
31473
31474 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
31475 index_rnglists ();
31476
31477 /* Compute a checksum of the comp_unit to use as the dwo_id. */
31478 md5_init_ctx (&ctx);
31479 mark = 0;
31480 die_checksum (comp_unit_die (), &ctx, &mark);
31481 unmark_all_dies (comp_unit_die ());
31482 md5_finish_ctx (&ctx, checksum);
31483
31484 if (dwarf_version < 5)
31485 {
31486 /* Use the first 8 bytes of the checksum as the dwo_id,
31487 and add it to both comp-unit DIEs. */
31488 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
31489 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
31490 }
31491
31492 /* Add the base offset of the ranges table to the skeleton
31493 comp-unit DIE. */
31494 if (!vec_safe_is_empty (ranges_table))
31495 {
31496 if (dwarf_version >= 5)
31497 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
31498 ranges_base_label);
31499 else
31500 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
31501 ranges_section_label);
31502 }
31503
31504 switch_to_section (debug_addr_section);
31505 /* GNU DebugFission https://gcc.gnu.org/wiki/DebugFission
31506 which GCC uses to implement -gsplit-dwarf as DWARF GNU extension
31507 before DWARF5, didn't have a header for .debug_addr units.
31508 DWARF5 specifies a small header when address tables are used. */
31509 if (dwarf_version >= 5)
31510 {
31511 unsigned int last_idx = 0;
31512 unsigned long addrs_length;
31513
31514 addr_index_table->traverse_noresize
31515 <unsigned int *, count_index_addrs> (&last_idx);
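	  /* The extra 4 bytes account for the 2-byte version, 1-byte address
	     size and 1-byte segment selector size fields that follow the
	     initial length in the DWARF5 .debug_addr header.  */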
31516 addrs_length = last_idx * DWARF2_ADDR_SIZE + 4;
31517
31518 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31519 dw2_asm_output_data (4, 0xffffffff,
31520 "Escape value for 64-bit DWARF extension");
31521 dw2_asm_output_data (DWARF_OFFSET_SIZE, addrs_length,
31522 "Length of Address Unit");
31523 dw2_asm_output_data (2, 5, "DWARF addr version");
31524 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
31525 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
31526 }
31527 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
31528 output_addr_table ();
31529 }
31530
31531 /* Output the main compilation unit if non-empty or if .debug_macinfo
31532 or .debug_macro will be emitted. */
31533 output_comp_unit (comp_unit_die (), have_macinfo,
31534 dwarf_split_debug_info ? checksum : NULL);
31535
31536 if (dwarf_split_debug_info && info_section_emitted)
31537 output_skeleton_debug_sections (main_comp_unit_die, checksum);
31538
31539 /* Output the abbreviation table. */
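  /* Slot zero of the abbrev table is a placeholder (abbrev codes start
     at 1), so a table of length one means no abbreviations were used.  */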
31540 if (vec_safe_length (abbrev_die_table) != 1)
31541 {
31542 switch_to_section (debug_abbrev_section);
31543 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31544 output_abbrev_section ();
31545 }
31546
31547 /* Output location list section if necessary. */
31548 if (have_location_lists)
31549 {
31550 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
31551 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
31552 /* Output the location lists info. */
31553 switch_to_section (debug_loc_section);
31554 if (dwarf_version >= 5)
31555 {
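	  /* L1 and L2 bracket this unit's .debug_loclists contribution so
	     its length can be emitted as an assembler-computed delta.  */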
31556 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 2);
31557 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 3);
31558 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31559 dw2_asm_output_data (4, 0xffffffff,
31560 "Initial length escape value indicating "
31561 "64-bit DWARF extension");
31562 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
31563 "Length of Location Lists");
31564 ASM_OUTPUT_LABEL (asm_out_file, l1);
31565 output_dwarf_version ();
31566 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
31567 dw2_asm_output_data (1, 0, "Segment Size");
31568 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
31569 "Offset Entry Count");
31570 }
31571 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
31572 if (dwarf_version >= 5 && dwarf_split_debug_info)
31573 {
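	  /* Emit the offset entry table; re-walking the DIEs must yield
	     exactly as many location lists as were assigned indexes.  */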
31574 unsigned int save_loc_list_idx = loc_list_idx;
31575 loc_list_idx = 0;
31576 output_loclists_offsets (comp_unit_die ());
31577 gcc_assert (save_loc_list_idx == loc_list_idx);
31578 }
31579 output_location_lists (comp_unit_die ());
31580 if (dwarf_version >= 5)
31581 ASM_OUTPUT_LABEL (asm_out_file, l2);
31582 }
31583
31584 output_pubtables ();
31585
31586 /* Output the address range information if a CU (.debug_info section)
31587 was emitted. We output an empty table even if we had no functions
31588      to put in it.  This is because the consumer has no way to tell the
31589 difference between an empty table that we omitted and failure to
31590 generate a table that would have contained data. */
31591 if (info_section_emitted)
31592 {
31593 switch_to_section (debug_aranges_section);
31594 output_aranges ();
31595 }
31596
31597 /* Output ranges section if necessary. */
31598 if (!vec_safe_is_empty (ranges_table))
31599 {
31600 if (dwarf_version >= 5)
31601 output_rnglists (generation);
31602 else
31603 output_ranges ();
31604 }
31605
31606 /* Have to end the macro section. */
31607 if (have_macinfo)
31608 {
31609 switch_to_section (debug_macinfo_section);
31610 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31611 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
31612 : debug_skeleton_line_section_label, false);
31613 dw2_asm_output_data (1, 0, "End compilation unit");
31614 }
31615
31616 /* Output the source line correspondence table. We must do this
31617 even if there is no line information. Otherwise, on an empty
31618 translation unit, we will generate a present, but empty,
31619 .debug_info section. IRIX 6.5 `nm' will then complain when
31620 examining the file. This is done late so that any filenames
31621 used by the debug_info section are marked as 'used'. */
31622 switch_to_section (debug_line_section);
31623 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
31624 if (! output_asm_line_debug_info ())
31625 output_line_info (false);
31626
31627 if (dwarf_split_debug_info && info_section_emitted)
31628 {
31629 switch_to_section (debug_skeleton_line_section);
31630 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31631 output_line_info (true);
31632 }
31633
31634 /* If we emitted any indirect strings, output the string table too. */
31635 if (debug_str_hash || skeleton_debug_str_hash)
31636 output_indirect_strings ();
31637 if (debug_line_str_hash)
31638 {
31639 switch_to_section (debug_line_str_section);
31640 const enum dwarf_form form = DW_FORM_line_strp;
31641 debug_line_str_hash->traverse<enum dwarf_form,
31642 output_indirect_string> (form);
31643 }
31644
31645 /* ??? Move lvugid out of dwarf2out_source_line and reset it too? */
31646 symview_upper_bound = 0;
31647 if (zero_view_p)
31648 bitmap_clear (zero_view_p);
31649 }
31650
31651 /* Returns a hash value for X (which really is a variable_value_struct). */
31652
31653 inline hashval_t
31654 variable_value_hasher::hash (variable_value_struct *x)
31655 {
31656 return (hashval_t) x->decl_id;
31657 }
31658
31659 /* Return nonzero if decl_id of variable_value_struct X is the same as
31660 UID of decl Y. */
31661
31662 inline bool
31663 variable_value_hasher::equal (variable_value_struct *x, tree y)
31664 {
31665 return x->decl_id == DECL_UID (y);
31666 }
31667
31668 /* Helper function for resolve_variable_value, handle
31669 DW_OP_GNU_variable_value in one location expression.
31670    Return true if the exprloc has been changed into a loclist.  */
31671
31672 static bool
31673 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
31674 {
31675 dw_loc_descr_ref next;
31676 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
31677 {
31678 next = loc->dw_loc_next;
31679 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
31680 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
31681 continue;
31682
31683 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31684 if (DECL_CONTEXT (decl) != current_function_decl)
31685 continue;
31686
31687 dw_die_ref ref = lookup_decl_die (decl);
31688 if (ref)
31689 {
31690 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31691 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31692 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31693 continue;
31694 }
31695 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
31696 if (l == NULL)
31697 continue;
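      /* A multi-entry location list can replace the DW_OP_GNU_variable_value
	 expression only for attributes that also accept the loclist class;
	 otherwise fall back to referencing a DW_TAG_variable DIE below.  */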
31698 if (l->dw_loc_next)
31699 {
31700 if (AT_class (a) != dw_val_class_loc)
31701 continue;
31702 switch (a->dw_attr)
31703 {
31704 	    /* The following attributes allow both exprloc and loclist
31705 	       classes, so we can change them into a loclist.  */
31706 case DW_AT_location:
31707 case DW_AT_string_length:
31708 case DW_AT_return_addr:
31709 case DW_AT_data_member_location:
31710 case DW_AT_frame_base:
31711 case DW_AT_segment:
31712 case DW_AT_static_link:
31713 case DW_AT_use_location:
31714 case DW_AT_vtable_elem_location:
31715 if (prev)
31716 {
31717 prev->dw_loc_next = NULL;
31718 prepend_loc_descr_to_each (l, AT_loc (a));
31719 }
31720 if (next)
31721 add_loc_descr_to_each (l, next);
31722 a->dw_attr_val.val_class = dw_val_class_loc_list;
31723 a->dw_attr_val.val_entry = NULL;
31724 a->dw_attr_val.v.val_loc_list = l;
31725 have_location_lists = true;
31726 return true;
31727 	    /* The following attributes allow both exprloc and reference
31728 	       classes, so if the whole expression is DW_OP_GNU_variable_value
31729 	       alone we can transform it into a reference.  */
31730 case DW_AT_byte_size:
31731 case DW_AT_bit_size:
31732 case DW_AT_lower_bound:
31733 case DW_AT_upper_bound:
31734 case DW_AT_bit_stride:
31735 case DW_AT_count:
31736 case DW_AT_allocated:
31737 case DW_AT_associated:
31738 case DW_AT_byte_stride:
31739 if (prev == NULL && next == NULL)
31740 break;
31741 /* FALLTHRU */
31742 default:
31743 if (dwarf_strict)
31744 continue;
31745 break;
31746 }
31747 /* Create DW_TAG_variable that we can refer to. */
31748 gen_decl_die (decl, NULL_TREE, NULL,
31749 lookup_decl_die (current_function_decl));
31750 ref = lookup_decl_die (decl);
31751 if (ref)
31752 {
31753 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31754 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31755 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31756 }
31757 continue;
31758 }
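      /* L is a single-entry location list; splice its expression in place
	 of the DW_OP_GNU_variable_value opcode being removed.  */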
31759 if (prev)
31760 {
31761 prev->dw_loc_next = l->expr;
31762 add_loc_descr (&prev->dw_loc_next, next);
31763 free_loc_descr (loc, NULL);
31764 next = prev->dw_loc_next;
31765 }
31766 else
31767 {
31768 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
31769 add_loc_descr (&loc, next);
31770 next = loc;
31771 }
31772 loc = prev;
31773 }
31774 return false;
31775 }
31776
31777 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
31778
31779 static void
31780 resolve_variable_value (dw_die_ref die)
31781 {
31782 dw_attr_node *a;
31783 dw_loc_list_ref loc;
31784 unsigned ix;
31785
31786 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31787 switch (AT_class (a))
31788 {
31789 case dw_val_class_loc:
31790 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
31791 break;
31792 /* FALLTHRU */
31793 case dw_val_class_loc_list:
31794 loc = AT_loc_list (a);
31795 gcc_assert (loc);
31796 for (; loc; loc = loc->dw_loc_next)
31797 resolve_variable_value_in_expr (a, loc->expr);
31798 break;
31799 default:
31800 break;
31801 }
31802 }
31803
31804 /* Attempt to optimize DW_OP_GNU_variable_value referring to
31805 temporaries in the current function. */
31806
31807 static void
31808 resolve_variable_values (void)
31809 {
31810 if (!variable_value_hash || !current_function_decl)
31811 return;
31812
31813 struct variable_value_struct *node
31814 = variable_value_hash->find_with_hash (current_function_decl,
31815 DECL_UID (current_function_decl));
31816
31817 if (node == NULL)
31818 return;
31819
31820 unsigned int i;
31821 dw_die_ref die;
31822 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
31823 resolve_variable_value (die);
31824 }
31825
31826 /* Helper function for note_variable_value, handle one location
31827 expression. */
31828
31829 static void
31830 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
31831 {
31832 for (; loc; loc = loc->dw_loc_next)
31833 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
31834 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
31835 {
31836 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31837 dw_die_ref ref = lookup_decl_die (decl);
31838 if (! ref && (flag_generate_lto || flag_generate_offload))
31839 {
31840 	      /* ??? This is somewhat of a hack because we do not create DIEs
31841 		 for variables not in BLOCK trees early, but when generating
31842 		 early LTO output we need the dw_val_class_decl_ref to be
31843 		 fully resolved.  For fat LTO objects we'd also like to
31844 		 undo this after LTO dwarf output.  */
31845 gcc_assert (DECL_CONTEXT (decl));
31846 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
31847 gcc_assert (ctx != NULL);
31848 gen_decl_die (decl, NULL_TREE, NULL, ctx);
31849 ref = lookup_decl_die (decl);
31850 gcc_assert (ref != NULL);
31851 }
31852 if (ref)
31853 {
31854 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31855 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31856 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31857 continue;
31858 }
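      /* No DIE for the referenced variable yet, but its containing function
	 already has one: remember this DIE keyed by that function so that
	 resolve_variable_values can retry while compiling it.  */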
31859 if (VAR_P (decl)
31860 && DECL_CONTEXT (decl)
31861 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
31862 && lookup_decl_die (DECL_CONTEXT (decl)))
31863 {
31864 if (!variable_value_hash)
31865 variable_value_hash
31866 = hash_table<variable_value_hasher>::create_ggc (10);
31867
31868 tree fndecl = DECL_CONTEXT (decl);
31869 struct variable_value_struct *node;
31870 struct variable_value_struct **slot
31871 = variable_value_hash->find_slot_with_hash (fndecl,
31872 DECL_UID (fndecl),
31873 INSERT);
31874 if (*slot == NULL)
31875 {
31876 node = ggc_cleared_alloc<variable_value_struct> ();
31877 node->decl_id = DECL_UID (fndecl);
31878 *slot = node;
31879 }
31880 else
31881 node = *slot;
31882
31883 vec_safe_push (node->dies, die);
31884 }
31885 }
31886 }
31887
31888 /* Walk the tree rooted at DIE and note DIEs that still have a
31889    DW_OP_GNU_variable_value with a dw_val_class_decl_ref operand.  */
31890
31891 static void
31892 note_variable_value (dw_die_ref die)
31893 {
31894 dw_die_ref c;
31895 dw_attr_node *a;
31896 dw_loc_list_ref loc;
31897 unsigned ix;
31898
31899 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31900 switch (AT_class (a))
31901 {
31902 case dw_val_class_loc_list:
31903 loc = AT_loc_list (a);
31904 gcc_assert (loc);
31905 if (!loc->noted_variable_value)
31906 {
31907 loc->noted_variable_value = 1;
31908 for (; loc; loc = loc->dw_loc_next)
31909 note_variable_value_in_expr (die, loc->expr);
31910 }
31911 break;
31912 case dw_val_class_loc:
31913 note_variable_value_in_expr (die, AT_loc (a));
31914 break;
31915 default:
31916 break;
31917 }
31918
31919 /* Mark children. */
31920 FOR_EACH_CHILD (die, c, note_variable_value (c));
31921 }
31922
31923 /* Perform any cleanups needed after the early debug generation pass
31924 has run. */
31925
31926 static void
31927 dwarf2out_early_finish (const char *filename)
31928 {
31929 set_early_dwarf s;
31930 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31931
31932   /* PCH might result in the DW_AT_producer string being restored from the
31933      header compilation, so always fill it with an empty string initially
31934      and overwrite it only here.  */
31935 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
31936 producer_string = gen_producer_string ();
31937 producer->dw_attr_val.v.val_str->refcount--;
31938 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
31939
31940 /* Add the name for the main input file now. We delayed this from
31941 dwarf2out_init to avoid complications with PCH. */
31942 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
31943 add_comp_dir_attribute (comp_unit_die ());
31944
31945 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
31946 DW_AT_comp_dir into .debug_line_str section. */
31947 if (!output_asm_line_debug_info ()
31948 && dwarf_version >= 5
31949 && DWARF5_USE_DEBUG_LINE_STR)
31950 {
31951 for (int i = 0; i < 2; i++)
31952 {
31953 dw_attr_node *a = get_AT (comp_unit_die (),
31954 i ? DW_AT_comp_dir : DW_AT_name);
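	  /* Skip if the attribute is missing or not a string, or if the
	     string is no longer than a DW_FORM_line_strp reference and thus
	     not worth moving to .debug_line_str.  */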
31955 if (a == NULL
31956 || AT_class (a) != dw_val_class_str
31957 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
31958 continue;
31959
31960 if (! debug_line_str_hash)
31961 debug_line_str_hash
31962 = hash_table<indirect_string_hasher>::create_ggc (10);
31963
31964 struct indirect_string_node *node
31965 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
31966 set_indirect_string (node);
31967 node->form = DW_FORM_line_strp;
31968 a->dw_attr_val.v.val_str->refcount--;
31969 a->dw_attr_val.v.val_str = node;
31970 }
31971 }
31972
31973 /* With LTO early dwarf was really finished at compile-time, so make
31974 sure to adjust the phase after annotating the LTRANS CU DIE. */
31975 if (in_lto_p)
31976 {
31977       /* Force DW_TAG_imported_unit to be created now, otherwise we might
31978 	 end up without it, or with it ordered after a DW_TAG_inlined_subroutine
31979 	 that references DIEs from it.  */
31980 if (! flag_wpa && flag_incremental_link != INCREMENTAL_LINK_LTO)
31981 {
31982 unsigned i;
31983 tree tu;
31984 if (external_die_map)
31985 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, tu)
31986 if (sym_off_pair *desc = external_die_map->get (tu))
31987 {
31988 dw_die_ref import = new_die (DW_TAG_imported_unit,
31989 comp_unit_die (), NULL_TREE);
31990 add_AT_external_die_ref (import, DW_AT_import,
31991 desc->sym, desc->off);
31992 }
31993 }
31994
31995 early_dwarf_finished = true;
31996 if (dump_file)
31997 {
31998 fprintf (dump_file, "LTO EARLY DWARF for %s\n", filename);
31999 print_die (comp_unit_die (), dump_file);
32000 }
32001 return;
32002 }
32003
32004 /* Walk through the list of incomplete types again, trying once more to
32005 emit full debugging info for them. */
32006 retry_incomplete_types ();
32007
32008 /* The point here is to flush out the limbo list so that it is empty
32009 and we don't need to stream it for LTO. */
32010 flush_limbo_die_list ();
32011
32012 gen_scheduled_generic_parms_dies ();
32013 gen_remaining_tmpl_value_param_die_attribute ();
32014
32015 /* Add DW_AT_linkage_name for all deferred DIEs. */
32016 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
32017 {
32018 tree decl = node->created_for;
32019 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
32020 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
32021 ended up in deferred_asm_name before we knew it was
32022 constant and never written to disk. */
32023 && DECL_ASSEMBLER_NAME (decl))
32024 {
32025 add_linkage_attr (node->die, decl);
32026 move_linkage_attr (node->die);
32027 }
32028 }
32029 deferred_asm_name = NULL;
32030
32031 if (flag_eliminate_unused_debug_types)
32032 prune_unused_types ();
32033
32034 /* Generate separate COMDAT sections for type DIEs. */
32035 if (use_debug_types)
32036 {
32037 break_out_comdat_types (comp_unit_die ());
32038
32039 /* Each new type_unit DIE was added to the limbo die list when created.
32040 Since these have all been added to comdat_type_list, clear the
32041 limbo die list. */
32042 limbo_die_list = NULL;
32043
32044 /* For each new comdat type unit, copy declarations for incomplete
32045 types to make the new unit self-contained (i.e., no direct
32046 references to the main compile unit). */
32047 for (comdat_type_node *ctnode = comdat_type_list;
32048 ctnode != NULL; ctnode = ctnode->next)
32049 copy_decls_for_unworthy_types (ctnode->root_die);
32050 copy_decls_for_unworthy_types (comp_unit_die ());
32051
32052 /* In the process of copying declarations from one unit to another,
32053 we may have left some declarations behind that are no longer
32054 referenced. Prune them. */
32055 prune_unused_types ();
32056 }
32057
32058   /* Traverse the DIE's and note those that still have a
32059      DW_OP_GNU_variable_value with a dw_val_class_decl_ref operand.  */
32060 note_variable_value (comp_unit_die ());
32061 for (limbo_die_node *node = cu_die_list; node; node = node->next)
32062 note_variable_value (node->die);
32063 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
32064 ctnode = ctnode->next)
32065 note_variable_value (ctnode->root_die);
32066 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32067 note_variable_value (node->die);
32068
32069 /* The AT_pubnames attribute needs to go in all skeleton dies, including
32070 both the main_cu and all skeleton TUs. Making this call unconditional
32071 would end up either adding a second copy of the AT_pubnames attribute, or
32072 requiring a special case in add_top_level_skeleton_die_attrs. */
32073 if (!dwarf_split_debug_info)
32074 add_AT_pubnames (comp_unit_die ());
32075
32076 /* The early debug phase is now finished. */
32077 early_dwarf_finished = true;
32078 if (dump_file)
32079 {
32080 fprintf (dump_file, "EARLY DWARF for %s\n", filename);
32081 print_die (comp_unit_die (), dump_file);
32082 }
32083
32084 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
32085 if ((!flag_generate_lto && !flag_generate_offload)
32086 /* FIXME: Disable debug info generation for (PE-)COFF targets since the
32087 copy_lto_debug_sections operation of the simple object support in
32088 libiberty is not implemented for them yet. */
32089 || TARGET_PECOFF || TARGET_COFF)
32090 return;
32091
32092   /* Now, as we are going to output for LTO, initialize sections and labels
32093      to the LTO variants.  We don't need a random-seed postfix like other
32094      LTO sections, as linking the LTO debug sections into one in a partial
32095      link is fine.  */
32096 init_sections_and_labels (true);
32097
32098 /* The output below is modeled after dwarf2out_finish with all
32099 location related output removed and some LTO specific changes.
32100 Some refactoring might make both smaller and easier to match up. */
32101
32102   /* Traverse the DIE's and add sibling attributes to those DIE's
32103      that have children.  */
32104 add_sibling_attributes (comp_unit_die ());
32105 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32106 add_sibling_attributes (node->die);
32107 for (comdat_type_node *ctnode = comdat_type_list;
32108 ctnode != NULL; ctnode = ctnode->next)
32109 add_sibling_attributes (ctnode->root_die);
32110
32111 /* AIX Assembler inserts the length, so adjust the reference to match the
32112 offset expected by debuggers. */
32113 strcpy (dl_section_ref, debug_line_section_label);
32114 if (XCOFF_DEBUGGING_INFO)
32115 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
32116
32117 if (debug_info_level >= DINFO_LEVEL_TERSE)
32118 add_AT_lineptr (comp_unit_die (), DW_AT_stmt_list, dl_section_ref);
32119
32120 if (have_macinfo)
32121 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
32122 macinfo_section_label);
32123
32124 save_macinfo_strings ();
32125
32126 if (dwarf_split_debug_info)
32127 {
32128 unsigned int index = 0;
32129 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
32130 }
32131
32132 /* Output all of the compilation units. We put the main one last so that
32133 the offsets are available to output_pubnames. */
32134 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32135 output_comp_unit (node->die, 0, NULL);
32136
32137 hash_table<comdat_type_hasher> comdat_type_table (100);
32138 for (comdat_type_node *ctnode = comdat_type_list;
32139 ctnode != NULL; ctnode = ctnode->next)
32140 {
32141 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
32142
32143 /* Don't output duplicate types. */
32144 if (*slot != HTAB_EMPTY_ENTRY)
32145 continue;
32146
32147 /* Add a pointer to the line table for the main compilation unit
32148 so that the debugger can make sense of DW_AT_decl_file
32149 attributes. */
32150 if (debug_info_level >= DINFO_LEVEL_TERSE)
32151 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
32152 (!dwarf_split_debug_info
32153 ? debug_line_section_label
32154 : debug_skeleton_line_section_label));
32155
32156 output_comdat_type_unit (ctnode);
32157 *slot = ctnode;
32158 }
32159
32160 /* Stick a unique symbol to the main debuginfo section. */
32161 compute_comp_unit_symbol (comp_unit_die ());
32162
32163 /* Output the main compilation unit. We always need it if only for
32164 the CU symbol. */
32165 output_comp_unit (comp_unit_die (), true, NULL);
32166
32167 /* Output the abbreviation table. */
32168 if (vec_safe_length (abbrev_die_table) != 1)
32169 {
32170 switch_to_section (debug_abbrev_section);
32171 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
32172 output_abbrev_section ();
32173 }
32174
32175 /* Have to end the macro section. */
32176 if (have_macinfo)
32177 {
32178 /* We have to save macinfo state if we need to output it again
32179 for the FAT part of the object. */
32180 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
32181 if (flag_fat_lto_objects)
32182 macinfo_table = macinfo_table->copy ();
32183
32184 switch_to_section (debug_macinfo_section);
32185 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
32186 output_macinfo (debug_line_section_label, true);
32187 dw2_asm_output_data (1, 0, "End compilation unit");
32188
32189 if (flag_fat_lto_objects)
32190 {
32191 vec_free (macinfo_table);
32192 macinfo_table = saved_macinfo_table;
32193 }
32194 }
32195
32196 /* Emit a skeleton debug_line section. */
32197 switch_to_section (debug_line_section);
32198 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
32199 output_line_info (true);
32200
32201 /* If we emitted any indirect strings, output the string table too. */
32202 if (debug_str_hash || skeleton_debug_str_hash)
32203 output_indirect_strings ();
32204 if (debug_line_str_hash)
32205 {
32206 switch_to_section (debug_line_str_section);
32207 const enum dwarf_form form = DW_FORM_line_strp;
32208 debug_line_str_hash->traverse<enum dwarf_form,
32209 output_indirect_string> (form);
32210 }
32211
32212 /* Switch back to the text section. */
32213 switch_to_section (text_section);
32214 }
32215
32216 /* Reset all state within dwarf2out.c so that we can rerun the compiler
32217 within the same process. For use by toplev::finalize. */
32218
32219 void
32220 dwarf2out_c_finalize (void)
32221 {
32222 last_var_location_insn = NULL;
32223 cached_next_real_insn = NULL;
32224 used_rtx_array = NULL;
32225 incomplete_types = NULL;
32226 debug_info_section = NULL;
32227 debug_skeleton_info_section = NULL;
32228 debug_abbrev_section = NULL;
32229 debug_skeleton_abbrev_section = NULL;
32230 debug_aranges_section = NULL;
32231 debug_addr_section = NULL;
32232 debug_macinfo_section = NULL;
32233 debug_line_section = NULL;
32234 debug_skeleton_line_section = NULL;
32235 debug_loc_section = NULL;
32236 debug_pubnames_section = NULL;
32237 debug_pubtypes_section = NULL;
32238 debug_str_section = NULL;
32239 debug_line_str_section = NULL;
32240 debug_str_dwo_section = NULL;
32241 debug_str_offsets_section = NULL;
32242 debug_ranges_section = NULL;
32243 debug_frame_section = NULL;
32244 fde_vec = NULL;
32245 debug_str_hash = NULL;
32246 debug_line_str_hash = NULL;
32247 skeleton_debug_str_hash = NULL;
32248 dw2_string_counter = 0;
32249 have_multiple_function_sections = false;
32250 text_section_used = false;
32251 cold_text_section_used = false;
32252 cold_text_section = NULL;
32253 current_unit_personality = NULL;
32254
32255 early_dwarf = false;
32256 early_dwarf_finished = false;
32257
32258 next_die_offset = 0;
32259 single_comp_unit_die = NULL;
32260 comdat_type_list = NULL;
32261 limbo_die_list = NULL;
32262 file_table = NULL;
32263 decl_die_table = NULL;
32264 common_block_die_table = NULL;
32265 decl_loc_table = NULL;
32266 call_arg_locations = NULL;
32267 call_arg_loc_last = NULL;
32268 call_site_count = -1;
32269 tail_call_site_count = -1;
32270 cached_dw_loc_list_table = NULL;
32271 abbrev_die_table = NULL;
32272 delete dwarf_proc_stack_usage_map;
32273 dwarf_proc_stack_usage_map = NULL;
32274 line_info_label_num = 0;
32275 cur_line_info_table = NULL;
32276 text_section_line_info = NULL;
32277 cold_text_section_line_info = NULL;
32278 separate_line_info = NULL;
32279 info_section_emitted = false;
32280 pubname_table = NULL;
32281 pubtype_table = NULL;
32282 macinfo_table = NULL;
32283 ranges_table = NULL;
32284 ranges_by_label = NULL;
32285 rnglist_idx = 0;
32286 have_location_lists = false;
32287 loclabel_num = 0;
32288 poc_label_num = 0;
32289 last_emitted_file = NULL;
32290 label_num = 0;
32291 tmpl_value_parm_die_table = NULL;
32292 generic_type_instances = NULL;
32293 frame_pointer_fb_offset = 0;
32294 frame_pointer_fb_offset_valid = false;
32295 base_types.release ();
32296 XDELETEVEC (producer_string);
32297 producer_string = NULL;
32298 }
32299
32300 #include "gt-dwarf2out.h"