1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2017 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
47 	 information common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
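/* As an illustration of how the CFA and CFI interact (hypothetical
   target and register numbers, not output produced by this file):
   on a machine whose call instruction pushes the return address, the
   CFA starts out at SP + address-size, and a typical prologue might
   be described by

     DW_CFA_def_cfa: r7 ofs 8         <- CFA = SP + 8 on entry
     DW_CFA_def_cfa_offset: 16        <- after the FP push, CFA = SP + 16
     DW_CFA_offset: r6 at cfa-16      <- the caller's FP is saved at CFA - 16
     DW_CFA_def_cfa_register: r6      <- once FP is set up, CFA = FP + 16

   so the unwinder can recompute the CFA at any point in the body.  */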
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "debug.h"
87 #include "common/common-target.h"
88 #include "langhooks.h"
89 #include "lra.h"
90 #include "dumpfile.h"
91 #include "opts.h"
92 #include "tree-dfa.h"
93 #include "gdb/gdb-index.h"
94 #include "rtl-iter.h"
95
96 static void dwarf2out_source_line (unsigned int, const char *, int, bool);
97 static rtx_insn *last_var_location_insn;
98 static rtx_insn *cached_next_real_insn;
99 static void dwarf2out_decl (tree);
100
101 #ifndef XCOFF_DEBUGGING_INFO
102 #define XCOFF_DEBUGGING_INFO 0
103 #endif
104
105 #ifndef HAVE_XCOFF_DWARF_EXTRAS
106 #define HAVE_XCOFF_DWARF_EXTRAS 0
107 #endif
108
109 #ifdef VMS_DEBUGGING_INFO
110 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
111
112 /* Define this macro to be a nonzero value if the directory specifications
113 which are output in the debug info should end with a separator. */
114 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
115 /* Define this macro to evaluate to a nonzero value if GCC should refrain
116 from generating indirect strings in DWARF2 debug information, for instance
117 if your target is stuck with an old version of GDB that is unable to
118 process them properly or uses VMS Debug. */
119 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
120 #else
121 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
122 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
123 #endif
124
125 /* ??? Poison these here until it can be done generically. They've been
126 totally replaced in this file; make sure it stays that way. */
127 #undef DWARF2_UNWIND_INFO
128 #undef DWARF2_FRAME_INFO
129 #if (GCC_VERSION >= 3000)
130 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
131 #endif
132
133 /* The size of the target's pointer type. */
134 #ifndef PTR_SIZE
135 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
136 #endif
137
138 /* Array of RTXes referenced by the debugging information, which therefore
139 must be kept around forever. */
140 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
141
142 /* A pointer to the base of a list of incomplete types which might be
143 completed at some later time. incomplete_types needs to be a
144 vec<tree, va_gc> * because we want to tell the garbage collector about
145 it. */
146 static GTY(()) vec<tree, va_gc> *incomplete_types;
147
148 /* A pointer to the base of a table of references to declaration
149 scopes. This table is a display which tracks the nesting
150 of declaration scopes at the current scope and containing
151 scopes. This table is used to find the proper place to
152 define type declaration DIEs. */
153 static GTY(()) vec<tree, va_gc> *decl_scope_table;
154
155 /* Pointers to various DWARF2 sections. */
156 static GTY(()) section *debug_info_section;
157 static GTY(()) section *debug_skeleton_info_section;
158 static GTY(()) section *debug_abbrev_section;
159 static GTY(()) section *debug_skeleton_abbrev_section;
160 static GTY(()) section *debug_aranges_section;
161 static GTY(()) section *debug_addr_section;
162 static GTY(()) section *debug_macinfo_section;
163 static const char *debug_macinfo_section_name;
164 static GTY(()) section *debug_line_section;
165 static GTY(()) section *debug_skeleton_line_section;
166 static GTY(()) section *debug_loc_section;
167 static GTY(()) section *debug_pubnames_section;
168 static GTY(()) section *debug_pubtypes_section;
169 static GTY(()) section *debug_str_section;
170 static GTY(()) section *debug_line_str_section;
171 static GTY(()) section *debug_str_dwo_section;
172 static GTY(()) section *debug_str_offsets_section;
173 static GTY(()) section *debug_ranges_section;
174 static GTY(()) section *debug_frame_section;
175
176 /* Maximum size (in bytes) of an artificially generated label. */
177 #define MAX_ARTIFICIAL_LABEL_BYTES 30
178
179 /* According to the (draft) DWARF 3 specification, the initial length
180 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
181 bytes are 0xffffffff, followed by the length stored in the next 8
182 bytes.
183
184 However, the SGI/MIPS ABI uses an initial length which is equal to
185 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
186
187 #ifndef DWARF_INITIAL_LENGTH_SIZE
188 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
189 #endif
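/* A byte-level sketch of the two layouts described above (nothing new,
   just the shapes):

     32-bit DWARF:  [length:4]
     64-bit DWARF:  [0xffffffff:4][length:8]

   so DWARF_INITIAL_LENGTH_SIZE is 4 in the first case and 12 in the
   second, except on targets that override it.  */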
190
191 /* Round SIZE up to the nearest multiple of BOUNDARY. */
192 #define DWARF_ROUND(SIZE,BOUNDARY) \
193 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
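/* Worked example of the macro above: DWARF_ROUND (10, 4) is
   ((10 + 3) / 4) * 4 == 12, and DWARF_ROUND (12, 4) stays 12.  */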
194
195 /* CIE identifier. */
196 #if HOST_BITS_PER_WIDE_INT >= 64
197 #define DWARF_CIE_ID \
198 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
199 #else
200 #define DWARF_CIE_ID DW_CIE_ID
201 #endif
202
203
204 /* A vector for a table that contains frame description
205 information for each routine. */
206 #define NOT_INDEXED (-1U)
207 #define NO_INDEX_ASSIGNED (-2U)
208
209 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
210
211 struct GTY((for_user)) indirect_string_node {
212 const char *str;
213 unsigned int refcount;
214 enum dwarf_form form;
215 char *label;
216 unsigned int index;
217 };
218
219 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
220 {
221 typedef const char *compare_type;
222
223 static hashval_t hash (indirect_string_node *);
224 static bool equal (indirect_string_node *, const char *);
225 };
226
227 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
228
229 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
230
231 /* With split_debug_info, both the comp_dir and dwo_name go in the
232 main object file, rather than the dwo, similar to the force_direct
233 parameter elsewhere but with additional complications:
234
235 1) The string is needed in both the main object file and the dwo.
236 That is, the comp_dir and dwo_name will appear in both places.
237
238 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
239 DW_FORM_line_strp or DW_FORM_GNU_str_index.
240
241 3) GCC chooses the form to use late, depending on the size and
242 reference count.
243
244 Rather than forcing all the debug string handling functions and
245 callers to deal with these complications, simply use a separate,
246 special-cased string table for any attribute that should go in the
247 main object file. This limits the complexity to just the places
248 that need it. */
249
250 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
251
252 static GTY(()) int dw2_string_counter;
253
254 /* True if the compilation unit places functions in more than one section. */
255 static GTY(()) bool have_multiple_function_sections = false;
256
257 /* Whether the default text and cold text sections have been used at all. */
258
259 static GTY(()) bool text_section_used = false;
260 static GTY(()) bool cold_text_section_used = false;
261
262 /* The default cold text section. */
263 static GTY(()) section *cold_text_section;
264
265 /* The DIE for C++14 'auto' in a function return type. */
266 static GTY(()) dw_die_ref auto_die;
267
268 /* The DIE for C++14 'decltype(auto)' in a function return type. */
269 static GTY(()) dw_die_ref decltype_auto_die;
270
271 /* Forward declarations for functions defined in this file. */
272
273 static void output_call_frame_info (int);
274 static void dwarf2out_note_section_used (void);
275
276 /* Personality decl of current unit. Used only when assembler does not support
277 personality CFI. */
278 static GTY(()) rtx current_unit_personality;
279
280 /* .debug_rnglists next index. */
281 static unsigned int rnglist_idx;
282
283 /* Data and reference forms for relocatable data. */
284 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
285 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
286
287 #ifndef DEBUG_FRAME_SECTION
288 #define DEBUG_FRAME_SECTION ".debug_frame"
289 #endif
290
291 #ifndef FUNC_BEGIN_LABEL
292 #define FUNC_BEGIN_LABEL "LFB"
293 #endif
294
295 #ifndef FUNC_END_LABEL
296 #define FUNC_END_LABEL "LFE"
297 #endif
298
299 #ifndef PROLOGUE_END_LABEL
300 #define PROLOGUE_END_LABEL "LPE"
301 #endif
302
303 #ifndef EPILOGUE_BEGIN_LABEL
304 #define EPILOGUE_BEGIN_LABEL "LEB"
305 #endif
306
307 #ifndef FRAME_BEGIN_LABEL
308 #define FRAME_BEGIN_LABEL "Lframe"
309 #endif
310 #define CIE_AFTER_SIZE_LABEL "LSCIE"
311 #define CIE_END_LABEL "LECIE"
312 #define FDE_LABEL "LSFDE"
313 #define FDE_AFTER_SIZE_LABEL "LASFDE"
314 #define FDE_END_LABEL "LEFDE"
315 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
316 #define LINE_NUMBER_END_LABEL "LELT"
317 #define LN_PROLOG_AS_LABEL "LASLTP"
318 #define LN_PROLOG_END_LABEL "LELTP"
319 #define DIE_LABEL_PREFIX "DW"
320 \f
321 /* Match the base name of a file to the base name of a compilation unit. */
322
323 static int
324 matches_main_base (const char *path)
325 {
326 /* Cache the last query. */
327 static const char *last_path = NULL;
328 static int last_match = 0;
329 if (path != last_path)
330 {
331 const char *base;
332 int length = base_of_path (path, &base);
333 last_path = path;
334 last_match = (length == main_input_baselength
335 && memcmp (base, main_input_basename, length) == 0);
336 }
337 return last_match;
338 }
339
340 #ifdef DEBUG_DEBUG_STRUCT
341
342 static int
343 dump_struct_debug (tree type, enum debug_info_usage usage,
344 enum debug_struct_file criterion, int generic,
345 int matches, int result)
346 {
347 /* Find the type name. */
348 tree type_decl = TYPE_STUB_DECL (type);
349 tree t = type_decl;
350 const char *name = 0;
351 if (TREE_CODE (t) == TYPE_DECL)
352 t = DECL_NAME (t);
353 if (t)
354 name = IDENTIFIER_POINTER (t);
355
356 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
357 criterion,
358 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
359 matches ? "bas" : "hdr",
360 generic ? "gen" : "ord",
361 usage == DINFO_USAGE_DFN ? ";" :
362 usage == DINFO_USAGE_DIR_USE ? "." : "*",
363 result,
364 (void*) type_decl, name);
365 return result;
366 }
367 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
368 dump_struct_debug (type, usage, criterion, generic, matches, result)
369
370 #else
371
372 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
373 (result)
374
375 #endif
376
377 /* Get the number of HOST_WIDE_INTs needed to represent the precision
378 of the number. Some constants have a large uniform precision, so
379 we get the precision needed for the actual value of the number. */
380
381 static unsigned int
382 get_full_len (const wide_int &op)
383 {
384 int prec = wi::min_precision (op, UNSIGNED);
385 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
386 / HOST_BITS_PER_WIDE_INT);
387 }
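/* For example, assuming 64-bit HOST_WIDE_INTs, a constant whose minimum
   precision is 70 bits needs (70 + 63) / 64 == 2 elements, while a small
   value such as 5 (minimum precision 3) needs only 1.  */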
388
389 static bool
390 should_emit_struct_debug (tree type, enum debug_info_usage usage)
391 {
392 enum debug_struct_file criterion;
393 tree type_decl;
394 bool generic = lang_hooks.types.generic_p (type);
395
396 if (generic)
397 criterion = debug_struct_generic[usage];
398 else
399 criterion = debug_struct_ordinary[usage];
400
401 if (criterion == DINFO_STRUCT_FILE_NONE)
402 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
403 if (criterion == DINFO_STRUCT_FILE_ANY)
404 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
405
406 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
407
408 if (type_decl != NULL)
409 {
410 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
411 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
412
413 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
414 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
415 }
416
417 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
418 }
419 \f
420 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
421 switch to the data section instead, and write out a synthetic start label
422 for collect2 the first time around. */
423
424 static void
425 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
426 {
427 if (eh_frame_section == 0)
428 {
429 int flags;
430
431 if (EH_TABLES_CAN_BE_READ_ONLY)
432 {
433 int fde_encoding;
434 int per_encoding;
435 int lsda_encoding;
436
437 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
438 /*global=*/0);
439 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
440 /*global=*/1);
441 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
442 /*global=*/0);
443 flags = ((! flag_pic
444 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
445 && (fde_encoding & 0x70) != DW_EH_PE_aligned
446 && (per_encoding & 0x70) != DW_EH_PE_absptr
447 && (per_encoding & 0x70) != DW_EH_PE_aligned
448 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
449 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
450 ? 0 : SECTION_WRITE);
451 }
452 else
453 flags = SECTION_WRITE;
454
455 #ifdef EH_FRAME_SECTION_NAME
456 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
457 #else
458 eh_frame_section = ((flags == SECTION_WRITE)
459 ? data_section : readonly_data_section);
460 #endif /* EH_FRAME_SECTION_NAME */
461 }
462
463 switch_to_section (eh_frame_section);
464
465 #ifdef EH_FRAME_THROUGH_COLLECT2
466 /* We have no special eh_frame section. Emit special labels to guide
467 collect2. */
468 if (!back)
469 {
470 tree label = get_file_function_name ("F");
471 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
472 targetm.asm_out.globalize_label (asm_out_file,
473 IDENTIFIER_POINTER (label));
474 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
475 }
476 #endif
477 }
478
479 /* Switch [BACK] to the eh or debug frame table section, depending on
480 FOR_EH. */
481
482 static void
483 switch_to_frame_table_section (int for_eh, bool back)
484 {
485 if (for_eh)
486 switch_to_eh_frame_section (back);
487 else
488 {
489 if (!debug_frame_section)
490 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
491 SECTION_DEBUG, NULL);
492 switch_to_section (debug_frame_section);
493 }
494 }
495
496 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
497
498 enum dw_cfi_oprnd_type
499 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
500 {
501 switch (cfi)
502 {
503 case DW_CFA_nop:
504 case DW_CFA_GNU_window_save:
505 case DW_CFA_remember_state:
506 case DW_CFA_restore_state:
507 return dw_cfi_oprnd_unused;
508
509 case DW_CFA_set_loc:
510 case DW_CFA_advance_loc1:
511 case DW_CFA_advance_loc2:
512 case DW_CFA_advance_loc4:
513 case DW_CFA_MIPS_advance_loc8:
514 return dw_cfi_oprnd_addr;
515
516 case DW_CFA_offset:
517 case DW_CFA_offset_extended:
518 case DW_CFA_def_cfa:
519 case DW_CFA_offset_extended_sf:
520 case DW_CFA_def_cfa_sf:
521 case DW_CFA_restore:
522 case DW_CFA_restore_extended:
523 case DW_CFA_undefined:
524 case DW_CFA_same_value:
525 case DW_CFA_def_cfa_register:
526 case DW_CFA_register:
527 case DW_CFA_expression:
528 case DW_CFA_val_expression:
529 return dw_cfi_oprnd_reg_num;
530
531 case DW_CFA_def_cfa_offset:
532 case DW_CFA_GNU_args_size:
533 case DW_CFA_def_cfa_offset_sf:
534 return dw_cfi_oprnd_offset;
535
536 case DW_CFA_def_cfa_expression:
537 return dw_cfi_oprnd_loc;
538
539 default:
540 gcc_unreachable ();
541 }
542 }
543
544 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
545
546 enum dw_cfi_oprnd_type
547 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
548 {
549 switch (cfi)
550 {
551 case DW_CFA_def_cfa:
552 case DW_CFA_def_cfa_sf:
553 case DW_CFA_offset:
554 case DW_CFA_offset_extended_sf:
555 case DW_CFA_offset_extended:
556 return dw_cfi_oprnd_offset;
557
558 case DW_CFA_register:
559 return dw_cfi_oprnd_reg_num;
560
561 case DW_CFA_expression:
562 case DW_CFA_val_expression:
563 return dw_cfi_oprnd_loc;
564
565 default:
566 return dw_cfi_oprnd_unused;
567 }
568 }
569
570 /* Output one FDE. */
571
572 static void
573 output_fde (dw_fde_ref fde, bool for_eh, bool second,
574 char *section_start_label, int fde_encoding, char *augmentation,
575 bool any_lsda_needed, int lsda_encoding)
576 {
577 const char *begin, *end;
578 static unsigned int j;
579 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
580
581 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
582 /* empty */ 0);
583 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
584 for_eh + j);
585 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
586 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
587 if (!XCOFF_DEBUGGING_INFO || for_eh)
588 {
589 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
590 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
591 " indicating 64-bit DWARF extension");
592 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
593 "FDE Length");
594 }
595 ASM_OUTPUT_LABEL (asm_out_file, l1);
596
597 if (for_eh)
598 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
599 else
600 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
601 debug_frame_section, "FDE CIE offset");
602
603 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
604 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
605
606 if (for_eh)
607 {
608 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
609 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
610 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
611 "FDE initial location");
612 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
613 end, begin, "FDE address range");
614 }
615 else
616 {
617 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
618 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
619 }
620
621 if (augmentation[0])
622 {
623 if (any_lsda_needed)
624 {
625 int size = size_of_encoded_value (lsda_encoding);
626
627 if (lsda_encoding == DW_EH_PE_aligned)
628 {
629 int offset = ( 4 /* Length */
630 + 4 /* CIE offset */
631 + 2 * size_of_encoded_value (fde_encoding)
632 + 1 /* Augmentation size */ );
633 int pad = -offset & (PTR_SIZE - 1);
634
635 size += pad;
636 gcc_assert (size_of_uleb128 (size) == 1);
637 }
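	  /* The padding above rounds the running offset up to a PTR_SIZE
	     boundary; e.g. with PTR_SIZE == 8 and offset == 11 we get
	     pad == (-11 & 7) == 5, so offset + pad == 16.  Illustrative
	     numbers only; the real offset depends on the encodings.  */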
638
639 dw2_asm_output_data_uleb128 (size, "Augmentation size");
640
641 if (fde->uses_eh_lsda)
642 {
643 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
644 fde->funcdef_number);
645 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
646 gen_rtx_SYMBOL_REF (Pmode, l1),
647 false,
648 "Language Specific Data Area");
649 }
650 else
651 {
652 if (lsda_encoding == DW_EH_PE_aligned)
653 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
654 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
655 "Language Specific Data Area (none)");
656 }
657 }
658 else
659 dw2_asm_output_data_uleb128 (0, "Augmentation size");
660 }
661
662 /* Loop through the Call Frame Instructions associated with this FDE. */
663 fde->dw_fde_current_label = begin;
664 {
665 size_t from, until, i;
666
667 from = 0;
668 until = vec_safe_length (fde->dw_fde_cfi);
669
670 if (fde->dw_fde_second_begin == NULL)
671 ;
672 else if (!second)
673 until = fde->dw_fde_switch_cfi_index;
674 else
675 from = fde->dw_fde_switch_cfi_index;
676
677 for (i = from; i < until; i++)
678 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
679 }
680
681 /* If we are to emit a ref/link from function bodies to their frame tables,
682 do it now. This is typically performed to make sure that tables
683 associated with functions are dragged with them and not discarded by
684 garbage-collecting links. We need to do this on a per-function basis to
685 cope with -ffunction-sections. */
686
687 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
688 /* Switch to the function section, emit the ref to the tables, and
689 switch *back* into the table section. */
690 switch_to_section (function_section (fde->decl));
691 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
692 switch_to_frame_table_section (for_eh, true);
693 #endif
694
695 /* Pad the FDE out to an address sized boundary. */
696 ASM_OUTPUT_ALIGN (asm_out_file,
697 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
698 ASM_OUTPUT_LABEL (asm_out_file, l2);
699
700 j += 2;
701 }
702
703 /* Return true if frame description entry FDE is needed for EH. */
704
705 static bool
706 fde_needed_for_eh_p (dw_fde_ref fde)
707 {
708 if (flag_asynchronous_unwind_tables)
709 return true;
710
711 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
712 return true;
713
714 if (fde->uses_eh_lsda)
715 return true;
716
717 /* If exceptions are enabled, we have collected nothrow info. */
718 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
719 return false;
720
721 return true;
722 }
723
724 /* Output the call frame information, which records how to
725 calculate the frame pointer and the locations of the
726 saved registers. */
727
728 static void
729 output_call_frame_info (int for_eh)
730 {
731 unsigned int i;
732 dw_fde_ref fde;
733 dw_cfi_ref cfi;
734 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
735 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
736 bool any_lsda_needed = false;
737 char augmentation[6];
738 int augmentation_size;
739 int fde_encoding = DW_EH_PE_absptr;
740 int per_encoding = DW_EH_PE_absptr;
741 int lsda_encoding = DW_EH_PE_absptr;
742 int return_reg;
743 rtx personality = NULL;
744 int dw_cie_version;
745
746 /* Don't emit a CIE if there won't be any FDEs. */
747 if (!fde_vec)
748 return;
749
750 /* Nothing to do if the assembler's doing it all. */
751 if (dwarf2out_do_cfi_asm ())
752 return;
753
754 /* If we don't have any functions we'll want to unwind out of, don't emit
755 any EH unwind information. If we make FDEs linkonce, we may have to
756 emit an empty label for an FDE that wouldn't otherwise be emitted. We
757 want to avoid having an FDE kept around when the function it refers to
758 is discarded. Example where this matters: a primary function template
759 in C++ requires EH information, an explicit specialization doesn't. */
760 if (for_eh)
761 {
762 bool any_eh_needed = false;
763
764 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
765 {
766 if (fde->uses_eh_lsda)
767 any_eh_needed = any_lsda_needed = true;
768 else if (fde_needed_for_eh_p (fde))
769 any_eh_needed = true;
770 else if (TARGET_USES_WEAK_UNWIND_INFO)
771 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
772 }
773
774 if (!any_eh_needed)
775 return;
776 }
777
778 /* We're going to be generating comments, so turn on app. */
779 if (flag_debug_asm)
780 app_enable ();
781
782 /* Switch to the proper frame section, first time. */
783 switch_to_frame_table_section (for_eh, false);
784
785 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
786 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
787
788 /* Output the CIE. */
789 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
790 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
791 if (!XCOFF_DEBUGGING_INFO || for_eh)
792 {
793 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
794 dw2_asm_output_data (4, 0xffffffff,
795 "Initial length escape value indicating 64-bit DWARF extension");
796 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
797 "Length of Common Information Entry");
798 }
799 ASM_OUTPUT_LABEL (asm_out_file, l1);
800
801 /* Now that the CIE pointer is PC-relative for EH,
802 use 0 to identify the CIE. */
803 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
804 (for_eh ? 0 : DWARF_CIE_ID),
805 "CIE Identifier Tag");
806
807 /* Use CIE version 3 for DWARF3; allow DWARF2 to continue to
808 use CIE version 1, unless that would produce incorrect results
809 due to overflowing the return register column. */
810 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
811 dw_cie_version = 1;
812 if (return_reg >= 256 || dwarf_version > 2)
813 dw_cie_version = 3;
814 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
815
816 augmentation[0] = 0;
817 augmentation_size = 0;
818
819 personality = current_unit_personality;
820 if (for_eh)
821 {
822 char *p;
823
824 /* Augmentation:
825 z Indicates that a uleb128 is present to size the
826 augmentation section.
827 L Indicates the encoding (and thus presence) of
828 an LSDA pointer in the FDE augmentation.
829 R Indicates a non-default pointer encoding for
830 FDE code pointers.
831 P Indicates the presence of an encoding + language
832 personality routine in the CIE augmentation. */
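      /* For example, a unit with a personality routine, LSDA uses and a
	 non-absolute FDE encoding ends up with the augmentation string
	 "zPLR"; the corresponding augmentation data is then emitted below
	 in the same order: personality encoding and pointer, LSDA
	 encoding, FDE encoding.  (Example combination only.)  */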
833
834 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
835 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
836 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
837
838 p = augmentation + 1;
839 if (personality)
840 {
841 *p++ = 'P';
842 augmentation_size += 1 + size_of_encoded_value (per_encoding);
843 assemble_external_libcall (personality);
844 }
845 if (any_lsda_needed)
846 {
847 *p++ = 'L';
848 augmentation_size += 1;
849 }
850 if (fde_encoding != DW_EH_PE_absptr)
851 {
852 *p++ = 'R';
853 augmentation_size += 1;
854 }
855 if (p > augmentation + 1)
856 {
857 augmentation[0] = 'z';
858 *p = '\0';
859 }
860
861 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
862 if (personality && per_encoding == DW_EH_PE_aligned)
863 {
864 int offset = ( 4 /* Length */
865 + 4 /* CIE Id */
866 + 1 /* CIE version */
867 + strlen (augmentation) + 1 /* Augmentation */
868 + size_of_uleb128 (1) /* Code alignment */
869 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
870 + 1 /* RA column */
871 + 1 /* Augmentation size */
872 + 1 /* Personality encoding */ );
873 int pad = -offset & (PTR_SIZE - 1);
874
875 augmentation_size += pad;
876
877 /* Augmentations should be small, so there's scarce need to
878 iterate for a solution. Die if we exceed one uleb128 byte. */
879 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
880 }
881 }
882
883 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
884 if (dw_cie_version >= 4)
885 {
886 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
887 dw2_asm_output_data (1, 0, "CIE Segment Size");
888 }
889 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
890 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
891 "CIE Data Alignment Factor");
892
893 if (dw_cie_version == 1)
894 dw2_asm_output_data (1, return_reg, "CIE RA Column");
895 else
896 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
897
898 if (augmentation[0])
899 {
900 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
901 if (personality)
902 {
903 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
904 eh_data_format_name (per_encoding));
905 dw2_asm_output_encoded_addr_rtx (per_encoding,
906 personality,
907 true, NULL);
908 }
909
910 if (any_lsda_needed)
911 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
912 eh_data_format_name (lsda_encoding));
913
914 if (fde_encoding != DW_EH_PE_absptr)
915 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
916 eh_data_format_name (fde_encoding));
917 }
918
919 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
920 output_cfi (cfi, NULL, for_eh);
921
922 /* Pad the CIE out to an address sized boundary. */
923 ASM_OUTPUT_ALIGN (asm_out_file,
924 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
925 ASM_OUTPUT_LABEL (asm_out_file, l2);
926
927 /* Loop through all of the FDEs. */
928 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
929 {
930 unsigned int k;
931
932 /* Don't emit EH unwind info for leaf functions that don't need it. */
933 if (for_eh && !fde_needed_for_eh_p (fde))
934 continue;
935
936 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
937 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
938 augmentation, any_lsda_needed, lsda_encoding);
939 }
940
941 if (for_eh && targetm.terminate_dw2_eh_frame_info)
942 dw2_asm_output_data (4, 0, "End of Table");
943
944 /* Turn off app to make assembly quicker. */
945 if (flag_debug_asm)
946 app_disable ();
947 }
948
949 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
950
951 static void
952 dwarf2out_do_cfi_startproc (bool second)
953 {
954 int enc;
955 rtx ref;
956 rtx personality = get_personality_function (current_function_decl);
957
958 fprintf (asm_out_file, "\t.cfi_startproc\n");
959
960 if (personality)
961 {
962 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
963 ref = personality;
964
965 /* ??? The GAS support isn't entirely consistent. We have to
966 handle indirect support ourselves, but PC-relative is done
967 in the assembler. Further, the assembler can't handle any
968 of the weirder relocation types. */
969 if (enc & DW_EH_PE_indirect)
970 ref = dw2_force_const_mem (ref, true);
971
972 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
973 output_addr_const (asm_out_file, ref);
974 fputc ('\n', asm_out_file);
975 }
976
977 if (crtl->uses_eh_lsda)
978 {
979 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
980
981 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
982 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
983 current_function_funcdef_no);
984 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
985 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
986
987 if (enc & DW_EH_PE_indirect)
988 ref = dw2_force_const_mem (ref, true);
989
990 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
991 output_addr_const (asm_out_file, ref);
992 fputc ('\n', asm_out_file);
993 }
994 }
995
996 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
997 this allocation may be done before pass_final. */
998
999 dw_fde_ref
1000 dwarf2out_alloc_current_fde (void)
1001 {
1002 dw_fde_ref fde;
1003
1004 fde = ggc_cleared_alloc<dw_fde_node> ();
1005 fde->decl = current_function_decl;
1006 fde->funcdef_number = current_function_funcdef_no;
1007 fde->fde_index = vec_safe_length (fde_vec);
1008 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1009 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1010 fde->nothrow = crtl->nothrow;
1011 fde->drap_reg = INVALID_REGNUM;
1012 fde->vdrap_reg = INVALID_REGNUM;
1013
1014 /* Record the FDE associated with this function. */
1015 cfun->fde = fde;
1016 vec_safe_push (fde_vec, fde);
1017
1018 return fde;
1019 }
1020
1021 /* Output a marker (i.e. a label) for the beginning of a function, before
1022 the prologue. */
1023
1024 void
1025 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1026 const char *file ATTRIBUTE_UNUSED)
1027 {
1028 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1029 char * dup_label;
1030 dw_fde_ref fde;
1031 section *fnsec;
1032 bool do_frame;
1033
1034 current_function_func_begin_label = NULL;
1035
1036 do_frame = dwarf2out_do_frame ();
1037
1038 /* ??? current_function_func_begin_label is also used by except.c for
1039 call-site information. We must emit this label if it might be used. */
1040 if (!do_frame
1041 && (!flag_exceptions
1042 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1043 return;
1044
1045 fnsec = function_section (current_function_decl);
1046 switch_to_section (fnsec);
1047 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1048 current_function_funcdef_no);
1049 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1050 current_function_funcdef_no);
1051 dup_label = xstrdup (label);
1052 current_function_func_begin_label = dup_label;
1053
1054 /* We can elide the fde allocation if we're not emitting debug info. */
1055 if (!do_frame)
1056 return;
1057
1058 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1059 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1060 would include pass_dwarf2_frame. If we've not created the FDE yet,
1061 do so now. */
1062 fde = cfun->fde;
1063 if (fde == NULL)
1064 fde = dwarf2out_alloc_current_fde ();
1065
1066 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1067 fde->dw_fde_begin = dup_label;
1068 fde->dw_fde_current_label = dup_label;
1069 fde->in_std_section = (fnsec == text_section
1070 || (cold_text_section && fnsec == cold_text_section));
1071
1072 /* We only want to output line number information for the genuine dwarf2
1073 prologue case, not the eh frame case. */
1074 #ifdef DWARF2_DEBUGGING_INFO
1075 if (file)
1076 dwarf2out_source_line (line, file, 0, true);
1077 #endif
1078
1079 if (dwarf2out_do_cfi_asm ())
1080 dwarf2out_do_cfi_startproc (false);
1081 else
1082 {
1083 rtx personality = get_personality_function (current_function_decl);
1084 if (!current_unit_personality)
1085 current_unit_personality = personality;
1086
1087 /* We cannot keep a current personality per function because, without
1088 CFI asm, there is no current function anymore at the point where we
1089 emit the CFI data. */
1090 if (personality && current_unit_personality != personality)
1091 sorry ("multiple EH personalities are supported only with assemblers "
1092 "supporting .cfi_personality directive");
1093 }
1094 }
1095
1096 /* Output a marker (i.e. a label) for the end of the generated code
1097 for a function prologue. This gets called *after* the prologue code has
1098 been generated. */
1099
1100 void
1101 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1102 const char *file ATTRIBUTE_UNUSED)
1103 {
1104 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1105
1106 /* Output a label to mark the end of the prologue code generated for
1107 this function. */
1108 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1109 current_function_funcdef_no);
1110 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1111 current_function_funcdef_no);
1112 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1113 }
1114
1115 /* Output a marker (i.e. a label) for the beginning of the generated code
1116 for a function epilogue. This gets called *before* the epilogue code has
1117 been generated. */
1118
1119 void
1120 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1121 const char *file ATTRIBUTE_UNUSED)
1122 {
1123 dw_fde_ref fde = cfun->fde;
1124 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1125
1126 if (fde->dw_fde_vms_begin_epilogue)
1127 return;
1128
1129 /* Output a label to mark the beginning of the epilogue code generated
1130 for this function. */
1131 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1132 current_function_funcdef_no);
1133 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1134 current_function_funcdef_no);
1135 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1136 }
1137
1138 /* Output a marker (i.e. a label) for the absolute end of the generated code
1139 for a function definition. This gets called *after* the epilogue code has
1140 been generated. */
1141
1142 void
1143 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1144 const char *file ATTRIBUTE_UNUSED)
1145 {
1146 dw_fde_ref fde;
1147 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1148
1149 last_var_location_insn = NULL;
1150 cached_next_real_insn = NULL;
1151
1152 if (dwarf2out_do_cfi_asm ())
1153 fprintf (asm_out_file, "\t.cfi_endproc\n");
1154
1155 /* Output a label to mark the endpoint of the code generated for this
1156 function. */
1157 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1158 current_function_funcdef_no);
1159 ASM_OUTPUT_LABEL (asm_out_file, label);
1160 fde = cfun->fde;
1161 gcc_assert (fde != NULL);
1162 if (fde->dw_fde_second_begin == NULL)
1163 fde->dw_fde_end = xstrdup (label);
1164 }
1165
1166 void
1167 dwarf2out_frame_finish (void)
1168 {
1169 /* Output call frame information. */
1170 if (targetm.debug_unwind_info () == UI_DWARF2)
1171 output_call_frame_info (0);
1172
1173 /* Output another copy for the unwinder. */
1174 if ((flag_unwind_tables || flag_exceptions)
1175 && targetm_common.except_unwind_info (&global_options) == UI_DWARF2)
1176 output_call_frame_info (1);
1177 }
1178
1179 /* Note that the current function section is being used for code. */
1180
1181 static void
1182 dwarf2out_note_section_used (void)
1183 {
1184 section *sec = current_function_section ();
1185 if (sec == text_section)
1186 text_section_used = true;
1187 else if (sec == cold_text_section)
1188 cold_text_section_used = true;
1189 }
1190
1191 static void var_location_switch_text_section (void);
1192 static void set_cur_line_info_table (section *);
1193
1194 void
1195 dwarf2out_switch_text_section (void)
1196 {
1197 section *sect;
1198 dw_fde_ref fde = cfun->fde;
1199
1200 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1201
1202 if (!in_cold_section_p)
1203 {
1204 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1205 fde->dw_fde_second_begin = crtl->subsections.hot_section_label;
1206 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1207 }
1208 else
1209 {
1210 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1211 fde->dw_fde_second_begin = crtl->subsections.cold_section_label;
1212 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1213 }
1214 have_multiple_function_sections = true;
1215
1216 /* There is no need to mark used sections when not debugging. */
1217 if (cold_text_section != NULL)
1218 dwarf2out_note_section_used ();
1219
1220 if (dwarf2out_do_cfi_asm ())
1221 fprintf (asm_out_file, "\t.cfi_endproc\n");
1222
1223 /* Now do the real section switch. */
1224 sect = current_function_section ();
1225 switch_to_section (sect);
1226
1227 fde->second_in_std_section
1228 = (sect == text_section
1229 || (cold_text_section && sect == cold_text_section));
1230
1231 if (dwarf2out_do_cfi_asm ())
1232 dwarf2out_do_cfi_startproc (true);
1233
1234 var_location_switch_text_section ();
1235
1236 if (cold_text_section != NULL)
1237 set_cur_line_info_table (sect);
1238 }
1239 \f
1240 /* And now, the subset of the debugging information support code necessary
1241 for emitting location expressions. */
1242
1243 /* Data about a single source file. */
1244 struct GTY((for_user)) dwarf_file_data {
1245 const char * filename;
1246 int emitted_number;
1247 };
1248
1249 /* Describe an entry into the .debug_addr section. */
1250
1251 enum ate_kind {
1252 ate_kind_rtx,
1253 ate_kind_rtx_dtprel,
1254 ate_kind_label
1255 };
1256
1257 struct GTY((for_user)) addr_table_entry {
1258 enum ate_kind kind;
1259 unsigned int refcount;
1260 unsigned int index;
1261 union addr_table_entry_struct_union
1262 {
1263 rtx GTY ((tag ("0"))) rtl;
1264 char * GTY ((tag ("1"))) label;
1265 }
1266 GTY ((desc ("%1.kind"))) addr;
1267 };
1268
1269 /* Location lists are ranges + location descriptions for that range,
1270 so you can track variables that are in different places over
1271 their entire life. */
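/* For instance, a variable that lives in a register over one address
   range and in a frame slot over the next would be described by two
   nodes of this list, roughly

     [.LVL0, .LVL1)  DW_OP_reg3
     [.LVL1, .LVL2)  DW_OP_fbreg -16

   where the labels and operands are purely illustrative.  */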
1272 typedef struct GTY(()) dw_loc_list_struct {
1273 dw_loc_list_ref dw_loc_next;
1274 const char *begin; /* Label and addr_entry for start of range */
1275 addr_table_entry *begin_entry;
1276 const char *end; /* Label for end of range */
1277 char *ll_symbol; /* Label for beginning of location list.
1278 Only on head of list */
1279 const char *section; /* Section this loclist is relative to */
1280 dw_loc_descr_ref expr;
1281 hashval_t hash;
1282 /* True if all addresses in this and subsequent lists are known to be
1283 resolved. */
1284 bool resolved_addr;
1285 /* True if this list has been replaced by dw_loc_next. */
1286 bool replaced;
1287 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1288 section. */
1289 unsigned char emitted : 1;
1290 /* True if hash field is index rather than hash value. */
1291 unsigned char num_assigned : 1;
1292 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1293 unsigned char offset_emitted : 1;
1294 /* True if the range should be emitted even if begin and end
1295 are the same. */
1296 bool force;
1297 } dw_loc_list_node;
1298
1299 static dw_loc_descr_ref int_loc_descriptor (HOST_WIDE_INT);
1300 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1301
1302 /* Convert a DWARF stack opcode into its string name. */
1303
1304 static const char *
1305 dwarf_stack_op_name (unsigned int op)
1306 {
1307 const char *name = get_DW_OP_name (op);
1308
1309 if (name != NULL)
1310 return name;
1311
1312 return "OP_<unknown>";
1313 }
1314
1315 /* Return a pointer to a newly allocated location description. Location
1316 descriptions are simple expression terms that can be strung
1317 together to form more complicated location (address) descriptions. */
1318
1319 static inline dw_loc_descr_ref
1320 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1321 unsigned HOST_WIDE_INT oprnd2)
1322 {
1323 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1324
1325 descr->dw_loc_opc = op;
1326 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1327 descr->dw_loc_oprnd1.val_entry = NULL;
1328 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1329 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1330 descr->dw_loc_oprnd2.val_entry = NULL;
1331 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1332
1333 return descr;
1334 }
1335
1336 /* Return a pointer to a newly allocated location description for
1337 REG and OFFSET. */
1338
1339 static inline dw_loc_descr_ref
1340 new_reg_loc_descr (unsigned int reg, unsigned HOST_WIDE_INT offset)
1341 {
1342 if (reg <= 31)
1343 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1344 offset, 0);
1345 else
1346 return new_loc_descr (DW_OP_bregx, reg, offset);
1347 }
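/* A sketch of the two cases above: new_reg_loc_descr (3, 8) produces the
   single op "DW_OP_breg3 8", while a register number above 31, say
   new_reg_loc_descr (33, 8), produces "DW_OP_bregx 33 8".  The register
   numbers are chosen only for illustration.  */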
1348
1349 /* Add a location description term to a location description expression. */
1350
1351 static inline void
1352 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1353 {
1354 dw_loc_descr_ref *d;
1355
1356 /* Find the end of the chain. */
1357 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1358 ;
1359
1360 *d = descr;
1361 }
1362
1363 /* Compare two location operands for exact equality. */
1364
1365 static bool
1366 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1367 {
1368 if (a->val_class != b->val_class)
1369 return false;
1370 switch (a->val_class)
1371 {
1372 case dw_val_class_none:
1373 return true;
1374 case dw_val_class_addr:
1375 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1376
1377 case dw_val_class_offset:
1378 case dw_val_class_unsigned_const:
1379 case dw_val_class_const:
1380 case dw_val_class_unsigned_const_implicit:
1381 case dw_val_class_const_implicit:
1382 case dw_val_class_range_list:
1383 /* These are all HOST_WIDE_INT, signed or unsigned. */
1384 return a->v.val_unsigned == b->v.val_unsigned;
1385
1386 case dw_val_class_loc:
1387 return a->v.val_loc == b->v.val_loc;
1388 case dw_val_class_loc_list:
1389 return a->v.val_loc_list == b->v.val_loc_list;
1390 case dw_val_class_die_ref:
1391 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1392 case dw_val_class_fde_ref:
1393 return a->v.val_fde_index == b->v.val_fde_index;
1394 case dw_val_class_lbl_id:
1395 case dw_val_class_lineptr:
1396 case dw_val_class_macptr:
1397 case dw_val_class_loclistsptr:
1398 case dw_val_class_high_pc:
1399 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1400 case dw_val_class_str:
1401 return a->v.val_str == b->v.val_str;
1402 case dw_val_class_flag:
1403 return a->v.val_flag == b->v.val_flag;
1404 case dw_val_class_file:
1405 case dw_val_class_file_implicit:
1406 return a->v.val_file == b->v.val_file;
1407 case dw_val_class_decl_ref:
1408 return a->v.val_decl_ref == b->v.val_decl_ref;
1409
1410 case dw_val_class_const_double:
1411 return (a->v.val_double.high == b->v.val_double.high
1412 && a->v.val_double.low == b->v.val_double.low);
1413
1414 case dw_val_class_wide_int:
1415 return *a->v.val_wide == *b->v.val_wide;
1416
1417 case dw_val_class_vec:
1418 {
1419 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1420 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1421
1422 return (a_len == b_len
1423 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1424 }
1425
1426 case dw_val_class_data8:
1427 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1428
1429 case dw_val_class_vms_delta:
1430 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1431 && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
1432
1433 case dw_val_class_discr_value:
1434 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1435 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1436 case dw_val_class_discr_list:
1437 /* It makes no sense comparing two discriminant value lists. */
1438 return false;
1439 }
1440 gcc_unreachable ();
1441 }
1442
1443 /* Compare two location atoms for exact equality. */
1444
1445 static bool
1446 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1447 {
1448 if (a->dw_loc_opc != b->dw_loc_opc)
1449 return false;
1450
1451 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1452 address size, but since we always allocate cleared storage it
1453 should be zero for other types of locations. */
1454 if (a->dtprel != b->dtprel)
1455 return false;
1456
1457 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1458 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1459 }
1460
1461 /* Compare two complete location expressions for exact equality. */
1462
1463 bool
1464 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1465 {
1466 while (1)
1467 {
1468 if (a == b)
1469 return true;
1470 if (a == NULL || b == NULL)
1471 return false;
1472 if (!loc_descr_equal_p_1 (a, b))
1473 return false;
1474
1475 a = a->dw_loc_next;
1476 b = b->dw_loc_next;
1477 }
1478 }
1479
1480
1481 /* Add a constant OFFSET to a location expression. */
1482
1483 static void
1484 loc_descr_plus_const (dw_loc_descr_ref *list_head, HOST_WIDE_INT offset)
1485 {
1486 dw_loc_descr_ref loc;
1487 HOST_WIDE_INT *p;
1488
1489 gcc_assert (*list_head != NULL);
1490
1491 if (!offset)
1492 return;
1493
1494 /* Find the end of the chain. */
1495 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1496 ;
1497
1498 p = NULL;
1499 if (loc->dw_loc_opc == DW_OP_fbreg
1500 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1501 p = &loc->dw_loc_oprnd1.v.val_int;
1502 else if (loc->dw_loc_opc == DW_OP_bregx)
1503 p = &loc->dw_loc_oprnd2.v.val_int;
1504
1505 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1506 offset. Don't optimize if a signed integer overflow would happen. */
1507 if (p != NULL
1508 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1509 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1510 *p += offset;
1511
1512 else if (offset > 0)
1513 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1514
1515 else
1516 {
1517 loc->dw_loc_next
1518 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1519 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1520 }
1521 }
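/* For instance, adding 8 to an expression ending in "DW_OP_fbreg -16"
   just rewrites that operand to -8, whereas adding 8 to an expression
   ending in DW_OP_addr appends "DW_OP_plus_uconst 8", and adding -8 to
   it appends the constant 8 followed by DW_OP_minus.  (Illustrative
   operands only.)  */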
1522
1523 /* Add a constant OFFSET to a location list. */
1524
1525 static void
1526 loc_list_plus_const (dw_loc_list_ref list_head, HOST_WIDE_INT offset)
1527 {
1528 dw_loc_list_ref d;
1529 for (d = list_head; d != NULL; d = d->dw_loc_next)
1530 loc_descr_plus_const (&d->expr, offset);
1531 }
1532
1533 #define DWARF_REF_SIZE \
1534 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1535
1536 /* The number of bits that can be encoded by the largest DW_FORM_dataN.
1537 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1538 DW_FORM_data16 with 128 bits. */
1539 #define DWARF_LARGEST_DATA_FORM_BITS \
1540 (dwarf_version >= 5 ? 128 : 64)
1541
1542 /* Utility inline function for construction of ops that were GNU extensions
1543 before DWARF 5. */
1544 static inline enum dwarf_location_atom
1545 dwarf_OP (enum dwarf_location_atom op)
1546 {
1547 switch (op)
1548 {
1549 case DW_OP_implicit_pointer:
1550 if (dwarf_version < 5)
1551 return DW_OP_GNU_implicit_pointer;
1552 break;
1553
1554 case DW_OP_entry_value:
1555 if (dwarf_version < 5)
1556 return DW_OP_GNU_entry_value;
1557 break;
1558
1559 case DW_OP_const_type:
1560 if (dwarf_version < 5)
1561 return DW_OP_GNU_const_type;
1562 break;
1563
1564 case DW_OP_regval_type:
1565 if (dwarf_version < 5)
1566 return DW_OP_GNU_regval_type;
1567 break;
1568
1569 case DW_OP_deref_type:
1570 if (dwarf_version < 5)
1571 return DW_OP_GNU_deref_type;
1572 break;
1573
1574 case DW_OP_convert:
1575 if (dwarf_version < 5)
1576 return DW_OP_GNU_convert;
1577 break;
1578
1579 case DW_OP_reinterpret:
1580 if (dwarf_version < 5)
1581 return DW_OP_GNU_reinterpret;
1582 break;
1583
1584 default:
1585 break;
1586 }
1587 return op;
1588 }
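/* So, for example, dwarf_OP (DW_OP_entry_value) yields
   DW_OP_GNU_entry_value when dwarf_version is 2, 3 or 4, and the
   standard DW_OP_entry_value for DWARF 5 and later; opcodes not listed
   above are always returned unchanged.  */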
1589
1590 /* Similarly for attributes. */
1591 static inline enum dwarf_attribute
1592 dwarf_AT (enum dwarf_attribute at)
1593 {
1594 switch (at)
1595 {
1596 case DW_AT_call_return_pc:
1597 if (dwarf_version < 5)
1598 return DW_AT_low_pc;
1599 break;
1600
1601 case DW_AT_call_tail_call:
1602 if (dwarf_version < 5)
1603 return DW_AT_GNU_tail_call;
1604 break;
1605
1606 case DW_AT_call_origin:
1607 if (dwarf_version < 5)
1608 return DW_AT_abstract_origin;
1609 break;
1610
1611 case DW_AT_call_target:
1612 if (dwarf_version < 5)
1613 return DW_AT_GNU_call_site_target;
1614 break;
1615
1616 case DW_AT_call_target_clobbered:
1617 if (dwarf_version < 5)
1618 return DW_AT_GNU_call_site_target_clobbered;
1619 break;
1620
1621 case DW_AT_call_parameter:
1622 if (dwarf_version < 5)
1623 return DW_AT_abstract_origin;
1624 break;
1625
1626 case DW_AT_call_value:
1627 if (dwarf_version < 5)
1628 return DW_AT_GNU_call_site_value;
1629 break;
1630
1631 case DW_AT_call_data_value:
1632 if (dwarf_version < 5)
1633 return DW_AT_GNU_call_site_data_value;
1634 break;
1635
1636 case DW_AT_call_all_calls:
1637 if (dwarf_version < 5)
1638 return DW_AT_GNU_all_call_sites;
1639 break;
1640
1641 case DW_AT_call_all_tail_calls:
1642 if (dwarf_version < 5)
1643 return DW_AT_GNU_all_tail_call_sites;
1644 break;
1645
1646 case DW_AT_dwo_name:
1647 if (dwarf_version < 5)
1648 return DW_AT_GNU_dwo_name;
1649 break;
1650
1651 default:
1652 break;
1653 }
1654 return at;
1655 }
1656
1657 /* And similarly for tags. */
1658 static inline enum dwarf_tag
1659 dwarf_TAG (enum dwarf_tag tag)
1660 {
1661 switch (tag)
1662 {
1663 case DW_TAG_call_site:
1664 if (dwarf_version < 5)
1665 return DW_TAG_GNU_call_site;
1666 break;
1667
1668 case DW_TAG_call_site_parameter:
1669 if (dwarf_version < 5)
1670 return DW_TAG_GNU_call_site_parameter;
1671 break;
1672
1673 default:
1674 break;
1675 }
1676 return tag;
1677 }
1678
1679 static unsigned long int get_base_type_offset (dw_die_ref);
1680
1681 /* Return the size of a location descriptor. */
1682
1683 static unsigned long
1684 size_of_loc_descr (dw_loc_descr_ref loc)
1685 {
1686 unsigned long size = 1;
1687
1688 switch (loc->dw_loc_opc)
1689 {
1690 case DW_OP_addr:
1691 size += DWARF2_ADDR_SIZE;
1692 break;
1693 case DW_OP_GNU_addr_index:
1694 case DW_OP_GNU_const_index:
1695 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1696 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1697 break;
1698 case DW_OP_const1u:
1699 case DW_OP_const1s:
1700 size += 1;
1701 break;
1702 case DW_OP_const2u:
1703 case DW_OP_const2s:
1704 size += 2;
1705 break;
1706 case DW_OP_const4u:
1707 case DW_OP_const4s:
1708 size += 4;
1709 break;
1710 case DW_OP_const8u:
1711 case DW_OP_const8s:
1712 size += 8;
1713 break;
1714 case DW_OP_constu:
1715 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1716 break;
1717 case DW_OP_consts:
1718 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1719 break;
1720 case DW_OP_pick:
1721 size += 1;
1722 break;
1723 case DW_OP_plus_uconst:
1724 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1725 break;
1726 case DW_OP_skip:
1727 case DW_OP_bra:
1728 size += 2;
1729 break;
1730 case DW_OP_breg0:
1731 case DW_OP_breg1:
1732 case DW_OP_breg2:
1733 case DW_OP_breg3:
1734 case DW_OP_breg4:
1735 case DW_OP_breg5:
1736 case DW_OP_breg6:
1737 case DW_OP_breg7:
1738 case DW_OP_breg8:
1739 case DW_OP_breg9:
1740 case DW_OP_breg10:
1741 case DW_OP_breg11:
1742 case DW_OP_breg12:
1743 case DW_OP_breg13:
1744 case DW_OP_breg14:
1745 case DW_OP_breg15:
1746 case DW_OP_breg16:
1747 case DW_OP_breg17:
1748 case DW_OP_breg18:
1749 case DW_OP_breg19:
1750 case DW_OP_breg20:
1751 case DW_OP_breg21:
1752 case DW_OP_breg22:
1753 case DW_OP_breg23:
1754 case DW_OP_breg24:
1755 case DW_OP_breg25:
1756 case DW_OP_breg26:
1757 case DW_OP_breg27:
1758 case DW_OP_breg28:
1759 case DW_OP_breg29:
1760 case DW_OP_breg30:
1761 case DW_OP_breg31:
1762 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1763 break;
1764 case DW_OP_regx:
1765 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1766 break;
1767 case DW_OP_fbreg:
1768 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1769 break;
1770 case DW_OP_bregx:
1771 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1772 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1773 break;
1774 case DW_OP_piece:
1775 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1776 break;
1777 case DW_OP_bit_piece:
1778 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1779 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1780 break;
1781 case DW_OP_deref_size:
1782 case DW_OP_xderef_size:
1783 size += 1;
1784 break;
1785 case DW_OP_call2:
1786 size += 2;
1787 break;
1788 case DW_OP_call4:
1789 size += 4;
1790 break;
1791 case DW_OP_call_ref:
1792 size += DWARF_REF_SIZE;
1793 break;
1794 case DW_OP_implicit_value:
1795 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1796 + loc->dw_loc_oprnd1.v.val_unsigned;
1797 break;
1798 case DW_OP_implicit_pointer:
1799 case DW_OP_GNU_implicit_pointer:
1800 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1801 break;
1802 case DW_OP_entry_value:
1803 case DW_OP_GNU_entry_value:
1804 {
1805 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1806 size += size_of_uleb128 (op_size) + op_size;
1807 break;
1808 }
1809 case DW_OP_const_type:
1810 case DW_OP_GNU_const_type:
1811 {
1812 unsigned long o
1813 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1814 size += size_of_uleb128 (o) + 1;
1815 switch (loc->dw_loc_oprnd2.val_class)
1816 {
1817 case dw_val_class_vec:
1818 size += loc->dw_loc_oprnd2.v.val_vec.length
1819 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1820 break;
1821 case dw_val_class_const:
1822 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1823 break;
1824 case dw_val_class_const_double:
1825 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1826 break;
1827 case dw_val_class_wide_int:
1828 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1829 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1830 break;
1831 default:
1832 gcc_unreachable ();
1833 }
1834 break;
1835 }
1836 case DW_OP_regval_type:
1837 case DW_OP_GNU_regval_type:
1838 {
1839 unsigned long o
1840 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1841 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1842 + size_of_uleb128 (o);
1843 }
1844 break;
1845 case DW_OP_deref_type:
1846 case DW_OP_GNU_deref_type:
1847 {
1848 unsigned long o
1849 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1850 size += 1 + size_of_uleb128 (o);
1851 }
1852 break;
1853 case DW_OP_convert:
1854 case DW_OP_reinterpret:
1855 case DW_OP_GNU_convert:
1856 case DW_OP_GNU_reinterpret:
1857 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1858 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1859 else
1860 {
1861 unsigned long o
1862 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1863 size += size_of_uleb128 (o);
1864 }
1865 break;
1866 case DW_OP_GNU_parameter_ref:
1867 size += 4;
1868 break;
1869 default:
1870 break;
1871 }
1872
1873 return size;
1874 }
1875
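/* A minimal illustrative sketch, kept under #if 0 so it is never compiled:
   one way a ULEB128 byte count can be computed, which is what the
   size_of_uleb128 calls above rely on.  The name below is hypothetical;
   GCC's own size_of_uleb128 is provided elsewhere in its DWARF output
   support.  */
#if 0
static unsigned long
sketch_size_of_uleb128 (unsigned HOST_WIDE_INT value)
{
  unsigned long size = 0;
  do
    {
      value >>= 7;	/* Each output byte carries 7 bits of payload.  */
      size++;
    }
  while (value != 0);
  return size;
}
#endif
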
1876 /* Return the size of a series of location descriptors. */
1877
1878 unsigned long
1879 size_of_locs (dw_loc_descr_ref loc)
1880 {
1881 dw_loc_descr_ref l;
1882 unsigned long size;
1883
1884 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
1885 field, to avoid writing to a PCH file. */
1886 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1887 {
1888 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
1889 break;
1890 size += size_of_loc_descr (l);
1891 }
1892 if (! l)
1893 return size;
1894
1895 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1896 {
1897 l->dw_loc_addr = size;
1898 size += size_of_loc_descr (l);
1899 }
1900
1901 return size;
1902 }
1903
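/* Worked example (illustrative): the dw_loc_addr values filled in above are
   what output_loc_operands later uses to compute branch operands as
   target->dw_loc_addr - (branch->dw_loc_addr + 3); the 3 accounts for the
   one opcode byte plus the two operand bytes of DW_OP_skip/DW_OP_bra, and
   the operand is relative to the byte just past it.  So a DW_OP_bra
   assigned dw_loc_addr 5 whose target was assigned dw_loc_addr 20 gets the
   operand 20 - (5 + 3) = 12.  */
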
1904 /* Return the size of the value in a DW_AT_discr_value attribute. */
1905
1906 static int
1907 size_of_discr_value (dw_discr_value *discr_value)
1908 {
1909 if (discr_value->pos)
1910 return size_of_uleb128 (discr_value->v.uval);
1911 else
1912 return size_of_sleb128 (discr_value->v.sval);
1913 }
1914
1915 /* Return the size of the value in a DW_AT_discr_list attribute. */
1916
1917 static int
1918 size_of_discr_list (dw_discr_list_ref discr_list)
1919 {
1920 int size = 0;
1921
1922 for (dw_discr_list_ref list = discr_list;
1923 list != NULL;
1924 list = list->dw_discr_next)
1925 {
1926 /* One byte for the discriminant value descriptor, and then one or two
1927 LEB128 numbers, depending on whether it's a single case label or a
1928 range label. */
1929 size += 1;
1930 size += size_of_discr_value (&list->dw_discr_lower_bound);
1931 if (list->dw_discr_range != 0)
1932 size += size_of_discr_value (&list->dw_discr_upper_bound);
1933 }
1934 return size;
1935 }
1936
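/* Worked example (illustrative): a discriminant list holding a single-value
   label of 3 followed by a range label of 10 .. 20 takes
   (1 + 1) + (1 + 1 + 1) = 5 bytes here, since each entry is prefixed by one
   descriptor byte and each of these small values fits in a one-byte
   LEB128.  */
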
1937 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
1938 static void get_ref_die_offset_label (char *, dw_die_ref);
1939 static unsigned long int get_ref_die_offset (dw_die_ref);
1940
1941 /* Output location description stack opcode's operands (if any).
1942 The for_eh_or_skip parameter controls whether register numbers are
1943 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
1944 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
1945 info). This should be suppressed for the cases that have not been converted
1946 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
1947
1948 static void
1949 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
1950 {
1951 dw_val_ref val1 = &loc->dw_loc_oprnd1;
1952 dw_val_ref val2 = &loc->dw_loc_oprnd2;
1953
1954 switch (loc->dw_loc_opc)
1955 {
1956 #ifdef DWARF2_DEBUGGING_INFO
1957 case DW_OP_const2u:
1958 case DW_OP_const2s:
1959 dw2_asm_output_data (2, val1->v.val_int, NULL);
1960 break;
1961 case DW_OP_const4u:
1962 if (loc->dtprel)
1963 {
1964 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
1965 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
1966 val1->v.val_addr);
1967 fputc ('\n', asm_out_file);
1968 break;
1969 }
1970 /* FALLTHRU */
1971 case DW_OP_const4s:
1972 dw2_asm_output_data (4, val1->v.val_int, NULL);
1973 break;
1974 case DW_OP_const8u:
1975 if (loc->dtprel)
1976 {
1977 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
1978 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
1979 val1->v.val_addr);
1980 fputc ('\n', asm_out_file);
1981 break;
1982 }
1983 /* FALLTHRU */
1984 case DW_OP_const8s:
1985 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
1986 dw2_asm_output_data (8, val1->v.val_int, NULL);
1987 break;
1988 case DW_OP_skip:
1989 case DW_OP_bra:
1990 {
1991 int offset;
1992
1993 gcc_assert (val1->val_class == dw_val_class_loc);
1994 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
1995
1996 dw2_asm_output_data (2, offset, NULL);
1997 }
1998 break;
1999 case DW_OP_implicit_value:
2000 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2001 switch (val2->val_class)
2002 {
2003 case dw_val_class_const:
2004 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2005 break;
2006 case dw_val_class_vec:
2007 {
2008 unsigned int elt_size = val2->v.val_vec.elt_size;
2009 unsigned int len = val2->v.val_vec.length;
2010 unsigned int i;
2011 unsigned char *p;
2012
2013 if (elt_size > sizeof (HOST_WIDE_INT))
2014 {
2015 elt_size /= 2;
2016 len *= 2;
2017 }
2018 for (i = 0, p = val2->v.val_vec.array;
2019 i < len;
2020 i++, p += elt_size)
2021 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2022 "fp or vector constant word %u", i);
2023 }
2024 break;
2025 case dw_val_class_const_double:
2026 {
2027 unsigned HOST_WIDE_INT first, second;
2028
2029 if (WORDS_BIG_ENDIAN)
2030 {
2031 first = val2->v.val_double.high;
2032 second = val2->v.val_double.low;
2033 }
2034 else
2035 {
2036 first = val2->v.val_double.low;
2037 second = val2->v.val_double.high;
2038 }
2039 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2040 first, NULL);
2041 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2042 second, NULL);
2043 }
2044 break;
2045 case dw_val_class_wide_int:
2046 {
2047 int i;
2048 int len = get_full_len (*val2->v.val_wide);
2049 if (WORDS_BIG_ENDIAN)
2050 for (i = len - 1; i >= 0; --i)
2051 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2052 val2->v.val_wide->elt (i), NULL);
2053 else
2054 for (i = 0; i < len; ++i)
2055 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2056 val2->v.val_wide->elt (i), NULL);
2057 }
2058 break;
2059 case dw_val_class_addr:
2060 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2061 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2062 break;
2063 default:
2064 gcc_unreachable ();
2065 }
2066 break;
2067 #else
2068 case DW_OP_const2u:
2069 case DW_OP_const2s:
2070 case DW_OP_const4u:
2071 case DW_OP_const4s:
2072 case DW_OP_const8u:
2073 case DW_OP_const8s:
2074 case DW_OP_skip:
2075 case DW_OP_bra:
2076 case DW_OP_implicit_value:
2077 /* We currently don't make any attempt to make sure these are
2078 aligned properly like we do for the main unwind info, so
2079 don't support emitting things larger than a byte if we're
2080 only doing unwinding. */
2081 gcc_unreachable ();
2082 #endif
2083 case DW_OP_const1u:
2084 case DW_OP_const1s:
2085 dw2_asm_output_data (1, val1->v.val_int, NULL);
2086 break;
2087 case DW_OP_constu:
2088 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2089 break;
2090 case DW_OP_consts:
2091 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2092 break;
2093 case DW_OP_pick:
2094 dw2_asm_output_data (1, val1->v.val_int, NULL);
2095 break;
2096 case DW_OP_plus_uconst:
2097 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2098 break;
2099 case DW_OP_breg0:
2100 case DW_OP_breg1:
2101 case DW_OP_breg2:
2102 case DW_OP_breg3:
2103 case DW_OP_breg4:
2104 case DW_OP_breg5:
2105 case DW_OP_breg6:
2106 case DW_OP_breg7:
2107 case DW_OP_breg8:
2108 case DW_OP_breg9:
2109 case DW_OP_breg10:
2110 case DW_OP_breg11:
2111 case DW_OP_breg12:
2112 case DW_OP_breg13:
2113 case DW_OP_breg14:
2114 case DW_OP_breg15:
2115 case DW_OP_breg16:
2116 case DW_OP_breg17:
2117 case DW_OP_breg18:
2118 case DW_OP_breg19:
2119 case DW_OP_breg20:
2120 case DW_OP_breg21:
2121 case DW_OP_breg22:
2122 case DW_OP_breg23:
2123 case DW_OP_breg24:
2124 case DW_OP_breg25:
2125 case DW_OP_breg26:
2126 case DW_OP_breg27:
2127 case DW_OP_breg28:
2128 case DW_OP_breg29:
2129 case DW_OP_breg30:
2130 case DW_OP_breg31:
2131 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2132 break;
2133 case DW_OP_regx:
2134 {
2135 unsigned r = val1->v.val_unsigned;
2136 if (for_eh_or_skip >= 0)
2137 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2138 gcc_assert (size_of_uleb128 (r)
2139 == size_of_uleb128 (val1->v.val_unsigned));
2140 dw2_asm_output_data_uleb128 (r, NULL);
2141 }
2142 break;
2143 case DW_OP_fbreg:
2144 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2145 break;
2146 case DW_OP_bregx:
2147 {
2148 unsigned r = val1->v.val_unsigned;
2149 if (for_eh_or_skip >= 0)
2150 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2151 gcc_assert (size_of_uleb128 (r)
2152 == size_of_uleb128 (val1->v.val_unsigned));
2153 dw2_asm_output_data_uleb128 (r, NULL);
2154 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2155 }
2156 break;
2157 case DW_OP_piece:
2158 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2159 break;
2160 case DW_OP_bit_piece:
2161 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2162 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2163 break;
2164 case DW_OP_deref_size:
2165 case DW_OP_xderef_size:
2166 dw2_asm_output_data (1, val1->v.val_int, NULL);
2167 break;
2168
2169 case DW_OP_addr:
2170 if (loc->dtprel)
2171 {
2172 if (targetm.asm_out.output_dwarf_dtprel)
2173 {
2174 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2175 DWARF2_ADDR_SIZE,
2176 val1->v.val_addr);
2177 fputc ('\n', asm_out_file);
2178 }
2179 else
2180 gcc_unreachable ();
2181 }
2182 else
2183 {
2184 #ifdef DWARF2_DEBUGGING_INFO
2185 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2186 #else
2187 gcc_unreachable ();
2188 #endif
2189 }
2190 break;
2191
2192 case DW_OP_GNU_addr_index:
2193 case DW_OP_GNU_const_index:
2194 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2195 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2196 "(index into .debug_addr)");
2197 break;
2198
2199 case DW_OP_call2:
2200 case DW_OP_call4:
2201 {
2202 unsigned long die_offset
2203 = get_ref_die_offset (val1->v.val_die_ref.die);
2204 /* Make sure the offset has been computed and that we can encode it as
2205 an operand. */
2206 gcc_assert (die_offset > 0
2207 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2208 ? 0xffff
2209 : 0xffffffff));
2210 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2211 die_offset, NULL);
2212 }
2213 break;
2214
2215 case DW_OP_implicit_pointer:
2216 case DW_OP_GNU_implicit_pointer:
2217 {
2218 char label[MAX_ARTIFICIAL_LABEL_BYTES
2219 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2220 gcc_assert (val1->val_class == dw_val_class_die_ref);
2221 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2222 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2223 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2224 }
2225 break;
2226
2227 case DW_OP_entry_value:
2228 case DW_OP_GNU_entry_value:
2229 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2230 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2231 break;
2232
2233 case DW_OP_const_type:
2234 case DW_OP_GNU_const_type:
2235 {
2236 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2237 gcc_assert (o);
2238 dw2_asm_output_data_uleb128 (o, NULL);
2239 switch (val2->val_class)
2240 {
2241 case dw_val_class_const:
2242 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2243 dw2_asm_output_data (1, l, NULL);
2244 dw2_asm_output_data (l, val2->v.val_int, NULL);
2245 break;
2246 case dw_val_class_vec:
2247 {
2248 unsigned int elt_size = val2->v.val_vec.elt_size;
2249 unsigned int len = val2->v.val_vec.length;
2250 unsigned int i;
2251 unsigned char *p;
2252
2253 l = len * elt_size;
2254 dw2_asm_output_data (1, l, NULL);
2255 if (elt_size > sizeof (HOST_WIDE_INT))
2256 {
2257 elt_size /= 2;
2258 len *= 2;
2259 }
2260 for (i = 0, p = val2->v.val_vec.array;
2261 i < len;
2262 i++, p += elt_size)
2263 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2264 "fp or vector constant word %u", i);
2265 }
2266 break;
2267 case dw_val_class_const_double:
2268 {
2269 unsigned HOST_WIDE_INT first, second;
2270 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2271
2272 dw2_asm_output_data (1, 2 * l, NULL);
2273 if (WORDS_BIG_ENDIAN)
2274 {
2275 first = val2->v.val_double.high;
2276 second = val2->v.val_double.low;
2277 }
2278 else
2279 {
2280 first = val2->v.val_double.low;
2281 second = val2->v.val_double.high;
2282 }
2283 dw2_asm_output_data (l, first, NULL);
2284 dw2_asm_output_data (l, second, NULL);
2285 }
2286 break;
2287 case dw_val_class_wide_int:
2288 {
2289 int i;
2290 int len = get_full_len (*val2->v.val_wide);
2291 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2292
2293 dw2_asm_output_data (1, len * l, NULL);
2294 if (WORDS_BIG_ENDIAN)
2295 for (i = len - 1; i >= 0; --i)
2296 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2297 else
2298 for (i = 0; i < len; ++i)
2299 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2300 }
2301 break;
2302 default:
2303 gcc_unreachable ();
2304 }
2305 }
2306 break;
2307 case DW_OP_regval_type:
2308 case DW_OP_GNU_regval_type:
2309 {
2310 unsigned r = val1->v.val_unsigned;
2311 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2312 gcc_assert (o);
2313 if (for_eh_or_skip >= 0)
2314 {
2315 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2316 gcc_assert (size_of_uleb128 (r)
2317 == size_of_uleb128 (val1->v.val_unsigned));
2318 }
2319 dw2_asm_output_data_uleb128 (r, NULL);
2320 dw2_asm_output_data_uleb128 (o, NULL);
2321 }
2322 break;
2323 case DW_OP_deref_type:
2324 case DW_OP_GNU_deref_type:
2325 {
2326 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2327 gcc_assert (o);
2328 dw2_asm_output_data (1, val1->v.val_int, NULL);
2329 dw2_asm_output_data_uleb128 (o, NULL);
2330 }
2331 break;
2332 case DW_OP_convert:
2333 case DW_OP_reinterpret:
2334 case DW_OP_GNU_convert:
2335 case DW_OP_GNU_reinterpret:
2336 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2337 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2338 else
2339 {
2340 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2341 gcc_assert (o);
2342 dw2_asm_output_data_uleb128 (o, NULL);
2343 }
2344 break;
2345
2346 case DW_OP_GNU_parameter_ref:
2347 {
2348 unsigned long o;
2349 gcc_assert (val1->val_class == dw_val_class_die_ref);
2350 o = get_ref_die_offset (val1->v.val_die_ref.die);
2351 dw2_asm_output_data (4, o, NULL);
2352 }
2353 break;
2354
2355 default:
2356 /* Other codes have no operands. */
2357 break;
2358 }
2359 }
2360
2361 /* Output a sequence of location operations.
2362 The for_eh_or_skip parameter controls whether register numbers are
2363 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2364 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2365 info). This should be suppressed for the cases that have not been converted
2366 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2367
2368 void
2369 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2370 {
2371 for (; loc != NULL; loc = loc->dw_loc_next)
2372 {
2373 enum dwarf_location_atom opc = loc->dw_loc_opc;
2374 /* Output the opcode. */
2375 if (for_eh_or_skip >= 0
2376 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2377 {
2378 unsigned r = (opc - DW_OP_breg0);
2379 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2380 gcc_assert (r <= 31);
2381 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2382 }
2383 else if (for_eh_or_skip >= 0
2384 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2385 {
2386 unsigned r = (opc - DW_OP_reg0);
2387 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2388 gcc_assert (r <= 31);
2389 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2390 }
2391
2392 dw2_asm_output_data (1, opc,
2393 "%s", dwarf_stack_op_name (opc));
2394
2395 /* Output the operand(s) (if any). */
2396 output_loc_operands (loc, for_eh_or_skip);
2397 }
2398 }
2399
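/* A minimal usage sketch, kept under #if 0 so it is never compiled: emit a
   one-operation expression meaning "frame base minus 16".  DW_OP_fbreg is
   opcode 0x91 and the SLEB128 encoding of -16 is the single byte 0x70, so
   the bytes produced would be 0x91 0x70.  The helper name is hypothetical;
   new_loc_descr and output_loc_sequence are the same routines used
   elsewhere in this file.  */
#if 0
static void
sketch_output_fbreg_minus_16 (void)
{
  dw_loc_descr_ref ref = new_loc_descr (DW_OP_fbreg, -16, 0);
  /* A negative for_eh_or_skip suppresses DWARF2_FRAME_REG_OUT remapping,
     as for symbolic debug info.  */
  output_loc_sequence (ref, -1);
}
#endif
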
2400 /* Output location description stack opcode's operands (if any).
2401 The output is single bytes on a line, suitable for .cfi_escape. */
2402
2403 static void
2404 output_loc_operands_raw (dw_loc_descr_ref loc)
2405 {
2406 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2407 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2408
2409 switch (loc->dw_loc_opc)
2410 {
2411 case DW_OP_addr:
2412 case DW_OP_GNU_addr_index:
2413 case DW_OP_GNU_const_index:
2414 case DW_OP_implicit_value:
2415 /* We cannot output addresses in .cfi_escape, only bytes. */
2416 gcc_unreachable ();
2417
2418 case DW_OP_const1u:
2419 case DW_OP_const1s:
2420 case DW_OP_pick:
2421 case DW_OP_deref_size:
2422 case DW_OP_xderef_size:
2423 fputc (',', asm_out_file);
2424 dw2_asm_output_data_raw (1, val1->v.val_int);
2425 break;
2426
2427 case DW_OP_const2u:
2428 case DW_OP_const2s:
2429 fputc (',', asm_out_file);
2430 dw2_asm_output_data_raw (2, val1->v.val_int);
2431 break;
2432
2433 case DW_OP_const4u:
2434 case DW_OP_const4s:
2435 fputc (',', asm_out_file);
2436 dw2_asm_output_data_raw (4, val1->v.val_int);
2437 break;
2438
2439 case DW_OP_const8u:
2440 case DW_OP_const8s:
2441 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2442 fputc (',', asm_out_file);
2443 dw2_asm_output_data_raw (8, val1->v.val_int);
2444 break;
2445
2446 case DW_OP_skip:
2447 case DW_OP_bra:
2448 {
2449 int offset;
2450
2451 gcc_assert (val1->val_class == dw_val_class_loc);
2452 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2453
2454 fputc (',', asm_out_file);
2455 dw2_asm_output_data_raw (2, offset);
2456 }
2457 break;
2458
2459 case DW_OP_regx:
2460 {
2461 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2462 gcc_assert (size_of_uleb128 (r)
2463 == size_of_uleb128 (val1->v.val_unsigned));
2464 fputc (',', asm_out_file);
2465 dw2_asm_output_data_uleb128_raw (r);
2466 }
2467 break;
2468
2469 case DW_OP_constu:
2470 case DW_OP_plus_uconst:
2471 case DW_OP_piece:
2472 fputc (',', asm_out_file);
2473 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2474 break;
2475
2476 case DW_OP_bit_piece:
2477 fputc (',', asm_out_file);
2478 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2479 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2480 break;
2481
2482 case DW_OP_consts:
2483 case DW_OP_breg0:
2484 case DW_OP_breg1:
2485 case DW_OP_breg2:
2486 case DW_OP_breg3:
2487 case DW_OP_breg4:
2488 case DW_OP_breg5:
2489 case DW_OP_breg6:
2490 case DW_OP_breg7:
2491 case DW_OP_breg8:
2492 case DW_OP_breg9:
2493 case DW_OP_breg10:
2494 case DW_OP_breg11:
2495 case DW_OP_breg12:
2496 case DW_OP_breg13:
2497 case DW_OP_breg14:
2498 case DW_OP_breg15:
2499 case DW_OP_breg16:
2500 case DW_OP_breg17:
2501 case DW_OP_breg18:
2502 case DW_OP_breg19:
2503 case DW_OP_breg20:
2504 case DW_OP_breg21:
2505 case DW_OP_breg22:
2506 case DW_OP_breg23:
2507 case DW_OP_breg24:
2508 case DW_OP_breg25:
2509 case DW_OP_breg26:
2510 case DW_OP_breg27:
2511 case DW_OP_breg28:
2512 case DW_OP_breg29:
2513 case DW_OP_breg30:
2514 case DW_OP_breg31:
2515 case DW_OP_fbreg:
2516 fputc (',', asm_out_file);
2517 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2518 break;
2519
2520 case DW_OP_bregx:
2521 {
2522 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2523 gcc_assert (size_of_uleb128 (r)
2524 == size_of_uleb128 (val1->v.val_unsigned));
2525 fputc (',', asm_out_file);
2526 dw2_asm_output_data_uleb128_raw (r);
2527 fputc (',', asm_out_file);
2528 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2529 }
2530 break;
2531
2532 case DW_OP_implicit_pointer:
2533 case DW_OP_entry_value:
2534 case DW_OP_const_type:
2535 case DW_OP_regval_type:
2536 case DW_OP_deref_type:
2537 case DW_OP_convert:
2538 case DW_OP_reinterpret:
2539 case DW_OP_GNU_implicit_pointer:
2540 case DW_OP_GNU_entry_value:
2541 case DW_OP_GNU_const_type:
2542 case DW_OP_GNU_regval_type:
2543 case DW_OP_GNU_deref_type:
2544 case DW_OP_GNU_convert:
2545 case DW_OP_GNU_reinterpret:
2546 case DW_OP_GNU_parameter_ref:
2547 gcc_unreachable ();
2548 break;
2549
2550 default:
2551 /* Other codes have no operands. */
2552 break;
2553 }
2554 }
2555
2556 void
2557 output_loc_sequence_raw (dw_loc_descr_ref loc)
2558 {
2559 while (1)
2560 {
2561 enum dwarf_location_atom opc = loc->dw_loc_opc;
2562 /* Output the opcode. */
2563 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2564 {
2565 unsigned r = (opc - DW_OP_breg0);
2566 r = DWARF2_FRAME_REG_OUT (r, 1);
2567 gcc_assert (r <= 31);
2568 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2569 }
2570 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2571 {
2572 unsigned r = (opc - DW_OP_reg0);
2573 r = DWARF2_FRAME_REG_OUT (r, 1);
2574 gcc_assert (r <= 31);
2575 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2576 }
2577 /* Output the opcode. */
2578 fprintf (asm_out_file, "%#x", opc);
2579 output_loc_operands_raw (loc);
2580
2581 if (!loc->dw_loc_next)
2582 break;
2583 loc = loc->dw_loc_next;
2584
2585 fputc (',', asm_out_file);
2586 }
2587 }
2588
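/* Illustrative note: the raw form above prints the opcode and any operand
   bytes as comma-separated values, so the DW_OP_fbreg -16 descriptor from
   the sketch above would come out roughly as "0x91,0x70", ready to be
   appended to a .cfi_escape directive.  */
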
2589 /* This function builds a dwarf location descriptor sequence from a
2590 dw_cfa_location, adding the given OFFSET to the result of the
2591 expression. */
2592
2593 struct dw_loc_descr_node *
2594 build_cfa_loc (dw_cfa_location *cfa, HOST_WIDE_INT offset)
2595 {
2596 struct dw_loc_descr_node *head, *tmp;
2597
2598 offset += cfa->offset;
2599
2600 if (cfa->indirect)
2601 {
2602 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2603 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2604 head->dw_loc_oprnd1.val_entry = NULL;
2605 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2606 add_loc_descr (&head, tmp);
2607 if (offset != 0)
2608 {
2609 tmp = new_loc_descr (DW_OP_plus_uconst, offset, 0);
2610 add_loc_descr (&head, tmp);
2611 }
2612 }
2613 else
2614 head = new_reg_loc_descr (cfa->reg, offset);
2615
2616 return head;
2617 }
2618
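/* A minimal usage sketch, kept under #if 0 so it is never compiled: describe
   the address 8 bytes above a CFA defined as register 7 plus 16 with no
   indirection.  The requested 8 is folded into the CFA offset, so the
   result is typically a single register-relative operation (DW_OP_breg7 24)
   built by new_reg_loc_descr.  The helper name and register number are
   hypothetical.  */
#if 0
static struct dw_loc_descr_node *
sketch_cfa_plus_8 (void)
{
  dw_cfa_location cfa;
  memset (&cfa, 0, sizeof (cfa));
  cfa.reg = 7;		/* A DWARF column number; 7 is the stack pointer
			   on x86-64, for instance.  */
  cfa.offset = 16;
  cfa.indirect = 0;
  return build_cfa_loc (&cfa, 8);
}
#endif
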
2619 /* This function builds a dwarf location descriptor sequence for
2620 the address at OFFSET from the CFA when the stack is aligned to
2621 ALIGNMENT bytes. */
2622
2623 struct dw_loc_descr_node *
2624 build_cfa_aligned_loc (dw_cfa_location *cfa,
2625 HOST_WIDE_INT offset, HOST_WIDE_INT alignment)
2626 {
2627 struct dw_loc_descr_node *head;
2628 unsigned int dwarf_fp
2629 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2630
2631 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2632 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2633 {
2634 head = new_reg_loc_descr (dwarf_fp, 0);
2635 add_loc_descr (&head, int_loc_descriptor (alignment));
2636 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2637 loc_descr_plus_const (&head, offset);
2638 }
2639 else
2640 head = new_reg_loc_descr (dwarf_fp, offset);
2641 return head;
2642 }
2643 \f
2644 /* And now, the support for symbolic debugging information. */
2645
2646 /* .debug_str support. */
2647
2648 static void dwarf2out_init (const char *);
2649 static void dwarf2out_finish (const char *);
2650 static void dwarf2out_early_finish (const char *);
2651 static void dwarf2out_assembly_start (void);
2652 static void dwarf2out_define (unsigned int, const char *);
2653 static void dwarf2out_undef (unsigned int, const char *);
2654 static void dwarf2out_start_source_file (unsigned, const char *);
2655 static void dwarf2out_end_source_file (unsigned);
2656 static void dwarf2out_function_decl (tree);
2657 static void dwarf2out_begin_block (unsigned, unsigned);
2658 static void dwarf2out_end_block (unsigned, unsigned);
2659 static bool dwarf2out_ignore_block (const_tree);
2660 static void dwarf2out_early_global_decl (tree);
2661 static void dwarf2out_late_global_decl (tree);
2662 static void dwarf2out_type_decl (tree, int);
2663 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool);
2664 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2665 dw_die_ref);
2666 static void dwarf2out_abstract_function (tree);
2667 static void dwarf2out_var_location (rtx_insn *);
2668 static void dwarf2out_size_function (tree);
2669 static void dwarf2out_begin_function (tree);
2670 static void dwarf2out_end_function (unsigned int);
2671 static void dwarf2out_register_main_translation_unit (tree unit);
2672 static void dwarf2out_set_name (tree, tree);
2673
2674 /* The debug hooks structure. */
2675
2676 const struct gcc_debug_hooks dwarf2_debug_hooks =
2677 {
2678 dwarf2out_init,
2679 dwarf2out_finish,
2680 dwarf2out_early_finish,
2681 dwarf2out_assembly_start,
2682 dwarf2out_define,
2683 dwarf2out_undef,
2684 dwarf2out_start_source_file,
2685 dwarf2out_end_source_file,
2686 dwarf2out_begin_block,
2687 dwarf2out_end_block,
2688 dwarf2out_ignore_block,
2689 dwarf2out_source_line,
2690 dwarf2out_begin_prologue,
2691 #if VMS_DEBUGGING_INFO
2692 dwarf2out_vms_end_prologue,
2693 dwarf2out_vms_begin_epilogue,
2694 #else
2695 debug_nothing_int_charstar,
2696 debug_nothing_int_charstar,
2697 #endif
2698 dwarf2out_end_epilogue,
2699 dwarf2out_begin_function,
2700 dwarf2out_end_function, /* end_function */
2701 dwarf2out_register_main_translation_unit,
2702 dwarf2out_function_decl, /* function_decl */
2703 dwarf2out_early_global_decl,
2704 dwarf2out_late_global_decl,
2705 dwarf2out_type_decl, /* type_decl */
2706 dwarf2out_imported_module_or_decl,
2707 debug_nothing_tree, /* deferred_inline_function */
2708 /* The DWARF 2 backend tries to reduce debugging bloat by not
2709 emitting the abstract description of inline functions until
2710 something tries to reference them. */
2711 dwarf2out_abstract_function, /* outlining_inline_function */
2712 debug_nothing_rtx_code_label, /* label */
2713 debug_nothing_int, /* handle_pch */
2714 dwarf2out_var_location,
2715 dwarf2out_size_function, /* size_function */
2716 dwarf2out_switch_text_section,
2717 dwarf2out_set_name,
2718 1, /* start_end_main_source_file */
2719 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2720 };
2721
2722 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2723 {
2724 dwarf2out_init,
2725 debug_nothing_charstar,
2726 debug_nothing_charstar,
2727 dwarf2out_assembly_start,
2728 debug_nothing_int_charstar,
2729 debug_nothing_int_charstar,
2730 debug_nothing_int_charstar,
2731 debug_nothing_int,
2732 debug_nothing_int_int, /* begin_block */
2733 debug_nothing_int_int, /* end_block */
2734 debug_true_const_tree, /* ignore_block */
2735 dwarf2out_source_line, /* source_line */
2736 debug_nothing_int_charstar, /* begin_prologue */
2737 debug_nothing_int_charstar, /* end_prologue */
2738 debug_nothing_int_charstar, /* begin_epilogue */
2739 debug_nothing_int_charstar, /* end_epilogue */
2740 debug_nothing_tree, /* begin_function */
2741 debug_nothing_int, /* end_function */
2742 debug_nothing_tree, /* register_main_translation_unit */
2743 debug_nothing_tree, /* function_decl */
2744 debug_nothing_tree, /* early_global_decl */
2745 debug_nothing_tree, /* late_global_decl */
2746 debug_nothing_tree_int, /* type_decl */
2747 debug_nothing_tree_tree_tree_bool, /* imported_module_or_decl */
2748 debug_nothing_tree, /* deferred_inline_function */
2749 debug_nothing_tree, /* outlining_inline_function */
2750 debug_nothing_rtx_code_label, /* label */
2751 debug_nothing_int, /* handle_pch */
2752 debug_nothing_rtx_insn, /* var_location */
2753 debug_nothing_tree, /* size_function */
2754 debug_nothing_void, /* switch_text_section */
2755 debug_nothing_tree_tree, /* set_name */
2756 0, /* start_end_main_source_file */
2757 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2758 };
2759 \f
2760 /* NOTE: In the comments in this file, many references are made to
2761 "Debugging Information Entries". This term is abbreviated as `DIE'
2762 throughout the remainder of this file. */
2763
2764 /* An internal representation of the DWARF output is built, and then
2765 walked to generate the DWARF debugging info. The walk of the internal
2766 representation is done after the entire program has been compiled.
2767 The types below are used to describe the internal representation. */
2768
2769 /* Whether to put type DIEs into their own section .debug_types instead
2770 of making them part of the .debug_info section. Only supported for
2771 Dwarf V4 or higher, and only when the user hasn't disabled it through
2772 -fno-debug-types-section. It is more efficient to put them in
2773 separate comdat sections since the linker will then be able to
2774 remove duplicates. But not all tools support .debug_types sections
2775 yet. For Dwarf V5 or higher .debug_types no longer exists; instead,
2776 the DW_UT_type unit type is used within the .debug_info section. */
2777
2778 #define use_debug_types (dwarf_version >= 4 && flag_debug_types_section)
2779
2780 /* Various DIE's use offsets relative to the beginning of the
2781 .debug_info section to refer to each other. */
2782
2783 typedef long int dw_offset;
2784
2785 struct comdat_type_node;
2786
2787 /* The entries in the line_info table more-or-less mirror the opcodes
2788 that are used in the real dwarf line table. Arrays of these entries
2789 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2790 supported. */
2791
2792 enum dw_line_info_opcode {
2793 /* Emit DW_LNE_set_address; the operand is the label index. */
2794 LI_set_address,
2795
2796 /* Emit a row to the matrix with the given line. This may be done
2797 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2798 special opcodes. */
2799 LI_set_line,
2800
2801 /* Emit a DW_LNS_set_file. */
2802 LI_set_file,
2803
2804 /* Emit a DW_LNS_set_column. */
2805 LI_set_column,
2806
2807 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2808 LI_negate_stmt,
2809
2810 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2811 LI_set_prologue_end,
2812 LI_set_epilogue_begin,
2813
2814 /* Emit a DW_LNE_set_discriminator. */
2815 LI_set_discriminator
2816 };
2817
2818 typedef struct GTY(()) dw_line_info_struct {
2819 enum dw_line_info_opcode opcode;
2820 unsigned int val;
2821 } dw_line_info_entry;
2822
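/* Illustrative note: a row of the generated line table is represented as a
   short run of these entries; for instance, a statement at file 1, line 42,
   at the address marked by label index 3 might be recorded roughly as
   { LI_set_address, 3 }, { LI_set_file, 1 }, { LI_set_line, 42 }, with
   entries emitted only for values that changed since the previous row.  */
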
2823
2824 struct GTY(()) dw_line_info_table {
2825 /* The label that marks the end of this section. */
2826 const char *end_label;
2827
2828 /* The values for the last row of the matrix, as collected in the table.
2829 These are used to minimize the changes to the next row. */
2830 unsigned int file_num;
2831 unsigned int line_num;
2832 unsigned int column_num;
2833 int discrim_num;
2834 bool is_stmt;
2835 bool in_use;
2836
2837 vec<dw_line_info_entry, va_gc> *entries;
2838 };
2839
2840
2841 /* Each DIE attribute has a field specifying the attribute kind
2842 and an attribute value. Attributes are collected in a vector
2843 attached to the DIE they modify. */
2844
2845 typedef struct GTY(()) dw_attr_struct {
2846 enum dwarf_attribute dw_attr;
2847 dw_val_node dw_attr_val;
2848 }
2849 dw_attr_node;
2850
2851
2852 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
2853 The children of each node form a circular list linked by
2854 die_sib. die_child points to the node *before* the "first" child node. */
2855
2856 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
2857 union die_symbol_or_type_node
2858 {
2859 const char * GTY ((tag ("0"))) die_symbol;
2860 comdat_type_node *GTY ((tag ("1"))) die_type_node;
2861 }
2862 GTY ((desc ("%0.comdat_type_p"))) die_id;
2863 vec<dw_attr_node, va_gc> *die_attr;
2864 dw_die_ref die_parent;
2865 dw_die_ref die_child;
2866 dw_die_ref die_sib;
2867 dw_die_ref die_definition; /* ref from a specification to its definition */
2868 dw_offset die_offset;
2869 unsigned long die_abbrev;
2870 int die_mark;
2871 unsigned int decl_id;
2872 enum dwarf_tag die_tag;
2873 /* Die is used and must not be pruned as unused. */
2874 BOOL_BITFIELD die_perennial_p : 1;
2875 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
2876 /* Whether this DIE was removed from the DIE tree, for example via
2877 prune_unused_types. We don't consider those present from the
2878 DIE lookup routines. */
2879 BOOL_BITFIELD removed : 1;
2880 /* Lots of spare bits. */
2881 }
2882 die_node;
2883
2884 /* Set to TRUE while dwarf2out_early_global_decl is running. */
2885 static bool early_dwarf;
2886 static bool early_dwarf_finished;
2887 struct set_early_dwarf {
2888 bool saved;
2889 set_early_dwarf () : saved(early_dwarf)
2890 {
2891 gcc_assert (! early_dwarf_finished);
2892 early_dwarf = true;
2893 }
2894 ~set_early_dwarf () { early_dwarf = saved; }
2895 };
2896
2897 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
2898 #define FOR_EACH_CHILD(die, c, expr) do { \
2899 c = die->die_child; \
2900 if (c) do { \
2901 c = c->die_sib; \
2902 expr; \
2903 } while (c != die->die_child); \
2904 } while (0)
2905
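/* A minimal usage sketch, kept under #if 0 so it is never compiled: count
   the children of DIE by walking the circular die_sib list through
   FOR_EACH_CHILD.  The helper name is hypothetical.  */
#if 0
static unsigned
sketch_count_children (dw_die_ref die)
{
  dw_die_ref c;
  unsigned n = 0;
  FOR_EACH_CHILD (die, c, n++);
  return n;
}
#endif
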
2906 /* The pubname structure */
2907
2908 typedef struct GTY(()) pubname_struct {
2909 dw_die_ref die;
2910 const char *name;
2911 }
2912 pubname_entry;
2913
2914
2915 struct GTY(()) dw_ranges {
2916 const char *label;
2917 /* If this is positive, it's a block number, otherwise it's a
2918 bitwise-negated index into dw_ranges_by_label. */
2919 int num;
2920 /* Index for the range list for DW_FORM_rnglistx. */
2921 unsigned int idx : 31;
2922 /* True if this range might possibly be in a different section
2923 from the previous entry. */
2924 unsigned int maybe_new_sec : 1;
2925 };
2926
2927 /* A structure to hold a macinfo entry. */
2928
2929 typedef struct GTY(()) macinfo_struct {
2930 unsigned char code;
2931 unsigned HOST_WIDE_INT lineno;
2932 const char *info;
2933 }
2934 macinfo_entry;
2935
2936
2937 struct GTY(()) dw_ranges_by_label {
2938 const char *begin;
2939 const char *end;
2940 };
2941
2942 /* The comdat type node structure. */
2943 struct GTY(()) comdat_type_node
2944 {
2945 dw_die_ref root_die;
2946 dw_die_ref type_die;
2947 dw_die_ref skeleton_die;
2948 char signature[DWARF_TYPE_SIGNATURE_SIZE];
2949 comdat_type_node *next;
2950 };
2951
2952 /* A list of DIEs for which we can't determine ancestry (die_parent
2953 field) just yet. Later in dwarf2out_finish we will fill in the
2954 missing bits. */
2955 typedef struct GTY(()) limbo_die_struct {
2956 dw_die_ref die;
2957 /* The tree for which this DIE was created. We use this to
2958 determine ancestry later. */
2959 tree created_for;
2960 struct limbo_die_struct *next;
2961 }
2962 limbo_die_node;
2963
2964 typedef struct skeleton_chain_struct
2965 {
2966 dw_die_ref old_die;
2967 dw_die_ref new_die;
2968 struct skeleton_chain_struct *parent;
2969 }
2970 skeleton_chain_node;
2971
2972 /* Define a macro which returns nonzero for a TYPE_DECL which was
2973 implicitly generated for a type.
2974
2975 Note that, unlike the C front-end (which generates a NULL named
2976 TYPE_DECL node for each complete tagged type, each array type,
2977 and each function type node created) the C++ front-end generates
2978 a _named_ TYPE_DECL node for each tagged type node created.
2979 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
2980 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
2981 front-end, but for each type, tagged or not. */
2982
2983 #define TYPE_DECL_IS_STUB(decl) \
2984 (DECL_NAME (decl) == NULL_TREE \
2985 || (DECL_ARTIFICIAL (decl) \
2986 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
2987 /* This is necessary for stub decls that \
2988 appear in nested inline functions. */ \
2989 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
2990 && (decl_ultimate_origin (decl) \
2991 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
2992
2993 /* Information concerning the compilation unit's programming
2994 language, and compiler version. */
2995
2996 /* Fixed size portion of the DWARF compilation unit header. */
2997 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
2998 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
2999 + (dwarf_version >= 5 ? 4 : 3))
3000
3001 /* Fixed size portion of the DWARF comdat type unit header. */
3002 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3003 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3004 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3005
3006 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3007 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3008 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3009
3010 /* Fixed size portion of public names info. */
3011 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3012
3013 /* Fixed size portion of the address range info. */
3014 #define DWARF_ARANGES_HEADER_SIZE \
3015 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3016 DWARF2_ADDR_SIZE * 2) \
3017 - DWARF_INITIAL_LENGTH_SIZE)
3018
3019 /* Size of padding portion in the address range info. It must be
3020 aligned to twice the pointer size. */
3021 #define DWARF_ARANGES_PAD_SIZE \
3022 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3023 DWARF2_ADDR_SIZE * 2) \
3024 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
3025
3026 /* Use assembler line directives if available. */
3027 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3028 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3029 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3030 #else
3031 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3032 #endif
3033 #endif
3034
3035 /* Minimum line offset in a special line info. opcode.
3036 This value was chosen to give a reasonable range of values. */
3037 #define DWARF_LINE_BASE -10
3038
3039 /* First special line opcode - leave room for the standard opcodes. */
3040 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3041
3042 /* Range of line offsets in a special line info. opcode. */
3043 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
3044
3045 /* Flag that indicates the initial value of the is_stmt_start flag.
3046 In the present implementation, we do not mark any lines as
3047 the beginning of a source statement, because that information
3048 is not made available by the GCC front-end. */
3049 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3050
3051 /* Maximum number of operations per instruction bundle. */
3052 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3053 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3054 #endif
3055
3056 /* This location is used by calc_die_sizes() to keep track
3057 of the offset of each DIE within the .debug_info section. */
3058 static unsigned long next_die_offset;
3059
3060 /* Record the root of the DIE's built for the current compilation unit. */
3061 static GTY(()) dw_die_ref single_comp_unit_die;
3062
3063 /* A list of type DIEs that have been separated into comdat sections. */
3064 static GTY(()) comdat_type_node *comdat_type_list;
3065
3066 /* A list of CU DIEs that have been separated. */
3067 static GTY(()) limbo_die_node *cu_die_list;
3068
3069 /* A list of DIEs with a NULL parent waiting to be relocated. */
3070 static GTY(()) limbo_die_node *limbo_die_list;
3071
3072 /* A list of DIEs for which we may have to generate
3073 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3074 static GTY(()) limbo_die_node *deferred_asm_name;
3075
3076 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3077 {
3078 typedef const char *compare_type;
3079
3080 static hashval_t hash (dwarf_file_data *);
3081 static bool equal (dwarf_file_data *, const char *);
3082 };
3083
3084 /* Filenames referenced by this compilation unit. */
3085 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3086
3087 struct decl_die_hasher : ggc_ptr_hash<die_node>
3088 {
3089 typedef tree compare_type;
3090
3091 static hashval_t hash (die_node *);
3092 static bool equal (die_node *, tree);
3093 };
3094 /* A hash table of references to DIE's that describe declarations.
3095 The key is a DECL_UID() which is a unique number identifying each decl. */
3096 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3097
3098 struct block_die_hasher : ggc_ptr_hash<die_struct>
3099 {
3100 static hashval_t hash (die_struct *);
3101 static bool equal (die_struct *, die_struct *);
3102 };
3103
3104 /* A hash table of references to DIE's that describe COMMON blocks.
3105 The key is DECL_UID() ^ die_parent. */
3106 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3107
3108 typedef struct GTY(()) die_arg_entry_struct {
3109 dw_die_ref die;
3110 tree arg;
3111 } die_arg_entry;
3112
3113
3114 /* Node of the variable location list. */
3115 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3116 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3117 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3118 in mode of the EXPR_LIST node and first EXPR_LIST operand
3119 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3120 location or NULL for padding. For larger bitsizes,
3121 mode is 0 and first operand is a CONCAT with bitsize
3122 as the first CONCAT operand and NOTE_INSN_VAR_LOCATION or
3123 NULL, respectively, as the second operand. */
3124 rtx GTY (()) loc;
3125 const char * GTY (()) label;
3126 struct var_loc_node * GTY (()) next;
3127 };
3128
3129 /* Variable location list. */
3130 struct GTY ((for_user)) var_loc_list_def {
3131 struct var_loc_node * GTY (()) first;
3132
3133 /* Pointer to the last but one or last element of the
3134 chained list. If the list is empty, both first and
3135 last are NULL. If the list contains just one node,
3136 or the last node is certainly not redundant, it points
3137 to the last node; otherwise it points to the last but one.
3138 Do not mark it for GC because it is marked through the chain. */
3139 struct var_loc_node * GTY ((skip ("%h"))) last;
3140
3141 /* Pointer to the last element before a section switch;
3142 if NULL, either sections weren't switched or first
3143 is after the section switch. */
3144 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3145
3146 /* DECL_UID of the variable decl. */
3147 unsigned int decl_id;
3148 };
3149 typedef struct var_loc_list_def var_loc_list;
3150
3151 /* Call argument location list. */
3152 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3153 rtx GTY (()) call_arg_loc_note;
3154 const char * GTY (()) label;
3155 tree GTY (()) block;
3156 bool tail_call_p;
3157 rtx GTY (()) symbol_ref;
3158 struct call_arg_loc_node * GTY (()) next;
3159 };
3160
3161
3162 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3163 {
3164 typedef const_tree compare_type;
3165
3166 static hashval_t hash (var_loc_list *);
3167 static bool equal (var_loc_list *, const_tree);
3168 };
3169
3170 /* Table of decl location linked lists. */
3171 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3172
3173 /* Head and tail of call_arg_loc chain. */
3174 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3175 static struct call_arg_loc_node *call_arg_loc_last;
3176
3177 /* Number of call sites in the current function. */
3178 static int call_site_count = -1;
3179 /* Number of tail call sites in the current function. */
3180 static int tail_call_site_count = -1;
3181
3182 /* A cached location list. */
3183 struct GTY ((for_user)) cached_dw_loc_list_def {
3184 /* The DECL_UID of the decl that this entry describes. */
3185 unsigned int decl_id;
3186
3187 /* The cached location list. */
3188 dw_loc_list_ref loc_list;
3189 };
3190 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3191
3192 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3193 {
3194
3195 typedef const_tree compare_type;
3196
3197 static hashval_t hash (cached_dw_loc_list *);
3198 static bool equal (cached_dw_loc_list *, const_tree);
3199 };
3200
3201 /* Table of cached location lists. */
3202 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3203
3204 /* A vector of references to DIE's that are uniquely identified by their tag,
3205 presence/absence of children DIE's, and list of attribute/value pairs. */
3206 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3207
3208 /* A hash map to remember the stack usage for DWARF procedures. The value
3209 stored is the stack size difference between before the DWARF procedure
3210 invokation and after it returned. In other words, for a DWARF procedure
3211 that consumes N stack slots and that pushes M ones, this stores M - N. */
3212 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
3213
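/* Illustrative example of the M - N convention above: a DWARF procedure
   that consumes two stack entries and pushes one result would have -1
   recorded in this map.  */
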
3214 /* A global counter for generating labels for line number data. */
3215 static unsigned int line_info_label_num;
3216
3217 /* The current table to which we should emit line number information
3218 for the current function. This will be set up at the beginning of
3219 assembly for the function. */
3220 static GTY(()) dw_line_info_table *cur_line_info_table;
3221
3222 /* The two default tables of line number info. */
3223 static GTY(()) dw_line_info_table *text_section_line_info;
3224 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3225
3226 /* The set of all non-default tables of line number info. */
3227 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3228
3229 /* A flag to tell pubnames/types export if there is an info section to
3230 refer to. */
3231 static bool info_section_emitted;
3232
3233 /* A pointer to the base of a table that contains a list of publicly
3234 accessible names. */
3235 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3236
3237 /* A pointer to the base of a table that contains a list of publicly
3238 accessible types. */
3239 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3240
3241 /* A pointer to the base of a table that contains a list of macro
3242 defines/undefines (and file start/end markers). */
3243 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3244
3245 /* True if .debug_macinfo or .debug_macros section is going to be
3246 emitted. */
3247 #define have_macinfo \
3248 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3249 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3250 && !macinfo_table->is_empty ())
3251
3252 /* Vector of dies for which we should generate .debug_ranges info. */
3253 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3254
3255 /* Vector of pairs of labels referenced in ranges_table. */
3256 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3257
3258 /* Whether we have location lists that need outputting */
3259 static GTY(()) bool have_location_lists;
3260
3261 /* Unique label counter. */
3262 static GTY(()) unsigned int loclabel_num;
3263
3264 /* Unique label counter for point-of-call tables. */
3265 static GTY(()) unsigned int poc_label_num;
3266
3267 /* The last file entry emitted by maybe_emit_file(). */
3268 static GTY(()) struct dwarf_file_data * last_emitted_file;
3269
3270 /* Number of internal labels generated by gen_internal_sym(). */
3271 static GTY(()) int label_num;
3272
3273 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3274
3275 /* Instances of generic types for which we need to generate debug
3276 info that describe their generic parameters and arguments. That
3277 generation needs to happen once all types are properly laid out so
3278 we do it at the end of compilation. */
3279 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3280
3281 /* Offset from the "steady-state frame pointer" to the frame base,
3282 within the current function. */
3283 static HOST_WIDE_INT frame_pointer_fb_offset;
3284 static bool frame_pointer_fb_offset_valid;
3285
3286 static vec<dw_die_ref> base_types;
3287
3288 /* Pointer to vector of DW_TAG_string_type DIEs that need finalization
3289 once all arguments are parsed. */
3290 static vec<dw_die_ref> *string_types;
3291
3292 /* Flags to represent a set of attribute classes for attributes that represent
3293 a scalar value (bounds, pointers, ...). */
3294 enum dw_scalar_form
3295 {
3296 dw_scalar_form_constant = 0x01,
3297 dw_scalar_form_exprloc = 0x02,
3298 dw_scalar_form_reference = 0x04
3299 };
3300
3301 /* Forward declarations for functions defined in this file. */
3302
3303 static int is_pseudo_reg (const_rtx);
3304 static tree type_main_variant (tree);
3305 static int is_tagged_type (const_tree);
3306 static const char *dwarf_tag_name (unsigned);
3307 static const char *dwarf_attr_name (unsigned);
3308 static const char *dwarf_form_name (unsigned);
3309 static tree decl_ultimate_origin (const_tree);
3310 static tree decl_class_context (tree);
3311 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3312 static inline enum dw_val_class AT_class (dw_attr_node *);
3313 static inline unsigned int AT_index (dw_attr_node *);
3314 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3315 static inline unsigned AT_flag (dw_attr_node *);
3316 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3317 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3318 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3319 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3320 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3321 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3322 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3323 unsigned int, unsigned char *);
3324 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3325 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3326 static inline const char *AT_string (dw_attr_node *);
3327 static enum dwarf_form AT_string_form (dw_attr_node *);
3328 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3329 static void add_AT_specification (dw_die_ref, dw_die_ref);
3330 static inline dw_die_ref AT_ref (dw_attr_node *);
3331 static inline int AT_ref_external (dw_attr_node *);
3332 static inline void set_AT_ref_external (dw_attr_node *, int);
3333 static void add_AT_fde_ref (dw_die_ref, enum dwarf_attribute, unsigned);
3334 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3335 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3336 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3337 dw_loc_list_ref);
3338 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3339 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3340 static void remove_addr_table_entry (addr_table_entry *);
3341 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3342 static inline rtx AT_addr (dw_attr_node *);
3343 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3344 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3345 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3346 static void add_AT_loclistsptr (dw_die_ref, enum dwarf_attribute,
3347 const char *);
3348 static void add_AT_offset (dw_die_ref, enum dwarf_attribute,
3349 unsigned HOST_WIDE_INT);
3350 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3351 unsigned long, bool);
3352 static inline const char *AT_lbl (dw_attr_node *);
3353 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3354 static const char *get_AT_low_pc (dw_die_ref);
3355 static const char *get_AT_hi_pc (dw_die_ref);
3356 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3357 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3358 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3359 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3360 static bool is_cxx (void);
3361 static bool is_cxx (const_tree);
3362 static bool is_fortran (void);
3363 static bool is_ada (void);
3364 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3365 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3366 static void add_child_die (dw_die_ref, dw_die_ref);
3367 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3368 static dw_die_ref lookup_type_die (tree);
3369 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3370 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3371 static void equate_type_number_to_die (tree, dw_die_ref);
3372 static dw_die_ref lookup_decl_die (tree);
3373 static var_loc_list *lookup_decl_loc (const_tree);
3374 static void equate_decl_number_to_die (tree, dw_die_ref);
3375 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *);
3376 static void print_spaces (FILE *);
3377 static void print_die (dw_die_ref, FILE *);
3378 static dw_die_ref push_new_compile_unit (dw_die_ref, dw_die_ref);
3379 static dw_die_ref pop_compile_unit (dw_die_ref);
3380 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3381 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3382 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3383 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3384 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3385 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3386 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3387 struct md5_ctx *, int *);
3388 struct checksum_attributes;
3389 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3390 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3391 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3392 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3393 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3394 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3395 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3396 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3397 static int same_die_p_wrap (dw_die_ref, dw_die_ref);
3398 static void compute_section_prefix (dw_die_ref);
3399 static int is_type_die (dw_die_ref);
3400 static int is_comdat_die (dw_die_ref);
3401 static int is_symbol_die (dw_die_ref);
3402 static inline bool is_template_instantiation (dw_die_ref);
3403 static void assign_symbol_names (dw_die_ref);
3404 static void break_out_includes (dw_die_ref);
3405 static int is_declaration_die (dw_die_ref);
3406 static int should_move_die_to_comdat (dw_die_ref);
3407 static dw_die_ref clone_as_declaration (dw_die_ref);
3408 static dw_die_ref clone_die (dw_die_ref);
3409 static dw_die_ref clone_tree (dw_die_ref);
3410 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3411 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3412 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3413 static dw_die_ref generate_skeleton (dw_die_ref);
3414 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3415 dw_die_ref,
3416 dw_die_ref);
3417 static void break_out_comdat_types (dw_die_ref);
3418 static void copy_decls_for_unworthy_types (dw_die_ref);
3419
3420 static void add_sibling_attributes (dw_die_ref);
3421 static void output_location_lists (dw_die_ref);
3422 static int constant_size (unsigned HOST_WIDE_INT);
3423 static unsigned long size_of_die (dw_die_ref);
3424 static void calc_die_sizes (dw_die_ref);
3425 static void calc_base_type_die_sizes (void);
3426 static void mark_dies (dw_die_ref);
3427 static void unmark_dies (dw_die_ref);
3428 static void unmark_all_dies (dw_die_ref);
3429 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3430 static unsigned long size_of_aranges (void);
3431 static enum dwarf_form value_format (dw_attr_node *);
3432 static void output_value_format (dw_attr_node *);
3433 static void output_abbrev_section (void);
3434 static void output_die_abbrevs (unsigned long, dw_die_ref);
3435 static void output_die_symbol (dw_die_ref);
3436 static void output_die (dw_die_ref);
3437 static void output_compilation_unit_header (enum dwarf_unit_type);
3438 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3439 static void output_comdat_type_unit (comdat_type_node *);
3440 static const char *dwarf2_name (tree, int);
3441 static void add_pubname (tree, dw_die_ref);
3442 static void add_enumerator_pubname (const char *, dw_die_ref);
3443 static void add_pubname_string (const char *, dw_die_ref);
3444 static void add_pubtype (tree, dw_die_ref);
3445 static void output_pubnames (vec<pubname_entry, va_gc> *);
3446 static void output_aranges (void);
3447 static unsigned int add_ranges (const_tree, bool = false);
3448 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3449 bool *, bool);
3450 static void output_ranges (void);
3451 static dw_line_info_table *new_line_info_table (void);
3452 static void output_line_info (bool);
3453 static void output_file_names (void);
3454 static dw_die_ref base_type_die (tree, bool);
3455 static int is_base_type (tree);
3456 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3457 static int decl_quals (const_tree);
3458 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3459 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3460 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3461 static int type_is_enum (const_tree);
3462 static unsigned int dbx_reg_number (const_rtx);
3463 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3464 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3465 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3466 enum var_init_status);
3467 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3468 enum var_init_status);
3469 static dw_loc_descr_ref based_loc_descr (rtx, HOST_WIDE_INT,
3470 enum var_init_status);
3471 static int is_based_loc (const_rtx);
3472 static bool resolve_one_addr (rtx *);
3473 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3474 enum var_init_status);
3475 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3476 enum var_init_status);
3477 struct loc_descr_context;
3478 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3479 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3480 static dw_loc_list_ref loc_list_from_tree (tree, int,
3481 struct loc_descr_context *);
3482 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3483 struct loc_descr_context *);
3484 static HOST_WIDE_INT ceiling (HOST_WIDE_INT, unsigned int);
3485 static tree field_type (const_tree);
3486 static unsigned int simple_type_align_in_bits (const_tree);
3487 static unsigned int simple_decl_align_in_bits (const_tree);
3488 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3489 struct vlr_context;
3490 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3491 HOST_WIDE_INT *);
3492 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3493 dw_loc_list_ref);
3494 static void add_data_member_location_attribute (dw_die_ref, tree,
3495 struct vlr_context *);
3496 static bool add_const_value_attribute (dw_die_ref, rtx);
3497 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3498 static void insert_wide_int (const wide_int &, unsigned char *, int);
3499 static void insert_float (const_rtx, unsigned char *);
3500 static rtx rtl_for_decl_location (tree);
3501 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3502 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3503 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3504 static void add_name_attribute (dw_die_ref, const char *);
3505 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3506 static void add_comp_dir_attribute (dw_die_ref);
3507 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3508 struct loc_descr_context *);
3509 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3510 struct loc_descr_context *);
3511 static void add_subscript_info (dw_die_ref, tree, bool);
3512 static void add_byte_size_attribute (dw_die_ref, tree);
3513 static void add_alignment_attribute (dw_die_ref, tree);
3514 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3515 struct vlr_context *);
3516 static void add_bit_size_attribute (dw_die_ref, tree);
3517 static void add_prototyped_attribute (dw_die_ref, tree);
3518 static dw_die_ref add_abstract_origin_attribute (dw_die_ref, tree);
3519 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3520 static void add_src_coords_attributes (dw_die_ref, tree);
3521 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3522 static void add_discr_value (dw_die_ref, dw_discr_value *);
3523 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3524 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3525 static void push_decl_scope (tree);
3526 static void pop_decl_scope (void);
3527 static dw_die_ref scope_die_for (tree, dw_die_ref);
3528 static inline int local_scope_p (dw_die_ref);
3529 static inline int class_scope_p (dw_die_ref);
3530 static inline int class_or_namespace_scope_p (dw_die_ref);
3531 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3532 static void add_calling_convention_attribute (dw_die_ref, tree);
3533 static const char *type_tag (const_tree);
3534 static tree member_declared_type (const_tree);
3535 #if 0
3536 static const char *decl_start_label (tree);
3537 #endif
3538 static void gen_array_type_die (tree, dw_die_ref);
3539 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3540 #if 0
3541 static void gen_entry_point_die (tree, dw_die_ref);
3542 #endif
3543 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3544 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3545 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3546 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3547 static void gen_formal_types_die (tree, dw_die_ref);
3548 static void gen_subprogram_die (tree, dw_die_ref);
3549 static void gen_variable_die (tree, tree, dw_die_ref);
3550 static void gen_const_die (tree, dw_die_ref);
3551 static void gen_label_die (tree, dw_die_ref);
3552 static void gen_lexical_block_die (tree, dw_die_ref);
3553 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3554 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3555 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3556 static dw_die_ref gen_compile_unit_die (const char *);
3557 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3558 static void gen_member_die (tree, dw_die_ref);
3559 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3560 enum debug_info_usage);
3561 static void gen_subroutine_type_die (tree, dw_die_ref);
3562 static void gen_typedef_die (tree, dw_die_ref);
3563 static void gen_type_die (tree, dw_die_ref);
3564 static void gen_block_die (tree, dw_die_ref);
3565 static void decls_for_scope (tree, dw_die_ref);
3566 static bool is_naming_typedef_decl (const_tree);
3567 static inline dw_die_ref get_context_die (tree);
3568 static void gen_namespace_die (tree, dw_die_ref);
3569 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3570 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3571 static dw_die_ref force_decl_die (tree);
3572 static dw_die_ref force_type_die (tree);
3573 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3574 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3575 static struct dwarf_file_data * lookup_filename (const char *);
3576 static void retry_incomplete_types (void);
3577 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3578 static void gen_generic_params_dies (tree);
3579 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3580 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3581 static void splice_child_die (dw_die_ref, dw_die_ref);
3582 static int file_info_cmp (const void *, const void *);
3583 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *,
3584 const char *, const char *);
3585 static void output_loc_list (dw_loc_list_ref);
3586 static char *gen_internal_sym (const char *);
3587 static bool want_pubnames (void);
3588
3589 static void prune_unmark_dies (dw_die_ref);
3590 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3591 static void prune_unused_types_mark (dw_die_ref, int);
3592 static void prune_unused_types_walk (dw_die_ref);
3593 static void prune_unused_types_walk_attribs (dw_die_ref);
3594 static void prune_unused_types_prune (dw_die_ref);
3595 static void prune_unused_types (void);
3596 static int maybe_emit_file (struct dwarf_file_data *fd);
3597 static inline const char *AT_vms_delta1 (dw_attr_node *);
3598 static inline const char *AT_vms_delta2 (dw_attr_node *);
3599 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3600 const char *, const char *);
3601 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3602 static void gen_remaining_tmpl_value_param_die_attribute (void);
3603 static bool generic_type_p (tree);
3604 static void schedule_generic_params_dies_gen (tree t);
3605 static void gen_scheduled_generic_parms_dies (void);
3606
3607 static const char *comp_dir_string (void);
3608
3609 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3610
3611 /* enum for tracking thread-local variables whose address is really an offset
3612 relative to the TLS pointer, which will need link-time relocation, but will
3613 not need relocation by the DWARF consumer. */
3614
3615 enum dtprel_bool
3616 {
3617 dtprel_false = 0,
3618 dtprel_true = 1
3619 };
3620
3621 /* Return the operator to use for an address of a variable. For dtprel_true, we
3622 use DW_OP_const*. For regular variables, which need both link-time
3623 relocation and consumer-level relocation (e.g., to account for shared objects
3624 loaded at a random address), we use DW_OP_addr*. */
3625
3626 static inline enum dwarf_location_atom
3627 dw_addr_op (enum dtprel_bool dtprel)
3628 {
3629 if (dtprel == dtprel_true)
3630 return (dwarf_split_debug_info ? DW_OP_GNU_const_index
3631 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3632 else
3633 return dwarf_split_debug_info ? DW_OP_GNU_addr_index : DW_OP_addr;
3634 }
3635
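/* Illustrative summary (editorial sketch, not part of the emitter): the
   operator dw_addr_op selects depends only on the two inputs it checks:

     dtprel_false, !dwarf_split_debug_info  ->  DW_OP_addr
     dtprel_false,  dwarf_split_debug_info  ->  DW_OP_GNU_addr_index
     dtprel_true,  !dwarf_split_debug_info  ->  DW_OP_const4u / DW_OP_const8u
                                                (per DWARF2_ADDR_SIZE)
     dtprel_true,   dwarf_split_debug_info  ->  DW_OP_GNU_const_index  */
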
3636 /* Return a pointer to a newly allocated address location description. If
3637 dwarf_split_debug_info is true, then record the address with the appropriate
3638 relocation. */
3639 static inline dw_loc_descr_ref
3640 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3641 {
3642 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3643
3644 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3645 ref->dw_loc_oprnd1.v.val_addr = addr;
3646 ref->dtprel = dtprel;
3647 if (dwarf_split_debug_info)
3648 ref->dw_loc_oprnd1.val_entry
3649 = add_addr_table_entry (addr,
3650 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3651 else
3652 ref->dw_loc_oprnd1.val_entry = NULL;
3653
3654 return ref;
3655 }
3656
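/* Illustrative use (a hedged sketch; ADDR is a placeholder, not a name from
   this file): to describe a variable located at the fixed address ADDR one
   would start from

     dw_loc_descr_ref loc = new_addr_loc_descr (ADDR, dtprel_false);

   which produces a single DW_OP_addr operation (or DW_OP_GNU_addr_index
   plus a .debug_addr entry when dwarf_split_debug_info is set) carrying
   ADDR in its first operand.  */
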
3657 /* Section names used to hold DWARF debugging information. */
3658
3659 #ifndef DEBUG_INFO_SECTION
3660 #define DEBUG_INFO_SECTION ".debug_info"
3661 #endif
3662 #ifndef DEBUG_DWO_INFO_SECTION
3663 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3664 #endif
3665 #ifndef DEBUG_ABBREV_SECTION
3666 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3667 #endif
3668 #ifndef DEBUG_DWO_ABBREV_SECTION
3669 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3670 #endif
3671 #ifndef DEBUG_ARANGES_SECTION
3672 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3673 #endif
3674 #ifndef DEBUG_ADDR_SECTION
3675 #define DEBUG_ADDR_SECTION ".debug_addr"
3676 #endif
3677 #ifndef DEBUG_MACINFO_SECTION
3678 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
3679 #endif
3680 #ifndef DEBUG_DWO_MACINFO_SECTION
3681 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
3682 #endif
3683 #ifndef DEBUG_DWO_MACRO_SECTION
3684 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
3685 #endif
3686 #ifndef DEBUG_MACRO_SECTION
3687 #define DEBUG_MACRO_SECTION ".debug_macro"
3688 #endif
3689 #ifndef DEBUG_LINE_SECTION
3690 #define DEBUG_LINE_SECTION ".debug_line"
3691 #endif
3692 #ifndef DEBUG_DWO_LINE_SECTION
3693 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
3694 #endif
3695 #ifndef DEBUG_LOC_SECTION
3696 #define DEBUG_LOC_SECTION ".debug_loc"
3697 #endif
3698 #ifndef DEBUG_DWO_LOC_SECTION
3699 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
3700 #endif
3701 #ifndef DEBUG_LOCLISTS_SECTION
3702 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
3703 #endif
3704 #ifndef DEBUG_DWO_LOCLISTS_SECTION
3705 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
3706 #endif
3707 #ifndef DEBUG_PUBNAMES_SECTION
3708 #define DEBUG_PUBNAMES_SECTION \
3709 ((debug_generate_pub_sections == 2) \
3710 ? ".debug_gnu_pubnames" : ".debug_pubnames")
3711 #endif
3712 #ifndef DEBUG_PUBTYPES_SECTION
3713 #define DEBUG_PUBTYPES_SECTION \
3714 ((debug_generate_pub_sections == 2) \
3715 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
3716 #endif
3717 #ifndef DEBUG_STR_OFFSETS_SECTION
3718 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
3719 #endif
3720 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
3721 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
3722 #endif
3723 #ifndef DEBUG_STR_DWO_SECTION
3724 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
3725 #endif
3726 #ifndef DEBUG_STR_SECTION
3727 #define DEBUG_STR_SECTION ".debug_str"
3728 #endif
3729 #ifndef DEBUG_RANGES_SECTION
3730 #define DEBUG_RANGES_SECTION ".debug_ranges"
3731 #endif
3732 #ifndef DEBUG_RNGLISTS_SECTION
3733 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
3734 #endif
3735 #ifndef DEBUG_LINE_STR_SECTION
3736 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
3737 #endif
3738
3739 /* Standard ELF section names for compiled code and data. */
3740 #ifndef TEXT_SECTION_NAME
3741 #define TEXT_SECTION_NAME ".text"
3742 #endif
3743
3744 /* Section flags for .debug_str section. */
3745 #define DEBUG_STR_SECTION_FLAGS \
3746 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
3747 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
3748 : SECTION_DEBUG)
3749
3750 /* Section flags for .debug_str.dwo section. */
3751 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
3752
3753 /* Labels we insert at the beginning of sections so we can reference them
3754 instead of the section names themselves. */
3755
3756 #ifndef TEXT_SECTION_LABEL
3757 #define TEXT_SECTION_LABEL "Ltext"
3758 #endif
3759 #ifndef COLD_TEXT_SECTION_LABEL
3760 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
3761 #endif
3762 #ifndef DEBUG_LINE_SECTION_LABEL
3763 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
3764 #endif
3765 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
3766 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
3767 #endif
3768 #ifndef DEBUG_INFO_SECTION_LABEL
3769 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
3770 #endif
3771 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
3772 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
3773 #endif
3774 #ifndef DEBUG_ABBREV_SECTION_LABEL
3775 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
3776 #endif
3777 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
3778 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
3779 #endif
3780 #ifndef DEBUG_ADDR_SECTION_LABEL
3781 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
3782 #endif
3783 #ifndef DEBUG_LOC_SECTION_LABEL
3784 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
3785 #endif
3786 #ifndef DEBUG_RANGES_SECTION_LABEL
3787 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
3788 #endif
3789 #ifndef DEBUG_MACINFO_SECTION_LABEL
3790 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
3791 #endif
3792 #ifndef DEBUG_MACRO_SECTION_LABEL
3793 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
3794 #endif
3795 #define SKELETON_COMP_DIE_ABBREV 1
3796 #define SKELETON_TYPE_DIE_ABBREV 2
3797
3798 /* Definitions of defaults for formats and names of various special
3799 (artificial) labels which may be generated within this file (when the -g
3800 option is used and DWARF2_DEBUGGING_INFO is in effect).
3801 If necessary, these may be overridden from within the tm.h file, but
3802 typically, overriding these defaults is unnecessary. */
3803
3804 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
3805 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3806 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3807 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
3808 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3809 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3810 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3811 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3812 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3813 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3814 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3815 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3816 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3817 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
3818 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
3819
3820 #ifndef TEXT_END_LABEL
3821 #define TEXT_END_LABEL "Letext"
3822 #endif
3823 #ifndef COLD_END_LABEL
3824 #define COLD_END_LABEL "Letext_cold"
3825 #endif
3826 #ifndef BLOCK_BEGIN_LABEL
3827 #define BLOCK_BEGIN_LABEL "LBB"
3828 #endif
3829 #ifndef BLOCK_END_LABEL
3830 #define BLOCK_END_LABEL "LBE"
3831 #endif
3832 #ifndef LINE_CODE_LABEL
3833 #define LINE_CODE_LABEL "LM"
3834 #endif
3835
3836 \f
3837 /* Return the root of the DIEs built for the current compilation unit. */
3838 static dw_die_ref
3839 comp_unit_die (void)
3840 {
3841 if (!single_comp_unit_die)
3842 single_comp_unit_die = gen_compile_unit_die (NULL);
3843 return single_comp_unit_die;
3844 }
3845
3846 /* We allow a language front-end to designate a function that is to be
3847 called to "demangle" any name before it is put into a DIE. */
3848
3849 static const char *(*demangle_name_func) (const char *);
3850
3851 void
3852 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
3853 {
3854 demangle_name_func = func;
3855 }
3856
3857 /* Test if rtl node points to a pseudo register. */
3858
3859 static inline int
3860 is_pseudo_reg (const_rtx rtl)
3861 {
3862 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
3863 || (GET_CODE (rtl) == SUBREG
3864 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
3865 }
3866
3867 /* Return a reference to a type, with its const and volatile qualifiers
3868 removed. */
3869
3870 static inline tree
3871 type_main_variant (tree type)
3872 {
3873 type = TYPE_MAIN_VARIANT (type);
3874
3875 /* ??? There really should be only one main variant among any group of
3876 variants of a given type (and all of the MAIN_VARIANT values for all
3877 members of the group should point to that one type) but sometimes the C
3878 front-end messes this up for array types, so we work around that bug
3879 here. */
3880 if (TREE_CODE (type) == ARRAY_TYPE)
3881 while (type != TYPE_MAIN_VARIANT (type))
3882 type = TYPE_MAIN_VARIANT (type);
3883
3884 return type;
3885 }
3886
3887 /* Return nonzero if the given type node represents a tagged type. */
3888
3889 static inline int
3890 is_tagged_type (const_tree type)
3891 {
3892 enum tree_code code = TREE_CODE (type);
3893
3894 return (code == RECORD_TYPE || code == UNION_TYPE
3895 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
3896 }
3897
3898 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
3899
3900 static void
3901 get_ref_die_offset_label (char *label, dw_die_ref ref)
3902 {
3903 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
3904 }
3905
3906 /* Return die_offset of a DIE reference to a base type. */
3907
3908 static unsigned long int
3909 get_base_type_offset (dw_die_ref ref)
3910 {
3911 if (ref->die_offset)
3912 return ref->die_offset;
3913 if (comp_unit_die ()->die_abbrev)
3914 {
3915 calc_base_type_die_sizes ();
3916 gcc_assert (ref->die_offset);
3917 }
3918 return ref->die_offset;
3919 }
3920
3921 /* Return die_offset of a DIE reference other than base type. */
3922
3923 static unsigned long int
3924 get_ref_die_offset (dw_die_ref ref)
3925 {
3926 gcc_assert (ref->die_offset);
3927 return ref->die_offset;
3928 }
3929
3930 /* Convert a DIE tag into its string name. */
3931
3932 static const char *
3933 dwarf_tag_name (unsigned int tag)
3934 {
3935 const char *name = get_DW_TAG_name (tag);
3936
3937 if (name != NULL)
3938 return name;
3939
3940 return "DW_TAG_<unknown>";
3941 }
3942
3943 /* Convert a DWARF attribute code into its string name. */
3944
3945 static const char *
3946 dwarf_attr_name (unsigned int attr)
3947 {
3948 const char *name;
3949
3950 switch (attr)
3951 {
3952 #if VMS_DEBUGGING_INFO
3953 case DW_AT_HP_prologue:
3954 return "DW_AT_HP_prologue";
3955 #else
3956 case DW_AT_MIPS_loop_unroll_factor:
3957 return "DW_AT_MIPS_loop_unroll_factor";
3958 #endif
3959
3960 #if VMS_DEBUGGING_INFO
3961 case DW_AT_HP_epilogue:
3962 return "DW_AT_HP_epilogue";
3963 #else
3964 case DW_AT_MIPS_stride:
3965 return "DW_AT_MIPS_stride";
3966 #endif
3967 }
3968
3969 name = get_DW_AT_name (attr);
3970
3971 if (name != NULL)
3972 return name;
3973
3974 return "DW_AT_<unknown>";
3975 }
3976
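/* Illustrative note: the explicit cases above exist because the vendor
   attribute numbering overlaps -- the HP and MIPS extensions reuse the same
   numeric codes (DW_AT_HP_prologue vs. DW_AT_MIPS_loop_unroll_factor, and
   DW_AT_HP_epilogue vs. DW_AT_MIPS_stride) -- so the ambiguous codes are
   resolved by VMS_DEBUGGING_INFO before falling back to get_DW_AT_name.  */
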
3977 /* Convert a DWARF value form code into its string name. */
3978
3979 static const char *
3980 dwarf_form_name (unsigned int form)
3981 {
3982 const char *name = get_DW_FORM_name (form);
3983
3984 if (name != NULL)
3985 return name;
3986
3987 return "DW_FORM_<unknown>";
3988 }
3989 \f
3990 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
3991 instance of an inlined instance of a decl which is local to an inline
3992 function, so we have to trace all of the way back through the origin chain
3993 to find out what sort of node actually served as the original seed for the
3994 given block. */
3995
3996 static tree
3997 decl_ultimate_origin (const_tree decl)
3998 {
3999 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4000 return NULL_TREE;
4001
4002 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4003 we're trying to output the abstract instance of this function. */
4004 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4005 return NULL_TREE;
4006
4007 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4008 most distant ancestor, this should never happen. */
4009 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4010
4011 return DECL_ABSTRACT_ORIGIN (decl);
4012 }
4013
4014 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4015 of a virtual function may refer to a base class, so we check the 'this'
4016 parameter. */
4017
4018 static tree
4019 decl_class_context (tree decl)
4020 {
4021 tree context = NULL_TREE;
4022
4023 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4024 context = DECL_CONTEXT (decl);
4025 else
4026 context = TYPE_MAIN_VARIANT
4027 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4028
4029 if (context && !TYPE_P (context))
4030 context = NULL_TREE;
4031
4032 return context;
4033 }
4034 \f
4035 /* Add an attribute/value pair to a DIE. */
4036
4037 static inline void
4038 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4039 {
4040 /* Maybe this should be an assert? */
4041 if (die == NULL)
4042 return;
4043
4044 vec_safe_reserve (die->die_attr, 1);
4045 vec_safe_push (die->die_attr, *attr);
4046 }
4047
4048 static inline enum dw_val_class
4049 AT_class (dw_attr_node *a)
4050 {
4051 return a->dw_attr_val.val_class;
4052 }
4053
4054 /* Return the index for any attribute that will be referenced with a
4055 DW_FORM_GNU_addr_index or DW_FORM_GNU_str_index. String indices
4056 are stored in dw_attr_val.v.val_str for reference counting
4057 pruning. */
4058
4059 static inline unsigned int
4060 AT_index (dw_attr_node *a)
4061 {
4062 if (AT_class (a) == dw_val_class_str)
4063 return a->dw_attr_val.v.val_str->index;
4064 else if (a->dw_attr_val.val_entry != NULL)
4065 return a->dw_attr_val.val_entry->index;
4066 return NOT_INDEXED;
4067 }
4068
4069 /* Add a flag value attribute to a DIE. */
4070
4071 static inline void
4072 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4073 {
4074 dw_attr_node attr;
4075
4076 attr.dw_attr = attr_kind;
4077 attr.dw_attr_val.val_class = dw_val_class_flag;
4078 attr.dw_attr_val.val_entry = NULL;
4079 attr.dw_attr_val.v.val_flag = flag;
4080 add_dwarf_attr (die, &attr);
4081 }
4082
4083 static inline unsigned
4084 AT_flag (dw_attr_node *a)
4085 {
4086 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4087 return a->dw_attr_val.v.val_flag;
4088 }
4089
4090 /* Add a signed integer attribute value to a DIE. */
4091
4092 static inline void
4093 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4094 {
4095 dw_attr_node attr;
4096
4097 attr.dw_attr = attr_kind;
4098 attr.dw_attr_val.val_class = dw_val_class_const;
4099 attr.dw_attr_val.val_entry = NULL;
4100 attr.dw_attr_val.v.val_int = int_val;
4101 add_dwarf_attr (die, &attr);
4102 }
4103
4104 static inline HOST_WIDE_INT
4105 AT_int (dw_attr_node *a)
4106 {
4107 gcc_assert (a && (AT_class (a) == dw_val_class_const
4108 || AT_class (a) == dw_val_class_const_implicit));
4109 return a->dw_attr_val.v.val_int;
4110 }
4111
4112 /* Add an unsigned integer attribute value to a DIE. */
4113
4114 static inline void
4115 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4116 unsigned HOST_WIDE_INT unsigned_val)
4117 {
4118 dw_attr_node attr;
4119
4120 attr.dw_attr = attr_kind;
4121 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4122 attr.dw_attr_val.val_entry = NULL;
4123 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4124 add_dwarf_attr (die, &attr);
4125 }
4126
4127 static inline unsigned HOST_WIDE_INT
4128 AT_unsigned (dw_attr_node *a)
4129 {
4130 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4131 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4132 return a->dw_attr_val.v.val_unsigned;
4133 }
4134
4135 /* Add an unsigned wide integer attribute value to a DIE. */
4136
4137 static inline void
4138 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4139 const wide_int& w)
4140 {
4141 dw_attr_node attr;
4142
4143 attr.dw_attr = attr_kind;
4144 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4145 attr.dw_attr_val.val_entry = NULL;
4146 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4147 *attr.dw_attr_val.v.val_wide = w;
4148 add_dwarf_attr (die, &attr);
4149 }
4150
4151 /* Add an unsigned double integer attribute value to a DIE. */
4152
4153 static inline void
4154 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4155 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4156 {
4157 dw_attr_node attr;
4158
4159 attr.dw_attr = attr_kind;
4160 attr.dw_attr_val.val_class = dw_val_class_const_double;
4161 attr.dw_attr_val.val_entry = NULL;
4162 attr.dw_attr_val.v.val_double.high = high;
4163 attr.dw_attr_val.v.val_double.low = low;
4164 add_dwarf_attr (die, &attr);
4165 }
4166
4167 /* Add a vector (array of bytes) attribute value to a DIE. */
4168
4169 static inline void
4170 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4171 unsigned int length, unsigned int elt_size, unsigned char *array)
4172 {
4173 dw_attr_node attr;
4174
4175 attr.dw_attr = attr_kind;
4176 attr.dw_attr_val.val_class = dw_val_class_vec;
4177 attr.dw_attr_val.val_entry = NULL;
4178 attr.dw_attr_val.v.val_vec.length = length;
4179 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4180 attr.dw_attr_val.v.val_vec.array = array;
4181 add_dwarf_attr (die, &attr);
4182 }
4183
4184 /* Add an 8-byte data attribute value to a DIE. */
4185
4186 static inline void
4187 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4188 unsigned char data8[8])
4189 {
4190 dw_attr_node attr;
4191
4192 attr.dw_attr = attr_kind;
4193 attr.dw_attr_val.val_class = dw_val_class_data8;
4194 attr.dw_attr_val.val_entry = NULL;
4195 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4196 add_dwarf_attr (die, &attr);
4197 }
4198
4199 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4200 dwarf_split_debug_info, address attributes in dies destined for the
4201 final executable have force_direct set to avoid using indexed
4202 references. */
4203
4204 static inline void
4205 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4206 bool force_direct)
4207 {
4208 dw_attr_node attr;
4209 char * lbl_id;
4210
4211 lbl_id = xstrdup (lbl_low);
4212 attr.dw_attr = DW_AT_low_pc;
4213 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4214 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4215 if (dwarf_split_debug_info && !force_direct)
4216 attr.dw_attr_val.val_entry
4217 = add_addr_table_entry (lbl_id, ate_kind_label);
4218 else
4219 attr.dw_attr_val.val_entry = NULL;
4220 add_dwarf_attr (die, &attr);
4221
4222 attr.dw_attr = DW_AT_high_pc;
4223 if (dwarf_version < 4)
4224 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4225 else
4226 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4227 lbl_id = xstrdup (lbl_high);
4228 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4229 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4230 && dwarf_split_debug_info && !force_direct)
4231 attr.dw_attr_val.val_entry
4232 = add_addr_table_entry (lbl_id, ate_kind_label);
4233 else
4234 attr.dw_attr_val.val_entry = NULL;
4235 add_dwarf_attr (die, &attr);
4236 }
4237
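/* Worked example (illustrative; the label names are made up): given a
   function bounded by ".LFB0" and ".LFE0",

     add_AT_low_high_pc (die, ".LFB0", ".LFE0", false);

   records DW_AT_low_pc as a label reference and DW_AT_high_pc either as a
   second label (dwarf_version < 4) or with class dw_val_class_high_pc, in
   which case later output can express it as an offset from DW_AT_low_pc.  */
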
4238 /* Hash and equality functions for debug_str_hash. */
4239
4240 hashval_t
4241 indirect_string_hasher::hash (indirect_string_node *x)
4242 {
4243 return htab_hash_string (x->str);
4244 }
4245
4246 bool
4247 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4248 {
4249 return strcmp (x1->str, x2) == 0;
4250 }
4251
4252 /* Add STR to the given string hash table. */
4253
4254 static struct indirect_string_node *
4255 find_AT_string_in_table (const char *str,
4256 hash_table<indirect_string_hasher> *table)
4257 {
4258 struct indirect_string_node *node;
4259
4260 indirect_string_node **slot
4261 = table->find_slot_with_hash (str, htab_hash_string (str), INSERT);
4262 if (*slot == NULL)
4263 {
4264 node = ggc_cleared_alloc<indirect_string_node> ();
4265 node->str = ggc_strdup (str);
4266 *slot = node;
4267 }
4268 else
4269 node = *slot;
4270
4271 node->refcount++;
4272 return node;
4273 }
4274
4275 /* Add STR to the indirect string hash table. */
4276
4277 static struct indirect_string_node *
4278 find_AT_string (const char *str)
4279 {
4280 if (! debug_str_hash)
4281 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4282
4283 return find_AT_string_in_table (str, debug_str_hash);
4284 }
4285
4286 /* Add a string attribute value to a DIE. */
4287
4288 static inline void
4289 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4290 {
4291 dw_attr_node attr;
4292 struct indirect_string_node *node;
4293
4294 node = find_AT_string (str);
4295
4296 attr.dw_attr = attr_kind;
4297 attr.dw_attr_val.val_class = dw_val_class_str;
4298 attr.dw_attr_val.val_entry = NULL;
4299 attr.dw_attr_val.v.val_str = node;
4300 add_dwarf_attr (die, &attr);
4301 }
4302
4303 static inline const char *
4304 AT_string (dw_attr_node *a)
4305 {
4306 gcc_assert (a && AT_class (a) == dw_val_class_str);
4307 return a->dw_attr_val.v.val_str->str;
4308 }
4309
4310 /* Call this function directly to bypass AT_string_form's logic to put
4311 the string inline in the die. */
4312
4313 static void
4314 set_indirect_string (struct indirect_string_node *node)
4315 {
4316 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4317 /* Already indirect is a no op. */
4318 if (node->form == DW_FORM_strp
4319 || node->form == DW_FORM_line_strp
4320 || node->form == DW_FORM_GNU_str_index)
4321 {
4322 gcc_assert (node->label);
4323 return;
4324 }
4325 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4326 ++dw2_string_counter;
4327 node->label = xstrdup (label);
4328
4329 if (!dwarf_split_debug_info)
4330 {
4331 node->form = DW_FORM_strp;
4332 node->index = NOT_INDEXED;
4333 }
4334 else
4335 {
4336 node->form = DW_FORM_GNU_str_index;
4337 node->index = NO_INDEX_ASSIGNED;
4338 }
4339 }
4340
4341 /* Find out whether a string should be output inline in DIE
4342 or out-of-line in .debug_str section. */
4343
4344 static enum dwarf_form
4345 find_string_form (struct indirect_string_node *node)
4346 {
4347 unsigned int len;
4348
4349 if (node->form)
4350 return node->form;
4351
4352 len = strlen (node->str) + 1;
4353
4354 /* If the string is no longer than the size of the reference, it is
4355 always better to put it inline. */
4356 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4357 return node->form = DW_FORM_string;
4358
4359 /* If we cannot expect the linker to merge strings in .debug_str
4360 section, only put it into .debug_str if it is worth even in this
4361 single module. */
4362 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4363 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4364 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4365 return node->form = DW_FORM_string;
4366
4367 set_indirect_string (node);
4368
4369 return node->form;
4370 }
4371
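/* Worked example (illustrative): with DWARF_OFFSET_SIZE == 4, the string
   "abc" (strlen 3, so LEN == 4 with the terminating NUL) satisfies
   LEN <= DWARF_OFFSET_SIZE and is emitted inline as DW_FORM_string.  A
   40-byte string referenced from two DIEs gives (40 - 4) * 2 > 40, so,
   assuming the target supports indirect strings, it goes through
   set_indirect_string and becomes DW_FORM_strp, or DW_FORM_GNU_str_index
   under dwarf_split_debug_info.  */
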
4372 /* Find out whether the string referenced from the attribute should be
4373 output inline in DIE or out-of-line in .debug_str section. */
4374
4375 static enum dwarf_form
4376 AT_string_form (dw_attr_node *a)
4377 {
4378 gcc_assert (a && AT_class (a) == dw_val_class_str);
4379 return find_string_form (a->dw_attr_val.v.val_str);
4380 }
4381
4382 /* Add a DIE reference attribute value to a DIE. */
4383
4384 static inline void
4385 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4386 {
4387 dw_attr_node attr;
4388 gcc_checking_assert (targ_die != NULL);
4389
4390 /* With LTO we can end up trying to reference something we didn't create
4391 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4392 if (targ_die == NULL)
4393 return;
4394
4395 attr.dw_attr = attr_kind;
4396 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4397 attr.dw_attr_val.val_entry = NULL;
4398 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4399 attr.dw_attr_val.v.val_die_ref.external = 0;
4400 add_dwarf_attr (die, &attr);
4401 }
4402
4403 /* Change DIE reference REF to point to NEW_DIE instead. */
4404
4405 static inline void
4406 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4407 {
4408 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4409 ref->dw_attr_val.v.val_die_ref.die = new_die;
4410 ref->dw_attr_val.v.val_die_ref.external = 0;
4411 }
4412
4413 /* Add an AT_specification attribute to a DIE, and also make the back
4414 pointer from the specification to the definition. */
4415
4416 static inline void
4417 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4418 {
4419 add_AT_die_ref (die, DW_AT_specification, targ_die);
4420 gcc_assert (!targ_die->die_definition);
4421 targ_die->die_definition = die;
4422 }
4423
4424 static inline dw_die_ref
4425 AT_ref (dw_attr_node *a)
4426 {
4427 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4428 return a->dw_attr_val.v.val_die_ref.die;
4429 }
4430
4431 static inline int
4432 AT_ref_external (dw_attr_node *a)
4433 {
4434 if (a && AT_class (a) == dw_val_class_die_ref)
4435 return a->dw_attr_val.v.val_die_ref.external;
4436
4437 return 0;
4438 }
4439
4440 static inline void
4441 set_AT_ref_external (dw_attr_node *a, int i)
4442 {
4443 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4444 a->dw_attr_val.v.val_die_ref.external = i;
4445 }
4446
4447 /* Add an FDE reference attribute value to a DIE. */
4448
4449 static inline void
4450 add_AT_fde_ref (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int targ_fde)
4451 {
4452 dw_attr_node attr;
4453
4454 attr.dw_attr = attr_kind;
4455 attr.dw_attr_val.val_class = dw_val_class_fde_ref;
4456 attr.dw_attr_val.val_entry = NULL;
4457 attr.dw_attr_val.v.val_fde_index = targ_fde;
4458 add_dwarf_attr (die, &attr);
4459 }
4460
4461 /* Add a location description attribute value to a DIE. */
4462
4463 static inline void
4464 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4465 {
4466 dw_attr_node attr;
4467
4468 attr.dw_attr = attr_kind;
4469 attr.dw_attr_val.val_class = dw_val_class_loc;
4470 attr.dw_attr_val.val_entry = NULL;
4471 attr.dw_attr_val.v.val_loc = loc;
4472 add_dwarf_attr (die, &attr);
4473 }
4474
4475 static inline dw_loc_descr_ref
4476 AT_loc (dw_attr_node *a)
4477 {
4478 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4479 return a->dw_attr_val.v.val_loc;
4480 }
4481
4482 static inline void
4483 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4484 {
4485 dw_attr_node attr;
4486
4487 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4488 return;
4489
4490 attr.dw_attr = attr_kind;
4491 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4492 attr.dw_attr_val.val_entry = NULL;
4493 attr.dw_attr_val.v.val_loc_list = loc_list;
4494 add_dwarf_attr (die, &attr);
4495 have_location_lists = true;
4496 }
4497
4498 static inline dw_loc_list_ref
4499 AT_loc_list (dw_attr_node *a)
4500 {
4501 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4502 return a->dw_attr_val.v.val_loc_list;
4503 }
4504
4505 static inline dw_loc_list_ref *
4506 AT_loc_list_ptr (dw_attr_node *a)
4507 {
4508 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4509 return &a->dw_attr_val.v.val_loc_list;
4510 }
4511
4512 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4513 {
4514 static hashval_t hash (addr_table_entry *);
4515 static bool equal (addr_table_entry *, addr_table_entry *);
4516 };
4517
4518 /* Table of entries into the .debug_addr section. */
4519
4520 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4521
4522 /* Hash an address_table_entry. */
4523
4524 hashval_t
4525 addr_hasher::hash (addr_table_entry *a)
4526 {
4527 inchash::hash hstate;
4528 switch (a->kind)
4529 {
4530 case ate_kind_rtx:
4531 hstate.add_int (0);
4532 break;
4533 case ate_kind_rtx_dtprel:
4534 hstate.add_int (1);
4535 break;
4536 case ate_kind_label:
4537 return htab_hash_string (a->addr.label);
4538 default:
4539 gcc_unreachable ();
4540 }
4541 inchash::add_rtx (a->addr.rtl, hstate);
4542 return hstate.end ();
4543 }
4544
4545 /* Determine equality for two address_table_entries. */
4546
4547 bool
4548 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4549 {
4550 if (a1->kind != a2->kind)
4551 return 0;
4552 switch (a1->kind)
4553 {
4554 case ate_kind_rtx:
4555 case ate_kind_rtx_dtprel:
4556 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
4557 case ate_kind_label:
4558 return strcmp (a1->addr.label, a2->addr.label) == 0;
4559 default:
4560 gcc_unreachable ();
4561 }
4562 }
4563
4564 /* Initialize an addr_table_entry. */
4565
4566 void
4567 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
4568 {
4569 e->kind = kind;
4570 switch (kind)
4571 {
4572 case ate_kind_rtx:
4573 case ate_kind_rtx_dtprel:
4574 e->addr.rtl = (rtx) addr;
4575 break;
4576 case ate_kind_label:
4577 e->addr.label = (char *) addr;
4578 break;
4579 }
4580 e->refcount = 0;
4581 e->index = NO_INDEX_ASSIGNED;
4582 }
4583
4584 /* Look up or create the address table entry for ADDR of the given KIND and
4585 bump its reference count. Defer setting an index until output time. */
4586
4587 static addr_table_entry *
4588 add_addr_table_entry (void *addr, enum ate_kind kind)
4589 {
4590 addr_table_entry *node;
4591 addr_table_entry finder;
4592
4593 gcc_assert (dwarf_split_debug_info);
4594 if (! addr_index_table)
4595 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
4596 init_addr_table_entry (&finder, kind, addr);
4597 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
4598
4599 if (*slot == HTAB_EMPTY_ENTRY)
4600 {
4601 node = ggc_cleared_alloc<addr_table_entry> ();
4602 init_addr_table_entry (node, kind, addr);
4603 *slot = node;
4604 }
4605 else
4606 node = *slot;
4607
4608 node->refcount++;
4609 return node;
4610 }
4611
4612 /* Remove an entry from the addr table by decrementing its refcount.
4613 Strictly, decrementing the refcount would be enough, but the
4614 assertion that the entry is actually in the table has found
4615 bugs. */
4616
4617 static void
4618 remove_addr_table_entry (addr_table_entry *entry)
4619 {
4620 gcc_assert (dwarf_split_debug_info && addr_index_table);
4621 /* After an index is assigned, the table is frozen. */
4622 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
4623 entry->refcount--;
4624 }
4625
4626 /* Given a location list, remove all addresses it refers to from the
4627 address_table. */
4628
4629 static void
4630 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
4631 {
4632 for (; descr; descr = descr->dw_loc_next)
4633 if (descr->dw_loc_oprnd1.val_entry != NULL)
4634 {
4635 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
4636 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
4637 }
4638 }
4639
4640 /* A helper function for dwarf2out_finish called through
4641 htab_traverse. Assign an addr_table_entry its index. All entries
4642 must be collected into the table when this function is called,
4643 because the indexing code relies on htab_traverse to traverse nodes
4644 in the same order for each run. */
4645
4646 int
4647 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
4648 {
4649 addr_table_entry *node = *h;
4650
4651 /* Don't index unreferenced nodes. */
4652 if (node->refcount == 0)
4653 return 1;
4654
4655 gcc_assert (node->index == NO_INDEX_ASSIGNED);
4656 node->index = *index;
4657 *index += 1;
4658
4659 return 1;
4660 }
4661
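/* Illustrative call pattern (an assumption about the caller, sketched here
   rather than quoted from it): the indices are handed out in a single
   traversal near the end of compilation, e.g.

     unsigned int index = 0;
     addr_index_table->traverse_noresize
       <unsigned int *, index_addr_table_entry> (&index);

   relying, as noted above, on the traversal visiting nodes in the same
   order on every run.  */
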
4662 /* Add an address constant attribute value to a DIE. When using
4663 dwarf_split_debug_info, address attributes in dies destined for the
4664 final executable should be direct references--setting the parameter
4665 force_direct ensures this behavior. */
4666
4667 static inline void
4668 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
4669 bool force_direct)
4670 {
4671 dw_attr_node attr;
4672
4673 attr.dw_attr = attr_kind;
4674 attr.dw_attr_val.val_class = dw_val_class_addr;
4675 attr.dw_attr_val.v.val_addr = addr;
4676 if (dwarf_split_debug_info && !force_direct)
4677 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
4678 else
4679 attr.dw_attr_val.val_entry = NULL;
4680 add_dwarf_attr (die, &attr);
4681 }
4682
4683 /* Get the RTX from an address DIE attribute. */
4684
4685 static inline rtx
4686 AT_addr (dw_attr_node *a)
4687 {
4688 gcc_assert (a && AT_class (a) == dw_val_class_addr);
4689 return a->dw_attr_val.v.val_addr;
4690 }
4691
4692 /* Add a file attribute value to a DIE. */
4693
4694 static inline void
4695 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
4696 struct dwarf_file_data *fd)
4697 {
4698 dw_attr_node attr;
4699
4700 attr.dw_attr = attr_kind;
4701 attr.dw_attr_val.val_class = dw_val_class_file;
4702 attr.dw_attr_val.val_entry = NULL;
4703 attr.dw_attr_val.v.val_file = fd;
4704 add_dwarf_attr (die, &attr);
4705 }
4706
4707 /* Get the dwarf_file_data from a file DIE attribute. */
4708
4709 static inline struct dwarf_file_data *
4710 AT_file (dw_attr_node *a)
4711 {
4712 gcc_assert (a && (AT_class (a) == dw_val_class_file
4713 || AT_class (a) == dw_val_class_file_implicit));
4714 return a->dw_attr_val.v.val_file;
4715 }
4716
4717 /* Add a vms delta attribute value to a DIE. */
4718
4719 static inline void
4720 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
4721 const char *lbl1, const char *lbl2)
4722 {
4723 dw_attr_node attr;
4724
4725 attr.dw_attr = attr_kind;
4726 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
4727 attr.dw_attr_val.val_entry = NULL;
4728 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
4729 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
4730 add_dwarf_attr (die, &attr);
4731 }
4732
4733 /* Add a label identifier attribute value to a DIE. */
4734
4735 static inline void
4736 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
4737 const char *lbl_id)
4738 {
4739 dw_attr_node attr;
4740
4741 attr.dw_attr = attr_kind;
4742 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4743 attr.dw_attr_val.val_entry = NULL;
4744 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
4745 if (dwarf_split_debug_info)
4746 attr.dw_attr_val.val_entry
4747 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
4748 ate_kind_label);
4749 add_dwarf_attr (die, &attr);
4750 }
4751
4752 /* Add a section offset attribute value to a DIE, an offset into the
4753 debug_line section. */
4754
4755 static inline void
4756 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
4757 const char *label)
4758 {
4759 dw_attr_node attr;
4760
4761 attr.dw_attr = attr_kind;
4762 attr.dw_attr_val.val_class = dw_val_class_lineptr;
4763 attr.dw_attr_val.val_entry = NULL;
4764 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
4765 add_dwarf_attr (die, &attr);
4766 }
4767
4768 /* Add a section offset attribute value to a DIE, an offset into the
4769 debug_loclists section. */
4770
4771 static inline void
4772 add_AT_loclistsptr (dw_die_ref die, enum dwarf_attribute attr_kind,
4773 const char *label)
4774 {
4775 dw_attr_node attr;
4776
4777 attr.dw_attr = attr_kind;
4778 attr.dw_attr_val.val_class = dw_val_class_loclistsptr;
4779 attr.dw_attr_val.val_entry = NULL;
4780 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
4781 add_dwarf_attr (die, &attr);
4782 }
4783
4784 /* Add a section offset attribute value to a DIE, an offset into the
4785 debug_macinfo section. */
4786
4787 static inline void
4788 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
4789 const char *label)
4790 {
4791 dw_attr_node attr;
4792
4793 attr.dw_attr = attr_kind;
4794 attr.dw_attr_val.val_class = dw_val_class_macptr;
4795 attr.dw_attr_val.val_entry = NULL;
4796 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
4797 add_dwarf_attr (die, &attr);
4798 }
4799
4800 /* Add an offset attribute value to a DIE. */
4801
4802 static inline void
4803 add_AT_offset (dw_die_ref die, enum dwarf_attribute attr_kind,
4804 unsigned HOST_WIDE_INT offset)
4805 {
4806 dw_attr_node attr;
4807
4808 attr.dw_attr = attr_kind;
4809 attr.dw_attr_val.val_class = dw_val_class_offset;
4810 attr.dw_attr_val.val_entry = NULL;
4811 attr.dw_attr_val.v.val_offset = offset;
4812 add_dwarf_attr (die, &attr);
4813 }
4814
4815 /* Add a range_list attribute value to a DIE. When using
4816 dwarf_split_debug_info, address attributes in dies destined for the
4817 final executable should be direct references--setting the parameter
4818 force_direct ensures this behavior. */
4819
4820 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
4821 #define RELOCATED_OFFSET (NULL)
4822
4823 static void
4824 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
4825 long unsigned int offset, bool force_direct)
4826 {
4827 dw_attr_node attr;
4828
4829 attr.dw_attr = attr_kind;
4830 attr.dw_attr_val.val_class = dw_val_class_range_list;
4831 /* For the range_list attribute, use val_entry to store whether the
4832 offset should follow split-debug-info or normal semantics. This
4833 value is read in output_range_list_offset. */
4834 if (dwarf_split_debug_info && !force_direct)
4835 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
4836 else
4837 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
4838 attr.dw_attr_val.v.val_offset = offset;
4839 add_dwarf_attr (die, &attr);
4840 }
4841
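/* Illustrative note: RELOCATED_OFFSET and UNRELOCATED_OFFSET are used purely
   as tag values in val_entry.  Passing force_direct = true (or compiling
   without split debug info) tags the attribute RELOCATED_OFFSET, i.e. normal
   section-offset semantics; otherwise it is tagged UNRELOCATED_OFFSET and
   output_range_list_offset later applies the split-debug-info handling.  */
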
4842 /* Return the start label of a delta attribute. */
4843
4844 static inline const char *
4845 AT_vms_delta1 (dw_attr_node *a)
4846 {
4847 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
4848 return a->dw_attr_val.v.val_vms_delta.lbl1;
4849 }
4850
4851 /* Return the end label of a delta attribute. */
4852
4853 static inline const char *
4854 AT_vms_delta2 (dw_attr_node *a)
4855 {
4856 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
4857 return a->dw_attr_val.v.val_vms_delta.lbl2;
4858 }
4859
4860 static inline const char *
4861 AT_lbl (dw_attr_node *a)
4862 {
4863 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
4864 || AT_class (a) == dw_val_class_lineptr
4865 || AT_class (a) == dw_val_class_macptr
4866 || AT_class (a) == dw_val_class_loclistsptr
4867 || AT_class (a) == dw_val_class_high_pc));
4868 return a->dw_attr_val.v.val_lbl_id;
4869 }
4870
4871 /* Get the attribute of type attr_kind. */
4872
4873 static dw_attr_node *
4874 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
4875 {
4876 dw_attr_node *a;
4877 unsigned ix;
4878 dw_die_ref spec = NULL;
4879
4880 if (! die)
4881 return NULL;
4882
4883 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4884 if (a->dw_attr == attr_kind)
4885 return a;
4886 else if (a->dw_attr == DW_AT_specification
4887 || a->dw_attr == DW_AT_abstract_origin)
4888 spec = AT_ref (a);
4889
4890 if (spec)
4891 return get_AT (spec, attr_kind);
4892
4893 return NULL;
4894 }
4895
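/* Illustrative example: for a member function whose out-of-class definition
   DIE carries only DW_AT_specification pointing back at the in-class
   declaration, get_AT (def_die, DW_AT_name) finds no DW_AT_name locally and
   transparently retries on the referenced specification DIE, so callers get
   the declared name without handling the indirection themselves.  */
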
4896 /* Returns the parent of the declaration of DIE. */
4897
4898 static dw_die_ref
4899 get_die_parent (dw_die_ref die)
4900 {
4901 dw_die_ref t;
4902
4903 if (!die)
4904 return NULL;
4905
4906 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
4907 || (t = get_AT_ref (die, DW_AT_specification)))
4908 die = t;
4909
4910 return die->die_parent;
4911 }
4912
4913 /* Return the "low pc" attribute value, typically associated with a subprogram
4914 DIE. Return null if the "low pc" attribute is either not present, or if it
4915 cannot be represented as an assembler label identifier. */
4916
4917 static inline const char *
4918 get_AT_low_pc (dw_die_ref die)
4919 {
4920 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
4921
4922 return a ? AT_lbl (a) : NULL;
4923 }
4924
4925 /* Return the "high pc" attribute value, typically associated with a subprogram
4926 DIE. Return null if the "high pc" attribute is either not present, or if it
4927 cannot be represented as an assembler label identifier. */
4928
4929 static inline const char *
4930 get_AT_hi_pc (dw_die_ref die)
4931 {
4932 dw_attr_node *a = get_AT (die, DW_AT_high_pc);
4933
4934 return a ? AT_lbl (a) : NULL;
4935 }
4936
4937 /* Return the value of the string attribute designated by ATTR_KIND, or
4938 NULL if it is not present. */
4939
4940 static inline const char *
4941 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
4942 {
4943 dw_attr_node *a = get_AT (die, attr_kind);
4944
4945 return a ? AT_string (a) : NULL;
4946 }
4947
4948 /* Return the value of the flag attribute designated by ATTR_KIND, or -1
4949 if it is not present. */
4950
4951 static inline int
4952 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
4953 {
4954 dw_attr_node *a = get_AT (die, attr_kind);
4955
4956 return a ? AT_flag (a) : 0;
4957 }
4958
4959 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
4960 if it is not present. */
4961
4962 static inline unsigned
4963 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
4964 {
4965 dw_attr_node *a = get_AT (die, attr_kind);
4966
4967 return a ? AT_unsigned (a) : 0;
4968 }
4969
4970 static inline dw_die_ref
4971 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
4972 {
4973 dw_attr_node *a = get_AT (die, attr_kind);
4974
4975 return a ? AT_ref (a) : NULL;
4976 }
4977
4978 static inline struct dwarf_file_data *
4979 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
4980 {
4981 dw_attr_node *a = get_AT (die, attr_kind);
4982
4983 return a ? AT_file (a) : NULL;
4984 }
4985
4986 /* Return TRUE if the language is C++. */
4987
4988 static inline bool
4989 is_cxx (void)
4990 {
4991 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
4992
4993 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
4994 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
4995 }
4996
4997 /* Return TRUE if DECL was created by the C++ frontend. */
4998
4999 static bool
5000 is_cxx (const_tree decl)
5001 {
5002 if (in_lto_p)
5003 {
5004 const_tree context = decl;
5005 while (context && TREE_CODE (context) != TRANSLATION_UNIT_DECL)
5006 {
5007 if (TREE_CODE (context) == BLOCK)
5008 context = BLOCK_SUPERCONTEXT (context);
5009 else
5010 context = get_containing_scope (context);
5011 }
5012 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5013 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5014 }
5015 return is_cxx ();
5016 }
5017
5018 /* Return TRUE if the language is Java. */
5019
5020 static inline bool
5021 is_java (void)
5022 {
5023 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5024
5025 return lang == DW_LANG_Java;
5026 }
5027
5028 /* Return TRUE if the language is Fortran. */
5029
5030 static inline bool
5031 is_fortran (void)
5032 {
5033 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5034
5035 return (lang == DW_LANG_Fortran77
5036 || lang == DW_LANG_Fortran90
5037 || lang == DW_LANG_Fortran95
5038 || lang == DW_LANG_Fortran03
5039 || lang == DW_LANG_Fortran08);
5040 }
5041
5042 /* Return TRUE if the language is Ada. */
5043
5044 static inline bool
5045 is_ada (void)
5046 {
5047 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5048
5049 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5050 }
5051
5052 /* Remove the specified attribute if present. Return TRUE if removal
5053 was successful. */
5054
5055 static bool
5056 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5057 {
5058 dw_attr_node *a;
5059 unsigned ix;
5060
5061 if (! die)
5062 return false;
5063
5064 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5065 if (a->dw_attr == attr_kind)
5066 {
5067 if (AT_class (a) == dw_val_class_str)
5068 if (a->dw_attr_val.v.val_str->refcount)
5069 a->dw_attr_val.v.val_str->refcount--;
5070
5071 /* vec::ordered_remove should help reduce the number of abbrevs
5072 that are needed. */
5073 die->die_attr->ordered_remove (ix);
5074 return true;
5075 }
5076 return false;
5077 }
5078
5079 /* Remove CHILD from its parent. PREV must have the property that
5080 PREV->DIE_SIB == CHILD. Does not alter CHILD. */
5081
5082 static void
5083 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5084 {
5085 gcc_assert (child->die_parent == prev->die_parent);
5086 gcc_assert (prev->die_sib == child);
5087 if (prev == child)
5088 {
5089 gcc_assert (child->die_parent->die_child == child);
5090 prev = NULL;
5091 }
5092 else
5093 prev->die_sib = child->die_sib;
5094 if (child->die_parent->die_child == child)
5095 child->die_parent->die_child = prev;
5096 child->die_sib = NULL;
5097 }
5098
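/* Illustrative note: children form a circular singly-linked ring (see
   add_child_die below), so a DIE with a single child C has C->die_sib == C.
   In that case this function is called with PREV == CHILD, PREV is reset to
   NULL, and the parent's die_child pointer ends up cleared.  */
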
5099 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5100 PREV->DIE_SIB == OLD_CHILD. Does not alter OLD_CHILD. */
5101
5102 static void
5103 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5104 {
5105 dw_die_ref parent = old_child->die_parent;
5106
5107 gcc_assert (parent == prev->die_parent);
5108 gcc_assert (prev->die_sib == old_child);
5109
5110 new_child->die_parent = parent;
5111 if (prev == old_child)
5112 {
5113 gcc_assert (parent->die_child == old_child);
5114 new_child->die_sib = new_child;
5115 }
5116 else
5117 {
5118 prev->die_sib = new_child;
5119 new_child->die_sib = old_child->die_sib;
5120 }
5121 if (old_child->die_parent->die_child == old_child)
5122 old_child->die_parent->die_child = new_child;
5123 old_child->die_sib = NULL;
5124 }
5125
5126 /* Move all children from OLD_PARENT to NEW_PARENT. */
5127
5128 static void
5129 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5130 {
5131 dw_die_ref c;
5132 new_parent->die_child = old_parent->die_child;
5133 old_parent->die_child = NULL;
5134 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5135 }
5136
5137 /* Remove child DIE whose die_tag is TAG. Do nothing if no child
5138 matches TAG. */
5139
5140 static void
5141 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5142 {
5143 dw_die_ref c;
5144
5145 c = die->die_child;
5146 if (c) do {
5147 dw_die_ref prev = c;
5148 c = c->die_sib;
5149 while (c->die_tag == tag)
5150 {
5151 remove_child_with_prev (c, prev);
5152 c->die_parent = NULL;
5153 /* Might have removed every child. */
5154 if (die->die_child == NULL)
5155 return;
5156 c = prev->die_sib;
5157 }
5158 } while (c != die->die_child);
5159 }
5160
5161 /* Add a CHILD_DIE as the last child of DIE. */
5162
5163 static void
5164 add_child_die (dw_die_ref die, dw_die_ref child_die)
5165 {
5166 /* FIXME this should probably be an assert. */
5167 if (! die || ! child_die)
5168 return;
5169 gcc_assert (die != child_die);
5170
5171 child_die->die_parent = die;
5172 if (die->die_child)
5173 {
5174 child_die->die_sib = die->die_child->die_sib;
5175 die->die_child->die_sib = child_die;
5176 }
5177 else
5178 child_die->die_sib = child_die;
5179 die->die_child = child_die;
5180 }
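/* Editorial sketch (not part of GCC): the children of a DIE form a circular,
   singly linked ring in which DIE->DIE_CHILD points to the *last* child and
   each child's DIE_SIB points to the next one, the last child pointing back
   to the first.  The stand-in structure and names below (mini_die,
   mini_add_child) are invented for illustration and mirror only the fields
   add_child_die touches.  */
#if 0
#include <stdio.h>

struct mini_die
{
  const char *name;
  struct mini_die *parent, *child, *sib;
};

static void
mini_add_child (struct mini_die *die, struct mini_die *child_die)
{
  child_die->parent = die;
  if (die->child)
    {
      /* Splice the new node in right after the current last child.  */
      child_die->sib = die->child->sib;
      die->child->sib = child_die;
    }
  else
    child_die->sib = child_die;	/* A lone child points at itself.  */
  die->child = child_die;	/* The new node becomes the last child.  */
}

int
main (void)
{
  struct mini_die parent = { "parent", 0, 0, 0 };
  struct mini_die a = { "a", 0, 0, 0 }, b = { "b", 0, 0, 0 };

  mini_add_child (&parent, &a);
  mini_add_child (&parent, &b);

  /* Walk the ring starting from the first child (the last child's sib).  */
  struct mini_die *c = parent.child->sib;
  do
    {
      printf ("%s\n", c->name);	/* Prints "a" then "b".  */
      c = c->sib;
    }
  while (c != parent.child->sib);
  return 0;
}
#endif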
5181
5182 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5183
5184 static void
5185 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5186 dw_die_ref after_die)
5187 {
5188 gcc_assert (die
5189 && child_die
5190 && after_die
5191 && die->die_child
5192 && die != child_die);
5193
5194 child_die->die_parent = die;
5195 child_die->die_sib = after_die->die_sib;
5196 after_die->die_sib = child_die;
5197 if (die->die_child == after_die)
5198 die->die_child = child_die;
5199 }
5200
5201 /* Unassociate CHILD from its parent, and make its parent be
5202 NEW_PARENT. */
5203
5204 static void
5205 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5206 {
5207 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5208 if (p->die_sib == child)
5209 {
5210 remove_child_with_prev (child, p);
5211 break;
5212 }
5213 add_child_die (new_parent, child);
5214 }
5215
5216 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5217 is the specification, to the end of PARENT's list of children.
5218 This is done by removing and re-adding it. */
5219
5220 static void
5221 splice_child_die (dw_die_ref parent, dw_die_ref child)
5222 {
5223 /* We want the declaration DIE from inside the class, not the
5224 specification DIE at toplevel. */
5225 if (child->die_parent != parent)
5226 {
5227 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5228
5229 if (tmp)
5230 child = tmp;
5231 }
5232
5233 gcc_assert (child->die_parent == parent
5234 || (child->die_parent
5235 == get_AT_ref (parent, DW_AT_specification)));
5236
5237 reparent_child (child, parent);
5238 }
5239
5240 /* Create and return a new die with a parent of PARENT_DIE. If
5241 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5242 associated tree T must be supplied to determine parenthood
5243 later. */
5244
5245 static inline dw_die_ref
5246 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5247 {
5248 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5249
5250 die->die_tag = tag_value;
5251
5252 if (parent_die != NULL)
5253 add_child_die (parent_die, die);
5254 else
5255 {
5256 limbo_die_node *limbo_node;
5257
5258 /* No DIEs created after early dwarf should end up in limbo,
5259 because the limbo list should not persist past LTO
5260 streaming. */
5261 if (tag_value != DW_TAG_compile_unit
5262 /* These are allowed because they're generated while
5263 breaking out COMDAT units late. */
5264 && tag_value != DW_TAG_type_unit
5265 && tag_value != DW_TAG_skeleton_unit
5266 && !early_dwarf
5267 /* Allow nested functions to live in limbo because they will
5268 only temporarily live there, as decls_for_scope will fix
5269 them up. */
5270 && (TREE_CODE (t) != FUNCTION_DECL
5271 || !decl_function_context (t))
5272 /* Same as nested functions above but for types. Types that
5273 are local to a function will be fixed in
5274 decls_for_scope. */
5275 && (!RECORD_OR_UNION_TYPE_P (t)
5276 || !TYPE_CONTEXT (t)
5277 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5278 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5279 especially in the ltrans stage, but once we implement LTO
5280 dwarf streaming, we should remove this exception. */
5281 && !in_lto_p)
5282 {
5283 fprintf (stderr, "symbol ended up in limbo too late:");
5284 debug_generic_stmt (t);
5285 gcc_unreachable ();
5286 }
5287
5288 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5289 limbo_node->die = die;
5290 limbo_node->created_for = t;
5291 limbo_node->next = limbo_die_list;
5292 limbo_die_list = limbo_node;
5293 }
5294
5295 return die;
5296 }
5297
5298 /* Return the DIE associated with the given type specifier. */
5299
5300 static inline dw_die_ref
5301 lookup_type_die (tree type)
5302 {
5303 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5304 if (die && die->removed)
5305 {
5306 TYPE_SYMTAB_DIE (type) = NULL;
5307 return NULL;
5308 }
5309 return die;
5310 }
5311
5312 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5313 anonymous type named by the typedef TYPE_DIE, return the DIE of the
5314 anonymous type instead of the one of the naming typedef. */
5315
5316 static inline dw_die_ref
5317 strip_naming_typedef (tree type, dw_die_ref type_die)
5318 {
5319 if (type
5320 && TREE_CODE (type) == RECORD_TYPE
5321 && type_die
5322 && type_die->die_tag == DW_TAG_typedef
5323 && is_naming_typedef_decl (TYPE_NAME (type)))
5324 type_die = get_AT_ref (type_die, DW_AT_type);
5325 return type_die;
5326 }
5327
5328 /* Like lookup_type_die, but if type is an anonymous type named by a
5329 typedef[1], return the DIE of the anonymous type instead of the one of
5330 the naming typedef. This is because in gen_typedef_die, we
5331 equated the anonymous struct named by the typedef with the DIE of
5332 the naming typedef. So by default, lookup_type_die on an anonymous
5333 struct yields the DIE of the naming typedef.
5334
5335 [1]: Read the comment of is_naming_typedef_decl to learn about what
5336 a naming typedef is. */
5337
5338 static inline dw_die_ref
5339 lookup_type_die_strip_naming_typedef (tree type)
5340 {
5341 dw_die_ref die = lookup_type_die (type);
5342 return strip_naming_typedef (type, die);
5343 }
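/* Editorial example (not part of GCC): a "naming typedef" in the sense used
   above is a typedef that supplies the only name of an otherwise anonymous
   aggregate, as in the declaration below.  gen_typedef_die equates such a
   struct with the typedef's DIE, so lookup_type_die on the struct would
   return the typedef DIE unless it is stripped here.  */
#if 0
typedef struct { int i; } foo;	/* 'foo' names the anonymous struct.  */
#endif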
5344
5345 /* Equate a DIE to a given type specifier. */
5346
5347 static inline void
5348 equate_type_number_to_die (tree type, dw_die_ref type_die)
5349 {
5350 TYPE_SYMTAB_DIE (type) = type_die;
5351 }
5352
5353 /* Returns a hash value for X (which really is a die_struct). */
5354
5355 inline hashval_t
5356 decl_die_hasher::hash (die_node *x)
5357 {
5358 return (hashval_t) x->decl_id;
5359 }
5360
5361 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5362
5363 inline bool
5364 decl_die_hasher::equal (die_node *x, tree y)
5365 {
5366 return (x->decl_id == DECL_UID (y));
5367 }
5368
5369 /* Return the DIE associated with a given declaration. */
5370
5371 static inline dw_die_ref
5372 lookup_decl_die (tree decl)
5373 {
5374 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5375 NO_INSERT);
5376 if (!die)
5377 return NULL;
5378 if ((*die)->removed)
5379 {
5380 decl_die_table->clear_slot (die);
5381 return NULL;
5382 }
5383 return *die;
5384 }
5385
5386 /* Returns a hash value for X (which really is a var_loc_list). */
5387
5388 inline hashval_t
5389 decl_loc_hasher::hash (var_loc_list *x)
5390 {
5391 return (hashval_t) x->decl_id;
5392 }
5393
5394 /* Return nonzero if decl_id of var_loc_list X is the same as
5395 UID of decl *Y. */
5396
5397 inline bool
5398 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
5399 {
5400 return (x->decl_id == DECL_UID (y));
5401 }
5402
5403 /* Return the var_loc list associated with a given declaration. */
5404
5405 static inline var_loc_list *
5406 lookup_decl_loc (const_tree decl)
5407 {
5408 if (!decl_loc_table)
5409 return NULL;
5410 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
5411 }
5412
5413 /* Returns a hash value for X (which really is a cached_dw_loc_list_list). */
5414
5415 inline hashval_t
5416 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
5417 {
5418 return (hashval_t) x->decl_id;
5419 }
5420
5421 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
5422 UID of decl *Y. */
5423
5424 inline bool
5425 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
5426 {
5427 return (x->decl_id == DECL_UID (y));
5428 }
5429
5430 /* Equate a DIE to a particular declaration. */
5431
5432 static void
5433 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
5434 {
5435 unsigned int decl_id = DECL_UID (decl);
5436
5437 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
5438 decl_die->decl_id = decl_id;
5439 }
5440
5441 /* Return how many bits the PIECE EXPR_LIST covers. */
5442
5443 static HOST_WIDE_INT
5444 decl_piece_bitsize (rtx piece)
5445 {
5446 int ret = (int) GET_MODE (piece);
5447 if (ret)
5448 return ret;
5449 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
5450 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
5451 return INTVAL (XEXP (XEXP (piece, 0), 0));
5452 }
5453
5454 /* Return pointer to the location of location note in PIECE EXPR_LIST. */
5455
5456 static rtx *
5457 decl_piece_varloc_ptr (rtx piece)
5458 {
5459 if ((int) GET_MODE (piece))
5460 return &XEXP (piece, 0);
5461 else
5462 return &XEXP (XEXP (piece, 0), 1);
5463 }
5464
5465 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
5466 NEXT is the chain of following piece nodes. */
5467
5468 static rtx_expr_list *
5469 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
5470 {
5471 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
5472 return alloc_EXPR_LIST (bitsize, loc_note, next);
5473 else
5474 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
5475 GEN_INT (bitsize),
5476 loc_note), next);
5477 }
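/* Editorial sketch (not part of GCC): the piece encoding used by
   decl_piece_node and decl_piece_bitsize above, modelled with a plain
   struct.  A bit size that fits in the machine-mode field (1 ..
   MAX_MACHINE_MODE) is smuggled into that field directly; larger sizes use
   mode 0 plus an extra (CONCAT (const_int size) note) indirection, which is
   why decl_piece_bitsize and decl_piece_varloc_ptr each have two cases.
   The names and the MINI_MAX_MODE stand-in value are invented for
   illustration.  */
#if 0
#include <stdio.h>

#define MINI_MAX_MODE 130	/* Stand-in for (int) MAX_MACHINE_MODE.  */

struct mini_piece
{
  int mode;			/* Holds the bitsize if 1..MINI_MAX_MODE, else 0.  */
  long concat_bitsize;		/* Used only when mode == 0.  */
};

static long
mini_piece_bitsize (const struct mini_piece *p)
{
  return p->mode ? p->mode : p->concat_bitsize;
}

int
main (void)
{
  struct mini_piece small = { 32, 0 };	/* Fits in the mode field.  */
  struct mini_piece large = { 0, 300 };	/* Needs the CONCAT indirection.  */

  printf ("%ld %ld\n", mini_piece_bitsize (&small),
	  mini_piece_bitsize (&large));	/* Prints "32 300".  */
  return 0;
}
#endif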
5478
5479 /* Return rtx that should be stored into loc field for
5480 LOC_NOTE and BITPOS/BITSIZE. */
5481
5482 static rtx
5483 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
5484 HOST_WIDE_INT bitsize)
5485 {
5486 if (bitsize != -1)
5487 {
5488 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
5489 if (bitpos != 0)
5490 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
5491 }
5492 return loc_note;
5493 }
5494
5495 /* This function either modifies location piece list *DEST in
5496 place (if SRC and INNER is NULL), or copies location piece list
5497 *SRC to *DEST while modifying it. Location BITPOS is modified
5498 to contain LOC_NOTE, any pieces overlapping it are removed (or, when
5499 copying, not copied) and, if needed, some padding around it is added.
5500 When modifying in place, DEST should point to EXPR_LIST where
5501 earlier pieces cover PIECE_BITPOS bits, when copying SRC points
5502 to the start of the whole list and INNER points to the EXPR_LIST
5503 where earlier pieces cover PIECE_BITPOS bits. */
5504
5505 static void
5506 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
5507 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
5508 HOST_WIDE_INT bitsize, rtx loc_note)
5509 {
5510 HOST_WIDE_INT diff;
5511 bool copy = inner != NULL;
5512
5513 if (copy)
5514 {
5515 /* First copy all nodes preceding the current bitpos. */
5516 while (src != inner)
5517 {
5518 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
5519 decl_piece_bitsize (*src), NULL_RTX);
5520 dest = &XEXP (*dest, 1);
5521 src = &XEXP (*src, 1);
5522 }
5523 }
5524 /* Add padding if needed. */
5525 if (bitpos != piece_bitpos)
5526 {
5527 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
5528 copy ? NULL_RTX : *dest);
5529 dest = &XEXP (*dest, 1);
5530 }
5531 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
5532 {
5533 gcc_assert (!copy);
5534 /* A piece with the correct bitpos and bitsize already exists,
5535 just update the location for it and return. */
5536 *decl_piece_varloc_ptr (*dest) = loc_note;
5537 return;
5538 }
5539 /* Add the piece that changed. */
5540 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
5541 dest = &XEXP (*dest, 1);
5542 /* Skip over pieces that overlap it. */
5543 diff = bitpos - piece_bitpos + bitsize;
5544 if (!copy)
5545 src = dest;
5546 while (diff > 0 && *src)
5547 {
5548 rtx piece = *src;
5549 diff -= decl_piece_bitsize (piece);
5550 if (copy)
5551 src = &XEXP (piece, 1);
5552 else
5553 {
5554 *src = XEXP (piece, 1);
5555 free_EXPR_LIST_node (piece);
5556 }
5557 }
5558 /* Add padding if needed. */
5559 if (diff < 0 && *src)
5560 {
5561 if (!copy)
5562 dest = src;
5563 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
5564 dest = &XEXP (*dest, 1);
5565 }
5566 if (!copy)
5567 return;
5568 /* Finally copy all nodes following it. */
5569 while (*src)
5570 {
5571 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
5572 decl_piece_bitsize (*src), NULL_RTX);
5573 dest = &XEXP (*dest, 1);
5574 src = &XEXP (*src, 1);
5575 }
5576 }
5577
5578 /* Add a variable location node to the linked list for DECL. */
5579
5580 static struct var_loc_node *
5581 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label)
5582 {
5583 unsigned int decl_id;
5584 var_loc_list *temp;
5585 struct var_loc_node *loc = NULL;
5586 HOST_WIDE_INT bitsize = -1, bitpos = -1;
5587
5588 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
5589 {
5590 tree realdecl = DECL_DEBUG_EXPR (decl);
5591 if (handled_component_p (realdecl)
5592 || (TREE_CODE (realdecl) == MEM_REF
5593 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
5594 {
5595 HOST_WIDE_INT maxsize;
5596 bool reverse;
5597 tree innerdecl
5598 = get_ref_base_and_extent (realdecl, &bitpos, &bitsize, &maxsize,
5599 &reverse);
5600 if (!DECL_P (innerdecl)
5601 || DECL_IGNORED_P (innerdecl)
5602 || TREE_STATIC (innerdecl)
5603 || bitsize <= 0
5604 || bitpos + bitsize > 256
5605 || bitsize != maxsize)
5606 return NULL;
5607 decl = innerdecl;
5608 }
5609 }
5610
5611 decl_id = DECL_UID (decl);
5612 var_loc_list **slot
5613 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
5614 if (*slot == NULL)
5615 {
5616 temp = ggc_cleared_alloc<var_loc_list> ();
5617 temp->decl_id = decl_id;
5618 *slot = temp;
5619 }
5620 else
5621 temp = *slot;
5622
5623 /* For PARM_DECLs try to keep around the original incoming value,
5624 even if that means we'll emit a zero-range .debug_loc entry. */
5625 if (temp->last
5626 && temp->first == temp->last
5627 && TREE_CODE (decl) == PARM_DECL
5628 && NOTE_P (temp->first->loc)
5629 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
5630 && DECL_INCOMING_RTL (decl)
5631 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
5632 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
5633 == GET_CODE (DECL_INCOMING_RTL (decl))
5634 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
5635 && (bitsize != -1
5636 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
5637 NOTE_VAR_LOCATION_LOC (loc_note))
5638 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
5639 != NOTE_VAR_LOCATION_STATUS (loc_note))))
5640 {
5641 loc = ggc_cleared_alloc<var_loc_node> ();
5642 temp->first->next = loc;
5643 temp->last = loc;
5644 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
5645 }
5646 else if (temp->last)
5647 {
5648 struct var_loc_node *last = temp->last, *unused = NULL;
5649 rtx *piece_loc = NULL, last_loc_note;
5650 HOST_WIDE_INT piece_bitpos = 0;
5651 if (last->next)
5652 {
5653 last = last->next;
5654 gcc_assert (last->next == NULL);
5655 }
5656 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
5657 {
5658 piece_loc = &last->loc;
5659 do
5660 {
5661 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
5662 if (piece_bitpos + cur_bitsize > bitpos)
5663 break;
5664 piece_bitpos += cur_bitsize;
5665 piece_loc = &XEXP (*piece_loc, 1);
5666 }
5667 while (*piece_loc);
5668 }
5669 /* TEMP->LAST here is either pointer to the last but one or
5670 last element in the chained list, LAST is pointer to the
5671 last element. */
5672 if (label && strcmp (last->label, label) == 0)
5673 {
5674 /* For SRA-optimized variables, if there weren't any real
5675 insns since the last note, just modify the last node. */
5676 if (piece_loc != NULL)
5677 {
5678 adjust_piece_list (piece_loc, NULL, NULL,
5679 bitpos, piece_bitpos, bitsize, loc_note);
5680 return NULL;
5681 }
5682 /* If the last note doesn't cover any instructions, remove it. */
5683 if (temp->last != last)
5684 {
5685 temp->last->next = NULL;
5686 unused = last;
5687 last = temp->last;
5688 gcc_assert (strcmp (last->label, label) != 0);
5689 }
5690 else
5691 {
5692 gcc_assert (temp->first == temp->last
5693 || (temp->first->next == temp->last
5694 && TREE_CODE (decl) == PARM_DECL));
5695 memset (temp->last, '\0', sizeof (*temp->last));
5696 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
5697 return temp->last;
5698 }
5699 }
5700 if (bitsize == -1 && NOTE_P (last->loc))
5701 last_loc_note = last->loc;
5702 else if (piece_loc != NULL
5703 && *piece_loc != NULL_RTX
5704 && piece_bitpos == bitpos
5705 && decl_piece_bitsize (*piece_loc) == bitsize)
5706 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
5707 else
5708 last_loc_note = NULL_RTX;
5709 /* If the current location is the same as the end of the list,
5710 and either both or neither of the locations is uninitialized,
5711 we have nothing to do. */
5712 if (last_loc_note == NULL_RTX
5713 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
5714 NOTE_VAR_LOCATION_LOC (loc_note)))
5715 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
5716 != NOTE_VAR_LOCATION_STATUS (loc_note))
5717 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
5718 == VAR_INIT_STATUS_UNINITIALIZED)
5719 || (NOTE_VAR_LOCATION_STATUS (loc_note)
5720 == VAR_INIT_STATUS_UNINITIALIZED))))
5721 {
5722 /* Add LOC to the end of list and update LAST. If the last
5723 element of the list has been removed above, reuse its
5724 memory for the new node, otherwise allocate a new one. */
5725 if (unused)
5726 {
5727 loc = unused;
5728 memset (loc, '\0', sizeof (*loc));
5729 }
5730 else
5731 loc = ggc_cleared_alloc<var_loc_node> ();
5732 if (bitsize == -1 || piece_loc == NULL)
5733 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
5734 else
5735 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
5736 bitpos, piece_bitpos, bitsize, loc_note);
5737 last->next = loc;
5738 /* Ensure TEMP->LAST will point either to the new last but one
5739 element of the chain, or to the last element in it. */
5740 if (last != temp->last)
5741 temp->last = last;
5742 }
5743 else if (unused)
5744 ggc_free (unused);
5745 }
5746 else
5747 {
5748 loc = ggc_cleared_alloc<var_loc_node> ();
5749 temp->first = loc;
5750 temp->last = loc;
5751 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
5752 }
5753 return loc;
5754 }
5755 \f
5756 /* Keep track of the number of spaces used to indent the
5757 output of the debugging routines that print the structure of
5758 the DIE internal representation. */
5759 static int print_indent;
5760
5761 /* Indent the line the number of spaces given by print_indent. */
5762
5763 static inline void
5764 print_spaces (FILE *outfile)
5765 {
5766 fprintf (outfile, "%*s", print_indent, "");
5767 }
5768
5769 /* Print a type signature in hex. */
5770
5771 static inline void
5772 print_signature (FILE *outfile, char *sig)
5773 {
5774 int i;
5775
5776 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
5777 fprintf (outfile, "%02x", sig[i] & 0xff);
5778 }
5779
5780 static inline void
5781 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
5782 {
5783 if (discr_value->pos)
5784 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
5785 else
5786 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
5787 }
5788
5789 static void print_loc_descr (dw_loc_descr_ref, FILE *);
5790
5791 /* Print the value associated with the VAL DWARF value node to OUTFILE. If
5792 RECURSE, output location descriptor operations. */
5793
5794 static void
5795 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
5796 {
5797 switch (val->val_class)
5798 {
5799 case dw_val_class_addr:
5800 fprintf (outfile, "address");
5801 break;
5802 case dw_val_class_offset:
5803 fprintf (outfile, "offset");
5804 break;
5805 case dw_val_class_loc:
5806 fprintf (outfile, "location descriptor");
5807 if (val->v.val_loc == NULL)
5808 fprintf (outfile, " -> <null>\n");
5809 else if (recurse)
5810 {
5811 fprintf (outfile, ":\n");
5812 print_indent += 4;
5813 print_loc_descr (val->v.val_loc, outfile);
5814 print_indent -= 4;
5815 }
5816 else
5817 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
5818 break;
5819 case dw_val_class_loc_list:
5820 fprintf (outfile, "location list -> label:%s",
5821 val->v.val_loc_list->ll_symbol);
5822 break;
5823 case dw_val_class_range_list:
5824 fprintf (outfile, "range list");
5825 break;
5826 case dw_val_class_const:
5827 case dw_val_class_const_implicit:
5828 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
5829 break;
5830 case dw_val_class_unsigned_const:
5831 case dw_val_class_unsigned_const_implicit:
5832 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
5833 break;
5834 case dw_val_class_const_double:
5835 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
5836 HOST_WIDE_INT_PRINT_UNSIGNED")",
5837 val->v.val_double.high,
5838 val->v.val_double.low);
5839 break;
5840 case dw_val_class_wide_int:
5841 {
5842 int i = val->v.val_wide->get_len ();
5843 fprintf (outfile, "constant (");
5844 gcc_assert (i > 0);
5845 if (val->v.val_wide->elt (i - 1) == 0)
5846 fprintf (outfile, "0x");
5847 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
5848 val->v.val_wide->elt (--i));
5849 while (--i >= 0)
5850 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
5851 val->v.val_wide->elt (i));
5852 fprintf (outfile, ")");
5853 break;
5854 }
5855 case dw_val_class_vec:
5856 fprintf (outfile, "floating-point or vector constant");
5857 break;
5858 case dw_val_class_flag:
5859 fprintf (outfile, "%u", val->v.val_flag);
5860 break;
5861 case dw_val_class_die_ref:
5862 if (val->v.val_die_ref.die != NULL)
5863 {
5864 dw_die_ref die = val->v.val_die_ref.die;
5865
5866 if (die->comdat_type_p)
5867 {
5868 fprintf (outfile, "die -> signature: ");
5869 print_signature (outfile,
5870 die->die_id.die_type_node->signature);
5871 }
5872 else if (die->die_id.die_symbol)
5873 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
5874 else
5875 fprintf (outfile, "die -> %ld", die->die_offset);
5876 fprintf (outfile, " (%p)", (void *) die);
5877 }
5878 else
5879 fprintf (outfile, "die -> <null>");
5880 break;
5881 case dw_val_class_vms_delta:
5882 fprintf (outfile, "delta: @slotcount(%s-%s)",
5883 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
5884 break;
5885 case dw_val_class_lbl_id:
5886 case dw_val_class_lineptr:
5887 case dw_val_class_macptr:
5888 case dw_val_class_loclistsptr:
5889 case dw_val_class_high_pc:
5890 fprintf (outfile, "label: %s", val->v.val_lbl_id);
5891 break;
5892 case dw_val_class_str:
5893 if (val->v.val_str->str != NULL)
5894 fprintf (outfile, "\"%s\"", val->v.val_str->str);
5895 else
5896 fprintf (outfile, "<null>");
5897 break;
5898 case dw_val_class_file:
5899 case dw_val_class_file_implicit:
5900 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
5901 val->v.val_file->emitted_number);
5902 break;
5903 case dw_val_class_data8:
5904 {
5905 int i;
5906
5907 for (i = 0; i < 8; i++)
5908 fprintf (outfile, "%02x", val->v.val_data8[i]);
5909 break;
5910 }
5911 case dw_val_class_discr_value:
5912 print_discr_value (outfile, &val->v.val_discr_value);
5913 break;
5914 case dw_val_class_discr_list:
5915 for (dw_discr_list_ref node = val->v.val_discr_list;
5916 node != NULL;
5917 node = node->dw_discr_next)
5918 {
5919 if (node->dw_discr_range)
5920 {
5921 print_discr_value (outfile, &node->dw_discr_lower_bound);
5922 fprintf (outfile, " .. ");
5923 print_discr_value (outfile, &node->dw_discr_upper_bound);
5924 }
5925 else
5926 print_discr_value (outfile, &node->dw_discr_lower_bound);
5927
5928 if (node->dw_discr_next != NULL)
5929 fprintf (outfile, " | ");
5930 }
5931 default:
5932 break;
5933 }
5934 }
5935
5936 /* Likewise, for a DIE attribute. */
5937
5938 static void
5939 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
5940 {
5941 print_dw_val (&a->dw_attr_val, recurse, outfile);
5942 }
5943
5944
5945 /* Print the list of operands in the LOC location description to OUTFILE. This
5946 routine is a debugging aid only. */
5947
5948 static void
5949 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
5950 {
5951 dw_loc_descr_ref l = loc;
5952
5953 if (loc == NULL)
5954 {
5955 print_spaces (outfile);
5956 fprintf (outfile, "<null>\n");
5957 return;
5958 }
5959
5960 for (l = loc; l != NULL; l = l->dw_loc_next)
5961 {
5962 print_spaces (outfile);
5963 fprintf (outfile, "(%p) %s",
5964 (void *) l,
5965 dwarf_stack_op_name (l->dw_loc_opc));
5966 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
5967 {
5968 fprintf (outfile, " ");
5969 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
5970 }
5971 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
5972 {
5973 fprintf (outfile, ", ");
5974 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
5975 }
5976 fprintf (outfile, "\n");
5977 }
5978 }
5979
5980 /* Print the information associated with a given DIE, and its children.
5981 This routine is a debugging aid only. */
5982
5983 static void
5984 print_die (dw_die_ref die, FILE *outfile)
5985 {
5986 dw_attr_node *a;
5987 dw_die_ref c;
5988 unsigned ix;
5989
5990 print_spaces (outfile);
5991 fprintf (outfile, "DIE %4ld: %s (%p)\n",
5992 die->die_offset, dwarf_tag_name (die->die_tag),
5993 (void*) die);
5994 print_spaces (outfile);
5995 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
5996 fprintf (outfile, " offset: %ld", die->die_offset);
5997 fprintf (outfile, " mark: %d\n", die->die_mark);
5998
5999 if (die->comdat_type_p)
6000 {
6001 print_spaces (outfile);
6002 fprintf (outfile, " signature: ");
6003 print_signature (outfile, die->die_id.die_type_node->signature);
6004 fprintf (outfile, "\n");
6005 }
6006
6007 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6008 {
6009 print_spaces (outfile);
6010 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6011
6012 print_attribute (a, true, outfile);
6013 fprintf (outfile, "\n");
6014 }
6015
6016 if (die->die_child != NULL)
6017 {
6018 print_indent += 4;
6019 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6020 print_indent -= 4;
6021 }
6022 if (print_indent == 0)
6023 fprintf (outfile, "\n");
6024 }
6025
6026 /* Print the list of operations in the LOC location description. */
6027
6028 DEBUG_FUNCTION void
6029 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6030 {
6031 print_loc_descr (loc, stderr);
6032 }
6033
6034 /* Print the information collected for a given DIE. */
6035
6036 DEBUG_FUNCTION void
6037 debug_dwarf_die (dw_die_ref die)
6038 {
6039 print_die (die, stderr);
6040 }
6041
6042 DEBUG_FUNCTION void
6043 debug (die_struct &ref)
6044 {
6045 print_die (&ref, stderr);
6046 }
6047
6048 DEBUG_FUNCTION void
6049 debug (die_struct *ptr)
6050 {
6051 if (ptr)
6052 debug (*ptr);
6053 else
6054 fprintf (stderr, "<nil>\n");
6055 }
6056
6057
6058 /* Print all DWARF information collected for the compilation unit.
6059 This routine is a debugging aid only. */
6060
6061 DEBUG_FUNCTION void
6062 debug_dwarf (void)
6063 {
6064 print_indent = 0;
6065 print_die (comp_unit_die (), stderr);
6066 }
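/* Editorial note (not part of GCC documentation): the DEBUG_FUNCTION helpers
   above are meant to be called by hand from a debugger while the compiler is
   stopped at a breakpoint, for example:

     (gdb) call debug_dwarf_die (die)
     (gdb) call debug_dwarf ()

   Their output goes to stderr, as the calls to print_die above show.  */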
6067
6068 /* Verify the DIE tree structure. */
6069
6070 DEBUG_FUNCTION void
6071 verify_die (dw_die_ref die)
6072 {
6073 gcc_assert (!die->die_mark);
6074 if (die->die_parent == NULL
6075 && die->die_sib == NULL)
6076 return;
6077 /* Verify the die_sib list is cyclic. */
6078 dw_die_ref x = die;
6079 do
6080 {
6081 x->die_mark = 1;
6082 x = x->die_sib;
6083 }
6084 while (x && !x->die_mark);
6085 gcc_assert (x == die);
6086 x = die;
6087 do
6088 {
6089 /* Verify all dies have the same parent. */
6090 gcc_assert (x->die_parent == die->die_parent);
6091 if (x->die_child)
6092 {
6093 /* Verify the child has the proper parent and recurse. */
6094 gcc_assert (x->die_child->die_parent == x);
6095 verify_die (x->die_child);
6096 }
6097 x->die_mark = 0;
6098 x = x->die_sib;
6099 }
6100 while (x && x->die_mark);
6101 }
6102
6103 /* Sanity checks on DIEs. */
6104
6105 static void
6106 check_die (dw_die_ref die)
6107 {
6108 unsigned ix;
6109 dw_attr_node *a;
6110 bool inline_found = false;
6111 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6112 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6113 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6114 {
6115 switch (a->dw_attr)
6116 {
6117 case DW_AT_inline:
6118 if (a->dw_attr_val.v.val_unsigned)
6119 inline_found = true;
6120 break;
6121 case DW_AT_location:
6122 ++n_location;
6123 break;
6124 case DW_AT_low_pc:
6125 ++n_low_pc;
6126 break;
6127 case DW_AT_high_pc:
6128 ++n_high_pc;
6129 break;
6130 case DW_AT_artificial:
6131 ++n_artificial;
6132 break;
6133 case DW_AT_decl_column:
6134 ++n_decl_column;
6135 break;
6136 case DW_AT_decl_line:
6137 ++n_decl_line;
6138 break;
6139 case DW_AT_decl_file:
6140 ++n_decl_file;
6141 break;
6142 default:
6143 break;
6144 }
6145 }
6146 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6147 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6148 {
6149 fprintf (stderr, "Duplicate attributes in DIE:\n");
6150 debug_dwarf_die (die);
6151 gcc_unreachable ();
6152 }
6153 if (inline_found)
6154 {
6155 /* A debugging information entry that is a member of an abstract
6156 instance tree [that has DW_AT_inline] should not contain any
6157 attributes which describe aspects of the subroutine which vary
6158 between distinct inlined expansions or distinct out-of-line
6159 expansions. */
6160 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6161 gcc_assert (a->dw_attr != DW_AT_low_pc
6162 && a->dw_attr != DW_AT_high_pc
6163 && a->dw_attr != DW_AT_location
6164 && a->dw_attr != DW_AT_frame_base
6165 && a->dw_attr != DW_AT_call_all_calls
6166 && a->dw_attr != DW_AT_GNU_all_call_sites);
6167 }
6168 }
6169 \f
6170 /* Start a new compilation unit DIE for an include file. OLD_UNIT is the CU
6171 for the enclosing include file, if any. BINCL_DIE is the DW_TAG_GNU_BINCL
6172 DIE that marks the start of the DIEs for this include file. */
6173
6174 static dw_die_ref
6175 push_new_compile_unit (dw_die_ref old_unit, dw_die_ref bincl_die)
6176 {
6177 const char *filename = get_AT_string (bincl_die, DW_AT_name);
6178 dw_die_ref new_unit = gen_compile_unit_die (filename);
6179
6180 new_unit->die_sib = old_unit;
6181 return new_unit;
6182 }
6183
6184 /* Close an include-file CU and reopen the enclosing one. */
6185
6186 static dw_die_ref
6187 pop_compile_unit (dw_die_ref old_unit)
6188 {
6189 dw_die_ref new_unit = old_unit->die_sib;
6190
6191 old_unit->die_sib = NULL;
6192 return new_unit;
6193 }
6194
6195 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6196 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6197 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6198
6199 /* Calculate the checksum of a location expression. */
6200
6201 static inline void
6202 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6203 {
6204 int tem;
6205 inchash::hash hstate;
6206 hashval_t hash;
6207
6208 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6209 CHECKSUM (tem);
6210 hash_loc_operands (loc, hstate);
6211 hash = hstate.end();
6212 CHECKSUM (hash);
6213 }
6214
6215 /* Calculate the checksum of an attribute. */
6216
6217 static void
6218 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6219 {
6220 dw_loc_descr_ref loc;
6221 rtx r;
6222
6223 CHECKSUM (at->dw_attr);
6224
6225 /* We don't care that this was compiled with a different compiler
6226 snapshot; if the output is the same, that's what matters. */
6227 if (at->dw_attr == DW_AT_producer)
6228 return;
6229
6230 switch (AT_class (at))
6231 {
6232 case dw_val_class_const:
6233 case dw_val_class_const_implicit:
6234 CHECKSUM (at->dw_attr_val.v.val_int);
6235 break;
6236 case dw_val_class_unsigned_const:
6237 case dw_val_class_unsigned_const_implicit:
6238 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6239 break;
6240 case dw_val_class_const_double:
6241 CHECKSUM (at->dw_attr_val.v.val_double);
6242 break;
6243 case dw_val_class_wide_int:
6244 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6245 get_full_len (*at->dw_attr_val.v.val_wide)
6246 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6247 break;
6248 case dw_val_class_vec:
6249 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6250 (at->dw_attr_val.v.val_vec.length
6251 * at->dw_attr_val.v.val_vec.elt_size));
6252 break;
6253 case dw_val_class_flag:
6254 CHECKSUM (at->dw_attr_val.v.val_flag);
6255 break;
6256 case dw_val_class_str:
6257 CHECKSUM_STRING (AT_string (at));
6258 break;
6259
6260 case dw_val_class_addr:
6261 r = AT_addr (at);
6262 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6263 CHECKSUM_STRING (XSTR (r, 0));
6264 break;
6265
6266 case dw_val_class_offset:
6267 CHECKSUM (at->dw_attr_val.v.val_offset);
6268 break;
6269
6270 case dw_val_class_loc:
6271 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6272 loc_checksum (loc, ctx);
6273 break;
6274
6275 case dw_val_class_die_ref:
6276 die_checksum (AT_ref (at), ctx, mark);
6277 break;
6278
6279 case dw_val_class_fde_ref:
6280 case dw_val_class_vms_delta:
6281 case dw_val_class_lbl_id:
6282 case dw_val_class_lineptr:
6283 case dw_val_class_macptr:
6284 case dw_val_class_loclistsptr:
6285 case dw_val_class_high_pc:
6286 break;
6287
6288 case dw_val_class_file:
6289 case dw_val_class_file_implicit:
6290 CHECKSUM_STRING (AT_file (at)->filename);
6291 break;
6292
6293 case dw_val_class_data8:
6294 CHECKSUM (at->dw_attr_val.v.val_data8);
6295 break;
6296
6297 default:
6298 break;
6299 }
6300 }
6301
6302 /* Calculate the checksum of a DIE. */
6303
6304 static void
6305 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6306 {
6307 dw_die_ref c;
6308 dw_attr_node *a;
6309 unsigned ix;
6310
6311 /* To avoid infinite recursion. */
6312 if (die->die_mark)
6313 {
6314 CHECKSUM (die->die_mark);
6315 return;
6316 }
6317 die->die_mark = ++(*mark);
6318
6319 CHECKSUM (die->die_tag);
6320
6321 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6322 attr_checksum (a, ctx, mark);
6323
6324 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6325 }
6326
6327 #undef CHECKSUM
6328 #undef CHECKSUM_BLOCK
6329 #undef CHECKSUM_STRING
6330
6331 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6332 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6333 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6334 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
6335 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
6336 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
6337 #define CHECKSUM_ATTR(FOO) \
6338 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
6339
6340 /* Calculate the checksum of a number in signed LEB128 format. */
6341
6342 static void
6343 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
6344 {
6345 unsigned char byte;
6346 bool more;
6347
6348 while (1)
6349 {
6350 byte = (value & 0x7f);
6351 value >>= 7;
6352 more = !((value == 0 && (byte & 0x40) == 0)
6353 || (value == -1 && (byte & 0x40) != 0));
6354 if (more)
6355 byte |= 0x80;
6356 CHECKSUM (byte);
6357 if (!more)
6358 break;
6359 }
6360 }
6361
6362 /* Calculate the checksum of a number in unsigned LEB128 format. */
6363
6364 static void
6365 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
6366 {
6367 while (1)
6368 {
6369 unsigned char byte = (value & 0x7f);
6370 value >>= 7;
6371 if (value != 0)
6372 /* More bytes to follow. */
6373 byte |= 0x80;
6374 CHECKSUM (byte);
6375 if (value == 0)
6376 break;
6377 }
6378 }
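/* Editorial sketch (not part of GCC): the same unsigned LEB128 loop as in
   checksum_uleb128 above, but writing the bytes into a buffer instead of an
   MD5 context so the encoding is easy to inspect.  The function name and the
   sample value are invented for illustration; 624485 is the classic DWARF
   example and encodes as e5 8e 26.  */
#if 0
#include <stdio.h>

static int
uleb128_encode (unsigned long value, unsigned char *buf)
{
  int len = 0;
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value != 0)
	byte |= 0x80;		/* More bytes follow.  */
      buf[len++] = byte;
    }
  while (value != 0);
  return len;
}

int
main (void)
{
  unsigned char buf[10];
  int i, n = uleb128_encode (624485UL, buf);

  for (i = 0; i < n; i++)
    printf ("%02x ", buf[i]);	/* Prints "e5 8e 26 ".  */
  printf ("\n");
  return 0;
}
#endif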
6379
6380 /* Checksum the context of the DIE. This adds the names of any
6381 surrounding namespaces or structures to the checksum. */
6382
6383 static void
6384 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
6385 {
6386 const char *name;
6387 dw_die_ref spec;
6388 int tag = die->die_tag;
6389
6390 if (tag != DW_TAG_namespace
6391 && tag != DW_TAG_structure_type
6392 && tag != DW_TAG_class_type)
6393 return;
6394
6395 name = get_AT_string (die, DW_AT_name);
6396
6397 spec = get_AT_ref (die, DW_AT_specification);
6398 if (spec != NULL)
6399 die = spec;
6400
6401 if (die->die_parent != NULL)
6402 checksum_die_context (die->die_parent, ctx);
6403
6404 CHECKSUM_ULEB128 ('C');
6405 CHECKSUM_ULEB128 (tag);
6406 if (name != NULL)
6407 CHECKSUM_STRING (name);
6408 }
6409
6410 /* Calculate the checksum of a location expression. */
6411
6412 static inline void
6413 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6414 {
6415 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
6416 were emitted as a DW_FORM_sdata instead of a location expression. */
6417 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
6418 {
6419 CHECKSUM_ULEB128 (DW_FORM_sdata);
6420 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
6421 return;
6422 }
6423
6424 /* Otherwise, just checksum the raw location expression. */
6425 while (loc != NULL)
6426 {
6427 inchash::hash hstate;
6428 hashval_t hash;
6429
6430 CHECKSUM_ULEB128 (loc->dtprel);
6431 CHECKSUM_ULEB128 (loc->dw_loc_opc);
6432 hash_loc_operands (loc, hstate);
6433 hash = hstate.end ();
6434 CHECKSUM (hash);
6435 loc = loc->dw_loc_next;
6436 }
6437 }
6438
6439 /* Calculate the checksum of an attribute. */
6440
6441 static void
6442 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
6443 struct md5_ctx *ctx, int *mark)
6444 {
6445 dw_loc_descr_ref loc;
6446 rtx r;
6447
6448 if (AT_class (at) == dw_val_class_die_ref)
6449 {
6450 dw_die_ref target_die = AT_ref (at);
6451
6452 /* For pointer and reference types, we checksum only the (qualified)
6453 name of the target type (if there is a name). For friend entries,
6454 we checksum only the (qualified) name of the target type or function.
6455 This allows the checksum to remain the same whether the target type
6456 is complete or not. */
6457 if ((at->dw_attr == DW_AT_type
6458 && (tag == DW_TAG_pointer_type
6459 || tag == DW_TAG_reference_type
6460 || tag == DW_TAG_rvalue_reference_type
6461 || tag == DW_TAG_ptr_to_member_type))
6462 || (at->dw_attr == DW_AT_friend
6463 && tag == DW_TAG_friend))
6464 {
6465 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
6466
6467 if (name_attr != NULL)
6468 {
6469 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
6470
6471 if (decl == NULL)
6472 decl = target_die;
6473 CHECKSUM_ULEB128 ('N');
6474 CHECKSUM_ULEB128 (at->dw_attr);
6475 if (decl->die_parent != NULL)
6476 checksum_die_context (decl->die_parent, ctx);
6477 CHECKSUM_ULEB128 ('E');
6478 CHECKSUM_STRING (AT_string (name_attr));
6479 return;
6480 }
6481 }
6482
6483 /* For all other references to another DIE, we check to see if the
6484 target DIE has already been visited. If it has, we emit a
6485 backward reference; if not, we descend recursively. */
6486 if (target_die->die_mark > 0)
6487 {
6488 CHECKSUM_ULEB128 ('R');
6489 CHECKSUM_ULEB128 (at->dw_attr);
6490 CHECKSUM_ULEB128 (target_die->die_mark);
6491 }
6492 else
6493 {
6494 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
6495
6496 if (decl == NULL)
6497 decl = target_die;
6498 target_die->die_mark = ++(*mark);
6499 CHECKSUM_ULEB128 ('T');
6500 CHECKSUM_ULEB128 (at->dw_attr);
6501 if (decl->die_parent != NULL)
6502 checksum_die_context (decl->die_parent, ctx);
6503 die_checksum_ordered (target_die, ctx, mark);
6504 }
6505 return;
6506 }
6507
6508 CHECKSUM_ULEB128 ('A');
6509 CHECKSUM_ULEB128 (at->dw_attr);
6510
6511 switch (AT_class (at))
6512 {
6513 case dw_val_class_const:
6514 case dw_val_class_const_implicit:
6515 CHECKSUM_ULEB128 (DW_FORM_sdata);
6516 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
6517 break;
6518
6519 case dw_val_class_unsigned_const:
6520 case dw_val_class_unsigned_const_implicit:
6521 CHECKSUM_ULEB128 (DW_FORM_sdata);
6522 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
6523 break;
6524
6525 case dw_val_class_const_double:
6526 CHECKSUM_ULEB128 (DW_FORM_block);
6527 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
6528 CHECKSUM (at->dw_attr_val.v.val_double);
6529 break;
6530
6531 case dw_val_class_wide_int:
6532 CHECKSUM_ULEB128 (DW_FORM_block);
6533 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
6534 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6535 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6536 get_full_len (*at->dw_attr_val.v.val_wide)
6537 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6538 break;
6539
6540 case dw_val_class_vec:
6541 CHECKSUM_ULEB128 (DW_FORM_block);
6542 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
6543 * at->dw_attr_val.v.val_vec.elt_size);
6544 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6545 (at->dw_attr_val.v.val_vec.length
6546 * at->dw_attr_val.v.val_vec.elt_size));
6547 break;
6548
6549 case dw_val_class_flag:
6550 CHECKSUM_ULEB128 (DW_FORM_flag);
6551 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
6552 break;
6553
6554 case dw_val_class_str:
6555 CHECKSUM_ULEB128 (DW_FORM_string);
6556 CHECKSUM_STRING (AT_string (at));
6557 break;
6558
6559 case dw_val_class_addr:
6560 r = AT_addr (at);
6561 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6562 CHECKSUM_ULEB128 (DW_FORM_string);
6563 CHECKSUM_STRING (XSTR (r, 0));
6564 break;
6565
6566 case dw_val_class_offset:
6567 CHECKSUM_ULEB128 (DW_FORM_sdata);
6568 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
6569 break;
6570
6571 case dw_val_class_loc:
6572 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6573 loc_checksum_ordered (loc, ctx);
6574 break;
6575
6576 case dw_val_class_fde_ref:
6577 case dw_val_class_lbl_id:
6578 case dw_val_class_lineptr:
6579 case dw_val_class_macptr:
6580 case dw_val_class_loclistsptr:
6581 case dw_val_class_high_pc:
6582 break;
6583
6584 case dw_val_class_file:
6585 case dw_val_class_file_implicit:
6586 CHECKSUM_ULEB128 (DW_FORM_string);
6587 CHECKSUM_STRING (AT_file (at)->filename);
6588 break;
6589
6590 case dw_val_class_data8:
6591 CHECKSUM (at->dw_attr_val.v.val_data8);
6592 break;
6593
6594 default:
6595 break;
6596 }
6597 }
6598
6599 struct checksum_attributes
6600 {
6601 dw_attr_node *at_name;
6602 dw_attr_node *at_type;
6603 dw_attr_node *at_friend;
6604 dw_attr_node *at_accessibility;
6605 dw_attr_node *at_address_class;
6606 dw_attr_node *at_allocated;
6607 dw_attr_node *at_artificial;
6608 dw_attr_node *at_associated;
6609 dw_attr_node *at_binary_scale;
6610 dw_attr_node *at_bit_offset;
6611 dw_attr_node *at_bit_size;
6612 dw_attr_node *at_bit_stride;
6613 dw_attr_node *at_byte_size;
6614 dw_attr_node *at_byte_stride;
6615 dw_attr_node *at_const_value;
6616 dw_attr_node *at_containing_type;
6617 dw_attr_node *at_count;
6618 dw_attr_node *at_data_location;
6619 dw_attr_node *at_data_member_location;
6620 dw_attr_node *at_decimal_scale;
6621 dw_attr_node *at_decimal_sign;
6622 dw_attr_node *at_default_value;
6623 dw_attr_node *at_digit_count;
6624 dw_attr_node *at_discr;
6625 dw_attr_node *at_discr_list;
6626 dw_attr_node *at_discr_value;
6627 dw_attr_node *at_encoding;
6628 dw_attr_node *at_endianity;
6629 dw_attr_node *at_explicit;
6630 dw_attr_node *at_is_optional;
6631 dw_attr_node *at_location;
6632 dw_attr_node *at_lower_bound;
6633 dw_attr_node *at_mutable;
6634 dw_attr_node *at_ordering;
6635 dw_attr_node *at_picture_string;
6636 dw_attr_node *at_prototyped;
6637 dw_attr_node *at_small;
6638 dw_attr_node *at_segment;
6639 dw_attr_node *at_string_length;
6640 dw_attr_node *at_string_length_bit_size;
6641 dw_attr_node *at_string_length_byte_size;
6642 dw_attr_node *at_threads_scaled;
6643 dw_attr_node *at_upper_bound;
6644 dw_attr_node *at_use_location;
6645 dw_attr_node *at_use_UTF8;
6646 dw_attr_node *at_variable_parameter;
6647 dw_attr_node *at_virtuality;
6648 dw_attr_node *at_visibility;
6649 dw_attr_node *at_vtable_elem_location;
6650 };
6651
6652 /* Collect the attributes that we will want to use for the checksum. */
6653
6654 static void
6655 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
6656 {
6657 dw_attr_node *a;
6658 unsigned ix;
6659
6660 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6661 {
6662 switch (a->dw_attr)
6663 {
6664 case DW_AT_name:
6665 attrs->at_name = a;
6666 break;
6667 case DW_AT_type:
6668 attrs->at_type = a;
6669 break;
6670 case DW_AT_friend:
6671 attrs->at_friend = a;
6672 break;
6673 case DW_AT_accessibility:
6674 attrs->at_accessibility = a;
6675 break;
6676 case DW_AT_address_class:
6677 attrs->at_address_class = a;
6678 break;
6679 case DW_AT_allocated:
6680 attrs->at_allocated = a;
6681 break;
6682 case DW_AT_artificial:
6683 attrs->at_artificial = a;
6684 break;
6685 case DW_AT_associated:
6686 attrs->at_associated = a;
6687 break;
6688 case DW_AT_binary_scale:
6689 attrs->at_binary_scale = a;
6690 break;
6691 case DW_AT_bit_offset:
6692 attrs->at_bit_offset = a;
6693 break;
6694 case DW_AT_bit_size:
6695 attrs->at_bit_size = a;
6696 break;
6697 case DW_AT_bit_stride:
6698 attrs->at_bit_stride = a;
6699 break;
6700 case DW_AT_byte_size:
6701 attrs->at_byte_size = a;
6702 break;
6703 case DW_AT_byte_stride:
6704 attrs->at_byte_stride = a;
6705 break;
6706 case DW_AT_const_value:
6707 attrs->at_const_value = a;
6708 break;
6709 case DW_AT_containing_type:
6710 attrs->at_containing_type = a;
6711 break;
6712 case DW_AT_count:
6713 attrs->at_count = a;
6714 break;
6715 case DW_AT_data_location:
6716 attrs->at_data_location = a;
6717 break;
6718 case DW_AT_data_member_location:
6719 attrs->at_data_member_location = a;
6720 break;
6721 case DW_AT_decimal_scale:
6722 attrs->at_decimal_scale = a;
6723 break;
6724 case DW_AT_decimal_sign:
6725 attrs->at_decimal_sign = a;
6726 break;
6727 case DW_AT_default_value:
6728 attrs->at_default_value = a;
6729 break;
6730 case DW_AT_digit_count:
6731 attrs->at_digit_count = a;
6732 break;
6733 case DW_AT_discr:
6734 attrs->at_discr = a;
6735 break;
6736 case DW_AT_discr_list:
6737 attrs->at_discr_list = a;
6738 break;
6739 case DW_AT_discr_value:
6740 attrs->at_discr_value = a;
6741 break;
6742 case DW_AT_encoding:
6743 attrs->at_encoding = a;
6744 break;
6745 case DW_AT_endianity:
6746 attrs->at_endianity = a;
6747 break;
6748 case DW_AT_explicit:
6749 attrs->at_explicit = a;
6750 break;
6751 case DW_AT_is_optional:
6752 attrs->at_is_optional = a;
6753 break;
6754 case DW_AT_location:
6755 attrs->at_location = a;
6756 break;
6757 case DW_AT_lower_bound:
6758 attrs->at_lower_bound = a;
6759 break;
6760 case DW_AT_mutable:
6761 attrs->at_mutable = a;
6762 break;
6763 case DW_AT_ordering:
6764 attrs->at_ordering = a;
6765 break;
6766 case DW_AT_picture_string:
6767 attrs->at_picture_string = a;
6768 break;
6769 case DW_AT_prototyped:
6770 attrs->at_prototyped = a;
6771 break;
6772 case DW_AT_small:
6773 attrs->at_small = a;
6774 break;
6775 case DW_AT_segment:
6776 attrs->at_segment = a;
6777 break;
6778 case DW_AT_string_length:
6779 attrs->at_string_length = a;
6780 break;
6781 case DW_AT_string_length_bit_size:
6782 attrs->at_string_length_bit_size = a;
6783 break;
6784 case DW_AT_string_length_byte_size:
6785 attrs->at_string_length_byte_size = a;
6786 break;
6787 case DW_AT_threads_scaled:
6788 attrs->at_threads_scaled = a;
6789 break;
6790 case DW_AT_upper_bound:
6791 attrs->at_upper_bound = a;
6792 break;
6793 case DW_AT_use_location:
6794 attrs->at_use_location = a;
6795 break;
6796 case DW_AT_use_UTF8:
6797 attrs->at_use_UTF8 = a;
6798 break;
6799 case DW_AT_variable_parameter:
6800 attrs->at_variable_parameter = a;
6801 break;
6802 case DW_AT_virtuality:
6803 attrs->at_virtuality = a;
6804 break;
6805 case DW_AT_visibility:
6806 attrs->at_visibility = a;
6807 break;
6808 case DW_AT_vtable_elem_location:
6809 attrs->at_vtable_elem_location = a;
6810 break;
6811 default:
6812 break;
6813 }
6814 }
6815 }
6816
6817 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
6818
6819 static void
6820 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6821 {
6822 dw_die_ref c;
6823 dw_die_ref decl;
6824 struct checksum_attributes attrs;
6825
6826 CHECKSUM_ULEB128 ('D');
6827 CHECKSUM_ULEB128 (die->die_tag);
6828
6829 memset (&attrs, 0, sizeof (attrs));
6830
6831 decl = get_AT_ref (die, DW_AT_specification);
6832 if (decl != NULL)
6833 collect_checksum_attributes (&attrs, decl);
6834 collect_checksum_attributes (&attrs, die);
6835
6836 CHECKSUM_ATTR (attrs.at_name);
6837 CHECKSUM_ATTR (attrs.at_accessibility);
6838 CHECKSUM_ATTR (attrs.at_address_class);
6839 CHECKSUM_ATTR (attrs.at_allocated);
6840 CHECKSUM_ATTR (attrs.at_artificial);
6841 CHECKSUM_ATTR (attrs.at_associated);
6842 CHECKSUM_ATTR (attrs.at_binary_scale);
6843 CHECKSUM_ATTR (attrs.at_bit_offset);
6844 CHECKSUM_ATTR (attrs.at_bit_size);
6845 CHECKSUM_ATTR (attrs.at_bit_stride);
6846 CHECKSUM_ATTR (attrs.at_byte_size);
6847 CHECKSUM_ATTR (attrs.at_byte_stride);
6848 CHECKSUM_ATTR (attrs.at_const_value);
6849 CHECKSUM_ATTR (attrs.at_containing_type);
6850 CHECKSUM_ATTR (attrs.at_count);
6851 CHECKSUM_ATTR (attrs.at_data_location);
6852 CHECKSUM_ATTR (attrs.at_data_member_location);
6853 CHECKSUM_ATTR (attrs.at_decimal_scale);
6854 CHECKSUM_ATTR (attrs.at_decimal_sign);
6855 CHECKSUM_ATTR (attrs.at_default_value);
6856 CHECKSUM_ATTR (attrs.at_digit_count);
6857 CHECKSUM_ATTR (attrs.at_discr);
6858 CHECKSUM_ATTR (attrs.at_discr_list);
6859 CHECKSUM_ATTR (attrs.at_discr_value);
6860 CHECKSUM_ATTR (attrs.at_encoding);
6861 CHECKSUM_ATTR (attrs.at_endianity);
6862 CHECKSUM_ATTR (attrs.at_explicit);
6863 CHECKSUM_ATTR (attrs.at_is_optional);
6864 CHECKSUM_ATTR (attrs.at_location);
6865 CHECKSUM_ATTR (attrs.at_lower_bound);
6866 CHECKSUM_ATTR (attrs.at_mutable);
6867 CHECKSUM_ATTR (attrs.at_ordering);
6868 CHECKSUM_ATTR (attrs.at_picture_string);
6869 CHECKSUM_ATTR (attrs.at_prototyped);
6870 CHECKSUM_ATTR (attrs.at_small);
6871 CHECKSUM_ATTR (attrs.at_segment);
6872 CHECKSUM_ATTR (attrs.at_string_length);
6873 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
6874 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
6875 CHECKSUM_ATTR (attrs.at_threads_scaled);
6876 CHECKSUM_ATTR (attrs.at_upper_bound);
6877 CHECKSUM_ATTR (attrs.at_use_location);
6878 CHECKSUM_ATTR (attrs.at_use_UTF8);
6879 CHECKSUM_ATTR (attrs.at_variable_parameter);
6880 CHECKSUM_ATTR (attrs.at_virtuality);
6881 CHECKSUM_ATTR (attrs.at_visibility);
6882 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
6883 CHECKSUM_ATTR (attrs.at_type);
6884 CHECKSUM_ATTR (attrs.at_friend);
6885
6886 /* Checksum the child DIEs. */
6887 c = die->die_child;
6888 if (c) do {
6889 dw_attr_node *name_attr;
6890
6891 c = c->die_sib;
6892 name_attr = get_AT (c, DW_AT_name);
6893 if (is_template_instantiation (c))
6894 {
6895 /* Ignore instantiations of member type and function templates. */
6896 }
6897 else if (name_attr != NULL
6898 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
6899 {
6900 /* Use a shallow checksum for named nested types and member
6901 functions. */
6902 CHECKSUM_ULEB128 ('S');
6903 CHECKSUM_ULEB128 (c->die_tag);
6904 CHECKSUM_STRING (AT_string (name_attr));
6905 }
6906 else
6907 {
6908 /* Use a deep checksum for other children. */
6909 /* Mark this DIE so it gets processed when unmarking. */
6910 if (c->die_mark == 0)
6911 c->die_mark = -1;
6912 die_checksum_ordered (c, ctx, mark);
6913 }
6914 } while (c != die->die_child);
6915
6916 CHECKSUM_ULEB128 (0);
6917 }
6918
6919 /* Add a type name and tag to a hash. */
6920 static void
6921 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
6922 {
6923 CHECKSUM_ULEB128 (tag);
6924 CHECKSUM_STRING (name);
6925 }
6926
6927 #undef CHECKSUM
6928 #undef CHECKSUM_STRING
6929 #undef CHECKSUM_ATTR
6930 #undef CHECKSUM_SLEB128
6931 #undef CHECKSUM_ULEB128
6932
6933 /* Generate the type signature for DIE. This is computed by generating an
6934 MD5 checksum over the DIE's tag, its relevant attributes, and its
6935 children. Attributes that are references to other DIEs are processed
6936 by recursion, using the MARK field to prevent infinite recursion.
6937 If the DIE is nested inside a namespace or another type, we also
6938 need to include that context in the signature. The lower 64 bits
6939 of the resulting MD5 checksum comprise the signature. */
6940
6941 static void
6942 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
6943 {
6944 int mark;
6945 const char *name;
6946 unsigned char checksum[16];
6947 struct md5_ctx ctx;
6948 dw_die_ref decl;
6949 dw_die_ref parent;
6950
6951 name = get_AT_string (die, DW_AT_name);
6952 decl = get_AT_ref (die, DW_AT_specification);
6953 parent = get_die_parent (die);
6954
6955 /* First, compute a signature for just the type name (and its surrounding
6956 context, if any). This is stored in the type unit DIE for link-time
6957 ODR (one-definition rule) checking. */
6958
6959 if (is_cxx () && name != NULL)
6960 {
6961 md5_init_ctx (&ctx);
6962
6963 /* Checksum the names of surrounding namespaces and structures. */
6964 if (parent != NULL)
6965 checksum_die_context (parent, &ctx);
6966
6967 /* Checksum the current DIE. */
6968 die_odr_checksum (die->die_tag, name, &ctx);
6969 md5_finish_ctx (&ctx, checksum);
6970
6971 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
6972 }
6973
6974 /* Next, compute the complete type signature. */
6975
6976 md5_init_ctx (&ctx);
6977 mark = 1;
6978 die->die_mark = mark;
6979
6980 /* Checksum the names of surrounding namespaces and structures. */
6981 if (parent != NULL)
6982 checksum_die_context (parent, &ctx);
6983
6984 /* Checksum the DIE and its children. */
6985 die_checksum_ordered (die, &ctx, &mark);
6986 unmark_all_dies (die);
6987 md5_finish_ctx (&ctx, checksum);
6988
6989 /* Store the signature in the type node and link the type DIE and the
6990 type node together. */
6991 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
6992 DWARF_TYPE_SIGNATURE_SIZE);
6993 die->comdat_type_p = true;
6994 die->die_id.die_type_node = type_node;
6995 type_node->type_die = die;
6996
6997 /* If the DIE is a specification, link its declaration to the type node
6998 as well. */
6999 if (decl != NULL)
7000 {
7001 decl->comdat_type_p = true;
7002 decl->die_id.die_type_node = type_node;
7003 }
7004 }
7005
7006 /* Do the location expressions look the same? */
7007 static inline int
7008 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7009 {
7010 return loc1->dw_loc_opc == loc2->dw_loc_opc
7011 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7012 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7013 }
7014
7015 /* Do the values look the same? */
7016 static int
7017 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7018 {
7019 dw_loc_descr_ref loc1, loc2;
7020 rtx r1, r2;
7021
7022 if (v1->val_class != v2->val_class)
7023 return 0;
7024
7025 switch (v1->val_class)
7026 {
7027 case dw_val_class_const:
7028 case dw_val_class_const_implicit:
7029 return v1->v.val_int == v2->v.val_int;
7030 case dw_val_class_unsigned_const:
7031 case dw_val_class_unsigned_const_implicit:
7032 return v1->v.val_unsigned == v2->v.val_unsigned;
7033 case dw_val_class_const_double:
7034 return v1->v.val_double.high == v2->v.val_double.high
7035 && v1->v.val_double.low == v2->v.val_double.low;
7036 case dw_val_class_wide_int:
7037 return *v1->v.val_wide == *v2->v.val_wide;
7038 case dw_val_class_vec:
7039 if (v1->v.val_vec.length != v2->v.val_vec.length
7040 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7041 return 0;
7042 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7043 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7044 return 0;
7045 return 1;
7046 case dw_val_class_flag:
7047 return v1->v.val_flag == v2->v.val_flag;
7048 case dw_val_class_str:
7049 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7050
7051 case dw_val_class_addr:
7052 r1 = v1->v.val_addr;
7053 r2 = v2->v.val_addr;
7054 if (GET_CODE (r1) != GET_CODE (r2))
7055 return 0;
7056 return rtx_equal_p (r1, r2);
7057
7058 case dw_val_class_offset:
7059 return v1->v.val_offset == v2->v.val_offset;
7060
7061 case dw_val_class_loc:
7062 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7063 loc1 && loc2;
7064 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7065 if (!same_loc_p (loc1, loc2, mark))
7066 return 0;
7067 return !loc1 && !loc2;
7068
7069 case dw_val_class_die_ref:
7070 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7071
7072 case dw_val_class_fde_ref:
7073 case dw_val_class_vms_delta:
7074 case dw_val_class_lbl_id:
7075 case dw_val_class_lineptr:
7076 case dw_val_class_macptr:
7077 case dw_val_class_loclistsptr:
7078 case dw_val_class_high_pc:
7079 return 1;
7080
7081 case dw_val_class_file:
7082 case dw_val_class_file_implicit:
7083 return v1->v.val_file == v2->v.val_file;
7084
7085 case dw_val_class_data8:
7086 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7087
7088 default:
7089 return 1;
7090 }
7091 }
7092
7093 /* Do the attributes look the same? */
7094
7095 static int
7096 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7097 {
7098 if (at1->dw_attr != at2->dw_attr)
7099 return 0;
7100
7101 /* We don't care that this was compiled with a different compiler
7102 snapshot; if the output is the same, that's what matters. */
7103 if (at1->dw_attr == DW_AT_producer)
7104 return 1;
7105
7106 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7107 }
7108
7109 /* Do the dies look the same? */
7110
7111 static int
7112 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7113 {
7114 dw_die_ref c1, c2;
7115 dw_attr_node *a1;
7116 unsigned ix;
7117
7118 /* To avoid infinite recursion. */
7119 if (die1->die_mark)
7120 return die1->die_mark == die2->die_mark;
7121 die1->die_mark = die2->die_mark = ++(*mark);
7122
7123 if (die1->die_tag != die2->die_tag)
7124 return 0;
7125
7126 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7127 return 0;
7128
7129 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7130 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7131 return 0;
7132
7133 c1 = die1->die_child;
7134 c2 = die2->die_child;
7135 if (! c1)
7136 {
7137 if (c2)
7138 return 0;
7139 }
7140 else
7141 for (;;)
7142 {
7143 if (!same_die_p (c1, c2, mark))
7144 return 0;
7145 c1 = c1->die_sib;
7146 c2 = c2->die_sib;
7147 if (c1 == die1->die_child)
7148 {
7149 if (c2 == die2->die_child)
7150 break;
7151 else
7152 return 0;
7153 }
7154 }
7155
7156 return 1;
7157 }
7158
7159 /* Do the dies look the same? Wrapper around same_die_p. */
7160
7161 static int
7162 same_die_p_wrap (dw_die_ref die1, dw_die_ref die2)
7163 {
7164 int mark = 0;
7165 int ret = same_die_p (die1, die2, &mark);
7166
7167 unmark_all_dies (die1);
7168 unmark_all_dies (die2);
7169
7170 return ret;
7171 }
7172
7173 /* The prefix to attach to symbols on DIEs in the current comdat debug
7174 info section. */
7175 static const char *comdat_symbol_id;
7176
7177 /* The index of the current symbol within the current comdat CU. */
7178 static unsigned int comdat_symbol_number;
7179
7180 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7181 children, and set comdat_symbol_id accordingly. */
7182
7183 static void
7184 compute_section_prefix (dw_die_ref unit_die)
7185 {
7186 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7187 const char *base = die_name ? lbasename (die_name) : "anonymous";
7188 char *name = XALLOCAVEC (char, strlen (base) + 64);
7189 char *p;
7190 int i, mark;
7191 unsigned char checksum[16];
7192 struct md5_ctx ctx;
7193
7194 /* Compute the checksum of the DIE, then append part of it as hex digits to
7195 the name of the unit. */
7196
7197 md5_init_ctx (&ctx);
7198 mark = 0;
7199 die_checksum (unit_die, &ctx, &mark);
7200 unmark_all_dies (unit_die);
7201 md5_finish_ctx (&ctx, checksum);
7202
7203 sprintf (name, "%s.", base);
7204 clean_symbol_name (name);
7205
7206 p = name + strlen (name);
7207 for (i = 0; i < 4; i++)
7208 {
7209 sprintf (p, "%.2x", checksum[i]);
7210 p += 2;
7211 }
7212
7213 comdat_symbol_id = unit_die->die_id.die_symbol = xstrdup (name);
7214 comdat_symbol_number = 0;
7215 }
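/* For illustration only (names and digits invented): a unit whose basename
   is "foo.C" and whose DIE checksum starts with the bytes 6d 2e 4a 7f ends
   up with a prefix built from the cleaned-up basename plus the first four
   checksum bytes in hex, i.e. something like "foo_C.6d2e4a7f" -- the exact
   separators depend on what clean_symbol_name leaves untouched, and the
   digits on the MD5 of the particular DIE tree.  */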
7216
7217 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7218
7219 static int
7220 is_type_die (dw_die_ref die)
7221 {
7222 switch (die->die_tag)
7223 {
7224 case DW_TAG_array_type:
7225 case DW_TAG_class_type:
7226 case DW_TAG_interface_type:
7227 case DW_TAG_enumeration_type:
7228 case DW_TAG_pointer_type:
7229 case DW_TAG_reference_type:
7230 case DW_TAG_rvalue_reference_type:
7231 case DW_TAG_string_type:
7232 case DW_TAG_structure_type:
7233 case DW_TAG_subroutine_type:
7234 case DW_TAG_union_type:
7235 case DW_TAG_ptr_to_member_type:
7236 case DW_TAG_set_type:
7237 case DW_TAG_subrange_type:
7238 case DW_TAG_base_type:
7239 case DW_TAG_const_type:
7240 case DW_TAG_file_type:
7241 case DW_TAG_packed_type:
7242 case DW_TAG_volatile_type:
7243 case DW_TAG_typedef:
7244 return 1;
7245 default:
7246 return 0;
7247 }
7248 }
7249
7250 /* Returns 1 iff C is the sort of DIE that should go into a COMDAT CU.
7251 Basically, we want to choose the bits that are likely to be shared between
7252 compilations (types) and leave out the bits that are specific to individual
7253 compilations (functions). */
7254
7255 static int
7256 is_comdat_die (dw_die_ref c)
7257 {
7258 /* I think we want to leave base types and __vtbl_ptr_type in the main CU, as
7259 we do for stabs. The advantage is a greater likelihood of sharing between
7260 objects that don't include headers in the same order (and therefore would
7261 put the base types in a different comdat). jason 8/28/00 */
7262
7263 if (c->die_tag == DW_TAG_base_type)
7264 return 0;
7265
7266 if (c->die_tag == DW_TAG_pointer_type
7267 || c->die_tag == DW_TAG_reference_type
7268 || c->die_tag == DW_TAG_rvalue_reference_type
7269 || c->die_tag == DW_TAG_const_type
7270 || c->die_tag == DW_TAG_volatile_type)
7271 {
7272 dw_die_ref t = get_AT_ref (c, DW_AT_type);
7273
7274 return t ? is_comdat_die (t) : 0;
7275 }
7276
7277 return is_type_die (c);
7278 }
7279
7280 /* Returns 1 iff C is the sort of DIE that might be referred to from another
7281 compilation unit. */
7282
7283 static int
7284 is_symbol_die (dw_die_ref c)
7285 {
7286 return (is_type_die (c)
7287 || is_declaration_die (c)
7288 || c->die_tag == DW_TAG_namespace
7289 || c->die_tag == DW_TAG_module);
7290 }
7291
7292 /* Returns true iff C is a compile-unit DIE. */
7293
7294 static inline bool
7295 is_cu_die (dw_die_ref c)
7296 {
7297 return c && (c->die_tag == DW_TAG_compile_unit
7298 || c->die_tag == DW_TAG_skeleton_unit);
7299 }
7300
7301 /* Returns true iff C is a unit DIE of some sort. */
7302
7303 static inline bool
7304 is_unit_die (dw_die_ref c)
7305 {
7306 return c && (c->die_tag == DW_TAG_compile_unit
7307 || c->die_tag == DW_TAG_partial_unit
7308 || c->die_tag == DW_TAG_type_unit
7309 || c->die_tag == DW_TAG_skeleton_unit);
7310 }
7311
7312 /* Returns true iff C is a namespace DIE. */
7313
7314 static inline bool
7315 is_namespace_die (dw_die_ref c)
7316 {
7317 return c && c->die_tag == DW_TAG_namespace;
7318 }
7319
7320 /* Returns true iff C is a class or structure DIE. */
7321
7322 static inline bool
7323 is_class_die (dw_die_ref c)
7324 {
7325 return c && (c->die_tag == DW_TAG_class_type
7326 || c->die_tag == DW_TAG_structure_type);
7327 }
7328
7329 /* Return non-zero if this DIE is a template parameter. */
7330
7331 static inline bool
7332 is_template_parameter (dw_die_ref die)
7333 {
7334 switch (die->die_tag)
7335 {
7336 case DW_TAG_template_type_param:
7337 case DW_TAG_template_value_param:
7338 case DW_TAG_GNU_template_template_param:
7339 case DW_TAG_GNU_template_parameter_pack:
7340 return true;
7341 default:
7342 return false;
7343 }
7344 }
7345
7346 /* Return non-zero if this DIE represents a template instantiation. */
7347
7348 static inline bool
7349 is_template_instantiation (dw_die_ref die)
7350 {
7351 dw_die_ref c;
7352
7353 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7354 return false;
7355 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7356 return false;
7357 }
7358
7359 static char *
7360 gen_internal_sym (const char *prefix)
7361 {
7362 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7363
7364 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7365 return xstrdup (buf);
7366 }
7367
7368 /* Assign symbols to all worthy DIEs under DIE. */
7369
7370 static void
7371 assign_symbol_names (dw_die_ref die)
7372 {
7373 dw_die_ref c;
7374
7375 if (is_symbol_die (die) && !die->comdat_type_p)
7376 {
7377 if (comdat_symbol_id)
7378 {
7379 char *p = XALLOCAVEC (char, strlen (comdat_symbol_id) + 64);
7380
7381 sprintf (p, "%s.%s.%x", DIE_LABEL_PREFIX,
7382 comdat_symbol_id, comdat_symbol_number++);
7383 die->die_id.die_symbol = xstrdup (p);
7384 }
7385 else
7386 die->die_id.die_symbol = gen_internal_sym ("LDIE");
7387 }
7388
7389 FOR_EACH_CHILD (die, c, assign_symbol_names (c));
7390 }
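/* Shape of the symbols produced above, with invented placeholder values:
   inside a comdat CU the format string "%s.%s.%x" yields something like
   "<DIE_LABEL_PREFIX>.foo_C.6d2e4a7f.1f", where the trailing hex digits are
   comdat_symbol_number; outside a comdat CU the DIE simply gets an internal
   "LDIE" label from gen_internal_sym.  Because duplicate CUs reuse the same
   starting number (see check_duplicate_cu below), identical CUs end up with
   identical symbols.  */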
7391
7392 struct cu_hash_table_entry
7393 {
7394 dw_die_ref cu;
7395 unsigned min_comdat_num, max_comdat_num;
7396 struct cu_hash_table_entry *next;
7397 };
7398
7399 /* Helpers to manipulate hash table of CUs. */
7400
7401 struct cu_hash_table_entry_hasher : pointer_hash <cu_hash_table_entry>
7402 {
7403 typedef die_struct *compare_type;
7404 static inline hashval_t hash (const cu_hash_table_entry *);
7405 static inline bool equal (const cu_hash_table_entry *, const die_struct *);
7406 static inline void remove (cu_hash_table_entry *);
7407 };
7408
7409 inline hashval_t
7410 cu_hash_table_entry_hasher::hash (const cu_hash_table_entry *entry)
7411 {
7412 return htab_hash_string (entry->cu->die_id.die_symbol);
7413 }
7414
7415 inline bool
7416 cu_hash_table_entry_hasher::equal (const cu_hash_table_entry *entry1,
7417 const die_struct *entry2)
7418 {
7419 return !strcmp (entry1->cu->die_id.die_symbol, entry2->die_id.die_symbol);
7420 }
7421
7422 inline void
7423 cu_hash_table_entry_hasher::remove (cu_hash_table_entry *entry)
7424 {
7425 struct cu_hash_table_entry *next;
7426
7427 while (entry)
7428 {
7429 next = entry->next;
7430 free (entry);
7431 entry = next;
7432 }
7433 }
7434
7435 typedef hash_table<cu_hash_table_entry_hasher> cu_hash_type;
7436
7437 /* Check whether we have already seen this CU and set up SYM_NUM
7438 accordingly. */
7439 static int
7440 check_duplicate_cu (dw_die_ref cu, cu_hash_type *htable, unsigned int *sym_num)
7441 {
7442 struct cu_hash_table_entry dummy;
7443 struct cu_hash_table_entry **slot, *entry, *last = &dummy;
7444
7445 dummy.max_comdat_num = 0;
7446
7447 slot = htable->find_slot_with_hash (cu,
7448 htab_hash_string (cu->die_id.die_symbol),
7449 INSERT);
7450 entry = *slot;
7451
7452 for (; entry; last = entry, entry = entry->next)
7453 {
7454 if (same_die_p_wrap (cu, entry->cu))
7455 break;
7456 }
7457
7458 if (entry)
7459 {
7460 *sym_num = entry->min_comdat_num;
7461 return 1;
7462 }
7463
7464 entry = XCNEW (struct cu_hash_table_entry);
7465 entry->cu = cu;
7466 entry->min_comdat_num = *sym_num = last->max_comdat_num;
7467 entry->next = *slot;
7468 *slot = entry;
7469
7470 return 0;
7471 }
7472
7473 /* Record SYM_NUM in the record of CU in HTABLE. */
7474 static void
7475 record_comdat_symbol_number (dw_die_ref cu, cu_hash_type *htable,
7476 unsigned int sym_num)
7477 {
7478 struct cu_hash_table_entry **slot, *entry;
7479
7480 slot = htable->find_slot_with_hash (cu,
7481 htab_hash_string (cu->die_id.die_symbol),
7482 NO_INSERT);
7483 entry = *slot;
7484
7485 entry->max_comdat_num = sym_num;
7486 }
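/* A small worked example of the bookkeeping above (symbol names invented):
   the first CU whose prefix is "foo_C.6d2e4a7f" gets min_comdat_num 0, and
   after its DIE symbols are assigned record_comdat_symbol_number stores the
   final counter as max_comdat_num.  A later CU with the same prefix that is
   not structurally identical starts numbering at that max, so its symbols
   cannot collide; a structurally identical duplicate instead reuses
   min_comdat_num and is dropped from the limbo list by break_out_includes.  */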
7487
7488 /* Traverse the DIE (which is always comp_unit_die), and set up
7489 additional compilation units for each of the include files we see
7490 bracketed by BINCL/EINCL. */
7491
7492 static void
7493 break_out_includes (dw_die_ref die)
7494 {
7495 dw_die_ref c;
7496 dw_die_ref unit = NULL;
7497 limbo_die_node *node, **pnode;
7498
7499 c = die->die_child;
7500 if (c) do {
7501 dw_die_ref prev = c;
7502 c = c->die_sib;
7503 while (c->die_tag == DW_TAG_GNU_BINCL || c->die_tag == DW_TAG_GNU_EINCL
7504 || (unit && is_comdat_die (c)))
7505 {
7506 dw_die_ref next = c->die_sib;
7507
7508 /* This DIE is for a secondary CU; remove it from the main one. */
7509 remove_child_with_prev (c, prev);
7510
7511 if (c->die_tag == DW_TAG_GNU_BINCL)
7512 unit = push_new_compile_unit (unit, c);
7513 else if (c->die_tag == DW_TAG_GNU_EINCL)
7514 unit = pop_compile_unit (unit);
7515 else
7516 add_child_die (unit, c);
7517 c = next;
7518 if (c == die->die_child)
7519 break;
7520 }
7521 } while (c != die->die_child);
7522
7523 #if 0
7524 /* We can only use this in debugging, since the frontend doesn't check
7525 to make sure that we leave every include file we enter. */
7526 gcc_assert (!unit);
7527 #endif
7528
7529 assign_symbol_names (die);
7530 cu_hash_type cu_hash_table (10);
7531 for (node = limbo_die_list, pnode = &limbo_die_list;
7532 node;
7533 node = node->next)
7534 {
7535 int is_dupl;
7536
7537 compute_section_prefix (node->die);
7538 is_dupl = check_duplicate_cu (node->die, &cu_hash_table,
7539 &comdat_symbol_number);
7540 assign_symbol_names (node->die);
7541 if (is_dupl)
7542 *pnode = node->next;
7543 else
7544 {
7545 pnode = &node->next;
7546 record_comdat_symbol_number (node->die, &cu_hash_table,
7547 comdat_symbol_number);
7548 }
7549 }
7550 }
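/* Illustrative before/after for the walk above (tags abbreviated, names
   invented).  A main CU whose children are

     f (subprogram), GNU_BINCL <h>, T (structure_type), GNU_EINCL,
     g (subprogram)

   keeps only f and g; the BINCL/EINCL markers disappear and T, being a
   comdat-worthy DIE seen while inside the include, is reparented into the
   separate unit created for <h> by push_new_compile_unit.  */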
7551
7552 /* Return non-zero if this DIE is a declaration. */
7553
7554 static int
7555 is_declaration_die (dw_die_ref die)
7556 {
7557 dw_attr_node *a;
7558 unsigned ix;
7559
7560 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7561 if (a->dw_attr == DW_AT_declaration)
7562 return 1;
7563
7564 return 0;
7565 }
7566
7567 /* Return non-zero if this DIE is nested inside a subprogram. */
7568
7569 static int
7570 is_nested_in_subprogram (dw_die_ref die)
7571 {
7572 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
7573
7574 if (decl == NULL)
7575 decl = die;
7576 return local_scope_p (decl);
7577 }
7578
7579 /* Return non-zero if this DIE contains a defining declaration of a
7580 subprogram. */
7581
7582 static int
7583 contains_subprogram_definition (dw_die_ref die)
7584 {
7585 dw_die_ref c;
7586
7587 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
7588 return 1;
7589 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
7590 return 0;
7591 }
7592
7593 /* Return non-zero if this is a type DIE that should be moved to a
7594 COMDAT .debug_types section or .debug_info section with DW_UT_*type
7595 unit type. */
7596
7597 static int
7598 should_move_die_to_comdat (dw_die_ref die)
7599 {
7600 switch (die->die_tag)
7601 {
7602 case DW_TAG_class_type:
7603 case DW_TAG_structure_type:
7604 case DW_TAG_enumeration_type:
7605 case DW_TAG_union_type:
7606 /* Don't move declarations, inlined instances, types nested in a
7607 subprogram, or types that contain subprogram definitions. */
7608 if (is_declaration_die (die)
7609 || get_AT (die, DW_AT_abstract_origin)
7610 || is_nested_in_subprogram (die)
7611 || contains_subprogram_definition (die))
7612 return 0;
7613 return 1;
7614 case DW_TAG_array_type:
7615 case DW_TAG_interface_type:
7616 case DW_TAG_pointer_type:
7617 case DW_TAG_reference_type:
7618 case DW_TAG_rvalue_reference_type:
7619 case DW_TAG_string_type:
7620 case DW_TAG_subroutine_type:
7621 case DW_TAG_ptr_to_member_type:
7622 case DW_TAG_set_type:
7623 case DW_TAG_subrange_type:
7624 case DW_TAG_base_type:
7625 case DW_TAG_const_type:
7626 case DW_TAG_file_type:
7627 case DW_TAG_packed_type:
7628 case DW_TAG_volatile_type:
7629 case DW_TAG_typedef:
7630 default:
7631 return 0;
7632 }
7633 }
7634
7635 /* Make a clone of DIE. */
7636
7637 static dw_die_ref
7638 clone_die (dw_die_ref die)
7639 {
7640 dw_die_ref clone;
7641 dw_attr_node *a;
7642 unsigned ix;
7643
7644 clone = ggc_cleared_alloc<die_node> ();
7645 clone->die_tag = die->die_tag;
7646
7647 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7648 add_dwarf_attr (clone, a);
7649
7650 return clone;
7651 }
7652
7653 /* Make a clone of the tree rooted at DIE. */
7654
7655 static dw_die_ref
7656 clone_tree (dw_die_ref die)
7657 {
7658 dw_die_ref c;
7659 dw_die_ref clone = clone_die (die);
7660
7661 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
7662
7663 return clone;
7664 }
7665
7666 /* Make a clone of DIE as a declaration. */
7667
7668 static dw_die_ref
7669 clone_as_declaration (dw_die_ref die)
7670 {
7671 dw_die_ref clone;
7672 dw_die_ref decl;
7673 dw_attr_node *a;
7674 unsigned ix;
7675
7676 /* If the DIE is already a declaration, just clone it. */
7677 if (is_declaration_die (die))
7678 return clone_die (die);
7679
7680 /* If the DIE is a specification, just clone its declaration DIE. */
7681 decl = get_AT_ref (die, DW_AT_specification);
7682 if (decl != NULL)
7683 {
7684 clone = clone_die (decl);
7685 if (die->comdat_type_p)
7686 add_AT_die_ref (clone, DW_AT_signature, die);
7687 return clone;
7688 }
7689
7690 clone = ggc_cleared_alloc<die_node> ();
7691 clone->die_tag = die->die_tag;
7692
7693 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7694 {
7695 /* We don't want to copy over all attributes.
7696 For example we don't want DW_AT_byte_size because otherwise we will no
7697 longer have a declaration and GDB will treat it as a definition. */
7698
7699 switch (a->dw_attr)
7700 {
7701 case DW_AT_abstract_origin:
7702 case DW_AT_artificial:
7703 case DW_AT_containing_type:
7704 case DW_AT_external:
7705 case DW_AT_name:
7706 case DW_AT_type:
7707 case DW_AT_virtuality:
7708 case DW_AT_linkage_name:
7709 case DW_AT_MIPS_linkage_name:
7710 add_dwarf_attr (clone, a);
7711 break;
7712 case DW_AT_byte_size:
7713 case DW_AT_alignment:
7714 default:
7715 break;
7716 }
7717 }
7718
7719 if (die->comdat_type_p)
7720 add_AT_die_ref (clone, DW_AT_signature, die);
7721
7722 add_AT_flag (clone, DW_AT_declaration, 1);
7723 return clone;
7724 }
7725
7726
7727 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
7728
7729 struct decl_table_entry
7730 {
7731 dw_die_ref orig;
7732 dw_die_ref copy;
7733 };
7734
7735 /* Helpers to manipulate hash table of copied declarations. */
7736
7737 /* Hashtable helpers. */
7738
7739 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
7740 {
7741 typedef die_struct *compare_type;
7742 static inline hashval_t hash (const decl_table_entry *);
7743 static inline bool equal (const decl_table_entry *, const die_struct *);
7744 };
7745
7746 inline hashval_t
7747 decl_table_entry_hasher::hash (const decl_table_entry *entry)
7748 {
7749 return htab_hash_pointer (entry->orig);
7750 }
7751
7752 inline bool
7753 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
7754 const die_struct *entry2)
7755 {
7756 return entry1->orig == entry2;
7757 }
7758
7759 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
7760
7761 /* Copy DIE and its ancestors, up to, but not including, the compile unit
7762 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
7763 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
7764 to check if the ancestor has already been copied into UNIT. */
7765
7766 static dw_die_ref
7767 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
7768 decl_hash_type *decl_table)
7769 {
7770 dw_die_ref parent = die->die_parent;
7771 dw_die_ref new_parent = unit;
7772 dw_die_ref copy;
7773 decl_table_entry **slot = NULL;
7774 struct decl_table_entry *entry = NULL;
7775
7776 if (decl_table)
7777 {
7778 /* Check if the entry has already been copied to UNIT. */
7779 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
7780 INSERT);
7781 if (*slot != HTAB_EMPTY_ENTRY)
7782 {
7783 entry = *slot;
7784 return entry->copy;
7785 }
7786
7787 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
7788 entry = XCNEW (struct decl_table_entry);
7789 entry->orig = die;
7790 entry->copy = NULL;
7791 *slot = entry;
7792 }
7793
7794 if (parent != NULL)
7795 {
7796 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
7797 if (spec != NULL)
7798 parent = spec;
7799 if (!is_unit_die (parent))
7800 new_parent = copy_ancestor_tree (unit, parent, decl_table);
7801 }
7802
7803 copy = clone_as_declaration (die);
7804 add_child_die (new_parent, copy);
7805
7806 if (decl_table)
7807 {
7808 /* Record the pointer to the copy. */
7809 entry->copy = copy;
7810 }
7811
7812 return copy;
7813 }
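/* For example (hypothetical C++ input): called on the DIE for
   ns::Outer::Inner, the function above adds declaration-only clones of
   "ns" (DW_TAG_namespace), "Outer" (DW_TAG_class_type) and "Inner" to
   UNIT, nested in that order, so the fully qualified context of the
   declaration is preserved in the new unit.  */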
7814 /* Copy the declaration context to the new type unit DIE. This includes
7815 any surrounding namespace or type declarations. If the DIE has an
7816 AT_specification attribute, it also includes attributes and children
7817 attached to the specification, and returns a pointer to the original
7818 parent of the declaration DIE. Returns NULL otherwise. */
7819
7820 static dw_die_ref
7821 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
7822 {
7823 dw_die_ref decl;
7824 dw_die_ref new_decl;
7825 dw_die_ref orig_parent = NULL;
7826
7827 decl = get_AT_ref (die, DW_AT_specification);
7828 if (decl == NULL)
7829 decl = die;
7830 else
7831 {
7832 unsigned ix;
7833 dw_die_ref c;
7834 dw_attr_node *a;
7835
7836 /* The original DIE will be changed to a declaration, and must
7837 be moved to be a child of the original declaration DIE. */
7838 orig_parent = decl->die_parent;
7839
7840 /* Copy the type node pointer from the new DIE to the original
7841 declaration DIE so we can forward references later. */
7842 decl->comdat_type_p = true;
7843 decl->die_id.die_type_node = die->die_id.die_type_node;
7844
7845 remove_AT (die, DW_AT_specification);
7846
7847 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
7848 {
7849 if (a->dw_attr != DW_AT_name
7850 && a->dw_attr != DW_AT_declaration
7851 && a->dw_attr != DW_AT_external)
7852 add_dwarf_attr (die, a);
7853 }
7854
7855 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
7856 }
7857
7858 if (decl->die_parent != NULL
7859 && !is_unit_die (decl->die_parent))
7860 {
7861 new_decl = copy_ancestor_tree (unit, decl, NULL);
7862 if (new_decl != NULL)
7863 {
7864 remove_AT (new_decl, DW_AT_signature);
7865 add_AT_specification (die, new_decl);
7866 }
7867 }
7868
7869 return orig_parent;
7870 }
7871
7872 /* Generate the skeleton ancestor tree for the given NODE, then clone
7873 the DIE and add the clone into the tree. */
7874
7875 static void
7876 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
7877 {
7878 if (node->new_die != NULL)
7879 return;
7880
7881 node->new_die = clone_as_declaration (node->old_die);
7882
7883 if (node->parent != NULL)
7884 {
7885 generate_skeleton_ancestor_tree (node->parent);
7886 add_child_die (node->parent->new_die, node->new_die);
7887 }
7888 }
7889
7890 /* Generate a skeleton tree of DIEs containing any declarations that are
7891 found in the original tree. We traverse the tree looking for declaration
7892 DIEs, and construct the skeleton from the bottom up whenever we find one. */
7893
7894 static void
7895 generate_skeleton_bottom_up (skeleton_chain_node *parent)
7896 {
7897 skeleton_chain_node node;
7898 dw_die_ref c;
7899 dw_die_ref first;
7900 dw_die_ref prev = NULL;
7901 dw_die_ref next = NULL;
7902
7903 node.parent = parent;
7904
7905 first = c = parent->old_die->die_child;
7906 if (c)
7907 next = c->die_sib;
7908 if (c) do {
7909 if (prev == NULL || prev->die_sib == c)
7910 prev = c;
7911 c = next;
7912 next = (c == first ? NULL : c->die_sib);
7913 node.old_die = c;
7914 node.new_die = NULL;
7915 if (is_declaration_die (c))
7916 {
7917 if (is_template_instantiation (c))
7918 {
7919 /* Instantiated templates do not need to be cloned into the
7920 type unit. Just move the DIE and its children back to
7921 the skeleton tree (in the main CU). */
7922 remove_child_with_prev (c, prev);
7923 add_child_die (parent->new_die, c);
7924 c = prev;
7925 }
7926 else if (c->comdat_type_p)
7927 {
7928 /* This is the skeleton of a type broken out by an earlier call to
7929 break_out_comdat_types. Clone the existing DIE, but keep the children
7930 under the original (which is in the main CU). */
7931 dw_die_ref clone = clone_die (c);
7932
7933 replace_child (c, clone, prev);
7934 generate_skeleton_ancestor_tree (parent);
7935 add_child_die (parent->new_die, c);
7936 c = clone;
7937 continue;
7938 }
7939 else
7940 {
7941 /* Clone the existing DIE, move the original to the skeleton
7942 tree (which is in the main CU), and put the clone, with
7943 all the original's children, where the original came from
7944 (which is about to be moved to the type unit). */
7945 dw_die_ref clone = clone_die (c);
7946 move_all_children (c, clone);
7947
7948 /* If the original has a DW_AT_object_pointer attribute,
7949 it would now point to a child DIE just moved to the
7950 cloned tree, so we need to remove that attribute from
7951 the original. */
7952 remove_AT (c, DW_AT_object_pointer);
7953
7954 replace_child (c, clone, prev);
7955 generate_skeleton_ancestor_tree (parent);
7956 add_child_die (parent->new_die, c);
7957 node.old_die = clone;
7958 node.new_die = c;
7959 c = clone;
7960 }
7961 }
7962 generate_skeleton_bottom_up (&node);
7963 } while (next != NULL);
7964 }
7965
7966 /* Wrapper function for generate_skeleton_bottom_up. */
7967
7968 static dw_die_ref
7969 generate_skeleton (dw_die_ref die)
7970 {
7971 skeleton_chain_node node;
7972
7973 node.old_die = die;
7974 node.new_die = NULL;
7975 node.parent = NULL;
7976
7977 /* If this type definition is nested inside another type,
7978 and is not an instantiation of a template, always leave
7979 at least a declaration in its place. */
7980 if (die->die_parent != NULL
7981 && is_type_die (die->die_parent)
7982 && !is_template_instantiation (die))
7983 node.new_die = clone_as_declaration (die);
7984
7985 generate_skeleton_bottom_up (&node);
7986 return node.new_die;
7987 }
7988
7989 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
7990 declaration. The original DIE is moved to a new compile unit so that
7991 existing references to it follow it to the new location. If any of the
7992 original DIE's descendants is a declaration, we need to replace the
7993 original DIE with a skeleton tree and move the declarations back into the
7994 skeleton tree. */
7995
7996 static dw_die_ref
7997 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
7998 dw_die_ref prev)
7999 {
8000 dw_die_ref skeleton, orig_parent;
8001
8002 /* Copy the declaration context to the type unit DIE. If the returned
8003 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8004 that DIE. */
8005 orig_parent = copy_declaration_context (unit, child);
8006
8007 skeleton = generate_skeleton (child);
8008 if (skeleton == NULL)
8009 remove_child_with_prev (child, prev);
8010 else
8011 {
8012 skeleton->comdat_type_p = true;
8013 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8014
8015 /* If the original DIE was a specification, we need to put
8016 the skeleton under the parent DIE of the declaration.
8017 This leaves the original declaration in the tree, but
8018 it will be pruned later since there are no longer any
8019 references to it. */
8020 if (orig_parent != NULL)
8021 {
8022 remove_child_with_prev (child, prev);
8023 add_child_die (orig_parent, skeleton);
8024 }
8025 else
8026 replace_child (child, skeleton, prev);
8027 }
8028
8029 return skeleton;
8030 }
8031
8032 static void
8033 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8034 comdat_type_node *type_node,
8035 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8036
8037 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
8038 procedure, put it under TYPE_NODE and return the copy. Continue looking for
8039 DWARF procedure references in the DW_AT_location attribute. */
8040
8041 static dw_die_ref
8042 copy_dwarf_procedure (dw_die_ref die,
8043 comdat_type_node *type_node,
8044 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8045 {
8046 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8047
8048 /* DWARF procedures are not supposed to have children... */
8049 gcc_assert (die->die_child == NULL);
8050
8051 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8052 gcc_assert (vec_safe_length (die->die_attr) == 1
8053 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8054
8055 /* Do not copy DWARF procedures more than once. */
8056 bool existed;
8057 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8058 if (existed)
8059 return die_copy;
8060
8061 die_copy = clone_die (die);
8062 add_child_die (type_node->root_die, die_copy);
8063 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8064 return die_copy;
8065 }
8066
8067 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8068 procedures in DIE's attributes. */
8069
8070 static void
8071 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8072 comdat_type_node *type_node,
8073 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8074 {
8075 dw_attr_node *a;
8076 unsigned i;
8077
8078 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8079 {
8080 dw_loc_descr_ref loc;
8081
8082 if (a->dw_attr_val.val_class != dw_val_class_loc)
8083 continue;
8084
8085 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8086 {
8087 switch (loc->dw_loc_opc)
8088 {
8089 case DW_OP_call2:
8090 case DW_OP_call4:
8091 case DW_OP_call_ref:
8092 gcc_assert (loc->dw_loc_oprnd1.val_class
8093 == dw_val_class_die_ref);
8094 loc->dw_loc_oprnd1.v.val_die_ref.die
8095 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8096 type_node,
8097 copied_dwarf_procs);
8098
8099 default:
8100 break;
8101 }
8102 }
8103 }
8104 }
8105
8106 /* Copy DWARF procedures that are referenced by the DIE tree to TYPE_NODE and
8107 rewrite references to point to the copies.
8108 
8109 References are looked for in DIE's attributes and recursively in the
8110 attributes of all its children that are location descriptions.
8111 COPIED_DWARF_PROCS is a mapping from old DWARF procedures to their copies;
8112 it is used to avoid copying the same DWARF procedure twice under TYPE_NODE. */
8113
8114 static void
8115 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8116 comdat_type_node *type_node,
8117 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8118 {
8119 dw_die_ref c;
8120
8121 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8122 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8123 type_node,
8124 copied_dwarf_procs));
8125 }
8126
8127 /* Traverse the DIE and set up additional .debug_types or .debug_info
8128 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8129 section. */
8130
8131 static void
8132 break_out_comdat_types (dw_die_ref die)
8133 {
8134 dw_die_ref c;
8135 dw_die_ref first;
8136 dw_die_ref prev = NULL;
8137 dw_die_ref next = NULL;
8138 dw_die_ref unit = NULL;
8139
8140 first = c = die->die_child;
8141 if (c)
8142 next = c->die_sib;
8143 if (c) do {
8144 if (prev == NULL || prev->die_sib == c)
8145 prev = c;
8146 c = next;
8147 next = (c == first ? NULL : c->die_sib);
8148 if (should_move_die_to_comdat (c))
8149 {
8150 dw_die_ref replacement;
8151 comdat_type_node *type_node;
8152
8153 /* Break out nested types into their own type units. */
8154 break_out_comdat_types (c);
8155
8156 /* Create a new type unit DIE as the root for the new tree, and
8157 add it to the list of comdat types. */
8158 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8159 add_AT_unsigned (unit, DW_AT_language,
8160 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8161 type_node = ggc_cleared_alloc<comdat_type_node> ();
8162 type_node->root_die = unit;
8163 type_node->next = comdat_type_list;
8164 comdat_type_list = type_node;
8165
8166 /* Generate the type signature. */
8167 generate_type_signature (c, type_node);
8168
8169 /* Copy the declaration context, attributes, and children of the
8170 declaration into the new type unit DIE, then remove this DIE
8171 from the main CU (or replace it with a skeleton if necessary). */
8172 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8173 type_node->skeleton_die = replacement;
8174
8175 /* Add the DIE to the new compunit. */
8176 add_child_die (unit, c);
8177
8178 /* Types can reference DWARF procedures for type size or data location
8179 expressions. Calls in DWARF expressions cannot target procedures
8180 that are not in the same section. So we must copy DWARF procedures
8181 along with this type and then rewrite references to them. */
8182 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8183 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8184
8185 if (replacement != NULL)
8186 c = replacement;
8187 }
8188 else if (c->die_tag == DW_TAG_namespace
8189 || c->die_tag == DW_TAG_class_type
8190 || c->die_tag == DW_TAG_structure_type
8191 || c->die_tag == DW_TAG_union_type)
8192 {
8193 /* Look for nested types that can be broken out. */
8194 break_out_comdat_types (c);
8195 }
8196 } while (next != NULL);
8197 }
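/* Sketch of the overall effect (made-up names): a worthy structure_type S
   that is a child of the compile unit is moved, together with its
   declaration context, into a fresh DW_TAG_type_unit whose signature comes
   from generate_type_signature.  If S has descendants that must stay behind
   (see generate_skeleton), a skeleton DIE carrying DW_AT_signature is left
   in the main CU in its place; otherwise S simply disappears from the main
   CU.  DWARF procedures referenced from S's location expressions are copied
   into the type unit as well.  */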
8198
8199 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8200 Enter all the cloned children into the hash table decl_table. */
8201
8202 static dw_die_ref
8203 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8204 {
8205 dw_die_ref c;
8206 dw_die_ref clone;
8207 struct decl_table_entry *entry;
8208 decl_table_entry **slot;
8209
8210 if (die->die_tag == DW_TAG_subprogram)
8211 clone = clone_as_declaration (die);
8212 else
8213 clone = clone_die (die);
8214
8215 slot = decl_table->find_slot_with_hash (die,
8216 htab_hash_pointer (die), INSERT);
8217
8218 /* Assert that DIE isn't in the hash table yet. If it were already there,
8219 its ancestors would necessarily be there as well, and clone_tree_partial
8220 wouldn't have been called. */
8221 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8222
8223 entry = XCNEW (struct decl_table_entry);
8224 entry->orig = die;
8225 entry->copy = clone;
8226 *slot = entry;
8227
8228 if (die->die_tag != DW_TAG_subprogram)
8229 FOR_EACH_CHILD (die, c,
8230 add_child_die (clone, clone_tree_partial (c, decl_table)));
8231
8232 return clone;
8233 }
8234
8235 /* Walk the DIE and its children, looking for references to incomplete
8236 or trivial types that are unmarked (i.e., that are not in the current
8237 type_unit). */
8238
8239 static void
8240 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8241 {
8242 dw_die_ref c;
8243 dw_attr_node *a;
8244 unsigned ix;
8245
8246 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8247 {
8248 if (AT_class (a) == dw_val_class_die_ref)
8249 {
8250 dw_die_ref targ = AT_ref (a);
8251 decl_table_entry **slot;
8252 struct decl_table_entry *entry;
8253
8254 if (targ->die_mark != 0 || targ->comdat_type_p)
8255 continue;
8256
8257 slot = decl_table->find_slot_with_hash (targ,
8258 htab_hash_pointer (targ),
8259 INSERT);
8260
8261 if (*slot != HTAB_EMPTY_ENTRY)
8262 {
8263 /* TARG has already been copied, so we just need to
8264 modify the reference to point to the copy. */
8265 entry = *slot;
8266 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8267 }
8268 else
8269 {
8270 dw_die_ref parent = unit;
8271 dw_die_ref copy = clone_die (targ);
8272
8273 /* Record in DECL_TABLE that TARG has been copied.
8274 Need to do this now, before the recursive call,
8275 because DECL_TABLE may be expanded and SLOT
8276 would no longer be a valid pointer. */
8277 entry = XCNEW (struct decl_table_entry);
8278 entry->orig = targ;
8279 entry->copy = copy;
8280 *slot = entry;
8281
8282 /* If TARG is not a declaration DIE, we need to copy its
8283 children. */
8284 if (!is_declaration_die (targ))
8285 {
8286 FOR_EACH_CHILD (
8287 targ, c,
8288 add_child_die (copy,
8289 clone_tree_partial (c, decl_table)));
8290 }
8291
8292 /* Make sure the cloned tree is marked as part of the
8293 type unit. */
8294 mark_dies (copy);
8295
8296 /* If TARG has surrounding context, copy its ancestor tree
8297 into the new type unit. */
8298 if (targ->die_parent != NULL
8299 && !is_unit_die (targ->die_parent))
8300 parent = copy_ancestor_tree (unit, targ->die_parent,
8301 decl_table);
8302
8303 add_child_die (parent, copy);
8304 a->dw_attr_val.v.val_die_ref.die = copy;
8305
8306 /* Make sure the newly-copied DIE is walked. If it was
8307 installed in a previously-added context, it won't
8308 get visited otherwise. */
8309 if (parent != unit)
8310 {
8311 /* Find the highest point of the newly-added tree,
8312 mark each node along the way, and walk from there. */
8313 parent->die_mark = 1;
8314 while (parent->die_parent
8315 && parent->die_parent->die_mark == 0)
8316 {
8317 parent = parent->die_parent;
8318 parent->die_mark = 1;
8319 }
8320 copy_decls_walk (unit, parent, decl_table);
8321 }
8322 }
8323 }
8324 }
8325
8326 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8327 }
8328
8329 /* Copy declarations for "unworthy" types into the new comdat section.
8330 Incomplete types, modified types, and certain other types aren't broken
8331 out into comdat sections of their own, so they don't have a signature,
8332 and we need to copy the declaration into the same section so that we
8333 don't have an external reference. */
8334
8335 static void
8336 copy_decls_for_unworthy_types (dw_die_ref unit)
8337 {
8338 mark_dies (unit);
8339 decl_hash_type decl_table (10);
8340 copy_decls_walk (unit, unit, &decl_table);
8341 unmark_dies (unit);
8342 }
8343
8344 /* Traverse the DIE and add a sibling attribute if it may have the
8345 effect of speeding up access to siblings. To save some space,
8346 avoid generating sibling attributes for DIEs without children. */
8347
8348 static void
8349 add_sibling_attributes (dw_die_ref die)
8350 {
8351 dw_die_ref c;
8352
8353 if (! die->die_child)
8354 return;
8355
8356 if (die->die_parent && die != die->die_parent->die_child)
8357 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8358
8359 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8360 }
8361
8362 /* Output all location lists for the DIE and its children. */
8363
8364 static void
8365 output_location_lists (dw_die_ref die)
8366 {
8367 dw_die_ref c;
8368 dw_attr_node *a;
8369 unsigned ix;
8370
8371 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8372 if (AT_class (a) == dw_val_class_loc_list)
8373 output_loc_list (AT_loc_list (a));
8374
8375 FOR_EACH_CHILD (die, c, output_location_lists (c));
8376 }
8377
8378 /* During assign_location_list_indexes and output_loclists_offsets this is
8379 the current index; afterwards it holds the number of assigned indexes
8380 (i.e. how large the .debug_loclists* offset table should be). */
8381 static unsigned int loc_list_idx;
8382
8383 /* Output all location list offsets for the DIE and its children. */
8384
8385 static void
8386 output_loclists_offsets (dw_die_ref die)
8387 {
8388 dw_die_ref c;
8389 dw_attr_node *a;
8390 unsigned ix;
8391
8392 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8393 if (AT_class (a) == dw_val_class_loc_list)
8394 {
8395 dw_loc_list_ref l = AT_loc_list (a);
8396 if (l->offset_emitted)
8397 continue;
8398 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8399 loc_section_label, NULL);
8400 gcc_assert (l->hash == loc_list_idx);
8401 loc_list_idx++;
8402 l->offset_emitted = true;
8403 }
8404
8405 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8406 }
8407
8408 /* Recursively set indexes of location lists. */
8409
8410 static void
8411 assign_location_list_indexes (dw_die_ref die)
8412 {
8413 dw_die_ref c;
8414 dw_attr_node *a;
8415 unsigned ix;
8416
8417 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8418 if (AT_class (a) == dw_val_class_loc_list)
8419 {
8420 dw_loc_list_ref list = AT_loc_list (a);
8421 if (!list->num_assigned)
8422 {
8423 list->num_assigned = true;
8424 list->hash = loc_list_idx++;
8425 }
8426 }
8427
8428 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8429 }
8430
8431 /* We want to limit the number of external references, because they are
8432 larger than local references: a relocation takes multiple words, and
8433 even a sig8 reference is always eight bytes, whereas a local reference
8434 can be as small as one byte (GCC usually emits DW_FORM_ref4, i.e. four bytes).
8435 So if we encounter multiple external references to the same type DIE, we
8436 make a local typedef stub for it and redirect all references there.
8437
8438 This is the element of the hash table for keeping track of these
8439 references. */
8440
8441 struct external_ref
8442 {
8443 dw_die_ref type;
8444 dw_die_ref stub;
8445 unsigned n_refs;
8446 };
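/* Rough arithmetic behind the trade-off (illustrative sizes, 32-bit DWARF):
   a DW_FORM_ref_addr reference costs four bytes plus a relocation and a
   DW_FORM_ref_sig8 reference costs eight bytes, while a reference to a
   local stub is typically a four-byte DW_FORM_ref4 with no relocation.  So
   once a type is referenced externally more than once, routing the
   references through a small unnamed stub (see dwarf2_build_local_stub
   below) is usually a net win.  */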
8447
8448 /* Hashtable helpers. */
8449
8450 struct external_ref_hasher : free_ptr_hash <external_ref>
8451 {
8452 static inline hashval_t hash (const external_ref *);
8453 static inline bool equal (const external_ref *, const external_ref *);
8454 };
8455
8456 inline hashval_t
8457 external_ref_hasher::hash (const external_ref *r)
8458 {
8459 dw_die_ref die = r->type;
8460 hashval_t h = 0;
8461
8462 /* We can't use the address of the DIE for hashing, because
8463 that will make the order of the stub DIEs non-deterministic. */
8464 if (! die->comdat_type_p)
8465 /* We have a symbol; use it to compute a hash. */
8466 h = htab_hash_string (die->die_id.die_symbol);
8467 else
8468 {
8469 /* We have a type signature; use a subset of the bits as the hash.
8470 The 8-byte signature is at least as large as hashval_t. */
8471 comdat_type_node *type_node = die->die_id.die_type_node;
8472 memcpy (&h, type_node->signature, sizeof (h));
8473 }
8474 return h;
8475 }
8476
8477 inline bool
8478 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8479 {
8480 return r1->type == r2->type;
8481 }
8482
8483 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8484
8485 /* Return a pointer to the external_ref for references to DIE. */
8486
8487 static struct external_ref *
8488 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8489 {
8490 struct external_ref ref, *ref_p;
8491 external_ref **slot;
8492
8493 ref.type = die;
8494 slot = map->find_slot (&ref, INSERT);
8495 if (*slot != HTAB_EMPTY_ENTRY)
8496 return *slot;
8497
8498 ref_p = XCNEW (struct external_ref);
8499 ref_p->type = die;
8500 *slot = ref_p;
8501 return ref_p;
8502 }
8503
8504 /* Subroutine of optimize_external_refs, below.
8505
8506 If we see a type skeleton, record it as our stub. If we see external
8507 references, remember how many we've seen. */
8508
8509 static void
8510 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8511 {
8512 dw_die_ref c;
8513 dw_attr_node *a;
8514 unsigned ix;
8515 struct external_ref *ref_p;
8516
8517 if (is_type_die (die)
8518 && (c = get_AT_ref (die, DW_AT_signature)))
8519 {
8520 /* This is a local skeleton; use it for local references. */
8521 ref_p = lookup_external_ref (map, c);
8522 ref_p->stub = die;
8523 }
8524
8525 /* Scan the DIE references, and remember any that refer to DIEs from
8526 other CUs (i.e. those which are not marked). */
8527 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8528 if (AT_class (a) == dw_val_class_die_ref
8529 && (c = AT_ref (a))->die_mark == 0
8530 && is_type_die (c))
8531 {
8532 ref_p = lookup_external_ref (map, c);
8533 ref_p->n_refs++;
8534 }
8535
8536 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8537 }
8538
8539 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8540 points to an external_ref, DATA is the CU we're processing. If we don't
8541 already have a local stub, and we have multiple refs, build a stub. */
8542
8543 int
8544 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8545 {
8546 struct external_ref *ref_p = *slot;
8547
8548 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
8549 {
8550 /* We have multiple references to this type, so build a small stub.
8551 Both of these forms are a bit dodgy from the perspective of the
8552 DWARF standard, since technically they should have names. */
8553 dw_die_ref cu = data;
8554 dw_die_ref type = ref_p->type;
8555 dw_die_ref stub = NULL;
8556
8557 if (type->comdat_type_p)
8558 {
8559 /* If we refer to this type via sig8, use AT_signature. */
8560 stub = new_die (type->die_tag, cu, NULL_TREE);
8561 add_AT_die_ref (stub, DW_AT_signature, type);
8562 }
8563 else
8564 {
8565 /* Otherwise, use a typedef with no name. */
8566 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
8567 add_AT_die_ref (stub, DW_AT_type, type);
8568 }
8569
8570 stub->die_mark++;
8571 ref_p->stub = stub;
8572 }
8573 return 1;
8574 }
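/* The two stub shapes built above, roughly (deliberately nameless, as the
   comment above notes):

     comdat type:                          non-comdat type:
       DW_TAG_<same tag as the type>         DW_TAG_typedef
         DW_AT_signature -> <type>             DW_AT_type -> <type>

   build_abbrev_table later redirects the external references at the stub,
   turning them into cheap local references.  */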
8575
8576 /* DIE is a unit; look through all the DIE references to see if there are
8577 any external references to types, and if so, create local stubs for
8578 them which will be applied in build_abbrev_table. This is useful because
8579 references to local DIEs are smaller. */
8580
8581 static external_ref_hash_type *
8582 optimize_external_refs (dw_die_ref die)
8583 {
8584 external_ref_hash_type *map = new external_ref_hash_type (10);
8585 optimize_external_refs_1 (die, map);
8586 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
8587 return map;
8588 }
8589
8590 /* The following 3 variables are temporaries that are computed only during the
8591 build_abbrev_table call and used and released during the following
8592 optimize_abbrev_table call. */
8593
8594 /* First abbrev_id that can be optimized based on usage. */
8595 static unsigned int abbrev_opt_start;
8596
8597 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
8598 abbrev_id smaller than this, because they must be already sized
8599 during build_abbrev_table). */
8600 static unsigned int abbrev_opt_base_type_end;
8601
8602 /* Vector of usage counts during build_abbrev_table. Indexed by
8603 abbrev_id - abbrev_opt_start. */
8604 static vec<unsigned int> abbrev_usage_count;
8605
8606 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
8607 static vec<dw_die_ref> sorted_abbrev_dies;
8608
8609 /* The format of each DIE (and its attribute value pairs) is encoded in an
8610 abbreviation table. This routine builds the abbreviation table and assigns
8611 a unique abbreviation id for each abbreviation entry. The children of each
8612 die are visited recursively. */
8613
8614 static void
8615 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
8616 {
8617 unsigned int abbrev_id = 0;
8618 dw_die_ref c;
8619 dw_attr_node *a;
8620 unsigned ix;
8621 dw_die_ref abbrev;
8622
8623 /* Scan the DIE references, and replace any that refer to
8624 DIEs from other CUs (i.e. those which are not marked) with
8625 the local stubs we built in optimize_external_refs. */
8626 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8627 if (AT_class (a) == dw_val_class_die_ref
8628 && (c = AT_ref (a))->die_mark == 0)
8629 {
8630 struct external_ref *ref_p;
8631 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
8632
8633 ref_p = lookup_external_ref (extern_map, c);
8634 if (ref_p->stub && ref_p->stub != die)
8635 change_AT_die_ref (a, ref_p->stub);
8636 else
8637 /* We aren't changing this reference, so mark it external. */
8638 set_AT_ref_external (a, 1);
8639 }
8640
8641 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
8642 {
8643 dw_attr_node *die_a, *abbrev_a;
8644 unsigned ix;
8645 bool ok = true;
8646
8647 if (abbrev_id == 0)
8648 continue;
8649 if (abbrev->die_tag != die->die_tag)
8650 continue;
8651 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
8652 continue;
8653
8654 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
8655 continue;
8656
8657 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
8658 {
8659 abbrev_a = &(*abbrev->die_attr)[ix];
8660 if ((abbrev_a->dw_attr != die_a->dw_attr)
8661 || (value_format (abbrev_a) != value_format (die_a)))
8662 {
8663 ok = false;
8664 break;
8665 }
8666 }
8667 if (ok)
8668 break;
8669 }
8670
8671 if (abbrev_id >= vec_safe_length (abbrev_die_table))
8672 {
8673 vec_safe_push (abbrev_die_table, die);
8674 if (abbrev_opt_start)
8675 abbrev_usage_count.safe_push (0);
8676 }
8677 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
8678 {
8679 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
8680 sorted_abbrev_dies.safe_push (die);
8681 }
8682
8683 die->die_abbrev = abbrev_id;
8684 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
8685 }
8686
8687 /* Callback function for sorted_abbrev_dies vector sorting. We sort
8688 by die_abbrev's usage count, from the most commonly used
8689 abbreviation to the least. */
8690
8691 static int
8692 die_abbrev_cmp (const void *p1, const void *p2)
8693 {
8694 dw_die_ref die1 = *(const dw_die_ref *) p1;
8695 dw_die_ref die2 = *(const dw_die_ref *) p2;
8696
8697 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
8698 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
8699
8700 if (die1->die_abbrev >= abbrev_opt_base_type_end
8701 && die2->die_abbrev >= abbrev_opt_base_type_end)
8702 {
8703 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
8704 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
8705 return -1;
8706 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
8707 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
8708 return 1;
8709 }
8710
8711 /* Stabilize the sort. */
8712 if (die1->die_abbrev < die2->die_abbrev)
8713 return -1;
8714 if (die1->die_abbrev > die2->die_abbrev)
8715 return 1;
8716
8717 return 0;
8718 }
8719
8720 /* Convert dw_val_class_const, dw_val_class_unsigned_const and
8721 dw_val_class_file class attributes of the DIEs between
8722 sorted_abbrev_dies[first_id] and sorted_abbrev_dies[end - 1] into their
8723 dw_val_class_*_implicit counterparts. */
8724
8725 static void
8726 optimize_implicit_const (unsigned int first_id, unsigned int end,
8727 vec<bool> &implicit_consts)
8728 {
8729 /* It never makes sense if there is just one DIE using the abbreviation. */
8730 if (end < first_id + 2)
8731 return;
8732
8733 dw_attr_node *a;
8734 unsigned ix, i;
8735 dw_die_ref die = sorted_abbrev_dies[first_id];
8736 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8737 if (implicit_consts[ix])
8738 {
8739 enum dw_val_class new_class = dw_val_class_none;
8740 switch (AT_class (a))
8741 {
8742 case dw_val_class_unsigned_const:
8743 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
8744 continue;
8745
8746 /* The .debug_abbrev section will grow by
8747 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
8748 in all the DIEs using that abbreviation. */
8749 if (constant_size (AT_unsigned (a)) * (end - first_id)
8750 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
8751 continue;
8752
8753 new_class = dw_val_class_unsigned_const_implicit;
8754 break;
8755
8756 case dw_val_class_const:
8757 new_class = dw_val_class_const_implicit;
8758 break;
8759
8760 case dw_val_class_file:
8761 new_class = dw_val_class_file_implicit;
8762 break;
8763
8764 default:
8765 continue;
8766 }
8767 for (i = first_id; i < end; i++)
8768 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
8769 = new_class;
8770 }
8771 }
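/* Worked example of the size check above (invented numbers): suppose 40
   DIEs share one abbreviation and each carries DW_AT_byte_size 24 as an
   unsigned constant.  Emitted normally that costs constant_size (24) == 1
   byte in each of the 40 DIEs; as DW_FORM_implicit_const it costs a single
   one-byte sleb128 in .debug_abbrev and nothing in the DIEs, so the
   40 * 1 > 1 comparison above lets the conversion happen.  */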
8772
8773 /* Attempt to optimize the abbreviation table for abbreviations with id
8774 abbrev_opt_start and above. */
8775
8776 static void
8777 optimize_abbrev_table (void)
8778 {
8779 if (abbrev_opt_start
8780 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
8781 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
8782 {
8783 auto_vec<bool, 32> implicit_consts;
8784 sorted_abbrev_dies.qsort (die_abbrev_cmp);
8785
8786 unsigned int abbrev_id = abbrev_opt_start - 1;
8787 unsigned int first_id = ~0U;
8788 unsigned int last_abbrev_id = 0;
8789 unsigned int i;
8790 dw_die_ref die;
8791 if (abbrev_opt_base_type_end > abbrev_opt_start)
8792 abbrev_id = abbrev_opt_base_type_end - 1;
8793 /* Reassign abbreviation ids from abbrev_opt_start above, so that
8794 most commonly used abbreviations come first. */
8795 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
8796 {
8797 dw_attr_node *a;
8798 unsigned ix;
8799
8800 /* If calc_base_type_die_sizes has been called, the CU and
8801 base types after it can't be optimized, because we've already
8802 calculated their DIE offsets. We've sorted them first. */
8803 if (die->die_abbrev < abbrev_opt_base_type_end)
8804 continue;
8805 if (die->die_abbrev != last_abbrev_id)
8806 {
8807 last_abbrev_id = die->die_abbrev;
8808 if (dwarf_version >= 5 && first_id != ~0U)
8809 optimize_implicit_const (first_id, i, implicit_consts);
8810 abbrev_id++;
8811 (*abbrev_die_table)[abbrev_id] = die;
8812 if (dwarf_version >= 5)
8813 {
8814 first_id = i;
8815 implicit_consts.truncate (0);
8816
8817 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8818 switch (AT_class (a))
8819 {
8820 case dw_val_class_const:
8821 case dw_val_class_unsigned_const:
8822 case dw_val_class_file:
8823 implicit_consts.safe_push (true);
8824 break;
8825 default:
8826 implicit_consts.safe_push (false);
8827 break;
8828 }
8829 }
8830 }
8831 else if (dwarf_version >= 5)
8832 {
8833 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8834 if (!implicit_consts[ix])
8835 continue;
8836 else
8837 {
8838 dw_attr_node *other_a
8839 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
8840 if (!dw_val_equal_p (&a->dw_attr_val,
8841 &other_a->dw_attr_val))
8842 implicit_consts[ix] = false;
8843 }
8844 }
8845 die->die_abbrev = abbrev_id;
8846 }
8847 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
8848 if (dwarf_version >= 5 && first_id != ~0U)
8849 optimize_implicit_const (first_id, i, implicit_consts);
8850 }
8851
8852 abbrev_opt_start = 0;
8853 abbrev_opt_base_type_end = 0;
8854 abbrev_usage_count.release ();
8855 sorted_abbrev_dies.release ();
8856 }
8857 \f
8858 /* Return the power-of-two number of bytes necessary to represent VALUE. */
8859
8860 static int
8861 constant_size (unsigned HOST_WIDE_INT value)
8862 {
8863 int log;
8864
8865 if (value == 0)
8866 log = 0;
8867 else
8868 log = floor_log2 (value);
8869
8870 log = log / 8;
8871 log = 1 << (floor_log2 (log) + 1);
8872
8873 return log;
8874 }
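/* For example, constant_size (300): floor_log2 (300) == 8, 8 / 8 == 1,
   then 1 << (floor_log2 (1) + 1) == 2, i.e. the value needs a two-byte
   form.  Values below 256 yield 1, and the result is always one of
   1, 2, 4 or 8.  */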
8875
8876 /* Return the size of a DIE as it is represented in the
8877 .debug_info section. */
8878
8879 static unsigned long
8880 size_of_die (dw_die_ref die)
8881 {
8882 unsigned long size = 0;
8883 dw_attr_node *a;
8884 unsigned ix;
8885 enum dwarf_form form;
8886
8887 size += size_of_uleb128 (die->die_abbrev);
8888 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8889 {
8890 switch (AT_class (a))
8891 {
8892 case dw_val_class_addr:
8893 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
8894 {
8895 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
8896 size += size_of_uleb128 (AT_index (a));
8897 }
8898 else
8899 size += DWARF2_ADDR_SIZE;
8900 break;
8901 case dw_val_class_offset:
8902 size += DWARF_OFFSET_SIZE;
8903 break;
8904 case dw_val_class_loc:
8905 {
8906 unsigned long lsize = size_of_locs (AT_loc (a));
8907
8908 /* Block length. */
8909 if (dwarf_version >= 4)
8910 size += size_of_uleb128 (lsize);
8911 else
8912 size += constant_size (lsize);
8913 size += lsize;
8914 }
8915 break;
8916 case dw_val_class_loc_list:
8917 if (dwarf_split_debug_info && dwarf_version >= 5)
8918 {
8919 gcc_assert (AT_loc_list (a)->num_assigned);
8920 size += size_of_uleb128 (AT_loc_list (a)->hash);
8921 }
8922 else
8923 size += DWARF_OFFSET_SIZE;
8924 break;
8925 case dw_val_class_range_list:
8926 if (value_format (a) == DW_FORM_rnglistx)
8927 {
8928 gcc_assert (rnglist_idx);
8929 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
8930 size += size_of_uleb128 (r->idx);
8931 }
8932 else
8933 size += DWARF_OFFSET_SIZE;
8934 break;
8935 case dw_val_class_const:
8936 size += size_of_sleb128 (AT_int (a));
8937 break;
8938 case dw_val_class_unsigned_const:
8939 {
8940 int csize = constant_size (AT_unsigned (a));
8941 if (dwarf_version == 3
8942 && a->dw_attr == DW_AT_data_member_location
8943 && csize >= 4)
8944 size += size_of_uleb128 (AT_unsigned (a));
8945 else
8946 size += csize;
8947 }
8948 break;
8949 case dw_val_class_const_implicit:
8950 case dw_val_class_unsigned_const_implicit:
8951 case dw_val_class_file_implicit:
8952 /* These occupy no size in the DIE, just an extra sleb128 in
8953 .debug_abbrev. */
8954 break;
8955 case dw_val_class_const_double:
8956 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
8957 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
8958 size++; /* block */
8959 break;
8960 case dw_val_class_wide_int:
8961 size += (get_full_len (*a->dw_attr_val.v.val_wide)
8962 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
8963 if (get_full_len (*a->dw_attr_val.v.val_wide)
8964 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
8965 size++; /* block */
8966 break;
8967 case dw_val_class_vec:
8968 size += constant_size (a->dw_attr_val.v.val_vec.length
8969 * a->dw_attr_val.v.val_vec.elt_size)
8970 + a->dw_attr_val.v.val_vec.length
8971 * a->dw_attr_val.v.val_vec.elt_size; /* block */
8972 break;
8973 case dw_val_class_flag:
8974 if (dwarf_version >= 4)
8975 /* Currently all add_AT_flag calls pass in 1 as last argument,
8976 so DW_FORM_flag_present can be used. If that ever changes,
8977 we'll need to use DW_FORM_flag and have some optimization
8978 in build_abbrev_table that will change those to
8979 DW_FORM_flag_present if it is set to 1 in all DIEs using
8980 the same abbrev entry. */
8981 gcc_assert (a->dw_attr_val.v.val_flag == 1);
8982 else
8983 size += 1;
8984 break;
8985 case dw_val_class_die_ref:
8986 if (AT_ref_external (a))
8987 {
8988 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
8989 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
8990 is sized by target address length, whereas in DWARF3
8991 it's always sized as an offset. */
8992 if (use_debug_types)
8993 size += DWARF_TYPE_SIGNATURE_SIZE;
8994 else if (dwarf_version == 2)
8995 size += DWARF2_ADDR_SIZE;
8996 else
8997 size += DWARF_OFFSET_SIZE;
8998 }
8999 else
9000 size += DWARF_OFFSET_SIZE;
9001 break;
9002 case dw_val_class_fde_ref:
9003 size += DWARF_OFFSET_SIZE;
9004 break;
9005 case dw_val_class_lbl_id:
9006 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9007 {
9008 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9009 size += size_of_uleb128 (AT_index (a));
9010 }
9011 else
9012 size += DWARF2_ADDR_SIZE;
9013 break;
9014 case dw_val_class_lineptr:
9015 case dw_val_class_macptr:
9016 case dw_val_class_loclistsptr:
9017 size += DWARF_OFFSET_SIZE;
9018 break;
9019 case dw_val_class_str:
9020 form = AT_string_form (a);
9021 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9022 size += DWARF_OFFSET_SIZE;
9023 else if (form == DW_FORM_GNU_str_index)
9024 size += size_of_uleb128 (AT_index (a));
9025 else
9026 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9027 break;
9028 case dw_val_class_file:
9029 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9030 break;
9031 case dw_val_class_data8:
9032 size += 8;
9033 break;
9034 case dw_val_class_vms_delta:
9035 size += DWARF_OFFSET_SIZE;
9036 break;
9037 case dw_val_class_high_pc:
9038 size += DWARF2_ADDR_SIZE;
9039 break;
9040 case dw_val_class_discr_value:
9041 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9042 break;
9043 case dw_val_class_discr_list:
9044 {
9045 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9046
9047 /* This is a block, so we have the block length and then its
9048 data. */
9049 size += constant_size (block_size) + block_size;
9050 }
9051 break;
9052 default:
9053 gcc_unreachable ();
9054 }
9055 }
9056
9057 return size;
9058 }
9059
9060 /* Size the debugging information associated with a given DIE. Visits the
9061 DIE's children recursively. Updates the global variable next_die_offset
9062 each time through. Uses the current value of next_die_offset to update the
9063 die_offset field in each DIE. */
9064
9065 static void
9066 calc_die_sizes (dw_die_ref die)
9067 {
9068 dw_die_ref c;
9069
9070 gcc_assert (die->die_offset == 0
9071 || (unsigned long int) die->die_offset == next_die_offset);
9072 die->die_offset = next_die_offset;
9073 next_die_offset += size_of_die (die);
9074
9075 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9076
9077 if (die->die_child != NULL)
9078 /* Count the null byte used to terminate sibling lists. */
9079 next_die_offset += 1;
9080 }
9081
9082 /* Size just the base type children at the start of the CU.
9083 This is needed because build_abbrev_table needs to size locs,
9084 and sizing of type-based stack ops needs to know die_offset
9085 values for the base types. */
9086
9087 static void
9088 calc_base_type_die_sizes (void)
9089 {
9090 unsigned long die_offset = (dwarf_split_debug_info
9091 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9092 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9093 unsigned int i;
9094 dw_die_ref base_type;
9095 #if ENABLE_ASSERT_CHECKING
9096 dw_die_ref prev = comp_unit_die ()->die_child;
9097 #endif
9098
9099 die_offset += size_of_die (comp_unit_die ());
9100 for (i = 0; base_types.iterate (i, &base_type); i++)
9101 {
9102 #if ENABLE_ASSERT_CHECKING
9103 gcc_assert (base_type->die_offset == 0
9104 && prev->die_sib == base_type
9105 && base_type->die_child == NULL
9106 && base_type->die_abbrev);
9107 prev = base_type;
9108 #endif
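 /* Remember one past the highest abbrev code used by a base type;
 presumably optimize_abbrev_table must leave these codes alone, since
 the die_offset values computed below already depend on their
 uleb128-encoded size. */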
9109 if (abbrev_opt_start
9110 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9111 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9112 base_type->die_offset = die_offset;
9113 die_offset += size_of_die (base_type);
9114 }
9115 }
9116
9117 /* Set the marks for a die and its children. We do this so
9118 that we know whether or not a reference needs to use DW_FORM_ref_addr; only
9119 DIEs in the same CU will be marked. We used to clear out the offset
9120 and use that as the flag, but ran into ordering problems. */
9121
9122 static void
9123 mark_dies (dw_die_ref die)
9124 {
9125 dw_die_ref c;
9126
9127 gcc_assert (!die->die_mark);
9128
9129 die->die_mark = 1;
9130 FOR_EACH_CHILD (die, c, mark_dies (c));
9131 }
9132
9133 /* Clear the marks for a die and its children. */
9134
9135 static void
9136 unmark_dies (dw_die_ref die)
9137 {
9138 dw_die_ref c;
9139
9140 if (! use_debug_types)
9141 gcc_assert (die->die_mark);
9142
9143 die->die_mark = 0;
9144 FOR_EACH_CHILD (die, c, unmark_dies (c));
9145 }
9146
9147 /* Clear the marks for a die, its children and referred dies. */
9148
9149 static void
9150 unmark_all_dies (dw_die_ref die)
9151 {
9152 dw_die_ref c;
9153 dw_attr_node *a;
9154 unsigned ix;
9155
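 /* The mark doubles as a visited flag: DIE reference chains can form
 cycles, and this early return keeps the recursion from looping
 forever. */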
9156 if (!die->die_mark)
9157 return;
9158 die->die_mark = 0;
9159
9160 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9161
9162 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9163 if (AT_class (a) == dw_val_class_die_ref)
9164 unmark_all_dies (AT_ref (a));
9165 }
9166
9167 /* Calculate if the entry should appear in the final output file. It may be
9168 from a pruned type. */
9169
9170 static bool
9171 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9172 {
9173 /* By limiting gnu pubnames to definitions only, gold can generate a
9174 gdb index without entries for declarations, which don't include
9175 enough information to be useful. */
9176 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9177 return false;
9178
9179 if (table == pubname_table)
9180 {
9181 /* Enumerator names are part of the pubname table, but the
9182 parent DW_TAG_enumeration_type die may have been pruned.
9183 Don't output them if that is the case. */
9184 if (p->die->die_tag == DW_TAG_enumerator
9185 && (p->die->die_parent == NULL
9186 || !p->die->die_parent->die_perennial_p))
9187 return false;
9188
9189 /* Everything else in the pubname table is included. */
9190 return true;
9191 }
9192
9193 /* The pubtypes table shouldn't include types that have been
9194 pruned. */
9195 return (p->die->die_offset != 0
9196 || !flag_eliminate_unused_debug_types);
9197 }
9198
9199 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9200 generated for the compilation unit. */
9201
9202 static unsigned long
9203 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9204 {
9205 unsigned long size;
9206 unsigned i;
9207 pubname_entry *p;
9208 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9209
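 /* Each entry contributes a DIE offset, its NUL-terminated name and,
 for the GNU pubnames/pubtypes variant (-ggnu-pubnames), one flag
 byte. */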
9210 size = DWARF_PUBNAMES_HEADER_SIZE;
9211 FOR_EACH_VEC_ELT (*names, i, p)
9212 if (include_pubname_in_output (names, p))
9213 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9214
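 /* Account for the zero DIE offset that terminates the table. */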
9215 size += DWARF_OFFSET_SIZE;
9216 return size;
9217 }
9218
9219 /* Return the size of the information in the .debug_aranges section. */
9220
9221 static unsigned long
9222 size_of_aranges (void)
9223 {
9224 unsigned long size;
9225
9226 size = DWARF_ARANGES_HEADER_SIZE;
9227
9228 /* Count the address/length pair for this compilation unit. */
9229 if (text_section_used)
9230 size += 2 * DWARF2_ADDR_SIZE;
9231 if (cold_text_section_used)
9232 size += 2 * DWARF2_ADDR_SIZE;
9233 if (have_multiple_function_sections)
9234 {
9235 unsigned fde_idx;
9236 dw_fde_ref fde;
9237
9238 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9239 {
9240 if (DECL_IGNORED_P (fde->decl))
9241 continue;
9242 if (!fde->in_std_section)
9243 size += 2 * DWARF2_ADDR_SIZE;
9244 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9245 size += 2 * DWARF2_ADDR_SIZE;
9246 }
9247 }
9248
9249 /* Count the two zero words used to terminate the address range table. */
9250 size += 2 * DWARF2_ADDR_SIZE;
9251 return size;
9252 }
9253 \f
9254 /* Select the encoding of an attribute value. */
9255
9256 static enum dwarf_form
9257 value_format (dw_attr_node *a)
9258 {
9259 switch (AT_class (a))
9260 {
9261 case dw_val_class_addr:
9262 /* Only very few attributes allow DW_FORM_addr. */
9263 switch (a->dw_attr)
9264 {
9265 case DW_AT_low_pc:
9266 case DW_AT_high_pc:
9267 case DW_AT_entry_pc:
9268 case DW_AT_trampoline:
9269 return (AT_index (a) == NOT_INDEXED
9270 ? DW_FORM_addr : DW_FORM_GNU_addr_index);
9271 default:
9272 break;
9273 }
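 /* Any other attribute with an address-class value is encoded as a
 plain constant of the target address size. */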
9274 switch (DWARF2_ADDR_SIZE)
9275 {
9276 case 1:
9277 return DW_FORM_data1;
9278 case 2:
9279 return DW_FORM_data2;
9280 case 4:
9281 return DW_FORM_data4;
9282 case 8:
9283 return DW_FORM_data8;
9284 default:
9285 gcc_unreachable ();
9286 }
9287 case dw_val_class_loc_list:
9288 if (dwarf_split_debug_info
9289 && dwarf_version >= 5
9290 && AT_loc_list (a)->num_assigned)
9291 return DW_FORM_loclistx;
9292 /* FALLTHRU */
9293 case dw_val_class_range_list:
9294 /* For range lists in DWARF 5, use DW_FORM_rnglistx in .debug_info.dwo,
9295 but in .debug_info use DW_FORM_sec_offset, which is shorter if we
9296 care about the sizes of .debug* sections in shared libraries and
9297 executables and don't take into account relocations that affect just
9298 relocatable objects - for DW_FORM_rnglistx we'd also have to emit an
9299 offset table in the .debug_rnglists section. */
9300 if (dwarf_split_debug_info
9301 && dwarf_version >= 5
9302 && AT_class (a) == dw_val_class_range_list
9303 && rnglist_idx
9304 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9305 return DW_FORM_rnglistx;
9306 if (dwarf_version >= 4)
9307 return DW_FORM_sec_offset;
9308 /* FALLTHRU */
9309 case dw_val_class_vms_delta:
9310 case dw_val_class_offset:
9311 switch (DWARF_OFFSET_SIZE)
9312 {
9313 case 4:
9314 return DW_FORM_data4;
9315 case 8:
9316 return DW_FORM_data8;
9317 default:
9318 gcc_unreachable ();
9319 }
9320 case dw_val_class_loc:
9321 if (dwarf_version >= 4)
9322 return DW_FORM_exprloc;
9323 switch (constant_size (size_of_locs (AT_loc (a))))
9324 {
9325 case 1:
9326 return DW_FORM_block1;
9327 case 2:
9328 return DW_FORM_block2;
9329 case 4:
9330 return DW_FORM_block4;
9331 default:
9332 gcc_unreachable ();
9333 }
9334 case dw_val_class_const:
9335 return DW_FORM_sdata;
9336 case dw_val_class_unsigned_const:
9337 switch (constant_size (AT_unsigned (a)))
9338 {
9339 case 1:
9340 return DW_FORM_data1;
9341 case 2:
9342 return DW_FORM_data2;
9343 case 4:
9344 /* In DWARF3 DW_AT_data_member_location with
9345 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9346 constant, so we need to use DW_FORM_udata if we need
9347 a large constant. */
9348 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9349 return DW_FORM_udata;
9350 return DW_FORM_data4;
9351 case 8:
9352 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9353 return DW_FORM_udata;
9354 return DW_FORM_data8;
9355 default:
9356 gcc_unreachable ();
9357 }
9358 case dw_val_class_const_implicit:
9359 case dw_val_class_unsigned_const_implicit:
9360 case dw_val_class_file_implicit:
9361 return DW_FORM_implicit_const;
9362 case dw_val_class_const_double:
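 /* A const_double carries two HOST_WIDE_INTs, so pick a fixed-size
 data form twice the width of one HOST_WIDE_INT; when no such form
 exists (or DWARF < 5 lacks DW_FORM_data16), fall back to a
 length-prefixed DW_FORM_block1. */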
9363 switch (HOST_BITS_PER_WIDE_INT)
9364 {
9365 case 8:
9366 return DW_FORM_data2;
9367 case 16:
9368 return DW_FORM_data4;
9369 case 32:
9370 return DW_FORM_data8;
9371 case 64:
9372 if (dwarf_version >= 5)
9373 return DW_FORM_data16;
9374 /* FALLTHRU */
9375 default:
9376 return DW_FORM_block1;
9377 }
9378 case dw_val_class_wide_int:
9379 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9380 {
9381 case 8:
9382 return DW_FORM_data1;
9383 case 16:
9384 return DW_FORM_data2;
9385 case 32:
9386 return DW_FORM_data4;
9387 case 64:
9388 return DW_FORM_data8;
9389 case 128:
9390 if (dwarf_version >= 5)
9391 return DW_FORM_data16;
9392 /* FALLTHRU */
9393 default:
9394 return DW_FORM_block1;
9395 }
9396 case dw_val_class_vec:
9397 switch (constant_size (a->dw_attr_val.v.val_vec.length
9398 * a->dw_attr_val.v.val_vec.elt_size))
9399 {
9400 case 1:
9401 return DW_FORM_block1;
9402 case 2:
9403 return DW_FORM_block2;
9404 case 4:
9405 return DW_FORM_block4;
9406 default:
9407 gcc_unreachable ();
9408 }
9409 case dw_val_class_flag:
9410 if (dwarf_version >= 4)
9411 {
9412 /* Currently all add_AT_flag calls pass in 1 as last argument,
9413 so DW_FORM_flag_present can be used. If that ever changes,
9414 we'll need to use DW_FORM_flag and have some optimization
9415 in build_abbrev_table that will change those to
9416 DW_FORM_flag_present if it is set to 1 in all DIEs using
9417 the same abbrev entry. */
9418 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9419 return DW_FORM_flag_present;
9420 }
9421 return DW_FORM_flag;
9422 case dw_val_class_die_ref:
9423 if (AT_ref_external (a))
9424 return use_debug_types ? DW_FORM_ref_sig8 : DW_FORM_ref_addr;
9425 else
9426 return DW_FORM_ref;
9427 case dw_val_class_fde_ref:
9428 return DW_FORM_data;
9429 case dw_val_class_lbl_id:
9430 return (AT_index (a) == NOT_INDEXED
9431 ? DW_FORM_addr : DW_FORM_GNU_addr_index);
9432 case dw_val_class_lineptr:
9433 case dw_val_class_macptr:
9434 case dw_val_class_loclistsptr:
9435 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
9436 case dw_val_class_str:
9437 return AT_string_form (a);
9438 case dw_val_class_file:
9439 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9440 {
9441 case 1:
9442 return DW_FORM_data1;
9443 case 2:
9444 return DW_FORM_data2;
9445 case 4:
9446 return DW_FORM_data4;
9447 default:
9448 gcc_unreachable ();
9449 }
9450
9451 case dw_val_class_data8:
9452 return DW_FORM_data8;
9453
9454 case dw_val_class_high_pc:
9455 switch (DWARF2_ADDR_SIZE)
9456 {
9457 case 1:
9458 return DW_FORM_data1;
9459 case 2:
9460 return DW_FORM_data2;
9461 case 4:
9462 return DW_FORM_data4;
9463 case 8:
9464 return DW_FORM_data8;
9465 default:
9466 gcc_unreachable ();
9467 }
9468
9469 case dw_val_class_discr_value:
9470 return (a->dw_attr_val.v.val_discr_value.pos
9471 ? DW_FORM_udata
9472 : DW_FORM_sdata);
9473 case dw_val_class_discr_list:
9474 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9475 {
9476 case 1:
9477 return DW_FORM_block1;
9478 case 2:
9479 return DW_FORM_block2;
9480 case 4:
9481 return DW_FORM_block4;
9482 default:
9483 gcc_unreachable ();
9484 }
9485
9486 default:
9487 gcc_unreachable ();
9488 }
9489 }
9490
9491 /* Output the encoding of an attribute value. */
9492
9493 static void
9494 output_value_format (dw_attr_node *a)
9495 {
9496 enum dwarf_form form = value_format (a);
9497
9498 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9499 }
9500
9501 /* Given a die and id, produce the appropriate abbreviations. */
9502
9503 static void
9504 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9505 {
9506 unsigned ix;
9507 dw_attr_node *a_attr;
9508
9509 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9510 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
9511 dwarf_tag_name (abbrev->die_tag));
9512
9513 if (abbrev->die_child != NULL)
9514 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
9515 else
9516 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
9517
9518 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
9519 {
9520 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
9521 dwarf_attr_name (a_attr->dw_attr));
9522 output_value_format (a_attr);
9523 if (value_format (a_attr) == DW_FORM_implicit_const)
9524 {
9525 if (AT_class (a_attr) == dw_val_class_file_implicit)
9526 {
9527 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
9528 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
9529 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
9530 }
9531 else
9532 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
9533 }
9534 }
9535
9536 dw2_asm_output_data (1, 0, NULL);
9537 dw2_asm_output_data (1, 0, NULL);
9538 }
9539
9540
9541 /* Output the .debug_abbrev section which defines the DIE abbreviation
9542 table. */
9543
9544 static void
9545 output_abbrev_section (void)
9546 {
9547 unsigned int abbrev_id;
9548 dw_die_ref abbrev;
9549
9550 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9551 if (abbrev_id != 0)
9552 output_die_abbrevs (abbrev_id, abbrev);
9553
9554 /* Terminate the table. */
9555 dw2_asm_output_data (1, 0, NULL);
9556 }
9557
9558 /* Output a symbol we can use to refer to this DIE from another CU. */
9559
9560 static inline void
9561 output_die_symbol (dw_die_ref die)
9562 {
9563 const char *sym = die->die_id.die_symbol;
9564
9565 gcc_assert (!die->comdat_type_p);
9566
9567 if (sym == 0)
9568 return;
9569
9570 if (strncmp (sym, DIE_LABEL_PREFIX, sizeof (DIE_LABEL_PREFIX) - 1) == 0)
9571 /* We make these global, not weak; if the target doesn't support
9572 .linkonce, it doesn't support combining the sections, so debugging
9573 will break. */
9574 targetm.asm_out.globalize_label (asm_out_file, sym);
9575
9576 ASM_OUTPUT_LABEL (asm_out_file, sym);
9577 }
9578
9579 /* Return a new location list, given the begin and end range, and the
9580 expression. */
9581
9582 static inline dw_loc_list_ref
9583 new_loc_list (dw_loc_descr_ref expr, const char *begin, const char *end,
9584 const char *section)
9585 {
9586 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
9587
9588 retlist->begin = begin;
9589 retlist->begin_entry = NULL;
9590 retlist->end = end;
9591 retlist->expr = expr;
9592 retlist->section = section;
9593
9594 return retlist;
9595 }
9596
9597 /* Generate a new internal symbol for this location list node, if it
9598 hasn't got one yet. */
9599
9600 static inline void
9601 gen_llsym (dw_loc_list_ref list)
9602 {
9603 gcc_assert (!list->ll_symbol);
9604 list->ll_symbol = gen_internal_sym ("LLST");
9605 }
9606
9607 /* Output the location list given to us. */
9608
9609 static void
9610 output_loc_list (dw_loc_list_ref list_head)
9611 {
9612 if (list_head->emitted)
9613 return;
9614 list_head->emitted = true;
9615
9616 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
9617
9618 dw_loc_list_ref curr = list_head;
9619 const char *last_section = NULL;
9620 const char *base_label = NULL;
9621
9622 /* Walk the location list, and output each range + expression. */
9623 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
9624 {
9625 unsigned long size;
9626 /* Don't output an entry that starts and ends at the same address. */
9627 if (strcmp (curr->begin, curr->end) == 0 && !curr->force)
9628 continue;
9629 size = size_of_locs (curr->expr);
9630 /* If the expression is too large, drop it on the floor. We could
9631 perhaps put it into DW_TAG_dwarf_procedure and refer to that
9632 in the expression, but >= 64KB expressions for a single value
9633 in a single range are unlikely to be very useful. */
9634 if (dwarf_version < 5 && size > 0xffff)
9635 continue;
9636 if (dwarf_version >= 5)
9637 {
9638 if (dwarf_split_debug_info)
9639 {
9640 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has a
9641 uleb128 index into .debug_addr and a uleb128 length. */
9642 dw2_asm_output_data (1, DW_LLE_startx_length,
9643 "DW_LLE_startx_length (%s)",
9644 list_head->ll_symbol);
9645 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
9646 "Location list range start index "
9647 "(%s)", curr->begin);
9648 /* FIXME: This will ICE if HAVE_AS_LEB128 is not defined.
9649 For that case we probably need to emit DW_LLE_startx_endx,
9650 but we'd need 2 .debug_addr entries rather than just one. */
9651 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
9652 "Location list length (%s)",
9653 list_head->ll_symbol);
9654 }
9655 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
9656 {
9657 /* If all code is in the .text section, the base address is
9658 already provided by the CU attributes. Use
9659 DW_LLE_offset_pair where both addresses are uleb128 encoded
9660 offsets against that base. */
9661 dw2_asm_output_data (1, DW_LLE_offset_pair,
9662 "DW_LLE_offset_pair (%s)",
9663 list_head->ll_symbol);
9664 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
9665 "Location list begin address (%s)",
9666 list_head->ll_symbol);
9667 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
9668 "Location list end address (%s)",
9669 list_head->ll_symbol);
9670 }
9671 else if (HAVE_AS_LEB128)
9672 {
9673 /* Otherwise, find out how many consecutive entries could share
9674 the same base entry. If just one, emit DW_LLE_start_length,
9675 otherwise emit DW_LLE_base_address for the base address
9676 followed by a series of DW_LLE_offset_pair. */
9677 if (last_section == NULL || curr->section != last_section)
9678 {
9679 dw_loc_list_ref curr2;
9680 for (curr2 = curr->dw_loc_next; curr2 != NULL;
9681 curr2 = curr2->dw_loc_next)
9682 {
9683 if (strcmp (curr2->begin, curr2->end) == 0
9684 && !curr2->force)
9685 continue;
9686 break;
9687 }
9688 if (curr2 == NULL || curr->section != curr2->section)
9689 last_section = NULL;
9690 else
9691 {
9692 last_section = curr->section;
9693 base_label = curr->begin;
9694 dw2_asm_output_data (1, DW_LLE_base_address,
9695 "DW_LLE_base_address (%s)",
9696 list_head->ll_symbol);
9697 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
9698 "Base address (%s)",
9699 list_head->ll_symbol);
9700 }
9701 }
9702 /* Only one entry with the same base address. Use
9703 DW_LLE_start_length with absolute address and uleb128
9704 length. */
9705 if (last_section == NULL)
9706 {
9707 dw2_asm_output_data (1, DW_LLE_start_length,
9708 "DW_LLE_start_length (%s)",
9709 list_head->ll_symbol);
9710 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
9711 "Location list begin address (%s)",
9712 list_head->ll_symbol);
9713 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
9714 "Location list length "
9715 "(%s)", list_head->ll_symbol);
9716 }
9717 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
9718 DW_LLE_base_address. */
9719 else
9720 {
9721 dw2_asm_output_data (1, DW_LLE_offset_pair,
9722 "DW_LLE_offset_pair (%s)",
9723 list_head->ll_symbol);
9724 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
9725 "Location list begin address "
9726 "(%s)", list_head->ll_symbol);
9727 dw2_asm_output_delta_uleb128 (curr->end, base_label,
9728 "Location list end address "
9729 "(%s)", list_head->ll_symbol);
9730 }
9731 }
9732 /* The assembler does not support the .uleb128 directive. Emit
9733 DW_LLE_start_end with a pair of absolute addresses. */
9734 else
9735 {
9736 dw2_asm_output_data (1, DW_LLE_start_end,
9737 "DW_LLE_start_end (%s)",
9738 list_head->ll_symbol);
9739 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
9740 "Location list begin address (%s)",
9741 list_head->ll_symbol);
9742 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
9743 "Location list end address (%s)",
9744 list_head->ll_symbol);
9745 }
9746 }
9747 else if (dwarf_split_debug_info)
9748 {
9749 /* For -gsplit-dwarf -gdwarf-{2,3,4}, emit an index into .debug_addr
9750 and a 4-byte length. */
9751 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
9752 "Location list start/length entry (%s)",
9753 list_head->ll_symbol);
9754 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
9755 "Location list range start index (%s)",
9756 curr->begin);
9757 /* The length field is 4 bytes. If we ever need to support
9758 an 8-byte length, we can add a new DW_LLE code or fall back
9759 to DW_LLE_GNU_start_end_entry. */
9760 dw2_asm_output_delta (4, curr->end, curr->begin,
9761 "Location list range length (%s)",
9762 list_head->ll_symbol);
9763 }
9764 else if (!have_multiple_function_sections)
9765 {
9766 /* Pair of relative addresses against start of text section. */
9767 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
9768 "Location list begin address (%s)",
9769 list_head->ll_symbol);
9770 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
9771 "Location list end address (%s)",
9772 list_head->ll_symbol);
9773 }
9774 else
9775 {
9776 /* Pair of absolute addresses. */
9777 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
9778 "Location list begin address (%s)",
9779 list_head->ll_symbol);
9780 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
9781 "Location list end address (%s)",
9782 list_head->ll_symbol);
9783 }
9784
9785 /* Output the block length for this list of location operations. */
9786 if (dwarf_version >= 5)
9787 dw2_asm_output_data_uleb128 (size, "Location expression size");
9788 else
9789 {
9790 gcc_assert (size <= 0xffff);
9791 dw2_asm_output_data (2, size, "Location expression size");
9792 }
9793
9794 output_loc_sequence (curr->expr, -1);
9795 }
9796
9797 /* And finally list termination. */
9798 if (dwarf_version >= 5)
9799 dw2_asm_output_data (1, DW_LLE_end_of_list,
9800 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
9801 else if (dwarf_split_debug_info)
9802 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
9803 "Location list terminator (%s)",
9804 list_head->ll_symbol);
9805 else
9806 {
9807 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
9808 "Location list terminator begin (%s)",
9809 list_head->ll_symbol);
9810 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
9811 "Location list terminator end (%s)",
9812 list_head->ll_symbol);
9813 }
9814 }
9815
9816 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
9817 section. Emit a relocated reference if val_entry is NULL; otherwise,
9818 emit an indirect reference. */
9819
9820 static void
9821 output_range_list_offset (dw_attr_node *a)
9822 {
9823 const char *name = dwarf_attr_name (a->dw_attr);
9824
9825 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
9826 {
9827 if (dwarf_version >= 5)
9828 {
9829 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9830 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
9831 debug_ranges_section, "%s", name);
9832 }
9833 else
9834 {
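 /* Pre-DWARF 5: val_offset is an index into ranges_table, and each
 entry occupies two addresses in .debug_ranges. Temporarily append
 the byte offset to the section label so the assembler emits
 label+offset, then restore the label afterwards. */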
9835 char *p = strchr (ranges_section_label, '\0');
9836 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
9837 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
9838 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
9839 debug_ranges_section, "%s", name);
9840 *p = '\0';
9841 }
9842 }
9843 else if (dwarf_version >= 5)
9844 {
9845 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9846 gcc_assert (rnglist_idx);
9847 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
9848 }
9849 else
9850 dw2_asm_output_data (DWARF_OFFSET_SIZE,
9851 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
9852 "%s (offset from %s)", name, ranges_section_label);
9853 }
9854
9855 /* Output the offset into the debug_loc section. */
9856
9857 static void
9858 output_loc_list_offset (dw_attr_node *a)
9859 {
9860 char *sym = AT_loc_list (a)->ll_symbol;
9861
9862 gcc_assert (sym);
9863 if (!dwarf_split_debug_info)
9864 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
9865 "%s", dwarf_attr_name (a->dw_attr));
9866 else if (dwarf_version >= 5)
9867 {
9868 gcc_assert (AT_loc_list (a)->num_assigned);
9869 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
9870 dwarf_attr_name (a->dw_attr),
9871 sym);
9872 }
9873 else
9874 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
9875 "%s", dwarf_attr_name (a->dw_attr));
9876 }
9877
9878 /* Output an attribute's index or value appropriately. */
9879
9880 static void
9881 output_attr_index_or_value (dw_attr_node *a)
9882 {
9883 const char *name = dwarf_attr_name (a->dw_attr);
9884
9885 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9886 {
9887 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
9888 return;
9889 }
9890 switch (AT_class (a))
9891 {
9892 case dw_val_class_addr:
9893 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
9894 break;
9895 case dw_val_class_high_pc:
9896 case dw_val_class_lbl_id:
9897 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
9898 break;
9899 default:
9900 gcc_unreachable ();
9901 }
9902 }
9903
9904 /* Output a type signature. */
9905
9906 static inline void
9907 output_signature (const char *sig, const char *name)
9908 {
9909 int i;
9910
9911 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
9912 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
9913 }
9914
9915 /* Output a discriminant value. */
9916
9917 static inline void
9918 output_discr_value (dw_discr_value *discr_value, const char *name)
9919 {
9920 if (discr_value->pos)
9921 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
9922 else
9923 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
9924 }
9925
9926 /* Output the DIE and its attributes. Called recursively to generate
9927 the definitions of each child DIE. */
9928
9929 static void
9930 output_die (dw_die_ref die)
9931 {
9932 dw_attr_node *a;
9933 dw_die_ref c;
9934 unsigned long size;
9935 unsigned ix;
9936
9937 /* If someone in another CU might refer to us, set up a symbol for
9938 them to point to. */
9939 if (! die->comdat_type_p && die->die_id.die_symbol)
9940 output_die_symbol (die);
9941
9942 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
9943 (unsigned long)die->die_offset,
9944 dwarf_tag_name (die->die_tag));
9945
9946 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9947 {
9948 const char *name = dwarf_attr_name (a->dw_attr);
9949
9950 switch (AT_class (a))
9951 {
9952 case dw_val_class_addr:
9953 output_attr_index_or_value (a);
9954 break;
9955
9956 case dw_val_class_offset:
9957 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
9958 "%s", name);
9959 break;
9960
9961 case dw_val_class_range_list:
9962 output_range_list_offset (a);
9963 break;
9964
9965 case dw_val_class_loc:
9966 size = size_of_locs (AT_loc (a));
9967
9968 /* Output the block length for this list of location operations. */
9969 if (dwarf_version >= 4)
9970 dw2_asm_output_data_uleb128 (size, "%s", name);
9971 else
9972 dw2_asm_output_data (constant_size (size), size, "%s", name);
9973
9974 output_loc_sequence (AT_loc (a), -1);
9975 break;
9976
9977 case dw_val_class_const:
9978 /* ??? It would be slightly more efficient to use a scheme like the one
9979 used for unsigned constants below, but gdb 4.x does not sign
9980 extend. Gdb 5.x does sign extend. */
9981 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
9982 break;
9983
9984 case dw_val_class_unsigned_const:
9985 {
9986 int csize = constant_size (AT_unsigned (a));
9987 if (dwarf_version == 3
9988 && a->dw_attr == DW_AT_data_member_location
9989 && csize >= 4)
9990 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
9991 else
9992 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
9993 }
9994 break;
9995
9996 case dw_val_class_const_implicit:
9997 if (flag_debug_asm)
9998 fprintf (asm_out_file, "\t\t\t%s %s ("
9999 HOST_WIDE_INT_PRINT_DEC ")\n",
10000 ASM_COMMENT_START, name, AT_int (a));
10001 break;
10002
10003 case dw_val_class_unsigned_const_implicit:
10004 if (flag_debug_asm)
10005 fprintf (asm_out_file, "\t\t\t%s %s ("
10006 HOST_WIDE_INT_PRINT_HEX ")\n",
10007 ASM_COMMENT_START, name, AT_unsigned (a));
10008 break;
10009
10010 case dw_val_class_const_double:
10011 {
10012 unsigned HOST_WIDE_INT first, second;
10013
10014 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10015 dw2_asm_output_data (1,
10016 HOST_BITS_PER_DOUBLE_INT
10017 / HOST_BITS_PER_CHAR,
10018 NULL);
10019
10020 if (WORDS_BIG_ENDIAN)
10021 {
10022 first = a->dw_attr_val.v.val_double.high;
10023 second = a->dw_attr_val.v.val_double.low;
10024 }
10025 else
10026 {
10027 first = a->dw_attr_val.v.val_double.low;
10028 second = a->dw_attr_val.v.val_double.high;
10029 }
10030
10031 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10032 first, "%s", name);
10033 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10034 second, NULL);
10035 }
10036 break;
10037
10038 case dw_val_class_wide_int:
10039 {
10040 int i;
10041 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10042 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10043 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10044 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10045 * l, NULL);
10046
10047 if (WORDS_BIG_ENDIAN)
10048 for (i = len - 1; i >= 0; --i)
10049 {
10050 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10051 "%s", name);
10052 name = "";
10053 }
10054 else
10055 for (i = 0; i < len; ++i)
10056 {
10057 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10058 "%s", name);
10059 name = "";
10060 }
10061 }
10062 break;
10063
10064 case dw_val_class_vec:
10065 {
10066 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10067 unsigned int len = a->dw_attr_val.v.val_vec.length;
10068 unsigned int i;
10069 unsigned char *p;
10070
10071 dw2_asm_output_data (constant_size (len * elt_size),
10072 len * elt_size, "%s", name);
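 /* Elements wider than a HOST_WIDE_INT are split into two halves so
 that extract_int can handle each piece. */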
10073 if (elt_size > sizeof (HOST_WIDE_INT))
10074 {
10075 elt_size /= 2;
10076 len *= 2;
10077 }
10078 for (i = 0, p = a->dw_attr_val.v.val_vec.array;
10079 i < len;
10080 i++, p += elt_size)
10081 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10082 "fp or vector constant word %u", i);
10083 break;
10084 }
10085
10086 case dw_val_class_flag:
10087 if (dwarf_version >= 4)
10088 {
10089 /* Currently all add_AT_flag calls pass in 1 as last argument,
10090 so DW_FORM_flag_present can be used. If that ever changes,
10091 we'll need to use DW_FORM_flag and have some optimization
10092 in build_abbrev_table that will change those to
10093 DW_FORM_flag_present if it is set to 1 in all DIEs using
10094 the same abbrev entry. */
10095 gcc_assert (AT_flag (a) == 1);
10096 if (flag_debug_asm)
10097 fprintf (asm_out_file, "\t\t\t%s %s\n",
10098 ASM_COMMENT_START, name);
10099 break;
10100 }
10101 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10102 break;
10103
10104 case dw_val_class_loc_list:
10105 output_loc_list_offset (a);
10106 break;
10107
10108 case dw_val_class_die_ref:
10109 if (AT_ref_external (a))
10110 {
10111 if (AT_ref (a)->comdat_type_p)
10112 {
10113 comdat_type_node *type_node
10114 = AT_ref (a)->die_id.die_type_node;
10115
10116 gcc_assert (type_node);
10117 output_signature (type_node->signature, name);
10118 }
10119 else
10120 {
10121 const char *sym = AT_ref (a)->die_id.die_symbol;
10122 int size;
10123
10124 gcc_assert (sym);
10125 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10126 length, whereas in DWARF3 it's always sized as an
10127 offset. */
10128 if (dwarf_version == 2)
10129 size = DWARF2_ADDR_SIZE;
10130 else
10131 size = DWARF_OFFSET_SIZE;
10132 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10133 name);
10134 }
10135 }
10136 else
10137 {
10138 gcc_assert (AT_ref (a)->die_offset);
10139 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10140 "%s", name);
10141 }
10142 break;
10143
10144 case dw_val_class_fde_ref:
10145 {
10146 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10147
10148 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10149 a->dw_attr_val.v.val_fde_index * 2);
10150 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10151 "%s", name);
10152 }
10153 break;
10154
10155 case dw_val_class_vms_delta:
10156 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10157 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10158 AT_vms_delta2 (a), AT_vms_delta1 (a),
10159 "%s", name);
10160 #else
10161 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10162 AT_vms_delta2 (a), AT_vms_delta1 (a),
10163 "%s", name);
10164 #endif
10165 break;
10166
10167 case dw_val_class_lbl_id:
10168 output_attr_index_or_value (a);
10169 break;
10170
10171 case dw_val_class_lineptr:
10172 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10173 debug_line_section, "%s", name);
10174 break;
10175
10176 case dw_val_class_macptr:
10177 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10178 debug_macinfo_section, "%s", name);
10179 break;
10180
10181 case dw_val_class_loclistsptr:
10182 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10183 debug_loc_section, "%s", name);
10184 break;
10185
10186 case dw_val_class_str:
10187 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10188 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10189 a->dw_attr_val.v.val_str->label,
10190 debug_str_section,
10191 "%s: \"%s\"", name, AT_string (a));
10192 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10193 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10194 a->dw_attr_val.v.val_str->label,
10195 debug_line_str_section,
10196 "%s: \"%s\"", name, AT_string (a));
10197 else if (a->dw_attr_val.v.val_str->form == DW_FORM_GNU_str_index)
10198 dw2_asm_output_data_uleb128 (AT_index (a),
10199 "%s: \"%s\"", name, AT_string (a));
10200 else
10201 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10202 break;
10203
10204 case dw_val_class_file:
10205 {
10206 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10207
10208 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10209 a->dw_attr_val.v.val_file->filename);
10210 break;
10211 }
10212
10213 case dw_val_class_file_implicit:
10214 if (flag_debug_asm)
10215 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10216 ASM_COMMENT_START, name,
10217 maybe_emit_file (a->dw_attr_val.v.val_file),
10218 a->dw_attr_val.v.val_file->filename);
10219 break;
10220
10221 case dw_val_class_data8:
10222 {
10223 int i;
10224
10225 for (i = 0; i < 8; i++)
10226 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10227 i == 0 ? "%s" : NULL, name);
10228 break;
10229 }
10230
10231 case dw_val_class_high_pc:
10232 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10233 get_AT_low_pc (die), "DW_AT_high_pc");
10234 break;
10235
10236 case dw_val_class_discr_value:
10237 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10238 break;
10239
10240 case dw_val_class_discr_list:
10241 {
10242 dw_discr_list_ref list = AT_discr_list (a);
10243 const int size = size_of_discr_list (list);
10244
10245 /* This is a block, so output its length first. */
10246 dw2_asm_output_data (constant_size (size), size,
10247 "%s: block size", name);
10248
10249 for (; list != NULL; list = list->dw_discr_next)
10250 {
10251 /* One byte for the discriminant value descriptor, and then as
10252 many LEB128 numbers as required. */
10253 if (list->dw_discr_range)
10254 dw2_asm_output_data (1, DW_DSC_range,
10255 "%s: DW_DSC_range", name);
10256 else
10257 dw2_asm_output_data (1, DW_DSC_label,
10258 "%s: DW_DSC_label", name);
10259
10260 output_discr_value (&list->dw_discr_lower_bound, name);
10261 if (list->dw_discr_range)
10262 output_discr_value (&list->dw_discr_upper_bound, name);
10263 }
10264 break;
10265 }
10266
10267 default:
10268 gcc_unreachable ();
10269 }
10270 }
10271
10272 FOR_EACH_CHILD (die, c, output_die (c));
10273
10274 /* Add null byte to terminate sibling list. */
10275 if (die->die_child != NULL)
10276 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10277 (unsigned long) die->die_offset);
10278 }
10279
10280 /* Output the compilation unit that appears at the beginning of the
10281 .debug_info section, and precedes the DIE descriptions. */
10282
10283 static void
10284 output_compilation_unit_header (enum dwarf_unit_type ut)
10285 {
10286 if (!XCOFF_DEBUGGING_INFO)
10287 {
10288 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10289 dw2_asm_output_data (4, 0xffffffff,
10290 "Initial length escape value indicating 64-bit DWARF extension");
10291 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10292 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
10293 "Length of Compilation Unit Info");
10294 }
10295
10296 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
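 /* DWARF 5 moved the unit type and address size ahead of the abbrev
 offset; earlier versions emit the address size last. */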
10297 if (dwarf_version >= 5)
10298 {
10299 const char *name;
10300 switch (ut)
10301 {
10302 case DW_UT_compile: name = "DW_UT_compile"; break;
10303 case DW_UT_type: name = "DW_UT_type"; break;
10304 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
10305 case DW_UT_split_type: name = "DW_UT_split_type"; break;
10306 default: gcc_unreachable ();
10307 }
10308 dw2_asm_output_data (1, ut, "%s", name);
10309 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10310 }
10311 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
10312 debug_abbrev_section,
10313 "Offset Into Abbrev. Section");
10314 if (dwarf_version < 5)
10315 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10316 }
10317
10318 /* Output the compilation unit DIE and its children. */
10319
10320 static void
10321 output_comp_unit (dw_die_ref die, int output_if_empty,
10322 const unsigned char *dwo_id)
10323 {
10324 const char *secname, *oldsym;
10325 char *tmp;
10326
10327 /* Unless we are outputting the main CU, we may throw away empty ones. */
10328 if (!output_if_empty && die->die_child == NULL)
10329 return;
10330
10331 /* Even if there are no children of this DIE, we must output the information
10332 about the compilation unit. Otherwise, on an empty translation unit, we
10333 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
10334 will then complain when examining the file. First mark all the DIEs in
10335 this CU so we know which get local refs. */
10336 mark_dies (die);
10337
10338 external_ref_hash_type *extern_map = optimize_external_refs (die);
10339
10340 /* For now, optimize only the main CU; to optimize the rest
10341 we'd need to see all of them earlier. Leave the rest for post-linking
10342 tools like DWZ. */
10343 if (die == comp_unit_die ())
10344 abbrev_opt_start = vec_safe_length (abbrev_die_table);
10345
10346 build_abbrev_table (die, extern_map);
10347
10348 optimize_abbrev_table ();
10349
10350 delete extern_map;
10351
10352 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
10353 next_die_offset = (dwo_id
10354 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
10355 : DWARF_COMPILE_UNIT_HEADER_SIZE);
10356 calc_die_sizes (die);
10357
10358 oldsym = die->die_id.die_symbol;
10359 if (oldsym)
10360 {
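 /* A CU with a DIE symbol gets its own .gnu.linkonce.wi.* section so
 the linker can discard duplicate copies from other objects. */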
10361 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
10362
10363 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
10364 secname = tmp;
10365 die->die_id.die_symbol = NULL;
10366 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
10367 }
10368 else
10369 {
10370 switch_to_section (debug_info_section);
10371 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
10372 info_section_emitted = true;
10373 }
10374
10375 /* Output debugging information. */
10376 output_compilation_unit_header (dwo_id
10377 ? DW_UT_split_compile : DW_UT_compile);
10378 if (dwarf_version >= 5)
10379 {
10380 if (dwo_id != NULL)
10381 for (int i = 0; i < 8; i++)
10382 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
10383 }
10384 output_die (die);
10385
10386 /* Leave the marks on the main CU, so we can check them in
10387 output_pubnames. */
10388 if (oldsym)
10389 {
10390 unmark_dies (die);
10391 die->die_id.die_symbol = oldsym;
10392 }
10393 }
10394
10395 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
10396 and .debug_pubtypes. This is configured per-target, but can be
10397 overridden by the -gpubnames or -gno-pubnames options. */
10398
10399 static inline bool
10400 want_pubnames (void)
10401 {
10402 if (debug_info_level <= DINFO_LEVEL_TERSE)
10403 return false;
10404 if (debug_generate_pub_sections != -1)
10405 return debug_generate_pub_sections;
10406 return targetm.want_debug_pub_sections;
10407 }
10408
10409 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
10410
10411 static void
10412 add_AT_pubnames (dw_die_ref die)
10413 {
10414 if (want_pubnames ())
10415 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
10416 }
10417
10418 /* Add a string attribute value to a skeleton DIE. */
10419
10420 static inline void
10421 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
10422 const char *str)
10423 {
10424 dw_attr_node attr;
10425 struct indirect_string_node *node;
10426
10427 if (! skeleton_debug_str_hash)
10428 skeleton_debug_str_hash
10429 = hash_table<indirect_string_hasher>::create_ggc (10);
10430
10431 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
10432 find_string_form (node);
10433 if (node->form == DW_FORM_GNU_str_index)
10434 node->form = DW_FORM_strp;
10435
10436 attr.dw_attr = attr_kind;
10437 attr.dw_attr_val.val_class = dw_val_class_str;
10438 attr.dw_attr_val.val_entry = NULL;
10439 attr.dw_attr_val.v.val_str = node;
10440 add_dwarf_attr (die, &attr);
10441 }
10442
10443 /* Helper function to generate top-level dies for skeleton debug_info and
10444 debug_types. */
10445
10446 static void
10447 add_top_level_skeleton_die_attrs (dw_die_ref die)
10448 {
10449 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
10450 const char *comp_dir = comp_dir_string ();
10451
10452 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
10453 if (comp_dir != NULL)
10454 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
10455 add_AT_pubnames (die);
10456 add_AT_lineptr (die, DW_AT_GNU_addr_base, debug_addr_section_label);
10457 }
10458
10459 /* Output skeleton debug sections that point to the dwo file. */
10460
10461 static void
10462 output_skeleton_debug_sections (dw_die_ref comp_unit,
10463 const unsigned char *dwo_id)
10464 {
10465 /* These attributes will be found in the full debug_info section. */
10466 remove_AT (comp_unit, DW_AT_producer);
10467 remove_AT (comp_unit, DW_AT_language);
10468
10469 switch_to_section (debug_skeleton_info_section);
10470 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
10471
10472 /* Produce the skeleton compilation-unit header. This one differs enough from
10473 a normal CU header that it's better not to call
10474 output_compilation_unit_header. */
10475 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10476 dw2_asm_output_data (4, 0xffffffff,
10477 "Initial length escape value indicating 64-bit "
10478 "DWARF extension");
10479
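 /* The skeleton CU consists of just the comp_unit DIE, so its length
 is the skeleton header (minus the initial length field) plus that
 one DIE's size. */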
10480 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10481 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
10482 - DWARF_INITIAL_LENGTH_SIZE
10483 + size_of_die (comp_unit),
10484 "Length of Compilation Unit Info");
10485 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10486 if (dwarf_version >= 5)
10487 {
10488 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
10489 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10490 }
10491 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
10492 debug_skeleton_abbrev_section,
10493 "Offset Into Abbrev. Section");
10494 if (dwarf_version < 5)
10495 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10496 else
10497 for (int i = 0; i < 8; i++)
10498 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
10499
10500 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
10501 output_die (comp_unit);
10502
10503 /* Build the skeleton debug_abbrev section. */
10504 switch_to_section (debug_skeleton_abbrev_section);
10505 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
10506
10507 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
10508
10509 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
10510 }
10511
10512 /* Output a comdat type unit DIE and its children. */
10513
10514 static void
10515 output_comdat_type_unit (comdat_type_node *node)
10516 {
10517 const char *secname;
10518 char *tmp;
10519 int i;
10520 #if defined (OBJECT_FORMAT_ELF)
10521 tree comdat_key;
10522 #endif
10523
10524 /* First mark all the DIEs in this CU so we know which get local refs. */
10525 mark_dies (node->root_die);
10526
10527 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
10528
10529 build_abbrev_table (node->root_die, extern_map);
10530
10531 delete extern_map;
10532 extern_map = NULL;
10533
10534 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
10535 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
10536 calc_die_sizes (node->root_die);
10537
10538 #if defined (OBJECT_FORMAT_ELF)
10539 if (dwarf_version >= 5)
10540 {
10541 if (!dwarf_split_debug_info)
10542 secname = ".debug_info";
10543 else
10544 secname = ".debug_info.dwo";
10545 }
10546 else if (!dwarf_split_debug_info)
10547 secname = ".debug_types";
10548 else
10549 secname = ".debug_types.dwo";
10550
10551 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
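 /* The COMDAT group key is "wi." or "wt." followed by the hex type
 signature, letting the linker keep a single copy of identical type
 units. */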
10552 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
10553 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10554 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
10555 comdat_key = get_identifier (tmp);
10556 targetm.asm_out.named_section (secname,
10557 SECTION_DEBUG | SECTION_LINKONCE,
10558 comdat_key);
10559 #else
10560 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
10561 sprintf (tmp, (dwarf_version >= 5
10562 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
10563 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10564 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
10565 secname = tmp;
10566 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
10567 #endif
10568
10569 /* Output debugging information. */
10570 output_compilation_unit_header (dwarf_split_debug_info
10571 ? DW_UT_split_type : DW_UT_type);
10572 output_signature (node->signature, "Type Signature");
10573 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
10574 "Offset to Type DIE");
10575 output_die (node->root_die);
10576
10577 unmark_dies (node->root_die);
10578 }
10579
10580 /* Return the DWARF2/3 pubname associated with a decl. */
10581
10582 static const char *
10583 dwarf2_name (tree decl, int scope)
10584 {
10585 if (DECL_NAMELESS (decl))
10586 return NULL;
10587 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
10588 }
10589
10590 /* Add a new entry to .debug_pubnames if appropriate. */
10591
10592 static void
10593 add_pubname_string (const char *str, dw_die_ref die)
10594 {
10595 pubname_entry e;
10596
10597 e.die = die;
10598 e.name = xstrdup (str);
10599 vec_safe_push (pubname_table, e);
10600 }
10601
10602 static void
10603 add_pubname (tree decl, dw_die_ref die)
10604 {
10605 if (!want_pubnames ())
10606 return;
10607
10608 /* Don't add items to the table when we expect that the consumer will have
10609 just read the enclosing die. For example, if the consumer is looking at a
10610 class_member, it will either be inside the class already, or will have just
10611 looked up the class to find the member. Either way, searching the class is
10612 faster than searching the index. */
10613 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
10614 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
10615 {
10616 const char *name = dwarf2_name (decl, 1);
10617
10618 if (name)
10619 add_pubname_string (name, die);
10620 }
10621 }
10622
10623 /* Add an enumerator to the pubnames section. */
10624
10625 static void
10626 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
10627 {
10628 pubname_entry e;
10629
10630 gcc_assert (scope_name);
10631 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
10632 e.die = die;
10633 vec_safe_push (pubname_table, e);
10634 }
10635
10636 /* Add a new entry to .debug_pubtypes if appropriate. */
10637
10638 static void
10639 add_pubtype (tree decl, dw_die_ref die)
10640 {
10641 pubname_entry e;
10642
10643 if (!want_pubnames ())
10644 return;
10645
10646 if ((TREE_PUBLIC (decl)
10647 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
10648 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
10649 {
10650 tree scope = NULL;
10651 const char *scope_name = "";
10652 const char *sep = is_cxx () ? "::" : ".";
10653 const char *name;
10654
10655 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
10656 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
10657 {
10658 scope_name = lang_hooks.dwarf_name (scope, 1);
10659 if (scope_name != NULL && scope_name[0] != '\0')
10660 scope_name = concat (scope_name, sep, NULL);
10661 else
10662 scope_name = "";
10663 }
10664
10665 if (TYPE_P (decl))
10666 name = type_tag (decl);
10667 else
10668 name = lang_hooks.dwarf_name (decl, 1);
10669
10670 /* If we don't have a name for the type, there's no point in adding
10671 it to the table. */
10672 if (name != NULL && name[0] != '\0')
10673 {
10674 e.die = die;
10675 e.name = concat (scope_name, name, NULL);
10676 vec_safe_push (pubtype_table, e);
10677 }
10678
10679 /* Although it might be more consistent to add the pubinfo for the
10680 enumerators as their dies are created, they should only be added if the
10681 enum type meets the criteria above. So rather than re-check the parent
10682 enum type whenever an enumerator die is created, just output them all
10683 here. This isn't protected by the name conditional because anonymous
10684 enums don't have names. */
10685 if (die->die_tag == DW_TAG_enumeration_type)
10686 {
10687 dw_die_ref c;
10688
10689 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
10690 }
10691 }
10692 }
10693
10694 /* Output a single entry in the pubnames table. */
10695
10696 static void
10697 output_pubname (dw_offset die_offset, pubname_entry *entry)
10698 {
10699 dw_die_ref die = entry->die;
10700 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
10701
10702 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
10703
10704 if (debug_generate_pub_sections == 2)
10705 {
10706 /* This logic follows gdb's method for determining the value of the flag
10707 byte. */
10708 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
10709 switch (die->die_tag)
10710 {
10711 case DW_TAG_typedef:
10712 case DW_TAG_base_type:
10713 case DW_TAG_subrange_type:
10714 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
10715 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
10716 break;
10717 case DW_TAG_enumerator:
10718 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
10719 GDB_INDEX_SYMBOL_KIND_VARIABLE);
10720 if (!is_cxx () && !is_java ())
10721 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
10722 break;
10723 case DW_TAG_subprogram:
10724 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
10725 GDB_INDEX_SYMBOL_KIND_FUNCTION);
10726 if (!is_ada ())
10727 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
10728 break;
10729 case DW_TAG_constant:
10730 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
10731 GDB_INDEX_SYMBOL_KIND_VARIABLE);
10732 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
10733 break;
10734 case DW_TAG_variable:
10735 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
10736 GDB_INDEX_SYMBOL_KIND_VARIABLE);
10737 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
10738 break;
10739 case DW_TAG_namespace:
10740 case DW_TAG_imported_declaration:
10741 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
10742 break;
10743 case DW_TAG_class_type:
10744 case DW_TAG_interface_type:
10745 case DW_TAG_structure_type:
10746 case DW_TAG_union_type:
10747 case DW_TAG_enumeration_type:
10748 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
10749 if (!is_cxx () && !is_java ())
10750 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
10751 break;
10752 default:
10753 /* An unusual tag. Leave the flag-byte empty. */
10754 break;
10755 }
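 /* The symbol kind and static flag are stored above the CU index
 bits; shift the CU-index part away so only the flag byte remains
 to be emitted. */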
10756 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
10757 "GDB-index flags");
10758 }
10759
10760 dw2_asm_output_nstring (entry->name, -1, "external name");
10761 }
10762
10763
10764 /* Output the public names table used to speed up access to externally
10765 visible names; or the public types table used to find type definitions. */
10766
10767 static void
10768 output_pubnames (vec<pubname_entry, va_gc> *names)
10769 {
10770 unsigned i;
10771 unsigned long pubnames_length = size_of_pubnames (names);
10772 pubname_entry *pub;
10773
10774 if (!XCOFF_DEBUGGING_INFO)
10775 {
10776 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10777 dw2_asm_output_data (4, 0xffffffff,
10778 "Initial length escape value indicating 64-bit DWARF extension");
10779 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
10780 "Pub Info Length");
10781 }
10782
10783 /* The version number for pubnames/pubtypes is independent of the DWARF version. */
10784 dw2_asm_output_data (2, 2, "DWARF Version");
10785
10786 if (dwarf_split_debug_info)
10787 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
10788 debug_skeleton_info_section,
10789 "Offset of Compilation Unit Info");
10790 else
10791 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
10792 debug_info_section,
10793 "Offset of Compilation Unit Info");
10794 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
10795 "Compilation Unit Length");
10796
10797 FOR_EACH_VEC_ELT (*names, i, pub)
10798 {
10799 if (include_pubname_in_output (names, pub))
10800 {
10801 dw_offset die_offset = pub->die->die_offset;
10802
10803 /* We shouldn't see pubnames for DIEs outside of the main CU. */
10804 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
10805 gcc_assert (pub->die->die_mark);
10806
10807 /* If we're putting types in their own .debug_types sections,
10808 the .debug_pubtypes table will still point to the compile
10809 unit (not the type unit), so we want to use the offset of
10810 the skeleton DIE (if there is one). */
10811 if (pub->die->comdat_type_p && names == pubtype_table)
10812 {
10813 comdat_type_node *type_node = pub->die->die_id.die_type_node;
10814
10815 if (type_node != NULL)
10816 die_offset = (type_node->skeleton_die != NULL
10817 ? type_node->skeleton_die->die_offset
10818 : comp_unit_die ()->die_offset);
10819 }
10820
10821 output_pubname (die_offset, pub);
10822 }
10823 }
10824
10825 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
10826 }
10827
10828 /* Output public names and types tables if necessary. */
10829
10830 static void
10831 output_pubtables (void)
10832 {
10833 if (!want_pubnames () || !info_section_emitted)
10834 return;
10835
10836 switch_to_section (debug_pubnames_section);
10837 output_pubnames (pubname_table);
10838 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
10839 It shouldn't hurt to emit it always, since pure DWARF2 consumers
10840 simply won't look for the section. */
10841 switch_to_section (debug_pubtypes_section);
10842 output_pubnames (pubtype_table);
10843 }
10844
10845
10846 /* Output the information that goes into the .debug_aranges table.
10847 Namely, define the beginning and ending address range of the
10848 text section generated for this compilation unit. */
10849
10850 static void
10851 output_aranges (void)
10852 {
10853 unsigned i;
10854 unsigned long aranges_length = size_of_aranges ();
10855
10856 if (!XCOFF_DEBUGGING_INFO)
10857 {
10858 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10859 dw2_asm_output_data (4, 0xffffffff,
10860 "Initial length escape value indicating 64-bit DWARF extension");
10861 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
10862 "Length of Address Ranges Info");
10863 }
10864
10865 /* Version number for aranges is still 2, even up to DWARF5. */
10866 dw2_asm_output_data (2, 2, "DWARF Version");
10867 if (dwarf_split_debug_info)
10868 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
10869 debug_skeleton_info_section,
10870 "Offset of Compilation Unit Info");
10871 else
10872 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
10873 debug_info_section,
10874 "Offset of Compilation Unit Info");
10875 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
10876 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
10877
10878 /* We need to align to twice the pointer size here. */
10879 if (DWARF_ARANGES_PAD_SIZE)
10880 {
10881 /* Pad using 2-byte words so that the padding is correct for any
10882 pointer size. */
10883 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
10884 2 * DWARF2_ADDR_SIZE);
10885 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
10886 dw2_asm_output_data (2, 0, NULL);
10887 }
10888
10889 /* These entries must not be output if the sections were not used:
10890 in that case the length will be 0, and the address may also end up
10891 as 0 if the section is discarded by ld --gc-sections, leaving an
10892 invalid (0, 0) entry that can be
10893 confused with the terminator. */
10894 if (text_section_used)
10895 {
10896 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
10897 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
10898 text_section_label, "Length");
10899 }
10900 if (cold_text_section_used)
10901 {
10902 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
10903 "Address");
10904 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
10905 cold_text_section_label, "Length");
10906 }
10907
10908 if (have_multiple_function_sections)
10909 {
10910 unsigned fde_idx;
10911 dw_fde_ref fde;
10912
10913 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
10914 {
10915 if (DECL_IGNORED_P (fde->decl))
10916 continue;
10917 if (!fde->in_std_section)
10918 {
10919 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
10920 "Address");
10921 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
10922 fde->dw_fde_begin, "Length");
10923 }
10924 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
10925 {
10926 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
10927 "Address");
10928 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
10929 fde->dw_fde_second_begin, "Length");
10930 }
10931 }
10932 }
10933
10934 /* Output the terminator words. */
10935 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
10936 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
10937 }
10938
10939 /* Add a new entry to .debug_ranges. Return its index into
10940 ranges_table vector. */
10941
10942 static unsigned int
10943 add_ranges_num (int num, bool maybe_new_sec)
10944 {
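  /* Start with no label and a zero index; note_rnglist_head and
     index_rnglists fill these in later when DWARF 5 range lists are
     emitted.  */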
10945 dw_ranges r = { NULL, num, 0, maybe_new_sec };
10946 vec_safe_push (ranges_table, r);
10947 return vec_safe_length (ranges_table) - 1;
10948 }
10949
10950 /* Add a new entry to .debug_ranges corresponding to a block, or a
10951 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
10952 this entry might be in a different section from the previous range. */
10953
10954 static unsigned int
10955 add_ranges (const_tree block, bool maybe_new_sec)
10956 {
10957 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
10958 }
10959
10960 /* Note that (*ranges_table)[offset] is either the head of a rnglist
10961 chain, or a middle entry of a chain that will be directly referred to. */
10962
10963 static void
10964 note_rnglist_head (unsigned int offset)
10965 {
10966 if (dwarf_version < 5 || (*ranges_table)[offset].label)
10967 return;
10968 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
10969 }
10970
10971 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
10972 When using dwarf_split_debug_info, address attributes in DIEs destined
10973 for the final executable should be direct references; setting the
10974 parameter FORCE_DIRECT ensures this behavior. */
10975
10976 static void
10977 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
10978 bool *added, bool force_direct)
10979 {
10980 unsigned int in_use = vec_safe_length (ranges_by_label);
10981 unsigned int offset;
10982 dw_ranges_by_label rbl = { begin, end };
10983 vec_safe_push (ranges_by_label, rbl);
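  /* Encode the ranges_by_label index as a negative block number (-idx - 1)
     so that output_ranges and output_rnglists can tell it apart from real
     BLOCK numbers and from the zero terminator.  */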
10984 offset = add_ranges_num (-(int)in_use - 1, true);
10985 if (!*added)
10986 {
10987 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
10988 *added = true;
10989 note_rnglist_head (offset);
10990 }
10991 }
10992
10993 /* Emit .debug_ranges section. */
10994
10995 static void
10996 output_ranges (void)
10997 {
10998 unsigned i;
10999 static const char *const start_fmt = "Offset %#x";
11000 const char *fmt = start_fmt;
11001 dw_ranges *r;
11002
11003 switch_to_section (debug_ranges_section);
11004 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11005 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11006 {
11007 int block_num = r->num;
11008
11009 if (block_num > 0)
11010 {
11011 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11012 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11013
11014 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11015 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11016
11017 /* If all code is in the text section, then the compilation
11018 unit base address defaults to DW_AT_low_pc, which is the
11019 base of the text section. */
11020 if (!have_multiple_function_sections)
11021 {
11022 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11023 text_section_label,
11024 fmt, i * 2 * DWARF2_ADDR_SIZE);
11025 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11026 text_section_label, NULL);
11027 }
11028
11029 /* Otherwise, the compilation unit base address is zero,
11030 which allows us to use absolute addresses, and not worry
11031 about whether the target supports cross-section
11032 arithmetic. */
11033 else
11034 {
11035 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11036 fmt, i * 2 * DWARF2_ADDR_SIZE);
11037 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11038 }
11039
11040 fmt = NULL;
11041 }
11042
11043 /* Negative block_num stands for an index into ranges_by_label. */
11044 else if (block_num < 0)
11045 {
11046 int lab_idx = - block_num - 1;
11047
11048 if (!have_multiple_function_sections)
11049 {
11050 gcc_unreachable ();
11051 #if 0
11052 /* If we ever use add_ranges_by_labels () for a single
11053 function section, all we have to do is to take out
11054 the #if 0 above. */
11055 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11056 (*ranges_by_label)[lab_idx].begin,
11057 text_section_label,
11058 fmt, i * 2 * DWARF2_ADDR_SIZE);
11059 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11060 (*ranges_by_label)[lab_idx].end,
11061 text_section_label, NULL);
11062 #endif
11063 }
11064 else
11065 {
11066 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11067 (*ranges_by_label)[lab_idx].begin,
11068 fmt, i * 2 * DWARF2_ADDR_SIZE);
11069 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11070 (*ranges_by_label)[lab_idx].end,
11071 NULL);
11072 }
11073 }
11074 else
11075 {
11076 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11077 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11078 fmt = start_fmt;
11079 }
11080 }
11081 }
11082
11083 /* Non-zero if .debug_line_str should be used for .debug_line section
11084 strings or strings that are likely shareable with those. */
11085 #define DWARF5_USE_DEBUG_LINE_STR \
11086 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11087 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11088 /* FIXME: there is no .debug_line_str.dwo section, \
11089 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11090 && !dwarf_split_debug_info)
11091
11092 /* Assign .debug_rnglists indexes. */
11093
11094 static void
11095 index_rnglists (void)
11096 {
11097 unsigned i;
11098 dw_ranges *r;
11099
11100 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11101 if (r->label)
11102 r->idx = rnglist_idx++;
11103 }
11104
11105 /* Emit .debug_rnglists section. */
11106
11107 static void
11108 output_rnglists (void)
11109 {
11110 unsigned i;
11111 dw_ranges *r;
11112 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11113 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11114 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11115
11116 switch_to_section (debug_ranges_section);
11117 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11118 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL, 2);
11119 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL, 3);
11120 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11121 dw2_asm_output_data (4, 0xffffffff,
11122 "Initial length escape value indicating "
11123 "64-bit DWARF extension");
11124 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11125 "Length of Range Lists");
11126 ASM_OUTPUT_LABEL (asm_out_file, l1);
11127 dw2_asm_output_data (2, dwarf_version, "DWARF Version");
11128 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11129 dw2_asm_output_data (1, 0, "Segment Size");
11130 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11131 about relocation sizes and primarily care about the size of .debug*
11132 sections in linked shared libraries and executables, then
11133 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11134 into it are usually larger than just DW_FORM_sec_offset offsets
11135 into the .debug_rnglists section. */
11136 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11137 "Offset Entry Count");
11138 if (dwarf_split_debug_info)
11139 {
11140 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11141 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11142 if (r->label)
11143 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11144 ranges_base_label, NULL);
11145 }
11146
11147 const char *lab = "";
11148 unsigned int len = vec_safe_length (ranges_table);
11149 const char *base = NULL;
11150 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11151 {
11152 int block_num = r->num;
11153
11154 if (r->label)
11155 {
11156 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11157 lab = r->label;
11158 }
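      /* A new range-list head or a possible section change invalidates any
	 base address established for the previous entries.  */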
11159 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11160 base = NULL;
11161 if (block_num > 0)
11162 {
11163 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11164 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11165
11166 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11167 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11168
11169 if (HAVE_AS_LEB128)
11170 {
11171 /* If all code is in the text section, then the compilation
11172 unit base address defaults to DW_AT_low_pc, which is the
11173 base of the text section. */
11174 if (!have_multiple_function_sections)
11175 {
11176 dw2_asm_output_data (1, DW_RLE_offset_pair,
11177 "DW_RLE_offset_pair (%s)", lab);
11178 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11179 "Range begin address (%s)", lab);
11180 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11181 "Range end address (%s)", lab);
11182 continue;
11183 }
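	      /* With multiple function sections, try to establish a
		 DW_RLE_base_address entry when the following entry is known
		 to stay in the same range list and section, so that
		 subsequent ranges can be emitted as compact
		 DW_RLE_offset_pair deltas instead of full addresses.  */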
11184 if (base == NULL)
11185 {
11186 dw_ranges *r2 = NULL;
11187 if (i < len - 1)
11188 r2 = &(*ranges_table)[i + 1];
11189 if (r2
11190 && r2->num != 0
11191 && r2->label == NULL
11192 && !r2->maybe_new_sec)
11193 {
11194 dw2_asm_output_data (1, DW_RLE_base_address,
11195 "DW_RLE_base_address (%s)", lab);
11196 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11197 "Base address (%s)", lab);
11198 strcpy (basebuf, blabel);
11199 base = basebuf;
11200 }
11201 }
11202 if (base)
11203 {
11204 dw2_asm_output_data (1, DW_RLE_offset_pair,
11205 "DW_RLE_offset_pair (%s)", lab);
11206 dw2_asm_output_delta_uleb128 (blabel, base,
11207 "Range begin address (%s)", lab);
11208 dw2_asm_output_delta_uleb128 (elabel, base,
11209 "Range end address (%s)", lab);
11210 continue;
11211 }
11212 dw2_asm_output_data (1, DW_RLE_start_length,
11213 "DW_RLE_start_length (%s)", lab);
11214 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11215 "Range begin address (%s)", lab);
11216 dw2_asm_output_delta_uleb128 (elabel, blabel,
11217 "Range length (%s)", lab);
11218 }
11219 else
11220 {
11221 dw2_asm_output_data (1, DW_RLE_start_end,
11222 "DW_RLE_start_end (%s)", lab);
11223 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11224 "Range begin address (%s)", lab);
11225 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11226 "Range end address (%s)", lab);
11227 }
11228 }
11229
11230 /* Negative block_num stands for an index into ranges_by_label. */
11231 else if (block_num < 0)
11232 {
11233 int lab_idx = - block_num - 1;
11234 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11235 const char *elabel = (*ranges_by_label)[lab_idx].end;
11236
11237 if (!have_multiple_function_sections)
11238 gcc_unreachable ();
11239 if (HAVE_AS_LEB128)
11240 {
11241 dw2_asm_output_data (1, DW_RLE_start_length,
11242 "DW_RLE_start_length (%s)", lab);
11243 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11244 "Range begin address (%s)", lab);
11245 dw2_asm_output_delta_uleb128 (elabel, blabel,
11246 "Range length (%s)", lab);
11247 }
11248 else
11249 {
11250 dw2_asm_output_data (1, DW_RLE_start_end,
11251 "DW_RLE_start_end (%s)", lab);
11252 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11253 "Range begin address (%s)", lab);
11254 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11255 "Range end address (%s)", lab);
11256 }
11257 }
11258 else
11259 dw2_asm_output_data (1, DW_RLE_end_of_list,
11260 "DW_RLE_end_of_list (%s)", lab);
11261 }
11262 ASM_OUTPUT_LABEL (asm_out_file, l2);
11263 }
11264
11265 /* Data structure containing information about input files. */
11266 struct file_info
11267 {
11268 const char *path; /* Complete file name. */
11269 const char *fname; /* File name part. */
11270 int length; /* Length of entire string. */
11271 struct dwarf_file_data * file_idx; /* Index in input file table. */
11272 int dir_idx; /* Index in directory table. */
11273 };
11274
11275 /* Data structure containing information about directories with source
11276 files. */
11277 struct dir_info
11278 {
11279 const char *path; /* Path including directory name. */
11280 int length; /* Path length. */
11281 int prefix; /* Index of directory entry which is a prefix. */
11282 int count; /* Number of files in this directory. */
11283 int dir_idx; /* Index of directory used as base. */
11284 };
11285
11286 /* Callback function for file_info comparison. We sort by looking at
11287 the directories in the path. */
11288
11289 static int
11290 file_info_cmp (const void *p1, const void *p2)
11291 {
11292 const struct file_info *const s1 = (const struct file_info *) p1;
11293 const struct file_info *const s2 = (const struct file_info *) p2;
11294 const unsigned char *cp1;
11295 const unsigned char *cp2;
11296
11297 /* Take care of file names without directories. We must return values
11298 consistent with a total order to qsort, since some implementations
11299 get confused if comparing the same pair in opposite orders does not
11300 yield opposite results. So if neither name has a directory, return 0;
11301 otherwise return 1 or -1 depending on which one has the directory. */
11302 if ((s1->path == s1->fname || s2->path == s2->fname))
11303 return (s2->path == s2->fname) - (s1->path == s1->fname);
11304
11305 cp1 = (const unsigned char *) s1->path;
11306 cp2 = (const unsigned char *) s2->path;
11307
11308 while (1)
11309 {
11310 ++cp1;
11311 ++cp2;
11312 /* Reached the end of either directory prefix? If so, handle as above. */
11313 if ((cp1 == (const unsigned char *) s1->fname)
11314 || (cp2 == (const unsigned char *) s2->fname))
11315 return ((cp2 == (const unsigned char *) s2->fname)
11316 - (cp1 == (const unsigned char *) s1->fname));
11317
11318 /* Otherwise, order by the first differing character. */
11319 else if (*cp1 != *cp2)
11320 return *cp1 - *cp2;
11321 }
11322 }
11323
11324 struct file_name_acquire_data
11325 {
11326 struct file_info *files;
11327 int used_files;
11328 int max_files;
11329 };
11330
11331 /* Traversal function for the hash table. */
11332
11333 int
11334 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
11335 {
11336 struct dwarf_file_data *d = *slot;
11337 struct file_info *fi;
11338 const char *f;
11339
11340 gcc_assert (fnad->max_files >= d->emitted_number);
11341
11342 if (! d->emitted_number)
11343 return 1;
11344
11345 gcc_assert (fnad->max_files != fnad->used_files);
11346
11347 fi = fnad->files + fnad->used_files++;
11348
11349 /* Skip all leading "./". */
11350 f = d->filename;
11351 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
11352 f += 2;
11353
11354 /* Create a new array entry. */
11355 fi->path = f;
11356 fi->length = strlen (f);
11357 fi->file_idx = d;
11358
11359 /* Search for the file name part. */
11360 f = strrchr (f, DIR_SEPARATOR);
11361 #if defined (DIR_SEPARATOR_2)
11362 {
11363 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
11364
11365 if (g != NULL)
11366 {
11367 if (f == NULL || f < g)
11368 f = g;
11369 }
11370 }
11371 #endif
11372
11373 fi->fname = f == NULL ? fi->path : f + 1;
11374 return 1;
11375 }
11376
11377 /* Helper function for output_file_names. Emit a FORM encoded
11378 string STR, with assembly comment start ENTRY_KIND and
11379 index IDX. */
11380
11381 static void
11382 output_line_string (enum dwarf_form form, const char *str,
11383 const char *entry_kind, unsigned int idx)
11384 {
11385 switch (form)
11386 {
11387 case DW_FORM_string:
11388 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
11389 break;
11390 case DW_FORM_line_strp:
11391 if (!debug_line_str_hash)
11392 debug_line_str_hash
11393 = hash_table<indirect_string_hasher>::create_ggc (10);
11394
11395 struct indirect_string_node *node;
11396 node = find_AT_string_in_table (str, debug_line_str_hash);
11397 set_indirect_string (node);
11398 node->form = form;
11399 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
11400 debug_line_str_section, "%s: %#x: \"%s\"",
11401 entry_kind, 0, node->str);
11402 break;
11403 default:
11404 gcc_unreachable ();
11405 }
11406 }
11407
11408 /* Output the directory table and the file name table. We try to minimize
11409 the total amount of memory needed. A heuristic is used to avoid large
11410 slowdowns with many input files. */
11411
11412 static void
11413 output_file_names (void)
11414 {
11415 struct file_name_acquire_data fnad;
11416 int numfiles;
11417 struct file_info *files;
11418 struct dir_info *dirs;
11419 int *saved;
11420 int *savehere;
11421 int *backmap;
11422 int ndirs;
11423 int idx_offset;
11424 int i;
11425
11426 if (!last_emitted_file)
11427 {
11428 if (dwarf_version >= 5)
11429 {
11430 dw2_asm_output_data (1, 0, "Directory entry format count");
11431 dw2_asm_output_data_uleb128 (0, "Directories count");
11432 dw2_asm_output_data (1, 0, "File name entry format count");
11433 dw2_asm_output_data_uleb128 (0, "File names count");
11434 }
11435 else
11436 {
11437 dw2_asm_output_data (1, 0, "End directory table");
11438 dw2_asm_output_data (1, 0, "End file name table");
11439 }
11440 return;
11441 }
11442
11443 numfiles = last_emitted_file->emitted_number;
11444
11445 /* Allocate the various arrays we need. */
11446 files = XALLOCAVEC (struct file_info, numfiles);
11447 dirs = XALLOCAVEC (struct dir_info, numfiles);
11448
11449 fnad.files = files;
11450 fnad.used_files = 0;
11451 fnad.max_files = numfiles;
11452 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
11453 gcc_assert (fnad.used_files == fnad.max_files);
11454
11455 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
11456
11457 /* Find all the different directories used. */
11458 dirs[0].path = files[0].path;
11459 dirs[0].length = files[0].fname - files[0].path;
11460 dirs[0].prefix = -1;
11461 dirs[0].count = 1;
11462 dirs[0].dir_idx = 0;
11463 files[0].dir_idx = 0;
11464 ndirs = 1;
11465
11466 for (i = 1; i < numfiles; i++)
11467 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
11468 && memcmp (dirs[ndirs - 1].path, files[i].path,
11469 dirs[ndirs - 1].length) == 0)
11470 {
11471 /* Same directory as last entry. */
11472 files[i].dir_idx = ndirs - 1;
11473 ++dirs[ndirs - 1].count;
11474 }
11475 else
11476 {
11477 int j;
11478
11479 /* This is a new directory. */
11480 dirs[ndirs].path = files[i].path;
11481 dirs[ndirs].length = files[i].fname - files[i].path;
11482 dirs[ndirs].count = 1;
11483 dirs[ndirs].dir_idx = ndirs;
11484 files[i].dir_idx = ndirs;
11485
11486 /* Search for a prefix. */
11487 dirs[ndirs].prefix = -1;
11488 for (j = 0; j < ndirs; j++)
11489 if (dirs[j].length < dirs[ndirs].length
11490 && dirs[j].length > 1
11491 && (dirs[ndirs].prefix == -1
11492 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
11493 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
11494 dirs[ndirs].prefix = j;
11495
11496 ++ndirs;
11497 }
11498
11499 /* Now to the actual work. We have to find a subset of the directories
11500 that allows expressing each file name, via references to the directory
11501 table, with the fewest characters. We do not do an exhaustive search
11502 over every combination of every possible prefix. Instead we use a
11503 heuristic which provides nearly optimal results in most cases and is
11504 never far off. */
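  /* For instance, if a dozen files share a long directory prefix, emitting
     that prefix once as a directory entry and referring to it from each
     file name saves roughly a dozen copies of the prefix, at the cost of a
     single directory-table entry.  */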
11505 saved = XALLOCAVEC (int, ndirs);
11506 savehere = XALLOCAVEC (int, ndirs);
11507
11508 memset (saved, '\0', ndirs * sizeof (saved[0]));
11509 for (i = 0; i < ndirs; i++)
11510 {
11511 int j;
11512 int total;
11513
11514 /* We can always save some space for the current directory. But this
11515 does not mean it will be enough to justify adding the directory. */
11516 savehere[i] = dirs[i].length;
11517 total = (savehere[i] - saved[i]) * dirs[i].count;
11518
11519 for (j = i + 1; j < ndirs; j++)
11520 {
11521 savehere[j] = 0;
11522 if (saved[j] < dirs[i].length)
11523 {
11524 /* Determine whether the dirs[i] path is a prefix of the
11525 dirs[j] path. */
11526 int k;
11527
11528 k = dirs[j].prefix;
11529 while (k != -1 && k != (int) i)
11530 k = dirs[k].prefix;
11531
11532 if (k == (int) i)
11533 {
11534 /* Yes it is. We can possibly save some memory by
11535 writing the filenames in dirs[j] relative to
11536 dirs[i]. */
11537 savehere[j] = dirs[i].length;
11538 total += (savehere[j] - saved[j]) * dirs[j].count;
11539 }
11540 }
11541 }
11542
11543 /* Check whether we can save enough to justify adding the dirs[i]
11544 directory. */
11545 if (total > dirs[i].length + 1)
11546 {
11547 /* It's worthwhile adding. */
11548 for (j = i; j < ndirs; j++)
11549 if (savehere[j] > 0)
11550 {
11551 /* Remember how much we saved for this directory so far. */
11552 saved[j] = savehere[j];
11553
11554 /* Remember the prefix directory. */
11555 dirs[j].dir_idx = i;
11556 }
11557 }
11558 }
11559
11560 /* Emit the directory name table. */
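  /* Directory index 0 is reserved for the compilation directory, so when
     the first sorted directory is a real one (nonzero length) all emitted
     directory indexes are shifted up by one.  */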
11561 idx_offset = dirs[0].length > 0 ? 1 : 0;
11562 enum dwarf_form str_form = DW_FORM_string;
11563 enum dwarf_form idx_form = DW_FORM_udata;
11564 if (dwarf_version >= 5)
11565 {
11566 const char *comp_dir = comp_dir_string ();
11567 if (comp_dir == NULL)
11568 comp_dir = "";
11569 dw2_asm_output_data (1, 1, "Directory entry format count");
11570 if (DWARF5_USE_DEBUG_LINE_STR)
11571 str_form = DW_FORM_line_strp;
11572 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
11573 dw2_asm_output_data_uleb128 (str_form, "%s",
11574 get_DW_FORM_name (str_form));
11575 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
11576 if (str_form == DW_FORM_string)
11577 {
11578 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
11579 for (i = 1 - idx_offset; i < ndirs; i++)
11580 dw2_asm_output_nstring (dirs[i].path,
11581 dirs[i].length
11582 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
11583 "Directory Entry: %#x", i + idx_offset);
11584 }
11585 else
11586 {
11587 output_line_string (str_form, comp_dir, "Directory Entry", 0);
11588 for (i = 1 - idx_offset; i < ndirs; i++)
11589 {
11590 const char *str
11591 = ggc_alloc_string (dirs[i].path,
11592 dirs[i].length
11593 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
11594 output_line_string (str_form, str, "Directory Entry",
11595 (unsigned) i + idx_offset);
11596 }
11597 }
11598 }
11599 else
11600 {
11601 for (i = 1 - idx_offset; i < ndirs; i++)
11602 dw2_asm_output_nstring (dirs[i].path,
11603 dirs[i].length
11604 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
11605 "Directory Entry: %#x", i + idx_offset);
11606
11607 dw2_asm_output_data (1, 0, "End directory table");
11608 }
11609
11610 /* We have to emit them in the order of emitted_number since that's
11611 used in the debug info generation. To do this efficiently we
11612 generate a back-mapping of the indices first. */
11613 backmap = XALLOCAVEC (int, numfiles);
11614 for (i = 0; i < numfiles; i++)
11615 backmap[files[i].file_idx->emitted_number - 1] = i;
11616
11617 if (dwarf_version >= 5)
11618 {
11619 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
11620 if (filename0 == NULL)
11621 filename0 = "";
11622 /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
11623 DW_FORM_data2. Choose one based on the number of directories
11624 and how much space the indexes would occupy in each encoding.
11625 If we have at most 256 directories, every index fits into
11626 a single byte, so DW_FORM_data1 is most compact (with at
11627 most 128 directories, DW_FORM_udata would be equally
11628 compact, but no shorter and slower to decode). */
11629 if (ndirs + idx_offset <= 256)
11630 idx_form = DW_FORM_data1;
11631 /* If there are more than 65536 directories, we have to use
11632 DW_FORM_udata, since DW_FORM_data2 can't refer to them.
11633 Otherwise, compute how much space the indexes would occupy if
11634 they all used DW_FORM_udata (SUM), compare that to the size of
11635 the DW_FORM_data2 encoding, and pick the more efficient one. */
11636 else if (ndirs + idx_offset <= 65536)
11637 {
11638 unsigned HOST_WIDE_INT sum = 1;
11639 for (i = 0; i < numfiles; i++)
11640 {
11641 int file_idx = backmap[i];
11642 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
11643 sum += size_of_uleb128 (dir_idx);
11644 }
11645 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
11646 idx_form = DW_FORM_data2;
11647 }
11648 #ifdef VMS_DEBUGGING_INFO
11649 dw2_asm_output_data (1, 4, "File name entry format count");
11650 #else
11651 dw2_asm_output_data (1, 2, "File name entry format count");
11652 #endif
11653 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
11654 dw2_asm_output_data_uleb128 (str_form, "%s",
11655 get_DW_FORM_name (str_form));
11656 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
11657 "DW_LNCT_directory_index");
11658 dw2_asm_output_data_uleb128 (idx_form, "%s",
11659 get_DW_FORM_name (idx_form));
11660 #ifdef VMS_DEBUGGING_INFO
11661 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
11662 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
11663 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
11664 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
11665 #endif
11666 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
11667
11668 output_line_string (str_form, filename0, "File Entry", 0);
11669
11670 /* Include directory index. */
11671 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
11672 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
11673 0, NULL);
11674 else
11675 dw2_asm_output_data_uleb128 (0, NULL);
11676
11677 #ifdef VMS_DEBUGGING_INFO
11678 dw2_asm_output_data_uleb128 (0, NULL);
11679 dw2_asm_output_data_uleb128 (0, NULL);
11680 #endif
11681 }
11682
11683 /* Now write all the file names. */
11684 for (i = 0; i < numfiles; i++)
11685 {
11686 int file_idx = backmap[i];
11687 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
11688
11689 #ifdef VMS_DEBUGGING_INFO
11690 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
11691
11692 /* Setting these fields can lead to debugger miscomparisons,
11693 but VMS Debug requires them to be set correctly. */
11694
11695 int ver;
11696 long long cdt;
11697 long siz;
11698 int maxfilelen = (strlen (files[file_idx].path)
11699 + dirs[dir_idx].length
11700 + MAX_VMS_VERSION_LEN + 1);
11701 char *filebuf = XALLOCAVEC (char, maxfilelen);
11702
11703 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
11704 snprintf (filebuf, maxfilelen, "%s;%d",
11705 files[file_idx].path + dirs[dir_idx].length, ver);
11706
11707 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
11708
11709 /* Include directory index. */
11710 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
11711 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
11712 dir_idx + idx_offset, NULL);
11713 else
11714 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
11715
11716 /* Modification time. */
11717 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
11718 &cdt, 0, 0, 0) == 0)
11719 ? cdt : 0, NULL);
11720
11721 /* File length in bytes. */
11722 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
11723 0, &siz, 0, 0) == 0)
11724 ? siz : 0, NULL);
11725 #else
11726 output_line_string (str_form,
11727 files[file_idx].path + dirs[dir_idx].length,
11728 "File Entry", (unsigned) i + 1);
11729
11730 /* Include directory index. */
11731 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
11732 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
11733 dir_idx + idx_offset, NULL);
11734 else
11735 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
11736
11737 if (dwarf_version >= 5)
11738 continue;
11739
11740 /* Modification time. */
11741 dw2_asm_output_data_uleb128 (0, NULL);
11742
11743 /* File length in bytes. */
11744 dw2_asm_output_data_uleb128 (0, NULL);
11745 #endif /* VMS_DEBUGGING_INFO */
11746 }
11747
11748 if (dwarf_version < 5)
11749 dw2_asm_output_data (1, 0, "End file name table");
11750 }
11751
11752
11753 /* Output one line number table into the .debug_line section. */
11754
11755 static void
11756 output_one_line_info_table (dw_line_info_table *table)
11757 {
11758 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
11759 unsigned int current_line = 1;
11760 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
11761 dw_line_info_entry *ent;
11762 size_t i;
11763
11764 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
11765 {
11766 switch (ent->opcode)
11767 {
11768 case LI_set_address:
11769 /* ??? Unfortunately, we have little choice here currently, and
11770 must always use the most general form. GCC does not know the
11771 address delta itself, so we can't use DW_LNS_advance_pc. Many
11772 ports do have length attributes which will give an upper bound
11773 on the address range. We could perhaps use length attributes
11774 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
11775 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
11776
11777 /* This can handle any delta. It takes
11778 3 + DWARF2_ADDR_SIZE bytes. */
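	  /* Extended opcode sequence: a zero escape byte, a uleb128 length
	     covering the opcode and its operand, the DW_LNE_set_address
	     opcode, and finally the address itself.  */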
11779 dw2_asm_output_data (1, 0, "set address %s", line_label);
11780 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
11781 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
11782 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
11783 break;
11784
11785 case LI_set_line:
11786 if (ent->val == current_line)
11787 {
11788 /* We still need to start a new row, so output a copy insn. */
11789 dw2_asm_output_data (1, DW_LNS_copy,
11790 "copy line %u", current_line);
11791 }
11792 else
11793 {
11794 int line_offset = ent->val - current_line;
11795 int line_delta = line_offset - DWARF_LINE_BASE;
11796
11797 current_line = ent->val;
11798 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
11799 {
11800 /* This can handle deltas from -10 to 234, using the current
11801 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
11802 This takes 1 byte. */
11803 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
11804 "line %u", current_line);
11805 }
11806 else
11807 {
11808 /* This can handle any delta. It takes at least 3 bytes,
11809 depending on the value being encoded. */
11810 dw2_asm_output_data (1, DW_LNS_advance_line,
11811 "advance to line %u", current_line);
11812 dw2_asm_output_data_sleb128 (line_offset, NULL);
11813 dw2_asm_output_data (1, DW_LNS_copy, NULL);
11814 }
11815 }
11816 break;
11817
11818 case LI_set_file:
11819 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
11820 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
11821 break;
11822
11823 case LI_set_column:
11824 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
11825 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
11826 break;
11827
11828 case LI_negate_stmt:
11829 current_is_stmt = !current_is_stmt;
11830 dw2_asm_output_data (1, DW_LNS_negate_stmt,
11831 "is_stmt %d", current_is_stmt);
11832 break;
11833
11834 case LI_set_prologue_end:
11835 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
11836 "set prologue end");
11837 break;
11838
11839 case LI_set_epilogue_begin:
11840 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
11841 "set epilogue begin");
11842 break;
11843
11844 case LI_set_discriminator:
11845 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
11846 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
11847 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
11848 dw2_asm_output_data_uleb128 (ent->val, NULL);
11849 break;
11850 }
11851 }
11852
11853 /* Emit debug info for the address of the end of the table. */
11854 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
11855 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
11856 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
11857 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
11858
11859 dw2_asm_output_data (1, 0, "end sequence");
11860 dw2_asm_output_data_uleb128 (1, NULL);
11861 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
11862 }
11863
11864 /* Output the source line number correspondence information. This
11865 information goes into the .debug_line section. */
11866
11867 static void
11868 output_line_info (bool prologue_only)
11869 {
11870 static unsigned int generation;
11871 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
11872 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
11873 bool saw_one = false;
11874 int opc;
11875
11876 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
11877 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
11878 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
11879 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
11880
11881 if (!XCOFF_DEBUGGING_INFO)
11882 {
11883 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11884 dw2_asm_output_data (4, 0xffffffff,
11885 "Initial length escape value indicating 64-bit DWARF extension");
11886 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11887 "Length of Source Line Info");
11888 }
11889
11890 ASM_OUTPUT_LABEL (asm_out_file, l1);
11891
11892 dw2_asm_output_data (2, dwarf_version, "DWARF Version");
11893 if (dwarf_version >= 5)
11894 {
11895 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11896 dw2_asm_output_data (1, 0, "Segment Size");
11897 }
11898 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
11899 ASM_OUTPUT_LABEL (asm_out_file, p1);
11900
11901 /* Define the architecture-dependent minimum instruction length (in bytes).
11902 In this implementation of DWARF, this field is used for information
11903 purposes only. Since GCC generates assembly language, we have no
11904 a priori knowledge of how many instruction bytes are generated for each
11905 source line, and therefore can use only the DW_LNE_set_address and
11906 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
11907 this as '1', which is "correct enough" for all architectures,
11908 and don't let the target override. */
11909 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
11910
11911 if (dwarf_version >= 4)
11912 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
11913 "Maximum Operations Per Instruction");
11914 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
11915 "Default is_stmt_start flag");
11916 dw2_asm_output_data (1, DWARF_LINE_BASE,
11917 "Line Base Value (Special Opcodes)");
11918 dw2_asm_output_data (1, DWARF_LINE_RANGE,
11919 "Line Range Value (Special Opcodes)");
11920 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
11921 "Special Opcode Base");
11922
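  /* State how many uleb128 operands each standard opcode below the special
     opcode base takes, as required by the line number program header.  */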
11923 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
11924 {
11925 int n_op_args;
11926 switch (opc)
11927 {
11928 case DW_LNS_advance_pc:
11929 case DW_LNS_advance_line:
11930 case DW_LNS_set_file:
11931 case DW_LNS_set_column:
11932 case DW_LNS_fixed_advance_pc:
11933 case DW_LNS_set_isa:
11934 n_op_args = 1;
11935 break;
11936 default:
11937 n_op_args = 0;
11938 break;
11939 }
11940
11941 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
11942 opc, n_op_args);
11943 }
11944
11945 /* Write out the information about the files we use. */
11946 output_file_names ();
11947 ASM_OUTPUT_LABEL (asm_out_file, p2);
11948 if (prologue_only)
11949 {
11950 /* Output the marker for the end of the line number info. */
11951 ASM_OUTPUT_LABEL (asm_out_file, l2);
11952 return;
11953 }
11954
11955 if (separate_line_info)
11956 {
11957 dw_line_info_table *table;
11958 size_t i;
11959
11960 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
11961 if (table->in_use)
11962 {
11963 output_one_line_info_table (table);
11964 saw_one = true;
11965 }
11966 }
11967 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
11968 {
11969 output_one_line_info_table (cold_text_section_line_info);
11970 saw_one = true;
11971 }
11972
11973 /* ??? Some Darwin linkers crash on a .debug_line section with no
11974 sequences. Further, merely a DW_LNE_end_sequence entry is not
11975 sufficient -- the address column must also be initialized.
11976 Make sure to output at least one set_address/end_sequence pair,
11977 choosing .text since that section is always present. */
11978 if (text_section_line_info->in_use || !saw_one)
11979 output_one_line_info_table (text_section_line_info);
11980
11981 /* Output the marker for the end of the line number info. */
11982 ASM_OUTPUT_LABEL (asm_out_file, l2);
11983 }
11984 \f
11985 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
11986
11987 static inline bool
11988 need_endianity_attribute_p (bool reverse)
11989 {
11990 return reverse && (dwarf_version >= 3 || !dwarf_strict);
11991 }
11992
11993 /* Given a pointer to a tree node for some base type, return a pointer to
11994 a DIE that describes the given type. REVERSE is true if the type is
11995 to be interpreted in the reverse storage order wrt the target order.
11996
11997 This routine must only be called for GCC type nodes that correspond to
11998 Dwarf base (fundamental) types. */
11999
12000 static dw_die_ref
12001 base_type_die (tree type, bool reverse)
12002 {
12003 dw_die_ref base_type_result;
12004 enum dwarf_type encoding;
12005 bool fpt_used = false;
12006 struct fixed_point_type_info fpt_info;
12007 tree type_bias = NULL_TREE;
12008
12009 if (TREE_CODE (type) == ERROR_MARK || TREE_CODE (type) == VOID_TYPE)
12010 return 0;
12011
12012 /* If this is a subtype that should not be emitted as a subrange type,
12013 use the base type. See subrange_type_for_debug_p. */
12014 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12015 type = TREE_TYPE (type);
12016
12017 switch (TREE_CODE (type))
12018 {
12019 case INTEGER_TYPE:
12020 if ((dwarf_version >= 4 || !dwarf_strict)
12021 && TYPE_NAME (type)
12022 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12023 && DECL_IS_BUILTIN (TYPE_NAME (type))
12024 && DECL_NAME (TYPE_NAME (type)))
12025 {
12026 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12027 if (strcmp (name, "char16_t") == 0
12028 || strcmp (name, "char32_t") == 0)
12029 {
12030 encoding = DW_ATE_UTF;
12031 break;
12032 }
12033 }
12034 if ((dwarf_version >= 3 || !dwarf_strict)
12035 && lang_hooks.types.get_fixed_point_type_info)
12036 {
12037 memset (&fpt_info, 0, sizeof (fpt_info));
12038 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12039 {
12040 fpt_used = true;
12041 encoding = ((TYPE_UNSIGNED (type))
12042 ? DW_ATE_unsigned_fixed
12043 : DW_ATE_signed_fixed);
12044 break;
12045 }
12046 }
12047 if (TYPE_STRING_FLAG (type))
12048 {
12049 if (TYPE_UNSIGNED (type))
12050 encoding = DW_ATE_unsigned_char;
12051 else
12052 encoding = DW_ATE_signed_char;
12053 }
12054 else if (TYPE_UNSIGNED (type))
12055 encoding = DW_ATE_unsigned;
12056 else
12057 encoding = DW_ATE_signed;
12058
12059 if (!dwarf_strict
12060 && lang_hooks.types.get_type_bias)
12061 type_bias = lang_hooks.types.get_type_bias (type);
12062 break;
12063
12064 case REAL_TYPE:
12065 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12066 {
12067 if (dwarf_version >= 3 || !dwarf_strict)
12068 encoding = DW_ATE_decimal_float;
12069 else
12070 encoding = DW_ATE_lo_user;
12071 }
12072 else
12073 encoding = DW_ATE_float;
12074 break;
12075
12076 case FIXED_POINT_TYPE:
12077 if (!(dwarf_version >= 3 || !dwarf_strict))
12078 encoding = DW_ATE_lo_user;
12079 else if (TYPE_UNSIGNED (type))
12080 encoding = DW_ATE_unsigned_fixed;
12081 else
12082 encoding = DW_ATE_signed_fixed;
12083 break;
12084
12085 /* Dwarf2 doesn't know anything about complex ints, so use
12086 a user-defined type for them. */
12087 case COMPLEX_TYPE:
12088 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12089 encoding = DW_ATE_complex_float;
12090 else
12091 encoding = DW_ATE_lo_user;
12092 break;
12093
12094 case BOOLEAN_TYPE:
12095 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12096 encoding = DW_ATE_boolean;
12097 break;
12098
12099 default:
12100 /* No other TREE_CODEs are Dwarf fundamental types. */
12101 gcc_unreachable ();
12102 }
12103
12104 base_type_result = new_die (DW_TAG_base_type, comp_unit_die (), type);
12105
12106 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12107 int_size_in_bytes (type));
12108 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12109
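  /* A scalar seen in reverse storage order is described with the
     endianness opposite to the target's default one.  */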
12110 if (need_endianity_attribute_p (reverse))
12111 add_AT_unsigned (base_type_result, DW_AT_endianity,
12112 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12113
12114 add_alignment_attribute (base_type_result, type);
12115
12116 if (fpt_used)
12117 {
12118 switch (fpt_info.scale_factor_kind)
12119 {
12120 case fixed_point_scale_factor_binary:
12121 add_AT_int (base_type_result, DW_AT_binary_scale,
12122 fpt_info.scale_factor.binary);
12123 break;
12124
12125 case fixed_point_scale_factor_decimal:
12126 add_AT_int (base_type_result, DW_AT_decimal_scale,
12127 fpt_info.scale_factor.decimal);
12128 break;
12129
12130 case fixed_point_scale_factor_arbitrary:
12131 /* Arbitrary scale factors cannot be described in standard DWARF,
12132 yet. */
12133 if (!dwarf_strict)
12134 {
12135 /* Describe the scale factor as a rational constant. */
12136 const dw_die_ref scale_factor
12137 = new_die (DW_TAG_constant, comp_unit_die (), type);
12138
12139 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12140 fpt_info.scale_factor.arbitrary.numerator);
12141 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12142 fpt_info.scale_factor.arbitrary.denominator);
12143
12144 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12145 }
12146 break;
12147
12148 default:
12149 gcc_unreachable ();
12150 }
12151 }
12152
12153 if (type_bias)
12154 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12155 dw_scalar_form_constant
12156 | dw_scalar_form_exprloc
12157 | dw_scalar_form_reference,
12158 NULL);
12159
12160 add_pubtype (type, base_type_result);
12161
12162 return base_type_result;
12163 }
12164
12165 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12166 named 'auto' in its type: return true for it, false otherwise. */
12167
12168 static inline bool
12169 is_cxx_auto (tree type)
12170 {
12171 if (is_cxx ())
12172 {
12173 tree name = TYPE_IDENTIFIER (type);
12174 if (name == get_identifier ("auto")
12175 || name == get_identifier ("decltype(auto)"))
12176 return true;
12177 }
12178 return false;
12179 }
12180
12181 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12182 given input type is a Dwarf "fundamental" type. Otherwise return zero. */
12183
12184 static inline int
12185 is_base_type (tree type)
12186 {
12187 switch (TREE_CODE (type))
12188 {
12189 case ERROR_MARK:
12190 case VOID_TYPE:
12191 case INTEGER_TYPE:
12192 case REAL_TYPE:
12193 case FIXED_POINT_TYPE:
12194 case COMPLEX_TYPE:
12195 case BOOLEAN_TYPE:
12196 case POINTER_BOUNDS_TYPE:
12197 return 1;
12198
12199 case ARRAY_TYPE:
12200 case RECORD_TYPE:
12201 case UNION_TYPE:
12202 case QUAL_UNION_TYPE:
12203 case ENUMERAL_TYPE:
12204 case FUNCTION_TYPE:
12205 case METHOD_TYPE:
12206 case POINTER_TYPE:
12207 case REFERENCE_TYPE:
12208 case NULLPTR_TYPE:
12209 case OFFSET_TYPE:
12210 case LANG_TYPE:
12211 case VECTOR_TYPE:
12212 return 0;
12213
12214 default:
12215 if (is_cxx_auto (type))
12216 return 0;
12217 gcc_unreachable ();
12218 }
12219
12220 return 0;
12221 }
12222
12223 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
12224 node, return the size in bits for the type if it is a constant, 0 if
12225 the size is unknown, the alignment for the type if its size is not
12226 constant, or BITS_PER_WORD if the type actually turns out to be an
12227 ERROR_MARK node. */
12228
12229 static inline unsigned HOST_WIDE_INT
12230 simple_type_size_in_bits (const_tree type)
12231 {
12232 if (TREE_CODE (type) == ERROR_MARK)
12233 return BITS_PER_WORD;
12234 else if (TYPE_SIZE (type) == NULL_TREE)
12235 return 0;
12236 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
12237 return tree_to_uhwi (TYPE_SIZE (type));
12238 else
12239 return TYPE_ALIGN (type);
12240 }
12241
12242 /* Similarly, but return an offset_int instead of UHWI. */
12243
12244 static inline offset_int
12245 offset_int_type_size_in_bits (const_tree type)
12246 {
12247 if (TREE_CODE (type) == ERROR_MARK)
12248 return BITS_PER_WORD;
12249 else if (TYPE_SIZE (type) == NULL_TREE)
12250 return 0;
12251 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
12252 return wi::to_offset (TYPE_SIZE (type));
12253 else
12254 return TYPE_ALIGN (type);
12255 }
12256
12257 /* Given a pointer to a tree node for a subrange type, return a pointer
12258 to a DIE that describes the given type. */
12259
12260 static dw_die_ref
12261 subrange_type_die (tree type, tree low, tree high, tree bias,
12262 dw_die_ref context_die)
12263 {
12264 dw_die_ref subrange_die;
12265 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
12266
12267 if (context_die == NULL)
12268 context_die = comp_unit_die ();
12269
12270 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
12271
12272 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
12273 {
12274 /* The size of the subrange type and its base type do not match,
12275 so we need to generate a size attribute for the subrange type. */
12276 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
12277 }
12278
12279 add_alignment_attribute (subrange_die, type);
12280
12281 if (low)
12282 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
12283 if (high)
12284 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
12285 if (bias && !dwarf_strict)
12286 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
12287 dw_scalar_form_constant
12288 | dw_scalar_form_exprloc
12289 | dw_scalar_form_reference,
12290 NULL);
12291
12292 return subrange_die;
12293 }
12294
12295 /* Returns the (const and/or volatile) cv_qualifiers associated with
12296 the decl node. This will normally be augmented with the
12297 cv_qualifiers of the underlying type in add_type_attribute. */
12298
12299 static int
12300 decl_quals (const_tree decl)
12301 {
12302 return ((TREE_READONLY (decl)
12303 /* The C++ front-end correctly marks reference-typed
12304 variables as readonly, but from a language (and debug
12305 info) standpoint they are not const-qualified. */
12306 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
12307 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
12308 | (TREE_THIS_VOLATILE (decl)
12309 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
12310 }
12311
12312 /* Determine the TYPE whose qualifiers match the largest strict subset
12313 of the given TYPE_QUALS, and return its qualifiers. Ignore all
12314 qualifiers outside QUAL_MASK. */
12315
12316 static int
12317 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
12318 {
12319 tree t;
12320 int best_rank = 0, best_qual = 0, max_rank;
12321
12322 type_quals &= qual_mask;
12323 max_rank = popcount_hwi (type_quals) - 1;
12324
12325 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
12326 t = TYPE_NEXT_VARIANT (t))
12327 {
12328 int q = TYPE_QUALS (t) & qual_mask;
12329
12330 if ((q & type_quals) == q && q != type_quals
12331 && check_base_type (t, type))
12332 {
12333 int rank = popcount_hwi (q);
12334
12335 if (rank > best_rank)
12336 {
12337 best_rank = rank;
12338 best_qual = q;
12339 }
12340 }
12341 }
12342
12343 return best_qual;
12344 }
12345
12346 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
12347 static const dwarf_qual_info_t dwarf_qual_info[] =
12348 {
12349 { TYPE_QUAL_CONST, DW_TAG_const_type },
12350 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
12351 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
12352 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
12353 };
12354 static const unsigned int dwarf_qual_info_size
12355 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
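/* The order of this table also defines the canonical nesting order used
   for qualified DIEs when emitting type units; see the sub_quals
   adjustment in modified_type_die.  */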
12356
12357 /* If DIE is a qualified DIE of some base DIE with the same parent,
12358 return the base DIE, otherwise return NULL. Set MASK to the
12359 qualifiers added compared to the returned DIE. */
12360
12361 static dw_die_ref
12362 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
12363 {
12364 unsigned int i;
12365 for (i = 0; i < dwarf_qual_info_size; i++)
12366 if (die->die_tag == dwarf_qual_info[i].t)
12367 break;
12368 if (i == dwarf_qual_info_size)
12369 return NULL;
12370 if (vec_safe_length (die->die_attr) != 1)
12371 return NULL;
12372 dw_die_ref type = get_AT_ref (die, DW_AT_type);
12373 if (type == NULL || type->die_parent != die->die_parent)
12374 return NULL;
12375 *mask |= dwarf_qual_info[i].q;
12376 if (depth)
12377 {
12378 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
12379 if (ret)
12380 return ret;
12381 }
12382 return type;
12383 }
12384
12385 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
12386 entry that chains the modifiers specified by CV_QUALS in front of the
12387 given type. REVERSE is true if the type is to be interpreted in the
12388 reverse storage order wrt the target order. */
12389
12390 static dw_die_ref
12391 modified_type_die (tree type, int cv_quals, bool reverse,
12392 dw_die_ref context_die)
12393 {
12394 enum tree_code code = TREE_CODE (type);
12395 dw_die_ref mod_type_die;
12396 dw_die_ref sub_die = NULL;
12397 tree item_type = NULL;
12398 tree qualified_type;
12399 tree name, low, high;
12400 dw_die_ref mod_scope;
12401 /* Only these cv-qualifiers are currently handled. */
12402 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
12403 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC);
12404
12405 if (code == ERROR_MARK)
12406 return NULL;
12407
12408 if (lang_hooks.types.get_debug_type)
12409 {
12410 tree debug_type = lang_hooks.types.get_debug_type (type);
12411
12412 if (debug_type != NULL_TREE && debug_type != type)
12413 return modified_type_die (debug_type, cv_quals, reverse, context_die);
12414 }
12415
12416 cv_quals &= cv_qual_mask;
12417
12418 /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
12419 tag modifier (and not an attribute) that old consumers won't be
12420 able to handle. */
12421 if (dwarf_version < 3)
12422 cv_quals &= ~TYPE_QUAL_RESTRICT;
12423
12424 /* Likewise for DW_TAG_atomic_type for DWARFv5. */
12425 if (dwarf_version < 5)
12426 cv_quals &= ~TYPE_QUAL_ATOMIC;
12427
12428 /* See if we already have the appropriately qualified variant of
12429 this type. */
12430 qualified_type = get_qualified_type (type, cv_quals);
12431
12432 if (qualified_type == sizetype
12433 && TYPE_NAME (qualified_type)
12434 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
12435 {
12436 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
12437
12438 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
12439 && TYPE_PRECISION (t)
12440 == TYPE_PRECISION (qualified_type)
12441 && TYPE_UNSIGNED (t)
12442 == TYPE_UNSIGNED (qualified_type));
12443 qualified_type = t;
12444 }
12445
12446 /* If we do, then we can just use its DIE, if it exists. */
12447 if (qualified_type)
12448 {
12449 mod_type_die = lookup_type_die (qualified_type);
12450
12451 /* DW_AT_endianity doesn't come from a qualifier on the type. */
12452 if (mod_type_die
12453 && (!need_endianity_attribute_p (reverse)
12454 || !is_base_type (type)
12455 || get_AT_unsigned (mod_type_die, DW_AT_endianity)))
12456 return mod_type_die;
12457 }
12458
12459 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
12460
12461 /* Handle C typedef types. */
12462 if (name && TREE_CODE (name) == TYPE_DECL && DECL_ORIGINAL_TYPE (name)
12463 && !DECL_ARTIFICIAL (name))
12464 {
12465 tree dtype = TREE_TYPE (name);
12466
12467 if (qualified_type == dtype)
12468 {
12469 /* For a named type, use the typedef. */
12470 gen_type_die (qualified_type, context_die);
12471 return lookup_type_die (qualified_type);
12472 }
12473 else
12474 {
12475 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
12476 dquals &= cv_qual_mask;
12477 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
12478 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
12479 /* cv-unqualified version of named type. Just use
12480 the unnamed type to which it refers. */
12481 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
12482 reverse, context_die);
12483 /* Else cv-qualified version of named type; fall through. */
12484 }
12485 }
12486
12487 mod_scope = scope_die_for (type, context_die);
12488
12489 if (cv_quals)
12490 {
12491 int sub_quals = 0, first_quals = 0;
12492 unsigned i;
12493 dw_die_ref first = NULL, last = NULL;
12494
12495 /* Determine a lesser qualified type that most closely matches
12496 this one. Then generate DW_TAG_* entries for the remaining
12497 qualifiers. */
12498 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
12499 cv_qual_mask);
12500 if (sub_quals && use_debug_types)
12501 {
12502 bool needed = false;
12503 /* If emitting type units, make sure the order of qualifiers
12504 is canonical. Thus, start from unqualified type if
12505 an earlier qualifier is missing in sub_quals, but some later
12506 one is present there. */
12507 for (i = 0; i < dwarf_qual_info_size; i++)
12508 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
12509 needed = true;
12510 else if (needed && (dwarf_qual_info[i].q & cv_quals))
12511 {
12512 sub_quals = 0;
12513 break;
12514 }
12515 }
12516 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
12517 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
12518 {
12519 /* As not all intermediate qualified DIEs have corresponding
12520 tree types, ensure that qualified DIEs in the same scope
12521 as their DW_AT_type are emitted after their DW_AT_type,
12522 only with other qualified DIEs for the same type possibly
12523 in between them. Determine the range of such qualified
12524 DIEs now (first being the base type, last being corresponding
12525 last qualified DIE for it). */
12526 unsigned int count = 0;
12527 first = qualified_die_p (mod_type_die, &first_quals,
12528 dwarf_qual_info_size);
12529 if (first == NULL)
12530 first = mod_type_die;
12531 gcc_assert ((first_quals & ~sub_quals) == 0);
12532 for (count = 0, last = first;
12533 count < (1U << dwarf_qual_info_size);
12534 count++, last = last->die_sib)
12535 {
12536 int quals = 0;
12537 if (last == mod_scope->die_child)
12538 break;
12539 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
12540 != first)
12541 break;
12542 }
12543 }
12544
12545 for (i = 0; i < dwarf_qual_info_size; i++)
12546 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
12547 {
12548 dw_die_ref d;
12549 if (first && first != last)
12550 {
12551 for (d = first->die_sib; ; d = d->die_sib)
12552 {
12553 int quals = 0;
12554 qualified_die_p (d, &quals, dwarf_qual_info_size);
12555 if (quals == (first_quals | dwarf_qual_info[i].q))
12556 break;
12557 if (d == last)
12558 {
12559 d = NULL;
12560 break;
12561 }
12562 }
12563 if (d)
12564 {
12565 mod_type_die = d;
12566 continue;
12567 }
12568 }
12569 if (first)
12570 {
12571 d = ggc_cleared_alloc<die_node> ();
12572 d->die_tag = dwarf_qual_info[i].t;
12573 add_child_die_after (mod_scope, d, last);
12574 last = d;
12575 }
12576 else
12577 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
12578 if (mod_type_die)
12579 add_AT_die_ref (d, DW_AT_type, mod_type_die);
12580 mod_type_die = d;
12581 first_quals |= dwarf_qual_info[i].q;
12582 }
12583 }
12584 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
12585 {
12586 dwarf_tag tag = DW_TAG_pointer_type;
12587 if (code == REFERENCE_TYPE)
12588 {
12589 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
12590 tag = DW_TAG_rvalue_reference_type;
12591 else
12592 tag = DW_TAG_reference_type;
12593 }
12594 mod_type_die = new_die (tag, mod_scope, type);
12595
12596 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
12597 simple_type_size_in_bits (type) / BITS_PER_UNIT);
12598 add_alignment_attribute (mod_type_die, type);
12599 item_type = TREE_TYPE (type);
12600
12601 addr_space_t as = TYPE_ADDR_SPACE (item_type);
12602 if (!ADDR_SPACE_GENERIC_P (as))
12603 {
12604 int action = targetm.addr_space.debug (as);
12605 if (action >= 0)
12606 {
12607 /* Positive values indicate an address_class. */
12608 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
12609 }
12610 else
12611 {
12612 /* Negative values indicate an (inverted) segment base reg. */
12613 dw_loc_descr_ref d
12614 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
12615 add_AT_loc (mod_type_die, DW_AT_segment, d);
12616 }
12617 }
12618 }
12619 else if (code == INTEGER_TYPE
12620 && TREE_TYPE (type) != NULL_TREE
12621 && subrange_type_for_debug_p (type, &low, &high))
12622 {
12623 tree bias = NULL_TREE;
12624 if (lang_hooks.types.get_type_bias)
12625 bias = lang_hooks.types.get_type_bias (type);
12626 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
12627 item_type = TREE_TYPE (type);
12628 }
12629 else if (is_base_type (type))
12630 mod_type_die = base_type_die (type, reverse);
12631 else
12632 {
12633 gen_type_die (type, context_die);
12634
12635 /* We have to get the type_main_variant here (and pass that to the
12636 `lookup_type_die' routine) because the ..._TYPE node we have
12637 might simply be a *copy* of some original type node (where the
12638 copy was created to help us keep track of typedef names) and
12639 that copy might have a different TYPE_UID from the original
12640 ..._TYPE node. */
12641 if (TREE_CODE (type) == FUNCTION_TYPE
12642 || TREE_CODE (type) == METHOD_TYPE)
12643 {
12644 /* For function/method types, can't just use type_main_variant here,
12645 because that can have different ref-qualifiers for C++,
12646 but try to canonicalize. */
12647 tree main = TYPE_MAIN_VARIANT (type);
12648 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
12649 if (check_base_type (t, main) && check_lang_type (t, type))
12650 return lookup_type_die (t);
12651 return lookup_type_die (type);
12652 }
12653 else if (TREE_CODE (type) != VECTOR_TYPE
12654 && TREE_CODE (type) != ARRAY_TYPE)
12655 return lookup_type_die (type_main_variant (type));
12656 else
12657 /* Vectors have the debugging information in the type,
12658 not the main variant. */
12659 return lookup_type_die (type);
12660 }
12661
12662 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
12663 don't output a DW_TAG_typedef, since there isn't one in the
12664 user's program; just attach a DW_AT_name to the type.
12665 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
12666 if the base type already has the same name. */
12667 if (name
12668 && ((TREE_CODE (name) != TYPE_DECL
12669 && (qualified_type == TYPE_MAIN_VARIANT (type)
12670 || (cv_quals == TYPE_UNQUALIFIED)))
12671 || (TREE_CODE (name) == TYPE_DECL
12672 && TREE_TYPE (name) == qualified_type
12673 && DECL_NAME (name))))
12674 {
12675 if (TREE_CODE (name) == TYPE_DECL)
12676 /* Could just call add_name_and_src_coords_attributes here,
12677 but since this is a builtin type it doesn't have any
12678 useful source coordinates anyway. */
12679 name = DECL_NAME (name);
12680 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
12681 }
12682 /* This probably indicates a bug. */
12683 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
12684 {
12685 name = TYPE_IDENTIFIER (type);
12686 add_name_attribute (mod_type_die,
12687 name ? IDENTIFIER_POINTER (name) : "__unknown__");
12688 }
12689
12690 if (qualified_type)
12691 equate_type_number_to_die (qualified_type, mod_type_die);
12692
12693 if (item_type)
12694 /* We must do this after the equate_type_number_to_die call, in case
12695 this is a recursive type. This ensures that the modified_type_die
12696 recursion will terminate even if the type is recursive. Recursive
12697 types are possible in Ada. */
12698 sub_die = modified_type_die (item_type,
12699 TYPE_QUALS_NO_ADDR_SPACE (item_type),
12700 reverse,
12701 context_die);
12702
12703 if (sub_die != NULL)
12704 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
12705
12706 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
12707 if (TYPE_ARTIFICIAL (type))
12708 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
12709
12710 return mod_type_die;
12711 }
12712
12713 /* Generate DIEs for the generic parameters of T.
12714 T must be either a generic type or a generic function.
12715 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
12716
12717 static void
12718 gen_generic_params_dies (tree t)
12719 {
12720 tree parms, args;
12721 int parms_num, i;
12722 dw_die_ref die = NULL;
12723 int non_default;
12724
12725 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
12726 return;
12727
12728 if (TYPE_P (t))
12729 die = lookup_type_die (t);
12730 else if (DECL_P (t))
12731 die = lookup_decl_die (t);
12732
12733 gcc_assert (die);
12734
12735 parms = lang_hooks.get_innermost_generic_parms (t);
12736 if (!parms)
12737 /* T has no generic parameter. It means T is neither a generic type
12738 nor a generic function. End of story. */
12739 return;
12740
12741 parms_num = TREE_VEC_LENGTH (parms);
12742 args = lang_hooks.get_innermost_generic_args (t);
12743 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
12744 non_default = int_cst_value (TREE_CHAIN (args));
12745 else
12746 non_default = TREE_VEC_LENGTH (args);
12747 for (i = 0; i < parms_num; i++)
12748 {
12749 tree parm, arg, arg_pack_elems;
12750 dw_die_ref parm_die;
12751
12752 parm = TREE_VEC_ELT (parms, i);
12753 arg = TREE_VEC_ELT (args, i);
12754 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
12755 gcc_assert (parm && TREE_VALUE (parm) && arg);
12756
12757 if (parm && TREE_VALUE (parm) && arg)
12758 {
12759 /* If PARM represents a template parameter pack,
12760 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
12761 by DW_TAG_template_*_parameter DIEs for the argument
12762 pack elements of ARG. Note that ARG would then be
12763 an argument pack. */
12764 if (arg_pack_elems)
12765 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
12766 arg_pack_elems,
12767 die);
12768 else
12769 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
12770 true /* emit name */, die);
12771 if (i >= non_default)
12772 add_AT_flag (parm_die, DW_AT_default_value, 1);
12773 }
12774 }
12775 }
12776
12777 /* Create and return a DIE for PARM which should be
12778 the representation of a generic type parameter.
12779 For instance, in the C++ front end, PARM would be a template parameter.
12780 ARG is the argument to PARM.
12781 EMIT_NAME_P, if true, means the DIE will have a DW_AT_name attribute set
12782 to the name of PARM.
12783 PARENT_DIE is the parent DIE to which the newly created DIE should be
12784 added as a child node. */
12785
12786 static dw_die_ref
12787 generic_parameter_die (tree parm, tree arg,
12788 bool emit_name_p,
12789 dw_die_ref parent_die)
12790 {
12791 dw_die_ref tmpl_die = NULL;
12792 const char *name = NULL;
12793
12794 if (!parm || !DECL_NAME (parm) || !arg)
12795 return NULL;
12796
12797 /* We support non-type generic parameters and arguments,
12798 type generic parameters and arguments, as well as
12799 generic generic parameters (a.k.a. template template parameters in C++)
12800 and arguments. */
12801 if (TREE_CODE (parm) == PARM_DECL)
12802 /* PARM is a non-type generic parameter. */
12803 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
12804 else if (TREE_CODE (parm) == TYPE_DECL)
12805 /* PARM is a type generic parameter. */
12806 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
12807 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
12808 /* PARM is a generic generic parameter.
12809 Its DIE is a GNU extension. It shall have a
12810 DW_AT_name attribute to represent the name of the template template
12811 parameter, and a DW_AT_GNU_template_name attribute to represent the
12812 name of the template template argument. */
12813 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
12814 parent_die, parm);
12815 else
12816 gcc_unreachable ();
12817
12818 if (tmpl_die)
12819 {
12820 tree tmpl_type;
12821
12822 /* If PARM is a generic parameter pack, it means we are
12823 emitting debug info for a template argument pack element.
12824 In other terms, ARG is a template argument pack element.
12825 In that case, we don't emit any DW_AT_name attribute for
12826 the die. */
12827 if (emit_name_p)
12828 {
12829 name = IDENTIFIER_POINTER (DECL_NAME (parm));
12830 gcc_assert (name);
12831 add_AT_string (tmpl_die, DW_AT_name, name);
12832 }
12833
12834 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
12835 {
12836 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
12837 TMPL_DIE should have a child DW_AT_type attribute that is set
12838 to the type of the argument to PARM, which is ARG.
12839 If PARM is a type generic parameter, TMPL_DIE should have a
12840 child DW_AT_type that is set to ARG. */
12841 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
12842 add_type_attribute (tmpl_die, tmpl_type,
12843 (TREE_THIS_VOLATILE (tmpl_type)
12844 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
12845 false, parent_die);
12846 }
12847 else
12848 {
12849 /* So TMPL_DIE is a DIE representing a generic generic parameter,
12850 a.k.a. a template template parameter in C++, and ARG is a
12851 template. */
12852
12853 /* The DW_AT_GNU_template_name attribute of the DIE must be set
12854 to the name of the argument. */
12855 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
12856 if (name)
12857 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
12858 }
12859
12860 if (TREE_CODE (parm) == PARM_DECL)
12861 /* So PARM is a non-type generic parameter.
12862 DWARF3 5.6.8 says we must set a DW_AT_const_value child
12863 attribute of TMPL_DIE whose value represents the value
12864 of ARG.
12865 We must be careful here:
12866 the value of ARG might reference some function decls.
12867 We might currently be emitting debug info for a generic
12868 type, and since types are emitted before function decls, we
12869 don't know whether the function decls referenced by ARG will
12870 actually be emitted after the cgraph computations.
12871 So we must defer the generation of the DW_AT_const_value to
12872 after cgraph is ready. */
12873 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
12874 }
12875
12876 return tmpl_die;
12877 }
12878
12879 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
12880 PARM_PACK, which must be a template parameter pack. The returned DIE
12881 will be a child DIE of PARENT_DIE. */
12882
12883 static dw_die_ref
12884 template_parameter_pack_die (tree parm_pack,
12885 tree parm_pack_args,
12886 dw_die_ref parent_die)
12887 {
12888 dw_die_ref die;
12889 int j;
12890
12891 gcc_assert (parent_die && parm_pack);
12892
12893 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
12894 add_name_and_src_coords_attributes (die, parm_pack);
12895 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
12896 generic_parameter_die (parm_pack,
12897 TREE_VEC_ELT (parm_pack_args, j),
12898 false /* Don't emit DW_AT_name */,
12899 die);
12900 return die;
12901 }
12902
12903 /* Given a pointer to an arbitrary ..._TYPE tree node, return true if it is
12904 an enumerated type. */
12905
12906 static inline int
12907 type_is_enum (const_tree type)
12908 {
12909 return TREE_CODE (type) == ENUMERAL_TYPE;
12910 }
12911
12912 /* Return the DBX register number described by a given RTL node. */
12913
12914 static unsigned int
12915 dbx_reg_number (const_rtx rtl)
12916 {
12917 unsigned regno = REGNO (rtl);
12918
12919 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
12920
12921 #ifdef LEAF_REG_REMAP
12922 if (crtl->uses_only_leaf_regs)
12923 {
12924 int leaf_reg = LEAF_REG_REMAP (regno);
12925 if (leaf_reg != -1)
12926 regno = (unsigned) leaf_reg;
12927 }
12928 #endif
12929
12930 regno = DBX_REGISTER_NUMBER (regno);
12931 gcc_assert (regno != INVALID_REGNUM);
12932 return regno;
12933 }
12934
12935 /* Optionally add a DW_OP_piece term to a location description expression.
12936 DW_OP_piece is only added if the location description expression does
12937 not already end with DW_OP_piece. */
12938
12939 static void
12940 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
12941 {
12942 dw_loc_descr_ref loc;
12943
12944 if (*list_head != NULL)
12945 {
12946 /* Find the end of the chain. */
12947 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
12948 ;
12949
12950 if (loc->dw_loc_opc != DW_OP_piece)
12951 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
12952 }
12953 }
12954
12955 /* Return a location descriptor that designates a machine register or
12956 zero if there is none. */
12957
12958 static dw_loc_descr_ref
12959 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
12960 {
12961 rtx regs;
12962
12963 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
12964 return 0;
12965
12966 /* We only use "frame base" when we're sure we're talking about the
12967 post-prologue local stack frame. We do this by *not* running
12968 register elimination until this point, and recognizing the special
12969 argument pointer and soft frame pointer rtx's.
12970 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
12971 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
12972 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
12973 {
12974 dw_loc_descr_ref result = NULL;
12975
12976 if (dwarf_version >= 4 || !dwarf_strict)
12977 {
12978 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
12979 initialized);
12980 if (result)
12981 add_loc_descr (&result,
12982 new_loc_descr (DW_OP_stack_value, 0, 0));
12983 }
12984 return result;
12985 }
12986
12987 regs = targetm.dwarf_register_span (rtl);
12988
12989 if (REG_NREGS (rtl) > 1 || regs)
12990 return multiple_reg_loc_descriptor (rtl, regs, initialized);
12991 else
12992 {
12993 unsigned int dbx_regnum = dbx_reg_number (rtl);
12994 if (dbx_regnum == IGNORED_DWARF_REGNUM)
12995 return 0;
12996 return one_reg_loc_descriptor (dbx_regnum, initialized);
12997 }
12998 }
12999
13000 /* Return a location descriptor that designates a machine register for
13001 a given hard register number. */
13002
13003 static dw_loc_descr_ref
13004 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13005 {
13006 dw_loc_descr_ref reg_loc_descr;
13007
13008 if (regno <= 31)
13009 reg_loc_descr
13010 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13011 else
13012 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13013
13014 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13015 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13016
13017 return reg_loc_descr;
13018 }
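/* Illustrative example: DWARF register 3 becomes the single-byte DW_OP_reg3,
   while register 40 becomes DW_OP_regx 40.  */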
13019
13020 /* Given an RTL of a register, return a location descriptor that
13021 designates a value that spans more than one register. */
13022
13023 static dw_loc_descr_ref
13024 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13025 enum var_init_status initialized)
13026 {
13027 int size, i;
13028 dw_loc_descr_ref loc_result = NULL;
13029
13030 /* Simple, contiguous registers. */
13031 if (regs == NULL_RTX)
13032 {
13033 unsigned reg = REGNO (rtl);
13034 int nregs;
13035
13036 #ifdef LEAF_REG_REMAP
13037 if (crtl->uses_only_leaf_regs)
13038 {
13039 int leaf_reg = LEAF_REG_REMAP (reg);
13040 if (leaf_reg != -1)
13041 reg = (unsigned) leaf_reg;
13042 }
13043 #endif
13044
13045 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13046 nregs = REG_NREGS (rtl);
13047
13048 size = GET_MODE_SIZE (GET_MODE (rtl)) / nregs;
13049
13050 loc_result = NULL;
13051 while (nregs--)
13052 {
13053 dw_loc_descr_ref t;
13054
13055 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13056 VAR_INIT_STATUS_INITIALIZED);
13057 add_loc_descr (&loc_result, t);
13058 add_loc_descr_op_piece (&loc_result, size);
13059 ++reg;
13060 }
13061 return loc_result;
13062 }
13063
13064 /* Now onto stupid register sets in non-contiguous locations. */
13065
13066 gcc_assert (GET_CODE (regs) == PARALLEL);
13067
13068 size = GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0)));
13069 loc_result = NULL;
13070
13071 for (i = 0; i < XVECLEN (regs, 0); ++i)
13072 {
13073 dw_loc_descr_ref t;
13074
13075 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13076 VAR_INIT_STATUS_INITIALIZED);
13077 add_loc_descr (&loc_result, t);
13078 add_loc_descr_op_piece (&loc_result, size);
13079 }
13080
13081 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13082 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13083 return loc_result;
13084 }
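/* Illustrative example: a 16-byte value held in two contiguous 8-byte
   registers whose DWARF numbers are 32 and 33 would be described as
   DW_OP_regx 32 DW_OP_piece 8 DW_OP_regx 33 DW_OP_piece 8.  */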
13085
13086 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13087
13088 /* Return a location descriptor that designates a constant i,
13089 as a compound operation from constant (i >> shift), constant shift
13090 and DW_OP_shl. */
13091
13092 static dw_loc_descr_ref
13093 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13094 {
13095 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13096 add_loc_descr (&ret, int_loc_descriptor (shift));
13097 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13098 return ret;
13099 }
13100
13101 /* Return a location descriptor that designates a constant. */
13102
13103 static dw_loc_descr_ref
13104 int_loc_descriptor (HOST_WIDE_INT i)
13105 {
13106 enum dwarf_location_atom op;
13107
13108 /* Pick the smallest representation of a constant, rather than just
13109 defaulting to the LEB encoding. */
13110 if (i >= 0)
13111 {
13112 int clz = clz_hwi (i);
13113 int ctz = ctz_hwi (i);
13114 if (i <= 31)
13115 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13116 else if (i <= 0xff)
13117 op = DW_OP_const1u;
13118 else if (i <= 0xffff)
13119 op = DW_OP_const2u;
13120 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13121 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13122 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13123 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13124 while DW_OP_const4u is 5 bytes. */
13125 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13126 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13127 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13128 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13129 while DW_OP_const4u is 5 bytes. */
13130 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13131
13132 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13133 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13134 <= 4)
13135 {
13136 /* As i >= 2**31, the double cast above will yield a negative number.
13137 Since wrapping is defined in DWARF expressions we can output big
13138 positive integers as small negative ones, regardless of the size
13139 of host wide ints.
13140
13141 Here, since the evaluator will handle 32-bit values and since i >=
13142 2**31, we know it's going to be interpreted as a negative literal:
13143 store it this way if we can do better than 5 bytes. */
13144 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
13145 }
13146 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13147 op = DW_OP_const4u;
13148
13149 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
13150 least 6 bytes: see if we can do better before falling back to it. */
13151 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13152 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
13153 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
13154 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13155 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
13156 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
13157 >= HOST_BITS_PER_WIDE_INT)
13158 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
13159 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
13160 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
13161 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
13162 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
13163 && size_of_uleb128 (i) > 6)
13164 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
13165 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
13166 else
13167 op = DW_OP_constu;
13168 }
13169 else
13170 {
13171 if (i >= -0x80)
13172 op = DW_OP_const1s;
13173 else if (i >= -0x8000)
13174 op = DW_OP_const2s;
13175 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
13176 {
13177 if (size_of_int_loc_descriptor (i) < 5)
13178 {
13179 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13180 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13181 return ret;
13182 }
13183 op = DW_OP_const4s;
13184 }
13185 else
13186 {
13187 if (size_of_int_loc_descriptor (i)
13188 < (unsigned long) 1 + size_of_sleb128 (i))
13189 {
13190 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13191 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13192 return ret;
13193 }
13194 op = DW_OP_consts;
13195 }
13196 }
13197
13198 return new_loc_descr (op, i, 0);
13199 }
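/* Worked example of the size heuristics above (an illustrative sketch,
   assuming a 64-bit HOST_WIDE_INT): i = 0x80000000 has clz = 32 and
   ctz = 31, so the shift path emits DW_OP_lit16 DW_OP_lit27 DW_OP_shl
   (3 bytes) instead of DW_OP_const4u 0x80000000 (5 bytes); and with
   DWARF2_ADDR_SIZE == 4, i = 0xfffffffe is emitted as the negative
   literal DW_OP_const1s -2 (2 bytes) instead of DW_OP_const4u (5 bytes).  */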
13200
13201 /* Likewise, for unsigned constants. */
13202
13203 static dw_loc_descr_ref
13204 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
13205 {
13206 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
13207 const unsigned HOST_WIDE_INT max_uint
13208 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
13209
13210 /* If possible, use the clever signed constants handling. */
13211 if (i <= max_int)
13212 return int_loc_descriptor ((HOST_WIDE_INT) i);
13213
13214 /* Here, we are left with positive numbers that cannot be represented as
13215 HOST_WIDE_INT, i.e.:
13216 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
13217
13218 Using a DW_OP_const4u/const8u/constu operation to encode them consumes a
13219 lot of bytes, whereas it may be better to output a negative integer:
13220 thanks to integer wrapping, we know that:
13221 x = x - 2 ** (DWARF2_ADDR_SIZE * 8)
13222 = x - 2 * (max (HOST_WIDE_INT) + 1)
13223 So numbers close to max (unsigned HOST_WIDE_INT) could be represented as
13224 small negative integers. Let's try that in cases where it will clearly
13225 improve the encoding: there is no gain turning DW_OP_const4u into
13226 DW_OP_const4s. */
13227 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
13228 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
13229 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
13230 {
13231 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
13232
13233 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
13234 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
13235 const HOST_WIDE_INT second_shift
13236 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
13237
13238 /* So we finally have:
13239 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
13240 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
13241 return int_loc_descriptor (second_shift);
13242 }
13243
13244 /* Last chance: fallback to a simple constant operation. */
13245 return new_loc_descr
13246 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13247 ? DW_OP_const4u
13248 : DW_OP_const8u,
13249 i, 0);
13250 }
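/* Worked example of the wrapping trick above (an illustrative sketch,
   assuming DWARF2_ADDR_SIZE == 8 and a 64-bit HOST_WIDE_INT): for
   i = 0xffffffffffffffff, first_shift equals max (HOST_WIDE_INT) and
   second_shift equals -1, so the value is emitted as DW_OP_const1s -1
   (2 bytes) instead of DW_OP_const8u (9 bytes).  */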
13251
13252 /* Generate and return a location description that computes the unsigned
13253 comparison of the two stack top entries (a OP b where b is the top-most
13254 entry and a is the second one). The KIND of comparison can be LT_EXPR,
13255 LE_EXPR, GT_EXPR or GE_EXPR. */
13256
13257 static dw_loc_descr_ref
13258 uint_comparison_loc_list (enum tree_code kind)
13259 {
13260 enum dwarf_location_atom op, flip_op;
13261 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
13262
13263 switch (kind)
13264 {
13265 case LT_EXPR:
13266 op = DW_OP_lt;
13267 break;
13268 case LE_EXPR:
13269 op = DW_OP_le;
13270 break;
13271 case GT_EXPR:
13272 op = DW_OP_gt;
13273 break;
13274 case GE_EXPR:
13275 op = DW_OP_ge;
13276 break;
13277 default:
13278 gcc_unreachable ();
13279 }
13280
13281 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
13282 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
13283
13284 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
13285 possible to perform unsigned comparisons: we just have to distinguish
13286 two cases:
13287
13288 1. when a and b have the same sign (as signed integers); then we should
13289 return: a OP(signed) b;
13290
13291 2. when a is a negative signed integer while b is a positive one, then a
13292 is a greater unsigned integer than b; likewise when a and b's roles
13293 are flipped.
13294
13295 So first, compare the sign of the two operands. */
13296 ret = new_loc_descr (DW_OP_over, 0, 0);
13297 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
13298 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
13299 /* If they have different signs (i.e. they have different sign bits), then
13300 the stack top value has now the sign bit set and thus it's smaller than
13301 zero. */
13302 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
13303 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
13304 add_loc_descr (&ret, bra_node);
13305
13306 /* We are in case 1. At this point, we know both operands have the same
13307 sign, so it's safe to use the built-in signed comparison. */
13308 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
13309 add_loc_descr (&ret, jmp_node);
13310
13311 /* We are in case 2. Here, we know both operands do not have the same sign,
13312 so we have to flip the signed comparison. */
13313 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
13314 tmp = new_loc_descr (flip_op, 0, 0);
13315 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
13316 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
13317 add_loc_descr (&ret, tmp);
13318
13319 /* This dummy operation is necessary to make the two branches join. */
13320 tmp = new_loc_descr (DW_OP_nop, 0, 0);
13321 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
13322 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
13323 add_loc_descr (&ret, tmp);
13324
13325 return ret;
13326 }
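/* As a sketch, the sequence built above is therefore roughly:
   DW_OP_over DW_OP_over DW_OP_xor DW_OP_lit0 DW_OP_lt DW_OP_bra <L1>
   <signed OP> DW_OP_skip <L2> L1: <flipped signed OP> L2: DW_OP_nop
   i.e. the plain signed comparison when both operands have the same sign,
   and the flipped one when they do not.  */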
13327
13328 /* Likewise, but takes the location description lists (might be destructive on
13329 them). Return NULL if either is NULL or if concatenation fails. */
13330
13331 static dw_loc_list_ref
13332 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
13333 enum tree_code kind)
13334 {
13335 if (left == NULL || right == NULL)
13336 return NULL;
13337
13338 add_loc_list (&left, right);
13339 if (left == NULL)
13340 return NULL;
13341
13342 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
13343 return left;
13344 }
13345
13346 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
13347 without actually allocating it. */
13348
13349 static unsigned long
13350 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13351 {
13352 return size_of_int_loc_descriptor (i >> shift)
13353 + size_of_int_loc_descriptor (shift)
13354 + 1;
13355 }
13356
13357 /* Return size_of_locs (int_loc_descriptor (i)) without
13358 actually allocating it. */
13359
13360 static unsigned long
13361 size_of_int_loc_descriptor (HOST_WIDE_INT i)
13362 {
13363 unsigned long s;
13364
13365 if (i >= 0)
13366 {
13367 int clz, ctz;
13368 if (i <= 31)
13369 return 1;
13370 else if (i <= 0xff)
13371 return 2;
13372 else if (i <= 0xffff)
13373 return 3;
13374 clz = clz_hwi (i);
13375 ctz = ctz_hwi (i);
13376 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13377 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13378 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
13379 - clz - 5);
13380 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13381 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13382 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
13383 - clz - 8);
13384 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13385 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13386 <= 4)
13387 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
13388 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13389 return 5;
13390 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
13391 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13392 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
13393 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
13394 - clz - 8);
13395 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
13396 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
13397 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
13398 - clz - 16);
13399 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
13400 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
13401 && s > 6)
13402 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
13403 - clz - 32);
13404 else
13405 return 1 + s;
13406 }
13407 else
13408 {
13409 if (i >= -0x80)
13410 return 2;
13411 else if (i >= -0x8000)
13412 return 3;
13413 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
13414 {
13415 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
13416 {
13417 s = size_of_int_loc_descriptor (-i) + 1;
13418 if (s < 5)
13419 return s;
13420 }
13421 return 5;
13422 }
13423 else
13424 {
13425 unsigned long r = 1 + size_of_sleb128 (i);
13426 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
13427 {
13428 s = size_of_int_loc_descriptor (-i) + 1;
13429 if (s < r)
13430 return s;
13431 }
13432 return r;
13433 }
13434 }
13435 }
13436
13437 /* Return a location description representing the "address" of an integer
13438 value. This can appear only as a top-level expression. */
13439
13440 static dw_loc_descr_ref
13441 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
13442 {
13443 int litsize;
13444 dw_loc_descr_ref loc_result = NULL;
13445
13446 if (!(dwarf_version >= 4 || !dwarf_strict))
13447 return NULL;
13448
13449 litsize = size_of_int_loc_descriptor (i);
13450 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
13451 is more compact. For DW_OP_stack_value we need:
13452 litsize + 1 (DW_OP_stack_value)
13453 and for DW_OP_implicit_value:
13454 1 (DW_OP_implicit_value) + 1 (length) + size. */
13455 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
13456 {
13457 loc_result = int_loc_descriptor (i);
13458 add_loc_descr (&loc_result,
13459 new_loc_descr (DW_OP_stack_value, 0, 0));
13460 return loc_result;
13461 }
13462
13463 loc_result = new_loc_descr (DW_OP_implicit_value,
13464 size, 0);
13465 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
13466 loc_result->dw_loc_oprnd2.v.val_int = i;
13467 return loc_result;
13468 }
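/* Sketch of the size trade-off above (assuming a 4- or 8-byte
   DWARF2_ADDR_SIZE): for a 4-byte value i = 5, litsize is 1, so
   DW_OP_lit5 DW_OP_stack_value (2 bytes) wins over
   DW_OP_implicit_value 4 <5> (6 bytes); for a value wider than
   DWARF2_ADDR_SIZE the DW_OP_implicit_value form is used instead.  */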
13469
13470 /* Return a location descriptor that designates a base+offset location. */
13471
13472 static dw_loc_descr_ref
13473 based_loc_descr (rtx reg, HOST_WIDE_INT offset,
13474 enum var_init_status initialized)
13475 {
13476 unsigned int regno;
13477 dw_loc_descr_ref result;
13478 dw_fde_ref fde = cfun->fde;
13479
13480 /* We only use "frame base" when we're sure we're talking about the
13481 post-prologue local stack frame. We do this by *not* running
13482 register elimination until this point, and recognizing the special
13483 argument pointer and soft frame pointer rtx's. */
13484 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
13485 {
13486 rtx elim = (ira_use_lra_p
13487 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
13488 : eliminate_regs (reg, VOIDmode, NULL_RTX));
13489
13490 if (elim != reg)
13491 {
13492 if (GET_CODE (elim) == PLUS)
13493 {
13494 offset += INTVAL (XEXP (elim, 1));
13495 elim = XEXP (elim, 0);
13496 }
13497 gcc_assert ((SUPPORTS_STACK_ALIGNMENT
13498 && (elim == hard_frame_pointer_rtx
13499 || elim == stack_pointer_rtx))
13500 || elim == (frame_pointer_needed
13501 ? hard_frame_pointer_rtx
13502 : stack_pointer_rtx));
13503
13504 /* If drap register is used to align stack, use frame
13505 pointer + offset to access stack variables. If stack
13506 is aligned without drap, use stack pointer + offset to
13507 access stack variables. */
13508 if (crtl->stack_realign_tried
13509 && reg == frame_pointer_rtx)
13510 {
13511 int base_reg
13512 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
13513 ? HARD_FRAME_POINTER_REGNUM
13514 : REGNO (elim));
13515 return new_reg_loc_descr (base_reg, offset);
13516 }
13517
13518 gcc_assert (frame_pointer_fb_offset_valid);
13519 offset += frame_pointer_fb_offset;
13520 return new_loc_descr (DW_OP_fbreg, offset, 0);
13521 }
13522 }
13523
13524 regno = REGNO (reg);
13525 #ifdef LEAF_REG_REMAP
13526 if (crtl->uses_only_leaf_regs)
13527 {
13528 int leaf_reg = LEAF_REG_REMAP (regno);
13529 if (leaf_reg != -1)
13530 regno = (unsigned) leaf_reg;
13531 }
13532 #endif
13533 regno = DWARF_FRAME_REGNUM (regno);
13534
13535 if (!optimize && fde
13536 && (fde->drap_reg == regno || fde->vdrap_reg == regno))
13537 {
13538 /* Use cfa+offset to represent the location of arguments passed
13539 on the stack when drap is used to align stack.
13540 Only do this when not optimizing, for optimized code var-tracking
13541 is supposed to track where the arguments live and the register
13542 used as vdrap or drap in some spot might be used for something
13543 else in other part of the routine. */
13544 return new_loc_descr (DW_OP_fbreg, offset, 0);
13545 }
13546
13547 if (regno <= 31)
13548 result = new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + regno),
13549 offset, 0);
13550 else
13551 result = new_loc_descr (DW_OP_bregx, regno, offset);
13552
13553 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13554 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13555
13556 return result;
13557 }
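/* Illustrative examples: a location at frame_pointer_rtx + 8 that
   eliminates to the canonical frame base becomes
   DW_OP_fbreg <frame_pointer_fb_offset + 8>, while a plain base register
   with DWARF number 6 and offset 16 becomes DW_OP_breg6 16.  */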
13558
13559 /* Return true if this RTL expression describes a base+offset calculation. */
13560
13561 static inline int
13562 is_based_loc (const_rtx rtl)
13563 {
13564 return (GET_CODE (rtl) == PLUS
13565 && ((REG_P (XEXP (rtl, 0))
13566 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
13567 && CONST_INT_P (XEXP (rtl, 1)))));
13568 }
13569
13570 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
13571 failed. */
13572
13573 static dw_loc_descr_ref
13574 tls_mem_loc_descriptor (rtx mem)
13575 {
13576 tree base;
13577 dw_loc_descr_ref loc_result;
13578
13579 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
13580 return NULL;
13581
13582 base = get_base_address (MEM_EXPR (mem));
13583 if (base == NULL
13584 || !VAR_P (base)
13585 || !DECL_THREAD_LOCAL_P (base))
13586 return NULL;
13587
13588 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
13589 if (loc_result == NULL)
13590 return NULL;
13591
13592 if (MEM_OFFSET (mem))
13593 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
13594
13595 return loc_result;
13596 }
13597
13598 /* Output debug info about reason why we failed to expand expression as dwarf
13599 expression. */
13600
13601 static void
13602 expansion_failed (tree expr, rtx rtl, char const *reason)
13603 {
13604 if (dump_file && (dump_flags & TDF_DETAILS))
13605 {
13606 fprintf (dump_file, "Failed to expand as dwarf: ");
13607 if (expr)
13608 print_generic_expr (dump_file, expr, dump_flags);
13609 if (rtl)
13610 {
13611 fprintf (dump_file, "\n");
13612 print_rtl (dump_file, rtl);
13613 }
13614 fprintf (dump_file, "\nReason: %s\n", reason);
13615 }
13616 }
13617
13618 /* Helper function for const_ok_for_output. */
13619
13620 static bool
13621 const_ok_for_output_1 (rtx rtl)
13622 {
13623 if (GET_CODE (rtl) == UNSPEC)
13624 {
13625 /* If delegitimize_address couldn't do anything with the UNSPEC, assume
13626 we can't express it in the debug info. */
13627 /* Don't complain about TLS UNSPECs, those are just too hard to
13628 delegitimize. Note this could be a non-decl SYMBOL_REF such as
13629 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
13630 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
13631 if (flag_checking
13632 && (XVECLEN (rtl, 0) == 0
13633 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
13634 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
13635 inform (current_function_decl
13636 ? DECL_SOURCE_LOCATION (current_function_decl)
13637 : UNKNOWN_LOCATION,
13638 #if NUM_UNSPEC_VALUES > 0
13639 "non-delegitimized UNSPEC %s (%d) found in variable location",
13640 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
13641 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
13642 XINT (rtl, 1));
13643 #else
13644 "non-delegitimized UNSPEC %d found in variable location",
13645 XINT (rtl, 1));
13646 #endif
13647 expansion_failed (NULL_TREE, rtl,
13648 "UNSPEC hasn't been delegitimized.\n");
13649 return false;
13650 }
13651
13652 if (targetm.const_not_ok_for_debug_p (rtl))
13653 {
13654 expansion_failed (NULL_TREE, rtl,
13655 "Expression rejected for debug by the backend.\n");
13656 return false;
13657 }
13658
13659 /* FIXME: Refer to PR60655. It is possible for simplification
13660 of rtl expressions in var tracking to produce such expressions.
13661 We should really identify / validate expressions
13662 enclosed in CONST that can be handled by assemblers on various
13663 targets and only handle legitimate cases here. */
13664 if (GET_CODE (rtl) != SYMBOL_REF)
13665 {
13666 if (GET_CODE (rtl) == NOT)
13667 return false;
13668 return true;
13669 }
13670
13671 if (CONSTANT_POOL_ADDRESS_P (rtl))
13672 {
13673 bool marked;
13674 get_pool_constant_mark (rtl, &marked);
13675 /* If all references to this pool constant were optimized away,
13676 it was not output and thus we can't represent it. */
13677 if (!marked)
13678 {
13679 expansion_failed (NULL_TREE, rtl,
13680 "Constant was removed from constant pool.\n");
13681 return false;
13682 }
13683 }
13684
13685 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
13686 return false;
13687
13688 /* Avoid references to external symbols in debug info; on several targets
13689 the linker might even refuse to link when linking a shared library,
13690 and in many other cases the relocations for .debug_info/.debug_loc are
13691 dropped, so the address becomes zero anyway. Hidden symbols, guaranteed
13692 to be defined within the same shared library or executable, are fine. */
13693 if (SYMBOL_REF_EXTERNAL_P (rtl))
13694 {
13695 tree decl = SYMBOL_REF_DECL (rtl);
13696
13697 if (decl == NULL || !targetm.binds_local_p (decl))
13698 {
13699 expansion_failed (NULL_TREE, rtl,
13700 "Symbol not defined in current TU.\n");
13701 return false;
13702 }
13703 }
13704
13705 return true;
13706 }
13707
13708 /* Return true if constant RTL can be emitted in DW_OP_addr or
13709 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
13710 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
13711
13712 static bool
13713 const_ok_for_output (rtx rtl)
13714 {
13715 if (GET_CODE (rtl) == SYMBOL_REF)
13716 return const_ok_for_output_1 (rtl);
13717
13718 if (GET_CODE (rtl) == CONST)
13719 {
13720 subrtx_var_iterator::array_type array;
13721 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
13722 if (!const_ok_for_output_1 (*iter))
13723 return false;
13724 return true;
13725 }
13726
13727 return true;
13728 }
13729
13730 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
13731 if possible, NULL otherwise. */
13732
13733 static dw_die_ref
13734 base_type_for_mode (machine_mode mode, bool unsignedp)
13735 {
13736 dw_die_ref type_die;
13737 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
13738
13739 if (type == NULL)
13740 return NULL;
13741 switch (TREE_CODE (type))
13742 {
13743 case INTEGER_TYPE:
13744 case REAL_TYPE:
13745 break;
13746 default:
13747 return NULL;
13748 }
13749 type_die = lookup_type_die (type);
13750 if (!type_die)
13751 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
13752 comp_unit_die ());
13753 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
13754 return NULL;
13755 return type_die;
13756 }
13757
13758 /* For an OP descriptor assumed to be in unsigned MODE, convert it to an
13759 unsigned type matching MODE, or, if MODE is narrower than or as wide as
13760 DWARF2_ADDR_SIZE, to an untyped value. Return NULL if the conversion is
13761 not possible. */
13762
13763 static dw_loc_descr_ref
13764 convert_descriptor_to_mode (machine_mode mode, dw_loc_descr_ref op)
13765 {
13766 machine_mode outer_mode = mode;
13767 dw_die_ref type_die;
13768 dw_loc_descr_ref cvt;
13769
13770 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
13771 {
13772 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
13773 return op;
13774 }
13775 type_die = base_type_for_mode (outer_mode, 1);
13776 if (type_die == NULL)
13777 return NULL;
13778 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
13779 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
13780 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
13781 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
13782 add_loc_descr (&op, cvt);
13783 return op;
13784 }
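/* Note: the DW_OP_convert emitted with a zero operand above is intended to
   request conversion back to the generic (untyped) DWARF stack value rather
   than to a specific base type DIE.  */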
13785
13786 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
13787
13788 static dw_loc_descr_ref
13789 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
13790 dw_loc_descr_ref op1)
13791 {
13792 dw_loc_descr_ref ret = op0;
13793 add_loc_descr (&ret, op1);
13794 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
13795 if (STORE_FLAG_VALUE != 1)
13796 {
13797 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
13798 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13799 }
13800 return ret;
13801 }
13802
13803 /* Return location descriptor for signed comparison OP RTL. */
13804
13805 static dw_loc_descr_ref
13806 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
13807 machine_mode mem_mode)
13808 {
13809 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
13810 dw_loc_descr_ref op0, op1;
13811 int shift;
13812
13813 if (op_mode == VOIDmode)
13814 op_mode = GET_MODE (XEXP (rtl, 1));
13815 if (op_mode == VOIDmode)
13816 return NULL;
13817
13818 if (dwarf_strict
13819 && dwarf_version < 5
13820 && (!SCALAR_INT_MODE_P (op_mode)
13821 || GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE))
13822 return NULL;
13823
13824 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
13825 VAR_INIT_STATUS_INITIALIZED);
13826 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
13827 VAR_INIT_STATUS_INITIALIZED);
13828
13829 if (op0 == NULL || op1 == NULL)
13830 return NULL;
13831
13832 if (!SCALAR_INT_MODE_P (op_mode)
13833 || GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
13834 return compare_loc_descriptor (op, op0, op1);
13835
13836 if (GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
13837 {
13838 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
13839 dw_loc_descr_ref cvt;
13840
13841 if (type_die == NULL)
13842 return NULL;
13843 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
13844 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
13845 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
13846 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
13847 add_loc_descr (&op0, cvt);
13848 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
13849 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
13850 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
13851 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
13852 add_loc_descr (&op1, cvt);
13853 return compare_loc_descriptor (op, op0, op1);
13854 }
13855
13856 shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
13857 /* For eq/ne, if the operands are known to be zero-extended,
13858 there is no need to do the fancy shifting up. */
13859 if (op == DW_OP_eq || op == DW_OP_ne)
13860 {
13861 dw_loc_descr_ref last0, last1;
13862 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
13863 ;
13864 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
13865 ;
13866 /* deref_size zero extends, and for constants we can check
13867 whether they are zero extended or not. */
13868 if (((last0->dw_loc_opc == DW_OP_deref_size
13869 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
13870 || (CONST_INT_P (XEXP (rtl, 0))
13871 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
13872 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
13873 && ((last1->dw_loc_opc == DW_OP_deref_size
13874 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
13875 || (CONST_INT_P (XEXP (rtl, 1))
13876 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
13877 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
13878 return compare_loc_descriptor (op, op0, op1);
13879
13880 /* EQ/NE comparison against constant in narrower type than
13881 DWARF2_ADDR_SIZE can be performed either as
13882 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
13883 DW_OP_{eq,ne}
13884 or
13885 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
13886 DW_OP_{eq,ne}. Pick whatever is shorter. */
13887 if (CONST_INT_P (XEXP (rtl, 1))
13888 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
13889 && (size_of_int_loc_descriptor (shift) + 1
13890 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
13891 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
13892 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
13893 & GET_MODE_MASK (op_mode))))
13894 {
13895 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
13896 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
13897 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
13898 & GET_MODE_MASK (op_mode));
13899 return compare_loc_descriptor (op, op0, op1);
13900 }
13901 }
13902 add_loc_descr (&op0, int_loc_descriptor (shift));
13903 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
13904 if (CONST_INT_P (XEXP (rtl, 1)))
13905 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
13906 else
13907 {
13908 add_loc_descr (&op1, int_loc_descriptor (shift));
13909 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
13910 }
13911 return compare_loc_descriptor (op, op0, op1);
13912 }
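/* Sketch of the shifting above: comparing two signed QImode values with a
   4-byte DWARF address size uses shift = 24, i.e. each operand is followed
   by DW_OP_lit24 DW_OP_shl so that its sign bit lands in the sign bit of
   the address-sized stack slot before the signed comparison is applied.  */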
13913
13914 /* Return location descriptor for unsigned comparison OP RTL. */
13915
13916 static dw_loc_descr_ref
13917 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
13918 machine_mode mem_mode)
13919 {
13920 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
13921 dw_loc_descr_ref op0, op1;
13922
13923 if (op_mode == VOIDmode)
13924 op_mode = GET_MODE (XEXP (rtl, 1));
13925 if (op_mode == VOIDmode)
13926 return NULL;
13927 if (!SCALAR_INT_MODE_P (op_mode))
13928 return NULL;
13929
13930 if (dwarf_strict
13931 && dwarf_version < 5
13932 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
13933 return NULL;
13934
13935 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
13936 VAR_INIT_STATUS_INITIALIZED);
13937 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
13938 VAR_INIT_STATUS_INITIALIZED);
13939
13940 if (op0 == NULL || op1 == NULL)
13941 return NULL;
13942
13943 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
13944 {
13945 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
13946 dw_loc_descr_ref last0, last1;
13947 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
13948 ;
13949 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
13950 ;
13951 if (CONST_INT_P (XEXP (rtl, 0)))
13952 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
13953 /* deref_size zero extends, so no need to mask it again. */
13954 else if (last0->dw_loc_opc != DW_OP_deref_size
13955 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
13956 {
13957 add_loc_descr (&op0, int_loc_descriptor (mask));
13958 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
13959 }
13960 if (CONST_INT_P (XEXP (rtl, 1)))
13961 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
13962 /* deref_size zero extends, so no need to mask it again. */
13963 else if (last1->dw_loc_opc != DW_OP_deref_size
13964 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
13965 {
13966 add_loc_descr (&op1, int_loc_descriptor (mask));
13967 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
13968 }
13969 }
13970 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
13971 {
13972 HOST_WIDE_INT bias = 1;
13973 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
13974 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
13975 if (CONST_INT_P (XEXP (rtl, 1)))
13976 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
13977 + INTVAL (XEXP (rtl, 1)));
13978 else
13979 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
13980 bias, 0));
13981 }
13982 return compare_loc_descriptor (op, op0, op1);
13983 }
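/* Sketch of the bias trick above: when op_mode is exactly DWARF2_ADDR_SIZE
   wide, adding 2**(DWARF2_ADDR_SIZE * 8 - 1) to both operands via
   DW_OP_plus_uconst flips their sign bits, so the signed comparison of the
   biased values yields the unsigned comparison of the originals.  */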
13984
13985 /* Return location descriptor for {U,S}{MIN,MAX}. */
13986
13987 static dw_loc_descr_ref
13988 minmax_loc_descriptor (rtx rtl, machine_mode mode,
13989 machine_mode mem_mode)
13990 {
13991 enum dwarf_location_atom op;
13992 dw_loc_descr_ref op0, op1, ret;
13993 dw_loc_descr_ref bra_node, drop_node;
13994
13995 if (dwarf_strict
13996 && dwarf_version < 5
13997 && (!SCALAR_INT_MODE_P (mode)
13998 || GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE))
13999 return NULL;
14000
14001 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14002 VAR_INIT_STATUS_INITIALIZED);
14003 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14004 VAR_INIT_STATUS_INITIALIZED);
14005
14006 if (op0 == NULL || op1 == NULL)
14007 return NULL;
14008
14009 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14010 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14011 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
14012 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14013 {
14014 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
14015 {
14016 HOST_WIDE_INT mask = GET_MODE_MASK (mode);
14017 add_loc_descr (&op0, int_loc_descriptor (mask));
14018 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14019 add_loc_descr (&op1, int_loc_descriptor (mask));
14020 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14021 }
14022 else if (GET_MODE_SIZE (mode) == DWARF2_ADDR_SIZE)
14023 {
14024 HOST_WIDE_INT bias = 1;
14025 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14026 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14027 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14028 }
14029 }
14030 else if (!SCALAR_INT_MODE_P (mode)
14031 && GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
14032 {
14033 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (mode)) * BITS_PER_UNIT;
14034 add_loc_descr (&op0, int_loc_descriptor (shift));
14035 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14036 add_loc_descr (&op1, int_loc_descriptor (shift));
14037 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14038 }
14039 else if (SCALAR_INT_MODE_P (mode)
14040 && GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE)
14041 {
14042 dw_die_ref type_die = base_type_for_mode (mode, 0);
14043 dw_loc_descr_ref cvt;
14044 if (type_die == NULL)
14045 return NULL;
14046 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14047 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14048 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14049 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14050 add_loc_descr (&op0, cvt);
14051 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14052 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14053 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14054 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14055 add_loc_descr (&op1, cvt);
14056 }
14057
14058 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14059 op = DW_OP_lt;
14060 else
14061 op = DW_OP_gt;
14062 ret = op0;
14063 add_loc_descr (&ret, op1);
14064 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14065 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14066 add_loc_descr (&ret, bra_node);
14067 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14068 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
14069 add_loc_descr (&ret, drop_node);
14070 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14071 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
14072 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
14073 && SCALAR_INT_MODE_P (mode)
14074 && GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE)
14075 ret = convert_descriptor_to_mode (mode, ret);
14076 return ret;
14077 }
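/* Sketch of the stack manipulation above for a MIN: with operands a and b,
   the sequence <a> DW_OP_dup <b> DW_OP_swap DW_OP_over DW_OP_lt
   DW_OP_bra <L> DW_OP_swap L: DW_OP_drop leaves a on the stack when
   a < b and b otherwise; MAX differs only in using DW_OP_gt.  */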
14078
14079 /* Helper function for mem_loc_descriptor. Perform OP binary op,
14080 but after converting arguments to type_die, afterwards
14081 convert back to unsigned. */
14082
14083 static dw_loc_descr_ref
14084 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
14085 machine_mode mode, machine_mode mem_mode)
14086 {
14087 dw_loc_descr_ref cvt, op0, op1;
14088
14089 if (type_die == NULL)
14090 return NULL;
14091 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14092 VAR_INIT_STATUS_INITIALIZED);
14093 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14094 VAR_INIT_STATUS_INITIALIZED);
14095 if (op0 == NULL || op1 == NULL)
14096 return NULL;
14097 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14098 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14099 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14100 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14101 add_loc_descr (&op0, cvt);
14102 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14103 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14104 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14105 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14106 add_loc_descr (&op1, cvt);
14107 add_loc_descr (&op0, op1);
14108 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
14109 return convert_descriptor_to_mode (mode, op0);
14110 }
14111
14112 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
14113 const0 is DW_OP_lit0 or corresponding typed constant,
14114 const1 is DW_OP_lit1 or corresponding typed constant
14115 and constMSB is constant with just the MSB bit set
14116 for the mode):
14117 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14118 L1: const0 DW_OP_swap
14119 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
14120 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14121 L3: DW_OP_drop
14122 L4: DW_OP_nop
14123
14124 CTZ is similar:
14125 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14126 L1: const0 DW_OP_swap
14127 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14128 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14129 L3: DW_OP_drop
14130 L4: DW_OP_nop
14131
14132 FFS is similar:
14133 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
14134 L1: const1 DW_OP_swap
14135 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14136 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14137 L3: DW_OP_drop
14138 L4: DW_OP_nop */
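/* Illustrative trace (editorial): for a 32-bit CLZ of 0x10000000 the
   input is non-zero, so DW_OP_bra transfers to L1, where const0 and
   DW_OP_swap leave the counter 0 below the value.  The L2 loop shifts
   the value left and bumps the counter three times before the constMSB
   test succeeds; L3 then drops the shifted value, leaving the expected
   result 3 on the stack.  */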
14139
14140 static dw_loc_descr_ref
14141 clz_loc_descriptor (rtx rtl, machine_mode mode,
14142 machine_mode mem_mode)
14143 {
14144 dw_loc_descr_ref op0, ret, tmp;
14145 HOST_WIDE_INT valv;
14146 dw_loc_descr_ref l1jump, l1label;
14147 dw_loc_descr_ref l2jump, l2label;
14148 dw_loc_descr_ref l3jump, l3label;
14149 dw_loc_descr_ref l4jump, l4label;
14150 rtx msb;
14151
14152 if (!SCALAR_INT_MODE_P (mode)
14153 || GET_MODE (XEXP (rtl, 0)) != mode)
14154 return NULL;
14155
14156 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14157 VAR_INIT_STATUS_INITIALIZED);
14158 if (op0 == NULL)
14159 return NULL;
14160 ret = op0;
14161 if (GET_CODE (rtl) == CLZ)
14162 {
14163 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
14164 valv = GET_MODE_BITSIZE (mode);
14165 }
14166 else if (GET_CODE (rtl) == FFS)
14167 valv = 0;
14168 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
14169 valv = GET_MODE_BITSIZE (mode);
14170 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
14171 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
14172 add_loc_descr (&ret, l1jump);
14173 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
14174 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
14175 VAR_INIT_STATUS_INITIALIZED);
14176 if (tmp == NULL)
14177 return NULL;
14178 add_loc_descr (&ret, tmp);
14179 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
14180 add_loc_descr (&ret, l4jump);
14181 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
14182 ? const1_rtx : const0_rtx,
14183 mode, mem_mode,
14184 VAR_INIT_STATUS_INITIALIZED);
14185 if (l1label == NULL)
14186 return NULL;
14187 add_loc_descr (&ret, l1label);
14188 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14189 l2label = new_loc_descr (DW_OP_dup, 0, 0);
14190 add_loc_descr (&ret, l2label);
14191 if (GET_CODE (rtl) != CLZ)
14192 msb = const1_rtx;
14193 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
14194 msb = GEN_INT (HOST_WIDE_INT_1U
14195 << (GET_MODE_BITSIZE (mode) - 1));
14196 else
14197 msb = immed_wide_int_const
14198 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
14199 GET_MODE_PRECISION (mode)), mode);
14200 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
14201 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
14202 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
14203 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
14204 else
14205 tmp = mem_loc_descriptor (msb, mode, mem_mode,
14206 VAR_INIT_STATUS_INITIALIZED);
14207 if (tmp == NULL)
14208 return NULL;
14209 add_loc_descr (&ret, tmp);
14210 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
14211 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
14212 add_loc_descr (&ret, l3jump);
14213 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
14214 VAR_INIT_STATUS_INITIALIZED);
14215 if (tmp == NULL)
14216 return NULL;
14217 add_loc_descr (&ret, tmp);
14218 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
14219 ? DW_OP_shl : DW_OP_shr, 0, 0));
14220 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14221 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
14222 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14223 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
14224 add_loc_descr (&ret, l2jump);
14225 l3label = new_loc_descr (DW_OP_drop, 0, 0);
14226 add_loc_descr (&ret, l3label);
14227 l4label = new_loc_descr (DW_OP_nop, 0, 0);
14228 add_loc_descr (&ret, l4label);
14229 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14230 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
14231 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14232 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
14233 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14234 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
14235 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14236 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
14237 return ret;
14238 }
14239
14240 /* POPCOUNT (const0 is DW_OP_lit0 or the corresponding typed constant,
14241    const1 is DW_OP_lit1 or the corresponding typed constant):
14242 const0 DW_OP_swap
14243 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
14244 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
14245 L2: DW_OP_drop
14246
14247 PARITY is similar:
14248 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
14249 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
14250 L2: DW_OP_drop */
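/* Editorial note: each pass of the loop is meant to add the low bit of
   the remaining value to the running count (for PARITY the bit is XORed
   into a running parity flag instead) and then shift the value right by
   one, so the result accumulates one unit per set bit.  */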
14251
14252 static dw_loc_descr_ref
14253 popcount_loc_descriptor (rtx rtl, machine_mode mode,
14254 machine_mode mem_mode)
14255 {
14256 dw_loc_descr_ref op0, ret, tmp;
14257 dw_loc_descr_ref l1jump, l1label;
14258 dw_loc_descr_ref l2jump, l2label;
14259
14260 if (!SCALAR_INT_MODE_P (mode)
14261 || GET_MODE (XEXP (rtl, 0)) != mode)
14262 return NULL;
14263
14264 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14265 VAR_INIT_STATUS_INITIALIZED);
14266 if (op0 == NULL)
14267 return NULL;
14268 ret = op0;
14269 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
14270 VAR_INIT_STATUS_INITIALIZED);
14271 if (tmp == NULL)
14272 return NULL;
14273 add_loc_descr (&ret, tmp);
14274 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14275 l1label = new_loc_descr (DW_OP_dup, 0, 0);
14276 add_loc_descr (&ret, l1label);
14277 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
14278 add_loc_descr (&ret, l2jump);
14279 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
14280 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
14281 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
14282 VAR_INIT_STATUS_INITIALIZED);
14283 if (tmp == NULL)
14284 return NULL;
14285 add_loc_descr (&ret, tmp);
14286 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
14287 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
14288 ? DW_OP_plus : DW_OP_xor, 0, 0));
14289 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14290 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
14291 VAR_INIT_STATUS_INITIALIZED);
14292 add_loc_descr (&ret, tmp);
14293 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
14294 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
14295 add_loc_descr (&ret, l1jump);
14296 l2label = new_loc_descr (DW_OP_drop, 0, 0);
14297 add_loc_descr (&ret, l2label);
14298 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14299 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
14300 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14301 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
14302 return ret;
14303 }
14304
14305 /* BSWAP (constS is initial shift count, either 56 or 24):
14306 constS const0
14307 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
14308 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
14309 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
14310 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
14311 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
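/* Editorial note: the loop walks the shift count down from constS to 0
   in steps of 8.  Each pass extracts the input byte at bit position
   (constS - count), masks it with const255 and ORs it into the result
   at bit position count, so the least significant input byte ends up
   most significant and vice versa.  */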
14312
14313 static dw_loc_descr_ref
14314 bswap_loc_descriptor (rtx rtl, machine_mode mode,
14315 machine_mode mem_mode)
14316 {
14317 dw_loc_descr_ref op0, ret, tmp;
14318 dw_loc_descr_ref l1jump, l1label;
14319 dw_loc_descr_ref l2jump, l2label;
14320
14321 if (!SCALAR_INT_MODE_P (mode)
14322 || BITS_PER_UNIT != 8
14323 || (GET_MODE_BITSIZE (mode) != 32
14324 && GET_MODE_BITSIZE (mode) != 64))
14325 return NULL;
14326
14327 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14328 VAR_INIT_STATUS_INITIALIZED);
14329 if (op0 == NULL)
14330 return NULL;
14331
14332 ret = op0;
14333 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
14334 mode, mem_mode,
14335 VAR_INIT_STATUS_INITIALIZED);
14336 if (tmp == NULL)
14337 return NULL;
14338 add_loc_descr (&ret, tmp);
14339 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
14340 VAR_INIT_STATUS_INITIALIZED);
14341 if (tmp == NULL)
14342 return NULL;
14343 add_loc_descr (&ret, tmp);
14344 l1label = new_loc_descr (DW_OP_pick, 2, 0);
14345 add_loc_descr (&ret, l1label);
14346 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
14347 mode, mem_mode,
14348 VAR_INIT_STATUS_INITIALIZED);
14349 add_loc_descr (&ret, tmp);
14350 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
14351 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
14352 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
14353 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
14354 VAR_INIT_STATUS_INITIALIZED);
14355 if (tmp == NULL)
14356 return NULL;
14357 add_loc_descr (&ret, tmp);
14358 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
14359 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
14360 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
14361 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
14362 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14363 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
14364 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
14365 VAR_INIT_STATUS_INITIALIZED);
14366 add_loc_descr (&ret, tmp);
14367 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
14368 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
14369 add_loc_descr (&ret, l2jump);
14370 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
14371 VAR_INIT_STATUS_INITIALIZED);
14372 add_loc_descr (&ret, tmp);
14373 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
14374 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14375 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
14376 add_loc_descr (&ret, l1jump);
14377 l2label = new_loc_descr (DW_OP_drop, 0, 0);
14378 add_loc_descr (&ret, l2label);
14379 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14380 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
14381 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14382 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
14383 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14384 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
14385 return ret;
14386 }
14387
14388 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
14389 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
14390 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
14391 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
14392
14393 ROTATERT is similar:
14394 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
14395 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
14396 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
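/* Editorial note: a left rotate by N is computed as
   (X << N) | (X >> (BITSIZE - N)); the DW_OP_neg / DW_OP_plus_uconst
   <BITSIZE> pair forms BITSIZE - N, and the bracketed constMASK
   DW_OP_and steps truncate intermediate results when the mode is
   narrower than the DWARF address size.  ROTATERT instead uses
   BITSIZE - N for the left shift and N for the right shift.  */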
14397
14398 static dw_loc_descr_ref
14399 rotate_loc_descriptor (rtx rtl, machine_mode mode,
14400 machine_mode mem_mode)
14401 {
14402 rtx rtlop1 = XEXP (rtl, 1);
14403 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
14404 int i;
14405
14406 if (!SCALAR_INT_MODE_P (mode))
14407 return NULL;
14408
14409 if (GET_MODE (rtlop1) != VOIDmode
14410 && GET_MODE_BITSIZE (GET_MODE (rtlop1)) < GET_MODE_BITSIZE (mode))
14411 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
14412 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14413 VAR_INIT_STATUS_INITIALIZED);
14414 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
14415 VAR_INIT_STATUS_INITIALIZED);
14416 if (op0 == NULL || op1 == NULL)
14417 return NULL;
14418 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
14419 for (i = 0; i < 2; i++)
14420 {
14421 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
14422 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
14423 mode, mem_mode,
14424 VAR_INIT_STATUS_INITIALIZED);
14425 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
14426 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
14427 ? DW_OP_const4u
14428 : HOST_BITS_PER_WIDE_INT == 64
14429 ? DW_OP_const8u : DW_OP_constu,
14430 GET_MODE_MASK (mode), 0);
14431 else
14432 mask[i] = NULL;
14433 if (mask[i] == NULL)
14434 return NULL;
14435 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
14436 }
14437 ret = op0;
14438 add_loc_descr (&ret, op1);
14439 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14440 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14441 if (GET_CODE (rtl) == ROTATERT)
14442 {
14443 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14444 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
14445 GET_MODE_BITSIZE (mode), 0));
14446 }
14447 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
14448 if (mask[0] != NULL)
14449 add_loc_descr (&ret, mask[0]);
14450 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
14451 if (mask[1] != NULL)
14452 {
14453 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14454 add_loc_descr (&ret, mask[1]);
14455 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14456 }
14457 if (GET_CODE (rtl) == ROTATE)
14458 {
14459 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14460 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
14461 GET_MODE_BITSIZE (mode), 0));
14462 }
14463 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
14464 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
14465 return ret;
14466 }
14467
14468 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
14469 for DEBUG_PARAMETER_REF RTL. */
14470
14471 static dw_loc_descr_ref
14472 parameter_ref_descriptor (rtx rtl)
14473 {
14474 dw_loc_descr_ref ret;
14475 dw_die_ref ref;
14476
14477 if (dwarf_strict)
14478 return NULL;
14479 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
14480 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
14481 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
14482 if (ref)
14483 {
14484 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14485 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
14486 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
14487 }
14488 else
14489 {
14490 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
14491 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
14492 }
14493 return ret;
14494 }
14495
14496 /* The following routine converts the RTL for a variable or parameter
14497 (resident in memory) into an equivalent Dwarf representation of a
14498 mechanism for getting the address of that same variable onto the top of a
14499 hypothetical "address evaluation" stack.
14500
14501 When creating memory location descriptors, we are effectively transforming
14502 the RTL for a memory-resident object into its Dwarf postfix expression
14503 equivalent. This routine recursively descends an RTL tree, turning
14504 it into Dwarf postfix code as it goes.
14505
14506 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
14507
14508 MEM_MODE is the mode of the memory reference, needed to handle some
14509 autoincrement addressing modes.
14510
14511 Return 0 if we can't represent the location. */
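/* For illustration (editorial): a simple frame-based address such as
   (plus (reg frame_pointer) (const_int 8)) is typically reduced by
   based_loc_descr to a single DW_OP_fbreg or DW_OP_breg<n> descriptor
   with offset 8, while more involved RTL is built up from the cases in
   the switch below.  */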
14512
14513 dw_loc_descr_ref
14514 mem_loc_descriptor (rtx rtl, machine_mode mode,
14515 machine_mode mem_mode,
14516 enum var_init_status initialized)
14517 {
14518 dw_loc_descr_ref mem_loc_result = NULL;
14519 enum dwarf_location_atom op;
14520 dw_loc_descr_ref op0, op1;
14521 rtx inner = NULL_RTX;
14522
14523 if (mode == VOIDmode)
14524 mode = GET_MODE (rtl);
14525
14526 /* Note that for a dynamically sized array, the location we will generate a
14527 description of here will be the lowest numbered location which is
14528 actually within the array. That's *not* necessarily the same as the
14529 zeroth element of the array. */
14530
14531 rtl = targetm.delegitimize_address (rtl);
14532
14533 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
14534 return NULL;
14535
14536 switch (GET_CODE (rtl))
14537 {
14538 case POST_INC:
14539 case POST_DEC:
14540 case POST_MODIFY:
14541 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
14542
14543 case SUBREG:
14544 /* The case of a subreg may arise when we have a local (register)
14545 variable or a formal (register) parameter which doesn't quite fill
14546 up an entire register. For now, just assume that it is
14547 legitimate to make the Dwarf info refer to the whole register which
14548 contains the given subreg. */
14549 if (!subreg_lowpart_p (rtl))
14550 break;
14551 inner = SUBREG_REG (rtl);
14552 /* FALLTHRU */
14553 case TRUNCATE:
14554 if (inner == NULL_RTX)
14555 inner = XEXP (rtl, 0);
14556 if (SCALAR_INT_MODE_P (mode)
14557 && SCALAR_INT_MODE_P (GET_MODE (inner))
14558 && (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE
14559 #ifdef POINTERS_EXTEND_UNSIGNED
14560 || (mode == Pmode && mem_mode != VOIDmode)
14561 #endif
14562 )
14563 && GET_MODE_SIZE (GET_MODE (inner)) <= DWARF2_ADDR_SIZE)
14564 {
14565 mem_loc_result = mem_loc_descriptor (inner,
14566 GET_MODE (inner),
14567 mem_mode, initialized);
14568 break;
14569 }
14570 if (dwarf_strict && dwarf_version < 5)
14571 break;
14572 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (inner)))
14573 break;
14574 if (GET_MODE_SIZE (mode) != GET_MODE_SIZE (GET_MODE (inner))
14575 && (!SCALAR_INT_MODE_P (mode)
14576 || !SCALAR_INT_MODE_P (GET_MODE (inner))))
14577 break;
14578 else
14579 {
14580 dw_die_ref type_die;
14581 dw_loc_descr_ref cvt;
14582
14583 mem_loc_result = mem_loc_descriptor (inner,
14584 GET_MODE (inner),
14585 mem_mode, initialized);
14586 if (mem_loc_result == NULL)
14587 break;
14588 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
14589 if (type_die == NULL)
14590 {
14591 mem_loc_result = NULL;
14592 break;
14593 }
14594 if (GET_MODE_SIZE (mode)
14595 != GET_MODE_SIZE (GET_MODE (inner)))
14596 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14597 else
14598 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
14599 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14600 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14601 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14602 add_loc_descr (&mem_loc_result, cvt);
14603 if (SCALAR_INT_MODE_P (mode)
14604 && GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14605 {
14606 /* Convert it to untyped afterwards. */
14607 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14608 add_loc_descr (&mem_loc_result, cvt);
14609 }
14610 }
14611 break;
14612
14613 case REG:
14614 if (! SCALAR_INT_MODE_P (mode)
14615 || (GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE
14616 && rtl != arg_pointer_rtx
14617 && rtl != frame_pointer_rtx
14618 #ifdef POINTERS_EXTEND_UNSIGNED
14619 && (mode != Pmode || mem_mode == VOIDmode)
14620 #endif
14621 ))
14622 {
14623 dw_die_ref type_die;
14624 unsigned int dbx_regnum;
14625
14626 if (dwarf_strict && dwarf_version < 5)
14627 break;
14628 if (REGNO (rtl) > FIRST_PSEUDO_REGISTER)
14629 break;
14630 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
14631 if (type_die == NULL)
14632 break;
14633
14634 dbx_regnum = dbx_reg_number (rtl);
14635 if (dbx_regnum == IGNORED_DWARF_REGNUM)
14636 break;
14637 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
14638 dbx_regnum, 0);
14639 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
14640 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
14641 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
14642 break;
14643 }
14644 /* Whenever a register number forms a part of the description of the
14645 method for calculating the (dynamic) address of a memory resident
14646 object, DWARF rules require the register number be referred to as
14647 a "base register". This distinction is not based in any way upon
14648 what category of register the hardware believes the given register
14649 belongs to. This is strictly DWARF terminology we're dealing with
14650 here. Note that in cases where the location of a memory-resident
14651 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
14652 OP_CONST (0)) the actual DWARF location descriptor that we generate
14653 may just be OP_BASEREG (basereg). This may look deceptively like
14654 the object in question was allocated to a register (rather than in
14655 memory) so DWARF consumers need to be aware of the subtle
14656 distinction between OP_REG and OP_BASEREG. */
14657 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
14658 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
14659 else if (stack_realign_drap
14660 && crtl->drap_reg
14661 && crtl->args.internal_arg_pointer == rtl
14662 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
14663 {
14664 /* If RTL is internal_arg_pointer, which has been optimized
14665 out, use DRAP instead. */
14666 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
14667 VAR_INIT_STATUS_INITIALIZED);
14668 }
14669 break;
14670
14671 case SIGN_EXTEND:
14672 case ZERO_EXTEND:
14673 if (!SCALAR_INT_MODE_P (mode))
14674 break;
14675 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
14676 mem_mode, VAR_INIT_STATUS_INITIALIZED);
14677 if (op0 == 0)
14678 break;
14679 else if (GET_CODE (rtl) == ZERO_EXTEND
14680 && GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE
14681 && GET_MODE_BITSIZE (GET_MODE (XEXP (rtl, 0)))
14682 < HOST_BITS_PER_WIDE_INT
14683 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
14684 to expand zero extend as two shifts instead of
14685 masking. */
14686 && GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0))) <= 4)
14687 {
14688 machine_mode imode = GET_MODE (XEXP (rtl, 0));
14689 mem_loc_result = op0;
14690 add_loc_descr (&mem_loc_result,
14691 int_loc_descriptor (GET_MODE_MASK (imode)));
14692 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
14693 }
14694 else if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14695 {
14696 int shift = DWARF2_ADDR_SIZE
14697 - GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0)));
14698 shift *= BITS_PER_UNIT;
14699 if (GET_CODE (rtl) == SIGN_EXTEND)
14700 op = DW_OP_shra;
14701 else
14702 op = DW_OP_shr;
14703 mem_loc_result = op0;
14704 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
14705 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
14706 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
14707 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
14708 }
14709 else if (!dwarf_strict || dwarf_version >= 5)
14710 {
14711 dw_die_ref type_die1, type_die2;
14712 dw_loc_descr_ref cvt;
14713
14714 type_die1 = base_type_for_mode (GET_MODE (XEXP (rtl, 0)),
14715 GET_CODE (rtl) == ZERO_EXTEND);
14716 if (type_die1 == NULL)
14717 break;
14718 type_die2 = base_type_for_mode (mode, 1);
14719 if (type_die2 == NULL)
14720 break;
14721 mem_loc_result = op0;
14722 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14723 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14724 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
14725 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14726 add_loc_descr (&mem_loc_result, cvt);
14727 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14728 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14729 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
14730 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14731 add_loc_descr (&mem_loc_result, cvt);
14732 }
14733 break;
14734
14735 case MEM:
14736 {
14737 rtx new_rtl = avoid_constant_pool_reference (rtl);
14738 if (new_rtl != rtl)
14739 {
14740 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
14741 initialized);
14742 if (mem_loc_result != NULL)
14743 return mem_loc_result;
14744 }
14745 }
14746 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
14747 get_address_mode (rtl), mode,
14748 VAR_INIT_STATUS_INITIALIZED);
14749 if (mem_loc_result == NULL)
14750 mem_loc_result = tls_mem_loc_descriptor (rtl);
14751 if (mem_loc_result != NULL)
14752 {
14753 if (GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE
14754 	      || !SCALAR_INT_MODE_P (mode))
14755 {
14756 dw_die_ref type_die;
14757 dw_loc_descr_ref deref;
14758
14759 if (dwarf_strict && dwarf_version < 5)
14760 return NULL;
14761 type_die
14762 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
14763 if (type_die == NULL)
14764 return NULL;
14765 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type),
14766 GET_MODE_SIZE (mode), 0);
14767 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
14768 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
14769 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
14770 add_loc_descr (&mem_loc_result, deref);
14771 }
14772 else if (GET_MODE_SIZE (mode) == DWARF2_ADDR_SIZE)
14773 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
14774 else
14775 add_loc_descr (&mem_loc_result,
14776 new_loc_descr (DW_OP_deref_size,
14777 GET_MODE_SIZE (mode), 0));
14778 }
14779 break;
14780
14781 case LO_SUM:
14782 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
14783
14784 case LABEL_REF:
14785 /* Some ports can transform a symbol ref into a label ref, because
14786 the symbol ref is too far away and has to be dumped into a constant
14787 pool. */
14788 case CONST:
14789 case SYMBOL_REF:
14790 if (!SCALAR_INT_MODE_P (mode)
14791 || (GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE
14792 #ifdef POINTERS_EXTEND_UNSIGNED
14793 && (mode != Pmode || mem_mode == VOIDmode)
14794 #endif
14795 ))
14796 break;
14797 if (GET_CODE (rtl) == SYMBOL_REF
14798 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14799 {
14800 dw_loc_descr_ref temp;
14801
14802 /* If this is not defined, we have no way to emit the data. */
14803 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
14804 break;
14805
14806 temp = new_addr_loc_descr (rtl, dtprel_true);
14807
14808 /* We check for DWARF 5 here because gdb did not implement
14809 DW_OP_form_tls_address until after 7.12. */
14810 mem_loc_result = new_loc_descr ((dwarf_version >= 5
14811 ? DW_OP_form_tls_address
14812 : DW_OP_GNU_push_tls_address),
14813 0, 0);
14814 add_loc_descr (&mem_loc_result, temp);
14815
14816 break;
14817 }
14818
14819 if (!const_ok_for_output (rtl))
14820 {
14821 if (GET_CODE (rtl) == CONST)
14822 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14823 initialized);
14824 break;
14825 }
14826
14827 symref:
14828 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
14829 vec_safe_push (used_rtx_array, rtl);
14830 break;
14831
14832 case CONCAT:
14833 case CONCATN:
14834 case VAR_LOCATION:
14835 case DEBUG_IMPLICIT_PTR:
14836 expansion_failed (NULL_TREE, rtl,
14837 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
14838 return 0;
14839
14840 case ENTRY_VALUE:
14841 if (dwarf_strict && dwarf_version < 5)
14842 return NULL;
14843 if (REG_P (ENTRY_VALUE_EXP (rtl)))
14844 {
14845 if (!SCALAR_INT_MODE_P (mode)
14846 || GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE)
14847 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
14848 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
14849 else
14850 {
14851 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
14852 if (dbx_regnum == IGNORED_DWARF_REGNUM)
14853 return NULL;
14854 op0 = one_reg_loc_descriptor (dbx_regnum,
14855 VAR_INIT_STATUS_INITIALIZED);
14856 }
14857 }
14858 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
14859 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
14860 {
14861 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
14862 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
14863 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
14864 return NULL;
14865 }
14866 else
14867 gcc_unreachable ();
14868 if (op0 == NULL)
14869 return NULL;
14870 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
14871 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
14872 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
14873 break;
14874
14875 case DEBUG_PARAMETER_REF:
14876 mem_loc_result = parameter_ref_descriptor (rtl);
14877 break;
14878
14879 case PRE_MODIFY:
14880 /* Extract the PLUS expression nested inside and fall into
14881 PLUS code below. */
14882 rtl = XEXP (rtl, 1);
14883 goto plus;
14884
14885 case PRE_INC:
14886 case PRE_DEC:
14887 /* Turn these into a PLUS expression and fall into the PLUS code
14888 below. */
14889 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
14890 gen_int_mode (GET_CODE (rtl) == PRE_INC
14891 ? GET_MODE_UNIT_SIZE (mem_mode)
14892 : -GET_MODE_UNIT_SIZE (mem_mode),
14893 mode));
14894
14895 /* fall through */
14896
14897 case PLUS:
14898 plus:
14899 if (is_based_loc (rtl)
14900 && (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE
14901 || XEXP (rtl, 0) == arg_pointer_rtx
14902 || XEXP (rtl, 0) == frame_pointer_rtx)
14903 && SCALAR_INT_MODE_P (mode))
14904 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
14905 INTVAL (XEXP (rtl, 1)),
14906 VAR_INIT_STATUS_INITIALIZED);
14907 else
14908 {
14909 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14910 VAR_INIT_STATUS_INITIALIZED);
14911 if (mem_loc_result == 0)
14912 break;
14913
14914 if (CONST_INT_P (XEXP (rtl, 1))
14915 && GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14916 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
14917 else
14918 {
14919 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14920 VAR_INIT_STATUS_INITIALIZED);
14921 if (op1 == 0)
14922 return NULL;
14923 add_loc_descr (&mem_loc_result, op1);
14924 add_loc_descr (&mem_loc_result,
14925 new_loc_descr (DW_OP_plus, 0, 0));
14926 }
14927 }
14928 break;
14929
14930 /* If a pseudo-reg is optimized away, it is possible for it to
14931 be replaced with a MEM containing a multiply or shift. */
14932 case MINUS:
14933 op = DW_OP_minus;
14934 goto do_binop;
14935
14936 case MULT:
14937 op = DW_OP_mul;
14938 goto do_binop;
14939
14940 case DIV:
14941 if ((!dwarf_strict || dwarf_version >= 5)
14942 && SCALAR_INT_MODE_P (mode)
14943 && GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE)
14944 {
14945 mem_loc_result = typed_binop (DW_OP_div, rtl,
14946 base_type_for_mode (mode, 0),
14947 mode, mem_mode);
14948 break;
14949 }
14950 op = DW_OP_div;
14951 goto do_binop;
14952
14953 case UMOD:
14954 op = DW_OP_mod;
14955 goto do_binop;
14956
14957 case ASHIFT:
14958 op = DW_OP_shl;
14959 goto do_shift;
14960
14961 case ASHIFTRT:
14962 op = DW_OP_shra;
14963 goto do_shift;
14964
14965 case LSHIFTRT:
14966 op = DW_OP_shr;
14967 goto do_shift;
14968
14969 do_shift:
14970 if (!SCALAR_INT_MODE_P (mode))
14971 break;
14972 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14973 VAR_INIT_STATUS_INITIALIZED);
14974 {
14975 rtx rtlop1 = XEXP (rtl, 1);
14976 if (GET_MODE (rtlop1) != VOIDmode
14977 && GET_MODE_BITSIZE (GET_MODE (rtlop1))
14978 < GET_MODE_BITSIZE (mode))
14979 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
14980 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
14981 VAR_INIT_STATUS_INITIALIZED);
14982 }
14983
14984 if (op0 == 0 || op1 == 0)
14985 break;
14986
14987 mem_loc_result = op0;
14988 add_loc_descr (&mem_loc_result, op1);
14989 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
14990 break;
14991
14992 case AND:
14993 op = DW_OP_and;
14994 goto do_binop;
14995
14996 case IOR:
14997 op = DW_OP_or;
14998 goto do_binop;
14999
15000 case XOR:
15001 op = DW_OP_xor;
15002 goto do_binop;
15003
15004 do_binop:
15005 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15006 VAR_INIT_STATUS_INITIALIZED);
15007 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15008 VAR_INIT_STATUS_INITIALIZED);
15009
15010 if (op0 == 0 || op1 == 0)
15011 break;
15012
15013 mem_loc_result = op0;
15014 add_loc_descr (&mem_loc_result, op1);
15015 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15016 break;
15017
15018 case MOD:
15019 if (GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE
15020 && (!dwarf_strict || dwarf_version >= 5))
15021 {
15022 mem_loc_result = typed_binop (DW_OP_mod, rtl,
15023 base_type_for_mode (mode, 0),
15024 mode, mem_mode);
15025 break;
15026 }
15027
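      /* Otherwise open-code the signed modulus on the DWARF stack as
         op0 - (op0 / op1) * op1, using DW_OP_over twice to duplicate
         both operands before the division.  */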
15028 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15029 VAR_INIT_STATUS_INITIALIZED);
15030 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15031 VAR_INIT_STATUS_INITIALIZED);
15032
15033 if (op0 == 0 || op1 == 0)
15034 break;
15035
15036 mem_loc_result = op0;
15037 add_loc_descr (&mem_loc_result, op1);
15038 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15039 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15040 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
15041 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
15042 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
15043 break;
15044
15045 case UDIV:
15046 if ((!dwarf_strict || dwarf_version >= 5)
15047 && SCALAR_INT_MODE_P (mode))
15048 {
15049 if (GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE)
15050 {
15051 op = DW_OP_div;
15052 goto do_binop;
15053 }
15054 mem_loc_result = typed_binop (DW_OP_div, rtl,
15055 base_type_for_mode (mode, 1),
15056 mode, mem_mode);
15057 }
15058 break;
15059
15060 case NOT:
15061 op = DW_OP_not;
15062 goto do_unop;
15063
15064 case ABS:
15065 op = DW_OP_abs;
15066 goto do_unop;
15067
15068 case NEG:
15069 op = DW_OP_neg;
15070 goto do_unop;
15071
15072 do_unop:
15073 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15074 VAR_INIT_STATUS_INITIALIZED);
15075
15076 if (op0 == 0)
15077 break;
15078
15079 mem_loc_result = op0;
15080 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15081 break;
15082
15083 case CONST_INT:
15084 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE
15085 #ifdef POINTERS_EXTEND_UNSIGNED
15086 || (mode == Pmode
15087 && mem_mode != VOIDmode
15088 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
15089 #endif
15090 )
15091 {
15092 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15093 break;
15094 }
15095 if ((!dwarf_strict || dwarf_version >= 5)
15096 && (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT
15097 || GET_MODE_BITSIZE (mode) == HOST_BITS_PER_DOUBLE_INT))
15098 {
15099 dw_die_ref type_die = base_type_for_mode (mode, 1);
15100 machine_mode amode;
15101 if (type_die == NULL)
15102 return NULL;
15103 amode = mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT,
15104 MODE_INT, 0);
15105 if (INTVAL (rtl) >= 0
15106 && amode != BLKmode
15107 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
15108 /* const DW_OP_convert <XXX> vs.
15109 DW_OP_const_type <XXX, 1, const>. */
15110 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
15111 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (mode))
15112 {
15113 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15114 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15115 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15116 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15117 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
15118 add_loc_descr (&mem_loc_result, op0);
15119 return mem_loc_result;
15120 }
15121 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
15122 INTVAL (rtl));
15123 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15124 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15125 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15126 if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
15127 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
15128 else
15129 {
15130 mem_loc_result->dw_loc_oprnd2.val_class
15131 = dw_val_class_const_double;
15132 mem_loc_result->dw_loc_oprnd2.v.val_double
15133 = double_int::from_shwi (INTVAL (rtl));
15134 }
15135 }
15136 break;
15137
15138 case CONST_DOUBLE:
15139 if (!dwarf_strict || dwarf_version >= 5)
15140 {
15141 dw_die_ref type_die;
15142
15143 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
15144 CONST_DOUBLE rtx could represent either a large integer
15145 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
15146 the value is always a floating point constant.
15147
15148 When it is an integer, a CONST_DOUBLE is used whenever
15149 the constant requires 2 HWIs to be adequately represented.
15150 We output CONST_DOUBLEs as blocks. */
15151 if (mode == VOIDmode
15152 || (GET_MODE (rtl) == VOIDmode
15153 && GET_MODE_BITSIZE (mode) != HOST_BITS_PER_DOUBLE_INT))
15154 break;
15155 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15156 if (type_die == NULL)
15157 return NULL;
15158 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
15159 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15160 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15161 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15162 #if TARGET_SUPPORTS_WIDE_INT == 0
15163 if (!SCALAR_FLOAT_MODE_P (mode))
15164 {
15165 mem_loc_result->dw_loc_oprnd2.val_class
15166 = dw_val_class_const_double;
15167 mem_loc_result->dw_loc_oprnd2.v.val_double
15168 = rtx_to_double_int (rtl);
15169 }
15170 else
15171 #endif
15172 {
15173 unsigned int length = GET_MODE_SIZE (mode);
15174 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
15175
15176 insert_float (rtl, array);
15177 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
15178 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
15179 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
15180 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
15181 }
15182 }
15183 break;
15184
15185 case CONST_WIDE_INT:
15186 if (!dwarf_strict || dwarf_version >= 5)
15187 {
15188 dw_die_ref type_die;
15189
15190 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15191 if (type_die == NULL)
15192 return NULL;
15193 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
15194 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15195 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15196 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15197 mem_loc_result->dw_loc_oprnd2.val_class
15198 = dw_val_class_wide_int;
15199 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
15200 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
15201 }
15202 break;
15203
15204 case EQ:
15205 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
15206 break;
15207
15208 case GE:
15209 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
15210 break;
15211
15212 case GT:
15213 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
15214 break;
15215
15216 case LE:
15217 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
15218 break;
15219
15220 case LT:
15221 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
15222 break;
15223
15224 case NE:
15225 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
15226 break;
15227
15228 case GEU:
15229 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
15230 break;
15231
15232 case GTU:
15233 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
15234 break;
15235
15236 case LEU:
15237 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
15238 break;
15239
15240 case LTU:
15241 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
15242 break;
15243
15244 case UMIN:
15245 case UMAX:
15246 if (!SCALAR_INT_MODE_P (mode))
15247 break;
15248 /* FALLTHRU */
15249 case SMIN:
15250 case SMAX:
15251 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
15252 break;
15253
15254 case ZERO_EXTRACT:
15255 case SIGN_EXTRACT:
15256 if (CONST_INT_P (XEXP (rtl, 1))
15257 && CONST_INT_P (XEXP (rtl, 2))
15258 && ((unsigned) INTVAL (XEXP (rtl, 1))
15259 + (unsigned) INTVAL (XEXP (rtl, 2))
15260 <= GET_MODE_BITSIZE (mode))
15261 && SCALAR_INT_MODE_P (mode)
15262 && GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE
15263 && GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0))) <= DWARF2_ADDR_SIZE)
15264 {
15265 int shift, size;
15266 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
15267 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15268 if (op0 == 0)
15269 break;
15270 if (GET_CODE (rtl) == SIGN_EXTRACT)
15271 op = DW_OP_shra;
15272 else
15273 op = DW_OP_shr;
15274 mem_loc_result = op0;
15275 size = INTVAL (XEXP (rtl, 1));
15276 shift = INTVAL (XEXP (rtl, 2));
15277 if (BITS_BIG_ENDIAN)
15278 shift = GET_MODE_BITSIZE (GET_MODE (XEXP (rtl, 0)))
15279 - shift - size;
15280 if (shift + size != (int) DWARF2_ADDR_SIZE)
15281 {
15282 add_loc_descr (&mem_loc_result,
15283 int_loc_descriptor (DWARF2_ADDR_SIZE
15284 - shift - size));
15285 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15286 }
15287 if (size != (int) DWARF2_ADDR_SIZE)
15288 {
15289 add_loc_descr (&mem_loc_result,
15290 int_loc_descriptor (DWARF2_ADDR_SIZE - size));
15291 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15292 }
15293 }
15294 break;
15295
15296 case IF_THEN_ELSE:
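      /* Editorial note: the conditional select is open-coded on the
         DWARF stack: the "then" value, the "else" value and the
         condition are pushed in that order; DW_OP_bra consumes the
         condition and, when it is non-zero, jumps past the DW_OP_swap
         so the final DW_OP_drop discards the "else" value; otherwise
         the swap makes the drop discard the "then" value instead.  */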
15297 {
15298 dw_loc_descr_ref op2, bra_node, drop_node;
15299 op0 = mem_loc_descriptor (XEXP (rtl, 0),
15300 GET_MODE (XEXP (rtl, 0)) == VOIDmode
15301 ? word_mode : GET_MODE (XEXP (rtl, 0)),
15302 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15303 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15304 VAR_INIT_STATUS_INITIALIZED);
15305 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
15306 VAR_INIT_STATUS_INITIALIZED);
15307 if (op0 == NULL || op1 == NULL || op2 == NULL)
15308 break;
15309
15310 mem_loc_result = op1;
15311 add_loc_descr (&mem_loc_result, op2);
15312 add_loc_descr (&mem_loc_result, op0);
15313 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
15314 add_loc_descr (&mem_loc_result, bra_node);
15315 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
15316 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
15317 add_loc_descr (&mem_loc_result, drop_node);
15318 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
15319 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
15320 }
15321 break;
15322
15323 case FLOAT_EXTEND:
15324 case FLOAT_TRUNCATE:
15325 case FLOAT:
15326 case UNSIGNED_FLOAT:
15327 case FIX:
15328 case UNSIGNED_FIX:
15329 if (!dwarf_strict || dwarf_version >= 5)
15330 {
15331 dw_die_ref type_die;
15332 dw_loc_descr_ref cvt;
15333
15334 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
15335 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15336 if (op0 == NULL)
15337 break;
15338 if (SCALAR_INT_MODE_P (GET_MODE (XEXP (rtl, 0)))
15339 && (GET_CODE (rtl) == FLOAT
15340 || GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0)))
15341 <= DWARF2_ADDR_SIZE))
15342 {
15343 type_die = base_type_for_mode (GET_MODE (XEXP (rtl, 0)),
15344 GET_CODE (rtl) == UNSIGNED_FLOAT);
15345 if (type_die == NULL)
15346 break;
15347 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15348 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15349 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15350 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15351 add_loc_descr (&op0, cvt);
15352 }
15353 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
15354 if (type_die == NULL)
15355 break;
15356 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15357 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15358 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15359 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15360 add_loc_descr (&op0, cvt);
15361 if (SCALAR_INT_MODE_P (mode)
15362 && (GET_CODE (rtl) == FIX
15363 || GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE))
15364 {
15365 op0 = convert_descriptor_to_mode (mode, op0);
15366 if (op0 == NULL)
15367 break;
15368 }
15369 mem_loc_result = op0;
15370 }
15371 break;
15372
15373 case CLZ:
15374 case CTZ:
15375 case FFS:
15376 mem_loc_result = clz_loc_descriptor (rtl, mode, mem_mode);
15377 break;
15378
15379 case POPCOUNT:
15380 case PARITY:
15381 mem_loc_result = popcount_loc_descriptor (rtl, mode, mem_mode);
15382 break;
15383
15384 case BSWAP:
15385 mem_loc_result = bswap_loc_descriptor (rtl, mode, mem_mode);
15386 break;
15387
15388 case ROTATE:
15389 case ROTATERT:
15390 mem_loc_result = rotate_loc_descriptor (rtl, mode, mem_mode);
15391 break;
15392
15393 case COMPARE:
15394 /* In theory, we could implement the above. */
15395 /* DWARF cannot represent the unsigned compare operations
15396 natively. */
15397 case SS_MULT:
15398 case US_MULT:
15399 case SS_DIV:
15400 case US_DIV:
15401 case SS_PLUS:
15402 case US_PLUS:
15403 case SS_MINUS:
15404 case US_MINUS:
15405 case SS_NEG:
15406 case US_NEG:
15407 case SS_ABS:
15408 case SS_ASHIFT:
15409 case US_ASHIFT:
15410 case SS_TRUNCATE:
15411 case US_TRUNCATE:
15412 case UNORDERED:
15413 case ORDERED:
15414 case UNEQ:
15415 case UNGE:
15416 case UNGT:
15417 case UNLE:
15418 case UNLT:
15419 case LTGT:
15420 case FRACT_CONVERT:
15421 case UNSIGNED_FRACT_CONVERT:
15422 case SAT_FRACT:
15423 case UNSIGNED_SAT_FRACT:
15424 case SQRT:
15425 case ASM_OPERANDS:
15426 case VEC_MERGE:
15427 case VEC_SELECT:
15428 case VEC_CONCAT:
15429 case VEC_DUPLICATE:
15430 case UNSPEC:
15431 case HIGH:
15432 case FMA:
15433 case STRICT_LOW_PART:
15434 case CONST_VECTOR:
15435 case CONST_FIXED:
15436 case CLRSB:
15437 case CLOBBER:
15438 /* If delegitimize_address couldn't do anything with the UNSPEC, we
15439 can't express it in the debug info. This can happen e.g. with some
15440 TLS UNSPECs. */
15441 break;
15442
15443 case CONST_STRING:
15444 resolve_one_addr (&rtl);
15445 goto symref;
15446
15447 /* RTL sequences inside a PARALLEL record a series of DWARF operations
15448    for the expression.  An UNSPEC rtx represents a raw DWARF operation;
15449    new_loc_descr is called for it to build the operation directly.
15450    Otherwise mem_loc_descriptor is called recursively.  */
15451 case PARALLEL:
15452 {
15453 int index = 0;
15454 dw_loc_descr_ref exp_result = NULL;
15455
15456 for (; index < XVECLEN (rtl, 0); index++)
15457 {
15458 rtx elem = XVECEXP (rtl, 0, index);
15459 if (GET_CODE (elem) == UNSPEC)
15460 {
15461 		  /* Each DWARF operation UNSPEC contains two operands; if
15462 		     one operand is not used for the operation, const0_rtx is
15463 		     passed.  */
15464 gcc_assert (XVECLEN (elem, 0) == 2);
15465
15466 HOST_WIDE_INT dw_op = XINT (elem, 1);
15467 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
15468 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
15469 exp_result
15470 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
15471 oprnd2);
15472 }
15473 else
15474 exp_result
15475 = mem_loc_descriptor (elem, mode, mem_mode,
15476 VAR_INIT_STATUS_INITIALIZED);
15477
15478 if (!mem_loc_result)
15479 mem_loc_result = exp_result;
15480 else
15481 add_loc_descr (&mem_loc_result, exp_result);
15482 }
15483
15484 break;
15485 }
15486
15487 default:
15488 if (flag_checking)
15489 {
15490 print_rtl (stderr, rtl);
15491 gcc_unreachable ();
15492 }
15493 break;
15494 }
15495
15496 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
15497 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
15498
15499 return mem_loc_result;
15500 }
15501
15502 /* Return a descriptor that describes the concatenation of two locations.
15503 This is typically a complex variable. */
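/* For illustration (editorial): a complex double held in two registers
   is typically described as the real-part register descriptor followed
   by DW_OP_piece <8>, then the imaginary-part register descriptor
   followed by DW_OP_piece <8>.  */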
15504
15505 static dw_loc_descr_ref
15506 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
15507 {
15508 dw_loc_descr_ref cc_loc_result = NULL;
15509 dw_loc_descr_ref x0_ref
15510 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15511 dw_loc_descr_ref x1_ref
15512 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15513
15514 if (x0_ref == 0 || x1_ref == 0)
15515 return 0;
15516
15517 cc_loc_result = x0_ref;
15518 add_loc_descr_op_piece (&cc_loc_result, GET_MODE_SIZE (GET_MODE (x0)));
15519
15520 add_loc_descr (&cc_loc_result, x1_ref);
15521 add_loc_descr_op_piece (&cc_loc_result, GET_MODE_SIZE (GET_MODE (x1)));
15522
15523 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
15524 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
15525
15526 return cc_loc_result;
15527 }
15528
15529 /* Return a descriptor that describes the concatenation of N
15530 locations. */
15531
15532 static dw_loc_descr_ref
15533 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
15534 {
15535 unsigned int i;
15536 dw_loc_descr_ref cc_loc_result = NULL;
15537 unsigned int n = XVECLEN (concatn, 0);
15538
15539 for (i = 0; i < n; ++i)
15540 {
15541 dw_loc_descr_ref ref;
15542 rtx x = XVECEXP (concatn, 0, i);
15543
15544 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15545 if (ref == NULL)
15546 return NULL;
15547
15548 add_loc_descr (&cc_loc_result, ref);
15549 add_loc_descr_op_piece (&cc_loc_result, GET_MODE_SIZE (GET_MODE (x)));
15550 }
15551
15552 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
15553 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
15554
15555 return cc_loc_result;
15556 }
15557
15558 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
15559 for DEBUG_IMPLICIT_PTR RTL. */
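/* The emitted DW_OP_implicit_pointer (or its GNU extension counterpart
   for pre-DWARF-5 output) carries a reference to the DIE of the variable
   the pointer would point into and a byte offset within it.  */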
15560
15561 static dw_loc_descr_ref
15562 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
15563 {
15564 dw_loc_descr_ref ret;
15565 dw_die_ref ref;
15566
15567 if (dwarf_strict && dwarf_version < 5)
15568 return NULL;
15569 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
15570 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
15571 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
15572 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
15573 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
15574 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
15575 if (ref)
15576 {
15577 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15578 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15579 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15580 }
15581 else
15582 {
15583 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15584 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
15585 }
15586 return ret;
15587 }
15588
15589 /* Output a proper Dwarf location descriptor for a variable or parameter
15590 which is either allocated in a register or in a memory location. For a
15591 register, we just generate an OP_REG and the register number. For a
15592 memory location we provide a Dwarf postfix expression describing how to
15593 generate the (dynamic) address of the object onto the address stack.
15594
15595    MODE is the mode of the decl if this loc_descriptor is going to be used in
15596 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
15597 allowed, VOIDmode otherwise.
15598
15599 If we don't know how to describe it, return 0. */
15600
15601 static dw_loc_descr_ref
15602 loc_descriptor (rtx rtl, machine_mode mode,
15603 enum var_init_status initialized)
15604 {
15605 dw_loc_descr_ref loc_result = NULL;
15606
15607 switch (GET_CODE (rtl))
15608 {
15609 case SUBREG:
15610 /* The case of a subreg may arise when we have a local (register)
15611 variable or a formal (register) parameter which doesn't quite fill
15612 up an entire register. For now, just assume that it is
15613 legitimate to make the Dwarf info refer to the whole register which
15614 contains the given subreg. */
15615 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
15616 loc_result = loc_descriptor (SUBREG_REG (rtl),
15617 GET_MODE (SUBREG_REG (rtl)), initialized);
15618 else
15619 goto do_default;
15620 break;
15621
15622 case REG:
15623 loc_result = reg_loc_descriptor (rtl, initialized);
15624 break;
15625
15626 case MEM:
15627 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
15628 GET_MODE (rtl), initialized);
15629 if (loc_result == NULL)
15630 loc_result = tls_mem_loc_descriptor (rtl);
15631 if (loc_result == NULL)
15632 {
15633 rtx new_rtl = avoid_constant_pool_reference (rtl);
15634 if (new_rtl != rtl)
15635 loc_result = loc_descriptor (new_rtl, mode, initialized);
15636 }
15637 break;
15638
15639 case CONCAT:
15640 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
15641 initialized);
15642 break;
15643
15644 case CONCATN:
15645 loc_result = concatn_loc_descriptor (rtl, initialized);
15646 break;
15647
15648 case VAR_LOCATION:
15649 /* Single part. */
15650 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
15651 {
15652 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
15653 if (GET_CODE (loc) == EXPR_LIST)
15654 loc = XEXP (loc, 0);
15655 loc_result = loc_descriptor (loc, mode, initialized);
15656 break;
15657 }
15658
15659 rtl = XEXP (rtl, 1);
15660 /* FALLTHRU */
15661
15662 case PARALLEL:
15663 {
15664 rtvec par_elems = XVEC (rtl, 0);
15665 int num_elem = GET_NUM_ELEM (par_elems);
15666 machine_mode mode;
15667 int i;
15668
15669 /* Create the first one, so we have something to add to. */
15670 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
15671 VOIDmode, initialized);
15672 if (loc_result == NULL)
15673 return NULL;
15674 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
15675 add_loc_descr_op_piece (&loc_result, GET_MODE_SIZE (mode));
15676 for (i = 1; i < num_elem; i++)
15677 {
15678 dw_loc_descr_ref temp;
15679
15680 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
15681 VOIDmode, initialized);
15682 if (temp == NULL)
15683 return NULL;
15684 add_loc_descr (&loc_result, temp);
15685 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
15686 add_loc_descr_op_piece (&loc_result, GET_MODE_SIZE (mode));
15687 }
15688 }
15689 break;
15690
15691 case CONST_INT:
15692 if (mode != VOIDmode && mode != BLKmode)
15693 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (mode),
15694 INTVAL (rtl));
15695 break;
15696
15697 case CONST_DOUBLE:
15698 if (mode == VOIDmode)
15699 mode = GET_MODE (rtl);
15700
15701 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
15702 {
15703 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
15704
15705 /* Note that a CONST_DOUBLE rtx could represent either an integer
15706 or a floating-point constant. A CONST_DOUBLE is used whenever
15707 the constant requires more than one word in order to be
15708 adequately represented. We output CONST_DOUBLEs as blocks. */
15709 loc_result = new_loc_descr (DW_OP_implicit_value,
15710 GET_MODE_SIZE (mode), 0);
15711 #if TARGET_SUPPORTS_WIDE_INT == 0
15712 if (!SCALAR_FLOAT_MODE_P (mode))
15713 {
15714 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
15715 loc_result->dw_loc_oprnd2.v.val_double
15716 = rtx_to_double_int (rtl);
15717 }
15718 else
15719 #endif
15720 {
15721 unsigned int length = GET_MODE_SIZE (mode);
15722 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
15723
15724 insert_float (rtl, array);
15725 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
15726 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
15727 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
15728 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
15729 }
15730 }
15731 break;
15732
15733 case CONST_WIDE_INT:
15734 if (mode == VOIDmode)
15735 mode = GET_MODE (rtl);
15736
15737 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
15738 {
15739 loc_result = new_loc_descr (DW_OP_implicit_value,
15740 GET_MODE_SIZE (mode), 0);
15741 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
15742 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
15743 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
15744 }
15745 break;
15746
15747 case CONST_VECTOR:
15748 if (mode == VOIDmode)
15749 mode = GET_MODE (rtl);
15750
15751 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
15752 {
15753 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
15754 unsigned int length = CONST_VECTOR_NUNITS (rtl);
15755 unsigned char *array
15756 = ggc_vec_alloc<unsigned char> (length * elt_size);
15757 unsigned int i;
15758 unsigned char *p;
15759 machine_mode imode = GET_MODE_INNER (mode);
15760
15761 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
15762 switch (GET_MODE_CLASS (mode))
15763 {
15764 case MODE_VECTOR_INT:
15765 for (i = 0, p = array; i < length; i++, p += elt_size)
15766 {
15767 rtx elt = CONST_VECTOR_ELT (rtl, i);
15768 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
15769 }
15770 break;
15771
15772 case MODE_VECTOR_FLOAT:
15773 for (i = 0, p = array; i < length; i++, p += elt_size)
15774 {
15775 rtx elt = CONST_VECTOR_ELT (rtl, i);
15776 insert_float (elt, p);
15777 }
15778 break;
15779
15780 default:
15781 gcc_unreachable ();
15782 }
15783
15784 loc_result = new_loc_descr (DW_OP_implicit_value,
15785 length * elt_size, 0);
15786 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
15787 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
15788 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
15789 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
15790 }
15791 break;
15792
15793 case CONST:
15794 if (mode == VOIDmode
15795 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
15796 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
15797 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
15798 {
15799 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
15800 break;
15801 }
15802 /* FALLTHROUGH */
15803 case SYMBOL_REF:
15804 if (!const_ok_for_output (rtl))
15805 break;
15806 /* FALLTHROUGH */
15807 case LABEL_REF:
15808 if (mode != VOIDmode && GET_MODE_SIZE (mode) == DWARF2_ADDR_SIZE
15809 && (dwarf_version >= 4 || !dwarf_strict))
15810 {
15811 loc_result = new_addr_loc_descr (rtl, dtprel_false);
15812 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
15813 vec_safe_push (used_rtx_array, rtl);
15814 }
15815 break;
15816
15817 case DEBUG_IMPLICIT_PTR:
15818 loc_result = implicit_ptr_descriptor (rtl, 0);
15819 break;
15820
15821 case PLUS:
15822 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
15823 && CONST_INT_P (XEXP (rtl, 1)))
15824 {
15825 loc_result
15826 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
15827 break;
15828 }
15829 /* FALLTHRU */
15830 do_default:
15831 default:
15832 if ((SCALAR_INT_MODE_P (mode)
15833 && GET_MODE (rtl) == mode
15834 && GET_MODE_SIZE (GET_MODE (rtl)) <= DWARF2_ADDR_SIZE
15835 && dwarf_version >= 4)
15836 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
15837 {
15838 /* Value expression. */
15839 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
15840 if (loc_result)
15841 add_loc_descr (&loc_result,
15842 new_loc_descr (DW_OP_stack_value, 0, 0));
15843 }
15844 break;
15845 }
15846
15847 return loc_result;
15848 }
15849
15850 /* We need to figure out what section we should use as the base for the
15851 address ranges where a given location is valid.
15852 1. If this particular DECL has a section associated with it, use that.
15853 2. If this function has a section associated with it, use that.
15854 3. Otherwise, use the text section.
15855 XXX: If you split a variable across multiple sections, we won't notice. */
15856
15857 static const char *
15858 secname_for_decl (const_tree decl)
15859 {
15860 const char *secname;
15861
15862 if (VAR_OR_FUNCTION_DECL_P (decl)
15863 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
15864 && DECL_SECTION_NAME (decl))
15865 secname = DECL_SECTION_NAME (decl);
15866 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
15867 secname = DECL_SECTION_NAME (current_function_decl);
15868 else if (cfun && in_cold_section_p)
15869 secname = crtl->subsections.cold_section_label;
15870 else
15871 secname = text_section_label;
15872
15873 return secname;
15874 }
15875
15876 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
15877
15878 static bool
15879 decl_by_reference_p (tree decl)
15880 {
15881 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
15882 || VAR_P (decl))
15883 && DECL_BY_REFERENCE (decl));
15884 }
15885
15886 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
15887 for VARLOC. */
15888
15889 static dw_loc_descr_ref
15890 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
15891 enum var_init_status initialized)
15892 {
15893 int have_address = 0;
15894 dw_loc_descr_ref descr;
15895 machine_mode mode;
15896
15897 if (want_address != 2)
15898 {
15899 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
15900 /* Single part. */
15901 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
15902 {
15903 varloc = PAT_VAR_LOCATION_LOC (varloc);
15904 if (GET_CODE (varloc) == EXPR_LIST)
15905 varloc = XEXP (varloc, 0);
15906 mode = GET_MODE (varloc);
15907 if (MEM_P (varloc))
15908 {
15909 rtx addr = XEXP (varloc, 0);
15910 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
15911 mode, initialized);
15912 if (descr)
15913 have_address = 1;
15914 else
15915 {
15916 rtx x = avoid_constant_pool_reference (varloc);
15917 if (x != varloc)
15918 descr = mem_loc_descriptor (x, mode, VOIDmode,
15919 initialized);
15920 }
15921 }
15922 else
15923 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
15924 }
15925 else
15926 return 0;
15927 }
15928 else
15929 {
15930 if (GET_CODE (varloc) == VAR_LOCATION)
15931 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
15932 else
15933 mode = DECL_MODE (loc);
15934 descr = loc_descriptor (varloc, mode, initialized);
15935 have_address = 1;
15936 }
15937
15938 if (!descr)
15939 return 0;
15940
15941 if (want_address == 2 && !have_address
15942 && (dwarf_version >= 4 || !dwarf_strict))
15943 {
15944 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
15945 {
15946 expansion_failed (loc, NULL_RTX,
15947 "DWARF address size mismatch");
15948 return 0;
15949 }
15950 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
15951 have_address = 1;
15952 }
15953 /* Show if we can't fill the request for an address. */
15954 if (want_address && !have_address)
15955 {
15956 expansion_failed (loc, NULL_RTX,
15957 "Want address and only have value");
15958 return 0;
15959 }
15960
15961 /* If we've got an address and don't want one, dereference. */
15962 if (!want_address && have_address)
15963 {
15964 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
15965 enum dwarf_location_atom op;
15966
15967 if (size > DWARF2_ADDR_SIZE || size == -1)
15968 {
15969 expansion_failed (loc, NULL_RTX,
15970 "DWARF address size mismatch");
15971 return 0;
15972 }
15973 else if (size == DWARF2_ADDR_SIZE)
15974 op = DW_OP_deref;
15975 else
15976 op = DW_OP_deref_size;
15977
15978 add_loc_descr (&descr, new_loc_descr (op, size, 0));
15979 }
15980
15981 return descr;
15982 }
15983
15984 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
15985 if it is not possible. */
15986
15987 static dw_loc_descr_ref
15988 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
15989 {
15990 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
15991 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
15992 else if (dwarf_version >= 3 || !dwarf_strict)
15993 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
15994 else
15995 return NULL;
15996 }
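/* A few illustrative (non-normative) examples of what the helper above
   produces, assuming BITS_PER_UNIT == 8:
     bitsize = 32, offset = 0  ->  DW_OP_piece 4
     bitsize = 16, offset = 8  ->  DW_OP_bit_piece 16, 8  (DWARF 3+ or !dwarf_strict)
     bitsize = 3,  offset = 5  ->  DW_OP_bit_piece 3, 5   (DWARF 3+ or !dwarf_strict)
   Under strict DWARF 2 the last two cases return NULL.  */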
15997
15998 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
15999 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
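/* As an illustrative sketch (hypothetical layout, not verbatim GCC output):
   a 12-byte structure whose first field lives in a register, whose second
   field was optimized out and whose third field lives on the stack could be
   described piece-wise as
     DW_OP_reg0 DW_OP_piece 4        <- first field
     DW_OP_piece 4                   <- hole, bits optimized out
     DW_OP_fbreg -24 DW_OP_piece 4   <- third field  */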
16000
16001 static dw_loc_descr_ref
16002 dw_sra_loc_expr (tree decl, rtx loc)
16003 {
16004 rtx p;
16005 unsigned HOST_WIDE_INT padsize = 0;
16006 dw_loc_descr_ref descr, *descr_tail;
16007 unsigned HOST_WIDE_INT decl_size;
16008 rtx varloc;
16009 enum var_init_status initialized;
16010
16011 if (DECL_SIZE (decl) == NULL
16012 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
16013 return NULL;
16014
16015 decl_size = tree_to_uhwi (DECL_SIZE (decl));
16016 descr = NULL;
16017 descr_tail = &descr;
16018
16019 for (p = loc; p; p = XEXP (p, 1))
16020 {
16021 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
16022 rtx loc_note = *decl_piece_varloc_ptr (p);
16023 dw_loc_descr_ref cur_descr;
16024 dw_loc_descr_ref *tail, last = NULL;
16025 unsigned HOST_WIDE_INT opsize = 0;
16026
16027 if (loc_note == NULL_RTX
16028 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
16029 {
16030 padsize += bitsize;
16031 continue;
16032 }
16033 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
16034 varloc = NOTE_VAR_LOCATION (loc_note);
16035 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
16036 if (cur_descr == NULL)
16037 {
16038 padsize += bitsize;
16039 continue;
16040 }
16041
16042 /* Check that cur_descr either doesn't use
16043 DW_OP_*piece operations, or their sum is equal
16044 to bitsize. Otherwise we can't embed it. */
16045 for (tail = &cur_descr; *tail != NULL;
16046 tail = &(*tail)->dw_loc_next)
16047 if ((*tail)->dw_loc_opc == DW_OP_piece)
16048 {
16049 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
16050 * BITS_PER_UNIT;
16051 last = *tail;
16052 }
16053 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
16054 {
16055 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
16056 last = *tail;
16057 }
16058
16059 if (last != NULL && opsize != bitsize)
16060 {
16061 padsize += bitsize;
16062 /* Discard the current piece of the descriptor and release any
16063 addr_table entries it uses. */
16064 remove_loc_list_addr_table_entries (cur_descr);
16065 continue;
16066 }
16067
16068 /* If there is a hole, add a DW_OP_*piece after an empty DWARF
16069 expression, which means that those bits are optimized out. */
16070 if (padsize)
16071 {
16072 if (padsize > decl_size)
16073 {
16074 remove_loc_list_addr_table_entries (cur_descr);
16075 goto discard_descr;
16076 }
16077 decl_size -= padsize;
16078 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
16079 if (*descr_tail == NULL)
16080 {
16081 remove_loc_list_addr_table_entries (cur_descr);
16082 goto discard_descr;
16083 }
16084 descr_tail = &(*descr_tail)->dw_loc_next;
16085 padsize = 0;
16086 }
16087 *descr_tail = cur_descr;
16088 descr_tail = tail;
16089 if (bitsize > decl_size)
16090 goto discard_descr;
16091 decl_size -= bitsize;
16092 if (last == NULL)
16093 {
16094 HOST_WIDE_INT offset = 0;
16095 if (GET_CODE (varloc) == VAR_LOCATION
16096 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16097 {
16098 varloc = PAT_VAR_LOCATION_LOC (varloc);
16099 if (GET_CODE (varloc) == EXPR_LIST)
16100 varloc = XEXP (varloc, 0);
16101 }
16102 do
16103 {
16104 if (GET_CODE (varloc) == CONST
16105 || GET_CODE (varloc) == SIGN_EXTEND
16106 || GET_CODE (varloc) == ZERO_EXTEND)
16107 varloc = XEXP (varloc, 0);
16108 else if (GET_CODE (varloc) == SUBREG)
16109 varloc = SUBREG_REG (varloc);
16110 else
16111 break;
16112 }
16113 while (1);
16114 /* The DW_OP_bit_piece offset should be zero for register
16115 or implicit location descriptions and empty location
16116 descriptions, but for memory addresses it needs big-endian
16117 adjustment. */
16118 if (MEM_P (varloc))
16119 {
16120 unsigned HOST_WIDE_INT memsize
16121 = MEM_SIZE (varloc) * BITS_PER_UNIT;
16122 if (memsize != bitsize)
16123 {
16124 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
16125 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
16126 goto discard_descr;
16127 if (memsize < bitsize)
16128 goto discard_descr;
16129 if (BITS_BIG_ENDIAN)
16130 offset = memsize - bitsize;
16131 }
16132 }
16133
16134 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
16135 if (*descr_tail == NULL)
16136 goto discard_descr;
16137 descr_tail = &(*descr_tail)->dw_loc_next;
16138 }
16139 }
16140
16141 /* If there were any non-empty expressions, add padding till the end of
16142 the decl. */
16143 if (descr != NULL && decl_size != 0)
16144 {
16145 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
16146 if (*descr_tail == NULL)
16147 goto discard_descr;
16148 }
16149 return descr;
16150
16151 discard_descr:
16152 /* Discard the descriptor and release any addr_table entries it uses. */
16153 remove_loc_list_addr_table_entries (descr);
16154 return NULL;
16155 }
16156
16157 /* Return the dwarf representation of the location list LOC_LIST of
16158 DECL. WANT_ADDRESS has the same meaning as in the loc_list_from_tree
16159 function. */
16160
16161 static dw_loc_list_ref
16162 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
16163 {
16164 const char *endname, *secname;
16165 rtx varloc;
16166 enum var_init_status initialized;
16167 struct var_loc_node *node;
16168 dw_loc_descr_ref descr;
16169 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
16170 dw_loc_list_ref list = NULL;
16171 dw_loc_list_ref *listp = &list;
16172
16173 /* Now that we know what section we are using for a base,
16174 actually construct the list of locations.
16175 The first location information is what is passed to the
16176 function that creates the location list, and the remaining
16177 locations just get added on to that list.
16178 Note that we only know the start address for a location
16179 (i.e. location changes), so to build the range, we use
16180 the range [current location start, next location start].
16181 This means we have to special case the last node, and generate
16182 a range of [last location start, end of function label]. */
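/* For instance (a sketch with hypothetical labels), the loop below can
   produce a list such as:
     [.LVL1, .LVL2)        DW_OP_reg0
     [.LVL2, .LVL3)        DW_OP_fbreg -16
     [.LVL3, .Lfunc_end0)  DW_OP_lit0 DW_OP_stack_value
   where each range starts at one var-location note and ends at the next,
   and the last range ends at the end-of-function label.  */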
16183
16184 secname = secname_for_decl (decl);
16185
16186 for (node = loc_list->first; node; node = node->next)
16187 if (GET_CODE (node->loc) == EXPR_LIST
16188 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
16189 {
16190 if (GET_CODE (node->loc) == EXPR_LIST)
16191 {
16192 /* This requires DW_OP_{,bit_}piece, which is not usable
16193 inside DWARF expressions. */
16194 if (want_address != 2)
16195 continue;
16196 descr = dw_sra_loc_expr (decl, node->loc);
16197 if (descr == NULL)
16198 continue;
16199 }
16200 else
16201 {
16202 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
16203 varloc = NOTE_VAR_LOCATION (node->loc);
16204 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
16205 }
16206 if (descr)
16207 {
16208 bool range_across_switch = false;
16209 /* If a section switch happens between node->label
16210 and node->next->label (or the end of the function) and
16211 we can't emit it as a single entry list,
16212 emit two ranges: the first one ending at the end
16213 of the first partition and the second one starting at the
16214 beginning of the second partition. */
16215 if (node == loc_list->last_before_switch
16216 && (node != loc_list->first || loc_list->first->next)
16217 && current_function_decl)
16218 {
16219 endname = cfun->fde->dw_fde_end;
16220 range_across_switch = true;
16221 }
16222 /* The variable has a location between NODE->LABEL and
16223 NODE->NEXT->LABEL. */
16224 else if (node->next)
16225 endname = node->next->label;
16226 /* If the variable has a location at the last label
16227 it keeps its location until the end of function. */
16228 else if (!current_function_decl)
16229 endname = text_end_label;
16230 else
16231 {
16232 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
16233 current_function_funcdef_no);
16234 endname = ggc_strdup (label_id);
16235 }
16236
16237 *listp = new_loc_list (descr, node->label, endname, secname);
16238 if (TREE_CODE (decl) == PARM_DECL
16239 && node == loc_list->first
16240 && NOTE_P (node->loc)
16241 && strcmp (node->label, endname) == 0)
16242 (*listp)->force = true;
16243 listp = &(*listp)->dw_loc_next;
16244
16245 if (range_across_switch)
16246 {
16247 if (GET_CODE (node->loc) == EXPR_LIST)
16248 descr = dw_sra_loc_expr (decl, node->loc);
16249 else
16250 {
16251 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
16252 varloc = NOTE_VAR_LOCATION (node->loc);
16253 descr = dw_loc_list_1 (decl, varloc, want_address,
16254 initialized);
16255 }
16256 gcc_assert (descr);
16257 /* The variable has a location between NODE->LABEL and
16258 NODE->NEXT->LABEL. */
16259 if (node->next)
16260 endname = node->next->label;
16261 else
16262 endname = cfun->fde->dw_fde_second_end;
16263 *listp = new_loc_list (descr,
16264 cfun->fde->dw_fde_second_begin,
16265 endname, secname);
16266 listp = &(*listp)->dw_loc_next;
16267 }
16268 }
16269 }
16270
16271 /* Try to avoid the overhead of a location list by emitting a location
16272 expression instead, but only if we didn't have more than one
16273 location entry in the first place. If some entries were not
16274 representable, we don't want to pretend that a single entry that was
16275 representable applies to the entire scope in which the variable is
16276 available. */
16277 if (list && loc_list->first->next)
16278 gen_llsym (list);
16279
16280 return list;
16281 }
16282
16283 /* Return true if the loc_list has only a single element and thus can be
16284 represented as a location description. */
16285
16286 static bool
16287 single_element_loc_list_p (dw_loc_list_ref list)
16288 {
16289 gcc_assert (!list->dw_loc_next || list->ll_symbol);
16290 return !list->ll_symbol;
16291 }
16292
16293 /* To each location in list LIST add loc descr REF. */
16294
16295 static void
16296 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
16297 {
16298 dw_loc_descr_ref copy;
16299 add_loc_descr (&list->expr, ref);
16300 list = list->dw_loc_next;
16301 while (list)
16302 {
16303 copy = ggc_alloc<dw_loc_descr_node> ();
16304 memcpy (copy, ref, sizeof (dw_loc_descr_node));
16305 add_loc_descr (&list->expr, copy);
16306 while (copy->dw_loc_next)
16307 {
16308 dw_loc_descr_ref new_copy = ggc_alloc<dw_loc_descr_node> ();
16309 memcpy (new_copy, copy->dw_loc_next, sizeof (dw_loc_descr_node));
16310 copy->dw_loc_next = new_copy;
16311 copy = new_copy;
16312 }
16313 list = list->dw_loc_next;
16314 }
16315 }
16316
16317 /* Given two lists RET and LIST
16318 produce location list that is result of adding expression in LIST
16319 to expression in RET on each position in program.
16320 Might be destructive on both RET and LIST.
16321
16322 TODO: We handle only simple cases of RET or LIST having at most one
16323 element. The general case would involve sorting the lists in program order
16324 and merging them, which will need some additional work.
16325 Adding that will improve quality of debug info especially for SRA-ed
16326 structures. */
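/* For example (a sketch, hypothetical labels and expressions): if *RET is the
   single-entry list { [.LVL1, .LVL2): DW_OP_fbreg -16 } and LIST is a
   single-entry list whose expression is DW_OP_lit8 DW_OP_plus, the merged
   result is { [.LVL1, .LVL2): DW_OP_fbreg -16 DW_OP_lit8 DW_OP_plus }, i.e.
   the expression of the single-entry list is appended to every location of
   the other list.  */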
16327
16328 static void
16329 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
16330 {
16331 if (!list)
16332 return;
16333 if (!*ret)
16334 {
16335 *ret = list;
16336 return;
16337 }
16338 if (!list->dw_loc_next)
16339 {
16340 add_loc_descr_to_each (*ret, list->expr);
16341 return;
16342 }
16343 if (!(*ret)->dw_loc_next)
16344 {
16345 add_loc_descr_to_each (list, (*ret)->expr);
16346 *ret = list;
16347 return;
16348 }
16349 expansion_failed (NULL_TREE, NULL_RTX,
16350 "Don't know how to merge two non-trivial"
16351 " location lists.\n");
16352 *ret = NULL;
16353 return;
16354 }
16355
16356 /* LOC is a constant expression. Try our luck: look it up in the constant
16357 pool and return a loc_descr for its address. */
16358
16359 static dw_loc_descr_ref
16360 cst_pool_loc_descr (tree loc)
16361 {
16362 /* Get an RTL for this, if something has been emitted. */
16363 rtx rtl = lookup_constant_def (loc);
16364
16365 if (!rtl || !MEM_P (rtl))
16366 {
16367 gcc_assert (!rtl);
16368 return 0;
16369 }
16370 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
16371
16372 /* TODO: We might get more coverage if we were actually delaying expansion
16373 of all expressions till the end of compilation, when constant pools are fully
16374 populated. */
16375 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
16376 {
16377 expansion_failed (loc, NULL_RTX,
16378 "CST value in contant pool but not marked.");
16379 return 0;
16380 }
16381 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16382 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
16383 }
16384
16385 /* Return a dw_loc_list representing the address of the addr_expr LOC
16386 by looking for an inner INDIRECT_REF expression and turning
16387 it into simple arithmetic.
16388
16389 See loc_list_from_tree for the meaning of CONTEXT. */
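/* For example (a sketch): for LOC = &ptr->field, the inner reference is
   *ptr, so instead of materializing a temporary we emit the expression
   computing the value of ptr, followed by DW_OP_plus_uconst <byte offset of
   field> and, in the top-level case, DW_OP_stack_value.  */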
16390
16391 static dw_loc_list_ref
16392 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
16393 loc_descr_context *context)
16394 {
16395 tree obj, offset;
16396 HOST_WIDE_INT bitsize, bitpos, bytepos;
16397 machine_mode mode;
16398 int unsignedp, reversep, volatilep = 0;
16399 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
16400
16401 obj = get_inner_reference (TREE_OPERAND (loc, 0),
16402 &bitsize, &bitpos, &offset, &mode,
16403 &unsignedp, &reversep, &volatilep);
16404 STRIP_NOPS (obj);
16405 if (bitpos % BITS_PER_UNIT)
16406 {
16407 expansion_failed (loc, NULL_RTX, "bitfield access");
16408 return 0;
16409 }
16410 if (!INDIRECT_REF_P (obj))
16411 {
16412 expansion_failed (obj,
16413 NULL_RTX, "no indirect ref in inner reference");
16414 return 0;
16415 }
16416 if (!offset && !bitpos)
16417 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
16418 context);
16419 else if (toplev
16420 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
16421 && (dwarf_version >= 4 || !dwarf_strict))
16422 {
16423 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
16424 if (!list_ret)
16425 return 0;
16426 if (offset)
16427 {
16428 /* Variable offset. */
16429 list_ret1 = loc_list_from_tree (offset, 0, context);
16430 if (list_ret1 == 0)
16431 return 0;
16432 add_loc_list (&list_ret, list_ret1);
16433 if (!list_ret)
16434 return 0;
16435 add_loc_descr_to_each (list_ret,
16436 new_loc_descr (DW_OP_plus, 0, 0));
16437 }
16438 bytepos = bitpos / BITS_PER_UNIT;
16439 if (bytepos > 0)
16440 add_loc_descr_to_each (list_ret,
16441 new_loc_descr (DW_OP_plus_uconst,
16442 bytepos, 0));
16443 else if (bytepos < 0)
16444 loc_list_plus_const (list_ret, bytepos);
16445 add_loc_descr_to_each (list_ret,
16446 new_loc_descr (DW_OP_stack_value, 0, 0));
16447 }
16448 return list_ret;
16449 }
16450
16451 /* Set LOC to the next operation that is not a DW_OP_nop operation. If
16452 all operations from LOC are nops, move to the last one. Insert in NOPS all
16453 operations that are skipped. */
16454
16455 static void
16456 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
16457 hash_set<dw_loc_descr_ref> &nops)
16458 {
16459 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
16460 {
16461 nops.add (loc);
16462 loc = loc->dw_loc_next;
16463 }
16464 }
16465
16466 /* Helper for loc_descr_without_nops: free the location description operation
16467 P. */
16468
16469 bool
16470 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
16471 {
16472 ggc_free (loc);
16473 return true;
16474 }
16475
16476 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
16477 finishes LOC. */
16478
16479 static void
16480 loc_descr_without_nops (dw_loc_descr_ref &loc)
16481 {
16482 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
16483 return;
16484
16485 /* Set of all DW_OP_nop operations we remove. */
16486 hash_set<dw_loc_descr_ref> nops;
16487
16488 /* First, strip all prefix NOP operations in order to keep the head of the
16489 operations list. */
16490 loc_descr_to_next_no_nop (loc, nops);
16491
16492 for (dw_loc_descr_ref cur = loc; cur != NULL;)
16493 {
16494 /* For control flow operations: strip "prefix" nops in destination
16495 labels. */
16496 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
16497 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
16498 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
16499 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
16500
16501 /* Do the same for the operations that follow, then move to the next
16502 iteration. */
16503 if (cur->dw_loc_next != NULL)
16504 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
16505 cur = cur->dw_loc_next;
16506 }
16507
16508 nops.traverse<void *, free_loc_descr> (NULL);
16509 }
16510
16511
16512 struct dwarf_procedure_info;
16513
16514 /* Helper structure for location descriptions generation. */
16515 struct loc_descr_context
16516 {
16517 /* The type that is implicitly referenced by DW_OP_push_object_address, or
16518 NULL_TREE if DW_OP_push_object_address is invalid for this location
16519 description. This is used when processing PLACEHOLDER_EXPR nodes. */
16520 tree context_type;
16521 /* The ..._DECL node that should be translated as a
16522 DW_OP_push_object_address operation. */
16523 tree base_decl;
16524 /* Information about the DWARF procedure we are currently generating. NULL if
16525 we are not generating a DWARF procedure. */
16526 struct dwarf_procedure_info *dpi;
16527 /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
16528 by consumer. Used for DW_TAG_generic_subrange attributes. */
16529 bool placeholder_arg;
16530 /* True if PLACEHOLDER_EXPR has been seen. */
16531 bool placeholder_seen;
16532 };
16533
16534 /* DWARF procedures generation
16535
16536 DWARF expressions (a.k.a. location descriptions) are used to encode things
16537 that vary, such as sizes or offsets. Such computations can have redundant parts
16538 that can be factorized in order to reduce the size of the output debug
16539 information. This is the whole point of DWARF procedures.
16540
16541 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
16542 already factorized into functions ("size functions") in order to handle very
16543 big and complex types. Such functions are quite simple: they have integral
16544 arguments, they return an integral result and their body contains only a
16545 return statement with arithmetic expressions. This is the only kind of
16546 function we are interested in translating into DWARF procedures, here.
16547
16548 DWARF expressions and DWARF procedures are executed using a stack, so we have
16549 to define some calling convention for them to interact. Let's say that:
16550
16551 - Before calling a DWARF procedure, DWARF expressions must push on the stack
16552 all arguments in reverse order (right-to-left) so that when the DWARF
16553 procedure execution starts, the first argument is the top of the stack.
16554
16555 - Then, when returning, the DWARF procedure must have consumed all arguments
16556 on the stack, must have pushed the result and touched nothing else.
16557
16558 - Each integral argument and the integral result can be held in a
16559 single stack slot.
16560
16561 - We call "frame offset" the number of stack slots that are "under DWARF
16562 procedure control": it includes the arguments slots, the temporaries and
16563 the result slot. Thus, it is equal to the number of arguments when the
16564 procedure execution starts and must be equal to one (the result) when it
16565 returns. */
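/* As an illustrative (non-normative) example, consider a size function
   computing ARG0 * ARG1 + 4, called with two arguments:

     before the call, arguments pushed right-to-left:
        ... ARG1 ARG0          (frame offset = 2, ARG0 on top)
     DW_OP_call4 <dwarf procedure>:
        the procedure picks ARG0 and ARG1, computes ARG0 * ARG1 + 4,
        and its epilogue drops both argument slots
     after the call:
        ... RESULT             (frame offset = 1)  */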
16566
16567 /* Helper structure used when generating operations for a DWARF procedure. */
16568 struct dwarf_procedure_info
16569 {
16570 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
16571 currently translated. */
16572 tree fndecl;
16573 /* The number of arguments FNDECL takes. */
16574 unsigned args_count;
16575 };
16576
16577 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
16578 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
16579 equate it to this DIE. */
16580
16581 static dw_die_ref
16582 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
16583 dw_die_ref parent_die)
16584 {
16585 dw_die_ref dwarf_proc_die;
16586
16587 if ((dwarf_version < 3 && dwarf_strict)
16588 || location == NULL)
16589 return NULL;
16590
16591 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
16592 if (fndecl)
16593 equate_decl_number_to_die (fndecl, dwarf_proc_die);
16594 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
16595 return dwarf_proc_die;
16596 }
16597
16598 /* Return whether TYPE is a supported type as a DWARF procedure argument
16599 type or return type (we handle only scalar types and pointer types that
16600 aren't wider than the DWARF expression evaluation stack). */
16601
16602 static bool
16603 is_handled_procedure_type (tree type)
16604 {
16605 return ((INTEGRAL_TYPE_P (type)
16606 || TREE_CODE (type) == OFFSET_TYPE
16607 || TREE_CODE (type) == POINTER_TYPE)
16608 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
16609 }
16610
16611 /* Helper for resolve_args_picking: do the same but stop when coming across
16612 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
16613 offset *before* evaluating the corresponding operation. */
16614
16615 static bool
16616 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
16617 struct dwarf_procedure_info *dpi,
16618 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
16619 {
16620 /* The "frame_offset" identifier is already used to name a macro... */
16621 unsigned frame_offset_ = initial_frame_offset;
16622 dw_loc_descr_ref l;
16623
16624 for (l = loc; l != NULL;)
16625 {
16626 bool existed;
16627 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
16628
16629 /* If we already met this node, there is nothing to compute anymore. */
16630 if (existed)
16631 {
16632 /* Make sure that the stack size is consistent wherever the execution
16633 flow comes from. */
16634 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
16635 break;
16636 }
16637 l_frame_offset = frame_offset_;
16638
16639 /* If needed, relocate the picking offset with respect to the frame
16640 offset. */
16641 if (l->frame_offset_rel)
16642 {
16643 unsigned HOST_WIDE_INT off;
16644 switch (l->dw_loc_opc)
16645 {
16646 case DW_OP_pick:
16647 off = l->dw_loc_oprnd1.v.val_unsigned;
16648 break;
16649 case DW_OP_dup:
16650 off = 0;
16651 break;
16652 case DW_OP_over:
16653 off = 1;
16654 break;
16655 default:
16656 gcc_unreachable ();
16657 }
16658 /* frame_offset_ is the size of the current stack frame, including
16659 incoming arguments. Besides, the arguments are pushed
16660 right-to-left. Thus, in order to access the Nth argument from
16661 this operation node, the picking has to skip temporaries *plus*
16662 one stack slot per argument (0 for the first one, 1 for the second
16663 one, etc.).
16664
16665 The targeted argument number (N) is already set as the operand,
16666 and the number of temporaries can be computed with:
16667 frame_offset_ - dpi->args_count */
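	     /* For instance (hypothetical numbers): with dpi->args_count == 2
		and frame_offset_ == 3 (both arguments plus one temporary), a
		pick targeting the first argument (N == 0) yields
		off = 0 + 3 - 2 = 1, i.e. DW_OP_over below.  */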
16668 off += frame_offset_ - dpi->args_count;
16669
16670 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
16671 if (off > 255)
16672 return false;
16673
16674 if (off == 0)
16675 {
16676 l->dw_loc_opc = DW_OP_dup;
16677 l->dw_loc_oprnd1.v.val_unsigned = 0;
16678 }
16679 else if (off == 1)
16680 {
16681 l->dw_loc_opc = DW_OP_over;
16682 l->dw_loc_oprnd1.v.val_unsigned = 0;
16683 }
16684 else
16685 {
16686 l->dw_loc_opc = DW_OP_pick;
16687 l->dw_loc_oprnd1.v.val_unsigned = off;
16688 }
16689 }
16690
16691 /* Update frame_offset according to the effect the current operation has
16692 on the stack. */
16693 switch (l->dw_loc_opc)
16694 {
16695 case DW_OP_deref:
16696 case DW_OP_swap:
16697 case DW_OP_rot:
16698 case DW_OP_abs:
16699 case DW_OP_neg:
16700 case DW_OP_not:
16701 case DW_OP_plus_uconst:
16702 case DW_OP_skip:
16703 case DW_OP_reg0:
16704 case DW_OP_reg1:
16705 case DW_OP_reg2:
16706 case DW_OP_reg3:
16707 case DW_OP_reg4:
16708 case DW_OP_reg5:
16709 case DW_OP_reg6:
16710 case DW_OP_reg7:
16711 case DW_OP_reg8:
16712 case DW_OP_reg9:
16713 case DW_OP_reg10:
16714 case DW_OP_reg11:
16715 case DW_OP_reg12:
16716 case DW_OP_reg13:
16717 case DW_OP_reg14:
16718 case DW_OP_reg15:
16719 case DW_OP_reg16:
16720 case DW_OP_reg17:
16721 case DW_OP_reg18:
16722 case DW_OP_reg19:
16723 case DW_OP_reg20:
16724 case DW_OP_reg21:
16725 case DW_OP_reg22:
16726 case DW_OP_reg23:
16727 case DW_OP_reg24:
16728 case DW_OP_reg25:
16729 case DW_OP_reg26:
16730 case DW_OP_reg27:
16731 case DW_OP_reg28:
16732 case DW_OP_reg29:
16733 case DW_OP_reg30:
16734 case DW_OP_reg31:
16735 case DW_OP_bregx:
16736 case DW_OP_piece:
16737 case DW_OP_deref_size:
16738 case DW_OP_nop:
16739 case DW_OP_bit_piece:
16740 case DW_OP_implicit_value:
16741 case DW_OP_stack_value:
16742 break;
16743
16744 case DW_OP_addr:
16745 case DW_OP_const1u:
16746 case DW_OP_const1s:
16747 case DW_OP_const2u:
16748 case DW_OP_const2s:
16749 case DW_OP_const4u:
16750 case DW_OP_const4s:
16751 case DW_OP_const8u:
16752 case DW_OP_const8s:
16753 case DW_OP_constu:
16754 case DW_OP_consts:
16755 case DW_OP_dup:
16756 case DW_OP_over:
16757 case DW_OP_pick:
16758 case DW_OP_lit0:
16759 case DW_OP_lit1:
16760 case DW_OP_lit2:
16761 case DW_OP_lit3:
16762 case DW_OP_lit4:
16763 case DW_OP_lit5:
16764 case DW_OP_lit6:
16765 case DW_OP_lit7:
16766 case DW_OP_lit8:
16767 case DW_OP_lit9:
16768 case DW_OP_lit10:
16769 case DW_OP_lit11:
16770 case DW_OP_lit12:
16771 case DW_OP_lit13:
16772 case DW_OP_lit14:
16773 case DW_OP_lit15:
16774 case DW_OP_lit16:
16775 case DW_OP_lit17:
16776 case DW_OP_lit18:
16777 case DW_OP_lit19:
16778 case DW_OP_lit20:
16779 case DW_OP_lit21:
16780 case DW_OP_lit22:
16781 case DW_OP_lit23:
16782 case DW_OP_lit24:
16783 case DW_OP_lit25:
16784 case DW_OP_lit26:
16785 case DW_OP_lit27:
16786 case DW_OP_lit28:
16787 case DW_OP_lit29:
16788 case DW_OP_lit30:
16789 case DW_OP_lit31:
16790 case DW_OP_breg0:
16791 case DW_OP_breg1:
16792 case DW_OP_breg2:
16793 case DW_OP_breg3:
16794 case DW_OP_breg4:
16795 case DW_OP_breg5:
16796 case DW_OP_breg6:
16797 case DW_OP_breg7:
16798 case DW_OP_breg8:
16799 case DW_OP_breg9:
16800 case DW_OP_breg10:
16801 case DW_OP_breg11:
16802 case DW_OP_breg12:
16803 case DW_OP_breg13:
16804 case DW_OP_breg14:
16805 case DW_OP_breg15:
16806 case DW_OP_breg16:
16807 case DW_OP_breg17:
16808 case DW_OP_breg18:
16809 case DW_OP_breg19:
16810 case DW_OP_breg20:
16811 case DW_OP_breg21:
16812 case DW_OP_breg22:
16813 case DW_OP_breg23:
16814 case DW_OP_breg24:
16815 case DW_OP_breg25:
16816 case DW_OP_breg26:
16817 case DW_OP_breg27:
16818 case DW_OP_breg28:
16819 case DW_OP_breg29:
16820 case DW_OP_breg30:
16821 case DW_OP_breg31:
16822 case DW_OP_fbreg:
16823 case DW_OP_push_object_address:
16824 case DW_OP_call_frame_cfa:
16825 ++frame_offset_;
16826 break;
16827
16828 case DW_OP_drop:
16829 case DW_OP_xderef:
16830 case DW_OP_and:
16831 case DW_OP_div:
16832 case DW_OP_minus:
16833 case DW_OP_mod:
16834 case DW_OP_mul:
16835 case DW_OP_or:
16836 case DW_OP_plus:
16837 case DW_OP_shl:
16838 case DW_OP_shr:
16839 case DW_OP_shra:
16840 case DW_OP_xor:
16841 case DW_OP_bra:
16842 case DW_OP_eq:
16843 case DW_OP_ge:
16844 case DW_OP_gt:
16845 case DW_OP_le:
16846 case DW_OP_lt:
16847 case DW_OP_ne:
16848 case DW_OP_regx:
16849 case DW_OP_xderef_size:
16850 --frame_offset_;
16851 break;
16852
16853 case DW_OP_call2:
16854 case DW_OP_call4:
16855 case DW_OP_call_ref:
16856 {
16857 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
16858 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
16859
16860 if (stack_usage == NULL)
16861 return false;
16862 frame_offset_ += *stack_usage;
16863 break;
16864 }
16865
16866 case DW_OP_implicit_pointer:
16867 case DW_OP_entry_value:
16868 case DW_OP_const_type:
16869 case DW_OP_regval_type:
16870 case DW_OP_deref_type:
16871 case DW_OP_convert:
16872 case DW_OP_reinterpret:
16873 case DW_OP_form_tls_address:
16874 case DW_OP_GNU_push_tls_address:
16875 case DW_OP_GNU_uninit:
16876 case DW_OP_GNU_encoded_addr:
16877 case DW_OP_GNU_implicit_pointer:
16878 case DW_OP_GNU_entry_value:
16879 case DW_OP_GNU_const_type:
16880 case DW_OP_GNU_regval_type:
16881 case DW_OP_GNU_deref_type:
16882 case DW_OP_GNU_convert:
16883 case DW_OP_GNU_reinterpret:
16884 case DW_OP_GNU_parameter_ref:
16885 /* loc_list_from_tree will probably not output these operations for
16886 size functions, so assume they will not appear here. */
16887 /* Fall through... */
16888
16889 default:
16890 gcc_unreachable ();
16891 }
16892
16893 /* Now, follow the control flow (except subroutine calls). */
16894 switch (l->dw_loc_opc)
16895 {
16896 case DW_OP_bra:
16897 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
16898 frame_offsets))
16899 return false;
16900 /* Fall through. */
16901
16902 case DW_OP_skip:
16903 l = l->dw_loc_oprnd1.v.val_loc;
16904 break;
16905
16906 case DW_OP_stack_value:
16907 return true;
16908
16909 default:
16910 l = l->dw_loc_next;
16911 break;
16912 }
16913 }
16914
16915 return true;
16916 }
16917
16918 /* Make a DFS over operations reachable through LOC (i.e. follow branch
16919 operations) in order to resolve the operand of DW_OP_pick operations that
16920 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
16921 offset *before* LOC is executed. Return true if all relocations were
16922 successful. */
16923
16924 static bool
16925 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
16926 struct dwarf_procedure_info *dpi)
16927 {
16928 /* Associate to all visited operations the frame offset *before* evaluating
16929 this operation. */
16930 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
16931
16932 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
16933 frame_offsets);
16934 }
16935
16936 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
16937 Return NULL if it is not possible. */
16938
16939 static dw_die_ref
16940 function_to_dwarf_procedure (tree fndecl)
16941 {
16942 struct loc_descr_context ctx;
16943 struct dwarf_procedure_info dpi;
16944 dw_die_ref dwarf_proc_die;
16945 tree tree_body = DECL_SAVED_TREE (fndecl);
16946 dw_loc_descr_ref loc_body, epilogue;
16947
16948 tree cursor;
16949 unsigned i;
16950
16951 /* Do not generate multiple DWARF procedures for the same function
16952 declaration. */
16953 dwarf_proc_die = lookup_decl_die (fndecl);
16954 if (dwarf_proc_die != NULL)
16955 return dwarf_proc_die;
16956
16957 /* DWARF procedures are available starting with the DWARFv3 standard. */
16958 if (dwarf_version < 3 && dwarf_strict)
16959 return NULL;
16960
16961 /* We handle only functions for which we still have a body, that return a
16962 supported type and that take arguments with supported types. Note that
16963 there is no point translating functions that return nothing. */
16964 if (tree_body == NULL_TREE
16965 || DECL_RESULT (fndecl) == NULL_TREE
16966 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
16967 return NULL;
16968
16969 for (cursor = DECL_ARGUMENTS (fndecl);
16970 cursor != NULL_TREE;
16971 cursor = TREE_CHAIN (cursor))
16972 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
16973 return NULL;
16974
16975 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
16976 if (TREE_CODE (tree_body) != RETURN_EXPR)
16977 return NULL;
16978 tree_body = TREE_OPERAND (tree_body, 0);
16979 if (TREE_CODE (tree_body) != MODIFY_EXPR
16980 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
16981 return NULL;
16982 tree_body = TREE_OPERAND (tree_body, 1);
16983
16984 /* Try to translate the body expression itself. Note that this will probably
16985 cause an infinite recursion if its call graph has a cycle. This is very
16986 unlikely for size functions, however, so don't bother with such things at
16987 the moment. */
16988 ctx.context_type = NULL_TREE;
16989 ctx.base_decl = NULL_TREE;
16990 ctx.dpi = &dpi;
16991 ctx.placeholder_arg = false;
16992 ctx.placeholder_seen = false;
16993 dpi.fndecl = fndecl;
16994 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
16995 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
16996 if (!loc_body)
16997 return NULL;
16998
16999 /* After evaluating all operands in "loc_body", we should still have on the
17000 stack all arguments plus the desired function result (top of the stack).
17001 Generate code in order to keep only the result in our stack frame. */
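  /* For instance (a sketch): with dpi.args_count == 2, the epilogue built
     below is DW_OP_swap DW_OP_drop DW_OP_swap DW_OP_drop, which pops both
     argument slots from under the result while leaving the result on top.  */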
17002 epilogue = NULL;
17003 for (i = 0; i < dpi.args_count; ++i)
17004 {
17005 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
17006 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
17007 op_couple->dw_loc_next->dw_loc_next = epilogue;
17008 epilogue = op_couple;
17009 }
17010 add_loc_descr (&loc_body, epilogue);
17011 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
17012 return NULL;
17013
17014 /* Trailing nops from loc_descriptor_from_tree (if any) cannot be removed
17015 because they are considered useful. Now that there is an epilogue, they
17016 no longer are, so give it another try.
17017 loc_descr_without_nops (loc_body);
17018
17019 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
17020 a DW_TAG_dwarf_procedure, so we may have a conflict, here. It's unlikely,
17021 though, given that size functions do not come from source, so they should
17022 not have a dedicated DW_TAG_subprogram DIE. */
17023 dwarf_proc_die
17024 = new_dwarf_proc_die (loc_body, fndecl,
17025 get_context_die (DECL_CONTEXT (fndecl)));
17026
17027 /* The called DWARF procedure consumes one stack slot per argument and
17028 returns one stack slot. */
17029 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
17030
17031 return dwarf_proc_die;
17032 }
17033
17034
17035 /* Generate a Dwarf location list representing LOC.
17036 If WANT_ADDRESS is 0, an expression computing the value of LOC will be returned.
17037 If WANT_ADDRESS is 1, an expression computing the address of LOC will be returned.
17038 If WANT_ADDRESS is 2, an expression computing an address usable in a location
17039 description will be returned (i.e. DW_OP_reg can be used
17040 to refer to register values).
17041
17042 CONTEXT provides information to customize the location descriptions
17043 generation. Its context_type field specifies what type is implicitly
17044 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
17045 will not be generated.
17046
17047 Its DPI field determines whether we are generating a DWARF expression for a
17048 DWARF procedure, so PARM_DECL references are processed specifically.
17049
17050 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
17051 and dpi fields were null. */
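/* As an illustrative sketch (not normative): for a variable living at frame
   offset -16, WANT_ADDRESS == 1 would typically yield DW_OP_fbreg -16 (its
   address) while WANT_ADDRESS == 0 would yield its value, e.g.
   DW_OP_fbreg -16 DW_OP_deref; with WANT_ADDRESS == 2, a register-allocated
   variable can be described directly as DW_OP_reg<n>.  */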
17052
17053 static dw_loc_list_ref
17054 loc_list_from_tree_1 (tree loc, int want_address,
17055 struct loc_descr_context *context)
17056 {
17057 dw_loc_descr_ref ret = NULL, ret1 = NULL;
17058 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17059 int have_address = 0;
17060 enum dwarf_location_atom op;
17061
17062 /* ??? Most of the time we do not take proper care of sign/zero
17063 extending the values. Hopefully this won't be a real
17064 problem... */
17065
17066 if (context != NULL
17067 && context->base_decl == loc
17068 && want_address == 0)
17069 {
17070 if (dwarf_version >= 3 || !dwarf_strict)
17071 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
17072 NULL, NULL, NULL);
17073 else
17074 return NULL;
17075 }
17076
17077 switch (TREE_CODE (loc))
17078 {
17079 case ERROR_MARK:
17080 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
17081 return 0;
17082
17083 case PLACEHOLDER_EXPR:
17084 /* This case involves extracting fields from an object to determine the
17085 position of other fields. It is supposed to appear only as the first
17086 operand of COMPONENT_REF nodes and to reference precisely the type
17087 that the context allows. */
17088 if (context != NULL
17089 && TREE_TYPE (loc) == context->context_type
17090 && want_address >= 1)
17091 {
17092 if (dwarf_version >= 3 || !dwarf_strict)
17093 {
17094 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
17095 have_address = 1;
17096 break;
17097 }
17098 else
17099 return NULL;
17100 }
17101 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
17102 the single argument passed by consumer. */
17103 else if (context != NULL
17104 && context->placeholder_arg
17105 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
17106 && want_address == 0)
17107 {
17108 ret = new_loc_descr (DW_OP_pick, 0, 0);
17109 ret->frame_offset_rel = 1;
17110 context->placeholder_seen = true;
17111 break;
17112 }
17113 else
17114 expansion_failed (loc, NULL_RTX,
17115 "PLACEHOLDER_EXPR for an unexpected type");
17116 break;
17117
17118 case CALL_EXPR:
17119 {
17120 const int nargs = call_expr_nargs (loc);
17121 tree callee = get_callee_fndecl (loc);
17122 int i;
17123 dw_die_ref dwarf_proc;
17124
17125 if (callee == NULL_TREE)
17126 goto call_expansion_failed;
17127
17128 /* We handle only functions that return an integer. */
17129 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
17130 goto call_expansion_failed;
17131
17132 dwarf_proc = function_to_dwarf_procedure (callee);
17133 if (dwarf_proc == NULL)
17134 goto call_expansion_failed;
17135
17136 /* Evaluate arguments right-to-left so that the first argument will
17137 be the top-most one on the stack. */
17138 for (i = nargs - 1; i >= 0; --i)
17139 {
17140 dw_loc_descr_ref loc_descr
17141 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
17142 context);
17143
17144 if (loc_descr == NULL)
17145 goto call_expansion_failed;
17146
17147 add_loc_descr (&ret, loc_descr);
17148 }
17149
17150 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
17151 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
17152 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
17153 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
17154 add_loc_descr (&ret, ret1);
17155 break;
17156
17157 call_expansion_failed:
17158 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
17159 /* There are no opcodes for these operations. */
17160 return 0;
17161 }
17162
17163 case PREINCREMENT_EXPR:
17164 case PREDECREMENT_EXPR:
17165 case POSTINCREMENT_EXPR:
17166 case POSTDECREMENT_EXPR:
17167 expansion_failed (loc, NULL_RTX, "PRE/POST INCREMENT/DECREMENT");
17168 /* There are no opcodes for these operations. */
17169 return 0;
17170
17171 case ADDR_EXPR:
17172 /* If we already want an address, see if there is INDIRECT_REF inside
17173 e.g. for &this->field. */
17174 if (want_address)
17175 {
17176 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
17177 (loc, want_address == 2, context);
17178 if (list_ret)
17179 have_address = 1;
17180 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
17181 && (ret = cst_pool_loc_descr (loc)))
17182 have_address = 1;
17183 }
17184 /* Otherwise, process the argument and look for the address. */
17185 if (!list_ret && !ret)
17186 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
17187 else
17188 {
17189 if (want_address)
17190 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
17191 return NULL;
17192 }
17193 break;
17194
17195 case VAR_DECL:
17196 if (DECL_THREAD_LOCAL_P (loc))
17197 {
17198 rtx rtl;
17199 enum dwarf_location_atom tls_op;
17200 enum dtprel_bool dtprel = dtprel_false;
17201
17202 if (targetm.have_tls)
17203 {
17204 /* If this is not defined, we have no way to emit the
17205 data. */
17206 if (!targetm.asm_out.output_dwarf_dtprel)
17207 return 0;
17208
17209 /* The way DW_OP_GNU_push_tls_address is specified, we
17210 can only look up addresses of objects in the current
17211 module. We used DW_OP_addr as the first op, but that's
17212 wrong, because DW_OP_addr is relocated by the debug
17213 info consumer, while DW_OP_GNU_push_tls_address
17214 operand shouldn't be. */
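	    /* As an illustrative sketch, the expression built below for a
	       module-local TLS variable is an address-sized dtprel operand
	       followed by DW_OP_GNU_push_tls_address (or DW_OP_form_tls_address
	       for DWARF 5+).  */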
17215 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
17216 return 0;
17217 dtprel = dtprel_true;
17218 /* We check for DWARF 5 here because gdb did not implement
17219 DW_OP_form_tls_address until after 7.12. */
17220 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
17221 : DW_OP_GNU_push_tls_address);
17222 }
17223 else
17224 {
17225 if (!targetm.emutls.debug_form_tls_address
17226 || !(dwarf_version >= 3 || !dwarf_strict))
17227 return 0;
17228 /* We stuffed the control variable into the DECL_VALUE_EXPR
17229 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
17230 no longer appear in gimple code. We used the control
17231 variable specifically so that we could pick it up here. */
17232 loc = DECL_VALUE_EXPR (loc);
17233 tls_op = DW_OP_form_tls_address;
17234 }
17235
17236 rtl = rtl_for_decl_location (loc);
17237 if (rtl == NULL_RTX)
17238 return 0;
17239
17240 if (!MEM_P (rtl))
17241 return 0;
17242 rtl = XEXP (rtl, 0);
17243 if (! CONSTANT_P (rtl))
17244 return 0;
17245
17246 ret = new_addr_loc_descr (rtl, dtprel);
17247 ret1 = new_loc_descr (tls_op, 0, 0);
17248 add_loc_descr (&ret, ret1);
17249
17250 have_address = 1;
17251 break;
17252 }
17253 /* FALLTHRU */
17254
17255 case PARM_DECL:
17256 if (context != NULL && context->dpi != NULL
17257 && DECL_CONTEXT (loc) == context->dpi->fndecl)
17258 {
17259 /* We are generating code for a DWARF procedure and we want to access
17260 one of its arguments: find the appropriate argument offset and let
17261 the resolve_args_picking pass compute the offset that complies
17262 with the stack frame size. */
17263 unsigned i = 0;
17264 tree cursor;
17265
17266 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
17267 cursor != NULL_TREE && cursor != loc;
17268 cursor = TREE_CHAIN (cursor), ++i)
17269 ;
17270 /* If we are translating a DWARF procedure, all referenced parameters
17271 must belong to the current function. */
17272 gcc_assert (cursor != NULL_TREE);
17273
17274 ret = new_loc_descr (DW_OP_pick, i, 0);
17275 ret->frame_offset_rel = 1;
17276 break;
17277 }
17278 /* FALLTHRU */
17279
17280 case RESULT_DECL:
17281 if (DECL_HAS_VALUE_EXPR_P (loc))
17282 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
17283 want_address, context);
17284 /* FALLTHRU */
17285
17286 case FUNCTION_DECL:
17287 {
17288 rtx rtl;
17289 var_loc_list *loc_list = lookup_decl_loc (loc);
17290
17291 if (loc_list && loc_list->first)
17292 {
17293 list_ret = dw_loc_list (loc_list, loc, want_address);
17294 have_address = want_address != 0;
17295 break;
17296 }
17297 rtl = rtl_for_decl_location (loc);
17298 if (rtl == NULL_RTX)
17299 {
17300 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
17301 return 0;
17302 }
17303 else if (CONST_INT_P (rtl))
17304 {
17305 HOST_WIDE_INT val = INTVAL (rtl);
17306 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
17307 val &= GET_MODE_MASK (DECL_MODE (loc));
17308 ret = int_loc_descriptor (val);
17309 }
17310 else if (GET_CODE (rtl) == CONST_STRING)
17311 {
17312 expansion_failed (loc, NULL_RTX, "CONST_STRING");
17313 return 0;
17314 }
17315 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
17316 ret = new_addr_loc_descr (rtl, dtprel_false);
17317 else
17318 {
17319 machine_mode mode, mem_mode;
17320
17321 /* Certain constructs can only be represented at top-level. */
17322 if (want_address == 2)
17323 {
17324 ret = loc_descriptor (rtl, VOIDmode,
17325 VAR_INIT_STATUS_INITIALIZED);
17326 have_address = 1;
17327 }
17328 else
17329 {
17330 mode = GET_MODE (rtl);
17331 mem_mode = VOIDmode;
17332 if (MEM_P (rtl))
17333 {
17334 mem_mode = mode;
17335 mode = get_address_mode (rtl);
17336 rtl = XEXP (rtl, 0);
17337 have_address = 1;
17338 }
17339 ret = mem_loc_descriptor (rtl, mode, mem_mode,
17340 VAR_INIT_STATUS_INITIALIZED);
17341 }
17342 if (!ret)
17343 expansion_failed (loc, rtl,
17344 "failed to produce loc descriptor for rtl");
17345 }
17346 }
17347 break;
17348
17349 case MEM_REF:
17350 if (!integer_zerop (TREE_OPERAND (loc, 1)))
17351 {
17352 have_address = 1;
17353 goto do_plus;
17354 }
17355 /* Fallthru. */
17356 case INDIRECT_REF:
17357 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
17358 have_address = 1;
17359 break;
17360
17361 case TARGET_MEM_REF:
17362 case SSA_NAME:
17363 case DEBUG_EXPR_DECL:
17364 return NULL;
17365
17366 case COMPOUND_EXPR:
17367 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
17368 context);
17369
17370 CASE_CONVERT:
17371 case VIEW_CONVERT_EXPR:
17372 case SAVE_EXPR:
17373 case MODIFY_EXPR:
17374 case NON_LVALUE_EXPR:
17375 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
17376 context);
17377
17378 case COMPONENT_REF:
17379 case BIT_FIELD_REF:
17380 case ARRAY_REF:
17381 case ARRAY_RANGE_REF:
17382 case REALPART_EXPR:
17383 case IMAGPART_EXPR:
17384 {
17385 tree obj, offset;
17386 HOST_WIDE_INT bitsize, bitpos, bytepos;
17387 machine_mode mode;
17388 int unsignedp, reversep, volatilep = 0;
17389
17390 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
17391 &unsignedp, &reversep, &volatilep);
17392
17393 gcc_assert (obj != loc);
17394
17395 list_ret = loc_list_from_tree_1 (obj,
17396 want_address == 2
17397 && !bitpos && !offset ? 2 : 1,
17398 context);
17399 /* TODO: We can extract the value of a small expression via shifting even
17400 for a nonzero bitpos. */
17401 if (list_ret == 0)
17402 return 0;
17403 if (bitpos % BITS_PER_UNIT != 0 || bitsize % BITS_PER_UNIT != 0)
17404 {
17405 expansion_failed (loc, NULL_RTX,
17406 "bitfield access");
17407 return 0;
17408 }
17409
17410 if (offset != NULL_TREE)
17411 {
17412 /* Variable offset. */
17413 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
17414 if (list_ret1 == 0)
17415 return 0;
17416 add_loc_list (&list_ret, list_ret1);
17417 if (!list_ret)
17418 return 0;
17419 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
17420 }
17421
17422 bytepos = bitpos / BITS_PER_UNIT;
17423 if (bytepos > 0)
17424 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst, bytepos, 0));
17425 else if (bytepos < 0)
17426 loc_list_plus_const (list_ret, bytepos);
17427
17428 have_address = 1;
17429 break;
17430 }
17431
17432 case INTEGER_CST:
17433 if ((want_address || !tree_fits_shwi_p (loc))
17434 && (ret = cst_pool_loc_descr (loc)))
17435 have_address = 1;
17436 else if (want_address == 2
17437 && tree_fits_shwi_p (loc)
17438 && (ret = address_of_int_loc_descriptor
17439 (int_size_in_bytes (TREE_TYPE (loc)),
17440 tree_to_shwi (loc))))
17441 have_address = 1;
17442 else if (tree_fits_shwi_p (loc))
17443 ret = int_loc_descriptor (tree_to_shwi (loc));
17444 else if (tree_fits_uhwi_p (loc))
17445 ret = uint_loc_descriptor (tree_to_uhwi (loc));
17446 else
17447 {
17448 expansion_failed (loc, NULL_RTX,
17449 "Integer operand is not host integer");
17450 return 0;
17451 }
17452 break;
17453
17454 case CONSTRUCTOR:
17455 case REAL_CST:
17456 case STRING_CST:
17457 case COMPLEX_CST:
17458 if ((ret = cst_pool_loc_descr (loc)))
17459 have_address = 1;
17460 else if (TREE_CODE (loc) == CONSTRUCTOR)
17461 {
17462 tree type = TREE_TYPE (loc);
17463 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
17464 unsigned HOST_WIDE_INT offset = 0;
17465 unsigned HOST_WIDE_INT cnt;
17466 constructor_elt *ce;
17467
17468 if (TREE_CODE (type) == RECORD_TYPE)
17469 {
17470 /* This is very limited, but it's enough to output
17471 pointers to member functions, as long as the
17472 referenced function is defined in the current
17473 translation unit. */
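		  /* A sketch of what the loop below builds for a two-field,
		     16-byte record whose fields are 8 bytes each:
		        <expr for field 0> DW_OP_piece 8
		        <expr for field 1> DW_OP_piece 8
		     with an additional bare DW_OP_piece covering any gap left
		     by padding or a missing initializer.  */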
17474 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
17475 {
17476 tree val = ce->value;
17477
17478 tree field = ce->index;
17479
17480 if (val)
17481 STRIP_NOPS (val);
17482
17483 if (!field || DECL_BIT_FIELD (field))
17484 {
17485 expansion_failed (loc, NULL_RTX,
17486 "bitfield in record type constructor");
17487 size = offset = (unsigned HOST_WIDE_INT)-1;
17488 ret = NULL;
17489 break;
17490 }
17491
17492 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
17493 unsigned HOST_WIDE_INT pos = int_byte_position (field);
17494 gcc_assert (pos + fieldsize <= size);
17495 if (pos < offset)
17496 {
17497 expansion_failed (loc, NULL_RTX,
17498 "out-of-order fields in record constructor");
17499 size = offset = (unsigned HOST_WIDE_INT)-1;
17500 ret = NULL;
17501 break;
17502 }
17503 if (pos > offset)
17504 {
17505 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
17506 add_loc_descr (&ret, ret1);
17507 offset = pos;
17508 }
17509 if (val && fieldsize != 0)
17510 {
17511 ret1 = loc_descriptor_from_tree (val, want_address, context);
17512 if (!ret1)
17513 {
17514 expansion_failed (loc, NULL_RTX,
17515 "unsupported expression in field");
17516 size = offset = (unsigned HOST_WIDE_INT)-1;
17517 ret = NULL;
17518 break;
17519 }
17520 add_loc_descr (&ret, ret1);
17521 }
17522 if (fieldsize)
17523 {
17524 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
17525 add_loc_descr (&ret, ret1);
17526 offset = pos + fieldsize;
17527 }
17528 }
17529
17530 if (offset != size)
17531 {
17532 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
17533 add_loc_descr (&ret, ret1);
17534 offset = size;
17535 }
17536
17537 have_address = !!want_address;
17538 }
17539 else
17540 expansion_failed (loc, NULL_RTX,
17541 "constructor of non-record type");
17542 }
17543 else
17544 /* We can construct small constants here using int_loc_descriptor. */
17545 expansion_failed (loc, NULL_RTX,
17546 "constructor or constant not in constant pool");
17547 break;
17548
17549 case TRUTH_AND_EXPR:
17550 case TRUTH_ANDIF_EXPR:
17551 case BIT_AND_EXPR:
17552 op = DW_OP_and;
17553 goto do_binop;
17554
17555 case TRUTH_XOR_EXPR:
17556 case BIT_XOR_EXPR:
17557 op = DW_OP_xor;
17558 goto do_binop;
17559
17560 case TRUTH_OR_EXPR:
17561 case TRUTH_ORIF_EXPR:
17562 case BIT_IOR_EXPR:
17563 op = DW_OP_or;
17564 goto do_binop;
17565
17566 case FLOOR_DIV_EXPR:
17567 case CEIL_DIV_EXPR:
17568 case ROUND_DIV_EXPR:
17569 case TRUNC_DIV_EXPR:
17570 case EXACT_DIV_EXPR:
17571 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
17572 return 0;
17573 op = DW_OP_div;
17574 goto do_binop;
17575
17576 case MINUS_EXPR:
17577 op = DW_OP_minus;
17578 goto do_binop;
17579
17580 case FLOOR_MOD_EXPR:
17581 case CEIL_MOD_EXPR:
17582 case ROUND_MOD_EXPR:
17583 case TRUNC_MOD_EXPR:
17584 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
17585 {
17586 op = DW_OP_mod;
17587 goto do_binop;
17588 }
17589 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
17590 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
17591 if (list_ret == 0 || list_ret1 == 0)
17592 return 0;
17593
17594 add_loc_list (&list_ret, list_ret1);
17595 if (list_ret == 0)
17596 return 0;
17597 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
17598 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
17599 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
17600 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
17601 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
17602 break;
17603
17604 case MULT_EXPR:
17605 op = DW_OP_mul;
17606 goto do_binop;
17607
17608 case LSHIFT_EXPR:
17609 op = DW_OP_shl;
17610 goto do_binop;
17611
17612 case RSHIFT_EXPR:
17613 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
17614 goto do_binop;
17615
17616 case POINTER_PLUS_EXPR:
17617 case PLUS_EXPR:
17618 do_plus:
17619 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
17620 {
17621 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
17622 smarter to encode their opposite. The DW_OP_plus_uconst operation
17623 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
17624 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
17625 bytes, Y being the size of the operation that pushes the opposite
17626 of the addend. So let's choose the smallest representation. */
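	  /* For instance, with a 64-bit address size, an addend of -8 needs
	     a 10-byte ULEB128 operand for DW_OP_plus_uconst (11 bytes in
	     total), whereas "DW_OP_lit8; DW_OP_minus" takes only 2 bytes.  */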
17627 const tree tree_addend = TREE_OPERAND (loc, 1);
17628 offset_int wi_addend;
17629 HOST_WIDE_INT shwi_addend;
17630 dw_loc_descr_ref loc_naddend;
17631
17632 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
17633 if (list_ret == 0)
17634 return 0;
17635
17636 /* Try to get the literal to push. It is the opposite of the addend,
17637 so as we rely on wrapping during DWARF evaluation, first decode
17638 the literal as a "DWARF-sized" signed number. */
17639 wi_addend = wi::to_offset (tree_addend);
17640 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
17641 shwi_addend = wi_addend.to_shwi ();
17642 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
17643 ? int_loc_descriptor (-shwi_addend)
17644 : NULL;
17645
17646 if (loc_naddend != NULL
17647 && ((unsigned) size_of_uleb128 (shwi_addend)
17648 > size_of_loc_descr (loc_naddend)))
17649 {
17650 add_loc_descr_to_each (list_ret, loc_naddend);
17651 add_loc_descr_to_each (list_ret,
17652 new_loc_descr (DW_OP_minus, 0, 0));
17653 }
17654 else
17655 {
17656 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
17657 {
17658 loc_naddend = loc_cur;
17659 loc_cur = loc_cur->dw_loc_next;
17660 ggc_free (loc_naddend);
17661 }
17662 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
17663 }
17664 break;
17665 }
17666
17667 op = DW_OP_plus;
17668 goto do_binop;
17669
17670 case LE_EXPR:
17671 op = DW_OP_le;
17672 goto do_comp_binop;
17673
17674 case GE_EXPR:
17675 op = DW_OP_ge;
17676 goto do_comp_binop;
17677
17678 case LT_EXPR:
17679 op = DW_OP_lt;
17680 goto do_comp_binop;
17681
17682 case GT_EXPR:
17683 op = DW_OP_gt;
17684 goto do_comp_binop;
17685
17686 do_comp_binop:
17687 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
17688 {
17689 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
17690 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
17691 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
17692 TREE_CODE (loc));
17693 break;
17694 }
17695 else
17696 goto do_binop;
17697
17698 case EQ_EXPR:
17699 op = DW_OP_eq;
17700 goto do_binop;
17701
17702 case NE_EXPR:
17703 op = DW_OP_ne;
17704 goto do_binop;
17705
17706 do_binop:
17707 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
17708 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
17709 if (list_ret == 0 || list_ret1 == 0)
17710 return 0;
17711
17712 add_loc_list (&list_ret, list_ret1);
17713 if (list_ret == 0)
17714 return 0;
17715 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
17716 break;
17717
17718 case TRUTH_NOT_EXPR:
17719 case BIT_NOT_EXPR:
17720 op = DW_OP_not;
17721 goto do_unop;
17722
17723 case ABS_EXPR:
17724 op = DW_OP_abs;
17725 goto do_unop;
17726
17727 case NEGATE_EXPR:
17728 op = DW_OP_neg;
17729 goto do_unop;
17730
17731 do_unop:
17732 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
17733 if (list_ret == 0)
17734 return 0;
17735
17736 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
17737 break;
17738
17739 case MIN_EXPR:
17740 case MAX_EXPR:
17741 {
17742 const enum tree_code code =
17743 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
17744
17745 loc = build3 (COND_EXPR, TREE_TYPE (loc),
17746 build2 (code, integer_type_node,
17747 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
17748 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
17749 }
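      /* E.g. MIN_EXPR (a, b) becomes the equivalent COND_EXPR
	 "a > b ? b : a", which the COND_EXPR case below then handles.  */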
17750
17751 /* fall through */
17752
17753 case COND_EXPR:
17754 {
17755 dw_loc_descr_ref lhs
17756 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
17757 dw_loc_list_ref rhs
17758 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
17759 dw_loc_descr_ref bra_node, jump_node, tmp;
17760
17761 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
17762 if (list_ret == 0 || lhs == 0 || rhs == 0)
17763 return 0;
17764
17765 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
17766 add_loc_descr_to_each (list_ret, bra_node);
17767
17768 add_loc_list (&list_ret, rhs);
17769 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
17770 add_loc_descr_to_each (list_ret, jump_node);
17771
17772 add_loc_descr_to_each (list_ret, lhs);
17773 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
17774 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
17775
17776 /* ??? Need a node to point the skip at. Use a nop. */
17777 tmp = new_loc_descr (DW_OP_nop, 0, 0);
17778 add_loc_descr_to_each (list_ret, tmp);
17779 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
17780 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
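	/* The expression built here is laid out as:
	     <cond>; DW_OP_bra -> THEN; <else-value>; DW_OP_skip -> END;
	     THEN: <then-value>; END: DW_OP_nop
	   DW_OP_bra branches only when the condition value popped from the
	   stack is non-zero.  */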
17781 }
17782 break;
17783
17784 case FIX_TRUNC_EXPR:
17785 return 0;
17786
17787 default:
17788 /* Leave front-end specific codes as simply unknown. This comes
17789 up, for instance, with the C STMT_EXPR. */
17790 if ((unsigned int) TREE_CODE (loc)
17791 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
17792 {
17793 expansion_failed (loc, NULL_RTX,
17794 "language specific tree node");
17795 return 0;
17796 }
17797
17798 	 /* Otherwise this is a generic code; we should just list all of
17799 	    these explicitly. We forgot one. */
17800 if (flag_checking)
17801 gcc_unreachable ();
17802
17803 /* In a release build, we want to degrade gracefully: better to
17804 generate incomplete debugging information than to crash. */
17805 return NULL;
17806 }
17807
17808 if (!ret && !list_ret)
17809 return 0;
17810
17811 if (want_address == 2 && !have_address
17812 && (dwarf_version >= 4 || !dwarf_strict))
17813 {
17814 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
17815 {
17816 expansion_failed (loc, NULL_RTX,
17817 "DWARF address size mismatch");
17818 return 0;
17819 }
17820 if (ret)
17821 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
17822 else
17823 add_loc_descr_to_each (list_ret,
17824 new_loc_descr (DW_OP_stack_value, 0, 0));
17825 have_address = 1;
17826 }
17827   /* Report a failure if we can't fill the request for an address. */
17828 if (want_address && !have_address)
17829 {
17830 expansion_failed (loc, NULL_RTX,
17831 "Want address and only have value");
17832 return 0;
17833 }
17834
17835 gcc_assert (!ret || !list_ret);
17836
17837 /* If we've got an address and don't want one, dereference. */
17838 if (!want_address && have_address)
17839 {
17840 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
17841
17842 if (size > DWARF2_ADDR_SIZE || size == -1)
17843 {
17844 expansion_failed (loc, NULL_RTX,
17845 "DWARF address size mismatch");
17846 return 0;
17847 }
17848 else if (size == DWARF2_ADDR_SIZE)
17849 op = DW_OP_deref;
17850 else
17851 op = DW_OP_deref_size;
17852
17853 if (ret)
17854 add_loc_descr (&ret, new_loc_descr (op, size, 0));
17855 else
17856 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
17857 }
17858 if (ret)
17859 list_ret = new_loc_list (ret, NULL, NULL, NULL);
17860
17861 return list_ret;
17862 }
17863
17864 /* Likewise, but strip useless DW_OP_nop operations in the resulting
17865 expressions. */
17866
17867 static dw_loc_list_ref
17868 loc_list_from_tree (tree loc, int want_address,
17869 struct loc_descr_context *context)
17870 {
17871 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
17872
17873 for (dw_loc_list_ref loc_cur = result;
17874 loc_cur != NULL; loc_cur =
17875 loc_cur->dw_loc_next)
17876 loc_descr_without_nops (loc_cur->expr);
17877 return result;
17878 }
17879
17880 /* Same as above but return only single location expression. */
17881 static dw_loc_descr_ref
17882 loc_descriptor_from_tree (tree loc, int want_address,
17883 struct loc_descr_context *context)
17884 {
17885 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
17886 if (!ret)
17887 return NULL;
17888 if (ret->dw_loc_next)
17889 {
17890 expansion_failed (loc, NULL_RTX,
17891 "Location list where only loc descriptor needed");
17892 return NULL;
17893 }
17894 return ret->expr;
17895 }
17896
17897 /* Given a value, round it up to the lowest multiple of `boundary'
17898 which is not less than the value itself. */
17899
17900 static inline HOST_WIDE_INT
17901 ceiling (HOST_WIDE_INT value, unsigned int boundary)
17902 {
17903 return (((value + boundary - 1) / boundary) * boundary);
17904 }
17905
17906 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
17907 pointer to the declared type for the relevant field variable, or return
17908 `integer_type_node' if the given node turns out to be an
17909 ERROR_MARK node. */
17910
17911 static inline tree
17912 field_type (const_tree decl)
17913 {
17914 tree type;
17915
17916 if (TREE_CODE (decl) == ERROR_MARK)
17917 return integer_type_node;
17918
17919 type = DECL_BIT_FIELD_TYPE (decl);
17920 if (type == NULL_TREE)
17921 type = TREE_TYPE (decl);
17922
17923 return type;
17924 }
17925
17926 /* Given a pointer to a tree node, return the alignment in bits for
17927 it, or else return BITS_PER_WORD if the node actually turns out to
17928 be an ERROR_MARK node. */
17929
17930 static inline unsigned
17931 simple_type_align_in_bits (const_tree type)
17932 {
17933 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
17934 }
17935
17936 static inline unsigned
17937 simple_decl_align_in_bits (const_tree decl)
17938 {
17939 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
17940 }
17941
17942 /* Return the result of rounding T up to ALIGN. */
17943
17944 static inline offset_int
17945 round_up_to_align (const offset_int &t, unsigned int align)
17946 {
17947 return wi::udiv_trunc (t + align - 1, align) * align;
17948 }
17949
17950 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
17951 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
17952 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
17953 if we fail to return the size in one of these two forms. */
17954
17955 static dw_loc_descr_ref
17956 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
17957 {
17958 tree tree_size;
17959 struct loc_descr_context ctx;
17960
17961   /* Return a constant integer if at all possible. */
17962 *cst_size = int_size_in_bytes (type);
17963 if (*cst_size != -1)
17964 return NULL;
17965
17966 ctx.context_type = const_cast<tree> (type);
17967 ctx.base_decl = NULL_TREE;
17968 ctx.dpi = NULL;
17969 ctx.placeholder_arg = false;
17970 ctx.placeholder_seen = false;
17971
17972 type = TYPE_MAIN_VARIANT (type);
17973 tree_size = TYPE_SIZE_UNIT (type);
17974 return ((tree_size != NULL_TREE)
17975 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
17976 : NULL);
17977 }
17978
17979 /* Helper structure for RECORD_TYPE processing. */
17980 struct vlr_context
17981 {
17982 /* Root RECORD_TYPE. It is needed to generate data member location
17983 descriptions in variable-length records (VLR), but also to cope with
17984 variants, which are composed of nested structures multiplexed with
17985 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
17986      function processing a FIELD_DECL, it is required to be non-null. */
17987 tree struct_type;
17988 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
17989 QUAL_UNION_TYPE), this holds an expression that computes the offset for
17990 this variant part as part of the root record (in storage units). For
17991 regular records, it must be NULL_TREE. */
17992 tree variant_part_offset;
17993 };
17994
17995 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
17996 addressed byte of the "containing object" for the given FIELD_DECL. If
17997 possible, return a native constant through CST_OFFSET (in which case NULL is
17998 returned); otherwise return a DWARF expression that computes the offset.
17999
18000 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
18001 that offset is, either because the argument turns out to be a pointer to an
18002 ERROR_MARK node, or because the offset expression is too complex for us.
18003
18004 CTX is required: see the comment for VLR_CONTEXT. */
18005
18006 static dw_loc_descr_ref
18007 field_byte_offset (const_tree decl, struct vlr_context *ctx,
18008 HOST_WIDE_INT *cst_offset)
18009 {
18010 tree tree_result;
18011 dw_loc_list_ref loc_result;
18012
18013 *cst_offset = 0;
18014
18015 if (TREE_CODE (decl) == ERROR_MARK)
18016 return NULL;
18017 else
18018 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
18019
18020   /* We cannot handle variable bit offsets at the moment, so abort if that's
18021      the case. */
18022 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
18023 return NULL;
18024
18025 #ifdef PCC_BITFIELD_TYPE_MATTERS
18026   /* We used to handle only constant offsets in all cases. Now, we properly
18027      handle dynamic byte offsets only when the PCC bitfield type doesn't
18028      matter. */
18029 if (PCC_BITFIELD_TYPE_MATTERS
18030 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
18031 {
18032 offset_int object_offset_in_bits;
18033 offset_int object_offset_in_bytes;
18034 offset_int bitpos_int;
18035 tree type;
18036 tree field_size_tree;
18037 offset_int deepest_bitpos;
18038 offset_int field_size_in_bits;
18039 unsigned int type_align_in_bits;
18040 unsigned int decl_align_in_bits;
18041 offset_int type_size_in_bits;
18042
18043 bitpos_int = wi::to_offset (bit_position (decl));
18044 type = field_type (decl);
18045 type_size_in_bits = offset_int_type_size_in_bits (type);
18046 type_align_in_bits = simple_type_align_in_bits (type);
18047
18048 field_size_tree = DECL_SIZE (decl);
18049
18050 /* The size could be unspecified if there was an error, or for
18051 a flexible array member. */
18052 if (!field_size_tree)
18053 field_size_tree = bitsize_zero_node;
18054
18055 /* If the size of the field is not constant, use the type size. */
18056 if (TREE_CODE (field_size_tree) == INTEGER_CST)
18057 field_size_in_bits = wi::to_offset (field_size_tree);
18058 else
18059 field_size_in_bits = type_size_in_bits;
18060
18061 decl_align_in_bits = simple_decl_align_in_bits (decl);
18062
18063 /* The GCC front-end doesn't make any attempt to keep track of the
18064 starting bit offset (relative to the start of the containing
18065 structure type) of the hypothetical "containing object" for a
18066 bit-field. Thus, when computing the byte offset value for the
18067 start of the "containing object" of a bit-field, we must deduce
18068 this information on our own. This can be rather tricky to do in
18069 some cases. For example, handling the following structure type
18070 definition when compiling for an i386/i486 target (which only
18071 aligns long long's to 32-bit boundaries) can be very tricky:
18072
18073 struct S { int field1; long long field2:31; };
18074
18075 Fortunately, there is a simple rule-of-thumb which can be used
18076 in such cases. When compiling for an i386/i486, GCC will
18077 allocate 8 bytes for the structure shown above. It decides to
18078 do this based upon one simple rule for bit-field allocation.
18079 GCC allocates each "containing object" for each bit-field at
18080 the first (i.e. lowest addressed) legitimate alignment boundary
18081 (based upon the required minimum alignment for the declared
18082 type of the field) which it can possibly use, subject to the
18083 condition that there is still enough available space remaining
18084 in the containing object (when allocated at the selected point)
18085 to fully accommodate all of the bits of the bit-field itself.
18086
18087 This simple rule makes it obvious why GCC allocates 8 bytes for
18088 each object of the structure type shown above. When looking
18089 for a place to allocate the "containing object" for `field2',
18090 the compiler simply tries to allocate a 64-bit "containing
18091 object" at each successive 32-bit boundary (starting at zero)
18092 	 until it finds a place to allocate that 64-bit field such that
18093 	 at least 31 contiguous (and previously unallocated) bits remain
18094 	 within that selected 64-bit field. (As it turns out, for the
18095 example above, the compiler finds it is OK to allocate the
18096 "containing object" 64-bit field at bit-offset zero within the
18097 structure type.)
18098
18099 Here we attempt to work backwards from the limited set of facts
18100 we're given, and we try to deduce from those facts, where GCC
18101 must have believed that the containing object started (within
18102 the structure type). The value we deduce is then used (by the
18103 callers of this routine) to generate DW_AT_location and
18104 DW_AT_bit_offset attributes for fields (both bit-fields and, in
18105 the case of DW_AT_location, regular fields as well). */
18106
18107 /* Figure out the bit-distance from the start of the structure to
18108 the "deepest" bit of the bit-field. */
18109 deepest_bitpos = bitpos_int + field_size_in_bits;
18110
18111 /* This is the tricky part. Use some fancy footwork to deduce
18112 where the lowest addressed bit of the containing object must
18113 be. */
18114 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
18115
18116 /* Round up to type_align by default. This works best for
18117 bitfields. */
18118 object_offset_in_bits
18119 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
18120
18121 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
18122 {
18123 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
18124
18125 /* Round up to decl_align instead. */
18126 object_offset_in_bits
18127 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
18128 }
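      /* For the struct S example above on i386: field2 is at bit position
	 32, is 31 bits wide and has a 64-bit declared type, so
	 deepest_bitpos is 63 and 63 - 64 rounded up to the 32-bit type
	 alignment gives a containing object at bit offset 0, i.e. byte
	 offset 0 within the structure.  */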
18129
18130 object_offset_in_bytes
18131 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
18132 if (ctx->variant_part_offset == NULL_TREE)
18133 {
18134 *cst_offset = object_offset_in_bytes.to_shwi ();
18135 return NULL;
18136 }
18137 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
18138 }
18139 else
18140 #endif /* PCC_BITFIELD_TYPE_MATTERS */
18141 tree_result = byte_position (decl);
18142
18143 if (ctx->variant_part_offset != NULL_TREE)
18144 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
18145 ctx->variant_part_offset, tree_result);
18146
18147   /* If the byte offset is a constant, it's simpler to handle a native
18148 constant rather than a DWARF expression. */
18149 if (TREE_CODE (tree_result) == INTEGER_CST)
18150 {
18151 *cst_offset = wi::to_offset (tree_result).to_shwi ();
18152 return NULL;
18153 }
18154 struct loc_descr_context loc_ctx = {
18155 ctx->struct_type, /* context_type */
18156 NULL_TREE, /* base_decl */
18157 NULL, /* dpi */
18158 false, /* placeholder_arg */
18159 false /* placeholder_seen */
18160 };
18161 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
18162
18163 /* We want a DWARF expression: abort if we only have a location list with
18164 multiple elements. */
18165 if (!loc_result || !single_element_loc_list_p (loc_result))
18166 return NULL;
18167 else
18168 return loc_result->expr;
18169 }
18170 \f
18171 /* The following routines define various Dwarf attributes and any data
18172 associated with them. */
18173
18174 /* Add a location description attribute value to a DIE.
18175
18176 This emits location attributes suitable for whole variables and
18177 whole parameters. Note that the location attributes for struct fields are
18178 generated by the routine `data_member_location_attribute' below. */
18179
18180 static inline void
18181 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
18182 dw_loc_list_ref descr)
18183 {
18184 if (descr == 0)
18185 return;
18186 if (single_element_loc_list_p (descr))
18187 add_AT_loc (die, attr_kind, descr->expr);
18188 else
18189 add_AT_loc_list (die, attr_kind, descr);
18190 }
18191
18192 /* Add DW_AT_accessibility attribute to DIE if needed. */
18193
18194 static void
18195 add_accessibility_attribute (dw_die_ref die, tree decl)
18196 {
18197 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
18198 children, otherwise the default is DW_ACCESS_public. In DWARF2
18199 the default has always been DW_ACCESS_public. */
18200 if (TREE_PROTECTED (decl))
18201 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
18202 else if (TREE_PRIVATE (decl))
18203 {
18204 if (dwarf_version == 2
18205 || die->die_parent == NULL
18206 || die->die_parent->die_tag != DW_TAG_class_type)
18207 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
18208 }
18209 else if (dwarf_version > 2
18210 && die->die_parent
18211 && die->die_parent->die_tag == DW_TAG_class_type)
18212 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
18213 }
18214
18215 /* Attach the specialized form of location attribute used for data members of
18216 struct and union types. In the special case of a FIELD_DECL node which
18217 represents a bit-field, the "offset" part of this special location
18218 descriptor must indicate the distance in bytes from the lowest-addressed
18219 byte of the containing struct or union type to the lowest-addressed byte of
18220 the "containing object" for the bit-field. (See the `field_byte_offset'
18221 function above).
18222
18223 For any given bit-field, the "containing object" is a hypothetical object
18224 (of some integral or enum type) within which the given bit-field lives. The
18225 type of this hypothetical "containing object" is always the same as the
18226 declared type of the individual bit-field itself (for GCC anyway... the
18227 DWARF spec doesn't actually mandate this). Note that it is the size (in
18228 bytes) of the hypothetical "containing object" which will be given in the
18229 DW_AT_byte_size attribute for this bit-field. (See the
18230 `byte_size_attribute' function below.) It is also used when calculating the
18231 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
18232 function below.)
18233
18234 CTX is required: see the comment for VLR_CONTEXT. */
18235
18236 static void
18237 add_data_member_location_attribute (dw_die_ref die,
18238 tree decl,
18239 struct vlr_context *ctx)
18240 {
18241 HOST_WIDE_INT offset;
18242 dw_loc_descr_ref loc_descr = 0;
18243
18244 if (TREE_CODE (decl) == TREE_BINFO)
18245 {
18246 /* We're working on the TAG_inheritance for a base class. */
18247 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
18248 {
18249 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
18250 aren't at a fixed offset from all (sub)objects of the same
18251 type. We need to extract the appropriate offset from our
18252 vtable. The following dwarf expression means
18253
18254 BaseAddr = ObAddr + *((*ObAddr) - Offset)
18255
18256 This is specific to the V3 ABI, of course. */
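	  /* Concretely, the operator sequence built below is:
	     DW_OP_dup; DW_OP_deref; <push Offset>; DW_OP_minus;
	     DW_OP_deref; DW_OP_plus.  */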
18257
18258 dw_loc_descr_ref tmp;
18259
18260 /* Make a copy of the object address. */
18261 tmp = new_loc_descr (DW_OP_dup, 0, 0);
18262 add_loc_descr (&loc_descr, tmp);
18263
18264 /* Extract the vtable address. */
18265 tmp = new_loc_descr (DW_OP_deref, 0, 0);
18266 add_loc_descr (&loc_descr, tmp);
18267
18268 /* Calculate the address of the offset. */
18269 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
18270 gcc_assert (offset < 0);
18271
18272 tmp = int_loc_descriptor (-offset);
18273 add_loc_descr (&loc_descr, tmp);
18274 tmp = new_loc_descr (DW_OP_minus, 0, 0);
18275 add_loc_descr (&loc_descr, tmp);
18276
18277 /* Extract the offset. */
18278 tmp = new_loc_descr (DW_OP_deref, 0, 0);
18279 add_loc_descr (&loc_descr, tmp);
18280
18281 /* Add it to the object address. */
18282 tmp = new_loc_descr (DW_OP_plus, 0, 0);
18283 add_loc_descr (&loc_descr, tmp);
18284 }
18285 else
18286 offset = tree_to_shwi (BINFO_OFFSET (decl));
18287 }
18288 else
18289 {
18290 loc_descr = field_byte_offset (decl, ctx, &offset);
18291
18292 /* If loc_descr is available then we know the field offset is dynamic.
18293 However, GDB does not handle dynamic field offsets very well at the
18294 moment. */
18295 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
18296 {
18297 loc_descr = NULL;
18298 offset = 0;
18299 }
18300
18301       /* Data member location evaluation starts with the base address on the
18302 stack. Compute the field offset and add it to this base address. */
18303 else if (loc_descr != NULL)
18304 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
18305 }
18306
18307 if (! loc_descr)
18308 {
18309       /* While DW_AT_data_bit_offset was already added in DWARF4,
18310 	 GDB for example only added support for it in November 2016. For DWARF5
18311 	 we need newer debug info consumers anyway. We might change this
18312 	 to dwarf_version >= 4 once most consumers have caught up. */
18313 if (dwarf_version >= 5
18314 && TREE_CODE (decl) == FIELD_DECL
18315 && DECL_BIT_FIELD_TYPE (decl))
18316 {
18317 tree off = bit_position (decl);
18318 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
18319 {
18320 remove_AT (die, DW_AT_byte_size);
18321 remove_AT (die, DW_AT_bit_offset);
18322 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
18323 return;
18324 }
18325 }
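      /* Otherwise fall back to the older representations: a constant
	 DW_AT_data_member_location for DWARF 3 and later, or a
	 DW_OP_plus_uconst location expression for DWARF 2.  */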
18326 if (dwarf_version > 2)
18327 {
18328 /* Don't need to output a location expression, just the constant. */
18329 if (offset < 0)
18330 add_AT_int (die, DW_AT_data_member_location, offset);
18331 else
18332 add_AT_unsigned (die, DW_AT_data_member_location, offset);
18333 return;
18334 }
18335 else
18336 {
18337 enum dwarf_location_atom op;
18338
18339 /* The DWARF2 standard says that we should assume that the structure
18340 address is already on the stack, so we can specify a structure
18341 field address by using DW_OP_plus_uconst. */
18342 op = DW_OP_plus_uconst;
18343 loc_descr = new_loc_descr (op, offset, 0);
18344 }
18345 }
18346
18347 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
18348 }
18349
18350 /* Writes integer values to dw_vec_const array. */
18351
18352 static void
18353 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
18354 {
18355 while (size != 0)
18356 {
18357 *dest++ = val & 0xff;
18358 val >>= 8;
18359 --size;
18360 }
18361 }
18362
18363 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
18364
18365 static HOST_WIDE_INT
18366 extract_int (const unsigned char *src, unsigned int size)
18367 {
18368 HOST_WIDE_INT val = 0;
18369
18370 src += size;
18371 while (size != 0)
18372 {
18373 val <<= 8;
18374 val |= *--src & 0xff;
18375 --size;
18376 }
18377 return val;
18378 }
18379
18380 /* Writes wide_int values to dw_vec_const array. */
18381
18382 static void
18383 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
18384 {
18385 int i;
18386
18387 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
18388 {
18389 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
18390 return;
18391 }
18392
18393 /* We'd have to extend this code to support odd sizes. */
18394 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
18395
18396 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
18397
18398 if (WORDS_BIG_ENDIAN)
18399 for (i = n - 1; i >= 0; i--)
18400 {
18401 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
18402 dest += sizeof (HOST_WIDE_INT);
18403 }
18404 else
18405 for (i = 0; i < n; i++)
18406 {
18407 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
18408 dest += sizeof (HOST_WIDE_INT);
18409 }
18410 }
18411
18412 /* Writes floating point values to dw_vec_const array. */
18413
18414 static void
18415 insert_float (const_rtx rtl, unsigned char *array)
18416 {
18417 long val[4];
18418 int i;
18419
18420 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), GET_MODE (rtl));
18421
18422 /* real_to_target puts 32-bit pieces in each long. Pack them. */
18423 for (i = 0; i < GET_MODE_SIZE (GET_MODE (rtl)) / 4; i++)
18424 {
18425 insert_int (val[i], 4, array);
18426 array += 4;
18427 }
18428 }
18429
18430 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
18431 does not have a "location" either in memory or in a register. These
18432 things can arise in GNU C when a constant is passed as an actual parameter
18433 to an inlined function. They can also arise in C++ where declared
18434 constants do not necessarily get memory "homes". */
18435
18436 static bool
18437 add_const_value_attribute (dw_die_ref die, rtx rtl)
18438 {
18439 switch (GET_CODE (rtl))
18440 {
18441 case CONST_INT:
18442 {
18443 HOST_WIDE_INT val = INTVAL (rtl);
18444
18445 if (val < 0)
18446 add_AT_int (die, DW_AT_const_value, val);
18447 else
18448 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
18449 }
18450 return true;
18451
18452 case CONST_WIDE_INT:
18453 {
18454 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
18455 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
18456 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
18457 wide_int w = wi::zext (w1, prec);
18458 add_AT_wide (die, DW_AT_const_value, w);
18459 }
18460 return true;
18461
18462 case CONST_DOUBLE:
18463 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
18464 floating-point constant. A CONST_DOUBLE is used whenever the
18465 constant requires more than one word in order to be adequately
18466 represented. */
18467 {
18468 machine_mode mode = GET_MODE (rtl);
18469
18470 if (TARGET_SUPPORTS_WIDE_INT == 0 && !SCALAR_FLOAT_MODE_P (mode))
18471 add_AT_double (die, DW_AT_const_value,
18472 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
18473 else
18474 {
18475 unsigned int length = GET_MODE_SIZE (mode);
18476 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
18477
18478 insert_float (rtl, array);
18479 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
18480 }
18481 }
18482 return true;
18483
18484 case CONST_VECTOR:
18485 {
18486 machine_mode mode = GET_MODE (rtl);
18487 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
18488 unsigned int length = CONST_VECTOR_NUNITS (rtl);
18489 unsigned char *array
18490 = ggc_vec_alloc<unsigned char> (length * elt_size);
18491 unsigned int i;
18492 unsigned char *p;
18493 machine_mode imode = GET_MODE_INNER (mode);
18494
18495 switch (GET_MODE_CLASS (mode))
18496 {
18497 case MODE_VECTOR_INT:
18498 for (i = 0, p = array; i < length; i++, p += elt_size)
18499 {
18500 rtx elt = CONST_VECTOR_ELT (rtl, i);
18501 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
18502 }
18503 break;
18504
18505 case MODE_VECTOR_FLOAT:
18506 for (i = 0, p = array; i < length; i++, p += elt_size)
18507 {
18508 rtx elt = CONST_VECTOR_ELT (rtl, i);
18509 insert_float (elt, p);
18510 }
18511 break;
18512
18513 default:
18514 gcc_unreachable ();
18515 }
18516
18517 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
18518 }
18519 return true;
18520
18521 case CONST_STRING:
18522 if (dwarf_version >= 4 || !dwarf_strict)
18523 {
18524 dw_loc_descr_ref loc_result;
18525 resolve_one_addr (&rtl);
18526 rtl_addr:
18527 loc_result = new_addr_loc_descr (rtl, dtprel_false);
18528 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
18529 add_AT_loc (die, DW_AT_location, loc_result);
18530 vec_safe_push (used_rtx_array, rtl);
18531 return true;
18532 }
18533 return false;
18534
18535 case CONST:
18536 if (CONSTANT_P (XEXP (rtl, 0)))
18537 return add_const_value_attribute (die, XEXP (rtl, 0));
18538 /* FALLTHROUGH */
18539 case SYMBOL_REF:
18540 if (!const_ok_for_output (rtl))
18541 return false;
18542 /* FALLTHROUGH */
18543 case LABEL_REF:
18544 if (dwarf_version >= 4 || !dwarf_strict)
18545 goto rtl_addr;
18546 return false;
18547
18548 case PLUS:
18549 /* In cases where an inlined instance of an inline function is passed
18550 the address of an `auto' variable (which is local to the caller) we
18551 can get a situation where the DECL_RTL of the artificial local
18552 variable (for the inlining) which acts as a stand-in for the
18553 corresponding formal parameter (of the inline function) will look
18554 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
18555 exactly a compile-time constant expression, but it isn't the address
18556 of the (artificial) local variable either. Rather, it represents the
18557 *value* which the artificial local variable always has during its
18558 lifetime. We currently have no way to represent such quasi-constant
18559 values in Dwarf, so for now we just punt and generate nothing. */
18560 return false;
18561
18562 case HIGH:
18563 case CONST_FIXED:
18564 return false;
18565
18566 case MEM:
18567 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
18568 && MEM_READONLY_P (rtl)
18569 && GET_MODE (rtl) == BLKmode)
18570 {
18571 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
18572 return true;
18573 }
18574 return false;
18575
18576 default:
18577 /* No other kinds of rtx should be possible here. */
18578 gcc_unreachable ();
18579 }
18580 return false;
18581 }
18582
18583 /* Determine whether the evaluation of EXPR references any variables
18584 or functions which aren't otherwise used (and therefore may not be
18585 output). */
18586 static tree
18587 reference_to_unused (tree * tp, int * walk_subtrees,
18588 void * data ATTRIBUTE_UNUSED)
18589 {
18590 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
18591 *walk_subtrees = 0;
18592
18593 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
18594 && ! TREE_ASM_WRITTEN (*tp))
18595 return *tp;
18596 /* ??? The C++ FE emits debug information for using decls, so
18597 putting gcc_unreachable here falls over. See PR31899. For now
18598 be conservative. */
18599 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
18600 return *tp;
18601 else if (VAR_P (*tp))
18602 {
18603 varpool_node *node = varpool_node::get (*tp);
18604 if (!node || !node->definition)
18605 return *tp;
18606 }
18607 else if (TREE_CODE (*tp) == FUNCTION_DECL
18608 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
18609 {
18610 /* The call graph machinery must have finished analyzing,
18611 optimizing and gimplifying the CU by now.
18612 So if *TP has no call graph node associated
18613 to it, it means *TP will not be emitted. */
18614 if (!cgraph_node::get (*tp))
18615 return *tp;
18616 }
18617 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
18618 return *tp;
18619
18620 return NULL_TREE;
18621 }
18622
18623 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
18624 for use in a later add_const_value_attribute call. */
18625
18626 static rtx
18627 rtl_for_decl_init (tree init, tree type)
18628 {
18629 rtx rtl = NULL_RTX;
18630
18631 STRIP_NOPS (init);
18632
18633 /* If a variable is initialized with a string constant without embedded
18634 zeros, build CONST_STRING. */
18635 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
18636 {
18637 tree enttype = TREE_TYPE (type);
18638 tree domain = TYPE_DOMAIN (type);
18639 machine_mode mode = TYPE_MODE (enttype);
18640
18641 if (GET_MODE_CLASS (mode) == MODE_INT && GET_MODE_SIZE (mode) == 1
18642 && domain
18643 && integer_zerop (TYPE_MIN_VALUE (domain))
18644 && compare_tree_int (TYPE_MAX_VALUE (domain),
18645 TREE_STRING_LENGTH (init) - 1) == 0
18646 && ((size_t) TREE_STRING_LENGTH (init)
18647 == strlen (TREE_STRING_POINTER (init)) + 1))
18648 {
18649 rtl = gen_rtx_CONST_STRING (VOIDmode,
18650 ggc_strdup (TREE_STRING_POINTER (init)));
18651 rtl = gen_rtx_MEM (BLKmode, rtl);
18652 MEM_READONLY_P (rtl) = 1;
18653 }
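      /* For instance, a char[3] initialized with "hi" passes all of the
	 checks above and ends up as a read-only BLKmode MEM around a
	 CONST_STRING rtx.  */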
18654 }
18655 /* Other aggregates, and complex values, could be represented using
18656 CONCAT: FIXME! */
18657 else if (AGGREGATE_TYPE_P (type)
18658 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
18659 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
18660 || TREE_CODE (type) == COMPLEX_TYPE)
18661 ;
18662 /* Vectors only work if their mode is supported by the target.
18663 FIXME: generic vectors ought to work too. */
18664 else if (TREE_CODE (type) == VECTOR_TYPE
18665 && !VECTOR_MODE_P (TYPE_MODE (type)))
18666 ;
18667 /* If the initializer is something that we know will expand into an
18668 immediate RTL constant, expand it now. We must be careful not to
18669 reference variables which won't be output. */
18670 else if (initializer_constant_valid_p (init, type)
18671 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
18672 {
18673 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
18674 possible. */
18675 if (TREE_CODE (type) == VECTOR_TYPE)
18676 switch (TREE_CODE (init))
18677 {
18678 case VECTOR_CST:
18679 break;
18680 case CONSTRUCTOR:
18681 if (TREE_CONSTANT (init))
18682 {
18683 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
18684 bool constant_p = true;
18685 tree value;
18686 unsigned HOST_WIDE_INT ix;
18687
18688 /* Even when ctor is constant, it might contain non-*_CST
18689 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
18690 belong into VECTOR_CST nodes. */
18691 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
18692 if (!CONSTANT_CLASS_P (value))
18693 {
18694 constant_p = false;
18695 break;
18696 }
18697
18698 if (constant_p)
18699 {
18700 init = build_vector_from_ctor (type, elts);
18701 break;
18702 }
18703 }
18704 /* FALLTHRU */
18705
18706 default:
18707 return NULL;
18708 }
18709
18710 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
18711
18712 /* If expand_expr returns a MEM, it wasn't immediate. */
18713 gcc_assert (!rtl || !MEM_P (rtl));
18714 }
18715
18716 return rtl;
18717 }
18718
18719 /* Generate RTL for the variable DECL to represent its location. */
18720
18721 static rtx
18722 rtl_for_decl_location (tree decl)
18723 {
18724 rtx rtl;
18725
18726 /* Here we have to decide where we are going to say the parameter "lives"
18727 (as far as the debugger is concerned). We only have a couple of
18728 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
18729
18730 DECL_RTL normally indicates where the parameter lives during most of the
18731 activation of the function. If optimization is enabled however, this
18732 could be either NULL or else a pseudo-reg. Both of those cases indicate
18733 that the parameter doesn't really live anywhere (as far as the code
18734 generation parts of GCC are concerned) during most of the function's
18735 activation. That will happen (for example) if the parameter is never
18736 referenced within the function.
18737
18738 We could just generate a location descriptor here for all non-NULL
18739 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
18740 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
18741 where DECL_RTL is NULL or is a pseudo-reg.
18742
18743 Note however that we can only get away with using DECL_INCOMING_RTL as
18744 a backup substitute for DECL_RTL in certain limited cases. In cases
18745 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
18746 we can be sure that the parameter was passed using the same type as it is
18747 declared to have within the function, and that its DECL_INCOMING_RTL
18748 points us to a place where a value of that type is passed.
18749
18750 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
18751 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
18752 because in these cases DECL_INCOMING_RTL points us to a value of some
18753 type which is *different* from the type of the parameter itself. Thus,
18754 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
18755 such cases, the debugger would end up (for example) trying to fetch a
18756 `float' from a place which actually contains the first part of a
18757 `double'. That would lead to really incorrect and confusing
18758 output at debug-time.
18759
18760 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
18761 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
18762 are a couple of exceptions however. On little-endian machines we can
18763 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
18764 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
18765 an integral type that is smaller than TREE_TYPE (decl). These cases arise
18766 when (on a little-endian machine) a non-prototyped function has a
18767 parameter declared to be of type `short' or `char'. In such cases,
18768 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
18769 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
18770 passed `int' value. If the debugger then uses that address to fetch
18771 a `short' or a `char' (on a little-endian machine) the result will be
18772 the correct data, so we allow for such exceptional cases below.
18773
18774 Note that our goal here is to describe the place where the given formal
18775 parameter lives during most of the function's activation (i.e. between the
18776 end of the prologue and the start of the epilogue). We'll do that as best
18777 as we can. Note however that if the given formal parameter is modified
18778 sometime during the execution of the function, then a stack backtrace (at
18779 debug-time) will show the function as having been called with the *new*
18780 value rather than the value which was originally passed in. This happens
18781 rarely enough that it is not a major problem, but it *is* a problem, and
18782 I'd like to fix it.
18783
18784 A future version of dwarf2out.c may generate two additional attributes for
18785 any given DW_TAG_formal_parameter DIE which will describe the "passed
18786 type" and the "passed location" for the given formal parameter in addition
18787 to the attributes we now generate to indicate the "declared type" and the
18788 "active location" for each parameter. This additional set of attributes
18789 could be used by debuggers for stack backtraces. Separately, note that
18790 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
18791 This happens (for example) for inlined-instances of inline function formal
18792 parameters which are never referenced. This really shouldn't be
18793 happening. All PARM_DECL nodes should get valid non-NULL
18794 DECL_INCOMING_RTL values. FIXME. */
18795
18796 /* Use DECL_RTL as the "location" unless we find something better. */
18797 rtl = DECL_RTL_IF_SET (decl);
18798
18799 /* When generating abstract instances, ignore everything except
18800 constants, symbols living in memory, and symbols living in
18801 fixed registers. */
18802 if (! reload_completed)
18803 {
18804 if (rtl
18805 && (CONSTANT_P (rtl)
18806 || (MEM_P (rtl)
18807 && CONSTANT_P (XEXP (rtl, 0)))
18808 || (REG_P (rtl)
18809 && VAR_P (decl)
18810 && TREE_STATIC (decl))))
18811 {
18812 rtl = targetm.delegitimize_address (rtl);
18813 return rtl;
18814 }
18815 rtl = NULL_RTX;
18816 }
18817 else if (TREE_CODE (decl) == PARM_DECL)
18818 {
18819 if (rtl == NULL_RTX
18820 || is_pseudo_reg (rtl)
18821 || (MEM_P (rtl)
18822 && is_pseudo_reg (XEXP (rtl, 0))
18823 && DECL_INCOMING_RTL (decl)
18824 && MEM_P (DECL_INCOMING_RTL (decl))
18825 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
18826 {
18827 tree declared_type = TREE_TYPE (decl);
18828 tree passed_type = DECL_ARG_TYPE (decl);
18829 machine_mode dmode = TYPE_MODE (declared_type);
18830 machine_mode pmode = TYPE_MODE (passed_type);
18831
18832 /* This decl represents a formal parameter which was optimized out.
18833 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
18834 all cases where (rtl == NULL_RTX) just below. */
18835 if (dmode == pmode)
18836 rtl = DECL_INCOMING_RTL (decl);
18837 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
18838 && SCALAR_INT_MODE_P (dmode)
18839 && GET_MODE_SIZE (dmode) <= GET_MODE_SIZE (pmode)
18840 && DECL_INCOMING_RTL (decl))
18841 {
18842 rtx inc = DECL_INCOMING_RTL (decl);
18843 if (REG_P (inc))
18844 rtl = inc;
18845 else if (MEM_P (inc))
18846 {
18847 if (BYTES_BIG_ENDIAN)
18848 rtl = adjust_address_nv (inc, dmode,
18849 GET_MODE_SIZE (pmode)
18850 - GET_MODE_SIZE (dmode));
18851 else
18852 rtl = inc;
18853 }
18854 }
18855 }
18856
18857 /* If the parm was passed in registers, but lives on the stack, then
18858 make a big endian correction if the mode of the type of the
18859 parameter is not the same as the mode of the rtl. */
18860 /* ??? This is the same series of checks that are made in dbxout.c before
18861 we reach the big endian correction code there. It isn't clear if all
18862 of these checks are necessary here, but keeping them all is the safe
18863 thing to do. */
18864 else if (MEM_P (rtl)
18865 && XEXP (rtl, 0) != const0_rtx
18866 && ! CONSTANT_P (XEXP (rtl, 0))
18867 /* Not passed in memory. */
18868 && !MEM_P (DECL_INCOMING_RTL (decl))
18869 /* Not passed by invisible reference. */
18870 && (!REG_P (XEXP (rtl, 0))
18871 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
18872 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
18873 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
18874 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
18875 #endif
18876 )
18877 /* Big endian correction check. */
18878 && BYTES_BIG_ENDIAN
18879 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
18880 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl)))
18881 < UNITS_PER_WORD))
18882 {
18883 machine_mode addr_mode = get_address_mode (rtl);
18884 int offset = (UNITS_PER_WORD
18885 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
18886
18887 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
18888 plus_constant (addr_mode, XEXP (rtl, 0), offset));
18889 }
18890 }
18891 else if (VAR_P (decl)
18892 && rtl
18893 && MEM_P (rtl)
18894 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl))
18895 && BYTES_BIG_ENDIAN)
18896 {
18897 machine_mode addr_mode = get_address_mode (rtl);
18898 int rsize = GET_MODE_SIZE (GET_MODE (rtl));
18899 int dsize = GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl)));
18900
18901 /* If a variable is declared "register" yet is smaller than
18902 a register, then if we store the variable to memory, it
18903 looks like we're storing a register-sized value, when in
18904 fact we are not. We need to adjust the offset of the
18905 storage location to reflect the actual value's bytes,
18906 else gdb will not be able to display it. */
18907 if (rsize > dsize)
18908 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
18909 plus_constant (addr_mode, XEXP (rtl, 0),
18910 rsize - dsize));
18911 }
18912
18913 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
18914 and will have been substituted directly into all expressions that use it.
18915 C does not have such a concept, but C++ and other languages do. */
18916 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
18917 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
18918
18919 if (rtl)
18920 rtl = targetm.delegitimize_address (rtl);
18921
18922 /* If we don't look past the constant pool, we risk emitting a
18923 reference to a constant pool entry that isn't referenced from
18924 code, and thus is not emitted. */
18925 if (rtl)
18926 rtl = avoid_constant_pool_reference (rtl);
18927
18928 /* Try harder to get a rtl. If this symbol ends up not being emitted
18929 in the current CU, resolve_addr will remove the expression referencing
18930 it. */
18931 if (rtl == NULL_RTX
18932 && VAR_P (decl)
18933 && !DECL_EXTERNAL (decl)
18934 && TREE_STATIC (decl)
18935 && DECL_NAME (decl)
18936 && !DECL_HARD_REGISTER (decl)
18937 && DECL_MODE (decl) != VOIDmode)
18938 {
18939 rtl = make_decl_rtl_for_debug (decl);
18940 if (!MEM_P (rtl)
18941 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
18942 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
18943 rtl = NULL_RTX;
18944 }
18945
18946 return rtl;
18947 }
18948
18949 /* Check whether DECL is a Fortran COMMON symbol. If not, NULL_TREE is
18950    returned. If so, the decl for the COMMON block is returned, and
18951    *VALUE is set to the offset of the symbol into the common block. */
18952
18953 static tree
18954 fortran_common (tree decl, HOST_WIDE_INT *value)
18955 {
18956 tree val_expr, cvar;
18957 machine_mode mode;
18958 HOST_WIDE_INT bitsize, bitpos;
18959 tree offset;
18960 int unsignedp, reversep, volatilep = 0;
18961
18962 /* If the decl isn't a VAR_DECL, or if it isn't static, or if
18963 it does not have a value (the offset into the common area), or if it
18964 is thread local (as opposed to global) then it isn't common, and shouldn't
18965 be handled as such. */
18966 if (!VAR_P (decl)
18967 || !TREE_STATIC (decl)
18968 || !DECL_HAS_VALUE_EXPR_P (decl)
18969 || !is_fortran ())
18970 return NULL_TREE;
18971
18972 val_expr = DECL_VALUE_EXPR (decl);
18973 if (TREE_CODE (val_expr) != COMPONENT_REF)
18974 return NULL_TREE;
18975
18976 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
18977 &unsignedp, &reversep, &volatilep);
18978
18979 if (cvar == NULL_TREE
18980 || !VAR_P (cvar)
18981 || DECL_ARTIFICIAL (cvar)
18982 || !TREE_PUBLIC (cvar))
18983 return NULL_TREE;
18984
18985 *value = 0;
18986 if (offset != NULL)
18987 {
18988 if (!tree_fits_shwi_p (offset))
18989 return NULL_TREE;
18990 *value = tree_to_shwi (offset);
18991 }
18992 if (bitpos != 0)
18993 *value += bitpos / BITS_PER_UNIT;
18994
18995 return cvar;
18996 }
18997
18998 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
18999 data attribute for a variable or a parameter. We generate the
19000 DW_AT_const_value attribute only in those cases where the given variable
19001 or parameter does not have a true "location" either in memory or in a
19002 register. This can happen (for example) when a constant is passed as an
19003 actual argument in a call to an inline function. (It's possible that
19004 these things can crop up in other ways also.) Note that one type of
19005 constant value which can be passed into an inlined function is a constant
19006 pointer. This can happen for example if an actual argument in an inlined
19007 function call evaluates to a compile-time constant address.
19008
19009 CACHE_P is true if it is worth caching the location list for DECL,
19010 so that future calls can reuse it rather than regenerate it from scratch.
19011 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
19012 since we will need to refer to them each time the function is inlined. */
19013
19014 static bool
19015 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
19016 {
19017 rtx rtl;
19018 dw_loc_list_ref list;
19019 var_loc_list *loc_list;
19020 cached_dw_loc_list *cache;
19021
19022 if (early_dwarf)
19023 return false;
19024
19025 if (TREE_CODE (decl) == ERROR_MARK)
19026 return false;
19027
19028 if (get_AT (die, DW_AT_location)
19029 || get_AT (die, DW_AT_const_value))
19030 return true;
19031
19032 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
19033 || TREE_CODE (decl) == RESULT_DECL);
19034
19035 /* Try to get some constant RTL for this decl, and use that as the value of
19036 the location. */
19037
19038 rtl = rtl_for_decl_location (decl);
19039 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
19040 && add_const_value_attribute (die, rtl))
19041 return true;
19042
19043   /* See if we have a single element location list that is equivalent to
19044      a constant value. In that case it is better to use add_const_value_attribute
19045      rather than expanding the constant value equivalent. */
19046 loc_list = lookup_decl_loc (decl);
19047 if (loc_list
19048 && loc_list->first
19049 && loc_list->first->next == NULL
19050 && NOTE_P (loc_list->first->loc)
19051 && NOTE_VAR_LOCATION (loc_list->first->loc)
19052 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
19053 {
19054 struct var_loc_node *node;
19055
19056 node = loc_list->first;
19057 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
19058 if (GET_CODE (rtl) == EXPR_LIST)
19059 rtl = XEXP (rtl, 0);
19060 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
19061 && add_const_value_attribute (die, rtl))
19062 return true;
19063 }
19064 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
19065 list several times. See if we've already cached the contents. */
19066 list = NULL;
19067 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
19068 cache_p = false;
19069 if (cache_p)
19070 {
19071 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
19072 if (cache)
19073 list = cache->loc_list;
19074 }
19075 if (list == NULL)
19076 {
19077 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
19078 NULL);
19079 /* It is usually worth caching this result if the decl is from
19080 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
19081 if (cache_p && list && list->dw_loc_next)
19082 {
19083 cached_dw_loc_list **slot
19084 = cached_dw_loc_list_table->find_slot_with_hash (decl,
19085 DECL_UID (decl),
19086 INSERT);
19087 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
19088 cache->decl_id = DECL_UID (decl);
19089 cache->loc_list = list;
19090 *slot = cache;
19091 }
19092 }
19093 if (list)
19094 {
19095 add_AT_location_description (die, DW_AT_location, list);
19096 return true;
19097 }
19098 /* None of that worked, so it must not really have a location;
19099 try adding a constant value attribute from the DECL_INITIAL. */
19100 return tree_add_const_value_attribute_for_decl (die, decl);
19101 }
19102
19103 /* Helper function for tree_add_const_value_attribute. Natively encode
19104 initializer INIT into an array. Return true if successful. */
19105
19106 static bool
19107 native_encode_initializer (tree init, unsigned char *array, int size)
19108 {
19109 tree type;
19110
19111 if (init == NULL_TREE)
19112 return false;
19113
19114 STRIP_NOPS (init);
19115 switch (TREE_CODE (init))
19116 {
19117 case STRING_CST:
19118 type = TREE_TYPE (init);
19119 if (TREE_CODE (type) == ARRAY_TYPE)
19120 {
19121 tree enttype = TREE_TYPE (type);
19122 machine_mode mode = TYPE_MODE (enttype);
19123
19124 if (GET_MODE_CLASS (mode) != MODE_INT || GET_MODE_SIZE (mode) != 1)
19125 return false;
19126 if (int_size_in_bytes (type) != size)
19127 return false;
19128 if (size > TREE_STRING_LENGTH (init))
19129 {
19130 memcpy (array, TREE_STRING_POINTER (init),
19131 TREE_STRING_LENGTH (init));
19132 memset (array + TREE_STRING_LENGTH (init),
19133 '\0', size - TREE_STRING_LENGTH (init));
19134 }
19135 else
19136 memcpy (array, TREE_STRING_POINTER (init), size);
19137 return true;
19138 }
19139 return false;
19140 case CONSTRUCTOR:
19141 type = TREE_TYPE (init);
19142 if (int_size_in_bytes (type) != size)
19143 return false;
19144 if (TREE_CODE (type) == ARRAY_TYPE)
19145 {
19146 HOST_WIDE_INT min_index;
19147 unsigned HOST_WIDE_INT cnt;
19148 int curpos = 0, fieldsize;
19149 constructor_elt *ce;
19150
19151 if (TYPE_DOMAIN (type) == NULL_TREE
19152 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
19153 return false;
19154
19155 fieldsize = int_size_in_bytes (TREE_TYPE (type));
19156 if (fieldsize <= 0)
19157 return false;
19158
19159 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
19160 memset (array, '\0', size);
19161 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
19162 {
19163 tree val = ce->value;
19164 tree index = ce->index;
19165 int pos = curpos;
19166 if (index && TREE_CODE (index) == RANGE_EXPR)
19167 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
19168 * fieldsize;
19169 else if (index)
19170 pos = (tree_to_shwi (index) - min_index) * fieldsize;
19171
19172 if (val)
19173 {
19174 STRIP_NOPS (val);
19175 if (!native_encode_initializer (val, array + pos, fieldsize))
19176 return false;
19177 }
19178 curpos = pos + fieldsize;
19179 if (index && TREE_CODE (index) == RANGE_EXPR)
19180 {
19181 int count = tree_to_shwi (TREE_OPERAND (index, 1))
19182 - tree_to_shwi (TREE_OPERAND (index, 0));
19183 while (count-- > 0)
19184 {
19185 if (val)
19186 memcpy (array + curpos, array + pos, fieldsize);
19187 curpos += fieldsize;
19188 }
19189 }
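	      /* For a RANGE_EXPR index such as [2 ... 5], the value encoded
		 at the first position is replicated into each remaining
		 slot covered by the range.  */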
19190 gcc_assert (curpos <= size);
19191 }
19192 return true;
19193 }
19194 else if (TREE_CODE (type) == RECORD_TYPE
19195 || TREE_CODE (type) == UNION_TYPE)
19196 {
19197 tree field = NULL_TREE;
19198 unsigned HOST_WIDE_INT cnt;
19199 constructor_elt *ce;
19200
19201 if (int_size_in_bytes (type) != size)
19202 return false;
19203
19204 if (TREE_CODE (type) == RECORD_TYPE)
19205 field = TYPE_FIELDS (type);
19206
19207 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
19208 {
19209 tree val = ce->value;
19210 int pos, fieldsize;
19211
19212 if (ce->index != 0)
19213 field = ce->index;
19214
19215 if (val)
19216 STRIP_NOPS (val);
19217
19218 if (field == NULL_TREE || DECL_BIT_FIELD (field))
19219 return false;
19220
19221 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
19222 && TYPE_DOMAIN (TREE_TYPE (field))
19223 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
19224 return false;
19225 else if (DECL_SIZE_UNIT (field) == NULL_TREE
19226 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
19227 return false;
19228 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
19229 pos = int_byte_position (field);
19230 gcc_assert (pos + fieldsize <= size);
19231 if (val && fieldsize != 0
19232 && !native_encode_initializer (val, array + pos, fieldsize))
19233 return false;
19234 }
19235 return true;
19236 }
19237 return false;
19238 case VIEW_CONVERT_EXPR:
19239 case NON_LVALUE_EXPR:
19240 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
19241 default:
19242 return native_encode_expr (init, array, size) == size;
19243 }
19244 }
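/* Illustrative sketch only (not something this file relies on): for a
   hypothetical initializer such as

     static const struct { char c; int i; } k = { 'A', 7 };

   the CONSTRUCTOR case above writes 'A' at the byte position of `c' and
   encodes 7 at the byte position of `i' in target byte order via
   native_encode_expr; any padding bytes keep the contents of the buffer
   handed in by the caller (tree_add_const_value_attribute below passes a
   zero-filled one), so the result is a flat byte image suitable for a
   DW_AT_const_value block.  Exact offsets depend on the target ABI.  */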
19245
19246 /* Attach a DW_AT_const_value attribute to DIE. The value of the
19247 attribute is the const value T. */
19248
19249 static bool
19250 tree_add_const_value_attribute (dw_die_ref die, tree t)
19251 {
19252 tree init;
19253 tree type = TREE_TYPE (t);
19254 rtx rtl;
19255
19256 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
19257 return false;
19258
19259 init = t;
19260 gcc_assert (!DECL_P (init));
19261
19262 if (! early_dwarf)
19263 {
19264 rtl = rtl_for_decl_init (init, type);
19265 if (rtl)
19266 return add_const_value_attribute (die, rtl);
19267 }
19268 /* If the host and target are sane, try harder. */
19269 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
19270 && initializer_constant_valid_p (init, type))
19271 {
19272 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
19273 if (size > 0 && (int) size == size)
19274 {
19275 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
19276
19277 if (native_encode_initializer (init, array, size))
19278 {
19279 add_AT_vec (die, DW_AT_const_value, size, 1, array);
19280 return true;
19281 }
19282 ggc_free (array);
19283 }
19284 }
19285 return false;
19286 }
19287
19288 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
19289 attribute is the const value of T, where T is an integral constant
19290 variable with static storage duration
19291 (so it can't be a PARM_DECL or a RESULT_DECL). */
19292
19293 static bool
19294 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
19295 {
19296
19297 if (!decl
19298 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
19299 || (VAR_P (decl) && !TREE_STATIC (decl)))
19300 return false;
19301
19302 if (TREE_READONLY (decl)
19303 && ! TREE_THIS_VOLATILE (decl)
19304 && DECL_INITIAL (decl))
19305 /* OK */;
19306 else
19307 return false;
19308
19309 /* Don't add DW_AT_const_value if abstract origin already has one. */
19310 if (get_AT (var_die, DW_AT_const_value))
19311 return false;
19312
19313 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
19314 }
19315
19316 /* Convert the CFI instructions for the current function into a
19317	   location list.  This is used for DW_AT_frame_base when we are targeting
19318 a dwarf2 consumer that does not support the dwarf3
19319 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
19320 expressions. */
19321
19322 static dw_loc_list_ref
19323 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
19324 {
19325 int ix;
19326 dw_fde_ref fde;
19327 dw_loc_list_ref list, *list_tail;
19328 dw_cfi_ref cfi;
19329 dw_cfa_location last_cfa, next_cfa;
19330 const char *start_label, *last_label, *section;
19331 dw_cfa_location remember;
19332
19333 fde = cfun->fde;
19334 gcc_assert (fde != NULL);
19335
19336 section = secname_for_decl (current_function_decl);
19337 list_tail = &list;
19338 list = NULL;
19339
19340 memset (&next_cfa, 0, sizeof (next_cfa));
19341 next_cfa.reg = INVALID_REGNUM;
19342 remember = next_cfa;
19343
19344 start_label = fde->dw_fde_begin;
19345
19346 /* ??? Bald assumption that the CIE opcode list does not contain
19347 advance opcodes. */
19348 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
19349 lookup_cfa_1 (cfi, &next_cfa, &remember);
19350
19351 last_cfa = next_cfa;
19352 last_label = start_label;
19353
19354 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
19355 {
19356 /* If the first partition contained no CFI adjustments, the
19357 CIE opcodes apply to the whole first partition. */
19358 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
19359 fde->dw_fde_begin, fde->dw_fde_end, section);
19360	      list_tail = &(*list_tail)->dw_loc_next;
19361 start_label = last_label = fde->dw_fde_second_begin;
19362 }
19363
19364 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
19365 {
19366 switch (cfi->dw_cfi_opc)
19367 {
19368 case DW_CFA_set_loc:
19369 case DW_CFA_advance_loc1:
19370 case DW_CFA_advance_loc2:
19371 case DW_CFA_advance_loc4:
19372 if (!cfa_equal_p (&last_cfa, &next_cfa))
19373 {
19374 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
19375 start_label, last_label, section);
19376
19377 list_tail = &(*list_tail)->dw_loc_next;
19378 last_cfa = next_cfa;
19379 start_label = last_label;
19380 }
19381 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
19382 break;
19383
19384 case DW_CFA_advance_loc:
19385 /* The encoding is complex enough that we should never emit this. */
19386 gcc_unreachable ();
19387
19388 default:
19389 lookup_cfa_1 (cfi, &next_cfa, &remember);
19390 break;
19391 }
19392 if (ix + 1 == fde->dw_fde_switch_cfi_index)
19393 {
19394 if (!cfa_equal_p (&last_cfa, &next_cfa))
19395 {
19396 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
19397 start_label, last_label, section);
19398
19399 list_tail = &(*list_tail)->dw_loc_next;
19400 last_cfa = next_cfa;
19401 start_label = last_label;
19402 }
19403 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
19404 start_label, fde->dw_fde_end, section);
19405 list_tail = &(*list_tail)->dw_loc_next;
19406 start_label = last_label = fde->dw_fde_second_begin;
19407 }
19408 }
19409
19410 if (!cfa_equal_p (&last_cfa, &next_cfa))
19411 {
19412 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
19413 start_label, last_label, section);
19414 list_tail = &(*list_tail)->dw_loc_next;
19415 start_label = last_label;
19416 }
19417
19418 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
19419 start_label,
19420 fde->dw_fde_second_begin
19421 ? fde->dw_fde_second_end : fde->dw_fde_end,
19422 section);
19423
19424 if (list && list->dw_loc_next)
19425 gen_llsym (list);
19426
19427 return list;
19428 }
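/* Rough, hypothetical illustration of the result: for a function whose
   prologue first pushes the frame pointer and then copies the stack
   pointer into it, the list built above might describe the frame base as

     [func_begin, after_push)  CFA = rsp + 8   -> DW_OP_breg7 <8 + OFFSET>
     [after_push, after_mov)   CFA = rsp + 16  -> DW_OP_breg7 <16 + OFFSET>
     [after_mov,  func_end)    CFA = rbp + 16  -> DW_OP_breg6 <16 + OFFSET>

   i.e. one entry per range over which the CFA computation stays the same.
   The register numbers and offsets are x86-64 examples, not anything this
   function hard-codes.  */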
19429
19430 /* Compute a displacement from the "steady-state frame pointer" to the
19431 frame base (often the same as the CFA), and store it in
19432 frame_pointer_fb_offset. OFFSET is added to the displacement
19433 before the latter is negated. */
19434
19435 static void
19436 compute_frame_pointer_to_fb_displacement (HOST_WIDE_INT offset)
19437 {
19438 rtx reg, elim;
19439
19440 #ifdef FRAME_POINTER_CFA_OFFSET
19441 reg = frame_pointer_rtx;
19442 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
19443 #else
19444 reg = arg_pointer_rtx;
19445 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
19446 #endif
19447
19448 elim = (ira_use_lra_p
19449 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
19450 : eliminate_regs (reg, VOIDmode, NULL_RTX));
19451 if (GET_CODE (elim) == PLUS)
19452 {
19453 offset += INTVAL (XEXP (elim, 1));
19454 elim = XEXP (elim, 0);
19455 }
19456
19457 frame_pointer_fb_offset = -offset;
19458
19459 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
19460	     in which to eliminate.  This is because its stack pointer isn't
19461 directly accessible as a register within the ISA. To work around
19462 this, assume that while we cannot provide a proper value for
19463 frame_pointer_fb_offset, we won't need one either. */
19464 frame_pointer_fb_offset_valid
19465 = ((SUPPORTS_STACK_ALIGNMENT
19466 && (elim == hard_frame_pointer_rtx
19467 || elim == stack_pointer_rtx))
19468 || elim == (frame_pointer_needed
19469 ? hard_frame_pointer_rtx
19470 : stack_pointer_rtx));
19471 }
19472
19473 /* Generate a DW_AT_name attribute given some string value to be included as
19474 the value of the attribute. */
19475
19476 static void
19477 add_name_attribute (dw_die_ref die, const char *name_string)
19478 {
19479 if (name_string != NULL && *name_string != 0)
19480 {
19481 if (demangle_name_func)
19482 name_string = (*demangle_name_func) (name_string);
19483
19484 add_AT_string (die, DW_AT_name, name_string);
19485 }
19486 }
19487
19488	/* Retrieve the descriptive type of TYPE, if any; make sure it has a
19489 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
19490 of TYPE accordingly.
19491
19492 ??? This is a temporary measure until after we're able to generate
19493 regular DWARF for the complex Ada type system. */
19494
19495 static void
19496 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
19497 dw_die_ref context_die)
19498 {
19499 tree dtype;
19500 dw_die_ref dtype_die;
19501
19502 if (!lang_hooks.types.descriptive_type)
19503 return;
19504
19505 dtype = lang_hooks.types.descriptive_type (type);
19506 if (!dtype)
19507 return;
19508
19509 dtype_die = lookup_type_die (dtype);
19510 if (!dtype_die)
19511 {
19512 gen_type_die (dtype, context_die);
19513 dtype_die = lookup_type_die (dtype);
19514 gcc_assert (dtype_die);
19515 }
19516
19517 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
19518 }
19519
19520 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
19521
19522 static const char *
19523 comp_dir_string (void)
19524 {
19525 const char *wd;
19526 char *wd1;
19527 static const char *cached_wd = NULL;
19528
19529 if (cached_wd != NULL)
19530 return cached_wd;
19531
19532 wd = get_src_pwd ();
19533 if (wd == NULL)
19534 return NULL;
19535
19536 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
19537 {
19538 int wdlen;
19539
19540 wdlen = strlen (wd);
19541 wd1 = ggc_vec_alloc<char> (wdlen + 2);
19542 strcpy (wd1, wd);
19543 wd1 [wdlen] = DIR_SEPARATOR;
19544 wd1 [wdlen + 1] = 0;
19545 wd = wd1;
19546 }
19547
19548 cached_wd = remap_debug_filename (wd);
19549 return cached_wd;
19550 }
19551
19552 /* Generate a DW_AT_comp_dir attribute for DIE. */
19553
19554 static void
19555 add_comp_dir_attribute (dw_die_ref die)
19556 {
19557 const char * wd = comp_dir_string ();
19558 if (wd != NULL)
19559 add_AT_string (die, DW_AT_comp_dir, wd);
19560 }
19561
19562 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
19563 pointer computation, ...), output a representation for that bound according
19564 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
19565 loc_list_from_tree for the meaning of CONTEXT. */
19566
19567 static void
19568 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
19569 int forms, struct loc_descr_context *context)
19570 {
19571 dw_die_ref context_die, decl_die;
19572 dw_loc_list_ref list;
19573 bool strip_conversions = true;
19574 bool placeholder_seen = false;
19575
19576 while (strip_conversions)
19577 switch (TREE_CODE (value))
19578 {
19579 case ERROR_MARK:
19580 case SAVE_EXPR:
19581 return;
19582
19583 CASE_CONVERT:
19584 case VIEW_CONVERT_EXPR:
19585 value = TREE_OPERAND (value, 0);
19586 break;
19587
19588 default:
19589 strip_conversions = false;
19590 break;
19591 }
19592
19593 /* If possible and permitted, output the attribute as a constant. */
19594 if ((forms & dw_scalar_form_constant) != 0
19595 && TREE_CODE (value) == INTEGER_CST)
19596 {
19597 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
19598
19599 /* If HOST_WIDE_INT is big enough then represent the bound as
19600 a constant value. We need to choose a form based on
19601 whether the type is signed or unsigned. We cannot just
19602 call add_AT_unsigned if the value itself is positive
19603 (add_AT_unsigned might add the unsigned value encoded as
19604	     DW_FORM_data[1248]).  Some DWARF consumers will look up the
19605 bounds type and then sign extend any unsigned values found
19606 for signed types. This is needed only for
19607 DW_AT_{lower,upper}_bound, since for most other attributes,
19608 consumers will treat DW_FORM_data[1248] as unsigned values,
19609 regardless of the underlying type. */
19610 if (prec <= HOST_BITS_PER_WIDE_INT
19611 || tree_fits_uhwi_p (value))
19612 {
19613 if (TYPE_UNSIGNED (TREE_TYPE (value)))
19614 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
19615 else
19616 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
19617 }
19618 else
19619 /* Otherwise represent the bound as an unsigned value with
19620 the precision of its type. The precision and signedness
19621 of the type will be necessary to re-interpret it
19622 unambiguously. */
19623 add_AT_wide (die, attr, value);
19624 return;
19625 }
19626
19627	  /* Otherwise, if it is possible and permitted, output a reference to
19628 another DIE. */
19629 if ((forms & dw_scalar_form_reference) != 0)
19630 {
19631 tree decl = NULL_TREE;
19632
19633 /* Some type attributes reference an outer type. For instance, the upper
19634 bound of an array may reference an embedding record (this happens in
19635 Ada). */
19636 if (TREE_CODE (value) == COMPONENT_REF
19637 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
19638 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
19639 decl = TREE_OPERAND (value, 1);
19640
19641 else if (VAR_P (value)
19642 || TREE_CODE (value) == PARM_DECL
19643 || TREE_CODE (value) == RESULT_DECL)
19644 decl = value;
19645
19646 if (decl != NULL_TREE)
19647 {
19648 dw_die_ref decl_die = lookup_decl_die (decl);
19649
19650 /* ??? Can this happen, or should the variable have been bound
19651 first? Probably it can, since I imagine that we try to create
19652 the types of parameters in the order in which they exist in
19653 the list, and won't have created a forward reference to a
19654 later parameter. */
19655 if (decl_die != NULL)
19656 {
19657 add_AT_die_ref (die, attr, decl_die);
19658 return;
19659 }
19660 }
19661 }
19662
19663 /* Last chance: try to create a stack operation procedure to evaluate the
19664 value. Do nothing if even that is not possible or permitted. */
19665 if ((forms & dw_scalar_form_exprloc) == 0)
19666 return;
19667
19668 list = loc_list_from_tree (value, 2, context);
19669 if (context && context->placeholder_arg)
19670 {
19671 placeholder_seen = context->placeholder_seen;
19672 context->placeholder_seen = false;
19673 }
19674 if (list == NULL || single_element_loc_list_p (list))
19675 {
19676	      /* If this attribute is neither a reference nor a constant, it is
19677		 a DWARF expression rather than a location description.  For that,
19678 loc_list_from_tree (value, 0, &context) is needed. */
19679 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
19680 if (list2 && single_element_loc_list_p (list2))
19681 {
19682 if (placeholder_seen)
19683 {
19684 struct dwarf_procedure_info dpi;
19685 dpi.fndecl = NULL_TREE;
19686 dpi.args_count = 1;
19687 if (!resolve_args_picking (list2->expr, 1, &dpi))
19688 return;
19689 }
19690 add_AT_loc (die, attr, list2->expr);
19691 return;
19692 }
19693 }
19694
19695 /* If that failed to give a single element location list, fall back to
19696	     outputting this as a reference, if that form is permitted.  */
19697 if (list == NULL
19698 || (forms & dw_scalar_form_reference) == 0
19699 || placeholder_seen)
19700 return;
19701
19702 if (current_function_decl == 0)
19703 context_die = comp_unit_die ();
19704 else
19705 context_die = lookup_decl_die (current_function_decl);
19706
19707 decl_die = new_die (DW_TAG_variable, context_die, value);
19708 add_AT_flag (decl_die, DW_AT_artificial, 1);
19709 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
19710 context_die);
19711 add_AT_location_description (decl_die, DW_AT_location, list);
19712 add_AT_die_ref (die, attr, decl_die);
19713 }
19714
19715	/* Return the default for DW_AT_lower_bound, or -1 if there is no
19716	   default.  */
19717
19718 static int
19719 lower_bound_default (void)
19720 {
19721 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
19722 {
19723 case DW_LANG_C:
19724 case DW_LANG_C89:
19725 case DW_LANG_C99:
19726 case DW_LANG_C11:
19727 case DW_LANG_C_plus_plus:
19728 case DW_LANG_C_plus_plus_11:
19729 case DW_LANG_C_plus_plus_14:
19730 case DW_LANG_ObjC:
19731 case DW_LANG_ObjC_plus_plus:
19732 case DW_LANG_Java:
19733 return 0;
19734 case DW_LANG_Fortran77:
19735 case DW_LANG_Fortran90:
19736 case DW_LANG_Fortran95:
19737 case DW_LANG_Fortran03:
19738 case DW_LANG_Fortran08:
19739 return 1;
19740 case DW_LANG_UPC:
19741 case DW_LANG_D:
19742 case DW_LANG_Python:
19743 return dwarf_version >= 4 ? 0 : -1;
19744 case DW_LANG_Ada95:
19745 case DW_LANG_Ada83:
19746 case DW_LANG_Cobol74:
19747 case DW_LANG_Cobol85:
19748 case DW_LANG_Pascal83:
19749 case DW_LANG_Modula2:
19750 case DW_LANG_PLI:
19751 return dwarf_version >= 4 ? 1 : -1;
19752 default:
19753 return -1;
19754 }
19755 }
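/* Example of how this interacts with add_bound_info below: for the C
   declaration `int a[4]' the array's lower bound is 0, which matches the
   C default returned above, so DW_AT_lower_bound can simply be omitted;
   a Fortran `integer :: a(4)' likewise keeps its implicit lower bound of
   1 without an explicit attribute.  (Hypothetical inputs, for
   illustration only.)  */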
19756
19757	/* Given a tree node describing an array bound (either lower or upper), output
19758 a representation for that bound. */
19759
19760 static void
19761 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
19762 tree bound, struct loc_descr_context *context)
19763 {
19764 int dflt;
19765
19766 while (1)
19767 switch (TREE_CODE (bound))
19768 {
19769 /* Strip all conversions. */
19770 CASE_CONVERT:
19771 case VIEW_CONVERT_EXPR:
19772 bound = TREE_OPERAND (bound, 0);
19773 break;
19774
19775	      /* All fixed bounds are represented by INTEGER_CST nodes.  Lower bounds
19776 are even omitted when they are the default. */
19777 case INTEGER_CST:
19778 /* If the value for this bound is the default one, we can even omit the
19779 attribute. */
19780 if (bound_attr == DW_AT_lower_bound
19781 && tree_fits_shwi_p (bound)
19782 && (dflt = lower_bound_default ()) != -1
19783 && tree_to_shwi (bound) == dflt)
19784 return;
19785
19786 /* FALLTHRU */
19787
19788 default:
19789	      /* Because of the complex interactions there can be with other GNAT
19790		 encodings, GDB isn't yet ready to handle a proper DWARF description
19791	 	 for self-referential subrange bounds: let GNAT encodings do the
19792 magic in such a case. */
19793 if (is_ada ()
19794 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
19795 && contains_placeholder_p (bound))
19796 return;
19797
19798 add_scalar_info (subrange_die, bound_attr, bound,
19799 dw_scalar_form_constant
19800 | dw_scalar_form_exprloc
19801 | dw_scalar_form_reference,
19802 context);
19803 return;
19804 }
19805 }
19806
19807 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
19808	   possibly nested array subscripts into a flat sequence if COLLAPSE_P is true.
19809 Note that the block of subscript information for an array type also
19810 includes information about the element type of the given array type.
19811
19812 This function reuses previously set type and bound information if
19813 available. */
19814
19815 static void
19816 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
19817 {
19818 unsigned dimension_number;
19819 tree lower, upper;
19820 dw_die_ref child = type_die->die_child;
19821
19822 for (dimension_number = 0;
19823 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
19824 type = TREE_TYPE (type), dimension_number++)
19825 {
19826 tree domain = TYPE_DOMAIN (type);
19827
19828 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
19829 break;
19830
19831 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
19832 and (in GNU C only) variable bounds. Handle all three forms
19833 here. */
19834
19835 /* Find and reuse a previously generated DW_TAG_subrange_type if
19836 available.
19837
19838 For multi-dimensional arrays, as we iterate through the
19839 various dimensions in the enclosing for loop above, we also
19840 iterate through the DIE children and pick at each
19841 DW_TAG_subrange_type previously generated (if available).
19842 Each child DW_TAG_subrange_type DIE describes the range of
19843 the current dimension. At this point we should have as many
19844 DW_TAG_subrange_type's as we have dimensions in the
19845 array. */
19846 dw_die_ref subrange_die = NULL;
19847 if (child)
19848 while (1)
19849 {
19850 child = child->die_sib;
19851 if (child->die_tag == DW_TAG_subrange_type)
19852 subrange_die = child;
19853 if (child == type_die->die_child)
19854 {
19855 /* If we wrapped around, stop looking next time. */
19856 child = NULL;
19857 break;
19858 }
19859 if (child->die_tag == DW_TAG_subrange_type)
19860 break;
19861 }
19862 if (!subrange_die)
19863 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
19864
19865 if (domain)
19866 {
19867 /* We have an array type with specified bounds. */
19868 lower = TYPE_MIN_VALUE (domain);
19869 upper = TYPE_MAX_VALUE (domain);
19870
19871 /* Define the index type. */
19872 if (TREE_TYPE (domain)
19873 && !get_AT (subrange_die, DW_AT_type))
19874 {
19875 /* ??? This is probably an Ada unnamed subrange type. Ignore the
19876 TREE_TYPE field. We can't emit debug info for this
19877 because it is an unnamed integral type. */
19878 if (TREE_CODE (domain) == INTEGER_TYPE
19879 && TYPE_NAME (domain) == NULL_TREE
19880 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
19881 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
19882 ;
19883 else
19884 add_type_attribute (subrange_die, TREE_TYPE (domain),
19885 TYPE_UNQUALIFIED, false, type_die);
19886 }
19887
19888 /* ??? If upper is NULL, the array has unspecified length,
19889 but it does have a lower bound. This happens with Fortran
19890 dimension arr(N:*)
19891 Since the debugger is definitely going to need to know N
19892 to produce useful results, go ahead and output the lower
19893 bound solo, and hope the debugger can cope. */
19894
19895 if (!get_AT (subrange_die, DW_AT_lower_bound))
19896 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
19897 if (upper && !get_AT (subrange_die, DW_AT_upper_bound))
19898 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
19899 }
19900
19901 /* Otherwise we have an array type with an unspecified length. The
19902 DWARF-2 spec does not say how to handle this; let's just leave out the
19903 bounds. */
19904 }
19905 }
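/* Illustration (hypothetical C input): for `int m[2][3]' with COLLAPSE_P
   true, the loop above walks both dimensions and emits two
   DW_TAG_subrange_type children under one array DIE, with
   DW_AT_upper_bound 1 and 2 respectively (the default lower bound 0 is
   omitted, see add_bound_info).  With COLLAPSE_P false, as for Ada, only
   the outermost dimension is described here and the element type keeps
   its own nested array DIE.  */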
19906
19907 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
19908
19909 static void
19910 add_byte_size_attribute (dw_die_ref die, tree tree_node)
19911 {
19912 dw_die_ref decl_die;
19913 HOST_WIDE_INT size;
19914 dw_loc_descr_ref size_expr = NULL;
19915
19916 switch (TREE_CODE (tree_node))
19917 {
19918 case ERROR_MARK:
19919 size = 0;
19920 break;
19921 case ENUMERAL_TYPE:
19922 case RECORD_TYPE:
19923 case UNION_TYPE:
19924 case QUAL_UNION_TYPE:
19925 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
19926 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
19927 {
19928 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
19929 return;
19930 }
19931 size_expr = type_byte_size (tree_node, &size);
19932 break;
19933 case FIELD_DECL:
19934 /* For a data member of a struct or union, the DW_AT_byte_size is
19935 generally given as the number of bytes normally allocated for an
19936 object of the *declared* type of the member itself. This is true
19937 even for bit-fields. */
19938 size = int_size_in_bytes (field_type (tree_node));
19939 break;
19940 default:
19941 gcc_unreachable ();
19942 }
19943
19944 /* Support for dynamically-sized objects was introduced by DWARFv3.
19945 At the moment, GDB does not handle variable byte sizes very well,
19946 though. */
19947 if ((dwarf_version >= 3 || !dwarf_strict)
19948 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
19949 && size_expr != NULL)
19950 add_AT_loc (die, DW_AT_byte_size, size_expr);
19951
19952 /* Note that `size' might be -1 when we get to this point. If it is, that
19953 indicates that the byte size of the entity in question is variable and
19954 that we could not generate a DWARF expression that computes it. */
19955 if (size >= 0)
19956 add_AT_unsigned (die, DW_AT_byte_size, size);
19957 }
19958
19959 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
19960 alignment. */
19961
19962 static void
19963 add_alignment_attribute (dw_die_ref die, tree tree_node)
19964 {
19965 if (dwarf_version < 5 && dwarf_strict)
19966 return;
19967
19968 unsigned align;
19969
19970 if (DECL_P (tree_node))
19971 {
19972 if (!DECL_USER_ALIGN (tree_node))
19973 return;
19974
19975 align = DECL_ALIGN_UNIT (tree_node);
19976 }
19977 else if (TYPE_P (tree_node))
19978 {
19979 if (!TYPE_USER_ALIGN (tree_node))
19980 return;
19981
19982 align = TYPE_ALIGN_UNIT (tree_node);
19983 }
19984 else
19985 gcc_unreachable ();
19986
19987 add_AT_unsigned (die, DW_AT_alignment, align);
19988 }
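/* Example (hypothetical source): for

     struct s { char buf[8]; } __attribute__ ((aligned (16)));

   TYPE_USER_ALIGN is set, so the type DIE gets DW_AT_alignment 16;
   a type that only has its natural ABI alignment is left without the
   attribute, as above.  */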
19989
19990 /* For a FIELD_DECL node which represents a bit-field, output an attribute
19991 which specifies the distance in bits from the highest order bit of the
19992 "containing object" for the bit-field to the highest order bit of the
19993 bit-field itself.
19994
19995 For any given bit-field, the "containing object" is a hypothetical object
19996 (of some integral or enum type) within which the given bit-field lives. The
19997 type of this hypothetical "containing object" is always the same as the
19998 declared type of the individual bit-field itself. The determination of the
19999 exact location of the "containing object" for a bit-field is rather
20000 complicated. It's handled by the `field_byte_offset' function (above).
20001
20002 CTX is required: see the comment for VLR_CONTEXT.
20003
20004 Note that it is the size (in bytes) of the hypothetical "containing object"
20005 which will be given in the DW_AT_byte_size attribute for this bit-field.
20006 (See `byte_size_attribute' above). */
20007
20008 static inline void
20009 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
20010 {
20011 HOST_WIDE_INT object_offset_in_bytes;
20012 tree original_type = DECL_BIT_FIELD_TYPE (decl);
20013 HOST_WIDE_INT bitpos_int;
20014 HOST_WIDE_INT highest_order_object_bit_offset;
20015 HOST_WIDE_INT highest_order_field_bit_offset;
20016 HOST_WIDE_INT bit_offset;
20017
20018 field_byte_offset (decl, ctx, &object_offset_in_bytes);
20019
20020 /* Must be a field and a bit field. */
20021 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
20022
20023 /* We can't yet handle bit-fields whose offsets are variable, so if we
20024 encounter such things, just return without generating any attribute
20025 whatsoever. Likewise for variable or too large size. */
20026 if (! tree_fits_shwi_p (bit_position (decl))
20027 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
20028 return;
20029
20030 bitpos_int = int_bit_position (decl);
20031
20032 /* Note that the bit offset is always the distance (in bits) from the
20033 highest-order bit of the "containing object" to the highest-order bit of
20034 the bit-field itself. Since the "high-order end" of any object or field
20035 is different on big-endian and little-endian machines, the computation
20036 below must take account of these differences. */
20037 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
20038 highest_order_field_bit_offset = bitpos_int;
20039
20040 if (! BYTES_BIG_ENDIAN)
20041 {
20042 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
20043 highest_order_object_bit_offset +=
20044 simple_type_size_in_bits (original_type);
20045 }
20046
20047 bit_offset
20048 = (! BYTES_BIG_ENDIAN
20049 ? highest_order_object_bit_offset - highest_order_field_bit_offset
20050 : highest_order_field_bit_offset - highest_order_object_bit_offset);
20051
20052 if (bit_offset < 0)
20053 add_AT_int (die, DW_AT_bit_offset, bit_offset);
20054 else
20055 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
20056 }
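/* Worked example (hypothetical little-endian layout, BITS_PER_UNIT == 8,
   32-bit int):

     struct s { unsigned a : 3; unsigned b : 5; };

   The containing object of `b' is an unsigned int starting at byte 0, so
   the code above computes highest_order_object_bit_offset == 32,
   highest_order_field_bit_offset == 3 + 5 == 8 and therefore
   DW_AT_bit_offset == 24.  Actual values depend on the target's type
   sizes, field layout and endianness.  */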
20057
20058 /* For a FIELD_DECL node which represents a bit field, output an attribute
20059 which specifies the length in bits of the given field. */
20060
20061 static inline void
20062 add_bit_size_attribute (dw_die_ref die, tree decl)
20063 {
20064 /* Must be a field and a bit field. */
20065 gcc_assert (TREE_CODE (decl) == FIELD_DECL
20066 && DECL_BIT_FIELD_TYPE (decl));
20067
20068 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
20069 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
20070 }
20071
20072	/* If the compiled language is ANSI C, then add a 'prototyped'
20073	   attribute if argument types are given for the parameters of a function.  */
20074
20075 static inline void
20076 add_prototyped_attribute (dw_die_ref die, tree func_type)
20077 {
20078 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20079 {
20080 case DW_LANG_C:
20081 case DW_LANG_C89:
20082 case DW_LANG_C99:
20083 case DW_LANG_C11:
20084 case DW_LANG_ObjC:
20085 if (prototype_p (func_type))
20086 add_AT_flag (die, DW_AT_prototyped, 1);
20087 break;
20088 default:
20089 break;
20090 }
20091 }
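/* For instance (hypothetical C input), a declaration `int f (int);' is
   prototype_p and receives DW_AT_prototyped, while an old-style
   `int g ();' declaration does not.  */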
20092
20093 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
20094 by looking in the type declaration, the object declaration equate table or
20095 the block mapping. */
20096
20097 static inline dw_die_ref
20098 add_abstract_origin_attribute (dw_die_ref die, tree origin)
20099 {
20100 dw_die_ref origin_die = NULL;
20101
20102 if (TREE_CODE (origin) != FUNCTION_DECL
20103 && TREE_CODE (origin) != BLOCK)
20104 {
20105 /* We may have gotten separated from the block for the inlined
20106 function, if we're in an exception handler or some such; make
20107 sure that the abstract function has been written out.
20108
20109 Doing this for nested functions is wrong, however; functions are
20110 distinct units, and our context might not even be inline. */
20111 tree fn = origin;
20112
20113 if (TYPE_P (fn))
20114 fn = TYPE_STUB_DECL (fn);
20115
20116 fn = decl_function_context (fn);
20117 if (fn)
20118 dwarf2out_abstract_function (fn);
20119 }
20120
20121 if (DECL_P (origin))
20122 origin_die = lookup_decl_die (origin);
20123 else if (TYPE_P (origin))
20124 origin_die = lookup_type_die (origin);
20125 else if (TREE_CODE (origin) == BLOCK)
20126 origin_die = BLOCK_DIE (origin);
20127
20128 /* XXX: Functions that are never lowered don't always have correct block
20129	     trees (in the case of Java, they simply have no block tree, as in some other
20130 languages). For these functions, there is nothing we can really do to
20131 output correct debug info for inlined functions in all cases. Rather
20132 than die, we'll just produce deficient debug info now, in that we will
20133 have variables without a proper abstract origin. In the future, when all
20134 functions are lowered, we should re-add a gcc_assert (origin_die)
20135 here. */
20136
20137 if (origin_die)
20138 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
20139 return origin_die;
20140 }
20141
20142 /* We do not currently support the pure_virtual attribute. */
20143
20144 static inline void
20145 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
20146 {
20147 if (DECL_VINDEX (func_decl))
20148 {
20149 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
20150
20151 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
20152 add_AT_loc (die, DW_AT_vtable_elem_location,
20153 new_loc_descr (DW_OP_constu,
20154 tree_to_shwi (DECL_VINDEX (func_decl)),
20155 0));
20156
20157 /* GNU extension: Record what type this method came from originally. */
20158 if (debug_info_level > DINFO_LEVEL_TERSE
20159 && DECL_CONTEXT (func_decl))
20160 add_AT_die_ref (die, DW_AT_containing_type,
20161 lookup_type_die (DECL_CONTEXT (func_decl)));
20162 }
20163 }
20164 \f
20165 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
20166	   given decl.  This used to be a vendor extension until DWARF 4
20167 standardized it. */
20168
20169 static void
20170 add_linkage_attr (dw_die_ref die, tree decl)
20171 {
20172 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
20173
20174 /* Mimic what assemble_name_raw does with a leading '*'. */
20175 if (name[0] == '*')
20176 name = &name[1];
20177
20178 if (dwarf_version >= 4)
20179 add_AT_string (die, DW_AT_linkage_name, name);
20180 else
20181 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
20182 }
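/* For a C++ function such as `int foo (int)' (hypothetical input), the
   assembler name differs from DECL_NAME, so the DIE ends up with
   DW_AT_linkage_name "_Z3fooi" (or DW_AT_MIPS_linkage_name before
   DWARF 4) in addition to the plain DW_AT_name "foo" added elsewhere.  */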
20183
20184 /* Add source coordinate attributes for the given decl. */
20185
20186 static void
20187 add_src_coords_attributes (dw_die_ref die, tree decl)
20188 {
20189 expanded_location s;
20190
20191 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
20192 return;
20193 s = expand_location (DECL_SOURCE_LOCATION (decl));
20194 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
20195 add_AT_unsigned (die, DW_AT_decl_line, s.line);
20196 if (debug_column_info && s.column)
20197 add_AT_unsigned (die, DW_AT_decl_column, s.column);
20198 }
20199
20200 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
20201
20202 static void
20203 add_linkage_name_raw (dw_die_ref die, tree decl)
20204 {
20205 /* Defer until we have an assembler name set. */
20206 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
20207 {
20208 limbo_die_node *asm_name;
20209
20210 asm_name = ggc_cleared_alloc<limbo_die_node> ();
20211 asm_name->die = die;
20212 asm_name->created_for = decl;
20213 asm_name->next = deferred_asm_name;
20214 deferred_asm_name = asm_name;
20215 }
20216 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
20217 add_linkage_attr (die, decl);
20218 }
20219
20220 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
20221
20222 static void
20223 add_linkage_name (dw_die_ref die, tree decl)
20224 {
20225 if (debug_info_level > DINFO_LEVEL_NONE
20226 && VAR_OR_FUNCTION_DECL_P (decl)
20227 && TREE_PUBLIC (decl)
20228 && !(VAR_P (decl) && DECL_REGISTER (decl))
20229 && die->die_tag != DW_TAG_member)
20230 add_linkage_name_raw (die, decl);
20231 }
20232
20233 /* Add a DW_AT_name attribute and source coordinate attribute for the
20234 given decl, but only if it actually has a name. */
20235
20236 static void
20237 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
20238 bool no_linkage_name)
20239 {
20240 tree decl_name;
20241
20242 decl_name = DECL_NAME (decl);
20243 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
20244 {
20245 const char *name = dwarf2_name (decl, 0);
20246 if (name)
20247 add_name_attribute (die, name);
20248 if (! DECL_ARTIFICIAL (decl))
20249 add_src_coords_attributes (die, decl);
20250
20251 if (!no_linkage_name)
20252 add_linkage_name (die, decl);
20253 }
20254
20255 #ifdef VMS_DEBUGGING_INFO
20256 /* Get the function's name, as described by its RTL. This may be different
20257 from the DECL_NAME name used in the source file. */
20258 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
20259 {
20260 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
20261 XEXP (DECL_RTL (decl), 0), false);
20262 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
20263 }
20264 #endif /* VMS_DEBUGGING_INFO */
20265 }
20266
20267 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
20268
20269 static void
20270 add_discr_value (dw_die_ref die, dw_discr_value *value)
20271 {
20272 dw_attr_node attr;
20273
20274 attr.dw_attr = DW_AT_discr_value;
20275 attr.dw_attr_val.val_class = dw_val_class_discr_value;
20276 attr.dw_attr_val.val_entry = NULL;
20277 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
20278 if (value->pos)
20279 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
20280 else
20281 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
20282 add_dwarf_attr (die, &attr);
20283 }
20284
20285 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
20286
20287 static void
20288 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
20289 {
20290 dw_attr_node attr;
20291
20292 attr.dw_attr = DW_AT_discr_list;
20293 attr.dw_attr_val.val_class = dw_val_class_discr_list;
20294 attr.dw_attr_val.val_entry = NULL;
20295 attr.dw_attr_val.v.val_discr_list = discr_list;
20296 add_dwarf_attr (die, &attr);
20297 }
20298
20299 static inline dw_discr_list_ref
20300 AT_discr_list (dw_attr_node *attr)
20301 {
20302 return attr->dw_attr_val.v.val_discr_list;
20303 }
20304
20305 #ifdef VMS_DEBUGGING_INFO
20306	/* Output the debug main pointer die for VMS.  */
20307
20308 void
20309 dwarf2out_vms_debug_main_pointer (void)
20310 {
20311 char label[MAX_ARTIFICIAL_LABEL_BYTES];
20312 dw_die_ref die;
20313
20314 /* Allocate the VMS debug main subprogram die. */
20315 die = ggc_cleared_alloc<die_node> ();
20316 die->die_tag = DW_TAG_subprogram;
20317 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
20318 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
20319 current_function_funcdef_no);
20320 add_AT_lbl_id (die, DW_AT_entry_pc, label);
20321
20322 /* Make it the first child of comp_unit_die (). */
20323 die->die_parent = comp_unit_die ();
20324 if (comp_unit_die ()->die_child)
20325 {
20326 die->die_sib = comp_unit_die ()->die_child->die_sib;
20327 comp_unit_die ()->die_child->die_sib = die;
20328 }
20329 else
20330 {
20331 die->die_sib = die;
20332 comp_unit_die ()->die_child = die;
20333 }
20334 }
20335 #endif /* VMS_DEBUGGING_INFO */
20336
20337 /* Push a new declaration scope. */
20338
20339 static void
20340 push_decl_scope (tree scope)
20341 {
20342 vec_safe_push (decl_scope_table, scope);
20343 }
20344
20345 /* Pop a declaration scope. */
20346
20347 static inline void
20348 pop_decl_scope (void)
20349 {
20350 decl_scope_table->pop ();
20351 }
20352
20353 /* walk_tree helper function for uses_local_type, below. */
20354
20355 static tree
20356 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
20357 {
20358 if (!TYPE_P (*tp))
20359 *walk_subtrees = 0;
20360 else
20361 {
20362 tree name = TYPE_NAME (*tp);
20363 if (name && DECL_P (name) && decl_function_context (name))
20364 return *tp;
20365 }
20366 return NULL_TREE;
20367 }
20368
20369 /* If TYPE involves a function-local type (including a local typedef to a
20370 non-local type), returns that type; otherwise returns NULL_TREE. */
20371
20372 static tree
20373 uses_local_type (tree type)
20374 {
20375 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
20376 return used;
20377 }
20378
20379 /* Return the DIE for the scope that immediately contains this type.
20380 Non-named types that do not involve a function-local type get global
20381 scope. Named types nested in namespaces or other types get their
20382 containing scope. All other types (i.e. function-local named types) get
20383 the current active scope. */
20384
20385 static dw_die_ref
20386 scope_die_for (tree t, dw_die_ref context_die)
20387 {
20388 dw_die_ref scope_die = NULL;
20389 tree containing_scope;
20390
20391 /* Non-types always go in the current scope. */
20392 gcc_assert (TYPE_P (t));
20393
20394 /* Use the scope of the typedef, rather than the scope of the type
20395 it refers to. */
20396 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
20397 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
20398 else
20399 containing_scope = TYPE_CONTEXT (t);
20400
20401 /* Use the containing namespace if there is one. */
20402 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
20403 {
20404 if (context_die == lookup_decl_die (containing_scope))
20405 /* OK */;
20406 else if (debug_info_level > DINFO_LEVEL_TERSE)
20407 context_die = get_context_die (containing_scope);
20408 else
20409 containing_scope = NULL_TREE;
20410 }
20411
20412 /* Ignore function type "scopes" from the C frontend. They mean that
20413 a tagged type is local to a parmlist of a function declarator, but
20414 that isn't useful to DWARF. */
20415 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
20416 containing_scope = NULL_TREE;
20417
20418 if (SCOPE_FILE_SCOPE_P (containing_scope))
20419 {
20420 /* If T uses a local type keep it local as well, to avoid references
20421 to function-local DIEs from outside the function. */
20422 if (current_function_decl && uses_local_type (t))
20423 scope_die = context_die;
20424 else
20425 scope_die = comp_unit_die ();
20426 }
20427 else if (TYPE_P (containing_scope))
20428 {
20429 /* For types, we can just look up the appropriate DIE. */
20430 if (debug_info_level > DINFO_LEVEL_TERSE)
20431 scope_die = get_context_die (containing_scope);
20432 else
20433 {
20434 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
20435 if (scope_die == NULL)
20436 scope_die = comp_unit_die ();
20437 }
20438 }
20439 else
20440 scope_die = context_die;
20441
20442 return scope_die;
20443 }
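/* For example (hypothetical C++ input), a type `N::S' declared inside
   namespace N gets the DW_TAG_namespace DIE for N as its scope, whereas
   a named struct defined inside a function body is kept under the
   function's DIE via the current CONTEXT_DIE, per the rules in the
   comment above.  */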
20444
20445 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
20446
20447 static inline int
20448 local_scope_p (dw_die_ref context_die)
20449 {
20450 for (; context_die; context_die = context_die->die_parent)
20451 if (context_die->die_tag == DW_TAG_inlined_subroutine
20452 || context_die->die_tag == DW_TAG_subprogram)
20453 return 1;
20454
20455 return 0;
20456 }
20457
20458 /* Returns nonzero if CONTEXT_DIE is a class. */
20459
20460 static inline int
20461 class_scope_p (dw_die_ref context_die)
20462 {
20463 return (context_die
20464 && (context_die->die_tag == DW_TAG_structure_type
20465 || context_die->die_tag == DW_TAG_class_type
20466 || context_die->die_tag == DW_TAG_interface_type
20467 || context_die->die_tag == DW_TAG_union_type));
20468 }
20469
20470 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
20471 whether or not to treat a DIE in this context as a declaration. */
20472
20473 static inline int
20474 class_or_namespace_scope_p (dw_die_ref context_die)
20475 {
20476 return (class_scope_p (context_die)
20477 || (context_die && context_die->die_tag == DW_TAG_namespace));
20478 }
20479
20480 /* Many forms of DIEs require a "type description" attribute. This
20481 routine locates the proper "type descriptor" die for the type given
20482 by 'type' plus any additional qualifiers given by 'cv_quals', and
20483 adds a DW_AT_type attribute below the given die. */
20484
20485 static void
20486 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
20487 bool reverse, dw_die_ref context_die)
20488 {
20489 enum tree_code code = TREE_CODE (type);
20490 dw_die_ref type_die = NULL;
20491
20492 /* ??? If this type is an unnamed subrange type of an integral, floating-point
20493 or fixed-point type, use the inner type. This is because we have no
20494 support for unnamed types in base_type_die. This can happen if this is
20495	     an Ada subrange type.  The correct solution is to emit a subrange type DIE.  */
20496 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
20497 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
20498 type = TREE_TYPE (type), code = TREE_CODE (type);
20499
20500 if (code == ERROR_MARK
20501 /* Handle a special case. For functions whose return type is void, we
20502 generate *no* type attribute. (Note that no object may have type
20503 `void', so this only applies to function return types). */
20504 || code == VOID_TYPE)
20505 return;
20506
20507 type_die = modified_type_die (type,
20508 cv_quals | TYPE_QUALS_NO_ADDR_SPACE (type),
20509 reverse,
20510 context_die);
20511
20512 if (type_die != NULL)
20513 add_AT_die_ref (object_die, DW_AT_type, type_die);
20514 }
20515
20516 /* Given an object die, add the calling convention attribute for the
20517 function call type. */
20518 static void
20519 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
20520 {
20521 enum dwarf_calling_convention value = DW_CC_normal;
20522
20523 value = ((enum dwarf_calling_convention)
20524 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
20525
20526 if (is_fortran ()
20527 && !strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)), "MAIN__"))
20528 {
20529 /* DWARF 2 doesn't provide a way to identify a program's source-level
20530 entry point. DW_AT_calling_convention attributes are only meant
20531 to describe functions' calling conventions. However, lacking a
20532 better way to signal the Fortran main program, we used this for
20533 a long time, following existing custom. Now, DWARF 4 has
20534 DW_AT_main_subprogram, which we add below, but some tools still
20535 rely on the old way, which we thus keep. */
20536 value = DW_CC_program;
20537
20538 if (dwarf_version >= 4 || !dwarf_strict)
20539 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
20540 }
20541
20542	  /* Only add the attribute if the backend requests it and the
20543	     value is not DW_CC_normal.  */
20544 if (value && (value != DW_CC_normal))
20545 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
20546 }
20547
20548 /* Given a tree pointer to a struct, class, union, or enum type node, return
20549 a pointer to the (string) tag name for the given type, or zero if the type
20550 was declared without a tag. */
20551
20552 static const char *
20553 type_tag (const_tree type)
20554 {
20555 const char *name = 0;
20556
20557 if (TYPE_NAME (type) != 0)
20558 {
20559 tree t = 0;
20560
20561 /* Find the IDENTIFIER_NODE for the type name. */
20562 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
20563 && !TYPE_NAMELESS (type))
20564 t = TYPE_NAME (type);
20565
20566 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
20567 a TYPE_DECL node, regardless of whether or not a `typedef' was
20568 involved. */
20569 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
20570 && ! DECL_IGNORED_P (TYPE_NAME (type)))
20571 {
20572 /* We want to be extra verbose. Don't call dwarf_name if
20573 DECL_NAME isn't set. The default hook for decl_printable_name
20574 doesn't like that, and in this context it's correct to return
20575 0, instead of "<anonymous>" or the like. */
20576 if (DECL_NAME (TYPE_NAME (type))
20577 && !DECL_NAMELESS (TYPE_NAME (type)))
20578 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
20579 }
20580
20581 /* Now get the name as a string, or invent one. */
20582 if (!name && t != 0)
20583 name = IDENTIFIER_POINTER (t);
20584 }
20585
20586 return (name == 0 || *name == '\0') ? 0 : name;
20587 }
20588
20589	/* Return the type associated with a data member, making a special check
20590	   for bit-field types.  */
20591
20592 static inline tree
20593 member_declared_type (const_tree member)
20594 {
20595 return (DECL_BIT_FIELD_TYPE (member)
20596 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
20597 }
20598
20599 /* Get the decl's label, as described by its RTL. This may be different
20600 from the DECL_NAME name used in the source file. */
20601
20602 #if 0
20603 static const char *
20604 decl_start_label (tree decl)
20605 {
20606 rtx x;
20607 const char *fnname;
20608
20609 x = DECL_RTL (decl);
20610 gcc_assert (MEM_P (x));
20611
20612 x = XEXP (x, 0);
20613 gcc_assert (GET_CODE (x) == SYMBOL_REF);
20614
20615 fnname = XSTR (x, 0);
20616 return fnname;
20617 }
20618 #endif
20619 \f
20620 /* For variable-length arrays that have been previously generated, but
20621 may be incomplete due to missing subscript info, fill the subscript
20622 info. Return TRUE if this is one of those cases. */
20623 static bool
20624 fill_variable_array_bounds (tree type)
20625 {
20626 if (TREE_ASM_WRITTEN (type)
20627 && TREE_CODE (type) == ARRAY_TYPE
20628 && variably_modified_type_p (type, NULL))
20629 {
20630 dw_die_ref array_die = lookup_type_die (type);
20631 if (!array_die)
20632 return false;
20633 add_subscript_info (array_die, type, !is_ada ());
20634 return true;
20635 }
20636 return false;
20637 }
20638
20639 /* These routines generate the internal representation of the DIE's for
20640 the compilation unit. Debugging information is collected by walking
20641 the declaration trees passed in from dwarf2out_decl(). */
20642
20643 static void
20644 gen_array_type_die (tree type, dw_die_ref context_die)
20645 {
20646 dw_die_ref array_die;
20647
20648 /* GNU compilers represent multidimensional array types as sequences of one
20649 dimensional array types whose element types are themselves array types.
20650 We sometimes squish that down to a single array_type DIE with multiple
20651 subscripts in the Dwarf debugging info. The draft Dwarf specification
20652	   says that we are allowed to do this kind of compression in C, because
20653 there is no difference between an array of arrays and a multidimensional
20654 array. We don't do this for Ada to remain as close as possible to the
20655	   actual representation, which is especially important given the language's
20656	   flexibility with respect to arrays of variable size.  */
20657
20658 bool collapse_nested_arrays = !is_ada ();
20659
20660 if (fill_variable_array_bounds (type))
20661 return;
20662
20663 dw_die_ref scope_die = scope_die_for (type, context_die);
20664 tree element_type;
20665
20666 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
20667	     DW_TAG_string_type doesn't have a DW_AT_type attribute).  */
20668 if (TYPE_STRING_FLAG (type)
20669 && TREE_CODE (type) == ARRAY_TYPE
20670 && is_fortran ()
20671 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
20672 {
20673 HOST_WIDE_INT size;
20674
20675 array_die = new_die (DW_TAG_string_type, scope_die, type);
20676 add_name_attribute (array_die, type_tag (type));
20677 equate_type_number_to_die (type, array_die);
20678 size = int_size_in_bytes (type);
20679 if (size >= 0)
20680 add_AT_unsigned (array_die, DW_AT_byte_size, size);
20681 else if (TYPE_DOMAIN (type) != NULL_TREE
20682 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
20683 {
20684 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
20685 tree rszdecl = szdecl;
20686 HOST_WIDE_INT rsize = 0;
20687
20688 size = int_size_in_bytes (TREE_TYPE (szdecl));
20689 if (!DECL_P (szdecl))
20690 {
20691 if (TREE_CODE (szdecl) == INDIRECT_REF
20692 && DECL_P (TREE_OPERAND (szdecl, 0)))
20693 {
20694 rszdecl = TREE_OPERAND (szdecl, 0);
20695 rsize = int_size_in_bytes (TREE_TYPE (rszdecl));
20696 if (rsize <= 0)
20697 size = 0;
20698 }
20699 else
20700 size = 0;
20701 }
20702 if (size > 0)
20703 {
20704 dw_loc_list_ref loc = loc_list_from_tree (szdecl, 2, NULL);
20705 if (loc == NULL
20706 && early_dwarf
20707 && current_function_decl
20708 && DECL_CONTEXT (rszdecl) == current_function_decl)
20709 {
20710 dw_die_ref ref = lookup_decl_die (rszdecl);
20711 dw_loc_descr_ref l = NULL;
20712 if (ref)
20713 {
20714 l = new_loc_descr (DW_OP_call4, 0, 0);
20715 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
20716 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
20717 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
20718 }
20719 else if (TREE_CODE (rszdecl) == PARM_DECL
20720 && string_types)
20721 {
20722 l = new_loc_descr (DW_OP_call4, 0, 0);
20723 l->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
20724 l->dw_loc_oprnd1.v.val_decl_ref = rszdecl;
20725 string_types->safe_push (array_die);
20726 }
20727 if (l && rszdecl != szdecl)
20728 {
20729 if (rsize == DWARF2_ADDR_SIZE)
20730 add_loc_descr (&l, new_loc_descr (DW_OP_deref,
20731 0, 0));
20732 else
20733 add_loc_descr (&l, new_loc_descr (DW_OP_deref_size,
20734 rsize, 0));
20735 }
20736 if (l)
20737 loc = new_loc_list (l, NULL, NULL, NULL);
20738 }
20739 if (loc)
20740 {
20741 add_AT_location_description (array_die, DW_AT_string_length,
20742 loc);
20743 if (size != DWARF2_ADDR_SIZE)
20744 add_AT_unsigned (array_die, dwarf_version >= 5
20745 ? DW_AT_string_length_byte_size
20746 : DW_AT_byte_size, size);
20747 }
20748 }
20749 }
20750 return;
20751 }
20752
20753 array_die = new_die (DW_TAG_array_type, scope_die, type);
20754 add_name_attribute (array_die, type_tag (type));
20755 equate_type_number_to_die (type, array_die);
20756
20757 if (TREE_CODE (type) == VECTOR_TYPE)
20758 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
20759
20760 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
20761 if (is_fortran ()
20762 && TREE_CODE (type) == ARRAY_TYPE
20763 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
20764 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
20765 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
20766
20767 #if 0
20768 /* We default the array ordering. SDB will probably do
20769 the right things even if DW_AT_ordering is not present. It's not even
20770 an issue until we start to get into multidimensional arrays anyway. If
20771 SDB is ever caught doing the Wrong Thing for multi-dimensional arrays,
20772 then we'll have to put the DW_AT_ordering attribute back in. (But if
20773 and when we find out that we need to put these in, we will only do so
20774	   for multidimensional arrays.)  */
20775 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
20776 #endif
20777
20778 if (TREE_CODE (type) == VECTOR_TYPE)
20779 {
20780 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
20781 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
20782 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
20783 add_bound_info (subrange_die, DW_AT_upper_bound,
20784 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
20785 }
20786 else
20787 add_subscript_info (array_die, type, collapse_nested_arrays);
20788
20789 /* Add representation of the type of the elements of this array type and
20790 emit the corresponding DIE if we haven't done it already. */
20791 element_type = TREE_TYPE (type);
20792 if (collapse_nested_arrays)
20793 while (TREE_CODE (element_type) == ARRAY_TYPE)
20794 {
20795 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
20796 break;
20797 element_type = TREE_TYPE (element_type);
20798 }
20799
20800 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
20801 TREE_CODE (type) == ARRAY_TYPE
20802 && TYPE_REVERSE_STORAGE_ORDER (type),
20803 context_die);
20804
20805 add_gnat_descriptive_type_attribute (array_die, type, context_die);
20806 if (TYPE_ARTIFICIAL (type))
20807 add_AT_flag (array_die, DW_AT_artificial, 1);
20808
20809 if (get_AT (array_die, DW_AT_name))
20810 add_pubtype (type, array_die);
20811
20812 add_alignment_attribute (array_die, type);
20813 }
20814
20815 /* After all arguments are created, adjust any DW_TAG_string_type
20816	   DIEs' DW_AT_string_length attributes.  */
20817
20818 static void
20819 adjust_string_types (void)
20820 {
20821 dw_die_ref array_die;
20822 unsigned int i;
20823 FOR_EACH_VEC_ELT (*string_types, i, array_die)
20824 {
20825 dw_attr_node *a = get_AT (array_die, DW_AT_string_length);
20826 if (a == NULL)
20827 continue;
20828 dw_loc_descr_ref loc = AT_loc (a);
20829 gcc_assert (loc->dw_loc_opc == DW_OP_call4
20830 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref);
20831 dw_die_ref ref = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
20832 if (ref)
20833 {
20834 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
20835 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
20836 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
20837 }
20838 else
20839 {
20840 remove_AT (array_die, DW_AT_string_length);
20841 remove_AT (array_die, dwarf_version >= 5
20842 ? DW_AT_string_length_byte_size
20843 : DW_AT_byte_size);
20844 }
20845 }
20846 }
20847
20848	/* This routine generates a DIE for an array with a hidden descriptor;
20849	   details are filled into *info by a langhook.  */
20850
20851 static void
20852 gen_descr_array_type_die (tree type, struct array_descr_info *info,
20853 dw_die_ref context_die)
20854 {
20855 const dw_die_ref scope_die = scope_die_for (type, context_die);
20856 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
20857 struct loc_descr_context context = { type, info->base_decl, NULL,
20858 false, false };
20859 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
20860 int dim;
20861
20862 add_name_attribute (array_die, type_tag (type));
20863 equate_type_number_to_die (type, array_die);
20864
20865 if (info->ndimensions > 1)
20866 switch (info->ordering)
20867 {
20868 case array_descr_ordering_row_major:
20869 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
20870 break;
20871 case array_descr_ordering_column_major:
20872 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
20873 break;
20874 default:
20875 break;
20876 }
20877
20878 if (dwarf_version >= 3 || !dwarf_strict)
20879 {
20880 if (info->data_location)
20881 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
20882 dw_scalar_form_exprloc, &context);
20883 if (info->associated)
20884 add_scalar_info (array_die, DW_AT_associated, info->associated,
20885 dw_scalar_form_constant
20886 | dw_scalar_form_exprloc
20887 | dw_scalar_form_reference, &context);
20888 if (info->allocated)
20889 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
20890 dw_scalar_form_constant
20891 | dw_scalar_form_exprloc
20892 | dw_scalar_form_reference, &context);
20893 if (info->stride)
20894 {
20895 const enum dwarf_attribute attr
20896 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
20897 const int forms
20898 = (info->stride_in_bits)
20899 ? dw_scalar_form_constant
20900 : (dw_scalar_form_constant
20901 | dw_scalar_form_exprloc
20902 | dw_scalar_form_reference);
20903
20904 add_scalar_info (array_die, attr, info->stride, forms, &context);
20905 }
20906 }
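/* DW_AT_rank and DW_TAG_generic_subrange are DWARF 5 additions, used
for Fortran assumed-rank arrays whose number of dimensions is only
known at run time.  */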
20907 if (dwarf_version >= 5)
20908 {
20909 if (info->rank)
20910 {
20911 add_scalar_info (array_die, DW_AT_rank, info->rank,
20912 dw_scalar_form_constant
20913 | dw_scalar_form_exprloc, &context);
20914 subrange_tag = DW_TAG_generic_subrange;
20915 context.placeholder_arg = true;
20916 }
20917 }
20918
20919 add_gnat_descriptive_type_attribute (array_die, type, context_die);
20920
20921 for (dim = 0; dim < info->ndimensions; dim++)
20922 {
20923 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
20924
20925 if (info->dimen[dim].bounds_type)
20926 add_type_attribute (subrange_die,
20927 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
20928 false, context_die);
20929 if (info->dimen[dim].lower_bound)
20930 add_bound_info (subrange_die, DW_AT_lower_bound,
20931 info->dimen[dim].lower_bound, &context);
20932 if (info->dimen[dim].upper_bound)
20933 add_bound_info (subrange_die, DW_AT_upper_bound,
20934 info->dimen[dim].upper_bound, &context);
20935 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
20936 add_scalar_info (subrange_die, DW_AT_byte_stride,
20937 info->dimen[dim].stride,
20938 dw_scalar_form_constant
20939 | dw_scalar_form_exprloc
20940 | dw_scalar_form_reference,
20941 &context);
20942 }
20943
20944 gen_type_die (info->element_type, context_die);
20945 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
20946 TREE_CODE (type) == ARRAY_TYPE
20947 && TYPE_REVERSE_STORAGE_ORDER (type),
20948 context_die);
20949
20950 if (get_AT (array_die, DW_AT_name))
20951 add_pubtype (type, array_die);
20952
20953 add_alignment_attribute (array_die, type);
20954 }
20955
20956 #if 0
20957 static void
20958 gen_entry_point_die (tree decl, dw_die_ref context_die)
20959 {
20960 tree origin = decl_ultimate_origin (decl);
20961 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
20962
20963 if (origin != NULL)
20964 add_abstract_origin_attribute (decl_die, origin);
20965 else
20966 {
20967 add_name_and_src_coords_attributes (decl_die, decl);
20968 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
20969 TYPE_UNQUALIFIED, false, context_die);
20970 }
20971
20972 if (DECL_ABSTRACT_P (decl))
20973 equate_decl_number_to_die (decl, decl_die);
20974 else
20975 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
20976 }
20977 #endif
20978
20979 /* Walk through the list of incomplete types again, trying once more to
20980 emit full debugging info for them. */
20981
20982 static void
20983 retry_incomplete_types (void)
20984 {
20985 set_early_dwarf s;
20986 int i;
20987
20988 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
20989 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
20990 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
20991 vec_safe_truncate (incomplete_types, 0);
20992 }
20993
20994 /* Determine what tag to use for a record type. */
20995
20996 static enum dwarf_tag
20997 record_type_tag (tree type)
20998 {
20999 if (! lang_hooks.types.classify_record)
21000 return DW_TAG_structure_type;
21001
21002 switch (lang_hooks.types.classify_record (type))
21003 {
21004 case RECORD_IS_STRUCT:
21005 return DW_TAG_structure_type;
21006
21007 case RECORD_IS_CLASS:
21008 return DW_TAG_class_type;
21009
21010 case RECORD_IS_INTERFACE:
21011 if (dwarf_version >= 3 || !dwarf_strict)
21012 return DW_TAG_interface_type;
21013 return DW_TAG_structure_type;
21014
21015 default:
21016 gcc_unreachable ();
21017 }
21018 }
21019
21020 /* Generate a DIE to represent an enumeration type. Note that these DIEs
21021 include all of the information about the enumeration values also. Each
21022 enumerated type name/value is listed as a child of the enumerated type
21023 DIE. */
21024
21025 static dw_die_ref
21026 gen_enumeration_type_die (tree type, dw_die_ref context_die)
21027 {
21028 dw_die_ref type_die = lookup_type_die (type);
21029
21030 if (type_die == NULL)
21031 {
21032 type_die = new_die (DW_TAG_enumeration_type,
21033 scope_die_for (type, context_die), type);
21034 equate_type_number_to_die (type, type_die);
21035 add_name_attribute (type_die, type_tag (type));
21036 if (dwarf_version >= 4 || !dwarf_strict)
21037 {
21038 if (ENUM_IS_SCOPED (type))
21039 add_AT_flag (type_die, DW_AT_enum_class, 1);
21040 if (ENUM_IS_OPAQUE (type))
21041 add_AT_flag (type_die, DW_AT_declaration, 1);
21042 }
21043 if (!dwarf_strict)
21044 add_AT_unsigned (type_die, DW_AT_encoding,
21045 TYPE_UNSIGNED (type)
21046 ? DW_ATE_unsigned
21047 : DW_ATE_signed);
21048 }
21049 else if (! TYPE_SIZE (type))
21050 return type_die;
21051 else
21052 remove_AT (type_die, DW_AT_declaration);
21053
21054 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
21055 given enum type is incomplete, do not generate the DW_AT_byte_size
21056 attribute or the DW_AT_element_list attribute. */
21057 if (TYPE_SIZE (type))
21058 {
21059 tree link;
21060
21061 TREE_ASM_WRITTEN (type) = 1;
21062 add_byte_size_attribute (type_die, type);
21063 add_alignment_attribute (type_die, type);
21064 if (dwarf_version >= 3 || !dwarf_strict)
21065 {
21066 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
21067 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
21068 context_die);
21069 }
21070 if (TYPE_STUB_DECL (type) != NULL_TREE)
21071 {
21072 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
21073 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
21074 }
21075
21076 /* If the first reference to this type was as the return type of an
21077 inline function, then it may not have a parent. Fix this now. */
21078 if (type_die->die_parent == NULL)
21079 add_child_die (scope_die_for (type, context_die), type_die);
21080
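/* Emit a DW_TAG_enumerator child, with DW_AT_name and DW_AT_const_value,
for each enumeration constant.  TREE_PURPOSE of a TYPE_VALUES entry is
the name; TREE_VALUE is the value, possibly wrapped in a CONST_DECL, in
which case the actual value is its DECL_INITIAL.  */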
21081 for (link = TYPE_VALUES (type);
21082 link != NULL; link = TREE_CHAIN (link))
21083 {
21084 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
21085 tree value = TREE_VALUE (link);
21086
21087 add_name_attribute (enum_die,
21088 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
21089
21090 if (TREE_CODE (value) == CONST_DECL)
21091 value = DECL_INITIAL (value);
21092
21093 if (simple_type_size_in_bits (TREE_TYPE (value))
21094 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
21095 {
21096 /* For constant forms created by add_AT_unsigned, DWARF
21097 consumers (GDB, elfutils, etc.) always zero extend
21098 the value. Only when the actual value is negative
21099 do we need to use add_AT_int to generate a constant
21100 form that can represent negative values. */
21101 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
21102 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
21103 add_AT_unsigned (enum_die, DW_AT_const_value,
21104 (unsigned HOST_WIDE_INT) val);
21105 else
21106 add_AT_int (enum_die, DW_AT_const_value, val);
21107 }
21108 else
21109 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
21110 that here. TODO: This should be re-worked to use correct
21111 signed/unsigned double tags for all cases. */
21112 add_AT_wide (enum_die, DW_AT_const_value, value);
21113 }
21114
21115 add_gnat_descriptive_type_attribute (type_die, type, context_die);
21116 if (TYPE_ARTIFICIAL (type))
21117 add_AT_flag (type_die, DW_AT_artificial, 1);
21118 }
21119 else
21120 add_AT_flag (type_die, DW_AT_declaration, 1);
21121
21122 add_alignment_attribute (type_die, type);
21123
21124 add_pubtype (type, type_die);
21125
21126 return type_die;
21127 }
21128
21129 /* Generate a DIE to represent either a real live formal parameter decl or to
21130 represent just the type of some formal parameter position in some function
21131 type.
21132
21133 Note that this routine is a bit unusual because its argument may be a
21134 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
21135 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
21136 node. If it's the former then this function is being called to output a
21137 DIE to represent a formal parameter object (or some inlining thereof). If
21138 it's the latter, then this function is only being called to output a
21139 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
21140 argument type of some subprogram type.
21141 If EMIT_NAME_P is true, name and source coordinate attributes
21142 are emitted. */
21143
21144 static dw_die_ref
21145 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
21146 dw_die_ref context_die)
21147 {
21148 tree node_or_origin = node ? node : origin;
21149 tree ultimate_origin;
21150 dw_die_ref parm_die = NULL;
21151
21152 if (TREE_CODE_CLASS (TREE_CODE (node_or_origin)) == tcc_declaration)
21153 {
21154 parm_die = lookup_decl_die (node);
21155
21156 /* If the contexts differ, we may not be talking about the same
21157 thing. */
21158 if (parm_die && parm_die->die_parent != context_die)
21159 {
21160 if (!DECL_ABSTRACT_P (node))
21161 {
21162 /* This can happen when creating an inlined instance, in
21163 which case we need to create a new DIE that will get
21164 annotated with DW_AT_abstract_origin. */
21165 parm_die = NULL;
21166 }
21167 else
21168 {
21169 /* FIXME: Reuse DIE even with a differing context.
21170
21171 This can happen when calling
21172 dwarf2out_abstract_function to build debug info for
21173 the abstract instance of a function for which we have
21174 already generated a DIE in
21175 dwarf2out_early_global_decl.
21176
21177 Once we remove dwarf2out_abstract_function, we should
21178 have a call to gcc_unreachable here. */
21179 }
21180 }
21181
21182 if (parm_die && parm_die->die_parent == NULL)
21183 {
21184 /* Check that parm_die already has the right attributes that
21185 we would have added below. If any attributes are
21186 missing, fall through to add them. */
21187 if (! DECL_ABSTRACT_P (node_or_origin)
21188 && !get_AT (parm_die, DW_AT_location)
21189 && !get_AT (parm_die, DW_AT_const_value))
21190 /* We are missing location info, and are about to add it. */
21191 ;
21192 else
21193 {
21194 add_child_die (context_die, parm_die);
21195 return parm_die;
21196 }
21197 }
21198 }
21199
21200 /* If we have a previously generated DIE, use it, unless this is a
21201 concrete instance (origin != NULL), in which case we need a new
21202 DIE with a corresponding DW_AT_abstract_origin. */
21203 bool reusing_die;
21204 if (parm_die && origin == NULL)
21205 reusing_die = true;
21206 else
21207 {
21208 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
21209 reusing_die = false;
21210 }
21211
21212 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
21213 {
21214 case tcc_declaration:
21215 ultimate_origin = decl_ultimate_origin (node_or_origin);
21216 if (node || ultimate_origin)
21217 origin = ultimate_origin;
21218
21219 if (reusing_die)
21220 goto add_location;
21221
21222 if (origin != NULL)
21223 add_abstract_origin_attribute (parm_die, origin);
21224 else if (emit_name_p)
21225 add_name_and_src_coords_attributes (parm_die, node);
21226 if (origin == NULL
21227 || (! DECL_ABSTRACT_P (node_or_origin)
21228 && variably_modified_type_p (TREE_TYPE (node_or_origin),
21229 decl_function_context
21230 (node_or_origin))))
21231 {
21232 tree type = TREE_TYPE (node_or_origin);
21233 if (decl_by_reference_p (node_or_origin))
21234 add_type_attribute (parm_die, TREE_TYPE (type),
21235 TYPE_UNQUALIFIED,
21236 false, context_die);
21237 else
21238 add_type_attribute (parm_die, type,
21239 decl_quals (node_or_origin),
21240 false, context_die);
21241 }
21242 if (origin == NULL && DECL_ARTIFICIAL (node))
21243 add_AT_flag (parm_die, DW_AT_artificial, 1);
21244 add_location:
21245 if (node && node != origin)
21246 equate_decl_number_to_die (node, parm_die);
21247 if (! DECL_ABSTRACT_P (node_or_origin))
21248 add_location_or_const_value_attribute (parm_die, node_or_origin,
21249 node == NULL);
21250
21251 break;
21252
21253 case tcc_type:
21254 /* We were called with some kind of a ..._TYPE node. */
21255 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
21256 context_die);
21257 break;
21258
21259 default:
21260 gcc_unreachable ();
21261 }
21262
21263 return parm_die;
21264 }
21265
21266 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
21267 children DW_TAG_formal_parameter DIEs representing the arguments of the
21268 parameter pack.
21269
21270 PARM_PACK must be a function parameter pack.
21271 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
21272 must point to the subsequent arguments of the function PACK_ARG belongs to.
21273 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
21274 If NEXT_ARG is non-NULL, *NEXT_ARG is set to the function argument
21275 following the last one for which a DIE was generated. */
21276
21277 static dw_die_ref
21278 gen_formal_parameter_pack_die (tree parm_pack,
21279 tree pack_arg,
21280 dw_die_ref subr_die,
21281 tree *next_arg)
21282 {
21283 tree arg;
21284 dw_die_ref parm_pack_die;
21285
21286 gcc_assert (parm_pack
21287 && lang_hooks.function_parameter_pack_p (parm_pack)
21288 && subr_die);
21289
21290 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
21291 add_src_coords_attributes (parm_pack_die, parm_pack);
21292
21293 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
21294 {
21295 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
21296 parm_pack))
21297 break;
21298 gen_formal_parameter_die (arg, NULL,
21299 false /* Don't emit name attribute. */,
21300 parm_pack_die);
21301 }
21302 if (next_arg)
21303 *next_arg = arg;
21304 return parm_pack_die;
21305 }
21306
21307 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
21308 at the end of an (ANSI prototyped) formal parameter list. */
21309
21310 static void
21311 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
21312 {
21313 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
21314 }
21315
21316 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
21317 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
21318 parameters as specified in some function type specification (except for
21319 those which appear as part of a function *definition*). */
21320
21321 static void
21322 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
21323 {
21324 tree link;
21325 tree formal_type = NULL;
21326 tree first_parm_type;
21327 tree arg;
21328
21329 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
21330 {
21331 arg = DECL_ARGUMENTS (function_or_method_type);
21332 function_or_method_type = TREE_TYPE (function_or_method_type);
21333 }
21334 else
21335 arg = NULL_TREE;
21336
21337 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
21338
21339 /* Make our first pass over the list of formal parameter types and output a
21340 DW_TAG_formal_parameter DIE for each one. */
21341 for (link = first_parm_type; link; )
21342 {
21343 dw_die_ref parm_die;
21344
21345 formal_type = TREE_VALUE (link);
21346 if (formal_type == void_type_node)
21347 break;
21348
21349 /* Output a (nameless) DIE to represent the formal parameter itself. */
21350 if (!POINTER_BOUNDS_TYPE_P (formal_type))
21351 {
21352 parm_die = gen_formal_parameter_die (formal_type, NULL,
21353 true /* Emit name attribute. */,
21354 context_die);
21355 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
21356 && link == first_parm_type)
21357 {
21358 add_AT_flag (parm_die, DW_AT_artificial, 1);
21359 if (dwarf_version >= 3 || !dwarf_strict)
21360 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
21361 }
21362 else if (arg && DECL_ARTIFICIAL (arg))
21363 add_AT_flag (parm_die, DW_AT_artificial, 1);
21364 }
21365
21366 link = TREE_CHAIN (link);
21367 if (arg)
21368 arg = DECL_CHAIN (arg);
21369 }
21370
21371 /* If this function type has an ellipsis, add a
21372 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
21373 if (formal_type != void_type_node)
21374 gen_unspecified_parameters_die (function_or_method_type, context_die);
21375
21376 /* Make our second (and final) pass over the list of formal parameter types
21377 and output DIEs to represent those types (as necessary). */
21378 for (link = TYPE_ARG_TYPES (function_or_method_type);
21379 link && TREE_VALUE (link);
21380 link = TREE_CHAIN (link))
21381 gen_type_die (TREE_VALUE (link), context_die);
21382 }
21383
21384 /* We want to generate the DIE for TYPE so that we can generate the
21385 die for MEMBER, which has been defined; we will need to refer back
21386 to the member declaration nested within TYPE. If we're trying to
21387 generate minimal debug info for TYPE, processing TYPE won't do the
21388 trick; we need to attach the member declaration by hand. */
21389
21390 static void
21391 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
21392 {
21393 gen_type_die (type, context_die);
21394
21395 /* If we're trying to avoid duplicate debug info, we may not have
21396 emitted the member decl for this function. Emit it now. */
21397 if (TYPE_STUB_DECL (type)
21398 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
21399 && ! lookup_decl_die (member))
21400 {
21401 dw_die_ref type_die;
21402 gcc_assert (!decl_ultimate_origin (member));
21403
21404 push_decl_scope (type);
21405 type_die = lookup_type_die_strip_naming_typedef (type);
21406 if (TREE_CODE (member) == FUNCTION_DECL)
21407 gen_subprogram_die (member, type_die);
21408 else if (TREE_CODE (member) == FIELD_DECL)
21409 {
21410 /* Ignore the nameless fields that are used to skip bits but handle
21411 C++ anonymous unions and structs. */
21412 if (DECL_NAME (member) != NULL_TREE
21413 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
21414 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
21415 {
21416 struct vlr_context vlr_ctx = {
21417 DECL_CONTEXT (member), /* struct_type */
21418 NULL_TREE /* variant_part_offset */
21419 };
21420 gen_type_die (member_declared_type (member), type_die);
21421 gen_field_die (member, &vlr_ctx, type_die);
21422 }
21423 }
21424 else
21425 gen_variable_die (member, NULL_TREE, type_die);
21426
21427 pop_decl_scope ();
21428 }
21429 }
21430 \f
21431 /* Forward declare these functions, because they are mutually recursive
21432 with their set_block_* pairing functions. */
21433 static void set_decl_origin_self (tree);
21434 static void set_decl_abstract_flags (tree, vec<tree> &);
21435
21436 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
21437 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
21438 that it points to the node itself, thus indicating that the node is its
21439 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
21440 the given node is NULL, recursively descend the decl/block tree which
21441 it is the root of, and for each other ..._DECL or BLOCK node contained
21442 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
21443 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
21444 values to point to themselves. */
21445
21446 static void
21447 set_block_origin_self (tree stmt)
21448 {
21449 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
21450 {
21451 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
21452
21453 {
21454 tree local_decl;
21455
21456 for (local_decl = BLOCK_VARS (stmt);
21457 local_decl != NULL_TREE;
21458 local_decl = DECL_CHAIN (local_decl))
21459 /* Do not recurse on nested functions since the inlining status
21460 of parent and child can be different as per the DWARF spec. */
21461 if (TREE_CODE (local_decl) != FUNCTION_DECL
21462 && !DECL_EXTERNAL (local_decl))
21463 set_decl_origin_self (local_decl);
21464 }
21465
21466 {
21467 tree subblock;
21468
21469 for (subblock = BLOCK_SUBBLOCKS (stmt);
21470 subblock != NULL_TREE;
21471 subblock = BLOCK_CHAIN (subblock))
21472 set_block_origin_self (subblock); /* Recurse. */
21473 }
21474 }
21475 }
21476
21477 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
21478 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
21479 node so that it points to the node itself, thus indicating that the
21480 node represents its own (abstract) origin. Additionally, if the
21481 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
21482 the decl/block tree of which the given node is the root, and for
21483 each other ..._DECL or BLOCK node contained therein whose
21484 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
21485 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
21486 point to themselves. */
21487
21488 static void
21489 set_decl_origin_self (tree decl)
21490 {
21491 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
21492 {
21493 DECL_ABSTRACT_ORIGIN (decl) = decl;
21494 if (TREE_CODE (decl) == FUNCTION_DECL)
21495 {
21496 tree arg;
21497
21498 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
21499 DECL_ABSTRACT_ORIGIN (arg) = arg;
21500 if (DECL_INITIAL (decl) != NULL_TREE
21501 && DECL_INITIAL (decl) != error_mark_node)
21502 set_block_origin_self (DECL_INITIAL (decl));
21503 }
21504 }
21505 }
21506 \f
21507 /* Given a pointer to some BLOCK node, set its BLOCK_ABSTRACT flag to 1,
21508 and if it wasn't 1 before, push the block onto the ABSTRACT_VEC vector.
21509 Do the same for all local decls and all local sub-blocks,
21510 recursively. */
21511
21512 static void
21513 set_block_abstract_flags (tree stmt, vec<tree> &abstract_vec)
21514 {
21515 tree local_decl;
21516 tree subblock;
21517 unsigned int i;
21518
21519 if (!BLOCK_ABSTRACT (stmt))
21520 {
21521 abstract_vec.safe_push (stmt);
21522 BLOCK_ABSTRACT (stmt) = 1;
21523 }
21524
21525 for (local_decl = BLOCK_VARS (stmt);
21526 local_decl != NULL_TREE;
21527 local_decl = DECL_CHAIN (local_decl))
21528 if (! DECL_EXTERNAL (local_decl))
21529 set_decl_abstract_flags (local_decl, abstract_vec);
21530
21531 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
21532 {
21533 local_decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
21534 if ((VAR_P (local_decl) && !TREE_STATIC (local_decl))
21535 || TREE_CODE (local_decl) == PARM_DECL)
21536 set_decl_abstract_flags (local_decl, abstract_vec);
21537 }
21538
21539 for (subblock = BLOCK_SUBBLOCKS (stmt);
21540 subblock != NULL_TREE;
21541 subblock = BLOCK_CHAIN (subblock))
21542 set_block_abstract_flags (subblock, abstract_vec);
21543 }
21544
21545 /* Given a pointer to some ..._DECL node, set its DECL_ABSTRACT_P flag
21546 to 1, and if it wasn't 1 before, push the decl onto the ABSTRACT_VEC
21547 vector. If the decl is a FUNCTION_DECL, also set the abstract
21548 flags for all of its parameters, local vars, local
21549 blocks and sub-blocks (recursively). */
21550
21551 static void
21552 set_decl_abstract_flags (tree decl, vec<tree> &abstract_vec)
21553 {
21554 if (!DECL_ABSTRACT_P (decl))
21555 {
21556 abstract_vec.safe_push (decl);
21557 DECL_ABSTRACT_P (decl) = 1;
21558 }
21559
21560 if (TREE_CODE (decl) == FUNCTION_DECL)
21561 {
21562 tree arg;
21563
21564 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
21565 if (!DECL_ABSTRACT_P (arg))
21566 {
21567 abstract_vec.safe_push (arg);
21568 DECL_ABSTRACT_P (arg) = 1;
21569 }
21570 if (DECL_INITIAL (decl) != NULL_TREE
21571 && DECL_INITIAL (decl) != error_mark_node)
21572 set_block_abstract_flags (DECL_INITIAL (decl), abstract_vec);
21573 }
21574 }
21575
21576 /* Generate the DWARF2 info for the "abstract" instance of a function which we
21577 may later generate inlined and/or out-of-line instances of.
21578
21579 FIXME: In the early-dwarf world, this function, and most of the
21580 DECL_ABSTRACT code should be obsoleted. The early DIE _is_
21581 the abstract instance. All we would need to do is annotate
21582 the early DIE with the appropriate DW_AT_inline in late
21583 dwarf (perhaps in gen_inlined_subroutine_die).
21584
21585 However, we can't do this yet, because LTO streaming of DIEs
21586 has not been implemented yet. */
21587
21588 static void
21589 dwarf2out_abstract_function (tree decl)
21590 {
21591 dw_die_ref old_die;
21592 tree save_fn;
21593 tree context;
21594 hash_table<decl_loc_hasher> *old_decl_loc_table;
21595 hash_table<dw_loc_list_hasher> *old_cached_dw_loc_list_table;
21596 int old_call_site_count, old_tail_call_site_count;
21597 struct call_arg_loc_node *old_call_arg_locations;
21598
21599 /* Make sure we have the actual abstract inline, not a clone. */
21600 decl = DECL_ORIGIN (decl);
21601
21602 old_die = lookup_decl_die (decl);
21603 if (old_die && get_AT (old_die, DW_AT_inline))
21604 /* We've already generated the abstract instance. */
21605 return;
21606
21607 /* We can be called recursively while processing a block that defines an
21608 inlined subroutine DIE. Be sure not to clobber the outer location table
21609 nor use it, or we would get locations in abstract instances. */
21610 old_decl_loc_table = decl_loc_table;
21611 decl_loc_table = NULL;
21612 old_cached_dw_loc_list_table = cached_dw_loc_list_table;
21613 cached_dw_loc_list_table = NULL;
21614 old_call_arg_locations = call_arg_locations;
21615 call_arg_locations = NULL;
21616 old_call_site_count = call_site_count;
21617 call_site_count = -1;
21618 old_tail_call_site_count = tail_call_site_count;
21619 tail_call_site_count = -1;
21620
21621 /* Be sure we've emitted the in-class declaration DIE (if any) first, so
21622 we don't get confused by DECL_ABSTRACT_P. */
21623 if (debug_info_level > DINFO_LEVEL_TERSE)
21624 {
21625 context = decl_class_context (decl);
21626 if (context)
21627 gen_type_die_for_member
21628 (context, decl, decl_function_context (decl) ? NULL : comp_unit_die ());
21629 }
21630
21631 /* Pretend we've just finished compiling this function. */
21632 save_fn = current_function_decl;
21633 current_function_decl = decl;
21634
21635 auto_vec<tree, 64> abstract_vec;
21636 set_decl_abstract_flags (decl, abstract_vec);
21637 dwarf2out_decl (decl);
21638 unsigned int i;
21639 tree t;
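/* Undo the abstract flags that set_decl_abstract_flags pushed onto
ABSTRACT_VEC above, so that they only stay set while the abstract
instance is being generated.  */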
21640 FOR_EACH_VEC_ELT (abstract_vec, i, t)
21641 if (TREE_CODE (t) == BLOCK)
21642 BLOCK_ABSTRACT (t) = 0;
21643 else
21644 DECL_ABSTRACT_P (t) = 0;
21645
21646 current_function_decl = save_fn;
21647 decl_loc_table = old_decl_loc_table;
21648 cached_dw_loc_list_table = old_cached_dw_loc_list_table;
21649 call_arg_locations = old_call_arg_locations;
21650 call_site_count = old_call_site_count;
21651 tail_call_site_count = old_tail_call_site_count;
21652 }
21653
21654 /* Helper function of premark_used_types() which gets called through
21655 htab_traverse.
21656
21657 Marks the DIE of the given TYPE as perennial, so it never gets
21658 marked as unused by prune_unused_types. */
21659
21660 bool
21661 premark_used_types_helper (tree const &type, void *)
21662 {
21663 dw_die_ref die;
21664
21665 die = lookup_type_die (type);
21666 if (die != NULL)
21667 die->die_perennial_p = 1;
21668 return true;
21669 }
21670
21671 /* Helper function of premark_types_used_by_global_vars which gets called
21672 through htab_traverse.
21673
21674 Marks the DIE of a given type in *SLOT as perennial, so it never gets
21675 marked as unused by prune_unused_types. The DIE of the type is marked
21676 only if the global variable using the type will actually be emitted. */
21677
21678 int
21679 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
21680 void *)
21681 {
21682 struct types_used_by_vars_entry *entry;
21683 dw_die_ref die;
21684
21685 entry = (struct types_used_by_vars_entry *) *slot;
21686 gcc_assert (entry->type != NULL
21687 && entry->var_decl != NULL);
21688 die = lookup_type_die (entry->type);
21689 if (die)
21690 {
21691 /* Ask cgraph if the global variable really is to be emitted.
21692 If yes, then we'll keep the DIE of ENTRY->TYPE. */
21693 varpool_node *node = varpool_node::get (entry->var_decl);
21694 if (node && node->definition)
21695 {
21696 die->die_perennial_p = 1;
21697 /* Keep the parent DIEs as well. */
21698 while ((die = die->die_parent) && die->die_perennial_p == 0)
21699 die->die_perennial_p = 1;
21700 }
21701 }
21702 return 1;
21703 }
21704
21705 /* Mark all members of used_types_hash as perennial. */
21706
21707 static void
21708 premark_used_types (struct function *fun)
21709 {
21710 if (fun && fun->used_types_hash)
21711 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
21712 }
21713
21714 /* Mark all members of types_used_by_vars_entry as perennial. */
21715
21716 static void
21717 premark_types_used_by_global_vars (void)
21718 {
21719 if (types_used_by_vars_hash)
21720 types_used_by_vars_hash
21721 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
21722 }
21723
21724 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
21725 for CA_LOC call arg loc node. */
21726
21727 static dw_die_ref
21728 gen_call_site_die (tree decl, dw_die_ref subr_die,
21729 struct call_arg_loc_node *ca_loc)
21730 {
21731 dw_die_ref stmt_die = NULL, die;
21732 tree block = ca_loc->block;
21733
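/* Attach the call site DIE to the DIE of the innermost enclosing lexical
block that already has one; fall back to the subprogram DIE itself.  */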
21734 while (block
21735 && block != DECL_INITIAL (decl)
21736 && TREE_CODE (block) == BLOCK)
21737 {
21738 stmt_die = BLOCK_DIE (block);
21739 if (stmt_die)
21740 break;
21741 block = BLOCK_SUPERCONTEXT (block);
21742 }
21743 if (stmt_die == NULL)
21744 stmt_die = subr_die;
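/* dwarf_TAG and dwarf_AT map the DWARF 5 call site codes to the
corresponding GNU extensions (DW_TAG_GNU_call_site etc.) when we are
not emitting DWARF 5.  */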
21745 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
21746 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
21747 if (ca_loc->tail_call_p)
21748 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
21749 if (ca_loc->symbol_ref)
21750 {
21751 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
21752 if (tdie)
21753 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
21754 else
21755 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
21756 false);
21757 }
21758 return die;
21759 }
21760
21761 /* Generate a DIE to represent a declared function (either file-scope or
21762 block-local). */
21763
21764 static void
21765 gen_subprogram_die (tree decl, dw_die_ref context_die)
21766 {
21767 tree origin = decl_ultimate_origin (decl);
21768 dw_die_ref subr_die;
21769 dw_die_ref old_die = lookup_decl_die (decl);
21770
21771 /* This function gets called multiple times for different stages of
21772 the debug process. For example, for func() in this code:
21773
21774 namespace S
21775 {
21776 void func() { ... }
21777 }
21778
21779 ...we get called 4 times. Twice in early debug and twice in
21780 late debug:
21781
21782 Early debug
21783 -----------
21784
21785 1. Once while generating func() within the namespace. This is
21786 the declaration. The declaration bit below is set, as the
21787 context is the namespace.
21788
21789 A new DIE will be generated with DW_AT_declaration set.
21790
21791 2. Once for func() itself. This is the specification. The
21792 declaration bit below is clear as the context is the CU.
21793
21794 We will use the cached DIE from (1) to create a new DIE with
21795 DW_AT_specification pointing to the declaration in (1).
21796
21797 Late debug via rest_of_handle_final()
21798 -------------------------------------
21799
21800 3. Once while generating func() within the namespace. This is also the
21801 declaration, as in (1), but this time we will early exit below
21802 as we have a cached DIE and a declaration needs no additional
21803 annotations (no locations), as the source declaration line
21804 info is enough.
21805
21806 4. Once for func() itself. As in (2), this is the specification,
21807 but this time we will re-use the cached DIE, and just annotate
21808 it with the location information that should now be available.
21809
21810 For something without namespaces, but with abstract instances, we
21811 are also called multiple times:
21812
21813 class Base
21814 {
21815 public:
21816 Base (); // constructor declaration (1)
21817 };
21818
21819 Base::Base () { } // constructor specification (2)
21820
21821 Early debug
21822 -----------
21823
21824 1. Once for the Base() constructor by virtue of it being a
21825 member of the Base class. This is done via
21826 rest_of_type_compilation.
21827
21828 This is a declaration, so a new DIE will be created with
21829 DW_AT_declaration.
21830
21831 2. Once for the Base() constructor definition, but this time
21832 while generating the abstract instance of the base
21833 constructor (__base_ctor) which is being generated via early
21834 debug of reachable functions.
21835
21836 Even though we have a cached version of the declaration (1),
21837 we will create a DW_AT_specification of the declaration DIE
21838 in (1).
21839
21840 3. Once for the __base_ctor itself, but this time, we generate
21841 a DW_AT_abstract_origin version of the DW_AT_specification in
21842 (2).
21843
21844 Late debug via rest_of_handle_final
21845 -----------------------------------
21846
21847 4. One final time for the __base_ctor (which will have a cached
21848 DIE with DW_AT_abstract_origin created in (3)). This time,
21849 we will just annotate the location information now
21850 available.
21851 */
21852 int declaration = (current_function_decl != decl
21853 || class_or_namespace_scope_p (context_die));
21854
21855 /* Now that the C++ front end lazily declares artificial member fns, we
21856 might need to retrofit the declaration into its class. */
21857 if (!declaration && !origin && !old_die
21858 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
21859 && !class_or_namespace_scope_p (context_die)
21860 && debug_info_level > DINFO_LEVEL_TERSE)
21861 old_die = force_decl_die (decl);
21862
21863 /* An inlined instance, tag a new DIE with DW_AT_abstract_origin. */
21864 if (origin != NULL)
21865 {
21866 gcc_assert (!declaration || local_scope_p (context_die));
21867
21868 /* Fixup die_parent for the abstract instance of a nested
21869 inline function. */
21870 if (old_die && old_die->die_parent == NULL)
21871 add_child_die (context_die, old_die);
21872
21873 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
21874 {
21875 /* If we have a DW_AT_abstract_origin we have a working
21876 cached version. */
21877 subr_die = old_die;
21878 }
21879 else
21880 {
21881 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
21882 add_abstract_origin_attribute (subr_die, origin);
21883 /* This is where the actual code for a cloned function is.
21884 Let's emit linkage name attribute for it. This helps
21885 debuggers to e.g, set breakpoints into
21886 constructors/destructors when the user asks "break
21887 K::K". */
21888 add_linkage_name (subr_die, decl);
21889 }
21890 }
21891 /* A cached copy, possibly from early dwarf generation. Reuse as
21892 much as possible. */
21893 else if (old_die)
21894 {
21895 /* A declaration that has been previously dumped needs no
21896 additional information. */
21897 if (declaration)
21898 return;
21899
21900 if (!get_AT_flag (old_die, DW_AT_declaration)
21901 /* We can have a normal definition following an inline one in the
21902 case of redefinition of GNU C extern inlines.
21903 It seems reasonable to use AT_specification in this case. */
21904 && !get_AT (old_die, DW_AT_inline))
21905 {
21906 /* Detect and ignore this case, where we are trying to output
21907 something we have already output. */
21908 if (get_AT (old_die, DW_AT_low_pc)
21909 || get_AT (old_die, DW_AT_ranges))
21910 return;
21911
21912 /* If we have no location information, this must be a
21913 partially generated DIE from early dwarf generation.
21914 Fall through and generate it. */
21915 }
21916
21917 /* If the definition comes from the same place as the declaration,
21918 maybe use the old DIE. We always want the DIE for this function
21919 that has the *_pc attributes to be under comp_unit_die so the
21920 debugger can find it. We also need to do this for abstract
21921 instances of inlines, since the spec requires the out-of-line copy
21922 to have the same parent. For local class methods, this doesn't
21923 apply; we just use the old DIE. */
21924 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
21925 struct dwarf_file_data * file_index = lookup_filename (s.file);
21926 if ((is_cu_die (old_die->die_parent)
21927 /* This condition fixes the inconsistency/ICE with the
21928 following Fortran test (or some derivative thereof) while
21929 building libgfortran:
21930
21931 module some_m
21932 contains
21933 logical function funky (FLAG)
21934 funky = .true.
21935 end function
21936 end module
21937 */
21938 || (old_die->die_parent
21939 && old_die->die_parent->die_tag == DW_TAG_module)
21940 || context_die == NULL)
21941 && (DECL_ARTIFICIAL (decl)
21942 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
21943 && (get_AT_unsigned (old_die, DW_AT_decl_line)
21944 == (unsigned) s.line)
21945 && (!debug_column_info
21946 || s.column == 0
21947 || (get_AT_unsigned (old_die, DW_AT_decl_column)
21948 == (unsigned) s.column)))))
21949 {
21950 subr_die = old_die;
21951
21952 /* Clear out the declaration attribute, but leave the
21953 parameters so they can be augmented with location
21954 information later. Unless this was a declaration, in
21955 which case, wipe out the nameless parameters and recreate
21956 them further down. */
21957 if (remove_AT (subr_die, DW_AT_declaration))
21958 {
21959
21960 remove_AT (subr_die, DW_AT_object_pointer);
21961 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
21962 }
21963 }
21964 /* Make a specification pointing to the previously built
21965 declaration. */
21966 else
21967 {
21968 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
21969 add_AT_specification (subr_die, old_die);
21970 add_pubname (decl, subr_die);
21971 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
21972 add_AT_file (subr_die, DW_AT_decl_file, file_index);
21973 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
21974 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
21975 if (debug_column_info
21976 && s.column
21977 && (get_AT_unsigned (old_die, DW_AT_decl_column)
21978 != (unsigned) s.column))
21979 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
21980
21981 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
21982 emit the real type on the definition die. */
21983 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
21984 {
21985 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
21986 if (die == auto_die || die == decltype_auto_die)
21987 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
21988 TYPE_UNQUALIFIED, false, context_die);
21989 }
21990
21991 /* When we process the method declaration, we haven't seen
21992 the out-of-class defaulted definition yet, so we have to
21993 recheck now. */
21994 if ((dwarf_version >= 5 || ! dwarf_strict)
21995 && !get_AT (subr_die, DW_AT_defaulted))
21996 {
21997 int defaulted
21998 = lang_hooks.decls.decl_dwarf_attribute (decl,
21999 DW_AT_defaulted);
22000 if (defaulted != -1)
22001 {
22002 /* Other values must have been handled before. */
22003 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
22004 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22005 }
22006 }
22007 }
22008 }
22009 /* Create a fresh DIE for anything else. */
22010 else
22011 {
22012 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22013
22014 if (TREE_PUBLIC (decl))
22015 add_AT_flag (subr_die, DW_AT_external, 1);
22016
22017 add_name_and_src_coords_attributes (subr_die, decl);
22018 add_pubname (decl, subr_die);
22019 if (debug_info_level > DINFO_LEVEL_TERSE)
22020 {
22021 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
22022 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22023 TYPE_UNQUALIFIED, false, context_die);
22024 }
22025
22026 add_pure_or_virtual_attribute (subr_die, decl);
22027 if (DECL_ARTIFICIAL (decl))
22028 add_AT_flag (subr_die, DW_AT_artificial, 1);
22029
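/* On a FUNCTION_DECL, TREE_THIS_VOLATILE means the function does not
return; DW_AT_noreturn is a DWARF 5 addition.  */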
22030 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
22031 add_AT_flag (subr_die, DW_AT_noreturn, 1);
22032
22033 add_alignment_attribute (subr_die, decl);
22034
22035 add_accessibility_attribute (subr_die, decl);
22036 }
22037
22038 /* Unless we have an existing non-declaration DIE, equate the new
22039 DIE. */
22040 if (!old_die || is_declaration_die (old_die))
22041 equate_decl_number_to_die (decl, subr_die);
22042
22043 if (declaration)
22044 {
22045 if (!old_die || !get_AT (old_die, DW_AT_inline))
22046 {
22047 add_AT_flag (subr_die, DW_AT_declaration, 1);
22048
22049 /* If this is an explicit function declaration then generate
22050 a DW_AT_explicit attribute. */
22051 if ((dwarf_version >= 3 || !dwarf_strict)
22052 && lang_hooks.decls.decl_dwarf_attribute (decl,
22053 DW_AT_explicit) == 1)
22054 add_AT_flag (subr_die, DW_AT_explicit, 1);
22055
22056 /* If this is a C++11 deleted special function member then generate
22057 a DW_AT_deleted attribute. */
22058 if ((dwarf_version >= 5 || !dwarf_strict)
22059 && lang_hooks.decls.decl_dwarf_attribute (decl,
22060 DW_AT_deleted) == 1)
22061 add_AT_flag (subr_die, DW_AT_deleted, 1);
22062
22063 /* If this is a C++11 defaulted special function member then
22064 generate a DW_AT_defaulted attribute. */
22065 if (dwarf_version >= 5 || !dwarf_strict)
22066 {
22067 int defaulted
22068 = lang_hooks.decls.decl_dwarf_attribute (decl,
22069 DW_AT_defaulted);
22070 if (defaulted != -1)
22071 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22072 }
22073
22074 /* If this is a C++11 non-static member function with & ref-qualifier
22075 then generate a DW_AT_reference attribute. */
22076 if ((dwarf_version >= 5 || !dwarf_strict)
22077 && lang_hooks.decls.decl_dwarf_attribute (decl,
22078 DW_AT_reference) == 1)
22079 add_AT_flag (subr_die, DW_AT_reference, 1);
22080
22081 /* If this is a C++11 non-static member function with &&
22082 ref-qualifier then generate a DW_AT_reference attribute. */
22083 if ((dwarf_version >= 5 || !dwarf_strict)
22084 && lang_hooks.decls.decl_dwarf_attribute (decl,
22085 DW_AT_rvalue_reference)
22086 == 1)
22087 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
22088 }
22089 }
22090 /* Tag abstract instances with DW_AT_inline. */
22091 else if (DECL_ABSTRACT_P (decl))
22092 {
22093 if (DECL_DECLARED_INLINE_P (decl))
22094 {
22095 if (cgraph_function_possibly_inlined_p (decl))
22096 add_AT_unsigned (subr_die, DW_AT_inline, DW_INL_declared_inlined);
22097 else
22098 add_AT_unsigned (subr_die, DW_AT_inline, DW_INL_declared_not_inlined);
22099 }
22100 else
22101 {
22102 if (cgraph_function_possibly_inlined_p (decl))
22103 add_AT_unsigned (subr_die, DW_AT_inline, DW_INL_inlined);
22104 else
22105 add_AT_unsigned (subr_die, DW_AT_inline, DW_INL_not_inlined);
22106 }
22107
22108 if (DECL_DECLARED_INLINE_P (decl)
22109 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
22110 add_AT_flag (subr_die, DW_AT_artificial, 1);
22111 }
22112 /* For non DECL_EXTERNALs, if range information is available, fill
22113 the DIE with it. */
22114 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
22115 {
22116 HOST_WIDE_INT cfa_fb_offset;
22117
22118 struct function *fun = DECL_STRUCT_FUNCTION (decl);
22119
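/* Without hot/cold partitioning the function body is a single contiguous
range, so a plain low/high PC pair is enough; with partitioning we have
to describe both code sections, see the else branch below.  */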
22120 if (!flag_reorder_blocks_and_partition)
22121 {
22122 dw_fde_ref fde = fun->fde;
22123 if (fde->dw_fde_begin)
22124 {
22125 /* We have already generated the labels. */
22126 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22127 fde->dw_fde_end, false);
22128 }
22129 else
22130 {
22131 /* Create start/end labels and add the range. */
22132 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
22133 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
22134 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
22135 current_function_funcdef_no);
22136 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
22137 current_function_funcdef_no);
22138 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
22139 false);
22140 }
22141
22142 #if VMS_DEBUGGING_INFO
22143 /* HP OpenVMS Industry Standard 64: DWARF Extensions
22144 Section 2.3 Prologue and Epilogue Attributes:
22145 When a breakpoint is set on entry to a function, it is generally
22146 desirable for execution to be suspended, not on the very first
22147 instruction of the function, but rather at a point after the
22148 function's frame has been set up, after any language defined local
22149 declaration processing has been completed, and before execution of
22150 the first statement of the function begins. Debuggers generally
22151 cannot properly determine where this point is. Similarly for a
22152 breakpoint set on exit from a function. The prologue and epilogue
22153 attributes allow a compiler to communicate the location(s) to use. */
22154
22155 {
22156 if (fde->dw_fde_vms_end_prologue)
22157 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
22158 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
22159
22160 if (fde->dw_fde_vms_begin_epilogue)
22161 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
22162 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
22163 }
22164 #endif
22165
22166 }
22167 else
22168 {
22169 /* Generate pubnames entries for the split function code ranges. */
22170 dw_fde_ref fde = fun->fde;
22171
22172 if (fde->dw_fde_second_begin)
22173 {
22174 if (dwarf_version >= 3 || !dwarf_strict)
22175 {
22176 /* We should use ranges for non-contiguous code section
22177 addresses. Use the actual code range for the initial
22178 section, since the HOT/COLD labels might precede an
22179 alignment offset. */
22180 bool range_list_added = false;
22181 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
22182 fde->dw_fde_end, &range_list_added,
22183 false);
22184 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
22185 fde->dw_fde_second_end,
22186 &range_list_added, false);
22187 if (range_list_added)
22188 add_ranges (NULL);
22189 }
22190 else
22191 {
22192 /* There is no real support in DW2 for this, so we make
22193 a work-around. First, emit the pub name for the segment
22194 containing the function label. Then make and emit a
22195 simplified subprogram DIE for the second segment with the
22196 name prefixed by __second_sect_of_. We use the same
22197 linkage name for the second DIE so that gdb will find both
22198 sections when given "b foo". */
22199 const char *name = NULL;
22200 tree decl_name = DECL_NAME (decl);
22201 dw_die_ref seg_die;
22202
22203 /* Do the 'primary' section. */
22204 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22205 fde->dw_fde_end, false);
22206
22207 /* Build a minimal DIE for the secondary section. */
22208 seg_die = new_die (DW_TAG_subprogram,
22209 subr_die->die_parent, decl);
22210
22211 if (TREE_PUBLIC (decl))
22212 add_AT_flag (seg_die, DW_AT_external, 1);
22213
22214 if (decl_name != NULL
22215 && IDENTIFIER_POINTER (decl_name) != NULL)
22216 {
22217 name = dwarf2_name (decl, 1);
22218 if (! DECL_ARTIFICIAL (decl))
22219 add_src_coords_attributes (seg_die, decl);
22220
22221 add_linkage_name (seg_die, decl);
22222 }
22223 gcc_assert (name != NULL);
22224 add_pure_or_virtual_attribute (seg_die, decl);
22225 if (DECL_ARTIFICIAL (decl))
22226 add_AT_flag (seg_die, DW_AT_artificial, 1);
22227
22228 name = concat ("__second_sect_of_", name, NULL);
22229 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
22230 fde->dw_fde_second_end, false);
22231 add_name_attribute (seg_die, name);
22232 if (want_pubnames ())
22233 add_pubname_string (name, seg_die);
22234 }
22235 }
22236 else
22237 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
22238 false);
22239 }
22240
22241 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
22242
22243 /* We define the "frame base" as the function's CFA. This is more
22244 convenient for several reasons: (1) It's stable across the prologue
22245 and epilogue, which makes it better than just a frame pointer,
22246 (2) With dwarf3, there exists a one-byte encoding that allows us
22247 to reference the .debug_frame data by proxy, but failing that,
22248 (3) We can at least reuse the code inspection and interpretation
22249 code that determines the CFA position at various points in the
22250 function. */
22251 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
22252 {
22253 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
22254 add_AT_loc (subr_die, DW_AT_frame_base, op);
22255 }
22256 else
22257 {
22258 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
22259 if (list->dw_loc_next)
22260 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
22261 else
22262 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
22263 }
22264
22265 /* Compute a displacement from the "steady-state frame pointer" to
22266 the CFA. The former is what all stack slots and argument slots
22267 will reference in the rtl; the latter is what we've told the
22268 debugger about. We'll need to adjust all frame_base references
22269 by this displacement. */
22270 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
22271
22272 if (fun->static_chain_decl)
22273 {
22274 /* DWARF requires here a location expression that computes the
22275 address of the enclosing subprogram's frame base. The machinery
22276 in tree-nested.c is supposed to store this specific address in the
22277 last field of the FRAME record. */
22278 const tree frame_type
22279 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
22280 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
22281
22282 tree fb_expr
22283 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
22284 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
22285 fb_expr, fb_decl, NULL_TREE);
22286
22287 add_AT_location_description (subr_die, DW_AT_static_link,
22288 loc_list_from_tree (fb_expr, 0, NULL));
22289 }
22290 }
22291
22292 /* Generate child DIEs for template parameters. */
22293 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
22294 gen_generic_params_dies (decl);
22295
22296 /* Now output descriptions of the arguments for this function. This gets
22297 (unnecessarily?) complex because the DECL_ARGUMENTS list
22298 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
22299 `...' at the end of the formal parameter list. In order to find out if
22300 there was a trailing ellipsis or not, we must instead look at the type
22301 associated with the FUNCTION_DECL. This will be a node of type
22302 FUNCTION_TYPE. If the chain of type nodes hanging off of this
22303 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
22304 an ellipsis at the end. */
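/* For example, for `int f (int)' the TYPE_ARG_TYPES chain is (int, void),
while for `int g (int, ...)' it is just (int), with no trailing
void_type_node.  */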
22305
22306 /* In the case where we are describing a mere function declaration, all we
22307 need to do here (and all we *can* do here) is to describe the *types* of
22308 its formal parameters. */
22309 if (debug_info_level <= DINFO_LEVEL_TERSE)
22310 ;
22311 else if (declaration)
22312 gen_formal_types_die (decl, subr_die);
22313 else
22314 {
22315 /* Generate DIEs to represent all known formal parameters. */
22316 tree parm = DECL_ARGUMENTS (decl);
22317 tree generic_decl = early_dwarf
22318 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
22319 tree generic_decl_parm = generic_decl
22320 ? DECL_ARGUMENTS (generic_decl)
22321 : NULL;
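/* Collect the DW_TAG_string_type DIEs created while emitting the
parameters, unless an outer invocation is already collecting them; they
are fixed up by adjust_string_types below once all parameter DIEs
exist.  */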
22322 auto_vec<dw_die_ref> string_types_vec;
22323 if (string_types == NULL)
22324 string_types = &string_types_vec;
22325
22326 /* Now we want to walk the list of parameters of the function and
22327 emit their relevant DIEs.
22328
22329 We consider the case of DECL being an instance of a generic function
22330 as well as it being a normal function.
22331
22332 If DECL is an instance of a generic function we walk the
22333 parameters of the generic function declaration _and_ the parameters of
22334 DECL itself. This is useful because we want to emit specific DIEs for
22335 function parameter packs and those are declared as part of the
22336 generic function declaration. In that particular case,
22337 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
22338 That DIE has children DIEs representing the set of arguments
22339 of the pack. Note that the set of pack arguments can be empty.
22340 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
22341 child DIEs.
22342
22343 Otherwise, we just consider the parameters of DECL. */
22344 while (generic_decl_parm || parm)
22345 {
22346 if (generic_decl_parm
22347 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
22348 gen_formal_parameter_pack_die (generic_decl_parm,
22349 parm, subr_die,
22350 &parm);
22351 else if (parm && !POINTER_BOUNDS_P (parm))
22352 {
22353 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
22354
22355 if (parm == DECL_ARGUMENTS (decl)
22356 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
22357 && parm_die
22358 && (dwarf_version >= 3 || !dwarf_strict))
22359 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
22360
22361 parm = DECL_CHAIN (parm);
22362 }
22363 else if (parm)
22364 parm = DECL_CHAIN (parm);
22365
22366 if (generic_decl_parm)
22367 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
22368 }
22369
22370 /* Decide whether we need an unspecified_parameters DIE at the end.
22371 There are two cases to do this for: 1) the ANSI `...' declaration,
22372 which is detectable when the end of the arg list is not a
22373 void_type_node; 2) an unprototyped function declaration (not a
22374 definition), which just means that we have no info about the
22375 parameters at all. */
22376 if (early_dwarf)
22377 {
22378 if (prototype_p (TREE_TYPE (decl)))
22379 {
22380 /* This is the prototyped case; check for a trailing ellipsis. */
22381 if (stdarg_p (TREE_TYPE (decl)))
22382 gen_unspecified_parameters_die (decl, subr_die);
22383 }
22384 else if (DECL_INITIAL (decl) == NULL_TREE)
22385 gen_unspecified_parameters_die (decl, subr_die);
22386 }
22387
22388 /* Adjust DW_TAG_string_type DIEs if needed, now that all arguments
22389 have DIEs. */
22390 if (string_types == &string_types_vec)
22391 {
22392 adjust_string_types ();
22393 string_types = NULL;
22394 }
22395 }
22396
22397 if (subr_die != old_die)
22398 /* Add the calling convention attribute if requested. */
22399 add_calling_convention_attribute (subr_die, decl);
22400
22401 /* Output Dwarf info for all of the stuff within the body of the function
22402 (if it has one - it may be just a declaration).
22403
22404 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
22405 a function. This BLOCK actually represents the outermost binding contour
22406 for the function, i.e. the contour in which the function's formal
22407 parameters and labels get declared. Curiously, it appears that the front
22408 end doesn't actually put the PARM_DECL nodes for the current function onto
22409 the BLOCK_VARS list for this outer scope; they are strung off of the
22410 DECL_ARGUMENTS list for the function instead.
22411
22412 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
22413 the LABEL_DECL nodes for the function however, and we output DWARF info
22414 for those in decls_for_scope. Just within the `outer_scope' there will be
22415 a BLOCK node representing the function's outermost pair of curly braces,
22416 and any blocks used for the base and member initializers of a C++
22417 constructor function. */
22418 tree outer_scope = DECL_INITIAL (decl);
22419 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
22420 {
22421 int call_site_note_count = 0;
22422 int tail_call_site_note_count = 0;
22423
22424 /* Emit a DW_TAG_variable DIE for a named return value. */
22425 if (DECL_NAME (DECL_RESULT (decl)))
22426 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
22427
22428 /* The first time through decls_for_scope we will generate the
22429 DIEs for the locals. The second time, we fill in the
22430 location info. */
22431 decls_for_scope (outer_scope, subr_die);
22432
22433 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
22434 {
22435 struct call_arg_loc_node *ca_loc;
22436 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
22437 {
22438 dw_die_ref die = NULL;
22439 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
22440 rtx arg, next_arg;
22441
22442 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
22443 ? NOTE_VAR_LOCATION (ca_loc->call_arg_loc_note)
22444 : NULL_RTX);
22445 arg; arg = next_arg)
22446 {
22447 dw_loc_descr_ref reg, val;
22448 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
22449 dw_die_ref cdie, tdie = NULL;
22450
22451 next_arg = XEXP (arg, 1);
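/* If this entry is a register and the next one is memory addressed by
that same register (typically an argument passed by reference), fold
the two into a single call site parameter: the second entry then
supplies the DW_AT_call_data_value (or its GNU counterpart) added
further below.  */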
22452 if (REG_P (XEXP (XEXP (arg, 0), 0))
22453 && next_arg
22454 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
22455 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
22456 && REGNO (XEXP (XEXP (arg, 0), 0))
22457 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
22458 next_arg = XEXP (next_arg, 1);
22459 if (mode == VOIDmode)
22460 {
22461 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
22462 if (mode == VOIDmode)
22463 mode = GET_MODE (XEXP (arg, 0));
22464 }
22465 if (mode == VOIDmode || mode == BLKmode)
22466 continue;
22467 /* Get dynamic information about the call target only if we
22468 have no static information: we cannot generate both
22469 DW_AT_call_origin and DW_AT_call_target
22470 attributes. */
22471 if (ca_loc->symbol_ref == NULL_RTX)
22472 {
22473 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
22474 {
22475 tloc = XEXP (XEXP (arg, 0), 1);
22476 continue;
22477 }
22478 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
22479 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
22480 {
22481 tlocc = XEXP (XEXP (arg, 0), 1);
22482 continue;
22483 }
22484 }
22485 reg = NULL;
22486 if (REG_P (XEXP (XEXP (arg, 0), 0)))
22487 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
22488 VAR_INIT_STATUS_INITIALIZED);
22489 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
22490 {
22491 rtx mem = XEXP (XEXP (arg, 0), 0);
22492 reg = mem_loc_descriptor (XEXP (mem, 0),
22493 get_address_mode (mem),
22494 GET_MODE (mem),
22495 VAR_INIT_STATUS_INITIALIZED);
22496 }
22497 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
22498 == DEBUG_PARAMETER_REF)
22499 {
22500 tree tdecl
22501 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
22502 tdie = lookup_decl_die (tdecl);
22503 if (tdie == NULL)
22504 continue;
22505 }
22506 else
22507 continue;
22508 if (reg == NULL
22509 && GET_CODE (XEXP (XEXP (arg, 0), 0))
22510 != DEBUG_PARAMETER_REF)
22511 continue;
22512 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
22513 VOIDmode,
22514 VAR_INIT_STATUS_INITIALIZED);
22515 if (val == NULL)
22516 continue;
22517 if (die == NULL)
22518 die = gen_call_site_die (decl, subr_die, ca_loc);
22519 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
22520 NULL_TREE);
22521 if (reg != NULL)
22522 add_AT_loc (cdie, DW_AT_location, reg);
22523 else if (tdie != NULL)
22524 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
22525 tdie);
22526 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
22527 if (next_arg != XEXP (arg, 1))
22528 {
22529 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
22530 if (mode == VOIDmode)
22531 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
22532 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
22533 0), 1),
22534 mode, VOIDmode,
22535 VAR_INIT_STATUS_INITIALIZED);
22536 if (val != NULL)
22537 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
22538 val);
22539 }
22540 }
22541 if (die == NULL
22542 && (ca_loc->symbol_ref || tloc))
22543 die = gen_call_site_die (decl, subr_die, ca_loc);
22544 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
22545 {
22546 dw_loc_descr_ref tval = NULL;
22547
22548 if (tloc != NULL_RTX)
22549 tval = mem_loc_descriptor (tloc,
22550 GET_MODE (tloc) == VOIDmode
22551 ? Pmode : GET_MODE (tloc),
22552 VOIDmode,
22553 VAR_INIT_STATUS_INITIALIZED);
22554 if (tval)
22555 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
22556 else if (tlocc != NULL_RTX)
22557 {
22558 tval = mem_loc_descriptor (tlocc,
22559 GET_MODE (tlocc) == VOIDmode
22560 ? Pmode : GET_MODE (tlocc),
22561 VOIDmode,
22562 VAR_INIT_STATUS_INITIALIZED);
22563 if (tval)
22564 add_AT_loc (die,
22565 dwarf_AT (DW_AT_call_target_clobbered),
22566 tval);
22567 }
22568 }
22569 if (die != NULL)
22570 {
22571 call_site_note_count++;
22572 if (ca_loc->tail_call_p)
22573 tail_call_site_note_count++;
22574 }
22575 }
22576 }
22577 call_arg_locations = NULL;
22578 call_arg_loc_last = NULL;
22579 if (tail_call_site_count >= 0
22580 && tail_call_site_count == tail_call_site_note_count
22581 && (!dwarf_strict || dwarf_version >= 5))
22582 {
22583 if (call_site_count >= 0
22584 && call_site_count == call_site_note_count)
22585 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
22586 else
22587 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
22588 }
22589 call_site_count = -1;
22590 tail_call_site_count = -1;
22591 }
22592
22593 /* Mark used types after we have created DIEs for the function's scopes. */
22594 premark_used_types (DECL_STRUCT_FUNCTION (decl));
22595 }
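
/* As a rough illustration of the call-site information built in the loop
   above (DWARF 5 spellings are shown; for pre-DWARF 5 output the
   corresponding DW_TAG_GNU_call_site tags and DW_AT_GNU_* attributes are
   used instead), a call such as

       foo (x, 5);

   where X is live in a register at the call site typically yields

       DW_TAG_call_site
         DW_AT_call_return_pc       <label just past the call insn>
         DW_TAG_call_site_parameter
           DW_AT_location           <register holding the first argument>
           DW_AT_call_value         <DWARF expression for X at the call>
         DW_TAG_call_site_parameter
           DW_AT_location           <register holding the second argument>
           DW_AT_call_value         DW_OP_lit5

   The registers and value expressions are target and ABI dependent; this
   sketch only shows the shape of the DIEs.  */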
22596
22597 /* Returns a hash value for X (which really is a die_struct). */
22598
22599 hashval_t
22600 block_die_hasher::hash (die_struct *d)
22601 {
22602 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
22603 }
22604
22605 /* Return true if the decl_id and die_parent of die_struct X are the same
22606 as the decl_id and die_parent of die_struct Y. */
22607
22608 bool
22609 block_die_hasher::equal (die_struct *x, die_struct *y)
22610 {
22611 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
22612 }
22613
22614 /* Return TRUE if DECL, which may have been previously generated as
22615 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
22616 true if decl (or its origin) is either an extern declaration or a
22617 class/namespace scoped declaration.
22618
22619 The declare_in_namespace support causes us to get two DIEs for one
22620 variable, both of which are declarations. We want to avoid
22621 considering one to be a specification, so we must test for
22622 DECLARATION and DW_AT_declaration. */
22623 static inline bool
22624 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
22625 {
22626 return (old_die && TREE_STATIC (decl) && !declaration
22627 && get_AT_flag (old_die, DW_AT_declaration) == 1);
22628 }
22629
22630 /* Return true if DECL is a local static. */
22631
22632 static inline bool
22633 local_function_static (tree decl)
22634 {
22635 gcc_assert (VAR_P (decl));
22636 return TREE_STATIC (decl)
22637 && DECL_CONTEXT (decl)
22638 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
22639 }
22640
22641 /* Generate a DIE to represent a declared data object.
22642 Either DECL or ORIGIN must be non-null. */
22643
22644 static void
22645 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
22646 {
22647 HOST_WIDE_INT off = 0;
22648 tree com_decl;
22649 tree decl_or_origin = decl ? decl : origin;
22650 tree ultimate_origin;
22651 dw_die_ref var_die;
22652 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
22653 bool declaration = (DECL_EXTERNAL (decl_or_origin)
22654 || class_or_namespace_scope_p (context_die));
22655 bool specialization_p = false;
22656 bool no_linkage_name = false;
22657
22658 /* While C++ inline static data members have definitions inside the
22659 class, force the first DIE to be a declaration, then let gen_member_die
22660 reparent it to the class context and call gen_variable_die again
22661 to create the outside-of-class DIE for the definition. */
22662 if (!declaration
22663 && old_die == NULL
22664 && decl
22665 && DECL_CONTEXT (decl)
22666 && TYPE_P (DECL_CONTEXT (decl))
22667 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
22668 {
22669 declaration = true;
22670 no_linkage_name = true;
22671 }
22672
22673 ultimate_origin = decl_ultimate_origin (decl_or_origin);
22674 if (decl || ultimate_origin)
22675 origin = ultimate_origin;
22676 com_decl = fortran_common (decl_or_origin, &off);
22677
22678 /* A symbol in a common block gets emitted as a child of the common block,
22679 in the form of a data member. */
22680 if (com_decl)
22681 {
22682 dw_die_ref com_die;
22683 dw_loc_list_ref loc = NULL;
22684 die_node com_die_arg;
22685
22686 var_die = lookup_decl_die (decl_or_origin);
22687 if (var_die)
22688 {
22689 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
22690 {
22691 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
22692 if (loc)
22693 {
22694 if (off)
22695 {
22696 /* Optimize the common case. */
22697 if (single_element_loc_list_p (loc)
22698 && loc->expr->dw_loc_opc == DW_OP_addr
22699 && loc->expr->dw_loc_next == NULL
22700 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
22701 == SYMBOL_REF)
22702 {
22703 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
22704 loc->expr->dw_loc_oprnd1.v.val_addr
22705 = plus_constant (GET_MODE (x), x , off);
22706 }
22707 else
22708 loc_list_plus_const (loc, off);
22709 }
22710 add_AT_location_description (var_die, DW_AT_location, loc);
22711 remove_AT (var_die, DW_AT_declaration);
22712 }
22713 }
22714 return;
22715 }
22716
22717 if (common_block_die_table == NULL)
22718 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
22719
22720 com_die_arg.decl_id = DECL_UID (com_decl);
22721 com_die_arg.die_parent = context_die;
22722 com_die = common_block_die_table->find (&com_die_arg);
22723 if (! early_dwarf)
22724 loc = loc_list_from_tree (com_decl, 2, NULL);
22725 if (com_die == NULL)
22726 {
22727 const char *cnam
22728 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
22729 die_node **slot;
22730
22731 com_die = new_die (DW_TAG_common_block, context_die, decl);
22732 add_name_and_src_coords_attributes (com_die, com_decl);
22733 if (loc)
22734 {
22735 add_AT_location_description (com_die, DW_AT_location, loc);
22736 /* Avoid sharing the same loc descriptor between
22737 DW_TAG_common_block and DW_TAG_variable. */
22738 loc = loc_list_from_tree (com_decl, 2, NULL);
22739 }
22740 else if (DECL_EXTERNAL (decl_or_origin))
22741 add_AT_flag (com_die, DW_AT_declaration, 1);
22742 if (want_pubnames ())
22743 add_pubname_string (cnam, com_die); /* ??? needed? */
22744 com_die->decl_id = DECL_UID (com_decl);
22745 slot = common_block_die_table->find_slot (com_die, INSERT);
22746 *slot = com_die;
22747 }
22748 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
22749 {
22750 add_AT_location_description (com_die, DW_AT_location, loc);
22751 loc = loc_list_from_tree (com_decl, 2, NULL);
22752 remove_AT (com_die, DW_AT_declaration);
22753 }
22754 var_die = new_die (DW_TAG_variable, com_die, decl);
22755 add_name_and_src_coords_attributes (var_die, decl_or_origin);
22756 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
22757 decl_quals (decl_or_origin), false,
22758 context_die);
22759 add_alignment_attribute (var_die, decl);
22760 add_AT_flag (var_die, DW_AT_external, 1);
22761 if (loc)
22762 {
22763 if (off)
22764 {
22765 /* Optimize the common case. */
22766 if (single_element_loc_list_p (loc)
22767 && loc->expr->dw_loc_opc == DW_OP_addr
22768 && loc->expr->dw_loc_next == NULL
22769 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
22770 {
22771 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
22772 loc->expr->dw_loc_oprnd1.v.val_addr
22773 = plus_constant (GET_MODE (x), x, off);
22774 }
22775 else
22776 loc_list_plus_const (loc, off);
22777 }
22778 add_AT_location_description (var_die, DW_AT_location, loc);
22779 }
22780 else if (DECL_EXTERNAL (decl_or_origin))
22781 add_AT_flag (var_die, DW_AT_declaration, 1);
22782 if (decl)
22783 equate_decl_number_to_die (decl, var_die);
22784 return;
22785 }
22786
22787 if (old_die)
22788 {
22789 if (declaration)
22790 {
22791 /* A declaration that has been previously dumped needs no
22792 further annotations, since it doesn't need location info on
22793 the second pass. */
22794 return;
22795 }
22796 else if (decl_will_get_specification_p (old_die, decl, declaration)
22797 && !get_AT (old_die, DW_AT_specification))
22798 {
22799 /* Fall-thru so we can make a new variable die along with a
22800 DW_AT_specification. */
22801 }
22802 else if (origin && old_die->die_parent != context_die)
22803 {
22804 /* If we will be creating an inlined instance, we need a
22805 new DIE that will get annotated with
22806 DW_AT_abstract_origin. Clear things so we can get a
22807 new DIE. */
22808 gcc_assert (!DECL_ABSTRACT_P (decl));
22809 old_die = NULL;
22810 }
22811 else
22812 {
22813 /* If a DIE was dumped early, it still needs location info.
22814 Skip to where we fill the location bits. */
22815 var_die = old_die;
22816 goto gen_variable_die_location;
22817 }
22818 }
22819
22820 /* For static data members, the declaration in the class is supposed
22821 to have DW_TAG_member tag; the specification should still be
22822 DW_TAG_variable referencing the DW_TAG_member DIE. */
22823 if (declaration && class_scope_p (context_die))
22824 var_die = new_die (DW_TAG_member, context_die, decl);
22825 else
22826 var_die = new_die (DW_TAG_variable, context_die, decl);
22827
22828 if (origin != NULL)
22829 add_abstract_origin_attribute (var_die, origin);
22830
22831 /* Loop unrolling can create multiple blocks that refer to the same
22832 static variable, so we must test for the DW_AT_declaration flag.
22833
22834 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
22835 copy decls and set the DECL_ABSTRACT_P flag on them instead of
22836 sharing them.
22837
22838 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
22839 else if (decl_will_get_specification_p (old_die, decl, declaration))
22840 {
22841 /* This is a definition of a C++ class level static. */
22842 add_AT_specification (var_die, old_die);
22843 specialization_p = true;
22844 if (DECL_NAME (decl))
22845 {
22846 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22847 struct dwarf_file_data * file_index = lookup_filename (s.file);
22848
22849 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
22850 add_AT_file (var_die, DW_AT_decl_file, file_index);
22851
22852 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
22853 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
22854
22855 if (debug_column_info
22856 && s.column
22857 && (get_AT_unsigned (old_die, DW_AT_decl_column)
22858 != (unsigned) s.column))
22859 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
22860
22861 if (old_die->die_tag == DW_TAG_member)
22862 add_linkage_name (var_die, decl);
22863 }
22864 }
22865 else
22866 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
22867
22868 if ((origin == NULL && !specialization_p)
22869 || (origin != NULL
22870 && !DECL_ABSTRACT_P (decl_or_origin)
22871 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
22872 decl_function_context
22873 (decl_or_origin))))
22874 {
22875 tree type = TREE_TYPE (decl_or_origin);
22876
22877 if (decl_by_reference_p (decl_or_origin))
22878 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
22879 context_die);
22880 else
22881 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
22882 context_die);
22883 }
22884
22885 if (origin == NULL && !specialization_p)
22886 {
22887 if (TREE_PUBLIC (decl))
22888 add_AT_flag (var_die, DW_AT_external, 1);
22889
22890 if (DECL_ARTIFICIAL (decl))
22891 add_AT_flag (var_die, DW_AT_artificial, 1);
22892
22893 add_alignment_attribute (var_die, decl);
22894
22895 add_accessibility_attribute (var_die, decl);
22896 }
22897
22898 if (declaration)
22899 add_AT_flag (var_die, DW_AT_declaration, 1);
22900
22901 if (decl && (DECL_ABSTRACT_P (decl)
22902 || !old_die || is_declaration_die (old_die)))
22903 equate_decl_number_to_die (decl, var_die);
22904
22905 gen_variable_die_location:
22906 if (! declaration
22907 && (! DECL_ABSTRACT_P (decl_or_origin)
22908 /* Local static vars are shared between all clones/inlines,
22909 so emit DW_AT_location on the abstract DIE if DECL_RTL is
22910 already set. */
22911 || (VAR_P (decl_or_origin)
22912 && TREE_STATIC (decl_or_origin)
22913 && DECL_RTL_SET_P (decl_or_origin))))
22914 {
22915 if (early_dwarf)
22916 add_pubname (decl_or_origin, var_die);
22917 else
22918 add_location_or_const_value_attribute (var_die, decl_or_origin,
22919 decl == NULL);
22920 }
22921 else
22922 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
22923
22924 if ((dwarf_version >= 4 || !dwarf_strict)
22925 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
22926 DW_AT_const_expr) == 1
22927 && !get_AT (var_die, DW_AT_const_expr)
22928 && !specialization_p)
22929 add_AT_flag (var_die, DW_AT_const_expr, 1);
22930
22931 if (!dwarf_strict)
22932 {
22933 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
22934 DW_AT_inline);
22935 if (inl != -1
22936 && !get_AT (var_die, DW_AT_inline)
22937 && !specialization_p)
22938 add_AT_unsigned (var_die, DW_AT_inline, inl);
22939 }
22940 }
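
/* Purely as an illustration of the common-block handling above (names and
   offsets are made up), a Fortran common block

       COMMON /WORK/ A, B
       REAL A, B

   comes out roughly as

       DW_TAG_common_block "work"
         DW_AT_location   DW_OP_addr <symbol of the block>
         DW_TAG_variable "a"
           DW_AT_location DW_OP_addr <symbol of the block>
         DW_TAG_variable "b"
           DW_AT_location DW_OP_addr <symbol of the block> + 4

   i.e. each member variable is a child of the DW_TAG_common_block DIE and
   its location is the block's address plus the member offset OFF computed
   by fortran_common.  */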
22941
22942 /* Generate a DIE to represent a named constant. */
22943
22944 static void
22945 gen_const_die (tree decl, dw_die_ref context_die)
22946 {
22947 dw_die_ref const_die;
22948 tree type = TREE_TYPE (decl);
22949
22950 const_die = lookup_decl_die (decl);
22951 if (const_die)
22952 return;
22953
22954 const_die = new_die (DW_TAG_constant, context_die, decl);
22955 equate_decl_number_to_die (decl, const_die);
22956 add_name_and_src_coords_attributes (const_die, decl);
22957 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
22958 if (TREE_PUBLIC (decl))
22959 add_AT_flag (const_die, DW_AT_external, 1);
22960 if (DECL_ARTIFICIAL (decl))
22961 add_AT_flag (const_die, DW_AT_artificial, 1);
22962 tree_add_const_value_attribute_for_decl (const_die, decl);
22963 }
22964
22965 /* Generate a DIE to represent a label identifier. */
22966
22967 static void
22968 gen_label_die (tree decl, dw_die_ref context_die)
22969 {
22970 tree origin = decl_ultimate_origin (decl);
22971 dw_die_ref lbl_die = lookup_decl_die (decl);
22972 rtx insn;
22973 char label[MAX_ARTIFICIAL_LABEL_BYTES];
22974
22975 if (!lbl_die)
22976 {
22977 lbl_die = new_die (DW_TAG_label, context_die, decl);
22978 equate_decl_number_to_die (decl, lbl_die);
22979
22980 if (origin != NULL)
22981 add_abstract_origin_attribute (lbl_die, origin);
22982 else
22983 add_name_and_src_coords_attributes (lbl_die, decl);
22984 }
22985
22986 if (DECL_ABSTRACT_P (decl))
22987 equate_decl_number_to_die (decl, lbl_die);
22988 else if (! early_dwarf)
22989 {
22990 insn = DECL_RTL_IF_SET (decl);
22991
22992 /* Deleted labels are programmer-specified labels which have been
22993 eliminated because of various optimizations. We still emit them
22994 here so that it is possible to put breakpoints on them. */
22995 if (insn
22996 && (LABEL_P (insn)
22997 || ((NOTE_P (insn)
22998 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
22999 {
23000 /* When optimization is enabled (via -O) some parts of the compiler
23001 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
23002 represent source-level labels which were explicitly declared by
23003 the user. This really shouldn't be happening though, so catch
23004 it if it ever does happen. */
23005 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
23006
23007 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
23008 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23009 }
23010 else if (insn
23011 && NOTE_P (insn)
23012 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
23013 && CODE_LABEL_NUMBER (insn) != -1)
23014 {
23015 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
23016 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23017 }
23018 }
23019 }
23020
23021 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
23022 attributes to the DIE for a block STMT, to describe where the inlined
23023 function was called from. This is similar to add_src_coords_attributes. */
23024
23025 static inline void
23026 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
23027 {
23028 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
23029
23030 if (dwarf_version >= 3 || !dwarf_strict)
23031 {
23032 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
23033 add_AT_unsigned (die, DW_AT_call_line, s.line);
23034 if (debug_column_info && s.column)
23035 add_AT_unsigned (die, DW_AT_call_column, s.column);
23036 }
23037 }
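
/* For instance, a call inlined from line 42, column 7 of foo.c is described
   on its DW_TAG_inlined_subroutine DIE as

       DW_AT_call_file    <file table index of "foo.c">
       DW_AT_call_line    42
       DW_AT_call_column  7

   with the column attribute emitted only when column tracking is enabled
   (debug_column_info).  */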
23038
23039
23040 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
23041 Add low_pc and high_pc attributes to the DIE for a block STMT. */
23042
23043 static inline void
23044 add_high_low_attributes (tree stmt, dw_die_ref die)
23045 {
23046 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23047
23048 if (BLOCK_FRAGMENT_CHAIN (stmt)
23049 && (dwarf_version >= 3 || !dwarf_strict))
23050 {
23051 tree chain, superblock = NULL_TREE;
23052 dw_die_ref pdie;
23053 dw_attr_node *attr = NULL;
23054
23055 if (inlined_function_outer_scope_p (stmt))
23056 {
23057 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
23058 BLOCK_NUMBER (stmt));
23059 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23060 }
23061
23062 /* Optimize duplicate .debug_ranges lists or even tails of
23063 lists. If this BLOCK has the same ranges as its supercontext,
23064 look up the DW_AT_ranges attribute in the supercontext (and
23065 recursively so), verify that the ranges_table contains the
23066 right values and use it instead of adding a new .debug_range. */
23067 for (chain = stmt, pdie = die;
23068 BLOCK_SAME_RANGE (chain);
23069 chain = BLOCK_SUPERCONTEXT (chain))
23070 {
23071 dw_attr_node *new_attr;
23072
23073 pdie = pdie->die_parent;
23074 if (pdie == NULL)
23075 break;
23076 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
23077 break;
23078 new_attr = get_AT (pdie, DW_AT_ranges);
23079 if (new_attr == NULL
23080 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
23081 break;
23082 attr = new_attr;
23083 superblock = BLOCK_SUPERCONTEXT (chain);
23084 }
23085 if (attr != NULL
23086 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
23087 == BLOCK_NUMBER (superblock))
23088 && BLOCK_FRAGMENT_CHAIN (superblock))
23089 {
23090 unsigned long off = attr->dw_attr_val.v.val_offset;
23091 unsigned long supercnt = 0, thiscnt = 0;
23092 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
23093 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
23094 {
23095 ++supercnt;
23096 gcc_checking_assert ((*ranges_table)[off + supercnt].num
23097 == BLOCK_NUMBER (chain));
23098 }
23099 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
23100 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
23101 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
23102 ++thiscnt;
23103 gcc_assert (supercnt >= thiscnt);
23104 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
23105 false);
23106 note_rnglist_head (off + supercnt - thiscnt);
23107 return;
23108 }
23109
23110 unsigned int offset = add_ranges (stmt, true);
23111 add_AT_range_list (die, DW_AT_ranges, offset, false);
23112 note_rnglist_head (offset);
23113
23114 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
23115 chain = BLOCK_FRAGMENT_CHAIN (stmt);
23116 do
23117 {
23118 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
23119 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
23120 chain = BLOCK_FRAGMENT_CHAIN (chain);
23121 }
23122 while (chain);
23123 add_ranges (NULL);
23124 }
23125 else
23126 {
23127 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
23128 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
23129 BLOCK_NUMBER (stmt));
23130 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
23131 BLOCK_NUMBER (stmt));
23132 add_AT_low_high_pc (die, label, label_high, false);
23133 }
23134 }
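
/* In other words, a block that was split into fragments (e.g. by hot/cold
   partitioning) is described with a DW_AT_ranges list covering every
   fragment, while an unsplit block simply gets

       DW_AT_low_pc   .LBB<n>
       DW_AT_high_pc  .LBE<n>

   where the two labels bracket the block in the assembly output (the exact
   label syntax is target dependent).  */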
23135
23136 /* Generate a DIE for a lexical block. */
23137
23138 static void
23139 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
23140 {
23141 dw_die_ref old_die = BLOCK_DIE (stmt);
23142 dw_die_ref stmt_die = NULL;
23143 if (!old_die)
23144 {
23145 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
23146 BLOCK_DIE (stmt) = stmt_die;
23147 }
23148
23149 if (BLOCK_ABSTRACT (stmt))
23150 {
23151 if (old_die)
23152 {
23153 /* This must have been generated early and it won't even
23154 need location information since it's a DW_AT_inline
23155 function. */
23156 if (flag_checking)
23157 for (dw_die_ref c = context_die; c; c = c->die_parent)
23158 if (c->die_tag == DW_TAG_inlined_subroutine
23159 || c->die_tag == DW_TAG_subprogram)
23160 {
23161 gcc_assert (get_AT (c, DW_AT_inline));
23162 break;
23163 }
23164 return;
23165 }
23166 }
23167 else if (BLOCK_ABSTRACT_ORIGIN (stmt))
23168 {
23169 /* If this is an inlined instance, create a new lexical die for
23170 anything below to attach DW_AT_abstract_origin to. */
23171 if (old_die)
23172 {
23173 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
23174 BLOCK_DIE (stmt) = stmt_die;
23175 old_die = NULL;
23176 }
23177
23178 tree origin = block_ultimate_origin (stmt);
23179 if (origin != NULL_TREE && origin != stmt)
23180 add_abstract_origin_attribute (stmt_die, origin);
23181 }
23182
23183 if (old_die)
23184 stmt_die = old_die;
23185
23186 /* A non-abstract block whose blocks have already been reordered
23187 should have the instruction range for this block. If so, set the
23188 high/low attributes. */
23189 if (!early_dwarf && !BLOCK_ABSTRACT (stmt) && TREE_ASM_WRITTEN (stmt))
23190 {
23191 gcc_assert (stmt_die);
23192 add_high_low_attributes (stmt, stmt_die);
23193 }
23194
23195 decls_for_scope (stmt, stmt_die);
23196 }
23197
23198 /* Generate a DIE for an inlined subprogram. */
23199
23200 static void
23201 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
23202 {
23203 tree decl;
23204
23205 /* The instance of the function that is effectively being inlined must
23206 not be abstract. */
23207 gcc_assert (! BLOCK_ABSTRACT (stmt));
23208
23209 decl = block_ultimate_origin (stmt);
23210
23211 /* Make sure any inlined functions are known to be inlineable. */
23212 gcc_checking_assert (DECL_ABSTRACT_P (decl)
23213 || cgraph_function_possibly_inlined_p (decl));
23214
23215 /* Emit info for the abstract instance first, if we haven't yet. We
23216 must emit this even if the block is abstract, otherwise when we
23217 emit the block below (or elsewhere), we may end up trying to emit
23218 a die whose origin die hasn't been emitted, and crashing. */
23219 dwarf2out_abstract_function (decl);
23220
23221 if (! BLOCK_ABSTRACT (stmt))
23222 {
23223 dw_die_ref subr_die
23224 = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
23225
23226 if (call_arg_locations)
23227 BLOCK_DIE (stmt) = subr_die;
23228 add_abstract_origin_attribute (subr_die, decl);
23229 if (TREE_ASM_WRITTEN (stmt))
23230 add_high_low_attributes (stmt, subr_die);
23231 add_call_src_coords_attributes (stmt, subr_die);
23232
23233 decls_for_scope (stmt, subr_die);
23234 }
23235 }
23236
23237 /* Generate a DIE for a field in a record or structure. CTX is required: see
23238 the comment for VLR_CONTEXT. */
23239
23240 static void
23241 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
23242 {
23243 dw_die_ref decl_die;
23244
23245 if (TREE_TYPE (decl) == error_mark_node)
23246 return;
23247
23248 decl_die = new_die (DW_TAG_member, context_die, decl);
23249 add_name_and_src_coords_attributes (decl_die, decl);
23250 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
23251 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
23252 context_die);
23253
23254 if (DECL_BIT_FIELD_TYPE (decl))
23255 {
23256 add_byte_size_attribute (decl_die, decl);
23257 add_bit_size_attribute (decl_die, decl);
23258 add_bit_offset_attribute (decl_die, decl, ctx);
23259 }
23260
23261 add_alignment_attribute (decl_die, decl);
23262
23263 /* If we have a variant part offset, then we are supposed to process a member
23264 of a QUAL_UNION_TYPE, which is how we represent variant parts in
23265 trees. */
23266 gcc_assert (ctx->variant_part_offset == NULL_TREE
23267 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
23268 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
23269 add_data_member_location_attribute (decl_die, decl, ctx);
23270
23271 if (DECL_ARTIFICIAL (decl))
23272 add_AT_flag (decl_die, DW_AT_artificial, 1);
23273
23274 add_accessibility_attribute (decl_die, decl);
23275
23276 /* Equate decl number to die, so that we can look up this decl later on. */
23277 equate_decl_number_to_die (decl, decl_die);
23278 }
23279
23280 #if 0
23281 /* Don't generate either pointer_type DIEs or reference_type DIEs here.
23282 Use modified_type_die instead.
23283 We keep this code here just in case these types of DIEs may be needed to
23284 represent certain things in other languages (e.g. Pascal) someday. */
23285
23286 static void
23287 gen_pointer_type_die (tree type, dw_die_ref context_die)
23288 {
23289 dw_die_ref ptr_die
23290 = new_die (DW_TAG_pointer_type, scope_die_for (type, context_die), type);
23291
23292 equate_type_number_to_die (type, ptr_die);
23293 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23294 context_die);
23295 add_AT_unsigned (ptr_die, DW_AT_byte_size, PTR_SIZE);
23296 }
23297
23298 /* Don't generate either pointer_type DIEs or reference_type DIEs here.
23299 Use modified_type_die instead.
23300 We keep this code here just in case these types of DIEs may be needed to
23301 represent certain things in other languages (e.g. Pascal) someday. */
23302
23303 static void
23304 gen_reference_type_die (tree type, dw_die_ref context_die)
23305 {
23306 dw_die_ref ref_die, scope_die = scope_die_for (type, context_die);
23307
23308 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
23309 ref_die = new_die (DW_TAG_rvalue_reference_type, scope_die, type);
23310 else
23311 ref_die = new_die (DW_TAG_reference_type, scope_die, type);
23312
23313 equate_type_number_to_die (type, ref_die);
23314 add_type_attribute (ref_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23315 context_die);
23316 add_AT_unsigned (ref_die, DW_AT_byte_size, PTR_SIZE);
23317 }
23318 #endif
23319
23320 /* Generate a DIE for a pointer to a member type. TYPE can be an
23321 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
23322 pointer to member function. */
23323
23324 static void
23325 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
23326 {
23327 if (lookup_type_die (type))
23328 return;
23329
23330 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
23331 scope_die_for (type, context_die), type);
23332
23333 equate_type_number_to_die (type, ptr_die);
23334 add_AT_die_ref (ptr_die, DW_AT_containing_type,
23335 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
23336 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23337 context_die);
23338 add_alignment_attribute (ptr_die, type);
23339
23340 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
23341 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
23342 {
23343 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
23344 add_AT_loc (ptr_die, DW_AT_use_location, op);
23345 }
23346 }
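
/* For a pointer to data member such as

       int C::*p;

   the DIE built above looks roughly like

       DW_TAG_ptr_to_member_type
         DW_AT_containing_type  <reference to the DIE for C>
         DW_AT_type             <reference to the DIE for int>
         DW_AT_use_location     DW_OP_plus

   whereas pointers to member functions get no DW_AT_use_location.  */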
23347
23348 static char *producer_string;
23349
23350 /* Return a heap-allocated producer string, including the command-line
23351 options when -grecord-gcc-switches is in effect. */
23352
23353 static char *
23354 gen_producer_string (void)
23355 {
23356 size_t j;
23357 auto_vec<const char *> switches;
23358 const char *language_string = lang_hooks.name;
23359 char *producer, *tail;
23360 const char *p;
23361 size_t len = dwarf_record_gcc_switches ? 0 : 3;
23362 size_t plen = strlen (language_string) + 1 + strlen (version_string);
23363
23364 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
23365 switch (save_decoded_options[j].opt_index)
23366 {
23367 case OPT_o:
23368 case OPT_d:
23369 case OPT_dumpbase:
23370 case OPT_dumpdir:
23371 case OPT_auxbase:
23372 case OPT_auxbase_strip:
23373 case OPT_quiet:
23374 case OPT_version:
23375 case OPT_v:
23376 case OPT_w:
23377 case OPT_L:
23378 case OPT_D:
23379 case OPT_I:
23380 case OPT_U:
23381 case OPT_SPECIAL_unknown:
23382 case OPT_SPECIAL_ignore:
23383 case OPT_SPECIAL_program_name:
23384 case OPT_SPECIAL_input_file:
23385 case OPT_grecord_gcc_switches:
23386 case OPT_gno_record_gcc_switches:
23387 case OPT__output_pch_:
23388 case OPT_fdiagnostics_show_location_:
23389 case OPT_fdiagnostics_show_option:
23390 case OPT_fdiagnostics_show_caret:
23391 case OPT_fdiagnostics_color_:
23392 case OPT_fverbose_asm:
23393 case OPT____:
23394 case OPT__sysroot_:
23395 case OPT_nostdinc:
23396 case OPT_nostdinc__:
23397 case OPT_fpreprocessed:
23398 case OPT_fltrans_output_list_:
23399 case OPT_fresolution_:
23400 case OPT_fdebug_prefix_map_:
23401 /* Ignore these. */
23402 continue;
23403 default:
23404 if (cl_options[save_decoded_options[j].opt_index].flags
23405 & CL_NO_DWARF_RECORD)
23406 continue;
23407 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
23408 == '-');
23409 switch (save_decoded_options[j].canonical_option[0][1])
23410 {
23411 case 'M':
23412 case 'i':
23413 case 'W':
23414 continue;
23415 case 'f':
23416 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
23417 "dump", 4) == 0)
23418 continue;
23419 break;
23420 default:
23421 break;
23422 }
23423 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
23424 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
23425 break;
23426 }
23427
23428 producer = XNEWVEC (char, plen + 1 + len + 1);
23429 tail = producer;
23430 sprintf (tail, "%s %s", language_string, version_string);
23431 tail += plen;
23432
23433 FOR_EACH_VEC_ELT (switches, j, p)
23434 {
23435 len = strlen (p);
23436 *tail = ' ';
23437 memcpy (tail + 1, p, len);
23438 tail += len + 1;
23439 }
23440
23441 *tail = '\0';
23442 return producer;
23443 }
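
/* The resulting producer string has the form "<language> <version>" followed
   by the recorded switches, e.g. (contents vary with the compiler and the
   command line):

       GNU C11 7.1.0 -mtune=generic -march=x86-64 -g -O2

   Preprocessor and diagnostics options such as -I, -D or -fdiagnostics-*,
   as well as dump and output related switches, are filtered out above.  */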
23444
23445 /* Given a C and/or C++ language/version string, return the "highest".
23446 C++ is assumed to be "higher" than C in this case. Used for merging
23447 LTO translation unit languages. */
23448 static const char *
23449 highest_c_language (const char *lang1, const char *lang2)
23450 {
23451 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
23452 return "GNU C++14";
23453 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
23454 return "GNU C++11";
23455 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
23456 return "GNU C++98";
23457
23458 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
23459 return "GNU C11";
23460 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
23461 return "GNU C99";
23462 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
23463 return "GNU C89";
23464
23465 gcc_unreachable ();
23466 }
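
/* For example, highest_c_language ("GNU C11", "GNU C++98") returns
   "GNU C++98": when LTO merges translation units, any C++ dialect is
   treated as "higher" than any C dialect, and within each family the more
   recent standard wins.  */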
23467
23468
23469 /* Generate the DIE for the compilation unit. */
23470
23471 static dw_die_ref
23472 gen_compile_unit_die (const char *filename)
23473 {
23474 dw_die_ref die;
23475 const char *language_string = lang_hooks.name;
23476 int language;
23477
23478 die = new_die (DW_TAG_compile_unit, NULL, NULL);
23479
23480 if (filename)
23481 {
23482 add_name_attribute (die, filename);
23483 /* Don't add cwd for <built-in>. */
23484 if (filename[0] != '<')
23485 add_comp_dir_attribute (die);
23486 }
23487
23488 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
23489
23490 /* If our producer is LTO try to figure out a common language to use
23491 from the global list of translation units. */
23492 if (strcmp (language_string, "GNU GIMPLE") == 0)
23493 {
23494 unsigned i;
23495 tree t;
23496 const char *common_lang = NULL;
23497
23498 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
23499 {
23500 if (!TRANSLATION_UNIT_LANGUAGE (t))
23501 continue;
23502 if (!common_lang)
23503 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
23504 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
23505 ;
23506 else if (strncmp (common_lang, "GNU C", 5) == 0
23507 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
23508 /* Mixing C and C++ is ok, use C++ in that case. */
23509 common_lang = highest_c_language (common_lang,
23510 TRANSLATION_UNIT_LANGUAGE (t));
23511 else
23512 {
23513 /* Fall back to C. */
23514 common_lang = NULL;
23515 break;
23516 }
23517 }
23518
23519 if (common_lang)
23520 language_string = common_lang;
23521 }
23522
23523 language = DW_LANG_C;
23524 if (strncmp (language_string, "GNU C", 5) == 0
23525 && ISDIGIT (language_string[5]))
23526 {
23527 language = DW_LANG_C89;
23528 if (dwarf_version >= 3 || !dwarf_strict)
23529 {
23530 if (strcmp (language_string, "GNU C89") != 0)
23531 language = DW_LANG_C99;
23532
23533 if (dwarf_version >= 5 /* || !dwarf_strict */)
23534 if (strcmp (language_string, "GNU C11") == 0)
23535 language = DW_LANG_C11;
23536 }
23537 }
23538 else if (strncmp (language_string, "GNU C++", 7) == 0)
23539 {
23540 language = DW_LANG_C_plus_plus;
23541 if (dwarf_version >= 5 /* || !dwarf_strict */)
23542 {
23543 if (strcmp (language_string, "GNU C++11") == 0)
23544 language = DW_LANG_C_plus_plus_11;
23545 else if (strcmp (language_string, "GNU C++14") == 0)
23546 language = DW_LANG_C_plus_plus_14;
23547 }
23548 }
23549 else if (strcmp (language_string, "GNU F77") == 0)
23550 language = DW_LANG_Fortran77;
23551 else if (strcmp (language_string, "GNU Pascal") == 0)
23552 language = DW_LANG_Pascal83;
23553 else if (dwarf_version >= 3 || !dwarf_strict)
23554 {
23555 if (strcmp (language_string, "GNU Ada") == 0)
23556 language = DW_LANG_Ada95;
23557 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
23558 {
23559 language = DW_LANG_Fortran95;
23560 if (dwarf_version >= 5 /* || !dwarf_strict */)
23561 {
23562 if (strcmp (language_string, "GNU Fortran2003") == 0)
23563 language = DW_LANG_Fortran03;
23564 else if (strcmp (language_string, "GNU Fortran2008") == 0)
23565 language = DW_LANG_Fortran08;
23566 }
23567 }
23568 else if (strcmp (language_string, "GNU Java") == 0)
23569 language = DW_LANG_Java;
23570 else if (strcmp (language_string, "GNU Objective-C") == 0)
23571 language = DW_LANG_ObjC;
23572 else if (strcmp (language_string, "GNU Objective-C++") == 0)
23573 language = DW_LANG_ObjC_plus_plus;
23574 else if (dwarf_version >= 5 || !dwarf_strict)
23575 {
23576 if (strcmp (language_string, "GNU Go") == 0)
23577 language = DW_LANG_Go;
23578 }
23579 }
23580 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
23581 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
23582 language = DW_LANG_Fortran90;
23583
23584 add_AT_unsigned (die, DW_AT_language, language);
23585
23586 switch (language)
23587 {
23588 case DW_LANG_Fortran77:
23589 case DW_LANG_Fortran90:
23590 case DW_LANG_Fortran95:
23591 case DW_LANG_Fortran03:
23592 case DW_LANG_Fortran08:
23593 /* Fortran has case-insensitive identifiers and the front-end
23594 lowercases everything. */
23595 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
23596 break;
23597 default:
23598 /* The default DW_ID_case_sensitive doesn't need to be specified. */
23599 break;
23600 }
23601 return die;
23602 }
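
/* A typical compilation unit DIE assembled here looks like (illustrative
   values):

       DW_TAG_compile_unit
         DW_AT_name      "foo.c"
         DW_AT_comp_dir  "/home/user/src"
         DW_AT_producer  "GNU C11 7.1.0 ... -g"
         DW_AT_language  DW_LANG_C11

   The low/high pc, ranges and statement-list attributes are attached later,
   when the unit is finally output.  */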
23603
23604 /* Generate the DIE for a base class. */
23605
23606 static void
23607 gen_inheritance_die (tree binfo, tree access, tree type,
23608 dw_die_ref context_die)
23609 {
23610 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
23611 struct vlr_context ctx = { type, NULL };
23612
23613 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
23614 context_die);
23615 add_data_member_location_attribute (die, binfo, &ctx);
23616
23617 if (BINFO_VIRTUAL_P (binfo))
23618 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
23619
23620 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
23621 children, otherwise the default is DW_ACCESS_public. In DWARF2
23622 the default has always been DW_ACCESS_private. */
23623 if (access == access_public_node)
23624 {
23625 if (dwarf_version == 2
23626 || context_die->die_tag == DW_TAG_class_type)
23627 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
23628 }
23629 else if (access == access_protected_node)
23630 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
23631 else if (dwarf_version > 2
23632 && context_die->die_tag != DW_TAG_class_type)
23633 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
23634 }
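
/* For instance (illustrative), given "class D : protected B { ... };" the
   base-class entry generated here is roughly

       DW_TAG_inheritance
         DW_AT_type                  <reference to the DIE for B>
         DW_AT_data_member_location  0
         DW_AT_accessibility         DW_ACCESS_protected

   while a public non-virtual base inside a DW_TAG_structure_type context
   gets no accessibility attribute at all, that being the DWARF 3+
   default.  */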
23635
23636 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
23637 structure. */
23638 static bool
23639 is_variant_part (tree decl)
23640 {
23641 return (TREE_CODE (decl) == FIELD_DECL
23642 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
23643 }
23644
23645 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
23646 return the FIELD_DECL. Return NULL_TREE otherwise. */
23647
23648 static tree
23649 analyze_discr_in_predicate (tree operand, tree struct_type)
23650 {
23651 bool continue_stripping = true;
23652 while (continue_stripping)
23653 switch (TREE_CODE (operand))
23654 {
23655 CASE_CONVERT:
23656 operand = TREE_OPERAND (operand, 0);
23657 break;
23658 default:
23659 continue_stripping = false;
23660 break;
23661 }
23662
23663 /* Match field access to members of struct_type only. */
23664 if (TREE_CODE (operand) == COMPONENT_REF
23665 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
23666 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
23667 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
23668 return TREE_OPERAND (operand, 1);
23669 else
23670 return NULL_TREE;
23671 }
23672
23673 /* Check that SRC is a constant integer that can be represented as a native
23674 integer constant (either signed or unsigned). If so, store it into DEST and
23675 return true. Return false otherwise. */
23676
23677 static bool
23678 get_discr_value (tree src, dw_discr_value *dest)
23679 {
23680 bool is_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
23681
23682 if (TREE_CODE (src) != INTEGER_CST
23683 || !(is_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
23684 return false;
23685
23686 dest->pos = is_unsigned;
23687 if (is_unsigned)
23688 dest->v.uval = tree_to_uhwi (src);
23689 else
23690 dest->v.sval = tree_to_shwi (src);
23691
23692 return true;
23693 }
23694
23695 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
23696 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
23697 store NULL_TREE in DISCR_DECL. Otherwise:
23698
23699 - store the discriminant field in STRUCT_TYPE that controls the variant
23700 part to *DISCR_DECL
23701
23702 - put in *DISCR_LISTS_P an array where for each variant, the item
23703 represents the corresponding matching list of discriminant values.
23704
23705 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
23706 the above array.
23707
23708 Note that when the array is allocated (i.e. when the analysis is
23709 successful), it is up to the caller to free the array. */
23710
23711 static void
23712 analyze_variants_discr (tree variant_part_decl,
23713 tree struct_type,
23714 tree *discr_decl,
23715 dw_discr_list_ref **discr_lists_p,
23716 unsigned *discr_lists_length)
23717 {
23718 tree variant_part_type = TREE_TYPE (variant_part_decl);
23719 tree variant;
23720 dw_discr_list_ref *discr_lists;
23721 unsigned i;
23722
23723 /* Compute how many variants there are in this variant part. */
23724 *discr_lists_length = 0;
23725 for (variant = TYPE_FIELDS (variant_part_type);
23726 variant != NULL_TREE;
23727 variant = DECL_CHAIN (variant))
23728 ++*discr_lists_length;
23729
23730 *discr_decl = NULL_TREE;
23731 *discr_lists_p
23732 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
23733 sizeof (**discr_lists_p));
23734 discr_lists = *discr_lists_p;
23735
23736 /* And then analyze all variants to extract discriminant information for all
23737 of them. This analysis is conservative: as soon as we detect something we
23738 do not support, abort everything and pretend we found nothing. */
23739 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
23740 variant != NULL_TREE;
23741 variant = DECL_CHAIN (variant), ++i)
23742 {
23743 tree match_expr = DECL_QUALIFIER (variant);
23744
23745 /* Now, try to analyze the predicate and deduce a discriminant for
23746 it. */
23747 if (match_expr == boolean_true_node)
23748 /* Typically happens for the default variant: it matches all cases that
23749 previous variants rejected. Don't output any matching value for
23750 this one. */
23751 continue;
23752
23753 /* The following loop tries to iterate over each discriminant
23754 possibility: single values or ranges. */
23755 while (match_expr != NULL_TREE)
23756 {
23757 tree next_round_match_expr;
23758 tree candidate_discr = NULL_TREE;
23759 dw_discr_list_ref new_node = NULL;
23760
23761 /* Possibilities are matched one after the other by nested
23762 TRUTH_ORIF_EXPR expressions. Process the current possibility and
23763 continue with the rest at next iteration. */
23764 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
23765 {
23766 next_round_match_expr = TREE_OPERAND (match_expr, 0);
23767 match_expr = TREE_OPERAND (match_expr, 1);
23768 }
23769 else
23770 next_round_match_expr = NULL_TREE;
23771
23772 if (match_expr == boolean_false_node)
23773 /* This sub-expression matches nothing: just wait for the next
23774 one. */
23775 ;
23776
23777 else if (TREE_CODE (match_expr) == EQ_EXPR)
23778 {
23779 /* We are matching: <discr_field> == <integer_cst>
23780 This sub-expression matches a single value. */
23781 tree integer_cst = TREE_OPERAND (match_expr, 1);
23782
23783 candidate_discr
23784 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
23785 struct_type);
23786
23787 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
23788 if (!get_discr_value (integer_cst,
23789 &new_node->dw_discr_lower_bound))
23790 goto abort;
23791 new_node->dw_discr_range = false;
23792 }
23793
23794 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
23795 {
23796 /* We are matching:
23797 <discr_field> > <integer_cst>
23798 && <discr_field> < <integer_cst>.
23799 This sub-expression matches the range of values between the
23800 two matched integer constants. Note that comparisons can be
23801 inclusive or exclusive. */
23802 tree candidate_discr_1, candidate_discr_2;
23803 tree lower_cst, upper_cst;
23804 bool lower_cst_included, upper_cst_included;
23805 tree lower_op = TREE_OPERAND (match_expr, 0);
23806 tree upper_op = TREE_OPERAND (match_expr, 1);
23807
23808 /* When the comparison is exclusive, the integer constant is not
23809 the discriminant range bound we are looking for: we will have
23810 to increment or decrement it. */
23811 if (TREE_CODE (lower_op) == GE_EXPR)
23812 lower_cst_included = true;
23813 else if (TREE_CODE (lower_op) == GT_EXPR)
23814 lower_cst_included = false;
23815 else
23816 goto abort;
23817
23818 if (TREE_CODE (upper_op) == LE_EXPR)
23819 upper_cst_included = true;
23820 else if (TREE_CODE (upper_op) == LT_EXPR)
23821 upper_cst_included = false;
23822 else
23823 goto abort;
23824
23825 /* Extract the discriminant from the first operand and check it
23826 is consistent with the same analysis in the second
23827 operand. */
23828 candidate_discr_1
23829 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
23830 struct_type);
23831 candidate_discr_2
23832 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
23833 struct_type);
23834 if (candidate_discr_1 == candidate_discr_2)
23835 candidate_discr = candidate_discr_1;
23836 else
23837 goto abort;
23838
23839 /* Extract bounds from both. */
23840 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
23841 lower_cst = TREE_OPERAND (lower_op, 1);
23842 upper_cst = TREE_OPERAND (upper_op, 1);
23843
23844 if (!lower_cst_included)
23845 lower_cst
23846 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
23847 build_int_cst (TREE_TYPE (lower_cst), 1));
23848 if (!upper_cst_included)
23849 upper_cst
23850 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
23851 build_int_cst (TREE_TYPE (upper_cst), 1));
23852
23853 if (!get_discr_value (lower_cst,
23854 &new_node->dw_discr_lower_bound)
23855 || !get_discr_value (upper_cst,
23856 &new_node->dw_discr_upper_bound))
23857 goto abort;
23858
23859 new_node->dw_discr_range = true;
23860 }
23861
23862 else
23863 /* Unsupported sub-expression: we cannot determine the set of
23864 matching discriminant values. Abort everything. */
23865 goto abort;
23866
23867 /* If the discriminant info is not consistent with what we saw so
23868 far, consider the analysis failed and abort everything. */
23869 if (candidate_discr == NULL_TREE
23870 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
23871 goto abort;
23872 else
23873 *discr_decl = candidate_discr;
23874
23875 if (new_node != NULL)
23876 {
23877 new_node->dw_discr_next = discr_lists[i];
23878 discr_lists[i] = new_node;
23879 }
23880 match_expr = next_round_match_expr;
23881 }
23882 }
23883
23884 /* If we reach this point, we could match everything we were interested
23885 in. */
23886 return;
23887
23888 abort:
23889 /* Clean up all data structures and return no result. */
23890 free (*discr_lists_p);
23891 *discr_lists_p = NULL;
23892 *discr_decl = NULL_TREE;
23893 }
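
/* The DECL_QUALIFIER predicates that the analysis above understands look
   like the following, with D standing for a discriminant field of the
   enclosing record:

       D == 3                              a single matching value
       D >= 1 && D <= 4                    a range of matching values
       D == 3 || (D >= 10 && D <= 20)      a list of possibilities
       <boolean_true_node>                 the default variant

   Any other form makes the analysis give up and report no discriminant at
   all.  */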
23894
23895 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
23896 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
23897 under CONTEXT_DIE.
23898
23899 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
23900 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
23901 this type, which are record types, represent the available variants and each
23902 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
23903 values are inferred from these attributes.
23904
23905 In trees, the offsets for the fields inside these sub-records are relative
23906 to the variant part itself, whereas the corresponding DIEs should have
23907 offset attributes that are relative to the embedding record base address.
23908 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
23909 must be an expression that computes the offset of the variant part to
23910 describe in DWARF. */
23911
23912 static void
23913 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
23914 dw_die_ref context_die)
23915 {
23916 const tree variant_part_type = TREE_TYPE (variant_part_decl);
23917 tree variant_part_offset = vlr_ctx->variant_part_offset;
23918 struct loc_descr_context ctx = {
23919 vlr_ctx->struct_type, /* context_type */
23920 NULL_TREE, /* base_decl */
23921 NULL, /* dpi */
23922 false, /* placeholder_arg */
23923 false /* placeholder_seen */
23924 };
23925
23926 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
23927 NULL_TREE if there is no such field. */
23928 tree discr_decl = NULL_TREE;
23929 dw_discr_list_ref *discr_lists;
23930 unsigned discr_lists_length = 0;
23931 unsigned i;
23932
23933 dw_die_ref dwarf_proc_die = NULL;
23934 dw_die_ref variant_part_die
23935 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
23936
23937 equate_decl_number_to_die (variant_part_decl, variant_part_die);
23938
23939 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
23940 &discr_decl, &discr_lists, &discr_lists_length);
23941
23942 if (discr_decl != NULL_TREE)
23943 {
23944 dw_die_ref discr_die = lookup_decl_die (discr_decl);
23945
23946 if (discr_die)
23947 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
23948 else
23949 /* We have no DIE for the discriminant, so just discard all
23950 discriminant information in the output. */
23951 discr_decl = NULL_TREE;
23952 }
23953
23954 /* If the offset for this variant part is more complex than a constant,
23955 create a DWARF procedure for it so that we will not have to generate DWARF
23956 expressions for it for each member. */
23957 if (TREE_CODE (variant_part_offset) != INTEGER_CST
23958 && (dwarf_version >= 3 || !dwarf_strict))
23959 {
23960 const tree dwarf_proc_fndecl
23961 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
23962 build_function_type (TREE_TYPE (variant_part_offset),
23963 NULL_TREE));
23964 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
23965 const dw_loc_descr_ref dwarf_proc_body
23966 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
23967
23968 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
23969 dwarf_proc_fndecl, context_die);
23970 if (dwarf_proc_die != NULL)
23971 variant_part_offset = dwarf_proc_call;
23972 }
23973
23974 /* Output DIEs for all variants. */
23975 i = 0;
23976 for (tree variant = TYPE_FIELDS (variant_part_type);
23977 variant != NULL_TREE;
23978 variant = DECL_CHAIN (variant), ++i)
23979 {
23980 tree variant_type = TREE_TYPE (variant);
23981 dw_die_ref variant_die;
23982
23983 /* All variants (i.e. members of a variant part) are supposed to be
23984 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
23985 under these records. */
23986 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
23987
23988 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
23989 equate_decl_number_to_die (variant, variant_die);
23990
23991 /* Output discriminant values this variant matches, if any. */
23992 if (discr_decl == NULL || discr_lists[i] == NULL)
23993 /* If we have no discriminant information for this variant, this is
23994 probably the default variant: as the standard says, don't
23995 output any discriminant value/list attribute. */
23996 ;
23997 else if (discr_lists[i]->dw_discr_next == NULL
23998 && !discr_lists[i]->dw_discr_range)
23999 /* If there is only one accepted value, don't bother outputting a
24000 list. */
24001 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
24002 else
24003 add_discr_list (variant_die, discr_lists[i]);
24004
24005 for (tree member = TYPE_FIELDS (variant_type);
24006 member != NULL_TREE;
24007 member = DECL_CHAIN (member))
24008 {
24009 struct vlr_context vlr_sub_ctx = {
24010 vlr_ctx->struct_type, /* struct_type */
24011 NULL /* variant_part_offset */
24012 };
24013 if (is_variant_part (member))
24014 {
24015 /* All offsets for fields inside variant parts are relative to
24016 the top-level embedding RECORD_TYPE's base address. On the
24017 other hand, offsets in GCC's types are relative to the
24018 nested-most variant part. So we have to sum offsets each time
24019 we recurse. */
24020
24021 vlr_sub_ctx.variant_part_offset
24022 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
24023 variant_part_offset, byte_position (member));
24024 gen_variant_part (member, &vlr_sub_ctx, variant_die);
24025 }
24026 else
24027 {
24028 vlr_sub_ctx.variant_part_offset = variant_part_offset;
24029 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
24030 }
24031 }
24032 }
24033
24034 free (discr_lists);
24035 }
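
/* Sketch of the DWARF generated above for an Ada variant record (names and
   values are illustrative):

       type Rec (K : Integer) is record
          case K is
             when 1 .. 4 => A : Integer;
             when others => B : Float;
          end case;
       end record;

   becomes, under the record's DW_TAG_structure_type DIE,

       DW_TAG_variant_part
         DW_AT_discr  <reference to the DIE for K>
         DW_TAG_variant
           DW_AT_discr_list  range 1 .. 4
           DW_TAG_member "a"
         DW_TAG_variant            (default variant: no discriminant value)
           DW_TAG_member "b"

   with every member's data location rewritten to be relative to the base of
   the enclosing record, as explained in the comment before this
   function.  */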
24036
24037 /* Generate a DIE for a class member. */
24038
24039 static void
24040 gen_member_die (tree type, dw_die_ref context_die)
24041 {
24042 tree member;
24043 tree binfo = TYPE_BINFO (type);
24044 dw_die_ref child;
24045
24046 /* If this is not an incomplete type, output descriptions of each of its
24047 members. Note that as we output the DIEs necessary to represent the
24048 members of this record or union type, we will also be trying to output
24049 DIEs to represent the *types* of those members. However the `type'
24050 function (above) will specifically avoid generating type DIEs for member
24051 types *within* the list of member DIEs for this (containing) type except
24052 for those types (of members) which are explicitly marked as also being
24053 members of this (containing) type themselves. The g++ front end can
24054 force any given type to be treated as a member of some other (containing)
24055 type by setting the TYPE_CONTEXT of the given (member) type to point to
24056 the TREE node representing the appropriate (containing) type. */
24057
24058 /* First output info about the base classes. */
24059 if (binfo)
24060 {
24061 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
24062 int i;
24063 tree base;
24064
24065 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
24066 gen_inheritance_die (base,
24067 (accesses ? (*accesses)[i] : access_public_node),
24068 type,
24069 context_die);
24070 }
24071
24072 /* Now output info about the data members and type members. */
24073 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
24074 {
24075 struct vlr_context vlr_ctx = { type, NULL_TREE };
24076
24077 /* If we thought we were generating minimal debug info for TYPE
24078 and then changed our minds, some of the member declarations
24079 may have already been defined. Don't define them again, but
24080 do put them in the right order. */
24081
24082 child = lookup_decl_die (member);
24083 if (child)
24084 {
24085 /* Handle inline static data members, which only have in-class
24086 declarations. */
24087 if (child->die_tag == DW_TAG_variable
24088 && child->die_parent == comp_unit_die ()
24089 && get_AT (child, DW_AT_specification) == NULL)
24090 {
24091 reparent_child (child, context_die);
24092 child->die_tag = DW_TAG_member;
24093 }
24094 else
24095 splice_child_die (context_die, child);
24096 }
24097
24098 /* Do not generate standard DWARF for variant parts if we are generating
24099 the corresponding GNAT encodings: DIEs generated for both would
24100 conflict in our mappings. */
24101 else if (is_variant_part (member)
24102 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
24103 {
24104 vlr_ctx.variant_part_offset = byte_position (member);
24105 gen_variant_part (member, &vlr_ctx, context_die);
24106 }
24107 else
24108 {
24109 vlr_ctx.variant_part_offset = NULL_TREE;
24110 gen_decl_die (member, NULL, &vlr_ctx, context_die);
24111 }
24112
24113 /* For C++ inline static data members emit immediately a DW_TAG_variable
24114 DIE that will refer to that DW_TAG_member through
24115 DW_AT_specification. */
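/* A minimal C++17 sketch of the case handled here (illustrative only):

     struct S { static inline int counter = 0; };

   The in-class declaration becomes a DW_TAG_member child of S's DIE,
   while the entity itself is described by a DW_TAG_variable at CU scope
   whose DW_AT_specification refers back to that member. */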
24116 if (TREE_STATIC (member)
24117 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
24118 != -1))
24119 {
24120 int old_extern = DECL_EXTERNAL (member);
24121 DECL_EXTERNAL (member) = 0;
24122 gen_decl_die (member, NULL, NULL, comp_unit_die ());
24123 DECL_EXTERNAL (member) = old_extern;
24124 }
24125 }
24126
24127 /* We do not keep type methods in type variants. */
24128 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
24129 /* Now output info about the function members (if any). */
24130 if (TYPE_METHODS (type) != error_mark_node)
24131 for (member = TYPE_METHODS (type); member; member = DECL_CHAIN (member))
24132 {
24133 /* Don't include clones in the member list. */
24134 if (DECL_ABSTRACT_ORIGIN (member))
24135 continue;
24136 /* Nor constructors for anonymous classes. */
24137 if (DECL_ARTIFICIAL (member)
24138 && dwarf2_name (member, 0) == NULL)
24139 continue;
24140
24141 child = lookup_decl_die (member);
24142 if (child)
24143 splice_child_die (context_die, child);
24144 else
24145 gen_decl_die (member, NULL, NULL, context_die);
24146 }
24147 }
24148
24149 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
24150 is set, we pretend that the type was never defined, so we only get the
24151 member DIEs needed by later specification DIEs. */
24152
24153 static void
24154 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
24155 enum debug_info_usage usage)
24156 {
24157 if (TREE_ASM_WRITTEN (type))
24158 {
24159 /* Fill in the bounds of variable-length fields in late dwarf if
24160 still incomplete. */
24161 if (!early_dwarf && variably_modified_type_p (type, NULL))
24162 for (tree member = TYPE_FIELDS (type);
24163 member;
24164 member = DECL_CHAIN (member))
24165 fill_variable_array_bounds (TREE_TYPE (member));
24166 return;
24167 }
24168
24169 dw_die_ref type_die = lookup_type_die (type);
24170 dw_die_ref scope_die = 0;
24171 int nested = 0;
24172 int complete = (TYPE_SIZE (type)
24173 && (! TYPE_STUB_DECL (type)
24174 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
24175 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
24176 complete = complete && should_emit_struct_debug (type, usage);
24177
24178 if (type_die && ! complete)
24179 return;
24180
24181 if (TYPE_CONTEXT (type) != NULL_TREE
24182 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
24183 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
24184 nested = 1;
24185
24186 scope_die = scope_die_for (type, context_die);
24187
24188 /* Generate child DIEs for template parameters. */
24189 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
24190 schedule_generic_params_dies_gen (type);
24191
24192 if (! type_die || (nested && is_cu_die (scope_die)))
24193 /* First occurrence of type or toplevel definition of nested class. */
24194 {
24195 dw_die_ref old_die = type_die;
24196
24197 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
24198 ? record_type_tag (type) : DW_TAG_union_type,
24199 scope_die, type);
24200 equate_type_number_to_die (type, type_die);
24201 if (old_die)
24202 add_AT_specification (type_die, old_die);
24203 else
24204 add_name_attribute (type_die, type_tag (type));
24205 }
24206 else
24207 remove_AT (type_die, DW_AT_declaration);
24208
24209 /* If this type has been completed, then give it a byte_size attribute and
24210 then give a list of members. */
24211 if (complete && !ns_decl)
24212 {
24213 /* Prevent infinite recursion in cases where the type of some member of
24214 this type is expressed in terms of this type itself. */
24215 TREE_ASM_WRITTEN (type) = 1;
24216 add_byte_size_attribute (type_die, type);
24217 add_alignment_attribute (type_die, type);
24218 if (TYPE_STUB_DECL (type) != NULL_TREE)
24219 {
24220 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
24221 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
24222 }
24223
24224 /* If the first reference to this type was as the return type of an
24225 inline function, then it may not have a parent. Fix this now. */
24226 if (type_die->die_parent == NULL)
24227 add_child_die (scope_die, type_die);
24228
24229 push_decl_scope (type);
24230 gen_member_die (type, type_die);
24231 pop_decl_scope ();
24232
24233 add_gnat_descriptive_type_attribute (type_die, type, context_die);
24234 if (TYPE_ARTIFICIAL (type))
24235 add_AT_flag (type_die, DW_AT_artificial, 1);
24236
24237 /* GNU extension: Record what type our vtable lives in. */
24238 if (TYPE_VFIELD (type))
24239 {
24240 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
24241
24242 gen_type_die (vtype, context_die);
24243 add_AT_die_ref (type_die, DW_AT_containing_type,
24244 lookup_type_die (vtype));
24245 }
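/* For instance (a hedged sketch): given "struct B { virtual ~B (); };
   struct D : B { };", D's vtable pointer field is the one introduced by
   B, so DECL_FCONTEXT yields B and D's DIE gets a DW_AT_containing_type
   reference to B's DIE. */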
24246 }
24247 else
24248 {
24249 add_AT_flag (type_die, DW_AT_declaration, 1);
24250
24251 /* We don't need to do this for function-local types. */
24252 if (TYPE_STUB_DECL (type)
24253 && ! decl_function_context (TYPE_STUB_DECL (type)))
24254 vec_safe_push (incomplete_types, type);
24255 }
24256
24257 if (get_AT (type_die, DW_AT_name))
24258 add_pubtype (type, type_die);
24259 }
24260
24261 /* Generate a DIE for a subroutine _type_. */
24262
24263 static void
24264 gen_subroutine_type_die (tree type, dw_die_ref context_die)
24265 {
24266 tree return_type = TREE_TYPE (type);
24267 dw_die_ref subr_die
24268 = new_die (DW_TAG_subroutine_type,
24269 scope_die_for (type, context_die), type);
24270
24271 equate_type_number_to_die (type, subr_die);
24272 add_prototyped_attribute (subr_die, type);
24273 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
24274 context_die);
24275 add_alignment_attribute (subr_die, type);
24276 gen_formal_types_die (type, subr_die);
24277
24278 if (get_AT (subr_die, DW_AT_name))
24279 add_pubtype (type, subr_die);
24280 if ((dwarf_version >= 5 || !dwarf_strict)
24281 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
24282 add_AT_flag (subr_die, DW_AT_reference, 1);
24283 if ((dwarf_version >= 5 || !dwarf_strict)
24284 && lang_hooks.types.type_dwarf_attribute (type,
24285 DW_AT_rvalue_reference) != -1)
24286 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
24287 }
24288
24289 /* Generate a DIE for a type definition. */
24290
24291 static void
24292 gen_typedef_die (tree decl, dw_die_ref context_die)
24293 {
24294 dw_die_ref type_die;
24295 tree origin;
24296
24297 if (TREE_ASM_WRITTEN (decl))
24298 {
24299 if (DECL_ORIGINAL_TYPE (decl))
24300 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
24301 return;
24302 }
24303
24304 TREE_ASM_WRITTEN (decl) = 1;
24305 type_die = new_die (DW_TAG_typedef, context_die, decl);
24306 origin = decl_ultimate_origin (decl);
24307 if (origin != NULL)
24308 add_abstract_origin_attribute (type_die, origin);
24309 else
24310 {
24311 tree type = TREE_TYPE (decl);
24312
24313 if (type == error_mark_node)
24314 return;
24315
24316 add_name_and_src_coords_attributes (type_die, decl);
24317 if (DECL_ORIGINAL_TYPE (decl))
24318 {
24319 type = DECL_ORIGINAL_TYPE (decl);
24320
24321 if (type == error_mark_node)
24322 return;
24323
24324 gcc_assert (type != TREE_TYPE (decl));
24325 equate_type_number_to_die (TREE_TYPE (decl), type_die);
24326 }
24327 else
24328 {
24329 if (is_naming_typedef_decl (TYPE_NAME (type)))
24330 {
24331 /* Here, we are in the case of decl being a typedef naming
24332 an anonymous type, e.g.:
24333 typedef struct {...} foo;
24334 In that case TREE_TYPE (decl) is not a typedef variant
24335 type and TYPE_NAME of the anonymous type is set to the
24336 TYPE_DECL of the typedef. This construct is emitted by
24337 the C++ FE.
24338
24339 TYPE is the anonymous struct named by the typedef
24340 DECL. As we need the DW_AT_type attribute of the
24341 DW_TAG_typedef to point to the DIE of TYPE, let's
24342 generate that DIE right away. add_type_attribute
24343 called below will then pick (via lookup_type_die) that
24344 anonymous struct DIE. */
24345 if (!TREE_ASM_WRITTEN (type))
24346 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
24347
24348 /* This is a GNU Extension. We are adding a
24349 DW_AT_linkage_name attribute to the DIE of the
24350 anonymous struct TYPE. The value of that attribute
24351 is the name of the typedef decl naming the anonymous
24352 struct. This greatly eases the work of consumers of
24353 this debug info. */
24354 add_linkage_name_raw (lookup_type_die (type), decl);
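/* Approximate shape of the resulting DWARF for
   "typedef struct { int i; } foo;" (a sketch; attribute order and forms
   vary):

     DW_TAG_structure_type
       DW_AT_linkage_name ("foo", the typedef's name)
       DW_TAG_member ("i")
     DW_TAG_typedef
       DW_AT_name ("foo")
       DW_AT_type -> the DW_TAG_structure_type above */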
24355 }
24356 }
24357
24358 add_type_attribute (type_die, type, decl_quals (decl), false,
24359 context_die);
24360
24361 if (is_naming_typedef_decl (decl))
24362 /* We want that all subsequent calls to lookup_type_die with
24363 TYPE in argument yield the DW_TAG_typedef we have just
24364 created. */
24365 equate_type_number_to_die (type, type_die);
24366
24367 type = TREE_TYPE (decl);
24368
24369 add_alignment_attribute (type_die, type);
24370
24371 add_accessibility_attribute (type_die, decl);
24372 }
24373
24374 if (DECL_ABSTRACT_P (decl))
24375 equate_decl_number_to_die (decl, type_die);
24376
24377 if (get_AT (type_die, DW_AT_name))
24378 add_pubtype (decl, type_die);
24379 }
24380
24381 /* Generate a DIE for a struct, class, enum or union type. */
24382
24383 static void
24384 gen_tagged_type_die (tree type,
24385 dw_die_ref context_die,
24386 enum debug_info_usage usage)
24387 {
24388 int need_pop;
24389
24390 if (type == NULL_TREE
24391 || !is_tagged_type (type))
24392 return;
24393
24394 if (TREE_ASM_WRITTEN (type))
24395 need_pop = 0;
24396 /* If this is a nested type whose containing class hasn't been written
24397 out yet, writing it out will cover this one, too. This does not apply
24398 to instantiations of member class templates; they need to be added to
24399 the containing class as they are generated. FIXME: This hurts the
24400 idea of combining type decls from multiple TUs, since we can't predict
24401 what set of template instantiations we'll get. */
24402 else if (TYPE_CONTEXT (type)
24403 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
24404 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
24405 {
24406 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
24407
24408 if (TREE_ASM_WRITTEN (type))
24409 return;
24410
24411 /* If that failed, attach ourselves to the stub. */
24412 push_decl_scope (TYPE_CONTEXT (type));
24413 context_die = lookup_type_die (TYPE_CONTEXT (type));
24414 need_pop = 1;
24415 }
24416 else if (TYPE_CONTEXT (type) != NULL_TREE
24417 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
24418 {
24419 /* If this type is local to a function that hasn't been written
24420 out yet, use a NULL context for now; it will be fixed up in
24421 decls_for_scope. */
24422 context_die = lookup_decl_die (TYPE_CONTEXT (type));
24423 /* A declaration DIE doesn't count; nested types need to go in the
24424 specification. */
24425 if (context_die && is_declaration_die (context_die))
24426 context_die = NULL;
24427 need_pop = 0;
24428 }
24429 else
24430 {
24431 context_die = declare_in_namespace (type, context_die);
24432 need_pop = 0;
24433 }
24434
24435 if (TREE_CODE (type) == ENUMERAL_TYPE)
24436 {
24437 /* This might have been written out by the call to
24438 declare_in_namespace. */
24439 if (!TREE_ASM_WRITTEN (type))
24440 gen_enumeration_type_die (type, context_die);
24441 }
24442 else
24443 gen_struct_or_union_type_die (type, context_die, usage);
24444
24445 if (need_pop)
24446 pop_decl_scope ();
24447
24448 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
24449 it up if it is ever completed. gen_*_type_die will set it for us
24450 when appropriate. */
24451 }
24452
24453 /* Generate a type description DIE. */
24454
24455 static void
24456 gen_type_die_with_usage (tree type, dw_die_ref context_die,
24457 enum debug_info_usage usage)
24458 {
24459 struct array_descr_info info;
24460
24461 if (type == NULL_TREE || type == error_mark_node)
24462 return;
24463
24464 if (flag_checking && type)
24465 verify_type (type);
24466
24467 if (TYPE_NAME (type) != NULL_TREE
24468 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
24469 && is_redundant_typedef (TYPE_NAME (type))
24470 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
24471 /* The DECL of this type is a typedef we don't want to emit debug
24472 info for, but we want debug info for its underlying type.
24473 This can happen for, e.g., the injected-class-name of a C++
24474 type. */
24475 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
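/* Illustration (hedged): within "struct S { };" the name S is also
   visible as the injected-class-name S::S, which the C++ front end
   models as an artificial, redundant member typedef; we skip that
   typedef here and describe the underlying class type instead. */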
24476
24477 /* If TYPE is a typedef type variant, let's generate debug info
24478 for the parent typedef which TYPE is a type of. */
24479 if (typedef_variant_p (type))
24480 {
24481 if (TREE_ASM_WRITTEN (type))
24482 return;
24483
24484 /* Prevent broken recursion; we can't hand off to the same type. */
24485 gcc_assert (DECL_ORIGINAL_TYPE (TYPE_NAME (type)) != type);
24486
24487 /* Give typedefs the right scope. */
24488 context_die = scope_die_for (type, context_die);
24489
24490 TREE_ASM_WRITTEN (type) = 1;
24491
24492 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
24493 return;
24494 }
24495
24496 /* If type is an anonymous tagged type named by a typedef, let's
24497 generate debug info for the typedef. */
24498 if (is_naming_typedef_decl (TYPE_NAME (type)))
24499 {
24500 /* Use the DIE of the containing namespace as the parent DIE of
24501 the type description DIE we want to generate. */
24502 if (DECL_CONTEXT (TYPE_NAME (type))
24503 && TREE_CODE (DECL_CONTEXT (TYPE_NAME (type))) == NAMESPACE_DECL)
24504 context_die = get_context_die (DECL_CONTEXT (TYPE_NAME (type)));
24505
24506 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
24507 return;
24508 }
24509
24510 if (lang_hooks.types.get_debug_type)
24511 {
24512 tree debug_type = lang_hooks.types.get_debug_type (type);
24513
24514 if (debug_type != NULL_TREE && debug_type != type)
24515 {
24516 gen_type_die_with_usage (debug_type, context_die, usage);
24517 return;
24518 }
24519 }
24520
24521 /* We are going to output a DIE to represent the unqualified version
24522 of this type (i.e. without any const or volatile qualifiers) so
24523 get the main variant (i.e. the unqualified version) of this type
24524 now. (Vectors and arrays are special because the debugging info is in the
24525 cloned type itself. Similarly function/method types can contain extra
24526 ref-qualification). */
24527 if (TREE_CODE (type) == FUNCTION_TYPE
24528 || TREE_CODE (type) == METHOD_TYPE)
24529 {
24530 /* For function/method types, can't use type_main_variant here,
24531 because that can have different ref-qualifiers for C++,
24532 but try to canonicalize. */
24533 tree main = TYPE_MAIN_VARIANT (type);
24534 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
24535 {
24536 if (check_base_type (t, main) && check_lang_type (t, type))
24537 {
24538 type = t;
24539 break;
24540 }
24541 }
24542 }
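/* For example (a sketch): "struct S { void f () &; void g () &&; };"
   yields METHOD_TYPE variants that differ only in their ref-qualifier;
   the loop above picks the matching variant instead of collapsing to
   the main variant, so DW_AT_reference / DW_AT_rvalue_reference can be
   emitted correctly by gen_subroutine_type_die. */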
24543 else if (TREE_CODE (type) != VECTOR_TYPE
24544 && TREE_CODE (type) != ARRAY_TYPE)
24545 type = type_main_variant (type);
24546
24547 /* If this is an array type with hidden descriptor, handle it first. */
24548 if (!TREE_ASM_WRITTEN (type)
24549 && lang_hooks.types.get_array_descr_info)
24550 {
24551 memset (&info, 0, sizeof (info));
24552 if (lang_hooks.types.get_array_descr_info (type, &info))
24553 {
24554 /* Fortran sometimes emits array types with no dimension. */
24555 gcc_assert (info.ndimensions >= 0
24556 && (info.ndimensions
24557 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
24558 gen_descr_array_type_die (type, &info, context_die);
24559 TREE_ASM_WRITTEN (type) = 1;
24560 return;
24561 }
24562 }
24563
24564 if (TREE_ASM_WRITTEN (type))
24565 {
24566 /* Variable-length types may be incomplete even if
24567 TREE_ASM_WRITTEN. For such types, fall through to
24568 gen_array_type_die() and possibly fill in
24569 DW_AT_{upper,lower}_bound attributes. */
24570 if ((TREE_CODE (type) != ARRAY_TYPE
24571 && TREE_CODE (type) != RECORD_TYPE
24572 && TREE_CODE (type) != UNION_TYPE
24573 && TREE_CODE (type) != QUAL_UNION_TYPE)
24574 || !variably_modified_type_p (type, NULL))
24575 return;
24576 }
24577
24578 switch (TREE_CODE (type))
24579 {
24580 case ERROR_MARK:
24581 break;
24582
24583 case POINTER_TYPE:
24584 case REFERENCE_TYPE:
24585 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
24586 ensures that the gen_type_die recursion will terminate even if the
24587 type is recursive. Recursive types are possible in Ada. */
24588 /* ??? We could perhaps do this for all types before the switch
24589 statement. */
24590 TREE_ASM_WRITTEN (type) = 1;
24591
24592 /* For these types, all that is required is that we output a DIE (or a
24593 set of DIEs) to represent the "basis" type. */
24594 gen_type_die_with_usage (TREE_TYPE (type), context_die,
24595 DINFO_USAGE_IND_USE);
24596 break;
24597
24598 case OFFSET_TYPE:
24599 /* This code is used for C++ pointer-to-data-member types.
24600 Output a description of the relevant class type. */
24601 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
24602 DINFO_USAGE_IND_USE);
24603
24604 /* Output a description of the type of the object pointed to. */
24605 gen_type_die_with_usage (TREE_TYPE (type), context_die,
24606 DINFO_USAGE_IND_USE);
24607
24608 /* Now output a DIE to represent this pointer-to-data-member type
24609 itself. */
24610 gen_ptr_to_mbr_type_die (type, context_die);
24611 break;
24612
24613 case FUNCTION_TYPE:
24614 /* Force out return type (in case it wasn't forced out already). */
24615 gen_type_die_with_usage (TREE_TYPE (type), context_die,
24616 DINFO_USAGE_DIR_USE);
24617 gen_subroutine_type_die (type, context_die);
24618 break;
24619
24620 case METHOD_TYPE:
24621 /* Force out return type (in case it wasn't forced out already). */
24622 gen_type_die_with_usage (TREE_TYPE (type), context_die,
24623 DINFO_USAGE_DIR_USE);
24624 gen_subroutine_type_die (type, context_die);
24625 break;
24626
24627 case ARRAY_TYPE:
24628 case VECTOR_TYPE:
24629 gen_array_type_die (type, context_die);
24630 break;
24631
24632 case ENUMERAL_TYPE:
24633 case RECORD_TYPE:
24634 case UNION_TYPE:
24635 case QUAL_UNION_TYPE:
24636 gen_tagged_type_die (type, context_die, usage);
24637 return;
24638
24639 case VOID_TYPE:
24640 case INTEGER_TYPE:
24641 case REAL_TYPE:
24642 case FIXED_POINT_TYPE:
24643 case COMPLEX_TYPE:
24644 case BOOLEAN_TYPE:
24645 case POINTER_BOUNDS_TYPE:
24646 /* No DIEs needed for fundamental types. */
24647 break;
24648
24649 case NULLPTR_TYPE:
24650 case LANG_TYPE:
24651 /* Just use DW_TAG_unspecified_type. */
24652 {
24653 dw_die_ref type_die = lookup_type_die (type);
24654 if (type_die == NULL)
24655 {
24656 tree name = TYPE_IDENTIFIER (type);
24657 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
24658 type);
24659 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
24660 equate_type_number_to_die (type, type_die);
24661 }
24662 }
24663 break;
24664
24665 default:
24666 if (is_cxx_auto (type))
24667 {
24668 tree name = TYPE_IDENTIFIER (type);
24669 dw_die_ref *die = (name == get_identifier ("auto")
24670 ? &auto_die : &decltype_auto_die);
24671 if (!*die)
24672 {
24673 *die = new_die (DW_TAG_unspecified_type,
24674 comp_unit_die (), NULL_TREE);
24675 add_name_attribute (*die, IDENTIFIER_POINTER (name));
24676 }
24677 equate_type_number_to_die (type, *die);
24678 break;
24679 }
24680 gcc_unreachable ();
24681 }
24682
24683 TREE_ASM_WRITTEN (type) = 1;
24684 }
24685
24686 static void
24687 gen_type_die (tree type, dw_die_ref context_die)
24688 {
24689 if (type != error_mark_node)
24690 {
24691 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
24692 if (flag_checking)
24693 {
24694 dw_die_ref die = lookup_type_die (type);
24695 if (die)
24696 check_die (die);
24697 }
24698 }
24699 }
24700
24701 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
24702 things which are local to the given block. */
24703
24704 static void
24705 gen_block_die (tree stmt, dw_die_ref context_die)
24706 {
24707 int must_output_die = 0;
24708 bool inlined_func;
24709
24710 /* Ignore blocks that are NULL. */
24711 if (stmt == NULL_TREE)
24712 return;
24713
24714 inlined_func = inlined_function_outer_scope_p (stmt);
24715
24716 /* If the block is one fragment of a non-contiguous block, do not
24717 process the variables, since they will have been done by the
24718 origin block. Do process subblocks. */
24719 if (BLOCK_FRAGMENT_ORIGIN (stmt))
24720 {
24721 tree sub;
24722
24723 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
24724 gen_block_die (sub, context_die);
24725
24726 return;
24727 }
24728
24729 /* Determine if we need to output any Dwarf DIEs at all to represent this
24730 block. */
24731 if (inlined_func)
24732 /* The outer scopes for inlinings *must* always be represented. We
24733 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
24734 must_output_die = 1;
24735 else
24736 {
24737 /* Determine if this block directly contains any "significant"
24738 local declarations which we will need to output DIEs for. */
24739 if (debug_info_level > DINFO_LEVEL_TERSE)
24740 /* We are not in terse mode so *any* local declaration counts
24741 as being a "significant" one. */
24742 must_output_die = ((BLOCK_VARS (stmt) != NULL
24743 || BLOCK_NUM_NONLOCALIZED_VARS (stmt))
24744 && (TREE_USED (stmt)
24745 || TREE_ASM_WRITTEN (stmt)
24746 || BLOCK_ABSTRACT (stmt)));
24747 else if ((TREE_USED (stmt)
24748 || TREE_ASM_WRITTEN (stmt)
24749 || BLOCK_ABSTRACT (stmt))
24750 && !dwarf2out_ignore_block (stmt))
24751 must_output_die = 1;
24752 }
24753
24754 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
24755 DIE for any block which contains no significant local declarations at
24756 all. Rather, in such cases we just call `decls_for_scope' so that any
24757 needed Dwarf info for any sub-blocks will get properly generated. Note
24758 that in terse mode, our definition of what constitutes a "significant"
24759 local declaration gets restricted to include only inlined function
24760 instances and local (nested) function definitions. */
24761 if (must_output_die)
24762 {
24763 if (inlined_func)
24764 {
24765 /* If STMT block is abstract, that means we have been called
24766 indirectly from dwarf2out_abstract_function.
24767 That function rightfully marks the descendant blocks (of
24768 the abstract function it is dealing with) as being abstract,
24769 precisely to prevent us from emitting any
24770 DW_TAG_inlined_subroutine DIE as a descendant
24771 of an abstract function instance. So in that case, we should
24772 not call gen_inlined_subroutine_die.
24773
24774 Later though, when cgraph asks dwarf2out to emit info
24775 for the concrete instance of the function decl into which
24776 the concrete instance of STMT got inlined, the latter will lead
24777 to the generation of a DW_TAG_inlined_subroutine DIE. */
24778 if (! BLOCK_ABSTRACT (stmt))
24779 gen_inlined_subroutine_die (stmt, context_die);
24780 }
24781 else
24782 gen_lexical_block_die (stmt, context_die);
24783 }
24784 else
24785 decls_for_scope (stmt, context_die);
24786 }
24787
24788 /* Process variable DECL (or variable with origin ORIGIN) within
24789 block STMT and add it to CONTEXT_DIE. */
24790 static void
24791 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
24792 {
24793 dw_die_ref die;
24794 tree decl_or_origin = decl ? decl : origin;
24795
24796 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
24797 die = lookup_decl_die (decl_or_origin);
24798 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
24799 {
24800 if (TYPE_DECL_IS_STUB (decl_or_origin))
24801 die = lookup_type_die (TREE_TYPE (decl_or_origin));
24802 else
24803 die = lookup_decl_die (decl_or_origin);
24804 /* Avoid re-creating the DIE late if it was optimized as unused early. */
24805 if (! die && ! early_dwarf)
24806 return;
24807 }
24808 else
24809 die = NULL;
24810
24811 if (die != NULL && die->die_parent == NULL)
24812 add_child_die (context_die, die);
24813 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
24814 {
24815 if (early_dwarf)
24816 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
24817 stmt, context_die);
24818 }
24819 else
24820 gen_decl_die (decl, origin, NULL, context_die);
24821 }
24822
24823 /* Generate all of the decls declared within a given scope and (recursively)
24824 all of its sub-blocks. */
24825
24826 static void
24827 decls_for_scope (tree stmt, dw_die_ref context_die)
24828 {
24829 tree decl;
24830 unsigned int i;
24831 tree subblocks;
24832
24833 /* Ignore NULL blocks. */
24834 if (stmt == NULL_TREE)
24835 return;
24836
24837 /* Output the DIEs to represent all of the data objects and typedefs
24838 declared directly within this block but not within any nested
24839 sub-blocks. Also, nested function and tag DIEs have been
24840 generated with a parent of NULL; fix that up now. We don't
24841 have to do this if we're at -g1. */
24842 if (debug_info_level > DINFO_LEVEL_TERSE)
24843 {
24844 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
24845 process_scope_var (stmt, decl, NULL_TREE, context_die);
24846 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
24847 origin - avoid doing this twice as we have no good way to see
24848 if we've done it once already. */
24849 if (! early_dwarf)
24850 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
24851 process_scope_var (stmt, NULL, BLOCK_NONLOCALIZED_VAR (stmt, i),
24852 context_die);
24853 }
24854
24855 /* Even if we're at -g1, we need to process the subblocks in order to get
24856 inlined call information. */
24857
24858 /* Output the DIEs to represent all sub-blocks (and the items declared
24859 therein) of this block. */
24860 for (subblocks = BLOCK_SUBBLOCKS (stmt);
24861 subblocks != NULL;
24862 subblocks = BLOCK_CHAIN (subblocks))
24863 gen_block_die (subblocks, context_die);
24864 }
24865
24866 /* Is this a typedef we can avoid emitting? */
24867
24868 bool
24869 is_redundant_typedef (const_tree decl)
24870 {
24871 if (TYPE_DECL_IS_STUB (decl))
24872 return true;
24873
24874 if (DECL_ARTIFICIAL (decl)
24875 && DECL_CONTEXT (decl)
24876 && is_tagged_type (DECL_CONTEXT (decl))
24877 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
24878 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
24879 /* Also ignore the artificial member typedef for the class name. */
24880 return true;
24881
24882 return false;
24883 }
24884
24885 /* Return TRUE if TYPE is a typedef that names a type for linkage
24886 purposes. This kind of typedef is produced by the C++ FE for
24887 constructs like:
24888
24889 typedef struct {...} foo;
24890
24891 In that case, there is no typedef variant type produced for foo.
24892 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
24893 struct type. */
24894
24895 static bool
24896 is_naming_typedef_decl (const_tree decl)
24897 {
24898 if (decl == NULL_TREE
24899 || TREE_CODE (decl) != TYPE_DECL
24900 || DECL_NAMELESS (decl)
24901 || !is_tagged_type (TREE_TYPE (decl))
24902 || DECL_IS_BUILTIN (decl)
24903 || is_redundant_typedef (decl)
24904 /* It looks like Ada produces TYPE_DECLs that are very similar
24905 to C++ naming typedefs but that have different
24906 semantics. Let's be specific to C++ for now. */
24907 || !is_cxx (decl))
24908 return FALSE;
24909
24910 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
24911 && TYPE_NAME (TREE_TYPE (decl)) == decl
24912 && (TYPE_STUB_DECL (TREE_TYPE (decl))
24913 != TYPE_NAME (TREE_TYPE (decl))));
24914 }
24915
24916 /* Looks up the DIE for a context. */
24917
24918 static inline dw_die_ref
24919 lookup_context_die (tree context)
24920 {
24921 if (context)
24922 {
24923 /* Find die that represents this context. */
24924 if (TYPE_P (context))
24925 {
24926 context = TYPE_MAIN_VARIANT (context);
24927 dw_die_ref ctx = lookup_type_die (context);
24928 if (!ctx)
24929 return NULL;
24930 return strip_naming_typedef (context, ctx);
24931 }
24932 else
24933 return lookup_decl_die (context);
24934 }
24935 return comp_unit_die ();
24936 }
24937
24938 /* Returns the DIE for a context. */
24939
24940 static inline dw_die_ref
24941 get_context_die (tree context)
24942 {
24943 if (context)
24944 {
24945 /* Find die that represents this context. */
24946 if (TYPE_P (context))
24947 {
24948 context = TYPE_MAIN_VARIANT (context);
24949 return strip_naming_typedef (context, force_type_die (context));
24950 }
24951 else
24952 return force_decl_die (context);
24953 }
24954 return comp_unit_die ();
24955 }
24956
24957 /* Returns the DIE for decl. A DIE will always be returned. */
24958
24959 static dw_die_ref
24960 force_decl_die (tree decl)
24961 {
24962 dw_die_ref decl_die;
24963 unsigned saved_external_flag;
24964 tree save_fn = NULL_TREE;
24965 decl_die = lookup_decl_die (decl);
24966 if (!decl_die)
24967 {
24968 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
24969
24970 decl_die = lookup_decl_die (decl);
24971 if (decl_die)
24972 return decl_die;
24973
24974 switch (TREE_CODE (decl))
24975 {
24976 case FUNCTION_DECL:
24977 /* Clear current_function_decl, so that gen_subprogram_die thinks
24978 that this is a declaration. At this point, we just want to force
24979 a declaration DIE. */
24980 save_fn = current_function_decl;
24981 current_function_decl = NULL_TREE;
24982 gen_subprogram_die (decl, context_die);
24983 current_function_decl = save_fn;
24984 break;
24985
24986 case VAR_DECL:
24987 /* Set external flag to force declaration die. Restore it after
24988 gen_decl_die() call. */
24989 saved_external_flag = DECL_EXTERNAL (decl);
24990 DECL_EXTERNAL (decl) = 1;
24991 gen_decl_die (decl, NULL, NULL, context_die);
24992 DECL_EXTERNAL (decl) = saved_external_flag;
24993 break;
24994
24995 case NAMESPACE_DECL:
24996 if (dwarf_version >= 3 || !dwarf_strict)
24997 dwarf2out_decl (decl);
24998 else
24999 /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace. */
25000 decl_die = comp_unit_die ();
25001 break;
25002
25003 case TRANSLATION_UNIT_DECL:
25004 decl_die = comp_unit_die ();
25005 break;
25006
25007 default:
25008 gcc_unreachable ();
25009 }
25010
25011 /* We should be able to find the DIE now. */
25012 if (!decl_die)
25013 decl_die = lookup_decl_die (decl);
25014 gcc_assert (decl_die);
25015 }
25016
25017 return decl_die;
25018 }
25019
25020 /* Returns the DIE for TYPE, that must not be a base type. A DIE is
25021 always returned. */
25022
25023 static dw_die_ref
25024 force_type_die (tree type)
25025 {
25026 dw_die_ref type_die;
25027
25028 type_die = lookup_type_die (type);
25029 if (!type_die)
25030 {
25031 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
25032
25033 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
25034 false, context_die);
25035 gcc_assert (type_die);
25036 }
25037 return type_die;
25038 }
25039
25040 /* Force out any required namespaces to be able to output DECL,
25041 and return the new context_die for it, if it's changed. */
25042
25043 static dw_die_ref
25044 setup_namespace_context (tree thing, dw_die_ref context_die)
25045 {
25046 tree context = (DECL_P (thing)
25047 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
25048 if (context && TREE_CODE (context) == NAMESPACE_DECL)
25049 /* Force out the namespace. */
25050 context_die = force_decl_die (context);
25051
25052 return context_die;
25053 }
25054
25055 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
25056 type) within its namespace, if appropriate.
25057
25058 For compatibility with older debuggers, namespace DIEs only contain
25059 declarations; all definitions are emitted at CU scope, with
25060 DW_AT_specification pointing to the declaration (like with class
25061 members). */
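/* Rough picture of the resulting layout (a sketch, not verbatim output):
   for "namespace N { int v; }" the namespace DIE holds only a
   declaration and the definition lives at CU scope:

     DW_TAG_namespace ("N")
       DW_TAG_variable ("v", DW_AT_declaration)
     DW_TAG_variable (DW_AT_specification -> the declaration above,
                      plus location information) */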
25062
25063 static dw_die_ref
25064 declare_in_namespace (tree thing, dw_die_ref context_die)
25065 {
25066 dw_die_ref ns_context;
25067
25068 if (debug_info_level <= DINFO_LEVEL_TERSE)
25069 return context_die;
25070
25071 /* External declarations in the local scope only need to be emitted
25072 once, not once in the namespace and once in the scope.
25073
25074 This avoids declaring the `extern' below in the
25075 namespace DIE as well as in the innermost scope:
25076
25077 namespace S
25078 {
25079 int i=5;
25080 int foo()
25081 {
25082 int i=8;
25083 extern int i;
25084 return i;
25085 }
25086 }
25087 */
25088 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
25089 return context_die;
25090
25091 /* If this decl is from an inlined function, then don't try to emit it in its
25092 namespace, as we will get confused. It would have already been emitted
25093 when the abstract instance of the inline function was emitted anyway. */
25094 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
25095 return context_die;
25096
25097 ns_context = setup_namespace_context (thing, context_die);
25098
25099 if (ns_context != context_die)
25100 {
25101 if (is_fortran ())
25102 return ns_context;
25103 if (DECL_P (thing))
25104 gen_decl_die (thing, NULL, NULL, ns_context);
25105 else
25106 gen_type_die (thing, ns_context);
25107 }
25108 return context_die;
25109 }
25110
25111 /* Generate a DIE for a namespace or namespace alias. */
25112
25113 static void
25114 gen_namespace_die (tree decl, dw_die_ref context_die)
25115 {
25116 dw_die_ref namespace_die;
25117
25118 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
25119 they are an alias of. */
25120 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
25121 {
25122 /* Output a real namespace or module. */
25123 context_die = setup_namespace_context (decl, comp_unit_die ());
25124 namespace_die = new_die (is_fortran ()
25125 ? DW_TAG_module : DW_TAG_namespace,
25126 context_die, decl);
25127 /* For Fortran modules defined in a different CU, don't add src coords. */
25128 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
25129 {
25130 const char *name = dwarf2_name (decl, 0);
25131 if (name)
25132 add_name_attribute (namespace_die, name);
25133 }
25134 else
25135 add_name_and_src_coords_attributes (namespace_die, decl);
25136 if (DECL_EXTERNAL (decl))
25137 add_AT_flag (namespace_die, DW_AT_declaration, 1);
25138 equate_decl_number_to_die (decl, namespace_die);
25139 }
25140 else
25141 {
25142 /* Output a namespace alias. */
25143
25144 /* Force out the namespace we are an alias of, if necessary. */
25145 dw_die_ref origin_die
25146 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
25147
25148 if (DECL_FILE_SCOPE_P (decl)
25149 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
25150 context_die = setup_namespace_context (decl, comp_unit_die ());
25151 /* Now create the namespace alias DIE. */
25152 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
25153 add_name_and_src_coords_attributes (namespace_die, decl);
25154 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
25155 equate_decl_number_to_die (decl, namespace_die);
25156 }
25157 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
25158 if (want_pubnames ())
25159 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
25160 }
25161
25162 /* Generate Dwarf debug information for a decl described by DECL.
25163 The return value is currently only meaningful for PARM_DECLs,
25164 for all other decls it returns NULL.
25165
25166 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
25167 It can be NULL otherwise. */
25168
25169 static dw_die_ref
25170 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
25171 dw_die_ref context_die)
25172 {
25173 tree decl_or_origin = decl ? decl : origin;
25174 tree class_origin = NULL, ultimate_origin;
25175
25176 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
25177 return NULL;
25178
25179 /* Ignore pointer bounds decls. */
25180 if (DECL_P (decl_or_origin)
25181 && TREE_TYPE (decl_or_origin)
25182 && POINTER_BOUNDS_P (decl_or_origin))
25183 return NULL;
25184
25185 switch (TREE_CODE (decl_or_origin))
25186 {
25187 case ERROR_MARK:
25188 break;
25189
25190 case CONST_DECL:
25191 if (!is_fortran () && !is_ada ())
25192 {
25193 /* The individual enumerators of an enum type get output when we output
25194 the Dwarf representation of the relevant enum type itself. */
25195 break;
25196 }
25197
25198 /* Emit its type. */
25199 gen_type_die (TREE_TYPE (decl), context_die);
25200
25201 /* And its containing namespace. */
25202 context_die = declare_in_namespace (decl, context_die);
25203
25204 gen_const_die (decl, context_die);
25205 break;
25206
25207 case FUNCTION_DECL:
25208 /* Don't output any DIEs to represent mere function declarations,
25209 unless they are class members or explicit block externs. */
25210 if (DECL_INITIAL (decl_or_origin) == NULL_TREE
25211 && DECL_FILE_SCOPE_P (decl_or_origin)
25212 && (current_function_decl == NULL_TREE
25213 || DECL_ARTIFICIAL (decl_or_origin)))
25214 break;
25215
25216 #if 0
25217 /* FIXME */
25218 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
25219 on local redeclarations of global functions. That seems broken. */
25220 if (current_function_decl != decl)
25221 /* This is only a declaration. */;
25222 #endif
25223
25224 /* If we're emitting a clone, emit info for the abstract instance. */
25225 if (origin || DECL_ORIGIN (decl) != decl)
25226 dwarf2out_abstract_function (origin
25227 ? DECL_ORIGIN (origin)
25228 : DECL_ABSTRACT_ORIGIN (decl));
25229
25230 /* If we're emitting an out-of-line copy of an inline function,
25231 emit info for the abstract instance and set up to refer to it. */
25232 else if (cgraph_function_possibly_inlined_p (decl)
25233 && ! DECL_ABSTRACT_P (decl)
25234 && ! class_or_namespace_scope_p (context_die)
25235 /* dwarf2out_abstract_function won't emit a die if this is just
25236 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
25237 that case, because that works only if we have a die. */
25238 && DECL_INITIAL (decl) != NULL_TREE)
25239 {
25240 dwarf2out_abstract_function (decl);
25241 set_decl_origin_self (decl);
25242 }
25243
25244 /* Otherwise we're emitting the primary DIE for this decl. */
25245 else if (debug_info_level > DINFO_LEVEL_TERSE)
25246 {
25247 /* Before we describe the FUNCTION_DECL itself, make sure that we
25248 have its containing type. */
25249 if (!origin)
25250 origin = decl_class_context (decl);
25251 if (origin != NULL_TREE)
25252 gen_type_die (origin, context_die);
25253
25254 /* And its return type. */
25255 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
25256
25257 /* And its virtual context. */
25258 if (DECL_VINDEX (decl) != NULL_TREE)
25259 gen_type_die (DECL_CONTEXT (decl), context_die);
25260
25261 /* Make sure we have a member DIE for decl. */
25262 if (origin != NULL_TREE)
25263 gen_type_die_for_member (origin, decl, context_die);
25264
25265 /* And its containing namespace. */
25266 context_die = declare_in_namespace (decl, context_die);
25267 }
25268
25269 /* Now output a DIE to represent the function itself. */
25270 if (decl)
25271 gen_subprogram_die (decl, context_die);
25272 break;
25273
25274 case TYPE_DECL:
25275 /* If we are in terse mode, don't generate any DIEs to represent any
25276 actual typedefs. */
25277 if (debug_info_level <= DINFO_LEVEL_TERSE)
25278 break;
25279
25280 /* In the special case of a TYPE_DECL node representing the declaration
25281 of some type tag, if the given TYPE_DECL is marked as having been
25282 instantiated from some other (original) TYPE_DECL node (e.g. one which
25283 was generated within the original definition of an inline function) we
25284 used to generate a special (abbreviated) DW_TAG_structure_type,
25285 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
25286 should be actually referencing those DIEs, as variable DIEs with that
25287 type would be emitted already in the abstract origin, so it was always
25288 removed during unused type prunning. Don't add anything in this
25289 case. */
25290 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
25291 break;
25292
25293 if (is_redundant_typedef (decl))
25294 gen_type_die (TREE_TYPE (decl), context_die);
25295 else
25296 /* Output a DIE to represent the typedef itself. */
25297 gen_typedef_die (decl, context_die);
25298 break;
25299
25300 case LABEL_DECL:
25301 if (debug_info_level >= DINFO_LEVEL_NORMAL)
25302 gen_label_die (decl, context_die);
25303 break;
25304
25305 case VAR_DECL:
25306 case RESULT_DECL:
25307 /* If we are in terse mode, don't generate any DIEs to represent any
25308 variable declarations or definitions. */
25309 if (debug_info_level <= DINFO_LEVEL_TERSE)
25310 break;
25311
25312 /* Output any DIEs that are needed to specify the type of this data
25313 object. */
25314 if (decl_by_reference_p (decl_or_origin))
25315 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
25316 else
25317 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
25318
25319 /* And its containing type. */
25320 class_origin = decl_class_context (decl_or_origin);
25321 if (class_origin != NULL_TREE)
25322 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
25323
25324 /* And its containing namespace. */
25325 context_die = declare_in_namespace (decl_or_origin, context_die);
25326
25327 /* Now output the DIE to represent the data object itself. This gets
25328 complicated because of the possibility that the VAR_DECL really
25329 represents an inlined instance of a formal parameter for an inline
25330 function. */
25331 ultimate_origin = decl_ultimate_origin (decl_or_origin);
25332 if (ultimate_origin != NULL_TREE
25333 && TREE_CODE (ultimate_origin) == PARM_DECL)
25334 gen_formal_parameter_die (decl, origin,
25335 true /* Emit name attribute. */,
25336 context_die);
25337 else
25338 gen_variable_die (decl, origin, context_die);
25339 break;
25340
25341 case FIELD_DECL:
25342 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
25343 /* Ignore the nameless fields that are used to skip bits but handle C++
25344 anonymous unions and structs. */
25345 if (DECL_NAME (decl) != NULL_TREE
25346 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
25347 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
25348 {
25349 gen_type_die (member_declared_type (decl), context_die);
25350 gen_field_die (decl, ctx, context_die);
25351 }
25352 break;
25353
25354 case PARM_DECL:
25355 if (DECL_BY_REFERENCE (decl_or_origin))
25356 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
25357 else
25358 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
25359 return gen_formal_parameter_die (decl, origin,
25360 true /* Emit name attribute. */,
25361 context_die);
25362
25363 case NAMESPACE_DECL:
25364 if (dwarf_version >= 3 || !dwarf_strict)
25365 gen_namespace_die (decl, context_die);
25366 break;
25367
25368 case IMPORTED_DECL:
25369 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
25370 DECL_CONTEXT (decl), context_die);
25371 break;
25372
25373 case NAMELIST_DECL:
25374 gen_namelist_decl (DECL_NAME (decl), context_die,
25375 NAMELIST_DECL_ASSOCIATED_DECL (decl));
25376 break;
25377
25378 default:
25379 /* Probably some frontend-internal decl. Assume we don't care. */
25380 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
25381 break;
25382 }
25383
25384 return NULL;
25385 }
25386 \f
25387 /* Output initial debug information for global DECL. Called at the
25388 end of the parsing process.
25389
25390 This is the initial debug generation process. As such, the DIEs
25391 generated may be incomplete. A later debug generation pass
25392 (dwarf2out_late_global_decl) will augment the information generated
25393 in this pass (e.g., with complete location info). */
25394
25395 static void
25396 dwarf2out_early_global_decl (tree decl)
25397 {
25398 set_early_dwarf s;
25399
25400 /* gen_decl_die() will set DECL_ABSTRACT because
25401 cgraph_function_possibly_inlined_p() returns true. This in
25402 turn will cause DW_AT_inline attributes to be set.
25403
25404 This happens because at early dwarf generation, there is no
25405 cgraph information, causing cgraph_function_possibly_inlined_p()
25406 to return true. Trick cgraph_function_possibly_inlined_p()
25407 while we generate dwarf early. */
25408 bool save = symtab->global_info_ready;
25409 symtab->global_info_ready = true;
25410
25411 /* We don't handle TYPE_DECLs. If required, they'll be reached via
25412 other DECLs and they can point to template types or other things
25413 that dwarf2out can't handle when done via dwarf2out_decl. */
25414 if (TREE_CODE (decl) != TYPE_DECL
25415 && TREE_CODE (decl) != PARM_DECL)
25416 {
25417 tree save_fndecl = current_function_decl;
25418 if (TREE_CODE (decl) == FUNCTION_DECL)
25419 {
25420 /* No cfun means the symbol has no body, so there's nothing
25421 to emit. */
25422 if (!DECL_STRUCT_FUNCTION (decl))
25423 goto early_decl_exit;
25424
25425 /* For nested functions, make sure we have DIEs for the parents first
25426 so that all nested DIEs are generated at the proper scope in the
25427 first shot. */
25428 tree context = decl_function_context (decl);
25429 if (context != NULL && lookup_decl_die (context) == NULL)
25430 {
25431 current_function_decl = context;
25432 dwarf2out_decl (context);
25433 }
25434
25435 current_function_decl = decl;
25436 }
25437 dwarf2out_decl (decl);
25438 if (TREE_CODE (decl) == FUNCTION_DECL)
25439 current_function_decl = save_fndecl;
25440 }
25441 early_decl_exit:
25442 symtab->global_info_ready = save;
25443 }
25444
25445 /* Output debug information for global decl DECL. Called from
25446 toplev.c after compilation proper has finished. */
25447
25448 static void
25449 dwarf2out_late_global_decl (tree decl)
25450 {
25451 /* Fill-in any location information we were unable to determine
25452 on the first pass. */
25453 if (VAR_P (decl) && !POINTER_BOUNDS_P (decl))
25454 {
25455 dw_die_ref die = lookup_decl_die (decl);
25456
25457 /* We have to generate early debug late for LTO. */
25458 if (! die && in_lto_p)
25459 {
25460 dwarf2out_decl (decl);
25461 die = lookup_decl_die (decl);
25462 }
25463
25464 if (die)
25465 {
25466 /* We get called via the symtab code invoking late_global_decl
25467 for symbols that are optimized out. Do not add locations
25468 for those. */
25469 varpool_node *node = varpool_node::get (decl);
25470 if (! node || ! node->definition)
25471 tree_add_const_value_attribute_for_decl (die, decl);
25472 else
25473 add_location_or_const_value_attribute (die, decl, false);
25474 }
25475 }
25476 }
25477
25478 /* Output debug information for type decl DECL. Called from toplev.c
25479 and from language front ends (to record built-in types). */
25480 static void
25481 dwarf2out_type_decl (tree decl, int local)
25482 {
25483 if (!local)
25484 {
25485 set_early_dwarf s;
25486 dwarf2out_decl (decl);
25487 }
25488 }
25489
25490 /* Output debug information for imported module or decl DECL.
25491 NAME is non-NULL name in the lexical block if the decl has been renamed.
25492 LEXICAL_BLOCK is the lexical block (which TREE_CODE is a BLOCK)
25493 that DECL belongs to.
25494 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
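/* Hedged C++ illustration: at block scope, "using namespace N;" arrives
   here as an IMPORTED_DECL and produces a DW_TAG_imported_module DIE
   under the block's DIE, with DW_AT_import pointing at N's
   DW_TAG_namespace; "using N::x;" instead produces a
   DW_TAG_imported_declaration referring to x's DIE. */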
25495 static void
25496 dwarf2out_imported_module_or_decl_1 (tree decl,
25497 tree name,
25498 tree lexical_block,
25499 dw_die_ref lexical_block_die)
25500 {
25501 expanded_location xloc;
25502 dw_die_ref imported_die = NULL;
25503 dw_die_ref at_import_die;
25504
25505 if (TREE_CODE (decl) == IMPORTED_DECL)
25506 {
25507 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
25508 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
25509 gcc_assert (decl);
25510 }
25511 else
25512 xloc = expand_location (input_location);
25513
25514 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
25515 {
25516 at_import_die = force_type_die (TREE_TYPE (decl));
25517 /* For namespace N { typedef void T; } using N::T; base_type_die
25518 returns NULL, but DW_TAG_imported_declaration requires
25519 a DW_AT_import attribute. Force creation of a DW_TAG_typedef. */
25520 if (!at_import_die)
25521 {
25522 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
25523 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
25524 at_import_die = lookup_type_die (TREE_TYPE (decl));
25525 gcc_assert (at_import_die);
25526 }
25527 }
25528 else
25529 {
25530 at_import_die = lookup_decl_die (decl);
25531 if (!at_import_die)
25532 {
25533 /* If we're trying to avoid duplicate debug info, we may not have
25534 emitted the member decl for this field. Emit it now. */
25535 if (TREE_CODE (decl) == FIELD_DECL)
25536 {
25537 tree type = DECL_CONTEXT (decl);
25538
25539 if (TYPE_CONTEXT (type)
25540 && TYPE_P (TYPE_CONTEXT (type))
25541 && !should_emit_struct_debug (TYPE_CONTEXT (type),
25542 DINFO_USAGE_DIR_USE))
25543 return;
25544 gen_type_die_for_member (type, decl,
25545 get_context_die (TYPE_CONTEXT (type)));
25546 }
25547 if (TREE_CODE (decl) == NAMELIST_DECL)
25548 at_import_die = gen_namelist_decl (DECL_NAME (decl),
25549 get_context_die (DECL_CONTEXT (decl)),
25550 NULL_TREE);
25551 else
25552 at_import_die = force_decl_die (decl);
25553 }
25554 }
25555
25556 if (TREE_CODE (decl) == NAMESPACE_DECL)
25557 {
25558 if (dwarf_version >= 3 || !dwarf_strict)
25559 imported_die = new_die (DW_TAG_imported_module,
25560 lexical_block_die,
25561 lexical_block);
25562 else
25563 return;
25564 }
25565 else
25566 imported_die = new_die (DW_TAG_imported_declaration,
25567 lexical_block_die,
25568 lexical_block);
25569
25570 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
25571 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
25572 if (debug_column_info && xloc.column)
25573 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
25574 if (name)
25575 add_AT_string (imported_die, DW_AT_name,
25576 IDENTIFIER_POINTER (name));
25577 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
25578 }
25579
25580 /* Output debug information for imported module or decl DECL.
25581 NAME is non-NULL name in context if the decl has been renamed.
25582 CHILD is true if decl is one of the renamed decls as part of
25583 importing whole module. */
25584
25585 static void
25586 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
25587 bool child)
25588 {
25589 /* dw_die_ref at_import_die; */
25590 dw_die_ref scope_die;
25591
25592 if (debug_info_level <= DINFO_LEVEL_TERSE)
25593 return;
25594
25595 gcc_assert (decl);
25596
25597 set_early_dwarf s;
25598
25599 /* To emit DW_TAG_imported_module or DW_TAG_imported_declaration, we need two DIEs.
25600 We need decl DIE for reference and scope die. First, get DIE for the decl
25601 itself. */
25602
25603 /* Get the scope die for decl context. Use comp_unit_die for global module
25604 or decl. If a DIE is not found for non-globals, force a new DIE. */
25605 if (context
25606 && TYPE_P (context)
25607 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
25608 return;
25609
25610 scope_die = get_context_die (context);
25611
25612 if (child)
25613 {
25614 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
25615 there is nothing we can do here. */
25616 if (dwarf_version < 3 && dwarf_strict)
25617 return;
25618
25619 gcc_assert (scope_die->die_child);
25620 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
25621 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
25622 scope_die = scope_die->die_child;
25623 }
25624
25625 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
25626 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
25627 }
25628
25629 /* Output debug information for namelists. */
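/* A short Fortran sketch of what this handles (illustrative only):

     INTEGER :: a, b
     NAMELIST /nml/ a, b

   produces a DW_TAG_namelist DIE named "nml" with one
   DW_TAG_namelist_item child per member, each carrying a
   DW_AT_namelist_items reference to the DIE of the item's variable. */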
25630
25631 static dw_die_ref
25632 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
25633 {
25634 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
25635 tree value;
25636 unsigned i;
25637
25638 if (debug_info_level <= DINFO_LEVEL_TERSE)
25639 return NULL;
25640
25641 gcc_assert (scope_die != NULL);
25642 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
25643 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
25644
25645 /* If there are no item_decls, we have a nondefining namelist, e.g.
25646 with USE association; hence, set DW_AT_declaration. */
25647 if (item_decls == NULL_TREE)
25648 {
25649 add_AT_flag (nml_die, DW_AT_declaration, 1);
25650 return nml_die;
25651 }
25652
25653 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
25654 {
25655 nml_item_ref_die = lookup_decl_die (value);
25656 if (!nml_item_ref_die)
25657 nml_item_ref_die = force_decl_die (value);
25658
25659 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
25660 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
25661 }
25662 return nml_die;
25663 }
25664
25665
25666 /* Write the debugging output for DECL. */
25667
25668 static void
25669 dwarf2out_decl (tree decl)
25670 {
25671 dw_die_ref context_die = comp_unit_die ();
25672
25673 switch (TREE_CODE (decl))
25674 {
25675 case ERROR_MARK:
25676 return;
25677
25678 case FUNCTION_DECL:
25679 /* What we would really like to do here is to filter out all mere
25680 file-scope declarations of file-scope functions which are never
25681 referenced later within this translation unit (and keep all of ones
25682 that *are* referenced later on) but we aren't clairvoyant, so we have
25683 no idea which functions will be referenced in the future (i.e. later
25684 on within the current translation unit). So here we just ignore all
25685 file-scope function declarations which are not also definitions. If
25686 and when the debugger needs to know something about these functions,
25687 it will have to hunt around and find the DWARF information associated
25688 with the definition of the function.
25689
25690 We can't just check DECL_EXTERNAL to find out which FUNCTION_DECL
25691 nodes represent definitions and which ones represent mere
25692 declarations. We have to check DECL_INITIAL instead. That's because
25693 the C front-end supports some weird semantics for "extern inline"
25694 function definitions. These can get inlined within the current
25695 translation unit (and thus, we need to generate Dwarf info for their
25696 abstract instances so that the Dwarf info for the concrete inlined
25697 instances can have something to refer to) but the compiler never
25698 generates any out-of-line instances of such things (despite the fact
25699 that they *are* definitions).
25700
25701 The important point is that the C front-end marks these "extern
25702 inline" functions as DECL_EXTERNAL, but we need to generate DWARF for
25703 them anyway. Note that the C++ front-end also plays some similar games
25704 for inline function definitions appearing within include files which
25705 also contain `#pragma interface' pragmas.
25706
25707 If we are called from dwarf2out_abstract_function, output a DIE
25708 anyway. We can end up here this way with early inlining and LTO
25709 where the inlined function is output in a different LTRANS unit
25710 or not at all. */
25711 if (DECL_INITIAL (decl) == NULL_TREE
25712 && ! DECL_ABSTRACT_P (decl))
25713 return;
25714
25715 /* If we're a nested function, initially use a parent of NULL; if we're
25716 a plain function, this will be fixed up in decls_for_scope. If
25717 we're a method, it will be ignored, since we already have a DIE. */
25718 if (decl_function_context (decl)
25719 /* But if we're in terse mode, we don't care about scope. */
25720 && debug_info_level > DINFO_LEVEL_TERSE)
25721 context_die = NULL;
25722 break;
25723
25724 case VAR_DECL:
25725 /* For local statics lookup proper context die. */
25726 if (local_function_static (decl))
25727 context_die = lookup_decl_die (DECL_CONTEXT (decl));
25728
25729 /* If we are in terse mode, don't generate any DIEs to represent any
25730 variable declarations or definitions. */
25731 if (debug_info_level <= DINFO_LEVEL_TERSE)
25732 return;
25733 break;
25734
25735 case CONST_DECL:
25736 if (debug_info_level <= DINFO_LEVEL_TERSE)
25737 return;
25738 if (!is_fortran () && !is_ada ())
25739 return;
25740 if (TREE_STATIC (decl) && decl_function_context (decl))
25741 context_die = lookup_decl_die (DECL_CONTEXT (decl));
25742 break;
25743
25744 case NAMESPACE_DECL:
25745 case IMPORTED_DECL:
25746 if (debug_info_level <= DINFO_LEVEL_TERSE)
25747 return;
25748 if (lookup_decl_die (decl) != NULL)
25749 return;
25750 break;
25751
25752 case TYPE_DECL:
25753 /* Don't emit stubs for types unless they are needed by other DIEs. */
25754 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
25755 return;
25756
25757 /* Don't bother trying to generate any DIEs to represent any of the
25758 normal built-in types for the language we are compiling. */
25759 if (DECL_IS_BUILTIN (decl))
25760 return;
25761
25762 /* If we are in terse mode, don't generate any DIEs for types. */
25763 if (debug_info_level <= DINFO_LEVEL_TERSE)
25764 return;
25765
25766 /* If we're a function-scope tag, initially use a parent of NULL;
25767 this will be fixed up in decls_for_scope. */
25768 if (decl_function_context (decl))
25769 context_die = NULL;
25770
25771 break;
25772
25773 case NAMELIST_DECL:
25774 break;
25775
25776 default:
25777 return;
25778 }
25779
25780 gen_decl_die (decl, NULL, NULL, context_die);
25781
25782 if (flag_checking)
25783 {
25784 dw_die_ref die = lookup_decl_die (decl);
25785 if (die)
25786 check_die (die);
25787 }
25788 }
25789
25790 /* Write the debugging output for DECL. */
25791
25792 static void
25793 dwarf2out_function_decl (tree decl)
25794 {
25795 dwarf2out_decl (decl);
25796 call_arg_locations = NULL;
25797 call_arg_loc_last = NULL;
25798 call_site_count = -1;
25799 tail_call_site_count = -1;
25800 decl_loc_table->empty ();
25801 cached_dw_loc_list_table->empty ();
25802 }
25803
25804 /* Output a marker (i.e. a label) for the beginning of the generated code for
25805 a lexical block. */
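/* On a typical ELF target this emits an assembler label such as ".LBB42:"
   (BLOCK_BEGIN_LABEL is normally "LBB"), which the lexical block DIEs later
   use for their address ranges.  */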
25806
25807 static void
25808 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
25809 unsigned int blocknum)
25810 {
25811 switch_to_section (current_function_section ());
25812 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
25813 }
25814
25815 /* Output a marker (i.e. a label) for the end of the generated code for a
25816 lexical block. */
25817
25818 static void
25819 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
25820 {
25821 switch_to_section (current_function_section ());
25822 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
25823 }
25824
25825 /* Returns nonzero if it is appropriate not to emit any debugging
25826 information for BLOCK, because it doesn't contain any instructions.
25827
25828 Don't allow this for blocks with nested functions or local classes
25829 as we would end up with orphans, and in the presence of scheduling
25830 we may end up calling them anyway. */
25831
25832 static bool
25833 dwarf2out_ignore_block (const_tree block)
25834 {
25835 tree decl;
25836 unsigned int i;
25837
25838 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
25839 if (TREE_CODE (decl) == FUNCTION_DECL
25840 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
      return false;
25842 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
25843 {
25844 decl = BLOCK_NONLOCALIZED_VAR (block, i);
25845 if (TREE_CODE (decl) == FUNCTION_DECL
25846 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
	return false;
25848 }
25849
  return true;
25851 }
25852
25853 /* Hash table routines for file_hash. */
25854
25855 bool
25856 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
25857 {
25858 return filename_cmp (p1->filename, p2) == 0;
25859 }
25860
25861 hashval_t
25862 dwarf_file_hasher::hash (dwarf_file_data *p)
25863 {
25864 return htab_hash_string (p->filename);
25865 }
25866
/* Look up FILE_NAME (in the list of filenames that we know about here in
25868 dwarf2out.c) and return its "index". The index of each (known) filename is
25869 just a unique number which is associated with only that one filename. We
25870 need such numbers for the sake of generating labels (in the .debug_sfnames
   section) and references to those file numbers (in the .debug_srcinfo
25872 and .debug_macinfo sections). If the filename given as an argument is not
25873 found in our current list, add it to the list and assign it the next
25874 available unique index number. */
25875
25876 static struct dwarf_file_data *
25877 lookup_filename (const char *file_name)
25878 {
25879 struct dwarf_file_data * created;
25880
25881 if (!file_name)
25882 return NULL;
25883
25884 dwarf_file_data **slot
25885 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
25886 INSERT);
25887 if (*slot)
25888 return *slot;
25889
25890 created = ggc_alloc<dwarf_file_data> ();
25891 created->filename = file_name;
25892 created->emitted_number = 0;
25893 *slot = created;
25894 return created;
25895 }
25896
25897 /* If the assembler will construct the file table, then translate the compiler
25898 internal file table number into the assembler file table number, and emit
   a .file directive if we haven't already emitted one.  The file table
25900 numbers are different because we prune debug info for unused variables and
25901 types, which may include filenames. */
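/* For example, when the assembler constructs the table, the first filename
   emitted this way typically produces a directive like:  .file 1 "foo.c"  */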
25902
25903 static int
25904 maybe_emit_file (struct dwarf_file_data * fd)
25905 {
25906 if (! fd->emitted_number)
25907 {
25908 if (last_emitted_file)
25909 fd->emitted_number = last_emitted_file->emitted_number + 1;
25910 else
25911 fd->emitted_number = 1;
25912 last_emitted_file = fd;
25913
25914 if (DWARF2_ASM_LINE_DEBUG_INFO)
25915 {
25916 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
25917 output_quoted_string (asm_out_file,
25918 remap_debug_filename (fd->filename));
25919 fputc ('\n', asm_out_file);
25920 }
25921 }
25922
25923 return fd->emitted_number;
25924 }
25925
25926 /* Schedule generation of a DW_AT_const_value attribute to DIE.
25927 That generation should happen after function debug info has been
25928 generated. The value of the attribute is the constant value of ARG. */
25929
25930 static void
25931 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
25932 {
25933 die_arg_entry entry;
25934
25935 if (!die || !arg)
25936 return;
25937
25938 gcc_assert (early_dwarf);
25939
25940 if (!tmpl_value_parm_die_table)
25941 vec_alloc (tmpl_value_parm_die_table, 32);
25942
25943 entry.die = die;
25944 entry.arg = arg;
25945 vec_safe_push (tmpl_value_parm_die_table, entry);
25946 }
25947
25948 /* Return TRUE if T is an instance of generic type, FALSE
25949 otherwise. */
25950
25951 static bool
25952 generic_type_p (tree t)
25953 {
25954 if (t == NULL_TREE || !TYPE_P (t))
25955 return false;
25956 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
25957 }
25958
25959 /* Schedule the generation of the generic parameter dies for the
25960 instance of generic type T. The proper generation itself is later
25961 done by gen_scheduled_generic_parms_dies. */
25962
25963 static void
25964 schedule_generic_params_dies_gen (tree t)
25965 {
25966 if (!generic_type_p (t))
25967 return;
25968
25969 gcc_assert (early_dwarf);
25970
25971 if (!generic_type_instances)
25972 vec_alloc (generic_type_instances, 256);
25973
25974 vec_safe_push (generic_type_instances, t);
25975 }
25976
25977 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
25978 by append_entry_to_tmpl_value_parm_die_table. This function must
25979 be called after function DIEs have been generated. */
25980
25981 static void
25982 gen_remaining_tmpl_value_param_die_attribute (void)
25983 {
25984 if (tmpl_value_parm_die_table)
25985 {
25986 unsigned i, j;
25987 die_arg_entry *e;
25988
25989 /* We do this in two phases - first get the cases we can
25990 handle during early-finish, preserving those we cannot
25991 (containing symbolic constants where we don't yet know
25992 whether we are going to output the referenced symbols).
25993 For those we try again at late-finish. */
25994 j = 0;
25995 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
25996 {
25997 if (!tree_add_const_value_attribute (e->die, e->arg))
25998 {
25999 dw_loc_descr_ref loc = NULL;
26000 if (! early_dwarf
26001 && (dwarf_version >= 5 || !dwarf_strict))
26002 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
26003 if (loc)
26004 add_AT_loc (e->die, DW_AT_location, loc);
26005 else
26006 (*tmpl_value_parm_die_table)[j++] = *e;
26007 }
26008 }
26009 tmpl_value_parm_die_table->truncate (j);
26010 }
26011 }
26012
26013 /* Generate generic parameters DIEs for instances of generic types
26014 that have been previously scheduled by
26015 schedule_generic_params_dies_gen. This function must be called
26016 after all the types of the CU have been laid out. */
26017
26018 static void
26019 gen_scheduled_generic_parms_dies (void)
26020 {
26021 unsigned i;
26022 tree t;
26023
26024 if (!generic_type_instances)
26025 return;
26026
26027 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
26028 if (COMPLETE_TYPE_P (t))
26029 gen_generic_params_dies (t);
26030
26031 generic_type_instances = NULL;
26032 }
26033
26034
/* Replace DW_AT_name for DECL with NAME.  */
26036
26037 static void
26038 dwarf2out_set_name (tree decl, tree name)
26039 {
26040 dw_die_ref die;
26041 dw_attr_node *attr;
26042 const char *dname;
26043
26044 die = TYPE_SYMTAB_DIE (decl);
26045 if (!die)
26046 return;
26047
26048 dname = dwarf2_name (name, 0);
26049 if (!dname)
26050 return;
26051
26052 attr = get_AT (die, DW_AT_name);
26053 if (attr)
26054 {
26055 struct indirect_string_node *node;
26056
26057 node = find_AT_string (dname);
26058 /* replace the string. */
26059 attr->dw_attr_val.v.val_str = node;
26060 }
26061
26062 else
26063 add_name_attribute (die, dname);
26064 }
26065
26066 /* True if before or during processing of the first function being emitted. */
26067 static bool in_first_function_p = true;
/* True if the loc_note seen during a dwarf2out_var_location call might still
   be before the first real instruction, i.e. at an address equal to .Ltext0.  */
26070 static bool maybe_at_text_label_p = true;
26071 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
26072 static unsigned int first_loclabel_num_not_at_text_label;
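/* These three cooperate with dwarf2out_var_location, dwarf2out_end_function
   and find_empty_loc_ranges_at_text_label below to detect .LVL* labels that
   may coincide with .Ltext0, since a zero-length range such as
   .LVLN-.Ltext0 .. .LVLM-.Ltext0 would look like a .debug_loc list
   terminator.  */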
26073
26074 /* Called by the final INSN scan whenever we see a var location. We
26075 use it to drop labels in the right places, and throw the location in
26076 our lookup table. */
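/* Note that this hook may also be handed a CALL_P insn instead of a note
   (see the !NOTE_P path below), which counts call sites and, at -O0 without
   var-tracking, also records locations for calls that would otherwise have
   no note.  */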
26077
26078 static void
26079 dwarf2out_var_location (rtx_insn *loc_note)
26080 {
26081 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
26082 struct var_loc_node *newloc;
26083 rtx_insn *next_real, *next_note;
26084 rtx_insn *call_insn = NULL;
26085 static const char *last_label;
26086 static const char *last_postcall_label;
26087 static bool last_in_cold_section_p;
26088 static rtx_insn *expected_next_loc_note;
26089 tree decl;
26090 bool var_loc_p;
26091
26092 if (!NOTE_P (loc_note))
26093 {
26094 if (CALL_P (loc_note))
26095 {
26096 call_site_count++;
26097 if (SIBLING_CALL_P (loc_note))
26098 tail_call_site_count++;
26099 if (optimize == 0 && !flag_var_tracking)
26100 {
26101 /* When the var-tracking pass is not running, there is no note
26102 for indirect calls whose target is compile-time known. In this
26103 case, process such calls specifically so that we generate call
26104 sites for them anyway. */
26105 rtx x = PATTERN (loc_note);
26106 if (GET_CODE (x) == PARALLEL)
26107 x = XVECEXP (x, 0, 0);
26108 if (GET_CODE (x) == SET)
26109 x = SET_SRC (x);
26110 if (GET_CODE (x) == CALL)
26111 x = XEXP (x, 0);
26112 if (!MEM_P (x)
26113 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
26114 || !SYMBOL_REF_DECL (XEXP (x, 0))
26115 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
26116 != FUNCTION_DECL))
26117 {
26118 call_insn = loc_note;
26119 loc_note = NULL;
26120 var_loc_p = false;
26121
26122 next_real = next_real_insn (call_insn);
26123 next_note = NULL;
26124 cached_next_real_insn = NULL;
26125 goto create_label;
26126 }
26127 }
26128 }
26129 return;
26130 }
26131
26132 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
26133 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
26134 return;
26135
26136 /* Optimize processing a large consecutive sequence of location
26137 notes so we don't spend too much time in next_real_insn. If the
26138 next insn is another location note, remember the next_real_insn
26139 calculation for next time. */
26140 next_real = cached_next_real_insn;
26141 if (next_real)
26142 {
26143 if (expected_next_loc_note != loc_note)
26144 next_real = NULL;
26145 }
26146
26147 next_note = NEXT_INSN (loc_note);
26148 if (! next_note
26149 || next_note->deleted ()
26150 || ! NOTE_P (next_note)
26151 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
26152 && NOTE_KIND (next_note) != NOTE_INSN_CALL_ARG_LOCATION))
26153 next_note = NULL;
26154
26155 if (! next_real)
26156 next_real = next_real_insn (loc_note);
26157
26158 if (next_note)
26159 {
26160 expected_next_loc_note = next_note;
26161 cached_next_real_insn = next_real;
26162 }
26163 else
26164 cached_next_real_insn = NULL;
26165
26166 /* If there are no instructions which would be affected by this note,
26167 don't do anything. */
26168 if (var_loc_p
26169 && next_real == NULL_RTX
26170 && !NOTE_DURING_CALL_P (loc_note))
26171 return;
26172
26173 create_label:
26174
26175 if (next_real == NULL_RTX)
26176 next_real = get_last_insn ();
26177
  /* If there were any real insns between the note we processed last time
26179 and this note (or if it is the first note), clear
26180 last_{,postcall_}label so that they are not reused this time. */
26181 if (last_var_location_insn == NULL_RTX
26182 || last_var_location_insn != next_real
26183 || last_in_cold_section_p != in_cold_section_p)
26184 {
26185 last_label = NULL;
26186 last_postcall_label = NULL;
26187 }
26188
26189 if (var_loc_p)
26190 {
26191 decl = NOTE_VAR_LOCATION_DECL (loc_note);
26192 newloc = add_var_loc_to_decl (decl, loc_note,
26193 NOTE_DURING_CALL_P (loc_note)
26194 ? last_postcall_label : last_label);
26195 if (newloc == NULL)
26196 return;
26197 }
26198 else
26199 {
26200 decl = NULL_TREE;
26201 newloc = NULL;
26202 }
26203
  /* If there were no real insns between the note we processed last time
26205 and this note, use the label we emitted last time. Otherwise
26206 create a new label and emit it. */
26207 if (last_label == NULL)
26208 {
26209 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
26210 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
26211 loclabel_num++;
26212 last_label = ggc_strdup (loclabel);
26213 /* See if loclabel might be equal to .Ltext0. If yes,
26214 bump first_loclabel_num_not_at_text_label. */
26215 if (!have_multiple_function_sections
26216 && in_first_function_p
26217 && maybe_at_text_label_p)
26218 {
26219 static rtx_insn *last_start;
26220 rtx_insn *insn;
26221 for (insn = loc_note; insn; insn = previous_insn (insn))
26222 if (insn == last_start)
26223 break;
26224 else if (!NONDEBUG_INSN_P (insn))
26225 continue;
26226 else
26227 {
26228 rtx body = PATTERN (insn);
26229 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
26230 continue;
26231 /* Inline asm could occupy zero bytes. */
26232 else if (GET_CODE (body) == ASM_INPUT
26233 || asm_noperands (body) >= 0)
26234 continue;
26235 #ifdef HAVE_attr_length
26236 else if (get_attr_min_length (insn) == 0)
26237 continue;
26238 #endif
26239 else
26240 {
26241 /* Assume insn has non-zero length. */
26242 maybe_at_text_label_p = false;
26243 break;
26244 }
26245 }
26246 if (maybe_at_text_label_p)
26247 {
26248 last_start = loc_note;
26249 first_loclabel_num_not_at_text_label = loclabel_num;
26250 }
26251 }
26252 }
26253
26254 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
26255 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
26256
26257 if (!var_loc_p)
26258 {
26259 struct call_arg_loc_node *ca_loc
26260 = ggc_cleared_alloc<call_arg_loc_node> ();
26261 rtx_insn *prev
26262 = loc_note != NULL_RTX ? prev_real_insn (loc_note) : call_insn;
26263
26264 ca_loc->call_arg_loc_note = loc_note;
26265 ca_loc->next = NULL;
26266 ca_loc->label = last_label;
26267 gcc_assert (prev
26268 && (CALL_P (prev)
26269 || (NONJUMP_INSN_P (prev)
26270 && GET_CODE (PATTERN (prev)) == SEQUENCE
26271 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
26272 if (!CALL_P (prev))
26273 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
26274 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
26275
26276 /* Look for a SYMBOL_REF in the "prev" instruction. */
26277 rtx x = get_call_rtx_from (PATTERN (prev));
26278 if (x)
26279 {
26280 /* Try to get the call symbol, if any. */
26281 if (MEM_P (XEXP (x, 0)))
26282 x = XEXP (x, 0);
26283 /* First, look for a memory access to a symbol_ref. */
26284 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
26285 && SYMBOL_REF_DECL (XEXP (x, 0))
26286 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
26287 ca_loc->symbol_ref = XEXP (x, 0);
26288 /* Otherwise, look at a compile-time known user-level function
26289 declaration. */
26290 else if (MEM_P (x)
26291 && MEM_EXPR (x)
26292 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
26293 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
26294 }
26295
26296 ca_loc->block = insn_scope (prev);
26297 if (call_arg_locations)
26298 call_arg_loc_last->next = ca_loc;
26299 else
26300 call_arg_locations = ca_loc;
26301 call_arg_loc_last = ca_loc;
26302 }
26303 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
26304 newloc->label = last_label;
26305 else
26306 {
26307 if (!last_postcall_label)
26308 {
26309 sprintf (loclabel, "%s-1", last_label);
26310 last_postcall_label = ggc_strdup (loclabel);
26311 }
26312 newloc->label = last_postcall_label;
26313 }
26314
26315 last_var_location_insn = next_real;
26316 last_in_cold_section_p = in_cold_section_p;
26317 }
26318
26319 /* Called from finalize_size_functions for size functions so that their body
26320 can be encoded in the debug info to describe the layout of variable-length
26321 structures. */
26322
26323 static void
26324 dwarf2out_size_function (tree decl)
26325 {
26326 function_to_dwarf_procedure (decl);
26327 }
26328
26329 /* Note in one location list that text section has changed. */
26330
26331 int
26332 var_location_switch_text_section_1 (var_loc_list **slot, void *)
26333 {
26334 var_loc_list *list = *slot;
26335 if (list->first)
26336 list->last_before_switch
26337 = list->last->next ? list->last->next : list->last;
26338 return 1;
26339 }
26340
26341 /* Note in all location lists that text section has changed. */
26342
26343 static void
26344 var_location_switch_text_section (void)
26345 {
26346 if (decl_loc_table == NULL)
26347 return;
26348
26349 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
26350 }
26351
26352 /* Create a new line number table. */
26353
26354 static dw_line_info_table *
26355 new_line_info_table (void)
26356 {
26357 dw_line_info_table *table;
26358
26359 table = ggc_cleared_alloc<dw_line_info_table> ();
26360 table->file_num = 1;
26361 table->line_num = 1;
26362 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
26363
26364 return table;
26365 }
26366
/* Look up the "current" table into which we emit line info, so
26368 that we don't have to do it for every source line. */
26369
26370 static void
26371 set_cur_line_info_table (section *sec)
26372 {
26373 dw_line_info_table *table;
26374
26375 if (sec == text_section)
26376 table = text_section_line_info;
26377 else if (sec == cold_text_section)
26378 {
26379 table = cold_text_section_line_info;
26380 if (!table)
26381 {
26382 cold_text_section_line_info = table = new_line_info_table ();
26383 table->end_label = cold_end_label;
26384 }
26385 }
26386 else
26387 {
26388 const char *end_label;
26389
26390 if (flag_reorder_blocks_and_partition)
26391 {
26392 if (in_cold_section_p)
26393 end_label = crtl->subsections.cold_section_end_label;
26394 else
26395 end_label = crtl->subsections.hot_section_end_label;
26396 }
26397 else
26398 {
26399 char label[MAX_ARTIFICIAL_LABEL_BYTES];
26400 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
26401 current_function_funcdef_no);
26402 end_label = ggc_strdup (label);
26403 }
26404
26405 table = new_line_info_table ();
26406 table->end_label = end_label;
26407
26408 vec_safe_push (separate_line_info, table);
26409 }
26410
26411 if (DWARF2_ASM_LINE_DEBUG_INFO)
26412 table->is_stmt = (cur_line_info_table
26413 ? cur_line_info_table->is_stmt
26414 : DWARF_LINE_DEFAULT_IS_STMT_START);
26415 cur_line_info_table = table;
26416 }
26417
26418
26419 /* We need to reset the locations at the beginning of each
26420 function. We can't do this in the end_function hook, because the
26421 declarations that use the locations won't have been output when
26422 that hook is called. Also compute have_multiple_function_sections here. */
26423
26424 static void
26425 dwarf2out_begin_function (tree fun)
26426 {
26427 section *sec = function_section (fun);
26428
26429 if (sec != text_section)
26430 have_multiple_function_sections = true;
26431
26432 if (flag_reorder_blocks_and_partition && !cold_text_section)
26433 {
26434 gcc_assert (current_function_decl == fun);
26435 cold_text_section = unlikely_text_section ();
26436 switch_to_section (cold_text_section);
26437 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
26438 switch_to_section (sec);
26439 }
26440
26441 dwarf2out_note_section_used ();
26442 call_site_count = 0;
26443 tail_call_site_count = 0;
26444
26445 set_cur_line_info_table (sec);
26446 }
26447
26448 /* Helper function of dwarf2out_end_function, called only after emitting
26449 the very first function into assembly. Check if some .debug_loc range
26450 might end with a .LVL* label that could be equal to .Ltext0.
26451 In that case we must force using absolute addresses in .debug_loc ranges,
26452 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
26453 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
26454 list terminator.
26455 Set have_multiple_function_sections to true in that case and
26456 terminate htab traversal. */
26457
26458 int
26459 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
26460 {
26461 var_loc_list *entry = *slot;
26462 struct var_loc_node *node;
26463
26464 node = entry->first;
26465 if (node && node->next && node->next->label)
26466 {
26467 unsigned int i;
26468 const char *label = node->next->label;
26469 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
26470
26471 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
26472 {
26473 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
26474 if (strcmp (label, loclabel) == 0)
26475 {
26476 have_multiple_function_sections = true;
26477 return 0;
26478 }
26479 }
26480 }
26481 return 1;
26482 }
26483
26484 /* Hook called after emitting a function into assembly.
26485 This does something only for the very first function emitted. */
26486
26487 static void
26488 dwarf2out_end_function (unsigned int)
26489 {
26490 if (in_first_function_p
26491 && !have_multiple_function_sections
26492 && first_loclabel_num_not_at_text_label
26493 && decl_loc_table)
26494 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
26495 in_first_function_p = false;
26496 maybe_at_text_label_p = false;
26497 }
26498
26499 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
26500 front-ends register a translation unit even before dwarf2out_init is
26501 called. */
26502 static tree main_translation_unit = NULL_TREE;
26503
26504 /* Hook called by front-ends after they built their main translation unit.
26505 Associate comp_unit_die to UNIT. */
26506
26507 static void
26508 dwarf2out_register_main_translation_unit (tree unit)
26509 {
26510 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
26511 && main_translation_unit == NULL_TREE);
26512 main_translation_unit = unit;
26513 /* If dwarf2out_init has not been called yet, it will perform the association
26514 itself looking at main_translation_unit. */
26515 if (decl_die_table != NULL)
26516 equate_decl_number_to_die (unit, comp_unit_die ());
26517 }
26518
26519 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
26520
26521 static void
26522 push_dw_line_info_entry (dw_line_info_table *table,
26523 enum dw_line_info_opcode opcode, unsigned int val)
26524 {
26525 dw_line_info_entry e;
26526 e.opcode = opcode;
26527 e.val = val;
26528 vec_safe_push (table->entries, e);
26529 }
26530
26531 /* Output a label to mark the beginning of a source code line entry
   and record information relating to this source line in
26533 'line_info_table' for later output of the .debug_line section. */
26534 /* ??? The discriminator parameter ought to be unsigned. */
26535
26536 static void
26537 dwarf2out_source_line (unsigned int line, const char *filename,
26538 int discriminator, bool is_stmt)
26539 {
26540 unsigned int file_num;
26541 dw_line_info_table *table;
26542
26543 if (debug_info_level < DINFO_LEVEL_TERSE || line == 0)
26544 return;
26545
  /* The discriminator was added in DWARF 4.  Simplify the code below
     by removing it if we're not supposed to output it.  */
26548 if (dwarf_version < 4 && dwarf_strict)
26549 discriminator = 0;
26550
26551 table = cur_line_info_table;
26552 file_num = maybe_emit_file (lookup_filename (filename));
26553
26554 /* ??? TODO: Elide duplicate line number entries. Traditionally,
26555 the debugger has used the second (possibly duplicate) line number
26556 at the beginning of the function to mark the end of the prologue.
26557 We could eliminate any other duplicates within the function. For
26558 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
26559 that second line number entry. */
26560 /* Recall that this end-of-prologue indication is *not* the same thing
26561 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
26562 to which the hook corresponds, follows the last insn that was
26563 emitted by gen_prologue. What we need is to precede the first insn
26564 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
26565 insn that corresponds to something the user wrote. These may be
26566 very different locations once scheduling is enabled. */
26567
26568 if (0 && file_num == table->file_num
26569 && line == table->line_num
26570 && discriminator == table->discrim_num
26571 && is_stmt == table->is_stmt)
26572 return;
26573
26574 switch_to_section (current_function_section ());
26575
26576 /* If requested, emit something human-readable. */
26577 if (flag_debug_asm)
26578 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START, filename, line);
26579
26580 if (DWARF2_ASM_LINE_DEBUG_INFO)
26581 {
26582 /* Emit the .loc directive understood by GNU as. */
26583 /* "\t.loc %u %u 0 is_stmt %u discriminator %u",
26584 file_num, line, is_stmt, discriminator */
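      /* E.g. this might emit ".loc 1 42 0", with " is_stmt ..." appended
	 when the statement flag changes and " discriminator ..." when a
	 nonzero discriminator is present.  */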
26585 fputs ("\t.loc ", asm_out_file);
26586 fprint_ul (asm_out_file, file_num);
26587 putc (' ', asm_out_file);
26588 fprint_ul (asm_out_file, line);
26589 putc (' ', asm_out_file);
26590 putc ('0', asm_out_file);
26591
26592 if (is_stmt != table->is_stmt)
26593 {
26594 fputs (" is_stmt ", asm_out_file);
26595 putc (is_stmt ? '1' : '0', asm_out_file);
26596 }
26597 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
26598 {
26599 gcc_assert (discriminator > 0);
26600 fputs (" discriminator ", asm_out_file);
26601 fprint_ul (asm_out_file, (unsigned long) discriminator);
26602 }
26603 putc ('\n', asm_out_file);
26604 }
26605 else
26606 {
26607 unsigned int label_num = ++line_info_label_num;
26608
26609 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
26610
26611 push_dw_line_info_entry (table, LI_set_address, label_num);
26612 if (file_num != table->file_num)
26613 push_dw_line_info_entry (table, LI_set_file, file_num);
26614 if (discriminator != table->discrim_num)
26615 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
26616 if (is_stmt != table->is_stmt)
26617 push_dw_line_info_entry (table, LI_negate_stmt, 0);
26618 push_dw_line_info_entry (table, LI_set_line, line);
26619 }
26620
26621 table->file_num = file_num;
26622 table->line_num = line;
26623 table->discrim_num = discriminator;
26624 table->is_stmt = is_stmt;
26625 table->in_use = true;
26626 }
26627
26628 /* Record the beginning of a new source file. */
26629
26630 static void
26631 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
26632 {
26633 if (flag_eliminate_dwarf2_dups)
26634 {
26635 /* Record the beginning of the file for break_out_includes. */
26636 dw_die_ref bincl_die;
26637
26638 bincl_die = new_die (DW_TAG_GNU_BINCL, comp_unit_die (), NULL);
26639 add_AT_string (bincl_die, DW_AT_name, remap_debug_filename (filename));
26640 }
26641
26642 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
26643 {
26644 macinfo_entry e;
26645 e.code = DW_MACINFO_start_file;
26646 e.lineno = lineno;
26647 e.info = ggc_strdup (filename);
26648 vec_safe_push (macinfo_table, e);
26649 }
26650 }
26651
26652 /* Record the end of a source file. */
26653
26654 static void
26655 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
26656 {
26657 if (flag_eliminate_dwarf2_dups)
26658 /* Record the end of the file for break_out_includes. */
26659 new_die (DW_TAG_GNU_EINCL, comp_unit_die (), NULL);
26660
26661 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
26662 {
26663 macinfo_entry e;
26664 e.code = DW_MACINFO_end_file;
26665 e.lineno = lineno;
26666 e.info = NULL;
26667 vec_safe_push (macinfo_table, e);
26668 }
26669 }
26670
26671 /* Called from debug_define in toplev.c. The `buffer' parameter contains
26672 the tail part of the directive line, i.e. the part which is past the
26673 initial whitespace, #, whitespace, directive-name, whitespace part. */
26674
26675 static void
26676 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
26677 const char *buffer ATTRIBUTE_UNUSED)
26678 {
26679 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
26680 {
26681 macinfo_entry e;
26682 /* Insert a dummy first entry to be able to optimize the whole
26683 predefined macro block using DW_MACRO_import. */
26684 if (macinfo_table->is_empty () && lineno <= 1)
26685 {
26686 e.code = 0;
26687 e.lineno = 0;
26688 e.info = NULL;
26689 vec_safe_push (macinfo_table, e);
26690 }
26691 e.code = DW_MACINFO_define;
26692 e.lineno = lineno;
26693 e.info = ggc_strdup (buffer);
26694 vec_safe_push (macinfo_table, e);
26695 }
26696 }
26697
26698 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
26699 the tail part of the directive line, i.e. the part which is past the
26700 initial whitespace, #, whitespace, directive-name, whitespace part. */
26701
26702 static void
26703 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
26704 const char *buffer ATTRIBUTE_UNUSED)
26705 {
26706 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
26707 {
26708 macinfo_entry e;
26709 /* Insert a dummy first entry to be able to optimize the whole
26710 predefined macro block using DW_MACRO_import. */
26711 if (macinfo_table->is_empty () && lineno <= 1)
26712 {
26713 e.code = 0;
26714 e.lineno = 0;
26715 e.info = NULL;
26716 vec_safe_push (macinfo_table, e);
26717 }
26718 e.code = DW_MACINFO_undef;
26719 e.lineno = lineno;
26720 e.info = ggc_strdup (buffer);
26721 vec_safe_push (macinfo_table, e);
26722 }
26723 }
26724
/* Hash table helpers for macinfo entries, keyed by the entry's info string.  */
26726
26727 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
26728 {
26729 static inline hashval_t hash (const macinfo_entry *);
26730 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
26731 };
26732
26733 inline hashval_t
26734 macinfo_entry_hasher::hash (const macinfo_entry *entry)
26735 {
26736 return htab_hash_string (entry->info);
26737 }
26738
26739 inline bool
26740 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
26741 const macinfo_entry *entry2)
26742 {
26743 return !strcmp (entry1->info, entry2->info);
26744 }
26745
26746 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
26747
26748 /* Output a single .debug_macinfo entry. */
26749
26750 static void
26751 output_macinfo_op (macinfo_entry *ref)
26752 {
26753 int file_num;
26754 size_t len;
26755 struct indirect_string_node *node;
26756 char label[MAX_ARTIFICIAL_LABEL_BYTES];
26757 struct dwarf_file_data *fd;
26758
26759 switch (ref->code)
26760 {
26761 case DW_MACINFO_start_file:
26762 fd = lookup_filename (ref->info);
26763 file_num = maybe_emit_file (fd);
26764 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
26765 dw2_asm_output_data_uleb128 (ref->lineno,
26766 "Included from line number %lu",
26767 (unsigned long) ref->lineno);
26768 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
26769 break;
26770 case DW_MACINFO_end_file:
26771 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
26772 break;
26773 case DW_MACINFO_define:
26774 case DW_MACINFO_undef:
26775 len = strlen (ref->info) + 1;
26776 if (!dwarf_strict
26777 && len > DWARF_OFFSET_SIZE
26778 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
26779 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
26780 {
26781 ref->code = ref->code == DW_MACINFO_define
26782 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
26783 output_macinfo_op (ref);
26784 return;
26785 }
26786 dw2_asm_output_data (1, ref->code,
26787 ref->code == DW_MACINFO_define
26788 ? "Define macro" : "Undefine macro");
26789 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
26790 (unsigned long) ref->lineno);
26791 dw2_asm_output_nstring (ref->info, -1, "The macro");
26792 break;
26793 case DW_MACRO_define_strp:
26794 case DW_MACRO_undef_strp:
26795 node = find_AT_string (ref->info);
26796 gcc_assert (node
26797 && (node->form == DW_FORM_strp
26798 || node->form == DW_FORM_GNU_str_index));
26799 dw2_asm_output_data (1, ref->code,
26800 ref->code == DW_MACRO_define_strp
26801 ? "Define macro strp"
26802 : "Undefine macro strp");
26803 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
26804 (unsigned long) ref->lineno);
26805 if (node->form == DW_FORM_strp)
26806 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
26807 debug_str_section, "The macro: \"%s\"",
26808 ref->info);
26809 else
26810 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
26811 ref->info);
26812 break;
26813 case DW_MACRO_import:
26814 dw2_asm_output_data (1, ref->code, "Import");
26815 ASM_GENERATE_INTERNAL_LABEL (label,
26816 DEBUG_MACRO_SECTION_LABEL, ref->lineno);
26817 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
26818 break;
26819 default:
26820 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
26821 ASM_COMMENT_START, (unsigned long) ref->code);
26822 break;
26823 }
26824 }
26825
26826 /* Attempt to make a sequence of define/undef macinfo ops shareable with
   other compilation units' .debug_macinfo sections.  IDX is the first
   index of a define/undef; return the number of ops that should be
26829 emitted in a comdat .debug_macinfo section and emit
26830 a DW_MACRO_import entry referencing it.
26831 If the define/undef entry should be emitted normally, return 0. */
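/* Note that the caller only calls this when the vector entry at IDX - 1 is
   an unused slot (code 0); that slot is reused below to hold the
   DW_MACRO_import op for the range.  */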
26832
26833 static unsigned
26834 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
26835 macinfo_hash_type **macinfo_htab)
26836 {
26837 macinfo_entry *first, *second, *cur, *inc;
26838 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
26839 unsigned char checksum[16];
26840 struct md5_ctx ctx;
26841 char *grp_name, *tail;
26842 const char *base;
26843 unsigned int i, count, encoded_filename_len, linebuf_len;
26844 macinfo_entry **slot;
26845
26846 first = &(*macinfo_table)[idx];
26847 second = &(*macinfo_table)[idx + 1];
26848
26849 /* Optimize only if there are at least two consecutive define/undef ops,
26850 and either all of them are before first DW_MACINFO_start_file
26851 with lineno {0,1} (i.e. predefined macro block), or all of them are
26852 in some included header file. */
26853 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
26854 return 0;
26855 if (vec_safe_is_empty (files))
26856 {
26857 if (first->lineno > 1 || second->lineno > 1)
26858 return 0;
26859 }
26860 else if (first->lineno == 0)
26861 return 0;
26862
26863 /* Find the last define/undef entry that can be grouped together
26864 with first and at the same time compute md5 checksum of their
26865 codes, linenumbers and strings. */
26866 md5_init_ctx (&ctx);
26867 for (i = idx; macinfo_table->iterate (i, &cur); i++)
26868 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
26869 break;
26870 else if (vec_safe_is_empty (files) && cur->lineno > 1)
26871 break;
26872 else
26873 {
26874 unsigned char code = cur->code;
26875 md5_process_bytes (&code, 1, &ctx);
26876 checksum_uleb128 (cur->lineno, &ctx);
26877 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
26878 }
26879 md5_finish_ctx (&ctx, checksum);
26880 count = i - idx;
26881
26882 /* From the containing include filename (if any) pick up just
26883 usable characters from its basename. */
26884 if (vec_safe_is_empty (files))
26885 base = "";
26886 else
26887 base = lbasename (files->last ().info);
26888 for (encoded_filename_len = 0, i = 0; base[i]; i++)
26889 if (ISIDNUM (base[i]) || base[i] == '.')
26890 encoded_filename_len++;
  /* Account for the '.' appended at the end.  */
26892 if (encoded_filename_len)
26893 encoded_filename_len++;
26894
26895 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
26896 linebuf_len = strlen (linebuf);
26897
26898 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
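  /* E.g. a hypothetical group name: "wm4.stdio.h.1.<32 hex md5 digits>".  */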
26899 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
26900 + 16 * 2 + 1);
26901 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
26902 tail = grp_name + 4;
26903 if (encoded_filename_len)
26904 {
26905 for (i = 0; base[i]; i++)
26906 if (ISIDNUM (base[i]) || base[i] == '.')
26907 *tail++ = base[i];
26908 *tail++ = '.';
26909 }
26910 memcpy (tail, linebuf, linebuf_len);
26911 tail += linebuf_len;
26912 *tail++ = '.';
26913 for (i = 0; i < 16; i++)
26914 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
26915
26916 /* Construct a macinfo_entry for DW_MACRO_import
26917 in the empty vector entry before the first define/undef. */
26918 inc = &(*macinfo_table)[idx - 1];
26919 inc->code = DW_MACRO_import;
26920 inc->lineno = 0;
26921 inc->info = ggc_strdup (grp_name);
26922 if (!*macinfo_htab)
26923 *macinfo_htab = new macinfo_hash_type (10);
26924 /* Avoid emitting duplicates. */
26925 slot = (*macinfo_htab)->find_slot (inc, INSERT);
26926 if (*slot != NULL)
26927 {
26928 inc->code = 0;
26929 inc->info = NULL;
26930 /* If such an entry has been used before, just emit
26931 a DW_MACRO_import op. */
26932 inc = *slot;
26933 output_macinfo_op (inc);
26934 /* And clear all macinfo_entry in the range to avoid emitting them
26935 in the second pass. */
26936 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
26937 {
26938 cur->code = 0;
26939 cur->info = NULL;
26940 }
26941 }
26942 else
26943 {
26944 *slot = inc;
26945 inc->lineno = (*macinfo_htab)->elements ();
26946 output_macinfo_op (inc);
26947 }
26948 return count;
26949 }
26950
26951 /* Save any strings needed by the macinfo table in the debug str
26952 table. All strings must be collected into the table by the time
26953 index_string is called. */
26954
26955 static void
26956 save_macinfo_strings (void)
26957 {
26958 unsigned len;
26959 unsigned i;
26960 macinfo_entry *ref;
26961
26962 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
26963 {
26964 switch (ref->code)
26965 {
26966 /* Match the logic in output_macinfo_op to decide on
26967 indirect strings. */
26968 case DW_MACINFO_define:
26969 case DW_MACINFO_undef:
26970 len = strlen (ref->info) + 1;
26971 if (!dwarf_strict
26972 && len > DWARF_OFFSET_SIZE
26973 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
26974 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
26975 set_indirect_string (find_AT_string (ref->info));
26976 break;
26977 case DW_MACRO_define_strp:
26978 case DW_MACRO_undef_strp:
26979 set_indirect_string (find_AT_string (ref->info));
26980 break;
26981 default:
26982 break;
26983 }
26984 }
26985 }
26986
26987 /* Output macinfo section(s). */
26988
26989 static void
26990 output_macinfo (void)
26991 {
26992 unsigned i;
26993 unsigned long length = vec_safe_length (macinfo_table);
26994 macinfo_entry *ref;
26995 vec<macinfo_entry, va_gc> *files = NULL;
26996 macinfo_hash_type *macinfo_htab = NULL;
26997
26998 if (! length)
26999 return;
27000
27001 /* output_macinfo* uses these interchangeably. */
27002 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
27003 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
27004 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
27005 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
27006
27007 /* For .debug_macro emit the section header. */
27008 if (!dwarf_strict || dwarf_version >= 5)
27009 {
27010 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
27011 "DWARF macro version number");
27012 if (DWARF_OFFSET_SIZE == 8)
27013 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
27014 else
27015 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
27016 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
27017 (!dwarf_split_debug_info ? debug_line_section_label
27018 : debug_skeleton_line_section_label),
27019 debug_line_section, NULL);
27020 }
27021
  /* In the first loop, emit the primary .debug_macinfo section and clear
     each macinfo_entry once its op has been output.  If a longer range of
     define/undef ops can be optimized using DW_MACRO_import, the
     DW_MACRO_import op is emitted into (and kept in) the vector entry just
     before the first define/undef of the range, while the define/undef ops
     themselves are left unemitted and kept so that the second loop can emit
     them into a comdat .debug_macinfo section.  */
27028 for (i = 0; macinfo_table->iterate (i, &ref); i++)
27029 {
27030 switch (ref->code)
27031 {
27032 case DW_MACINFO_start_file:
27033 vec_safe_push (files, *ref);
27034 break;
27035 case DW_MACINFO_end_file:
27036 if (!vec_safe_is_empty (files))
27037 files->pop ();
27038 break;
27039 case DW_MACINFO_define:
27040 case DW_MACINFO_undef:
27041 if ((!dwarf_strict || dwarf_version >= 5)
27042 && HAVE_COMDAT_GROUP
27043 && vec_safe_length (files) != 1
27044 && i > 0
27045 && i + 1 < length
27046 && (*macinfo_table)[i - 1].code == 0)
27047 {
27048 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
27049 if (count)
27050 {
27051 i += count - 1;
27052 continue;
27053 }
27054 }
27055 break;
27056 case 0:
27057 /* A dummy entry may be inserted at the beginning to be able
27058 to optimize the whole block of predefined macros. */
27059 if (i == 0)
27060 continue;
27061 default:
27062 break;
27063 }
27064 output_macinfo_op (ref);
27065 ref->info = NULL;
27066 ref->code = 0;
27067 }
27068
27069 if (!macinfo_htab)
27070 return;
27071
27072 delete macinfo_htab;
27073 macinfo_htab = NULL;
27074
  /* If any DW_MACRO_import ops were used, then at each such entry terminate
     the current chain, switch to a new comdat .debug_macinfo section and
     emit the remaining define/undef entries within it.  */
27078 for (i = 0; macinfo_table->iterate (i, &ref); i++)
27079 switch (ref->code)
27080 {
27081 case 0:
27082 continue;
27083 case DW_MACRO_import:
27084 {
27085 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27086 tree comdat_key = get_identifier (ref->info);
27087 /* Terminate the previous .debug_macinfo section. */
27088 dw2_asm_output_data (1, 0, "End compilation unit");
27089 targetm.asm_out.named_section (debug_macinfo_section_name,
27090 SECTION_DEBUG
27091 | SECTION_LINKONCE,
27092 comdat_key);
27093 ASM_GENERATE_INTERNAL_LABEL (label,
27094 DEBUG_MACRO_SECTION_LABEL,
27095 ref->lineno);
27096 ASM_OUTPUT_LABEL (asm_out_file, label);
27097 ref->code = 0;
27098 ref->info = NULL;
27099 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
27100 "DWARF macro version number");
27101 if (DWARF_OFFSET_SIZE == 8)
27102 dw2_asm_output_data (1, 1, "Flags: 64-bit");
27103 else
27104 dw2_asm_output_data (1, 0, "Flags: 32-bit");
27105 }
27106 break;
27107 case DW_MACINFO_define:
27108 case DW_MACINFO_undef:
27109 output_macinfo_op (ref);
27110 ref->code = 0;
27111 ref->info = NULL;
27112 break;
27113 default:
27114 gcc_unreachable ();
27115 }
27116 }
27117
27118 /* Initialize the various sections and labels for dwarf output. */
27119
27120 static void
27121 init_sections_and_labels (void)
27122 {
27123 if (!dwarf_split_debug_info)
27124 {
27125 debug_info_section = get_section (DEBUG_INFO_SECTION,
27126 SECTION_DEBUG, NULL);
27127 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
27128 SECTION_DEBUG, NULL);
27129 debug_loc_section = get_section (dwarf_version >= 5
27130 ? DEBUG_LOCLISTS_SECTION
27131 : DEBUG_LOC_SECTION,
27132 SECTION_DEBUG, NULL);
27133 debug_macinfo_section_name
27134 = (dwarf_strict && dwarf_version < 5)
27135 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION;
27136 debug_macinfo_section = get_section (debug_macinfo_section_name,
27137 SECTION_DEBUG, NULL);
27138 }
27139 else
27140 {
27141 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
27142 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
27143 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
27144 SECTION_DEBUG | SECTION_EXCLUDE,
27145 NULL);
27146 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
27147 SECTION_DEBUG, NULL);
27148 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
27149 SECTION_DEBUG, NULL);
27150 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
27151 SECTION_DEBUG, NULL);
27152 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
27153 DEBUG_SKELETON_ABBREV_SECTION_LABEL, 0);
27154
27155 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections stay in
	 the main .o, but the skeleton_line goes into the split-off .dwo.  */
27157 debug_skeleton_line_section
27158 = get_section (DEBUG_DWO_LINE_SECTION,
27159 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
27160 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
27161 DEBUG_SKELETON_LINE_SECTION_LABEL, 0);
27162 debug_str_offsets_section = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
27163 SECTION_DEBUG | SECTION_EXCLUDE,
27164 NULL);
27165 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
27166 DEBUG_SKELETON_INFO_SECTION_LABEL, 0);
27167 debug_loc_section = get_section (dwarf_version >= 5
27168 ? DEBUG_DWO_LOCLISTS_SECTION
27169 : DEBUG_DWO_LOC_SECTION,
27170 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
27171 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
27172 DEBUG_STR_DWO_SECTION_FLAGS, NULL);
27173 debug_macinfo_section_name
27174 = (dwarf_strict && dwarf_version < 5)
27175 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION;
27176 debug_macinfo_section = get_section (debug_macinfo_section_name,
27177 SECTION_DEBUG | SECTION_EXCLUDE,
27178 NULL);
27179 }
27180 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
27181 SECTION_DEBUG, NULL);
27182 debug_line_section = get_section (DEBUG_LINE_SECTION,
27183 SECTION_DEBUG, NULL);
27184 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
27185 SECTION_DEBUG, NULL);
27186 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
27187 SECTION_DEBUG, NULL);
27188 debug_str_section = get_section (DEBUG_STR_SECTION,
27189 DEBUG_STR_SECTION_FLAGS, NULL);
27190 if (!dwarf_split_debug_info && !DWARF2_ASM_LINE_DEBUG_INFO)
27191 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
27192 DEBUG_STR_SECTION_FLAGS, NULL);
27193
27194 debug_ranges_section = get_section (dwarf_version >= 5
27195 ? DEBUG_RNGLISTS_SECTION
27196 : DEBUG_RANGES_SECTION,
27197 SECTION_DEBUG, NULL);
27198 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
27199 SECTION_DEBUG, NULL);
27200
27201 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
27202 DEBUG_ABBREV_SECTION_LABEL, 0);
27203 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
27204 DEBUG_INFO_SECTION_LABEL, 0);
27205 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
27206 DEBUG_LINE_SECTION_LABEL, 0);
27207 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
27208 DEBUG_RANGES_SECTION_LABEL, 0);
27209 if (dwarf_version >= 5 && dwarf_split_debug_info)
27210 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
27211 DEBUG_RANGES_SECTION_LABEL, 1);
27212 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
27213 DEBUG_ADDR_SECTION_LABEL, 0);
27214 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
27215 (dwarf_strict && dwarf_version < 5)
27216 ? DEBUG_MACINFO_SECTION_LABEL
27217 : DEBUG_MACRO_SECTION_LABEL, 0);
27218 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL, 0);
27219 }
27220
27221 /* Set up for Dwarf output at the start of compilation. */
27222
27223 static void
27224 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
27225 {
27226 /* This option is currently broken, see (PR53118 and PR46102). */
27227 if (flag_eliminate_dwarf2_dups
27228 && strstr (lang_hooks.name, "C++"))
27229 {
27230 warning (0, "-feliminate-dwarf2-dups is broken for C++, ignoring");
27231 flag_eliminate_dwarf2_dups = 0;
27232 }
27233
27234 /* Allocate the file_table. */
27235 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
27236
27237 #ifndef DWARF2_LINENO_DEBUGGING_INFO
27238 /* Allocate the decl_die_table. */
27239 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
27240
27241 /* Allocate the decl_loc_table. */
27242 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
27243
27244 /* Allocate the cached_dw_loc_list_table. */
27245 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
27246
27247 /* Allocate the initial hunk of the decl_scope_table. */
27248 vec_alloc (decl_scope_table, 256);
27249
27250 /* Allocate the initial hunk of the abbrev_die_table. */
27251 vec_alloc (abbrev_die_table, 256);
  /* The zeroth entry is allocated but unused.  */
27253 abbrev_die_table->quick_push (NULL);
27254
27255 /* Allocate the dwarf_proc_stack_usage_map. */
27256 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
27257
27258 /* Allocate the pubtypes and pubnames vectors. */
27259 vec_alloc (pubname_table, 32);
27260 vec_alloc (pubtype_table, 32);
27261
27262 vec_alloc (incomplete_types, 64);
27263
27264 vec_alloc (used_rtx_array, 32);
27265
27266 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27267 vec_alloc (macinfo_table, 64);
27268 #endif
27269
27270 /* If front-ends already registered a main translation unit but we were not
27271 ready to perform the association, do this now. */
27272 if (main_translation_unit != NULL_TREE)
27273 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
27274 }
27275
/* Called before compile () starts outputting functions, variables
27277 and toplevel asms into assembly. */
27278
27279 static void
27280 dwarf2out_assembly_start (void)
27281 {
27282 #ifndef DWARF2_LINENO_DEBUGGING_INFO
27283 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
27284 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
27285 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
27286 COLD_TEXT_SECTION_LABEL, 0);
27287 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
27288
27289 switch_to_section (text_section);
27290 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
27291 #endif
27292
27293 /* Make sure the line number table for .text always exists. */
27294 text_section_line_info = new_line_info_table ();
27295 text_section_line_info->end_label = text_end_label;
27296
27297 #ifdef DWARF2_LINENO_DEBUGGING_INFO
27298 cur_line_info_table = text_section_line_info;
27299 #endif
27300
27301 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
27302 && dwarf2out_do_cfi_asm ()
27303 && (!(flag_unwind_tables || flag_exceptions)
27304 || targetm_common.except_unwind_info (&global_options) != UI_DWARF2))
27305 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
27306 }
27307
27308 /* A helper function for dwarf2out_finish called through
27309 htab_traverse. Assign a string its index. All strings must be
27310 collected into the table by the time index_string is called,
27311 because the indexing code relies on htab_traverse to traverse nodes
27312 in the same order for each run. */
27313
27314 int
27315 index_string (indirect_string_node **h, unsigned int *index)
27316 {
27317 indirect_string_node *node = *h;
27318
27319 find_string_form (node);
27320 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
27321 {
27322 gcc_assert (node->index == NO_INDEX_ASSIGNED);
27323 node->index = *index;
27324 *index += 1;
27325 }
27326 return 1;
27327 }
27328
27329 /* A helper function for output_indirect_strings called through
27330 htab_traverse. Output the offset to a string and update the
27331 current offset. */
27332
27333 int
27334 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
27335 {
27336 indirect_string_node *node = *h;
27337
27338 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
27339 {
27340 /* Assert that this node has been assigned an index. */
27341 gcc_assert (node->index != NO_INDEX_ASSIGNED
27342 && node->index != NOT_INDEXED);
27343 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
27344 "indexed string 0x%x: %s", node->index, node->str);
27345 *offset += strlen (node->str) + 1;
27346 }
27347 return 1;
27348 }
27349
27350 /* A helper function for dwarf2out_finish called through
27351 htab_traverse. Output the indexed string. */
27352
27353 int
27354 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
27355 {
27356 struct indirect_string_node *node = *h;
27357
27358 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
27359 {
27360 /* Assert that the strings are output in the same order as their
27361 indexes were assigned. */
27362 gcc_assert (*cur_idx == node->index);
27363 assemble_string (node->str, strlen (node->str) + 1);
27364 *cur_idx += 1;
27365 }
27366 return 1;
27367 }
27368
27369 /* A helper function for dwarf2out_finish called through
27370 htab_traverse. Emit one queued .debug_str string. */
27371
27372 int
27373 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
27374 {
27375 struct indirect_string_node *node = *h;
27376
27377 node->form = find_string_form (node);
27378 if (node->form == form && node->refcount > 0)
27379 {
27380 ASM_OUTPUT_LABEL (asm_out_file, node->label);
27381 assemble_string (node->str, strlen (node->str) + 1);
27382 }
27383
27384 return 1;
27385 }
27386
27387 /* Output the indexed string table. */
27388
27389 static void
27390 output_indirect_strings (void)
27391 {
27392 switch_to_section (debug_str_section);
27393 if (!dwarf_split_debug_info)
27394 debug_str_hash->traverse<enum dwarf_form,
27395 output_indirect_string> (DW_FORM_strp);
27396 else
27397 {
27398 unsigned int offset = 0;
27399 unsigned int cur_idx = 0;
27400
27401 skeleton_debug_str_hash->traverse<enum dwarf_form,
27402 output_indirect_string> (DW_FORM_strp);
27403
27404 switch_to_section (debug_str_offsets_section);
27405 debug_str_hash->traverse_noresize
27406 <unsigned int *, output_index_string_offset> (&offset);
27407 switch_to_section (debug_str_dwo_section);
27408 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
27409 (&cur_idx);
27410 }
27411 }
27412
27413 /* Callback for htab_traverse to assign an index to an entry in the
27414 table, and to write that entry to the .debug_addr section. */
27415
27416 int
27417 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
27418 {
27419 addr_table_entry *entry = *slot;
27420
27421 if (entry->refcount == 0)
27422 {
27423 gcc_assert (entry->index == NO_INDEX_ASSIGNED
27424 || entry->index == NOT_INDEXED);
27425 return 1;
27426 }
27427
27428 gcc_assert (entry->index == *cur_index);
27429 (*cur_index)++;
27430
27431 switch (entry->kind)
27432 {
27433 case ate_kind_rtx:
27434 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
27435 "0x%x", entry->index);
27436 break;
27437 case ate_kind_rtx_dtprel:
27438 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
27439 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
27440 DWARF2_ADDR_SIZE,
27441 entry->addr.rtl);
27442 fputc ('\n', asm_out_file);
27443 break;
27444 case ate_kind_label:
27445 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
27446 "0x%x", entry->index);
27447 break;
27448 default:
27449 gcc_unreachable ();
27450 }
27451 return 1;
27452 }
27453
27454 /* Produce the .debug_addr section. */
27455
27456 static void
27457 output_addr_table (void)
27458 {
27459 unsigned int index = 0;
27460 if (addr_index_table == NULL || addr_index_table->size () == 0)
27461 return;
27462
27463 switch_to_section (debug_addr_section);
27464 addr_index_table
27465 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
27466 }
27467
27468 #if ENABLE_ASSERT_CHECKING
27469 /* Verify that all marks are clear. */
27470
27471 static void
27472 verify_marks_clear (dw_die_ref die)
27473 {
27474 dw_die_ref c;
27475
27476 gcc_assert (! die->die_mark);
27477 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
27478 }
27479 #endif /* ENABLE_ASSERT_CHECKING */
27480
27481 /* Clear the marks for a die and its children.
   Quietly do nothing if the mark isn't set.  */
27483
27484 static void
27485 prune_unmark_dies (dw_die_ref die)
27486 {
27487 dw_die_ref c;
27488
27489 if (die->die_mark)
27490 die->die_mark = 0;
27491 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
27492 }
27493
27494 /* Given LOC that is referenced by a DIE we're marking as used, find all
27495 DWARF procedures it references and mark them as used.  */
27496
27497 static void
27498 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
27499 {
27500 for (; loc != NULL; loc = loc->dw_loc_next)
27501 switch (loc->dw_loc_opc)
27502 {
27503 case DW_OP_implicit_pointer:
27504 case DW_OP_convert:
27505 case DW_OP_reinterpret:
27506 case DW_OP_GNU_implicit_pointer:
27507 case DW_OP_GNU_convert:
27508 case DW_OP_GNU_reinterpret:
27509 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
27510 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
27511 break;
27512 case DW_OP_call2:
27513 case DW_OP_call4:
27514 case DW_OP_call_ref:
27515 case DW_OP_const_type:
27516 case DW_OP_GNU_const_type:
27517 case DW_OP_GNU_parameter_ref:
27518 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
27519 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
27520 break;
27521 case DW_OP_regval_type:
27522 case DW_OP_deref_type:
27523 case DW_OP_GNU_regval_type:
27524 case DW_OP_GNU_deref_type:
27525 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
27526 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
27527 break;
27528 case DW_OP_entry_value:
27529 case DW_OP_GNU_entry_value:
27530 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
27531 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
27532 break;
27533 default:
27534 break;
27535 }
27536 }
27537
27538 /* Given DIE that we're marking as used, find any other dies
27539 it references as attributes and mark them as used. */
27540
27541 static void
27542 prune_unused_types_walk_attribs (dw_die_ref die)
27543 {
27544 dw_attr_node *a;
27545 unsigned ix;
27546
27547 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
27548 {
27549 switch (AT_class (a))
27550 {
27551 /* Make sure DWARF procedures referenced by location descriptions will
27552 get emitted. */
27553 case dw_val_class_loc:
27554 prune_unused_types_walk_loc_descr (AT_loc (a));
27555 break;
27556 case dw_val_class_loc_list:
27557 for (dw_loc_list_ref list = AT_loc_list (a);
27558 list != NULL;
27559 list = list->dw_loc_next)
27560 prune_unused_types_walk_loc_descr (list->expr);
27561 break;
27562
27563 case dw_val_class_die_ref:
27564 /* A reference to another DIE.
27565 Make sure that it will get emitted.
27566 If it was broken out into a comdat group, don't follow it. */
27567 if (! AT_ref (a)->comdat_type_p
27568 || a->dw_attr == DW_AT_specification)
27569 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
27570 break;
27571
27572 case dw_val_class_str:
27573 /* Set the string's refcount to 0 so that prune_unused_types_mark
27574 accounts properly for it. */
27575 a->dw_attr_val.v.val_str->refcount = 0;
27576 break;
27577
27578 default:
27579 break;
27580 }
27581 }
27582 }
27583
27584 /* Mark the children DIEs of DIE that describe its generic parms and args.  */
27585
27586 static void
27587 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
27588 {
27589 dw_die_ref c;
27590
27591 if (die == NULL || die->die_child == NULL)
27592 return;
27593 c = die->die_child;
27594 do
27595 {
27596 if (is_template_parameter (c))
27597 prune_unused_types_mark (c, 1);
27598 c = c->die_sib;
27599 } while (c && c != die->die_child);
27600 }
27601
27602 /* Mark DIE as being used. If DOKIDS is true, then walk down
27603 to DIE's children. */
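/* Note that die_mark is used as a small state machine here: 0 means the DIE
   has not been visited, 1 means it is marked as used, and 2 means it is
   marked and its children have already been walked.  */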
27604
27605 static void
27606 prune_unused_types_mark (dw_die_ref die, int dokids)
27607 {
27608 dw_die_ref c;
27609
27610 if (die->die_mark == 0)
27611 {
27612 /* We haven't done this node yet. Mark it as used. */
27613 die->die_mark = 1;
27614 /* If this is the DIE of a generic type instantiation,
27615 mark the children DIEs that describe its generic parms and
27616 args. */
27617 prune_unused_types_mark_generic_parms_dies (die);
27618
27619 /* We also have to mark its parents as used.
27620 (But we don't want to mark our parent's kids due to this,
27621 unless it is a class.) */
27622 if (die->die_parent)
27623 prune_unused_types_mark (die->die_parent,
27624 class_scope_p (die->die_parent));
27625
27626 /* Mark any referenced nodes. */
27627 prune_unused_types_walk_attribs (die);
27628
27629 /* If this node is a specification,
27630 also mark the definition, if it exists. */
27631 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
27632 prune_unused_types_mark (die->die_definition, 1);
27633 }
27634
27635 if (dokids && die->die_mark != 2)
27636 {
27637 /* We need to walk the children, but haven't done so yet.
27638 Remember that we've walked the kids. */
27639 die->die_mark = 2;
27640
27641 /* If this is an array type, we need to make sure our
27642 kids get marked, even if they're types. If we're
27643 breaking out types into comdat sections, do this
27644 for all type definitions. */
27645 if (die->die_tag == DW_TAG_array_type
27646 || (use_debug_types
27647 && is_type_die (die) && ! is_declaration_die (die)))
27648 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
27649 else
27650 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
27651 }
27652 }
27653
27654 /* For local classes, look if any static member functions were emitted
27655 and if so, mark them. */
27656
27657 static void
27658 prune_unused_types_walk_local_classes (dw_die_ref die)
27659 {
27660 dw_die_ref c;
27661
27662 if (die->die_mark == 2)
27663 return;
27664
27665 switch (die->die_tag)
27666 {
27667 case DW_TAG_structure_type:
27668 case DW_TAG_union_type:
27669 case DW_TAG_class_type:
27670 break;
27671
27672 case DW_TAG_subprogram:
27673 if (!get_AT_flag (die, DW_AT_declaration)
27674 || die->die_definition != NULL)
27675 prune_unused_types_mark (die, 1);
27676 return;
27677
27678 default:
27679 return;
27680 }
27681
27682 /* Mark children. */
27683 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
27684 }
27685
27686 /* Walk the tree DIE and mark types that we actually use. */
27687
27688 static void
27689 prune_unused_types_walk (dw_die_ref die)
27690 {
27691 dw_die_ref c;
27692
27693 /* Don't do anything if this node is already marked and
27694 children have been marked as well. */
27695 if (die->die_mark == 2)
27696 return;
27697
27698 switch (die->die_tag)
27699 {
27700 case DW_TAG_structure_type:
27701 case DW_TAG_union_type:
27702 case DW_TAG_class_type:
27703 if (die->die_perennial_p)
27704 break;
27705
27706 for (c = die->die_parent; c; c = c->die_parent)
27707 if (c->die_tag == DW_TAG_subprogram)
27708 break;
27709
27710 /* Finding used static member functions inside of classes
27711 is needed just for local classes, because for other classes
27712 static member function DIEs with DW_AT_specification
27713 are emitted outside of the DW_TAG_*_type. If we ever change
27714 it, we'd need to call this even for non-local classes. */
27715 if (c)
27716 prune_unused_types_walk_local_classes (die);
27717
27718 /* It's a type node --- don't mark it. */
27719 return;
27720
27721 case DW_TAG_const_type:
27722 case DW_TAG_packed_type:
27723 case DW_TAG_pointer_type:
27724 case DW_TAG_reference_type:
27725 case DW_TAG_rvalue_reference_type:
27726 case DW_TAG_volatile_type:
27727 case DW_TAG_typedef:
27728 case DW_TAG_array_type:
27729 case DW_TAG_interface_type:
27730 case DW_TAG_friend:
27731 case DW_TAG_enumeration_type:
27732 case DW_TAG_subroutine_type:
27733 case DW_TAG_string_type:
27734 case DW_TAG_set_type:
27735 case DW_TAG_subrange_type:
27736 case DW_TAG_ptr_to_member_type:
27737 case DW_TAG_file_type:
27738 /* Type nodes are useful only when other DIEs reference them --- don't
27739 mark them. */
27740 /* FALLTHROUGH */
27741
27742 case DW_TAG_dwarf_procedure:
27743 /* Likewise for DWARF procedures. */
27744
27745 if (die->die_perennial_p)
27746 break;
27747
27748 return;
27749
27750 default:
27751 /* Mark everything else. */
27752 break;
27753 }
27754
27755 if (die->die_mark == 0)
27756 {
27757 die->die_mark = 1;
27758
27759 /* Now, mark any dies referenced from here. */
27760 prune_unused_types_walk_attribs (die);
27761 }
27762
27763 die->die_mark = 2;
27764
27765 /* Mark children. */
27766 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
27767 }
27768
27769 /* Increment the string counts on strings referred to from DIE's
27770 attributes. */
27771
27772 static void
27773 prune_unused_types_update_strings (dw_die_ref die)
27774 {
27775 dw_attr_node *a;
27776 unsigned ix;
27777
27778 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
27779 if (AT_class (a) == dw_val_class_str)
27780 {
27781 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
27782 s->refcount++;
27783 /* Avoid unnecessarily putting strings that are used less than
27784 twice in the hash table. */
27785 if (s->refcount
27786 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
27787 {
27788 indirect_string_node **slot
27789 = debug_str_hash->find_slot_with_hash (s->str,
27790 htab_hash_string (s->str),
27791 INSERT);
27792 gcc_assert (*slot == NULL);
27793 *slot = s;
27794 }
27795 }
27796 }
27797
27798 /* Mark DIE and its children as removed. */
27799
27800 static void
27801 mark_removed (dw_die_ref die)
27802 {
27803 dw_die_ref c;
27804 die->removed = true;
27805 FOR_EACH_CHILD (die, c, mark_removed (c));
27806 }
27807
27808 /* Remove from the tree DIE any dies that aren't marked. */
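/* The children of a DIE form a circular singly-linked list: DIE->die_child
   points to the last child and each die_sib points to the next one, with the
   last child pointing back to the first.  The loop below splices unmarked
   children out of that ring while recursing into the marked ones.  */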
27809
27810 static void
27811 prune_unused_types_prune (dw_die_ref die)
27812 {
27813 dw_die_ref c;
27814
27815 gcc_assert (die->die_mark);
27816 prune_unused_types_update_strings (die);
27817
27818 if (! die->die_child)
27819 return;
27820
27821 c = die->die_child;
27822 do {
27823 dw_die_ref prev = c, next;
27824 for (c = c->die_sib; ! c->die_mark; c = next)
27825 if (c == die->die_child)
27826 {
27827 /* No marked children between 'prev' and the end of the list. */
27828 if (prev == c)
27829 /* No marked children at all. */
27830 die->die_child = NULL;
27831 else
27832 {
27833 prev->die_sib = c->die_sib;
27834 die->die_child = prev;
27835 }
27836 c->die_sib = NULL;
27837 mark_removed (c);
27838 return;
27839 }
27840 else
27841 {
27842 next = c->die_sib;
27843 c->die_sib = NULL;
27844 mark_removed (c);
27845 }
27846
27847 if (c != prev->die_sib)
27848 prev->die_sib = c;
27849 prune_unused_types_prune (c);
27850 } while (c != die->die_child);
27851 }
27852
27853 /* Remove dies representing declarations that we never use. */
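/* The pass runs in stages: (with checking enabled) verify that all marks are
   clear, premark types referenced from global variables, set marks by walking
   the main CU, limbo and comdat trees plus the pubname and base-type tables,
   prune everything left unmarked, and finally clear the marks again.  */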
27854
27855 static void
27856 prune_unused_types (void)
27857 {
27858 unsigned int i;
27859 limbo_die_node *node;
27860 comdat_type_node *ctnode;
27861 pubname_entry *pub;
27862 dw_die_ref base_type;
27863
27864 #if ENABLE_ASSERT_CHECKING
27865 /* All the marks should already be clear. */
27866 verify_marks_clear (comp_unit_die ());
27867 for (node = limbo_die_list; node; node = node->next)
27868 verify_marks_clear (node->die);
27869 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
27870 verify_marks_clear (ctnode->root_die);
27871 #endif /* ENABLE_ASSERT_CHECKING */
27872
27873 /* Mark types that are used in global variables. */
27874 premark_types_used_by_global_vars ();
27875
27876 /* Set the mark on nodes that are actually used. */
27877 prune_unused_types_walk (comp_unit_die ());
27878 for (node = limbo_die_list; node; node = node->next)
27879 prune_unused_types_walk (node->die);
27880 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
27881 {
27882 prune_unused_types_walk (ctnode->root_die);
27883 prune_unused_types_mark (ctnode->type_die, 1);
27884 }
27885
27886 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
27887 are unusual in that they are pubnames that are the children of pubtypes.
27888 They should only be marked via their parent DW_TAG_enumeration_type die,
27889 not as roots in themselves. */
27890 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
27891 if (pub->die->die_tag != DW_TAG_enumerator)
27892 prune_unused_types_mark (pub->die, 1);
27893 for (i = 0; base_types.iterate (i, &base_type); i++)
27894 prune_unused_types_mark (base_type, 1);
27895
27896 /* For -fvar-tracking-assignments, also set the mark on nodes that could be
27897 referenced by DW_TAG_call_site DW_AT_call_origin (i.e. direct call
27898 callees). */
27899 cgraph_node *cnode;
27900 FOR_EACH_FUNCTION (cnode)
27901 if (cnode->referred_to_p (false))
27902 {
27903 dw_die_ref die = lookup_decl_die (cnode->decl);
27904 if (die == NULL || die->die_mark)
27905 continue;
27906 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
27907 if (e->caller != cnode
27908 && opt_for_fn (e->caller->decl, flag_var_tracking_assignments))
27909 {
27910 prune_unused_types_mark (die, 1);
27911 break;
27912 }
27913 }
27914
27915 if (debug_str_hash)
27916 debug_str_hash->empty ();
27917 if (skeleton_debug_str_hash)
27918 skeleton_debug_str_hash->empty ();
27919 prune_unused_types_prune (comp_unit_die ());
27920 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
27921 {
27922 node = *pnode;
27923 if (!node->die->die_mark)
27924 *pnode = node->next;
27925 else
27926 {
27927 prune_unused_types_prune (node->die);
27928 pnode = &node->next;
27929 }
27930 }
27931 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
27932 prune_unused_types_prune (ctnode->root_die);
27933
27934 /* Leave the marks clear. */
27935 prune_unmark_dies (comp_unit_die ());
27936 for (node = limbo_die_list; node; node = node->next)
27937 prune_unmark_dies (node->die);
27938 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
27939 prune_unmark_dies (ctnode->root_die);
27940 }
27941
27942 /* Helpers to manipulate the hash table of comdat type units.  */
27943
27944 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
27945 {
27946 static inline hashval_t hash (const comdat_type_node *);
27947 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
27948 };
27949
27950 inline hashval_t
27951 comdat_type_hasher::hash (const comdat_type_node *type_node)
27952 {
27953 hashval_t h;
27954 memcpy (&h, type_node->signature, sizeof (h));
27955 return h;
27956 }
27957
27958 inline bool
27959 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
27960 const comdat_type_node *type_node_2)
27961 {
27962 return (! memcmp (type_node_1->signature, type_node_2->signature,
27963 DWARF_TYPE_SIGNATURE_SIZE));
27964 }
27965
27966 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to a DIE
27967 to the position it would have occupied had the DECL_ASSEMBLER_NAME
27968 been known when the other attributes were added.  This will
27969 probably improve compactness of debug info, removing equivalent
27970 abbrevs, and hide any differences caused by deferring the
27971 computation of the assembler name, triggered by e.g. PCH. */
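/* A sketch of the reordering, with a hypothetical attribute order: a DIE
   carrying { DW_AT_name, DW_AT_decl_file, DW_AT_decl_line, DW_AT_external,
   DW_AT_linkage_name } is rearranged so that the linkage name immediately
   follows DW_AT_decl_line:
   { DW_AT_name, DW_AT_decl_file, DW_AT_decl_line, DW_AT_linkage_name,
   DW_AT_external }.  */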
27972
27973 static inline void
27974 move_linkage_attr (dw_die_ref die)
27975 {
27976 unsigned ix = vec_safe_length (die->die_attr);
27977 dw_attr_node linkage = (*die->die_attr)[ix - 1];
27978
27979 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
27980 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
27981
27982 while (--ix > 0)
27983 {
27984 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
27985
27986 if (prev->dw_attr == DW_AT_decl_line
27987 || prev->dw_attr == DW_AT_decl_column
27988 || prev->dw_attr == DW_AT_name)
27989 break;
27990 }
27991
27992 if (ix != vec_safe_length (die->die_attr) - 1)
27993 {
27994 die->die_attr->pop ();
27995 die->die_attr->quick_insert (ix, linkage);
27996 }
27997 }
27998
27999 /* Helper function for resolve_addr, mark DW_TAG_base_type nodes
28000 referenced from typed stack ops and count how often they are used. */
28001
28002 static void
28003 mark_base_types (dw_loc_descr_ref loc)
28004 {
28005 dw_die_ref base_type = NULL;
28006
28007 for (; loc; loc = loc->dw_loc_next)
28008 {
28009 switch (loc->dw_loc_opc)
28010 {
28011 case DW_OP_regval_type:
28012 case DW_OP_deref_type:
28013 case DW_OP_GNU_regval_type:
28014 case DW_OP_GNU_deref_type:
28015 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
28016 break;
28017 case DW_OP_convert:
28018 case DW_OP_reinterpret:
28019 case DW_OP_GNU_convert:
28020 case DW_OP_GNU_reinterpret:
28021 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
28022 continue;
28023 /* FALLTHRU */
28024 case DW_OP_const_type:
28025 case DW_OP_GNU_const_type:
28026 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
28027 break;
28028 case DW_OP_entry_value:
28029 case DW_OP_GNU_entry_value:
28030 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
28031 continue;
28032 default:
28033 continue;
28034 }
28035 gcc_assert (base_type->die_parent == comp_unit_die ());
28036 if (base_type->die_mark)
28037 base_type->die_mark++;
28038 else
28039 {
28040 base_types.safe_push (base_type);
28041 base_type->die_mark = 1;
28042 }
28043 }
28044 }
28045
28046 /* Comparison function for sorting marked base types. */
28047
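/* The sort keys, in priority order, are: larger use count (die_mark) first,
   then larger DW_AT_byte_size, DW_AT_encoding and DW_AT_alignment, so the
   most frequently referenced base types end up earliest in the CU.  */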
28048 static int
28049 base_type_cmp (const void *x, const void *y)
28050 {
28051 dw_die_ref dx = *(const dw_die_ref *) x;
28052 dw_die_ref dy = *(const dw_die_ref *) y;
28053 unsigned int byte_size1, byte_size2;
28054 unsigned int encoding1, encoding2;
28055 unsigned int align1, align2;
28056 if (dx->die_mark > dy->die_mark)
28057 return -1;
28058 if (dx->die_mark < dy->die_mark)
28059 return 1;
28060 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
28061 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
28062 if (byte_size1 < byte_size2)
28063 return 1;
28064 if (byte_size1 > byte_size2)
28065 return -1;
28066 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
28067 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
28068 if (encoding1 < encoding2)
28069 return 1;
28070 if (encoding1 > encoding2)
28071 return -1;
28072 align1 = get_AT_unsigned (dx, DW_AT_alignment);
28073 align2 = get_AT_unsigned (dy, DW_AT_alignment);
28074 if (align1 < align2)
28075 return 1;
28076 if (align1 > align2)
28077 return -1;
28078 return 0;
28079 }
28080
28081 /* Move base types marked by mark_base_types as early as possible
28082 in the CU, sorted by decreasing usage count both to make the
28083 uleb128 references as small as possible and to make sure they
28084 will have die_offset already computed by calc_die_sizes when
28085 sizes of typed stack loc ops are computed.  */
28086
28087 static void
28088 move_marked_base_types (void)
28089 {
28090 unsigned int i;
28091 dw_die_ref base_type, die, c;
28092
28093 if (base_types.is_empty ())
28094 return;
28095
28096 /* Sort by decreasing usage count, they will be added again in that
28097 order later on. */
28098 base_types.qsort (base_type_cmp);
28099 die = comp_unit_die ();
28100 c = die->die_child;
28101 do
28102 {
28103 dw_die_ref prev = c;
28104 c = c->die_sib;
28105 while (c->die_mark)
28106 {
28107 remove_child_with_prev (c, prev);
28108 /* As base types got marked, there must be at least
28109 one node other than DW_TAG_base_type. */
28110 gcc_assert (die->die_child != NULL);
28111 c = prev->die_sib;
28112 }
28113 }
28114 while (c != die->die_child);
28115 gcc_assert (die->die_child);
28116 c = die->die_child;
28117 for (i = 0; base_types.iterate (i, &base_type); i++)
28118 {
28119 base_type->die_mark = 0;
28120 base_type->die_sib = c->die_sib;
28121 c->die_sib = base_type;
28122 c = base_type;
28123 }
28124 }
28125
28126 /* Helper function for resolve_addr: attempt to resolve one CONST_STRING
28127 and return true if successful.  Similarly, verify that SYMBOL_REFs refer
28128 to variables emitted in the current CU.  */
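/* For example, a CONST_STRING "abc" is replaced by the SYMBOL_REF of its
   constant pool entry; if that entry (or a referenced variable) was never
   actually emitted, resolution fails and the caller drops or rewrites the
   location that used it.  */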
28129
28130 static bool
28131 resolve_one_addr (rtx *addr)
28132 {
28133 rtx rtl = *addr;
28134
28135 if (GET_CODE (rtl) == CONST_STRING)
28136 {
28137 size_t len = strlen (XSTR (rtl, 0)) + 1;
28138 tree t = build_string (len, XSTR (rtl, 0));
28139 tree tlen = size_int (len - 1);
28140 TREE_TYPE (t)
28141 = build_array_type (char_type_node, build_index_type (tlen));
28142 rtl = lookup_constant_def (t);
28143 if (!rtl || !MEM_P (rtl))
28144 return false;
28145 rtl = XEXP (rtl, 0);
28146 if (GET_CODE (rtl) == SYMBOL_REF
28147 && SYMBOL_REF_DECL (rtl)
28148 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
28149 return false;
28150 vec_safe_push (used_rtx_array, rtl);
28151 *addr = rtl;
28152 return true;
28153 }
28154
28155 if (GET_CODE (rtl) == SYMBOL_REF
28156 && SYMBOL_REF_DECL (rtl))
28157 {
28158 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
28159 {
28160 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
28161 return false;
28162 }
28163 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
28164 return false;
28165 }
28166
28167 if (GET_CODE (rtl) == CONST)
28168 {
28169 subrtx_ptr_iterator::array_type array;
28170 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
28171 if (!resolve_one_addr (*iter))
28172 return false;
28173 }
28174
28175 return true;
28176 }
28177
28178 /* For STRING_CST, return SYMBOL_REF of its constant pool entry,
28179 if possible, and create DW_TAG_dwarf_procedure that can be referenced
28180 from DW_OP_implicit_pointer if the string hasn't been seen yet. */
28181
28182 static rtx
28183 string_cst_pool_decl (tree t)
28184 {
28185 rtx rtl = output_constant_def (t, 1);
28186 unsigned char *array;
28187 dw_loc_descr_ref l;
28188 tree decl;
28189 size_t len;
28190 dw_die_ref ref;
28191
28192 if (!rtl || !MEM_P (rtl))
28193 return NULL_RTX;
28194 rtl = XEXP (rtl, 0);
28195 if (GET_CODE (rtl) != SYMBOL_REF
28196 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
28197 return NULL_RTX;
28198
28199 decl = SYMBOL_REF_DECL (rtl);
28200 if (!lookup_decl_die (decl))
28201 {
28202 len = TREE_STRING_LENGTH (t);
28203 vec_safe_push (used_rtx_array, rtl);
28204 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
28205 array = ggc_vec_alloc<unsigned char> (len);
28206 memcpy (array, TREE_STRING_POINTER (t), len);
28207 l = new_loc_descr (DW_OP_implicit_value, len, 0);
28208 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
28209 l->dw_loc_oprnd2.v.val_vec.length = len;
28210 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
28211 l->dw_loc_oprnd2.v.val_vec.array = array;
28212 add_AT_loc (ref, DW_AT_location, l);
28213 equate_decl_number_to_die (decl, ref);
28214 }
28215 return rtl;
28216 }
28217
28218 /* Helper function of resolve_addr_in_expr. LOC is
28219 a DW_OP_addr followed by DW_OP_stack_value, either at the start
28220 of exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
28221 resolved. Replace it (both DW_OP_addr and DW_OP_stack_value)
28222 with DW_OP_implicit_pointer if possible
28223 and return true; if unsuccessful, return false.  */
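/* For instance, DW_OP_addr <var> DW_OP_stack_value, where <var> was
   optimized away but still has a DIE with DW_AT_location or
   DW_AT_const_value, becomes DW_OP_implicit_pointer <that DIE> <offset>.  */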
28224
28225 static bool
28226 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
28227 {
28228 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
28229 HOST_WIDE_INT offset = 0;
28230 dw_die_ref ref = NULL;
28231 tree decl;
28232
28233 if (GET_CODE (rtl) == CONST
28234 && GET_CODE (XEXP (rtl, 0)) == PLUS
28235 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
28236 {
28237 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
28238 rtl = XEXP (XEXP (rtl, 0), 0);
28239 }
28240 if (GET_CODE (rtl) == CONST_STRING)
28241 {
28242 size_t len = strlen (XSTR (rtl, 0)) + 1;
28243 tree t = build_string (len, XSTR (rtl, 0));
28244 tree tlen = size_int (len - 1);
28245
28246 TREE_TYPE (t)
28247 = build_array_type (char_type_node, build_index_type (tlen));
28248 rtl = string_cst_pool_decl (t);
28249 if (!rtl)
28250 return false;
28251 }
28252 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
28253 {
28254 decl = SYMBOL_REF_DECL (rtl);
28255 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
28256 {
28257 ref = lookup_decl_die (decl);
28258 if (ref && (get_AT (ref, DW_AT_location)
28259 || get_AT (ref, DW_AT_const_value)))
28260 {
28261 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
28262 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28263 loc->dw_loc_oprnd1.val_entry = NULL;
28264 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
28265 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
28266 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
28267 loc->dw_loc_oprnd2.v.val_int = offset;
28268 return true;
28269 }
28270 }
28271 }
28272 return false;
28273 }
28274
28275 /* Helper function for resolve_addr: handle one location expression.
28276 Return false if at least one CONST_STRING or SYMBOL_REF in
28277 the location list couldn't be resolved.  */
28278
28279 static bool
28280 resolve_addr_in_expr (dw_loc_descr_ref loc)
28281 {
28282 dw_loc_descr_ref keep = NULL;
28283 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
28284 switch (loc->dw_loc_opc)
28285 {
28286 case DW_OP_addr:
28287 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
28288 {
28289 if ((prev == NULL
28290 || prev->dw_loc_opc == DW_OP_piece
28291 || prev->dw_loc_opc == DW_OP_bit_piece)
28292 && loc->dw_loc_next
28293 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
28294 && (!dwarf_strict || dwarf_version >= 5)
28295 && optimize_one_addr_into_implicit_ptr (loc))
28296 break;
28297 return false;
28298 }
28299 break;
28300 case DW_OP_GNU_addr_index:
28301 case DW_OP_GNU_const_index:
28302 if (loc->dw_loc_opc == DW_OP_GNU_addr_index
28303 || (loc->dw_loc_opc == DW_OP_GNU_const_index && loc->dtprel))
28304 {
28305 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
28306 if (!resolve_one_addr (&rtl))
28307 return false;
28308 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
28309 loc->dw_loc_oprnd1.val_entry
28310 = add_addr_table_entry (rtl, ate_kind_rtx);
28311 }
28312 break;
28313 case DW_OP_const4u:
28314 case DW_OP_const8u:
28315 if (loc->dtprel
28316 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
28317 return false;
28318 break;
28319 case DW_OP_plus_uconst:
28320 if (size_of_loc_descr (loc)
28321 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
28322 + 1
28323 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
28324 {
28325 dw_loc_descr_ref repl
28326 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
28327 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
28328 add_loc_descr (&repl, loc->dw_loc_next);
28329 *loc = *repl;
28330 }
28331 break;
28332 case DW_OP_implicit_value:
28333 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
28334 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
28335 return false;
28336 break;
28337 case DW_OP_implicit_pointer:
28338 case DW_OP_GNU_implicit_pointer:
28339 case DW_OP_GNU_parameter_ref:
28340 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
28341 {
28342 dw_die_ref ref
28343 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
28344 if (ref == NULL)
28345 return false;
28346 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28347 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
28348 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
28349 }
28350 break;
28351 case DW_OP_const_type:
28352 case DW_OP_regval_type:
28353 case DW_OP_deref_type:
28354 case DW_OP_convert:
28355 case DW_OP_reinterpret:
28356 case DW_OP_GNU_const_type:
28357 case DW_OP_GNU_regval_type:
28358 case DW_OP_GNU_deref_type:
28359 case DW_OP_GNU_convert:
28360 case DW_OP_GNU_reinterpret:
28361 while (loc->dw_loc_next
28362 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
28363 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
28364 {
28365 dw_die_ref base1, base2;
28366 unsigned enc1, enc2, size1, size2;
28367 if (loc->dw_loc_opc == DW_OP_regval_type
28368 || loc->dw_loc_opc == DW_OP_deref_type
28369 || loc->dw_loc_opc == DW_OP_GNU_regval_type
28370 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
28371 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
28372 else if (loc->dw_loc_oprnd1.val_class
28373 == dw_val_class_unsigned_const)
28374 break;
28375 else
28376 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
28377 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
28378 == dw_val_class_unsigned_const)
28379 break;
28380 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
28381 gcc_assert (base1->die_tag == DW_TAG_base_type
28382 && base2->die_tag == DW_TAG_base_type);
28383 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
28384 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
28385 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
28386 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
28387 if (size1 == size2
28388 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
28389 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
28390 && loc != keep)
28391 || enc1 == enc2))
28392 {
28393 /* Optimize away next DW_OP_convert after
28394 adjusting LOC's base type die reference. */
28395 if (loc->dw_loc_opc == DW_OP_regval_type
28396 || loc->dw_loc_opc == DW_OP_deref_type
28397 || loc->dw_loc_opc == DW_OP_GNU_regval_type
28398 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
28399 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
28400 else
28401 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
28402 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
28403 continue;
28404 }
28405 /* Don't change integer DW_OP_convert after e.g. floating
28406 point typed stack entry. */
28407 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
28408 keep = loc->dw_loc_next;
28409 break;
28410 }
28411 break;
28412 default:
28413 break;
28414 }
28415 return true;
28416 }
28417
28418 /* Helper function of resolve_addr. DIE had DW_AT_location of
28419 DW_OP_addr alone, which referred to DECL in DW_OP_addr's operand
28420 and DW_OP_addr couldn't be resolved. resolve_addr has already
28421 removed the DW_AT_location attribute. This function attempts to
28422 add a new DW_AT_location attribute with DW_OP_implicit_pointer
28423 to it or DW_AT_const_value attribute, if possible. */
28424
28425 static void
28426 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
28427 {
28428 if (!VAR_P (decl)
28429 || lookup_decl_die (decl) != die
28430 || DECL_EXTERNAL (decl)
28431 || !TREE_STATIC (decl)
28432 || DECL_INITIAL (decl) == NULL_TREE
28433 || DECL_P (DECL_INITIAL (decl))
28434 || get_AT (die, DW_AT_const_value))
28435 return;
28436
28437 tree init = DECL_INITIAL (decl);
28438 HOST_WIDE_INT offset = 0;
28439 /* For variables that have been optimized away and thus
28440 don't have a memory location, see if we can emit
28441 DW_AT_const_value instead. */
28442 if (tree_add_const_value_attribute (die, init))
28443 return;
28444 if (dwarf_strict && dwarf_version < 5)
28445 return;
28446 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
28447 and ADDR_EXPR refers to a decl that has DW_AT_location or
28448 DW_AT_const_value (but isn't addressable, otherwise
28449 resolving the original DW_OP_addr wouldn't fail), see if
28450 we can add DW_OP_implicit_pointer. */
28451 STRIP_NOPS (init);
28452 if (TREE_CODE (init) == POINTER_PLUS_EXPR
28453 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
28454 {
28455 offset = tree_to_shwi (TREE_OPERAND (init, 1));
28456 init = TREE_OPERAND (init, 0);
28457 STRIP_NOPS (init);
28458 }
28459 if (TREE_CODE (init) != ADDR_EXPR)
28460 return;
28461 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
28462 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
28463 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
28464 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
28465 && TREE_OPERAND (init, 0) != decl))
28466 {
28467 dw_die_ref ref;
28468 dw_loc_descr_ref l;
28469
28470 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
28471 {
28472 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
28473 if (!rtl)
28474 return;
28475 decl = SYMBOL_REF_DECL (rtl);
28476 }
28477 else
28478 decl = TREE_OPERAND (init, 0);
28479 ref = lookup_decl_die (decl);
28480 if (ref == NULL
28481 || (!get_AT (ref, DW_AT_location)
28482 && !get_AT (ref, DW_AT_const_value)))
28483 return;
28484 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
28485 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28486 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
28487 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
28488 add_AT_loc (die, DW_AT_location, l);
28489 }
28490 }
28491
28492 /* Return NULL if l is a DWARF expression, or first op that is not
28493 valid DWARF expression. */
28494
28495 static dw_loc_descr_ref
28496 non_dwarf_expression (dw_loc_descr_ref l)
28497 {
28498 while (l)
28499 {
28500 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
28501 return l;
28502 switch (l->dw_loc_opc)
28503 {
28504 case DW_OP_regx:
28505 case DW_OP_implicit_value:
28506 case DW_OP_stack_value:
28507 case DW_OP_implicit_pointer:
28508 case DW_OP_GNU_implicit_pointer:
28509 case DW_OP_GNU_parameter_ref:
28510 case DW_OP_piece:
28511 case DW_OP_bit_piece:
28512 return l;
28513 default:
28514 break;
28515 }
28516 l = l->dw_loc_next;
28517 }
28518 return NULL;
28519 }
28520
28521 /* Return an adjusted copy of EXPR:
28522 If it is an empty DWARF expression, return it.
28523 If it is a valid non-empty DWARF expression,
28524 return a copy of EXPR with a copy of DEREF appended to it.
28525 If it is a DWARF expression followed by DW_OP_reg{N,x}, return a
28526 copy of the DWARF expression with DW_OP_breg{N,x} <0> appended
28527 and no DEREF.
28528 If it is a DWARF expression followed by DW_OP_stack_value, return a
28529 copy of the DWARF expression without anything appended.
28530 Otherwise, return NULL.  */
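/* For example, with DEREF = DW_OP_deref: EXPR = DW_OP_addr <sym> yields
   DW_OP_addr <sym> DW_OP_deref; EXPR = DW_OP_reg3 yields DW_OP_breg3 <0>
   (no DEREF); EXPR = DW_OP_lit0 DW_OP_stack_value yields DW_OP_lit0 with
   nothing appended.  */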
28531
28532 static dw_loc_descr_ref
28533 copy_deref_exprloc (dw_loc_descr_ref expr, dw_loc_descr_ref deref)
28534 {
28535
28536 if (expr == NULL)
28537 return NULL;
28538
28539 dw_loc_descr_ref l = non_dwarf_expression (expr);
28540 if (l && l->dw_loc_next)
28541 return NULL;
28542
28543 if (l)
28544 {
28545 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
28546 deref = new_loc_descr ((enum dwarf_location_atom)
28547 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
28548 0, 0);
28549 else
28550 switch (l->dw_loc_opc)
28551 {
28552 case DW_OP_regx:
28553 deref = new_loc_descr (DW_OP_bregx,
28554 l->dw_loc_oprnd1.v.val_unsigned, 0);
28555 break;
28556 case DW_OP_stack_value:
28557 deref = NULL;
28558 break;
28559 default:
28560 return NULL;
28561 }
28562 }
28563 else
28564 deref = new_loc_descr (deref->dw_loc_opc,
28565 deref->dw_loc_oprnd1.v.val_int, 0);
28566
28567 dw_loc_descr_ref ret = NULL, *p = &ret;
28568 while (expr != l)
28569 {
28570 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
28571 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
28572 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
28573 p = &(*p)->dw_loc_next;
28574 expr = expr->dw_loc_next;
28575 }
28576 *p = deref;
28577 return ret;
28578 }
28579
28580 /* For DW_AT_string_length attribute with DW_OP_call4 reference to a variable
28581 or argument, adjust it if needed and return:
28582 -1 if the DW_AT_string_length attribute (and the
28583 DW_AT_{string_length_,}byte_size attribute, if present) should be removed,
28584 0 if the attribute should be kept as is because the referenced var or
28585 argument has only a DWARF expression that covers all its ranges,
28586 1 if the attribute has been successfully adjusted. */
28587
28588 static int
28589 optimize_string_length (dw_attr_node *a)
28590 {
28591 dw_loc_descr_ref l = AT_loc (a), lv;
28592 dw_die_ref die = l->dw_loc_oprnd1.v.val_die_ref.die;
28593 dw_attr_node *av = get_AT (die, DW_AT_location);
28594 dw_loc_list_ref d;
28595 bool non_dwarf_expr = false;
28596
28597 if (av == NULL)
28598 return -1;
28599 switch (AT_class (av))
28600 {
28601 case dw_val_class_loc_list:
28602 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
28603 if (d->expr && non_dwarf_expression (d->expr))
28604 non_dwarf_expr = true;
28605 break;
28606 case dw_val_class_loc:
28607 lv = AT_loc (av);
28608 if (lv == NULL)
28609 return -1;
28610 if (non_dwarf_expression (lv))
28611 non_dwarf_expr = true;
28612 break;
28613 default:
28614 return -1;
28615 }
28616
28617 /* If it is safe to keep DW_OP_call4 in, keep it. */
28618 if (!non_dwarf_expr
28619 && (l->dw_loc_next == NULL || AT_class (av) == dw_val_class_loc))
28620 return 0;
28621
28622 /* If not dereferencing the DW_OP_call4 afterwards, we can just
28623 copy over the DW_AT_location attribute from die to a. */
28624 if (l->dw_loc_next == NULL)
28625 {
28626 a->dw_attr_val = av->dw_attr_val;
28627 return 1;
28628 }
28629
28630 dw_loc_list_ref list, *p;
28631 switch (AT_class (av))
28632 {
28633 case dw_val_class_loc_list:
28634 p = &list;
28635 list = NULL;
28636 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
28637 {
28638 lv = copy_deref_exprloc (d->expr, l->dw_loc_next);
28639 if (lv)
28640 {
28641 *p = new_loc_list (lv, d->begin, d->end, d->section);
28642 p = &(*p)->dw_loc_next;
28643 }
28644 }
28645 if (list == NULL)
28646 return -1;
28647 a->dw_attr_val.val_class = dw_val_class_loc_list;
28648 gen_llsym (list);
28649 *AT_loc_list_ptr (a) = list;
28650 return 1;
28651 case dw_val_class_loc:
28652 lv = copy_deref_exprloc (AT_loc (av), l->dw_loc_next);
28653 if (lv == NULL)
28654 return -1;
28655 a->dw_attr_val.v.val_loc = lv;
28656 return 1;
28657 default:
28658 gcc_unreachable ();
28659 }
28660 }
28661
28662 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
28663 an address in the .rodata section if the string literal is emitted there,
28664 or remove the containing location list or replace DW_AT_const_value
28665 with DW_AT_location and an empty location expression, if it isn't found
28666 in .rodata. Similarly for SYMBOL_REFs, keep only those that refer
28667 to something that has been emitted in the current CU. */
28668
28669 static void
28670 resolve_addr (dw_die_ref die)
28671 {
28672 dw_die_ref c;
28673 dw_attr_node *a;
28674 dw_loc_list_ref *curr, *start, loc;
28675 unsigned ix;
28676 bool remove_AT_byte_size = false;
28677
28678 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
28679 switch (AT_class (a))
28680 {
28681 case dw_val_class_loc_list:
28682 start = curr = AT_loc_list_ptr (a);
28683 loc = *curr;
28684 gcc_assert (loc);
28685 /* The same list can be referenced more than once. See if we have
28686 already recorded the result from a previous pass. */
28687 if (loc->replaced)
28688 *curr = loc->dw_loc_next;
28689 else if (!loc->resolved_addr)
28690 {
28691 /* As things stand, we do not expect or allow one die to
28692 reference a suffix of another die's location list chain.
28693 References must be identical or completely separate.
28694 There is therefore no need to cache the result of this
28695 pass on any list other than the first; doing so
28696 would lead to unnecessary writes. */
28697 while (*curr)
28698 {
28699 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
28700 if (!resolve_addr_in_expr ((*curr)->expr))
28701 {
28702 dw_loc_list_ref next = (*curr)->dw_loc_next;
28703 dw_loc_descr_ref l = (*curr)->expr;
28704
28705 if (next && (*curr)->ll_symbol)
28706 {
28707 gcc_assert (!next->ll_symbol);
28708 next->ll_symbol = (*curr)->ll_symbol;
28709 }
28710 if (dwarf_split_debug_info)
28711 remove_loc_list_addr_table_entries (l);
28712 *curr = next;
28713 }
28714 else
28715 {
28716 mark_base_types ((*curr)->expr);
28717 curr = &(*curr)->dw_loc_next;
28718 }
28719 }
28720 if (loc == *start)
28721 loc->resolved_addr = 1;
28722 else
28723 {
28724 loc->replaced = 1;
28725 loc->dw_loc_next = *start;
28726 }
28727 }
28728 if (!*start)
28729 {
28730 remove_AT (die, a->dw_attr);
28731 ix--;
28732 }
28733 break;
28734 case dw_val_class_loc:
28735 {
28736 dw_loc_descr_ref l = AT_loc (a);
28737 /* Using DW_OP_call4 or DW_OP_call4 DW_OP_deref in
28738 DW_AT_string_length is only a rough approximation; unfortunately
28739 DW_AT_string_length can't be a reference to a DIE. DW_OP_call4
28740 needs a DWARF expression, while DW_AT_location of the referenced
28741 variable or argument might be any location description. */
28742 if (a->dw_attr == DW_AT_string_length
28743 && l
28744 && l->dw_loc_opc == DW_OP_call4
28745 && l->dw_loc_oprnd1.val_class == dw_val_class_die_ref
28746 && (l->dw_loc_next == NULL
28747 || (l->dw_loc_next->dw_loc_next == NULL
28748 && (l->dw_loc_next->dw_loc_opc == DW_OP_deref
28749 || l->dw_loc_next->dw_loc_opc == DW_OP_deref_size))))
28750 {
28751 switch (optimize_string_length (a))
28752 {
28753 case -1:
28754 remove_AT (die, a->dw_attr);
28755 ix--;
28756 /* If we drop DW_AT_string_length, we need to drop also
28757 DW_AT_{string_length_,}byte_size. */
28758 remove_AT_byte_size = true;
28759 continue;
28760 default:
28761 break;
28762 case 1:
28763 /* Even if we keep the optimized DW_AT_string_length,
28764 it might have changed AT_class, so process it again. */
28765 ix--;
28766 continue;
28767 }
28768 }
28769 /* For -gdwarf-2 don't attempt to optimize
28770 DW_AT_data_member_location containing
28771 DW_OP_plus_uconst - older consumers might
28772 rely on it being that op instead of a more complex,
28773 but shorter, location description. */
28774 if ((dwarf_version > 2
28775 || a->dw_attr != DW_AT_data_member_location
28776 || l == NULL
28777 || l->dw_loc_opc != DW_OP_plus_uconst
28778 || l->dw_loc_next != NULL)
28779 && !resolve_addr_in_expr (l))
28780 {
28781 if (dwarf_split_debug_info)
28782 remove_loc_list_addr_table_entries (l);
28783 if (l != NULL
28784 && l->dw_loc_next == NULL
28785 && l->dw_loc_opc == DW_OP_addr
28786 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
28787 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
28788 && a->dw_attr == DW_AT_location)
28789 {
28790 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
28791 remove_AT (die, a->dw_attr);
28792 ix--;
28793 optimize_location_into_implicit_ptr (die, decl);
28794 break;
28795 }
28796 remove_AT (die, a->dw_attr);
28797 ix--;
28798 }
28799 else
28800 mark_base_types (l);
28801 }
28802 break;
28803 case dw_val_class_addr:
28804 if (a->dw_attr == DW_AT_const_value
28805 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
28806 {
28807 if (AT_index (a) != NOT_INDEXED)
28808 remove_addr_table_entry (a->dw_attr_val.val_entry);
28809 remove_AT (die, a->dw_attr);
28810 ix--;
28811 }
28812 if ((die->die_tag == DW_TAG_call_site
28813 && a->dw_attr == DW_AT_call_origin)
28814 || (die->die_tag == DW_TAG_GNU_call_site
28815 && a->dw_attr == DW_AT_abstract_origin))
28816 {
28817 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
28818 dw_die_ref tdie = lookup_decl_die (tdecl);
28819 dw_die_ref cdie;
28820 if (tdie == NULL
28821 && DECL_EXTERNAL (tdecl)
28822 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
28823 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
28824 {
28825 dw_die_ref pdie = cdie;
28826 /* Make sure we don't add these DIEs into type units.
28827 We could emit skeleton DIEs for context (namespaces,
28828 outer structs/classes) and a skeleton DIE for the
28829 innermost context with DW_AT_signature pointing to the
28830 type unit. See PR78835. */
28831 while (pdie && pdie->die_tag != DW_TAG_type_unit)
28832 pdie = pdie->die_parent;
28833 if (pdie == NULL)
28834 {
28835 /* Creating a full DIE for tdecl is overly expensive and
28836 at this point even wrong when in the LTO phase
28837 as it can end up generating new type DIEs we didn't
28838 output and thus optimize_external_refs will crash. */
28839 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
28840 add_AT_flag (tdie, DW_AT_external, 1);
28841 add_AT_flag (tdie, DW_AT_declaration, 1);
28842 add_linkage_attr (tdie, tdecl);
28843 add_name_and_src_coords_attributes (tdie, tdecl);
28844 equate_decl_number_to_die (tdecl, tdie);
28845 }
28846 }
28847 if (tdie)
28848 {
28849 a->dw_attr_val.val_class = dw_val_class_die_ref;
28850 a->dw_attr_val.v.val_die_ref.die = tdie;
28851 a->dw_attr_val.v.val_die_ref.external = 0;
28852 }
28853 else
28854 {
28855 if (AT_index (a) != NOT_INDEXED)
28856 remove_addr_table_entry (a->dw_attr_val.val_entry);
28857 remove_AT (die, a->dw_attr);
28858 ix--;
28859 }
28860 }
28861 break;
28862 default:
28863 break;
28864 }
28865
28866 if (remove_AT_byte_size)
28867 remove_AT (die, dwarf_version >= 5
28868 ? DW_AT_string_length_byte_size
28869 : DW_AT_byte_size);
28870
28871 FOR_EACH_CHILD (die, c, resolve_addr (c));
28872 }
28873 \f
28874 /* Helper routines for optimize_location_lists.
28875 This pass tries to share identical local lists in .debug_loc
28876 section. */
28877
28878 /* Iteratively hash operands of LOC opcode into HSTATE. */
28879
28880 static void
28881 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
28882 {
28883 dw_val_ref val1 = &loc->dw_loc_oprnd1;
28884 dw_val_ref val2 = &loc->dw_loc_oprnd2;
28885
28886 switch (loc->dw_loc_opc)
28887 {
28888 case DW_OP_const4u:
28889 case DW_OP_const8u:
28890 if (loc->dtprel)
28891 goto hash_addr;
28892 /* FALLTHRU */
28893 case DW_OP_const1u:
28894 case DW_OP_const1s:
28895 case DW_OP_const2u:
28896 case DW_OP_const2s:
28897 case DW_OP_const4s:
28898 case DW_OP_const8s:
28899 case DW_OP_constu:
28900 case DW_OP_consts:
28901 case DW_OP_pick:
28902 case DW_OP_plus_uconst:
28903 case DW_OP_breg0:
28904 case DW_OP_breg1:
28905 case DW_OP_breg2:
28906 case DW_OP_breg3:
28907 case DW_OP_breg4:
28908 case DW_OP_breg5:
28909 case DW_OP_breg6:
28910 case DW_OP_breg7:
28911 case DW_OP_breg8:
28912 case DW_OP_breg9:
28913 case DW_OP_breg10:
28914 case DW_OP_breg11:
28915 case DW_OP_breg12:
28916 case DW_OP_breg13:
28917 case DW_OP_breg14:
28918 case DW_OP_breg15:
28919 case DW_OP_breg16:
28920 case DW_OP_breg17:
28921 case DW_OP_breg18:
28922 case DW_OP_breg19:
28923 case DW_OP_breg20:
28924 case DW_OP_breg21:
28925 case DW_OP_breg22:
28926 case DW_OP_breg23:
28927 case DW_OP_breg24:
28928 case DW_OP_breg25:
28929 case DW_OP_breg26:
28930 case DW_OP_breg27:
28931 case DW_OP_breg28:
28932 case DW_OP_breg29:
28933 case DW_OP_breg30:
28934 case DW_OP_breg31:
28935 case DW_OP_regx:
28936 case DW_OP_fbreg:
28937 case DW_OP_piece:
28938 case DW_OP_deref_size:
28939 case DW_OP_xderef_size:
28940 hstate.add_object (val1->v.val_int);
28941 break;
28942 case DW_OP_skip:
28943 case DW_OP_bra:
28944 {
28945 int offset;
28946
28947 gcc_assert (val1->val_class == dw_val_class_loc);
28948 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
28949 hstate.add_object (offset);
28950 }
28951 break;
28952 case DW_OP_implicit_value:
28953 hstate.add_object (val1->v.val_unsigned);
28954 switch (val2->val_class)
28955 {
28956 case dw_val_class_const:
28957 hstate.add_object (val2->v.val_int);
28958 break;
28959 case dw_val_class_vec:
28960 {
28961 unsigned int elt_size = val2->v.val_vec.elt_size;
28962 unsigned int len = val2->v.val_vec.length;
28963
28964 hstate.add_int (elt_size);
28965 hstate.add_int (len);
28966 hstate.add (val2->v.val_vec.array, len * elt_size);
28967 }
28968 break;
28969 case dw_val_class_const_double:
28970 hstate.add_object (val2->v.val_double.low);
28971 hstate.add_object (val2->v.val_double.high);
28972 break;
28973 case dw_val_class_wide_int:
28974 hstate.add (val2->v.val_wide->get_val (),
28975 get_full_len (*val2->v.val_wide)
28976 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
28977 break;
28978 case dw_val_class_addr:
28979 inchash::add_rtx (val2->v.val_addr, hstate);
28980 break;
28981 default:
28982 gcc_unreachable ();
28983 }
28984 break;
28985 case DW_OP_bregx:
28986 case DW_OP_bit_piece:
28987 hstate.add_object (val1->v.val_int);
28988 hstate.add_object (val2->v.val_int);
28989 break;
28990 case DW_OP_addr:
28991 hash_addr:
28992 if (loc->dtprel)
28993 {
28994 unsigned char dtprel = 0xd1;
28995 hstate.add_object (dtprel);
28996 }
28997 inchash::add_rtx (val1->v.val_addr, hstate);
28998 break;
28999 case DW_OP_GNU_addr_index:
29000 case DW_OP_GNU_const_index:
29001 {
29002 if (loc->dtprel)
29003 {
29004 unsigned char dtprel = 0xd1;
29005 hstate.add_object (dtprel);
29006 }
29007 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
29008 }
29009 break;
29010 case DW_OP_implicit_pointer:
29011 case DW_OP_GNU_implicit_pointer:
29012 hstate.add_int (val2->v.val_int);
29013 break;
29014 case DW_OP_entry_value:
29015 case DW_OP_GNU_entry_value:
29016 hstate.add_object (val1->v.val_loc);
29017 break;
29018 case DW_OP_regval_type:
29019 case DW_OP_deref_type:
29020 case DW_OP_GNU_regval_type:
29021 case DW_OP_GNU_deref_type:
29022 {
29023 unsigned int byte_size
29024 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
29025 unsigned int encoding
29026 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
29027 hstate.add_object (val1->v.val_int);
29028 hstate.add_object (byte_size);
29029 hstate.add_object (encoding);
29030 }
29031 break;
29032 case DW_OP_convert:
29033 case DW_OP_reinterpret:
29034 case DW_OP_GNU_convert:
29035 case DW_OP_GNU_reinterpret:
29036 if (val1->val_class == dw_val_class_unsigned_const)
29037 {
29038 hstate.add_object (val1->v.val_unsigned);
29039 break;
29040 }
29041 /* FALLTHRU */
29042 case DW_OP_const_type:
29043 case DW_OP_GNU_const_type:
29044 {
29045 unsigned int byte_size
29046 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
29047 unsigned int encoding
29048 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
29049 hstate.add_object (byte_size);
29050 hstate.add_object (encoding);
29051 if (loc->dw_loc_opc != DW_OP_const_type
29052 && loc->dw_loc_opc != DW_OP_GNU_const_type)
29053 break;
29054 hstate.add_object (val2->val_class);
29055 switch (val2->val_class)
29056 {
29057 case dw_val_class_const:
29058 hstate.add_object (val2->v.val_int);
29059 break;
29060 case dw_val_class_vec:
29061 {
29062 unsigned int elt_size = val2->v.val_vec.elt_size;
29063 unsigned int len = val2->v.val_vec.length;
29064
29065 hstate.add_object (elt_size);
29066 hstate.add_object (len);
29067 hstate.add (val2->v.val_vec.array, len * elt_size);
29068 }
29069 break;
29070 case dw_val_class_const_double:
29071 hstate.add_object (val2->v.val_double.low);
29072 hstate.add_object (val2->v.val_double.high);
29073 break;
29074 case dw_val_class_wide_int:
29075 hstate.add (val2->v.val_wide->get_val (),
29076 get_full_len (*val2->v.val_wide)
29077 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
29078 break;
29079 default:
29080 gcc_unreachable ();
29081 }
29082 }
29083 break;
29084
29085 default:
29086 /* Other codes have no operands. */
29087 break;
29088 }
29089 }
29090
29091 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
29092
29093 static inline void
29094 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
29095 {
29096 dw_loc_descr_ref l;
29097 bool sizes_computed = false;
29098 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
29099 size_of_locs (loc);
29100
29101 for (l = loc; l != NULL; l = l->dw_loc_next)
29102 {
29103 enum dwarf_location_atom opc = l->dw_loc_opc;
29104 hstate.add_object (opc);
29105 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
29106 {
29107 size_of_locs (loc);
29108 sizes_computed = true;
29109 }
29110 hash_loc_operands (l, hstate);
29111 }
29112 }
29113
29114 /* Compute hash of the whole location list LIST_HEAD. */
29115
29116 static inline void
29117 hash_loc_list (dw_loc_list_ref list_head)
29118 {
29119 dw_loc_list_ref curr = list_head;
29120 inchash::hash hstate;
29121
29122 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
29123 {
29124 hstate.add (curr->begin, strlen (curr->begin) + 1);
29125 hstate.add (curr->end, strlen (curr->end) + 1);
29126 if (curr->section)
29127 hstate.add (curr->section, strlen (curr->section) + 1);
29128 hash_locs (curr->expr, hstate);
29129 }
29130 list_head->hash = hstate.end ();
29131 }
29132
29133 /* Return true if X and Y opcodes have the same operands. */
29134
29135 static inline bool
29136 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
29137 {
29138 dw_val_ref valx1 = &x->dw_loc_oprnd1;
29139 dw_val_ref valx2 = &x->dw_loc_oprnd2;
29140 dw_val_ref valy1 = &y->dw_loc_oprnd1;
29141 dw_val_ref valy2 = &y->dw_loc_oprnd2;
29142
29143 switch (x->dw_loc_opc)
29144 {
29145 case DW_OP_const4u:
29146 case DW_OP_const8u:
29147 if (x->dtprel)
29148 goto hash_addr;
29149 /* FALLTHRU */
29150 case DW_OP_const1u:
29151 case DW_OP_const1s:
29152 case DW_OP_const2u:
29153 case DW_OP_const2s:
29154 case DW_OP_const4s:
29155 case DW_OP_const8s:
29156 case DW_OP_constu:
29157 case DW_OP_consts:
29158 case DW_OP_pick:
29159 case DW_OP_plus_uconst:
29160 case DW_OP_breg0:
29161 case DW_OP_breg1:
29162 case DW_OP_breg2:
29163 case DW_OP_breg3:
29164 case DW_OP_breg4:
29165 case DW_OP_breg5:
29166 case DW_OP_breg6:
29167 case DW_OP_breg7:
29168 case DW_OP_breg8:
29169 case DW_OP_breg9:
29170 case DW_OP_breg10:
29171 case DW_OP_breg11:
29172 case DW_OP_breg12:
29173 case DW_OP_breg13:
29174 case DW_OP_breg14:
29175 case DW_OP_breg15:
29176 case DW_OP_breg16:
29177 case DW_OP_breg17:
29178 case DW_OP_breg18:
29179 case DW_OP_breg19:
29180 case DW_OP_breg20:
29181 case DW_OP_breg21:
29182 case DW_OP_breg22:
29183 case DW_OP_breg23:
29184 case DW_OP_breg24:
29185 case DW_OP_breg25:
29186 case DW_OP_breg26:
29187 case DW_OP_breg27:
29188 case DW_OP_breg28:
29189 case DW_OP_breg29:
29190 case DW_OP_breg30:
29191 case DW_OP_breg31:
29192 case DW_OP_regx:
29193 case DW_OP_fbreg:
29194 case DW_OP_piece:
29195 case DW_OP_deref_size:
29196 case DW_OP_xderef_size:
29197 return valx1->v.val_int == valy1->v.val_int;
29198 case DW_OP_skip:
29199 case DW_OP_bra:
29200 /* If splitting debug info, the use of DW_OP_GNU_addr_index
29201 can cause irrelevant differences in dw_loc_addr. */
29202 gcc_assert (valx1->val_class == dw_val_class_loc
29203 && valy1->val_class == dw_val_class_loc
29204 && (dwarf_split_debug_info
29205 || x->dw_loc_addr == y->dw_loc_addr));
29206 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
29207 case DW_OP_implicit_value:
29208 if (valx1->v.val_unsigned != valy1->v.val_unsigned
29209 || valx2->val_class != valy2->val_class)
29210 return false;
29211 switch (valx2->val_class)
29212 {
29213 case dw_val_class_const:
29214 return valx2->v.val_int == valy2->v.val_int;
29215 case dw_val_class_vec:
29216 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
29217 && valx2->v.val_vec.length == valy2->v.val_vec.length
29218 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
29219 valx2->v.val_vec.elt_size
29220 * valx2->v.val_vec.length) == 0;
29221 case dw_val_class_const_double:
29222 return valx2->v.val_double.low == valy2->v.val_double.low
29223 && valx2->v.val_double.high == valy2->v.val_double.high;
29224 case dw_val_class_wide_int:
29225 return *valx2->v.val_wide == *valy2->v.val_wide;
29226 case dw_val_class_addr:
29227 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
29228 default:
29229 gcc_unreachable ();
29230 }
29231 case DW_OP_bregx:
29232 case DW_OP_bit_piece:
29233 return valx1->v.val_int == valy1->v.val_int
29234 && valx2->v.val_int == valy2->v.val_int;
29235 case DW_OP_addr:
29236 hash_addr:
29237 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
29238 case DW_OP_GNU_addr_index:
29239 case DW_OP_GNU_const_index:
29240 {
29241 rtx ax1 = valx1->val_entry->addr.rtl;
29242 rtx ay1 = valy1->val_entry->addr.rtl;
29243 return rtx_equal_p (ax1, ay1);
29244 }
29245 case DW_OP_implicit_pointer:
29246 case DW_OP_GNU_implicit_pointer:
29247 return valx1->val_class == dw_val_class_die_ref
29248 && valx1->val_class == valy1->val_class
29249 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
29250 && valx2->v.val_int == valy2->v.val_int;
29251 case DW_OP_entry_value:
29252 case DW_OP_GNU_entry_value:
29253 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
29254 case DW_OP_const_type:
29255 case DW_OP_GNU_const_type:
29256 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
29257 || valx2->val_class != valy2->val_class)
29258 return false;
29259 switch (valx2->val_class)
29260 {
29261 case dw_val_class_const:
29262 return valx2->v.val_int == valy2->v.val_int;
29263 case dw_val_class_vec:
29264 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
29265 && valx2->v.val_vec.length == valy2->v.val_vec.length
29266 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
29267 valx2->v.val_vec.elt_size
29268 * valx2->v.val_vec.length) == 0;
29269 case dw_val_class_const_double:
29270 return valx2->v.val_double.low == valy2->v.val_double.low
29271 && valx2->v.val_double.high == valy2->v.val_double.high;
29272 case dw_val_class_wide_int:
29273 return *valx2->v.val_wide == *valy2->v.val_wide;
29274 default:
29275 gcc_unreachable ();
29276 }
29277 case DW_OP_regval_type:
29278 case DW_OP_deref_type:
29279 case DW_OP_GNU_regval_type:
29280 case DW_OP_GNU_deref_type:
29281 return valx1->v.val_int == valy1->v.val_int
29282 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
29283 case DW_OP_convert:
29284 case DW_OP_reinterpret:
29285 case DW_OP_GNU_convert:
29286 case DW_OP_GNU_reinterpret:
29287 if (valx1->val_class != valy1->val_class)
29288 return false;
29289 if (valx1->val_class == dw_val_class_unsigned_const)
29290 return valx1->v.val_unsigned == valy1->v.val_unsigned;
29291 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
29292 case DW_OP_GNU_parameter_ref:
29293 return valx1->val_class == dw_val_class_die_ref
29294 && valx1->val_class == valy1->val_class
29295 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
29296 default:
29297 /* Other codes have no operands. */
29298 return true;
29299 }
29300 }
29301
29302 /* Return true if DWARF location expressions X and Y are the same. */
29303
29304 static inline bool
29305 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
29306 {
29307 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
29308 if (x->dw_loc_opc != y->dw_loc_opc
29309 || x->dtprel != y->dtprel
29310 || !compare_loc_operands (x, y))
29311 break;
29312 return x == NULL && y == NULL;
29313 }
29314
29315 /* Hashtable helpers. */
29316
29317 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
29318 {
29319 static inline hashval_t hash (const dw_loc_list_struct *);
29320 static inline bool equal (const dw_loc_list_struct *,
29321 const dw_loc_list_struct *);
29322 };
29323
29324 /* Return precomputed hash of location list X. */
29325
29326 inline hashval_t
29327 loc_list_hasher::hash (const dw_loc_list_struct *x)
29328 {
29329 return x->hash;
29330 }
29331
29332 /* Return true if location lists A and B are the same. */
29333
29334 inline bool
29335 loc_list_hasher::equal (const dw_loc_list_struct *a,
29336 const dw_loc_list_struct *b)
29337 {
29338 if (a == b)
29339 return true;
29340 if (a->hash != b->hash)
29341 return false;
29342 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
29343 if (strcmp (a->begin, b->begin) != 0
29344 || strcmp (a->end, b->end) != 0
29345 || (a->section == NULL) != (b->section == NULL)
29346 || (a->section && strcmp (a->section, b->section) != 0)
29347 || !compare_locs (a->expr, b->expr))
29348 break;
29349 return a == NULL && b == NULL;
29350 }
29351
29352 typedef hash_table<loc_list_hasher> loc_list_hash_type;
29353
29354
29355 /* Recursively optimize location lists referenced from DIE
29356 children and share them whenever possible. */
29357
29358 static void
29359 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
29360 {
29361 dw_die_ref c;
29362 dw_attr_node *a;
29363 unsigned ix;
29364 dw_loc_list_struct **slot;
29365
29366 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29367 if (AT_class (a) == dw_val_class_loc_list)
29368 {
29369 dw_loc_list_ref list = AT_loc_list (a);
29370 /* TODO: perform some optimizations here, before hashing
29371 it and storing into the hash table. */
29372 hash_loc_list (list);
29373 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
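/* If no identical list has been seen yet, keep this one in the table;
otherwise point the attribute at the stored copy so the lists are shared. */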
29374 if (*slot == NULL)
29375 *slot = list;
29376 else
29377 a->dw_attr_val.v.val_loc_list = *slot;
29378 }
29379
29380 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
29381 }
29382
29383
29384 /* Recursively assign each location list a unique index into the debug_addr
29385 section. */
29386
29387 static void
29388 index_location_lists (dw_die_ref die)
29389 {
29390 dw_die_ref c;
29391 dw_attr_node *a;
29392 unsigned ix;
29393
29394 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29395 if (AT_class (a) == dw_val_class_loc_list)
29396 {
29397 dw_loc_list_ref list = AT_loc_list (a);
29398 dw_loc_list_ref curr;
29399 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
29400 {
29401 /* Don't index an entry that has already been indexed
29402 or won't be output. */
29403 if (curr->begin_entry != NULL
29404 || (strcmp (curr->begin, curr->end) == 0 && !curr->force))
29405 continue;
29406
29407 curr->begin_entry
29408 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
29409 }
29410 }
29411
29412 FOR_EACH_CHILD (die, c, index_location_lists (c));
29413 }
29414
29415 /* Optimize location lists referenced from DIE
29416 children and share them whenever possible. */
29417
29418 static void
29419 optimize_location_lists (dw_die_ref die)
29420 {
29421 loc_list_hash_type htab (500);
29422 optimize_location_lists_1 (die, &htab);
29423 }
29424 \f
29425 /* Traverse the limbo die list, and add parent/child links. The only
29426 dies without parents that should be here are concrete instances of
29427 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
29428 For concrete instances, we can get the parent die from the abstract
29429 instance. */
29430
29431 static void
29432 flush_limbo_die_list (void)
29433 {
29434 limbo_die_node *node;
29435
29436 /* get_context_die calls force_decl_die, which can put new DIEs on the
29437 limbo list in LTO mode when nested functions are put in a different
29438 partition than that of their parent function. */
29439 while ((node = limbo_die_list))
29440 {
29441 dw_die_ref die = node->die;
29442 limbo_die_list = node->next;
29443
29444 if (die->die_parent == NULL)
29445 {
29446 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
29447
29448 if (origin && origin->die_parent)
29449 add_child_die (origin->die_parent, die);
29450 else if (is_cu_die (die))
29451 ;
29452 else if (seen_error ())
29453 /* It's OK to be confused by errors in the input. */
29454 add_child_die (comp_unit_die (), die);
29455 else
29456 {
29457 /* In certain situations, the lexical block containing a
29458 nested function can be optimized away, which results
29459 in the nested function die being orphaned. Likewise
29460 with the return type of that nested function. Force
29461 this to be a child of the containing function.
29462
29463 It may happen that even the containing function got fully
29464 inlined and optimized out. In that case we are lost and
29465 assign the empty child. This should not be a big issue as
29466 the function is likely unreachable too. */
29467 gcc_assert (node->created_for);
29468
29469 if (DECL_P (node->created_for))
29470 origin = get_context_die (DECL_CONTEXT (node->created_for));
29471 else if (TYPE_P (node->created_for))
29472 origin = scope_die_for (node->created_for, comp_unit_die ());
29473 else
29474 origin = comp_unit_die ();
29475
29476 add_child_die (origin, die);
29477 }
29478 }
29479 }
29480 }
29481
29482 /* Output stuff that dwarf requires at the end of every file,
29483 and generate the DWARF-2 debugging info. */
29484
29485 static void
29486 dwarf2out_finish (const char *)
29487 {
29488 comdat_type_node *ctnode;
29489 dw_die_ref main_comp_unit_die;
29490 unsigned char checksum[16];
29491
29492 /* Flush out any latecomers to the limbo party. */
29493 flush_limbo_die_list ();
29494
29495 if (flag_checking)
29496 {
29497 verify_die (comp_unit_die ());
29498 for (limbo_die_node *node = cu_die_list; node; node = node->next)
29499 verify_die (node->die);
29500 }
29501
29502 /* We shouldn't have any symbols with delayed asm names for
29503 DIEs generated after early finish. */
29504 gcc_assert (deferred_asm_name == NULL);
29505
29506 gen_remaining_tmpl_value_param_die_attribute ();
29507
29508 #if ENABLE_ASSERT_CHECKING
29509 {
29510 dw_die_ref die = comp_unit_die (), c;
29511 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
29512 }
29513 #endif
29514 resolve_addr (comp_unit_die ());
29515 move_marked_base_types ();
29516
29517 /* Initialize sections and labels used for actual assembler output. */
29518 init_sections_and_labels ();
29519
29520 /* Traverse the DIEs and add sibling attributes to those DIEs that
29521 have children. */
29522 add_sibling_attributes (comp_unit_die ());
29523 limbo_die_node *node;
29524 for (node = cu_die_list; node; node = node->next)
29525 add_sibling_attributes (node->die);
29526 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
29527 add_sibling_attributes (ctnode->root_die);
29528
29529 /* When splitting DWARF info, we put some attributes in the
29530 skeleton compile_unit DIE that remains in the .o, while
29531 most attributes go in the DWO compile_unit_die. */
29532 if (dwarf_split_debug_info)
29533 {
29534 limbo_die_node *cu;
29535 main_comp_unit_die = gen_compile_unit_die (NULL);
29536 if (dwarf_version >= 5)
29537 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
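/* gen_compile_unit_die pushed the new skeleton CU onto the limbo list;
move that node onto cu_die_list so the skeleton is output together
with the other compile units. */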
29538 cu = limbo_die_list;
29539 gcc_assert (cu->die == main_comp_unit_die);
29540 limbo_die_list = limbo_die_list->next;
29541 cu->next = cu_die_list;
29542 cu_die_list = cu;
29543 }
29544 else
29545 main_comp_unit_die = comp_unit_die ();
29546
29547 /* Output a terminator label for the .text section. */
29548 switch_to_section (text_section);
29549 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
29550 if (cold_text_section)
29551 {
29552 switch_to_section (cold_text_section);
29553 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
29554 }
29555
29556 /* We can only use the low/high_pc attributes if all of the code was
29557 in .text. */
29558 if (!have_multiple_function_sections
29559 || (dwarf_version < 3 && dwarf_strict))
29560 {
29561 /* Don't add if the CU has no associated code. */
29562 if (text_section_used)
29563 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
29564 text_end_label, true);
29565 }
29566 else
29567 {
29568 unsigned fde_idx;
29569 dw_fde_ref fde;
29570 bool range_list_added = false;
29571
29572 if (text_section_used)
29573 add_ranges_by_labels (main_comp_unit_die, text_section_label,
29574 text_end_label, &range_list_added, true);
29575 if (cold_text_section_used)
29576 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
29577 cold_end_label, &range_list_added, true);
29578
29579 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
29580 {
29581 if (DECL_IGNORED_P (fde->decl))
29582 continue;
29583 if (!fde->in_std_section)
29584 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
29585 fde->dw_fde_end, &range_list_added,
29586 true);
29587 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
29588 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
29589 fde->dw_fde_second_end, &range_list_added,
29590 true);
29591 }
29592
29593 if (range_list_added)
29594 {
29595 /* We need to give .debug_loc and .debug_ranges an appropriate
29596 "base address". Use zero so that these addresses become
29597 absolute. Historically, we've emitted the unexpected
29598 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
29599 Emit both to give time for other tools to adapt. */
29600 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
29601 if (! dwarf_strict && dwarf_version < 4)
29602 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
29603
29604 add_ranges (NULL);
29605 }
29606 }
29607
29608 if (debug_info_level >= DINFO_LEVEL_TERSE)
29609 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
29610 debug_line_section_label);
29611
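/* Point the CU at the macro section: DW_AT_macros for DWARF 5,
DW_AT_macro_info under strict DWARF, and the GNU extension otherwise. */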
29612 if (have_macinfo)
29613 add_AT_macptr (comp_unit_die (),
29614 dwarf_version >= 5 ? DW_AT_macros
29615 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros,
29616 macinfo_section_label);
29617
29618 if (dwarf_split_debug_info)
29619 {
29620 if (have_location_lists)
29621 {
29622 if (dwarf_version >= 5)
29623 add_AT_loclistsptr (comp_unit_die (), DW_AT_loclists_base,
29624 loc_section_label);
29625 /* optimize_location_lists calculates the size of the lists,
29626 so index them first, and assign indices to the entries.
29627 Although optimize_location_lists will remove entries from
29628 the table, it only does so for duplicates, and therefore
29629 only reduces ref_counts to 1. */
29630 index_location_lists (comp_unit_die ());
29631 }
29632
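/* Assign an index to each entry in the address table; the addr_index
forms and operators refer to entries by these indices. */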
29633 if (addr_index_table != NULL)
29634 {
29635 unsigned int index = 0;
29636 addr_index_table
29637 ->traverse_noresize<unsigned int *, index_addr_table_entry>
29638 (&index);
29639 }
29640 }
29641
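/* Reset the counter used when assigning location list indexes below. */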
29642 loc_list_idx = 0;
29643 if (have_location_lists)
29644 {
29645 optimize_location_lists (comp_unit_die ());
29646 /* And finally assign indexes to the entries for -gsplit-dwarf. */
29647 if (dwarf_version >= 5 && dwarf_split_debug_info)
29648 assign_location_list_indexes (comp_unit_die ());
29649 }
29650
29651 save_macinfo_strings ();
29652
29653 if (dwarf_split_debug_info)
29654 {
29655 unsigned int index = 0;
29656
29657 /* Add attributes common to skeleton compile_units and
29658 type_units. Because these attributes include strings, this
29659 must be done before freezing the string table. Top-level
29660 skeleton die attrs are added when the skeleton type unit is
29661 created, so ensure it is created by this point. */
29662 add_top_level_skeleton_die_attrs (main_comp_unit_die);
29663 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
29664 }
29665
29666 /* Output all of the compilation units. We put the main one last so that
29667 the offsets are available to output_pubnames. */
29668 for (node = cu_die_list; node; node = node->next)
29669 output_comp_unit (node->die, 0, NULL);
29670
29671 hash_table<comdat_type_hasher> comdat_type_table (100);
29672 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
29673 {
29674 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
29675
29676 /* Don't output duplicate types. */
29677 if (*slot != HTAB_EMPTY_ENTRY)
29678 continue;
29679
29680 /* Add a pointer to the line table for the main compilation unit
29681 so that the debugger can make sense of DW_AT_decl_file
29682 attributes. */
29683 if (debug_info_level >= DINFO_LEVEL_TERSE)
29684 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
29685 (!dwarf_split_debug_info
29686 ? debug_line_section_label
29687 : debug_skeleton_line_section_label));
29688
29689 output_comdat_type_unit (ctnode);
29690 *slot = ctnode;
29691 }
29692
29693 /* The AT_pubnames attribute needs to go in all skeleton dies, including
29694 both the main_cu and all skeleton TUs. Making this call unconditional
29695 would end up either adding a second copy of the AT_pubnames attribute, or
29696 requiring a special case in add_top_level_skeleton_die_attrs. */
29697 if (!dwarf_split_debug_info)
29698 add_AT_pubnames (comp_unit_die ());
29699
29700 if (dwarf_split_debug_info)
29701 {
29702 int mark;
29703 struct md5_ctx ctx;
29704
29705 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
29706 index_rnglists ();
29707
29708 /* Compute a checksum of the comp_unit to use as the dwo_id. */
29709 md5_init_ctx (&ctx);
29710 mark = 0;
29711 die_checksum (comp_unit_die (), &ctx, &mark);
29712 unmark_all_dies (comp_unit_die ());
29713 md5_finish_ctx (&ctx, checksum);
29714
29715 if (dwarf_version < 5)
29716 {
29717 /* Use the first 8 bytes of the checksum as the dwo_id,
29718 and add it to both comp-unit DIEs. */
29719 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
29720 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
29721 }
29722
29723 /* Add the base offset of the ranges table to the skeleton
29724 comp-unit DIE. */
29725 if (!vec_safe_is_empty (ranges_table))
29726 {
29727 if (dwarf_version >= 5)
29728 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
29729 ranges_base_label);
29730 else
29731 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
29732 ranges_section_label);
29733 }
29734
29735 switch_to_section (debug_addr_section);
29736 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
29737 output_addr_table ();
29738 }
29739
29740 /* Output the main compilation unit if non-empty or if .debug_macinfo
29741 or .debug_macro will be emitted. */
29742 output_comp_unit (comp_unit_die (), have_macinfo,
29743 dwarf_split_debug_info ? checksum : NULL);
29744
29745 if (dwarf_split_debug_info && info_section_emitted)
29746 output_skeleton_debug_sections (main_comp_unit_die, checksum);
29747
29748 /* Output the abbreviation table. */
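/* Abbreviation codes start at 1 and slot zero of the table is left
unused, so a length of one means there is nothing to output. */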
29749 if (vec_safe_length (abbrev_die_table) != 1)
29750 {
29751 switch_to_section (debug_abbrev_section);
29752 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
29753 output_abbrev_section ();
29754 }
29755
29756 /* Output location list section if necessary. */
29757 if (have_location_lists)
29758 {
29759 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
29760 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
29761 /* Output the location lists info. */
29762 switch_to_section (debug_loc_section);
29763 if (dwarf_version >= 5)
29764 {
29765 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 1);
29766 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 2);
29767 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
29768 dw2_asm_output_data (4, 0xffffffff,
29769 "Initial length escape value indicating "
29770 "64-bit DWARF extension");
29771 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
29772 "Length of Location Lists");
29773 ASM_OUTPUT_LABEL (asm_out_file, l1);
29774 dw2_asm_output_data (2, dwarf_version, "DWARF Version");
29775 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
29776 dw2_asm_output_data (1, 0, "Segment Size");
29777 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
29778 "Offset Entry Count");
29779 }
29780 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
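/* For split DWARF 5, emit the offset entry table for the location
lists; the recount below must match the number of indexes assigned
earlier, which the assert verifies. */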
29781 if (dwarf_version >= 5 && dwarf_split_debug_info)
29782 {
29783 unsigned int save_loc_list_idx = loc_list_idx;
29784 loc_list_idx = 0;
29785 output_loclists_offsets (comp_unit_die ());
29786 gcc_assert (save_loc_list_idx == loc_list_idx);
29787 }
29788 output_location_lists (comp_unit_die ());
29789 if (dwarf_version >= 5)
29790 ASM_OUTPUT_LABEL (asm_out_file, l2);
29791 }
29792
29793 output_pubtables ();
29794
29795 /* Output the address range information if a CU (.debug_info section)
29796 was emitted. We output an empty table even if we had no functions
29797 to put in it. This is because the consumer has no way to tell the
29798 difference between an empty table that we omitted and failure to
29799 generate a table that would have contained data. */
29800 if (info_section_emitted)
29801 {
29802 switch_to_section (debug_aranges_section);
29803 output_aranges ();
29804 }
29805
29806 /* Output ranges section if necessary. */
29807 if (!vec_safe_is_empty (ranges_table))
29808 {
29809 if (dwarf_version >= 5)
29810 output_rnglists ();
29811 else
29812 output_ranges ();
29813 }
29814
29815 /* Have to end the macro section. */
29816 if (have_macinfo)
29817 {
29818 switch_to_section (debug_macinfo_section);
29819 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
29820 output_macinfo ();
29821 dw2_asm_output_data (1, 0, "End compilation unit");
29822 }
29823
29824 /* Output the source line correspondence table. We must do this
29825 even if there is no line information. Otherwise, on an empty
29826 translation unit, we will generate a present, but empty,
29827 .debug_info section. IRIX 6.5 `nm' will then complain when
29828 examining the file. This is done late so that any filenames
29829 used by the debug_info section are marked as 'used'. */
29830 switch_to_section (debug_line_section);
29831 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
29832 if (! DWARF2_ASM_LINE_DEBUG_INFO)
29833 output_line_info (false);
29834
29835 if (dwarf_split_debug_info && info_section_emitted)
29836 {
29837 switch_to_section (debug_skeleton_line_section);
29838 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
29839 output_line_info (true);
29840 }
29841
29842 /* If we emitted any indirect strings, output the string table too. */
29843 if (debug_str_hash || skeleton_debug_str_hash)
29844 output_indirect_strings ();
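/* Strings referenced via DW_FORM_line_strp live in their own
.debug_line_str section. */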
29845 if (debug_line_str_hash)
29846 {
29847 switch_to_section (debug_line_str_section);
29848 const enum dwarf_form form = DW_FORM_line_strp;
29849 debug_line_str_hash->traverse<enum dwarf_form,
29850 output_indirect_string> (form);
29851 }
29852 }
29853
29854 /* Perform any cleanups needed after the early debug generation pass
29855 has run. */
29856
29857 static void
29858 dwarf2out_early_finish (const char *filename)
29859 {
29860 set_early_dwarf s;
29861
29862 /* PCH might result in DW_AT_producer string being restored from the
29863 header compilation, so always fill it with an empty string initially
29864 and overwrite it only here. */
29865 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
29866 producer_string = gen_producer_string ();
29867 producer->dw_attr_val.v.val_str->refcount--;
29868 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
29869
29870 /* Add the name for the main input file now. We delayed this from
29871 dwarf2out_init to avoid complications with PCH. */
29872 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
29873 add_comp_dir_attribute (comp_unit_die ());
29874
29875 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
29876 DW_AT_comp_dir into .debug_line_str section. */
29877 if (!DWARF2_ASM_LINE_DEBUG_INFO
29878 && dwarf_version >= 5
29879 && DWARF5_USE_DEBUG_LINE_STR)
29880 {
29881 for (int i = 0; i < 2; i++)
29882 {
29883 dw_attr_node *a = get_AT (comp_unit_die (),
29884 i ? DW_AT_comp_dir : DW_AT_name);
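/* Skip attributes that are missing, are not plain strings, or are so
short that an offset-sized reference would not be any smaller. */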
29885 if (a == NULL
29886 || AT_class (a) != dw_val_class_str
29887 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
29888 continue;
29889
29890 if (! debug_line_str_hash)
29891 debug_line_str_hash
29892 = hash_table<indirect_string_hasher>::create_ggc (10);
29893
29894 struct indirect_string_node *node
29895 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
29896 set_indirect_string (node);
29897 node->form = DW_FORM_line_strp;
29898 a->dw_attr_val.v.val_str->refcount--;
29899 a->dw_attr_val.v.val_str = node;
29900 }
29901 }
29902
29903 /* With LTO, early dwarf was really finished at compile time, so make
29904 sure to adjust the phase after annotating the LTRANS CU DIE. */
29905 if (in_lto_p)
29906 {
29907 early_dwarf_finished = true;
29908 return;
29909 }
29910
29911 /* Walk through the list of incomplete types again, trying once more to
29912 emit full debugging info for them. */
29913 retry_incomplete_types ();
29914
29915 /* The point here is to flush out the limbo list so that it is empty
29916 and we don't need to stream it for LTO. */
29917 flush_limbo_die_list ();
29918
29919 gen_scheduled_generic_parms_dies ();
29920 gen_remaining_tmpl_value_param_die_attribute ();
29921
29922 /* Add DW_AT_linkage_name for all deferred DIEs. */
29923 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
29924 {
29925 tree decl = node->created_for;
29926 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
29927 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
29928 ended up in deferred_asm_name before we knew it was
29929 constant and never written to disk. */
29930 && DECL_ASSEMBLER_NAME (decl))
29931 {
29932 add_linkage_attr (node->die, decl);
29933 move_linkage_attr (node->die);
29934 }
29935 }
29936 deferred_asm_name = NULL;
29937
29938 if (flag_eliminate_unused_debug_types)
29939 prune_unused_types ();
29940
29941 /* Generate separate COMDAT sections for type DIEs. */
29942 if (use_debug_types)
29943 {
29944 break_out_comdat_types (comp_unit_die ());
29945
29946 /* Each new type_unit DIE was added to the limbo die list when created.
29947 Since these have all been added to comdat_type_list, clear the
29948 limbo die list. */
29949 limbo_die_list = NULL;
29950
29951 /* For each new comdat type unit, copy declarations for incomplete
29952 types to make the new unit self-contained (i.e., no direct
29953 references to the main compile unit). */
29954 for (comdat_type_node *ctnode = comdat_type_list;
29955 ctnode != NULL; ctnode = ctnode->next)
29956 copy_decls_for_unworthy_types (ctnode->root_die);
29957 copy_decls_for_unworthy_types (comp_unit_die ());
29958
29959 /* In the process of copying declarations from one unit to another,
29960 we may have left some declarations behind that are no longer
29961 referenced. Prune them. */
29962 prune_unused_types ();
29963 }
29964
29965 /* Generate separate CUs for each of the include files we've seen.
29966 They will go into limbo_die_list and from there to cu_die_list. */
29967 if (flag_eliminate_dwarf2_dups)
29968 {
29969 gcc_assert (limbo_die_list == NULL);
29970 break_out_includes (comp_unit_die ());
29971 limbo_die_node *cu;
29972 while ((cu = limbo_die_list))
29973 {
29974 limbo_die_list = cu->next;
29975 cu->next = cu_die_list;
29976 cu_die_list = cu;
29977 }
29978 }
29979
29980 /* The early debug phase is now finished. */
29981 early_dwarf_finished = true;
29982 }
29983
29984 /* Reset all state within dwarf2out.c so that we can rerun the compiler
29985 within the same process. For use by toplev::finalize. */
29986
29987 void
29988 dwarf2out_c_finalize (void)
29989 {
29990 last_var_location_insn = NULL;
29991 cached_next_real_insn = NULL;
29992 used_rtx_array = NULL;
29993 incomplete_types = NULL;
29994 decl_scope_table = NULL;
29995 debug_info_section = NULL;
29996 debug_skeleton_info_section = NULL;
29997 debug_abbrev_section = NULL;
29998 debug_skeleton_abbrev_section = NULL;
29999 debug_aranges_section = NULL;
30000 debug_addr_section = NULL;
30001 debug_macinfo_section = NULL;
30002 debug_line_section = NULL;
30003 debug_skeleton_line_section = NULL;
30004 debug_loc_section = NULL;
30005 debug_pubnames_section = NULL;
30006 debug_pubtypes_section = NULL;
30007 debug_str_section = NULL;
30008 debug_line_str_section = NULL;
30009 debug_str_dwo_section = NULL;
30010 debug_str_offsets_section = NULL;
30011 debug_ranges_section = NULL;
30012 debug_frame_section = NULL;
30013 fde_vec = NULL;
30014 debug_str_hash = NULL;
30015 debug_line_str_hash = NULL;
30016 skeleton_debug_str_hash = NULL;
30017 dw2_string_counter = 0;
30018 have_multiple_function_sections = false;
30019 text_section_used = false;
30020 cold_text_section_used = false;
30021 cold_text_section = NULL;
30022 current_unit_personality = NULL;
30023
30024 early_dwarf = false;
30025 early_dwarf_finished = false;
30026
30027 next_die_offset = 0;
30028 single_comp_unit_die = NULL;
30029 comdat_type_list = NULL;
30030 limbo_die_list = NULL;
30031 file_table = NULL;
30032 decl_die_table = NULL;
30033 common_block_die_table = NULL;
30034 decl_loc_table = NULL;
30035 call_arg_locations = NULL;
30036 call_arg_loc_last = NULL;
30037 call_site_count = -1;
30038 tail_call_site_count = -1;
30039 cached_dw_loc_list_table = NULL;
30040 abbrev_die_table = NULL;
30041 delete dwarf_proc_stack_usage_map;
30042 dwarf_proc_stack_usage_map = NULL;
30043 line_info_label_num = 0;
30044 cur_line_info_table = NULL;
30045 text_section_line_info = NULL;
30046 cold_text_section_line_info = NULL;
30047 separate_line_info = NULL;
30048 info_section_emitted = false;
30049 pubname_table = NULL;
30050 pubtype_table = NULL;
30051 macinfo_table = NULL;
30052 ranges_table = NULL;
30053 ranges_by_label = NULL;
30054 rnglist_idx = 0;
30055 have_location_lists = false;
30056 loclabel_num = 0;
30057 poc_label_num = 0;
30058 last_emitted_file = NULL;
30059 label_num = 0;
30060 tmpl_value_parm_die_table = NULL;
30061 generic_type_instances = NULL;
30062 frame_pointer_fb_offset = 0;
30063 frame_pointer_fb_offset_valid = false;
30064 base_types.release ();
30065 XDELETEVEC (producer_string);
30066 producer_string = NULL;
30067 }
30068
30069 #include "gt-dwarf2out.h"