[PR55641] drop spurious const_type from reference_type variables
[gcc.git] / gcc / dwarf2out.c
1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2016 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
47 information common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
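/* As an illustrative (non-normative) example of how these pieces fit
   together, a conventional x86-64 prologue of the form

       pushq  %rbp
       movq   %rsp, %rbp

   is typically described by CFIs along the lines of

       DW_CFA_advance_loc        (past the push)
       DW_CFA_def_cfa_offset     16
       DW_CFA_offset             %rbp, CFA-16
       DW_CFA_advance_loc        (past the move)
       DW_CFA_def_cfa_register   %rbp

   i.e. the CFA itself stays fixed while the register and offset used to
   compute it change as the prologue executes.  The exact encoding is
   target- and configuration-dependent; see the CFI machinery below.  */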
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "tm_p.h"
66 #include "stringpool.h"
67 #include "insn-config.h"
68 #include "ira.h"
69 #include "cgraph.h"
70 #include "diagnostic.h"
71 #include "fold-const.h"
72 #include "stor-layout.h"
73 #include "varasm.h"
74 #include "version.h"
75 #include "flags.h"
76 #include "rtlhash.h"
77 #include "reload.h"
78 #include "output.h"
79 #include "expr.h"
80 #include "dwarf2out.h"
81 #include "dwarf2asm.h"
82 #include "toplev.h"
83 #include "md5.h"
84 #include "tree-pretty-print.h"
85 #include "debug.h"
86 #include "common/common-target.h"
87 #include "langhooks.h"
88 #include "lra.h"
89 #include "dumpfile.h"
90 #include "opts.h"
91 #include "tree-dfa.h"
92 #include "gdb/gdb-index.h"
93 #include "rtl-iter.h"
94
95 static void dwarf2out_source_line (unsigned int, const char *, int, bool);
96 static rtx_insn *last_var_location_insn;
97 static rtx_insn *cached_next_real_insn;
98 static void dwarf2out_decl (tree);
99
100 #ifndef XCOFF_DEBUGGING_INFO
101 #define XCOFF_DEBUGGING_INFO 0
102 #endif
103
104 #ifndef HAVE_XCOFF_DWARF_EXTRAS
105 #define HAVE_XCOFF_DWARF_EXTRAS 0
106 #endif
107
108 #ifdef VMS_DEBUGGING_INFO
109 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
110
111 /* Define this macro to be a nonzero value if the directory specifications
112 which are output in the debug info should end with a separator. */
113 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
114 /* Define this macro to evaluate to a nonzero value if GCC should refrain
115 from generating indirect strings in DWARF2 debug information, for instance
116 if your target is stuck with an old version of GDB that is unable to
117 process them properly or uses VMS Debug. */
118 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
119 #else
120 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
121 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
122 #endif
123
124 /* ??? Poison these here until it can be done generically. They've been
125 totally replaced in this file; make sure it stays that way. */
126 #undef DWARF2_UNWIND_INFO
127 #undef DWARF2_FRAME_INFO
128 #if (GCC_VERSION >= 3000)
129 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
130 #endif
131
132 /* The size of the target's pointer type. */
133 #ifndef PTR_SIZE
134 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
135 #endif
136
137 /* Array of RTXes referenced by the debugging information, which therefore
138 must be kept around forever. */
139 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
140
141 /* A pointer to the base of a list of incomplete types which might be
142 completed at some later time. incomplete_types_list needs to be a
143 vec<tree, va_gc> *because we want to tell the garbage collector about
144 it. */
145 static GTY(()) vec<tree, va_gc> *incomplete_types;
146
147 /* A pointer to the base of a table of references to declaration
148 scopes. This table is a display which tracks the nesting
149 of declaration scopes at the current scope and containing
150 scopes. This table is used to find the proper place to
151 define type declaration DIE's. */
152 static GTY(()) vec<tree, va_gc> *decl_scope_table;
153
154 /* Pointers to various DWARF2 sections. */
155 static GTY(()) section *debug_info_section;
156 static GTY(()) section *debug_skeleton_info_section;
157 static GTY(()) section *debug_abbrev_section;
158 static GTY(()) section *debug_skeleton_abbrev_section;
159 static GTY(()) section *debug_aranges_section;
160 static GTY(()) section *debug_addr_section;
161 static GTY(()) section *debug_macinfo_section;
162 static GTY(()) section *debug_line_section;
163 static GTY(()) section *debug_skeleton_line_section;
164 static GTY(()) section *debug_loc_section;
165 static GTY(()) section *debug_pubnames_section;
166 static GTY(()) section *debug_pubtypes_section;
167 static GTY(()) section *debug_str_section;
168 static GTY(()) section *debug_str_dwo_section;
169 static GTY(()) section *debug_str_offsets_section;
170 static GTY(()) section *debug_ranges_section;
171 static GTY(()) section *debug_frame_section;
172
173 /* Maximum size (in bytes) of an artificially generated label. */
174 #define MAX_ARTIFICIAL_LABEL_BYTES 30
175
176 /* According to the (draft) DWARF 3 specification, the initial length
177 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
178 bytes are 0xffffffff, followed by the length stored in the next 8
179 bytes.
180
181 However, the SGI/MIPS ABI uses an initial length which is equal to
182 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
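/* In other words, with a 4-byte DWARF_OFFSET_SIZE the initial length is a
   plain 4-byte value, while 64-bit DWARF emits the 0xffffffff escape
   followed by an 8-byte length; the output routines below test
   DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE to decide which form to
   use.  */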
183
184 #ifndef DWARF_INITIAL_LENGTH_SIZE
185 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
186 #endif
187
188 /* Round SIZE up to the nearest BOUNDARY. */
189 #define DWARF_ROUND(SIZE,BOUNDARY) \
190 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
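/* For example, DWARF_ROUND (5, 4) and DWARF_ROUND (8, 4) both yield 8.  */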
191
192 /* CIE identifier. */
193 #if HOST_BITS_PER_WIDE_INT >= 64
194 #define DWARF_CIE_ID \
195 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
196 #else
197 #define DWARF_CIE_ID DW_CIE_ID
198 #endif
199
200
201 /* A vector for a table that contains frame description
202 information for each routine. */
203 #define NOT_INDEXED (-1U)
204 #define NO_INDEX_ASSIGNED (-2U)
205
206 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
207
208 struct GTY((for_user)) indirect_string_node {
209 const char *str;
210 unsigned int refcount;
211 enum dwarf_form form;
212 char *label;
213 unsigned int index;
214 };
215
216 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
217 {
218 typedef const char *compare_type;
219
220 static hashval_t hash (indirect_string_node *);
221 static bool equal (indirect_string_node *, const char *);
222 };
223
224 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
225
226 /* With split_debug_info, both the comp_dir and dwo_name go in the
227 main object file, rather than the dwo, similar to the force_direct
228 parameter elsewhere but with additional complications:
229
230 1) The string is needed in both the main object file and the dwo.
231 That is, the comp_dir and dwo_name will appear in both places.
232
233 2) Strings can use three forms: DW_FORM_string, DW_FORM_strp or
234 DW_FORM_GNU_str_index.
235
236 3) GCC chooses the form to use late, depending on the size and
237 reference count.
238
239 Rather than forcing all the debug string handling functions and
240 callers to deal with these complications, simply use a separate,
241 special-cased string table for any attribute that should go in the
242 main object file. This limits the complexity to just the places
243 that need it. */
244
245 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
246
247 static GTY(()) int dw2_string_counter;
248
249 /* True if the compilation unit places functions in more than one section. */
250 static GTY(()) bool have_multiple_function_sections = false;
251
252 /* Whether the default text and cold text sections have been used at all. */
253
254 static GTY(()) bool text_section_used = false;
255 static GTY(()) bool cold_text_section_used = false;
256
257 /* The default cold text section. */
258 static GTY(()) section *cold_text_section;
259
260 /* The DIE for C++14 'auto' in a function return type. */
261 static GTY(()) dw_die_ref auto_die;
262
263 /* The DIE for C++14 'decltype(auto)' in a function return type. */
264 static GTY(()) dw_die_ref decltype_auto_die;
265
266 /* Forward declarations for functions defined in this file. */
267
268 static char *stripattributes (const char *);
269 static void output_call_frame_info (int);
270 static void dwarf2out_note_section_used (void);
271
272 /* Personality decl of current unit. Used only when assembler does not support
273 personality CFI. */
274 static GTY(()) rtx current_unit_personality;
275
276 /* Data and reference forms for relocatable data. */
277 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
278 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
279
280 #ifndef DEBUG_FRAME_SECTION
281 #define DEBUG_FRAME_SECTION ".debug_frame"
282 #endif
283
284 #ifndef FUNC_BEGIN_LABEL
285 #define FUNC_BEGIN_LABEL "LFB"
286 #endif
287
288 #ifndef FUNC_END_LABEL
289 #define FUNC_END_LABEL "LFE"
290 #endif
291
292 #ifndef PROLOGUE_END_LABEL
293 #define PROLOGUE_END_LABEL "LPE"
294 #endif
295
296 #ifndef EPILOGUE_BEGIN_LABEL
297 #define EPILOGUE_BEGIN_LABEL "LEB"
298 #endif
299
300 #ifndef FRAME_BEGIN_LABEL
301 #define FRAME_BEGIN_LABEL "Lframe"
302 #endif
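/* Prefixes for the internal labels generated below (typically via
   ASM_GENERATE_INTERNAL_LABEL) to delimit CIEs, FDEs, the line number
   table and its prologue, and to name DIEs.  */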
303 #define CIE_AFTER_SIZE_LABEL "LSCIE"
304 #define CIE_END_LABEL "LECIE"
305 #define FDE_LABEL "LSFDE"
306 #define FDE_AFTER_SIZE_LABEL "LASFDE"
307 #define FDE_END_LABEL "LEFDE"
308 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
309 #define LINE_NUMBER_END_LABEL "LELT"
310 #define LN_PROLOG_AS_LABEL "LASLTP"
311 #define LN_PROLOG_END_LABEL "LELTP"
312 #define DIE_LABEL_PREFIX "DW"
313 \f
314 /* Match the base name of a file to the base name of a compilation unit. */
315
316 static int
317 matches_main_base (const char *path)
318 {
319 /* Cache the last query. */
320 static const char *last_path = NULL;
321 static int last_match = 0;
322 if (path != last_path)
323 {
324 const char *base;
325 int length = base_of_path (path, &base);
326 last_path = path;
327 last_match = (length == main_input_baselength
328 && memcmp (base, main_input_basename, length) == 0);
329 }
330 return last_match;
331 }
332
333 #ifdef DEBUG_DEBUG_STRUCT
334
335 static int
336 dump_struct_debug (tree type, enum debug_info_usage usage,
337 enum debug_struct_file criterion, int generic,
338 int matches, int result)
339 {
340 /* Find the type name. */
341 tree type_decl = TYPE_STUB_DECL (type);
342 tree t = type_decl;
343 const char *name = 0;
344 if (TREE_CODE (t) == TYPE_DECL)
345 t = DECL_NAME (t);
346 if (t)
347 name = IDENTIFIER_POINTER (t);
348
349 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
350 criterion,
351 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
352 matches ? "bas" : "hdr",
353 generic ? "gen" : "ord",
354 usage == DINFO_USAGE_DFN ? ";" :
355 usage == DINFO_USAGE_DIR_USE ? "." : "*",
356 result,
357 (void*) type_decl, name);
358 return result;
359 }
360 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
361 dump_struct_debug (type, usage, criterion, generic, matches, result)
362
363 #else
364
365 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
366 (result)
367
368 #endif
369
370 /* Get the number of HOST_WIDE_INTs needed to represent the precision
371 of the number. Some constants have a large uniform precision, so
372 we get the precision needed for the actual value of the number. */
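/* For instance, a 128-bit unsigned wide_int holding the value 5 has a
   minimum precision of 3 bits, so a single HOST_WIDE_INT suffices
   (assuming a 64-bit HOST_WIDE_INT).  */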
373
374 static unsigned int
375 get_full_len (const wide_int &op)
376 {
377 int prec = wi::min_precision (op, UNSIGNED);
378 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
379 / HOST_BITS_PER_WIDE_INT);
380 }
381
382 static bool
383 should_emit_struct_debug (tree type, enum debug_info_usage usage)
384 {
385 enum debug_struct_file criterion;
386 tree type_decl;
387 bool generic = lang_hooks.types.generic_p (type);
388
389 if (generic)
390 criterion = debug_struct_generic[usage];
391 else
392 criterion = debug_struct_ordinary[usage];
393
394 if (criterion == DINFO_STRUCT_FILE_NONE)
395 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
396 if (criterion == DINFO_STRUCT_FILE_ANY)
397 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
398
399 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
400
401 if (type_decl != NULL)
402 {
403 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
404 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
405
406 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
407 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
408 }
409
410 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
411 }
412 \f
413 /* Return a pointer to a copy of the section string name S with all
414 attributes stripped off, and an asterisk prepended (for assemble_name). */
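/* For example, given ".debug_frame,\"\",@progbits" this returns
   "*.debug_frame"; everything from the first comma on is dropped.  */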
415
416 static inline char *
417 stripattributes (const char *s)
418 {
419 char *stripped = XNEWVEC (char, strlen (s) + 2);
420 char *p = stripped;
421
422 *p++ = '*';
423
424 while (*s && *s != ',')
425 *p++ = *s++;
426
427 *p = '\0';
428 return stripped;
429 }
430
431 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
432 switch to the data section instead, and write out a synthetic start label
433 for collect2 the first time around. */
434
435 static void
436 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
437 {
438 if (eh_frame_section == 0)
439 {
440 int flags;
441
442 if (EH_TABLES_CAN_BE_READ_ONLY)
443 {
444 int fde_encoding;
445 int per_encoding;
446 int lsda_encoding;
447
448 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
449 /*global=*/0);
450 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
451 /*global=*/1);
452 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
453 /*global=*/0);
454 flags = ((! flag_pic
455 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
456 && (fde_encoding & 0x70) != DW_EH_PE_aligned
457 && (per_encoding & 0x70) != DW_EH_PE_absptr
458 && (per_encoding & 0x70) != DW_EH_PE_aligned
459 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
460 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
461 ? 0 : SECTION_WRITE);
462 }
463 else
464 flags = SECTION_WRITE;
465
466 #ifdef EH_FRAME_SECTION_NAME
467 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
468 #else
469 eh_frame_section = ((flags == SECTION_WRITE)
470 ? data_section : readonly_data_section);
471 #endif /* EH_FRAME_SECTION_NAME */
472 }
473
474 switch_to_section (eh_frame_section);
475
476 #ifdef EH_FRAME_THROUGH_COLLECT2
477 /* We have no special eh_frame section. Emit special labels to guide
478 collect2. */
479 if (!back)
480 {
481 tree label = get_file_function_name ("F");
482 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
483 targetm.asm_out.globalize_label (asm_out_file,
484 IDENTIFIER_POINTER (label));
485 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
486 }
487 #endif
488 }
489
490 /* Switch [BACK] to the eh or debug frame table section, depending on
491 FOR_EH. */
492
493 static void
494 switch_to_frame_table_section (int for_eh, bool back)
495 {
496 if (for_eh)
497 switch_to_eh_frame_section (back);
498 else
499 {
500 if (!debug_frame_section)
501 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
502 SECTION_DEBUG, NULL);
503 switch_to_section (debug_frame_section);
504 }
505 }
506
507 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
508
509 enum dw_cfi_oprnd_type
510 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
511 {
512 switch (cfi)
513 {
514 case DW_CFA_nop:
515 case DW_CFA_GNU_window_save:
516 case DW_CFA_remember_state:
517 case DW_CFA_restore_state:
518 return dw_cfi_oprnd_unused;
519
520 case DW_CFA_set_loc:
521 case DW_CFA_advance_loc1:
522 case DW_CFA_advance_loc2:
523 case DW_CFA_advance_loc4:
524 case DW_CFA_MIPS_advance_loc8:
525 return dw_cfi_oprnd_addr;
526
527 case DW_CFA_offset:
528 case DW_CFA_offset_extended:
529 case DW_CFA_def_cfa:
530 case DW_CFA_offset_extended_sf:
531 case DW_CFA_def_cfa_sf:
532 case DW_CFA_restore:
533 case DW_CFA_restore_extended:
534 case DW_CFA_undefined:
535 case DW_CFA_same_value:
536 case DW_CFA_def_cfa_register:
537 case DW_CFA_register:
538 case DW_CFA_expression:
539 return dw_cfi_oprnd_reg_num;
540
541 case DW_CFA_def_cfa_offset:
542 case DW_CFA_GNU_args_size:
543 case DW_CFA_def_cfa_offset_sf:
544 return dw_cfi_oprnd_offset;
545
546 case DW_CFA_def_cfa_expression:
547 return dw_cfi_oprnd_loc;
548
549 default:
550 gcc_unreachable ();
551 }
552 }
553
554 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
555
556 enum dw_cfi_oprnd_type
557 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
558 {
559 switch (cfi)
560 {
561 case DW_CFA_def_cfa:
562 case DW_CFA_def_cfa_sf:
563 case DW_CFA_offset:
564 case DW_CFA_offset_extended_sf:
565 case DW_CFA_offset_extended:
566 return dw_cfi_oprnd_offset;
567
568 case DW_CFA_register:
569 return dw_cfi_oprnd_reg_num;
570
571 case DW_CFA_expression:
572 return dw_cfi_oprnd_loc;
573
574 default:
575 return dw_cfi_oprnd_unused;
576 }
577 }
578
579 /* Output one FDE. */
580
581 static void
582 output_fde (dw_fde_ref fde, bool for_eh, bool second,
583 char *section_start_label, int fde_encoding, char *augmentation,
584 bool any_lsda_needed, int lsda_encoding)
585 {
586 const char *begin, *end;
587 static unsigned int j;
588 char l1[20], l2[20];
589
590 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
591 /* empty */ 0);
592 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
593 for_eh + j);
594 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
595 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
596 if (!XCOFF_DEBUGGING_INFO || for_eh)
597 {
598 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
599 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
600 " indicating 64-bit DWARF extension");
601 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
602 "FDE Length");
603 }
604 ASM_OUTPUT_LABEL (asm_out_file, l1);
605
606 if (for_eh)
607 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
608 else
609 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
610 debug_frame_section, "FDE CIE offset");
611
612 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
613 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
614
615 if (for_eh)
616 {
617 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
618 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
619 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
620 "FDE initial location");
621 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
622 end, begin, "FDE address range");
623 }
624 else
625 {
626 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
627 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
628 }
629
630 if (augmentation[0])
631 {
632 if (any_lsda_needed)
633 {
634 int size = size_of_encoded_value (lsda_encoding);
635
636 if (lsda_encoding == DW_EH_PE_aligned)
637 {
638 int offset = ( 4 /* Length */
639 + 4 /* CIE offset */
640 + 2 * size_of_encoded_value (fde_encoding)
641 + 1 /* Augmentation size */ );
642 int pad = -offset & (PTR_SIZE - 1);
643
644 size += pad;
645 gcc_assert (size_of_uleb128 (size) == 1);
646 }
647
648 dw2_asm_output_data_uleb128 (size, "Augmentation size");
649
650 if (fde->uses_eh_lsda)
651 {
652 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
653 fde->funcdef_number);
654 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
655 gen_rtx_SYMBOL_REF (Pmode, l1),
656 false,
657 "Language Specific Data Area");
658 }
659 else
660 {
661 if (lsda_encoding == DW_EH_PE_aligned)
662 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
663 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
664 "Language Specific Data Area (none)");
665 }
666 }
667 else
668 dw2_asm_output_data_uleb128 (0, "Augmentation size");
669 }
670
671 /* Loop through the Call Frame Instructions associated with this FDE. */
672 fde->dw_fde_current_label = begin;
673 {
674 size_t from, until, i;
675
676 from = 0;
677 until = vec_safe_length (fde->dw_fde_cfi);
678
679 if (fde->dw_fde_second_begin == NULL)
680 ;
681 else if (!second)
682 until = fde->dw_fde_switch_cfi_index;
683 else
684 from = fde->dw_fde_switch_cfi_index;
685
686 for (i = from; i < until; i++)
687 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
688 }
689
690 /* If we are to emit a ref/link from function bodies to their frame tables,
691 do it now. This is typically performed to make sure that tables
692 associated with functions are dragged along with them and not discarded
693 by link-time garbage collection. We need to do this on a per-function basis to
694 cope with -ffunction-sections. */
695
696 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
697 /* Switch to the function section, emit the ref to the tables, and
698 switch *back* into the table section. */
699 switch_to_section (function_section (fde->decl));
700 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
701 switch_to_frame_table_section (for_eh, true);
702 #endif
703
704 /* Pad the FDE out to an address sized boundary. */
705 ASM_OUTPUT_ALIGN (asm_out_file,
706 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
707 ASM_OUTPUT_LABEL (asm_out_file, l2);
708
709 j += 2;
710 }
711
712 /* Return true if frame description entry FDE is needed for EH. */
713
714 static bool
715 fde_needed_for_eh_p (dw_fde_ref fde)
716 {
717 if (flag_asynchronous_unwind_tables)
718 return true;
719
720 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
721 return true;
722
723 if (fde->uses_eh_lsda)
724 return true;
725
726 /* If exceptions are enabled, we have collected nothrow info. */
727 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
728 return false;
729
730 return true;
731 }
732
733 /* Output the call frame information used to record information
734 that relates to calculating the frame pointer, and records the
735 location of saved registers. */
736
737 static void
738 output_call_frame_info (int for_eh)
739 {
740 unsigned int i;
741 dw_fde_ref fde;
742 dw_cfi_ref cfi;
743 char l1[20], l2[20], section_start_label[20];
744 bool any_lsda_needed = false;
745 char augmentation[6];
746 int augmentation_size;
747 int fde_encoding = DW_EH_PE_absptr;
748 int per_encoding = DW_EH_PE_absptr;
749 int lsda_encoding = DW_EH_PE_absptr;
750 int return_reg;
751 rtx personality = NULL;
752 int dw_cie_version;
753
754 /* Don't emit a CIE if there won't be any FDEs. */
755 if (!fde_vec)
756 return;
757
758 /* Nothing to do if the assembler's doing it all. */
759 if (dwarf2out_do_cfi_asm ())
760 return;
761
762 /* If we don't have any functions we'll want to unwind out of, don't emit
763 any EH unwind information. If we make FDEs linkonce, we may have to
764 emit an empty label for an FDE that wouldn't otherwise be emitted. We
765 want to avoid having an FDE kept around when the function it refers to
766 is discarded. Example where this matters: a primary function template
767 in C++ requires EH information, an explicit specialization doesn't. */
768 if (for_eh)
769 {
770 bool any_eh_needed = false;
771
772 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
773 {
774 if (fde->uses_eh_lsda)
775 any_eh_needed = any_lsda_needed = true;
776 else if (fde_needed_for_eh_p (fde))
777 any_eh_needed = true;
778 else if (TARGET_USES_WEAK_UNWIND_INFO)
779 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
780 }
781
782 if (!any_eh_needed)
783 return;
784 }
785
786 /* We're going to be generating comments, so turn on app. */
787 if (flag_debug_asm)
788 app_enable ();
789
790 /* Switch to the proper frame section, first time. */
791 switch_to_frame_table_section (for_eh, false);
792
793 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
794 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
795
796 /* Output the CIE. */
797 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
798 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
799 if (!XCOFF_DEBUGGING_INFO || for_eh)
800 {
801 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
802 dw2_asm_output_data (4, 0xffffffff,
803 "Initial length escape value indicating 64-bit DWARF extension");
804 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
805 "Length of Common Information Entry");
806 }
807 ASM_OUTPUT_LABEL (asm_out_file, l1);
808
809 /* Now that the CIE pointer is PC-relative for EH,
810 use 0 to identify the CIE. */
811 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
812 (for_eh ? 0 : DWARF_CIE_ID),
813 "CIE Identifier Tag");
814
815 /* Use the CIE version 3 for DWARF3; allow DWARF2 to continue to
816 use CIE version 1, unless that would produce incorrect results
817 due to overflowing the return register column. */
818 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
819 dw_cie_version = 1;
820 if (return_reg >= 256 || dwarf_version > 2)
821 dw_cie_version = 3;
822 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
823
824 augmentation[0] = 0;
825 augmentation_size = 0;
826
827 personality = current_unit_personality;
828 if (for_eh)
829 {
830 char *p;
831
832 /* Augmentation:
833 z Indicates that a uleb128 is present to size the
834 augmentation section.
835 L Indicates the encoding (and thus presence) of
836 an LSDA pointer in the FDE augmentation.
837 R Indicates a non-default pointer encoding for
838 FDE code pointers.
839 P Indicates the presence of an encoding + language
840 personality routine in the CIE augmentation. */
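/* So, for instance, a C++ translation unit with landing pads and a
   personality routine, using PC-relative FDE encodings, typically ends up
   with the augmentation string "zPLR" (illustrative; the exact string
   depends on the encodings chosen above).  */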
841
842 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
843 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
844 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
845
846 p = augmentation + 1;
847 if (personality)
848 {
849 *p++ = 'P';
850 augmentation_size += 1 + size_of_encoded_value (per_encoding);
851 assemble_external_libcall (personality);
852 }
853 if (any_lsda_needed)
854 {
855 *p++ = 'L';
856 augmentation_size += 1;
857 }
858 if (fde_encoding != DW_EH_PE_absptr)
859 {
860 *p++ = 'R';
861 augmentation_size += 1;
862 }
863 if (p > augmentation + 1)
864 {
865 augmentation[0] = 'z';
866 *p = '\0';
867 }
868
869 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
870 if (personality && per_encoding == DW_EH_PE_aligned)
871 {
872 int offset = ( 4 /* Length */
873 + 4 /* CIE Id */
874 + 1 /* CIE version */
875 + strlen (augmentation) + 1 /* Augmentation */
876 + size_of_uleb128 (1) /* Code alignment */
877 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
878 + 1 /* RA column */
879 + 1 /* Augmentation size */
880 + 1 /* Personality encoding */ );
881 int pad = -offset & (PTR_SIZE - 1);
882
883 augmentation_size += pad;
884
885 /* Augmentations should be small, so there's scarce need to
886 iterate for a solution. Die if we exceed one uleb128 byte. */
887 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
888 }
889 }
890
891 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
892 if (dw_cie_version >= 4)
893 {
894 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
895 dw2_asm_output_data (1, 0, "CIE Segment Size");
896 }
897 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
898 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
899 "CIE Data Alignment Factor");
900
901 if (dw_cie_version == 1)
902 dw2_asm_output_data (1, return_reg, "CIE RA Column");
903 else
904 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
905
906 if (augmentation[0])
907 {
908 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
909 if (personality)
910 {
911 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
912 eh_data_format_name (per_encoding));
913 dw2_asm_output_encoded_addr_rtx (per_encoding,
914 personality,
915 true, NULL);
916 }
917
918 if (any_lsda_needed)
919 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
920 eh_data_format_name (lsda_encoding));
921
922 if (fde_encoding != DW_EH_PE_absptr)
923 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
924 eh_data_format_name (fde_encoding));
925 }
926
927 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
928 output_cfi (cfi, NULL, for_eh);
929
930 /* Pad the CIE out to an address sized boundary. */
931 ASM_OUTPUT_ALIGN (asm_out_file,
932 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
933 ASM_OUTPUT_LABEL (asm_out_file, l2);
934
935 /* Loop through all of the FDE's. */
936 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
937 {
938 unsigned int k;
939
940 /* Don't emit EH unwind info for leaf functions that don't need it. */
941 if (for_eh && !fde_needed_for_eh_p (fde))
942 continue;
943
944 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
945 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
946 augmentation, any_lsda_needed, lsda_encoding);
947 }
948
949 if (for_eh && targetm.terminate_dw2_eh_frame_info)
950 dw2_asm_output_data (4, 0, "End of Table");
951
952 /* Turn off app to make assembly quicker. */
953 if (flag_debug_asm)
954 app_disable ();
955 }
956
957 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
958
959 static void
960 dwarf2out_do_cfi_startproc (bool second)
961 {
962 int enc;
963 rtx ref;
964 rtx personality = get_personality_function (current_function_decl);
965
966 fprintf (asm_out_file, "\t.cfi_startproc\n");
967
968 if (personality)
969 {
970 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
971 ref = personality;
972
973 /* ??? The GAS support isn't entirely consistent. We have to
974 handle indirect support ourselves, but PC-relative is done
975 in the assembler. Further, the assembler can't handle any
976 of the weirder relocation types. */
977 if (enc & DW_EH_PE_indirect)
978 ref = dw2_force_const_mem (ref, true);
979
980 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
981 output_addr_const (asm_out_file, ref);
982 fputc ('\n', asm_out_file);
983 }
984
985 if (crtl->uses_eh_lsda)
986 {
987 char lab[20];
988
989 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
990 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
991 current_function_funcdef_no);
992 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
993 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
994
995 if (enc & DW_EH_PE_indirect)
996 ref = dw2_force_const_mem (ref, true);
997
998 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
999 output_addr_const (asm_out_file, ref);
1000 fputc ('\n', asm_out_file);
1001 }
1002 }
1003
1004 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1005 this allocation may be done before pass_final. */
1006
1007 dw_fde_ref
1008 dwarf2out_alloc_current_fde (void)
1009 {
1010 dw_fde_ref fde;
1011
1012 fde = ggc_cleared_alloc<dw_fde_node> ();
1013 fde->decl = current_function_decl;
1014 fde->funcdef_number = current_function_funcdef_no;
1015 fde->fde_index = vec_safe_length (fde_vec);
1016 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1017 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1018 fde->nothrow = crtl->nothrow;
1019 fde->drap_reg = INVALID_REGNUM;
1020 fde->vdrap_reg = INVALID_REGNUM;
1021
1022 /* Record the FDE associated with this function. */
1023 cfun->fde = fde;
1024 vec_safe_push (fde_vec, fde);
1025
1026 return fde;
1027 }
1028
1029 /* Output a marker (i.e. a label) for the beginning of a function, before
1030 the prologue. */
1031
1032 void
1033 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1034 const char *file ATTRIBUTE_UNUSED)
1035 {
1036 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1037 char * dup_label;
1038 dw_fde_ref fde;
1039 section *fnsec;
1040 bool do_frame;
1041
1042 current_function_func_begin_label = NULL;
1043
1044 do_frame = dwarf2out_do_frame ();
1045
1046 /* ??? current_function_func_begin_label is also used by except.c for
1047 call-site information. We must emit this label if it might be used. */
1048 if (!do_frame
1049 && (!flag_exceptions
1050 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1051 return;
1052
1053 fnsec = function_section (current_function_decl);
1054 switch_to_section (fnsec);
1055 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1056 current_function_funcdef_no);
1057 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1058 current_function_funcdef_no);
1059 dup_label = xstrdup (label);
1060 current_function_func_begin_label = dup_label;
1061
1062 /* We can elide the fde allocation if we're not emitting debug info. */
1063 if (!do_frame)
1064 return;
1065
1066 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1067 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1068 would include pass_dwarf2_frame. If we've not created the FDE yet,
1069 do so now. */
1070 fde = cfun->fde;
1071 if (fde == NULL)
1072 fde = dwarf2out_alloc_current_fde ();
1073
1074 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1075 fde->dw_fde_begin = dup_label;
1076 fde->dw_fde_current_label = dup_label;
1077 fde->in_std_section = (fnsec == text_section
1078 || (cold_text_section && fnsec == cold_text_section));
1079
1080 /* We only want to output line number information for the genuine dwarf2
1081 prologue case, not the eh frame case. */
1082 #ifdef DWARF2_DEBUGGING_INFO
1083 if (file)
1084 dwarf2out_source_line (line, file, 0, true);
1085 #endif
1086
1087 if (dwarf2out_do_cfi_asm ())
1088 dwarf2out_do_cfi_startproc (false);
1089 else
1090 {
1091 rtx personality = get_personality_function (current_function_decl);
1092 if (!current_unit_personality)
1093 current_unit_personality = personality;
1094
1095 /* We cannot keep a current personality per function as without CFI
1096 asm, at the point where we emit the CFI data, there is no current
1097 function anymore. */
1098 if (personality && current_unit_personality != personality)
1099 sorry ("multiple EH personalities are supported only with assemblers "
1100 "supporting .cfi_personality directive");
1101 }
1102 }
1103
1104 /* Output a marker (i.e. a label) for the end of the generated code
1105 for a function prologue. This gets called *after* the prologue code has
1106 been generated. */
1107
1108 void
1109 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1110 const char *file ATTRIBUTE_UNUSED)
1111 {
1112 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1113
1114 /* Output a label to mark the endpoint of the code generated for the
1115 prologue of this function. */
1116 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1117 current_function_funcdef_no);
1118 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1119 current_function_funcdef_no);
1120 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1121 }
1122
1123 /* Output a marker (i.e. a label) for the beginning of the generated code
1124 for a function epilogue. This gets called *before* the epilogue code has
1125 been generated. */
1126
1127 void
1128 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1129 const char *file ATTRIBUTE_UNUSED)
1130 {
1131 dw_fde_ref fde = cfun->fde;
1132 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1133
1134 if (fde->dw_fde_vms_begin_epilogue)
1135 return;
1136
1137 /* Output a label to mark the beginning of the epilogue code generated
1138 for this function. */
1139 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1140 current_function_funcdef_no);
1141 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1142 current_function_funcdef_no);
1143 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1144 }
1145
1146 /* Output a marker (i.e. a label) for the absolute end of the generated code
1147 for a function definition. This gets called *after* the epilogue code has
1148 been generated. */
1149
1150 void
1151 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1152 const char *file ATTRIBUTE_UNUSED)
1153 {
1154 dw_fde_ref fde;
1155 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1156
1157 last_var_location_insn = NULL;
1158 cached_next_real_insn = NULL;
1159
1160 if (dwarf2out_do_cfi_asm ())
1161 fprintf (asm_out_file, "\t.cfi_endproc\n");
1162
1163 /* Output a label to mark the endpoint of the code generated for this
1164 function. */
1165 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1166 current_function_funcdef_no);
1167 ASM_OUTPUT_LABEL (asm_out_file, label);
1168 fde = cfun->fde;
1169 gcc_assert (fde != NULL);
1170 if (fde->dw_fde_second_begin == NULL)
1171 fde->dw_fde_end = xstrdup (label);
1172 }
1173
1174 void
1175 dwarf2out_frame_finish (void)
1176 {
1177 /* Output call frame information. */
1178 if (targetm.debug_unwind_info () == UI_DWARF2)
1179 output_call_frame_info (0);
1180
1181 /* Output another copy for the unwinder. */
1182 if ((flag_unwind_tables || flag_exceptions)
1183 && targetm_common.except_unwind_info (&global_options) == UI_DWARF2)
1184 output_call_frame_info (1);
1185 }
1186
1187 /* Note that the current function section is being used for code. */
1188
1189 static void
1190 dwarf2out_note_section_used (void)
1191 {
1192 section *sec = current_function_section ();
1193 if (sec == text_section)
1194 text_section_used = true;
1195 else if (sec == cold_text_section)
1196 cold_text_section_used = true;
1197 }
1198
1199 static void var_location_switch_text_section (void);
1200 static void set_cur_line_info_table (section *);
1201
1202 void
1203 dwarf2out_switch_text_section (void)
1204 {
1205 section *sect;
1206 dw_fde_ref fde = cfun->fde;
1207
1208 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1209
1210 if (!in_cold_section_p)
1211 {
1212 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1213 fde->dw_fde_second_begin = crtl->subsections.hot_section_label;
1214 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1215 }
1216 else
1217 {
1218 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1219 fde->dw_fde_second_begin = crtl->subsections.cold_section_label;
1220 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1221 }
1222 have_multiple_function_sections = true;
1223
1224 /* There is no need to mark used sections when not debugging. */
1225 if (cold_text_section != NULL)
1226 dwarf2out_note_section_used ();
1227
1228 if (dwarf2out_do_cfi_asm ())
1229 fprintf (asm_out_file, "\t.cfi_endproc\n");
1230
1231 /* Now do the real section switch. */
1232 sect = current_function_section ();
1233 switch_to_section (sect);
1234
1235 fde->second_in_std_section
1236 = (sect == text_section
1237 || (cold_text_section && sect == cold_text_section));
1238
1239 if (dwarf2out_do_cfi_asm ())
1240 dwarf2out_do_cfi_startproc (true);
1241
1242 var_location_switch_text_section ();
1243
1244 if (cold_text_section != NULL)
1245 set_cur_line_info_table (sect);
1246 }
1247 \f
1248 /* And now, the subset of the debugging information support code necessary
1249 for emitting location expressions. */
1250
1251 /* Data about a single source file. */
1252 struct GTY((for_user)) dwarf_file_data {
1253 const char * filename;
1254 int emitted_number;
1255 };
1256
1257 /* Describe an entry into the .debug_addr section. */
1258
1259 enum ate_kind {
1260 ate_kind_rtx,
1261 ate_kind_rtx_dtprel,
1262 ate_kind_label
1263 };
1264
1265 struct GTY((for_user)) addr_table_entry {
1266 enum ate_kind kind;
1267 unsigned int refcount;
1268 unsigned int index;
1269 union addr_table_entry_struct_union
1270 {
1271 rtx GTY ((tag ("0"))) rtl;
1272 char * GTY ((tag ("1"))) label;
1273 }
1274 GTY ((desc ("%1.kind"))) addr;
1275 };
1276
1277 /* Location lists are ranges + location descriptions for that range,
1278 so you can track variables that are in different places over
1279 their entire life. */
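/* For example, a variable that starts out in a register and is later
   spilled to a stack slot might be described by a list along the lines of

     [.LVL0, .LVL1)  DW_OP_reg3
     [.LVL1, .LFE0)  DW_OP_fbreg -24

   where the labels and operands here are purely illustrative.  */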
1280 typedef struct GTY(()) dw_loc_list_struct {
1281 dw_loc_list_ref dw_loc_next;
1282 const char *begin; /* Label and addr_entry for start of range */
1283 addr_table_entry *begin_entry;
1284 const char *end; /* Label for end of range */
1285 char *ll_symbol; /* Label for beginning of location list.
1286 Only on head of list */
1287 const char *section; /* Section this loclist is relative to */
1288 dw_loc_descr_ref expr;
1289 hashval_t hash;
1290 /* True if all addresses in this and subsequent lists are known to be
1291 resolved. */
1292 bool resolved_addr;
1293 /* True if this list has been replaced by dw_loc_next. */
1294 bool replaced;
1295 bool emitted;
1296 /* True if the range should be emitted even if begin and end
1297 are the same. */
1298 bool force;
1299 } dw_loc_list_node;
1300
1301 static dw_loc_descr_ref int_loc_descriptor (HOST_WIDE_INT);
1302 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1303
1304 /* Convert a DWARF stack opcode into its string name. */
1305
1306 static const char *
1307 dwarf_stack_op_name (unsigned int op)
1308 {
1309 const char *name = get_DW_OP_name (op);
1310
1311 if (name != NULL)
1312 return name;
1313
1314 return "OP_<unknown>";
1315 }
1316
1317 /* Return a pointer to a newly allocated location description. Location
1318 descriptions are simple expression terms that can be strung
1319 together to form more complicated location (address) descriptions. */
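/* As a purely illustrative sketch, an object whose address is stored in
   the stack slot at frame base + 8 could be described by chaining two
   terms:

     dw_loc_descr_ref l = new_loc_descr (DW_OP_fbreg, 8, 0);
     add_loc_descr (&l, new_loc_descr (DW_OP_deref, 0, 0));

   Real callers go through loc_descriptor and friends rather than building
   expressions by hand like this.  */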
1320
1321 static inline dw_loc_descr_ref
1322 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1323 unsigned HOST_WIDE_INT oprnd2)
1324 {
1325 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1326
1327 descr->dw_loc_opc = op;
1328 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1329 descr->dw_loc_oprnd1.val_entry = NULL;
1330 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1331 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1332 descr->dw_loc_oprnd2.val_entry = NULL;
1333 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1334
1335 return descr;
1336 }
1337
1338 /* Return a pointer to a newly allocated location description for
1339 REG and OFFSET. */
1340
1341 static inline dw_loc_descr_ref
1342 new_reg_loc_descr (unsigned int reg, unsigned HOST_WIDE_INT offset)
1343 {
1344 if (reg <= 31)
1345 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1346 offset, 0);
1347 else
1348 return new_loc_descr (DW_OP_bregx, reg, offset);
1349 }
1350
1351 /* Add a location description term to a location description expression. */
1352
1353 static inline void
1354 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1355 {
1356 dw_loc_descr_ref *d;
1357
1358 /* Find the end of the chain. */
1359 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1360 ;
1361
1362 *d = descr;
1363 }
1364
1365 /* Compare two location operands for exact equality. */
1366
1367 static bool
1368 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1369 {
1370 if (a->val_class != b->val_class)
1371 return false;
1372 switch (a->val_class)
1373 {
1374 case dw_val_class_none:
1375 return true;
1376 case dw_val_class_addr:
1377 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1378
1379 case dw_val_class_offset:
1380 case dw_val_class_unsigned_const:
1381 case dw_val_class_const:
1382 case dw_val_class_range_list:
1383 case dw_val_class_lineptr:
1384 case dw_val_class_macptr:
1385 /* These are all HOST_WIDE_INT, signed or unsigned. */
1386 return a->v.val_unsigned == b->v.val_unsigned;
1387
1388 case dw_val_class_loc:
1389 return a->v.val_loc == b->v.val_loc;
1390 case dw_val_class_loc_list:
1391 return a->v.val_loc_list == b->v.val_loc_list;
1392 case dw_val_class_die_ref:
1393 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1394 case dw_val_class_fde_ref:
1395 return a->v.val_fde_index == b->v.val_fde_index;
1396 case dw_val_class_lbl_id:
1397 case dw_val_class_high_pc:
1398 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1399 case dw_val_class_str:
1400 return a->v.val_str == b->v.val_str;
1401 case dw_val_class_flag:
1402 return a->v.val_flag == b->v.val_flag;
1403 case dw_val_class_file:
1404 return a->v.val_file == b->v.val_file;
1405 case dw_val_class_decl_ref:
1406 return a->v.val_decl_ref == b->v.val_decl_ref;
1407
1408 case dw_val_class_const_double:
1409 return (a->v.val_double.high == b->v.val_double.high
1410 && a->v.val_double.low == b->v.val_double.low);
1411
1412 case dw_val_class_wide_int:
1413 return *a->v.val_wide == *b->v.val_wide;
1414
1415 case dw_val_class_vec:
1416 {
1417 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1418 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1419
1420 return (a_len == b_len
1421 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1422 }
1423
1424 case dw_val_class_data8:
1425 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1426
1427 case dw_val_class_vms_delta:
1428 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1429 && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
1430
1431 case dw_val_class_discr_value:
1432 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1433 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1434 case dw_val_class_discr_list:
1435 /* It makes no sense comparing two discriminant value lists. */
1436 return false;
1437 }
1438 gcc_unreachable ();
1439 }
1440
1441 /* Compare two location atoms for exact equality. */
1442
1443 static bool
1444 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1445 {
1446 if (a->dw_loc_opc != b->dw_loc_opc)
1447 return false;
1448
1449 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1450 address size, but since we always allocate cleared storage it
1451 should be zero for other types of locations. */
1452 if (a->dtprel != b->dtprel)
1453 return false;
1454
1455 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1456 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1457 }
1458
1459 /* Compare two complete location expressions for exact equality. */
1460
1461 bool
1462 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1463 {
1464 while (1)
1465 {
1466 if (a == b)
1467 return true;
1468 if (a == NULL || b == NULL)
1469 return false;
1470 if (!loc_descr_equal_p_1 (a, b))
1471 return false;
1472
1473 a = a->dw_loc_next;
1474 b = b->dw_loc_next;
1475 }
1476 }
1477
1478
1479 /* Add a constant OFFSET to a location expression. */
1480
1481 static void
1482 loc_descr_plus_const (dw_loc_descr_ref *list_head, HOST_WIDE_INT offset)
1483 {
1484 dw_loc_descr_ref loc;
1485 HOST_WIDE_INT *p;
1486
1487 gcc_assert (*list_head != NULL);
1488
1489 if (!offset)
1490 return;
1491
1492 /* Find the end of the chain. */
1493 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1494 ;
1495
1496 p = NULL;
1497 if (loc->dw_loc_opc == DW_OP_fbreg
1498 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1499 p = &loc->dw_loc_oprnd1.v.val_int;
1500 else if (loc->dw_loc_opc == DW_OP_bregx)
1501 p = &loc->dw_loc_oprnd2.v.val_int;
1502
1503 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1504 offset. Don't optimize if a signed integer overflow would happen. */
1505 if (p != NULL
1506 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1507 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1508 *p += offset;
1509
1510 else if (offset > 0)
1511 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1512
1513 else
1514 {
1515 loc->dw_loc_next = int_loc_descriptor (-offset);
1516 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1517 }
1518 }
1519
1520 /* Add a constant OFFSET to a location list. */
1521
1522 static void
1523 loc_list_plus_const (dw_loc_list_ref list_head, HOST_WIDE_INT offset)
1524 {
1525 dw_loc_list_ref d;
1526 for (d = list_head; d != NULL; d = d->dw_loc_next)
1527 loc_descr_plus_const (&d->expr, offset);
1528 }
1529
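/* The size of a reference to another DIE: an address-sized value in
   DWARF 2, a section offset from DWARF 3 onwards.  Used below for the
   operands of DW_OP_call_ref and DW_OP_GNU_implicit_pointer.  */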
1530 #define DWARF_REF_SIZE \
1531 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1532
1533 static unsigned long int get_base_type_offset (dw_die_ref);
1534
1535 /* Return the size of a location descriptor. */
1536
1537 static unsigned long
1538 size_of_loc_descr (dw_loc_descr_ref loc)
1539 {
1540 unsigned long size = 1;
1541
1542 switch (loc->dw_loc_opc)
1543 {
1544 case DW_OP_addr:
1545 size += DWARF2_ADDR_SIZE;
1546 break;
1547 case DW_OP_GNU_addr_index:
1548 case DW_OP_GNU_const_index:
1549 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1550 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1551 break;
1552 case DW_OP_const1u:
1553 case DW_OP_const1s:
1554 size += 1;
1555 break;
1556 case DW_OP_const2u:
1557 case DW_OP_const2s:
1558 size += 2;
1559 break;
1560 case DW_OP_const4u:
1561 case DW_OP_const4s:
1562 size += 4;
1563 break;
1564 case DW_OP_const8u:
1565 case DW_OP_const8s:
1566 size += 8;
1567 break;
1568 case DW_OP_constu:
1569 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1570 break;
1571 case DW_OP_consts:
1572 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1573 break;
1574 case DW_OP_pick:
1575 size += 1;
1576 break;
1577 case DW_OP_plus_uconst:
1578 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1579 break;
1580 case DW_OP_skip:
1581 case DW_OP_bra:
1582 size += 2;
1583 break;
1584 case DW_OP_breg0:
1585 case DW_OP_breg1:
1586 case DW_OP_breg2:
1587 case DW_OP_breg3:
1588 case DW_OP_breg4:
1589 case DW_OP_breg5:
1590 case DW_OP_breg6:
1591 case DW_OP_breg7:
1592 case DW_OP_breg8:
1593 case DW_OP_breg9:
1594 case DW_OP_breg10:
1595 case DW_OP_breg11:
1596 case DW_OP_breg12:
1597 case DW_OP_breg13:
1598 case DW_OP_breg14:
1599 case DW_OP_breg15:
1600 case DW_OP_breg16:
1601 case DW_OP_breg17:
1602 case DW_OP_breg18:
1603 case DW_OP_breg19:
1604 case DW_OP_breg20:
1605 case DW_OP_breg21:
1606 case DW_OP_breg22:
1607 case DW_OP_breg23:
1608 case DW_OP_breg24:
1609 case DW_OP_breg25:
1610 case DW_OP_breg26:
1611 case DW_OP_breg27:
1612 case DW_OP_breg28:
1613 case DW_OP_breg29:
1614 case DW_OP_breg30:
1615 case DW_OP_breg31:
1616 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1617 break;
1618 case DW_OP_regx:
1619 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1620 break;
1621 case DW_OP_fbreg:
1622 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1623 break;
1624 case DW_OP_bregx:
1625 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1626 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1627 break;
1628 case DW_OP_piece:
1629 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1630 break;
1631 case DW_OP_bit_piece:
1632 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1633 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1634 break;
1635 case DW_OP_deref_size:
1636 case DW_OP_xderef_size:
1637 size += 1;
1638 break;
1639 case DW_OP_call2:
1640 size += 2;
1641 break;
1642 case DW_OP_call4:
1643 size += 4;
1644 break;
1645 case DW_OP_call_ref:
1646 size += DWARF_REF_SIZE;
1647 break;
1648 case DW_OP_implicit_value:
1649 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1650 + loc->dw_loc_oprnd1.v.val_unsigned;
1651 break;
1652 case DW_OP_GNU_implicit_pointer:
1653 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1654 break;
1655 case DW_OP_GNU_entry_value:
1656 {
1657 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1658 size += size_of_uleb128 (op_size) + op_size;
1659 break;
1660 }
1661 case DW_OP_GNU_const_type:
1662 {
1663 unsigned long o
1664 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1665 size += size_of_uleb128 (o) + 1;
1666 switch (loc->dw_loc_oprnd2.val_class)
1667 {
1668 case dw_val_class_vec:
1669 size += loc->dw_loc_oprnd2.v.val_vec.length
1670 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1671 break;
1672 case dw_val_class_const:
1673 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1674 break;
1675 case dw_val_class_const_double:
1676 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1677 break;
1678 case dw_val_class_wide_int:
1679 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1680 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1681 break;
1682 default:
1683 gcc_unreachable ();
1684 }
1685 break;
1686 }
1687 case DW_OP_GNU_regval_type:
1688 {
1689 unsigned long o
1690 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1691 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1692 + size_of_uleb128 (o);
1693 }
1694 break;
1695 case DW_OP_GNU_deref_type:
1696 {
1697 unsigned long o
1698 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1699 size += 1 + size_of_uleb128 (o);
1700 }
1701 break;
1702 case DW_OP_GNU_convert:
1703 case DW_OP_GNU_reinterpret:
1704 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1705 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1706 else
1707 {
1708 unsigned long o
1709 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1710 size += size_of_uleb128 (o);
1711 }
1712 break;
1713 case DW_OP_GNU_parameter_ref:
1714 size += 4;
1715 break;
1716 default:
1717 break;
1718 }
1719
1720 return size;
1721 }
1722
1723 /* Return the size of a series of location descriptors. */
1724
1725 unsigned long
1726 size_of_locs (dw_loc_descr_ref loc)
1727 {
1728 dw_loc_descr_ref l;
1729 unsigned long size;
1730
1731 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
1732 field, to avoid writing to a PCH file. */
1733 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1734 {
1735 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
1736 break;
1737 size += size_of_loc_descr (l);
1738 }
1739 if (! l)
1740 return size;
1741
1742 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1743 {
1744 l->dw_loc_addr = size;
1745 size += size_of_loc_descr (l);
1746 }
1747
1748 return size;
1749 }
1750
1751 /* Return the size of the value in a DW_AT_discr_value attribute. */
1752
1753 static int
1754 size_of_discr_value (dw_discr_value *discr_value)
1755 {
1756 if (discr_value->pos)
1757 return size_of_uleb128 (discr_value->v.uval);
1758 else
1759 return size_of_sleb128 (discr_value->v.sval);
1760 }
1761
1762 /* Return the size of the value in a DW_discr_list attribute. */
1763
1764 static int
1765 size_of_discr_list (dw_discr_list_ref discr_list)
1766 {
1767 int size = 0;
1768
1769 for (dw_discr_list_ref list = discr_list;
1770 list != NULL;
1771 list = list->dw_discr_next)
1772 {
1773 /* One byte for the discriminant value descriptor, and then one or two
1774 LEB128 numbers, depending on whether it's a single case label or a
1775 range label. */
1776 size += 1;
1777 size += size_of_discr_value (&list->dw_discr_lower_bound);
1778 if (list->dw_discr_range != 0)
1779 size += size_of_discr_value (&list->dw_discr_upper_bound);
1780 }
1781 return size;
1782 }
1783
1784 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
1785 static void get_ref_die_offset_label (char *, dw_die_ref);
1786 static unsigned long int get_ref_die_offset (dw_die_ref);
1787
1788 /* Output location description stack opcode's operands (if any).
1789 The for_eh_or_skip parameter controls whether register numbers are
1790 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
1791 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
1792 info). This should be suppressed for the cases that have not been converted
1793 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
1794
1795 static void
1796 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
1797 {
1798 dw_val_ref val1 = &loc->dw_loc_oprnd1;
1799 dw_val_ref val2 = &loc->dw_loc_oprnd2;
1800
1801 switch (loc->dw_loc_opc)
1802 {
1803 #ifdef DWARF2_DEBUGGING_INFO
1804 case DW_OP_const2u:
1805 case DW_OP_const2s:
1806 dw2_asm_output_data (2, val1->v.val_int, NULL);
1807 break;
1808 case DW_OP_const4u:
1809 if (loc->dtprel)
1810 {
1811 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
1812 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
1813 val1->v.val_addr);
1814 fputc ('\n', asm_out_file);
1815 break;
1816 }
1817 /* FALLTHRU */
1818 case DW_OP_const4s:
1819 dw2_asm_output_data (4, val1->v.val_int, NULL);
1820 break;
1821 case DW_OP_const8u:
1822 if (loc->dtprel)
1823 {
1824 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
1825 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
1826 val1->v.val_addr);
1827 fputc ('\n', asm_out_file);
1828 break;
1829 }
1830 /* FALLTHRU */
1831 case DW_OP_const8s:
1832 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
1833 dw2_asm_output_data (8, val1->v.val_int, NULL);
1834 break;
1835 case DW_OP_skip:
1836 case DW_OP_bra:
1837 {
1838 int offset;
1839
1840 gcc_assert (val1->val_class == dw_val_class_loc);
1841 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
1842
1843 dw2_asm_output_data (2, offset, NULL);
1844 }
1845 break;
1846 case DW_OP_implicit_value:
1847 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
1848 switch (val2->val_class)
1849 {
1850 case dw_val_class_const:
1851 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
1852 break;
1853 case dw_val_class_vec:
1854 {
1855 unsigned int elt_size = val2->v.val_vec.elt_size;
1856 unsigned int len = val2->v.val_vec.length;
1857 unsigned int i;
1858 unsigned char *p;
1859
1860 if (elt_size > sizeof (HOST_WIDE_INT))
1861 {
1862 elt_size /= 2;
1863 len *= 2;
1864 }
1865 for (i = 0, p = val2->v.val_vec.array;
1866 i < len;
1867 i++, p += elt_size)
1868 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
1869 "fp or vector constant word %u", i);
1870 }
1871 break;
1872 case dw_val_class_const_double:
1873 {
1874 unsigned HOST_WIDE_INT first, second;
1875
1876 if (WORDS_BIG_ENDIAN)
1877 {
1878 first = val2->v.val_double.high;
1879 second = val2->v.val_double.low;
1880 }
1881 else
1882 {
1883 first = val2->v.val_double.low;
1884 second = val2->v.val_double.high;
1885 }
1886 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
1887 first, NULL);
1888 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
1889 second, NULL);
1890 }
1891 break;
1892 case dw_val_class_wide_int:
1893 {
1894 int i;
1895 int len = get_full_len (*val2->v.val_wide);
1896 if (WORDS_BIG_ENDIAN)
1897 for (i = len - 1; i >= 0; --i)
1898 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
1899 val2->v.val_wide->elt (i), NULL);
1900 else
1901 for (i = 0; i < len; ++i)
1902 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
1903 val2->v.val_wide->elt (i), NULL);
1904 }
1905 break;
1906 case dw_val_class_addr:
1907 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
1908 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
1909 break;
1910 default:
1911 gcc_unreachable ();
1912 }
1913 break;
1914 #else
1915 case DW_OP_const2u:
1916 case DW_OP_const2s:
1917 case DW_OP_const4u:
1918 case DW_OP_const4s:
1919 case DW_OP_const8u:
1920 case DW_OP_const8s:
1921 case DW_OP_skip:
1922 case DW_OP_bra:
1923 case DW_OP_implicit_value:
1924 /* We currently don't make any attempt to make sure these are
1925 aligned properly like we do for the main unwind info, so
1926 don't support emitting things larger than a byte if we're
1927 only doing unwinding. */
1928 gcc_unreachable ();
1929 #endif
1930 case DW_OP_const1u:
1931 case DW_OP_const1s:
1932 dw2_asm_output_data (1, val1->v.val_int, NULL);
1933 break;
1934 case DW_OP_constu:
1935 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
1936 break;
1937 case DW_OP_consts:
1938 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
1939 break;
1940 case DW_OP_pick:
1941 dw2_asm_output_data (1, val1->v.val_int, NULL);
1942 break;
1943 case DW_OP_plus_uconst:
1944 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
1945 break;
1946 case DW_OP_breg0:
1947 case DW_OP_breg1:
1948 case DW_OP_breg2:
1949 case DW_OP_breg3:
1950 case DW_OP_breg4:
1951 case DW_OP_breg5:
1952 case DW_OP_breg6:
1953 case DW_OP_breg7:
1954 case DW_OP_breg8:
1955 case DW_OP_breg9:
1956 case DW_OP_breg10:
1957 case DW_OP_breg11:
1958 case DW_OP_breg12:
1959 case DW_OP_breg13:
1960 case DW_OP_breg14:
1961 case DW_OP_breg15:
1962 case DW_OP_breg16:
1963 case DW_OP_breg17:
1964 case DW_OP_breg18:
1965 case DW_OP_breg19:
1966 case DW_OP_breg20:
1967 case DW_OP_breg21:
1968 case DW_OP_breg22:
1969 case DW_OP_breg23:
1970 case DW_OP_breg24:
1971 case DW_OP_breg25:
1972 case DW_OP_breg26:
1973 case DW_OP_breg27:
1974 case DW_OP_breg28:
1975 case DW_OP_breg29:
1976 case DW_OP_breg30:
1977 case DW_OP_breg31:
1978 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
1979 break;
1980 case DW_OP_regx:
1981 {
1982 unsigned r = val1->v.val_unsigned;
1983 if (for_eh_or_skip >= 0)
1984 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
1985 gcc_assert (size_of_uleb128 (r)
1986 == size_of_uleb128 (val1->v.val_unsigned));
1987 dw2_asm_output_data_uleb128 (r, NULL);
1988 }
1989 break;
1990 case DW_OP_fbreg:
1991 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
1992 break;
1993 case DW_OP_bregx:
1994 {
1995 unsigned r = val1->v.val_unsigned;
1996 if (for_eh_or_skip >= 0)
1997 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
1998 gcc_assert (size_of_uleb128 (r)
1999 == size_of_uleb128 (val1->v.val_unsigned));
2000 dw2_asm_output_data_uleb128 (r, NULL);
2001 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2002 }
2003 break;
2004 case DW_OP_piece:
2005 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2006 break;
2007 case DW_OP_bit_piece:
2008 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2009 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2010 break;
2011 case DW_OP_deref_size:
2012 case DW_OP_xderef_size:
2013 dw2_asm_output_data (1, val1->v.val_int, NULL);
2014 break;
2015
2016 case DW_OP_addr:
2017 if (loc->dtprel)
2018 {
2019 if (targetm.asm_out.output_dwarf_dtprel)
2020 {
2021 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2022 DWARF2_ADDR_SIZE,
2023 val1->v.val_addr);
2024 fputc ('\n', asm_out_file);
2025 }
2026 else
2027 gcc_unreachable ();
2028 }
2029 else
2030 {
2031 #ifdef DWARF2_DEBUGGING_INFO
2032 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2033 #else
2034 gcc_unreachable ();
2035 #endif
2036 }
2037 break;
2038
2039 case DW_OP_GNU_addr_index:
2040 case DW_OP_GNU_const_index:
2041 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2042 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2043 "(index into .debug_addr)");
2044 break;
2045
2046 case DW_OP_call2:
2047 case DW_OP_call4:
2048 {
2049 unsigned long die_offset
2050 = get_ref_die_offset (val1->v.val_die_ref.die);
2051 /* Make sure the offset has been computed and that we can encode it as
2052 an operand. */
2053 gcc_assert (die_offset > 0
2054 && die_offset <= ((loc->dw_loc_opc == DW_OP_call2)
2055 ? 0xffff
2056 : 0xffffffff));
2057 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2058 die_offset, NULL);
2059 }
2060 break;
2061
2062 case DW_OP_GNU_implicit_pointer:
2063 {
2064 char label[MAX_ARTIFICIAL_LABEL_BYTES
2065 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2066 gcc_assert (val1->val_class == dw_val_class_die_ref);
2067 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2068 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2069 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2070 }
2071 break;
2072
2073 case DW_OP_GNU_entry_value:
2074 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2075 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2076 break;
2077
2078 case DW_OP_GNU_const_type:
2079 {
2080 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2081 gcc_assert (o);
2082 dw2_asm_output_data_uleb128 (o, NULL);
2083 switch (val2->val_class)
2084 {
2085 case dw_val_class_const:
2086 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2087 dw2_asm_output_data (1, l, NULL);
2088 dw2_asm_output_data (l, val2->v.val_int, NULL);
2089 break;
2090 case dw_val_class_vec:
2091 {
2092 unsigned int elt_size = val2->v.val_vec.elt_size;
2093 unsigned int len = val2->v.val_vec.length;
2094 unsigned int i;
2095 unsigned char *p;
2096
2097 l = len * elt_size;
2098 dw2_asm_output_data (1, l, NULL);
2099 if (elt_size > sizeof (HOST_WIDE_INT))
2100 {
2101 elt_size /= 2;
2102 len *= 2;
2103 }
2104 for (i = 0, p = val2->v.val_vec.array;
2105 i < len;
2106 i++, p += elt_size)
2107 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2108 "fp or vector constant word %u", i);
2109 }
2110 break;
2111 case dw_val_class_const_double:
2112 {
2113 unsigned HOST_WIDE_INT first, second;
2114 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2115
2116 dw2_asm_output_data (1, 2 * l, NULL);
2117 if (WORDS_BIG_ENDIAN)
2118 {
2119 first = val2->v.val_double.high;
2120 second = val2->v.val_double.low;
2121 }
2122 else
2123 {
2124 first = val2->v.val_double.low;
2125 second = val2->v.val_double.high;
2126 }
2127 dw2_asm_output_data (l, first, NULL);
2128 dw2_asm_output_data (l, second, NULL);
2129 }
2130 break;
2131 case dw_val_class_wide_int:
2132 {
2133 int i;
2134 int len = get_full_len (*val2->v.val_wide);
2135 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2136
2137 dw2_asm_output_data (1, len * l, NULL);
2138 if (WORDS_BIG_ENDIAN)
2139 for (i = len - 1; i >= 0; --i)
2140 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2141 else
2142 for (i = 0; i < len; ++i)
2143 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2144 }
2145 break;
2146 default:
2147 gcc_unreachable ();
2148 }
2149 }
2150 break;
2151 case DW_OP_GNU_regval_type:
2152 {
2153 unsigned r = val1->v.val_unsigned;
2154 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2155 gcc_assert (o);
2156 if (for_eh_or_skip >= 0)
2157 {
2158 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2159 gcc_assert (size_of_uleb128 (r)
2160 == size_of_uleb128 (val1->v.val_unsigned));
2161 }
2162 dw2_asm_output_data_uleb128 (r, NULL);
2163 dw2_asm_output_data_uleb128 (o, NULL);
2164 }
2165 break;
2166 case DW_OP_GNU_deref_type:
2167 {
2168 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2169 gcc_assert (o);
2170 dw2_asm_output_data (1, val1->v.val_int, NULL);
2171 dw2_asm_output_data_uleb128 (o, NULL);
2172 }
2173 break;
2174 case DW_OP_GNU_convert:
2175 case DW_OP_GNU_reinterpret:
2176 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2177 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2178 else
2179 {
2180 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2181 gcc_assert (o);
2182 dw2_asm_output_data_uleb128 (o, NULL);
2183 }
2184 break;
2185
2186 case DW_OP_GNU_parameter_ref:
2187 {
2188 unsigned long o;
2189 gcc_assert (val1->val_class == dw_val_class_die_ref);
2190 o = get_ref_die_offset (val1->v.val_die_ref.die);
2191 dw2_asm_output_data (4, o, NULL);
2192 }
2193 break;
2194
2195 default:
2196 /* Other codes have no operands. */
2197 break;
2198 }
2199 }
2200
2201 /* Output a sequence of location operations.
2202 The for_eh_or_skip parameter controls whether register numbers are
2203 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2204 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2205 info). This should be suppressed for the cases that have not been converted
2206 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2207
2208 void
2209 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2210 {
2211 for (; loc != NULL; loc = loc->dw_loc_next)
2212 {
2213 enum dwarf_location_atom opc = loc->dw_loc_opc;
2214 /* Output the opcode. */
2215 if (for_eh_or_skip >= 0
2216 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2217 {
2218 unsigned r = (opc - DW_OP_breg0);
2219 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2220 gcc_assert (r <= 31);
2221 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2222 }
2223 else if (for_eh_or_skip >= 0
2224 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2225 {
2226 unsigned r = (opc - DW_OP_reg0);
2227 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2228 gcc_assert (r <= 31);
2229 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2230 }
2231
2232 dw2_asm_output_data (1, opc,
2233 "%s", dwarf_stack_op_name (opc));
2234
2235 /* Output the operand(s) (if any). */
2236 output_loc_operands (loc, for_eh_or_skip);
2237 }
2238 }
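/* Minimal illustration (hypothetical descriptor list): a variable living
   at frame offset -16 can be described by the single descriptor
   { DW_OP_fbreg, -16 }; the loop above emits the opcode byte (0x91)
   via dw2_asm_output_data and then the SLEB128 encoding of -16 via
   output_loc_operands.  No register remapping applies, since DW_OP_fbreg
   is not one of the DW_OP_reg0..31 / DW_OP_breg0..31 opcodes.  */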
2239
2240 /* Output location description stack opcode's operands (if any).
2241 The output is single bytes on a line, suitable for .cfi_escape. */
2242
2243 static void
2244 output_loc_operands_raw (dw_loc_descr_ref loc)
2245 {
2246 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2247 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2248
2249 switch (loc->dw_loc_opc)
2250 {
2251 case DW_OP_addr:
2252 case DW_OP_GNU_addr_index:
2253 case DW_OP_GNU_const_index:
2254 case DW_OP_implicit_value:
2255 /* We cannot output addresses in .cfi_escape, only bytes. */
2256 gcc_unreachable ();
2257
2258 case DW_OP_const1u:
2259 case DW_OP_const1s:
2260 case DW_OP_pick:
2261 case DW_OP_deref_size:
2262 case DW_OP_xderef_size:
2263 fputc (',', asm_out_file);
2264 dw2_asm_output_data_raw (1, val1->v.val_int);
2265 break;
2266
2267 case DW_OP_const2u:
2268 case DW_OP_const2s:
2269 fputc (',', asm_out_file);
2270 dw2_asm_output_data_raw (2, val1->v.val_int);
2271 break;
2272
2273 case DW_OP_const4u:
2274 case DW_OP_const4s:
2275 fputc (',', asm_out_file);
2276 dw2_asm_output_data_raw (4, val1->v.val_int);
2277 break;
2278
2279 case DW_OP_const8u:
2280 case DW_OP_const8s:
2281 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2282 fputc (',', asm_out_file);
2283 dw2_asm_output_data_raw (8, val1->v.val_int);
2284 break;
2285
2286 case DW_OP_skip:
2287 case DW_OP_bra:
2288 {
2289 int offset;
2290
2291 gcc_assert (val1->val_class == dw_val_class_loc);
2292 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2293
2294 fputc (',', asm_out_file);
2295 dw2_asm_output_data_raw (2, offset);
2296 }
2297 break;
2298
2299 case DW_OP_regx:
2300 {
2301 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2302 gcc_assert (size_of_uleb128 (r)
2303 == size_of_uleb128 (val1->v.val_unsigned));
2304 fputc (',', asm_out_file);
2305 dw2_asm_output_data_uleb128_raw (r);
2306 }
2307 break;
2308
2309 case DW_OP_constu:
2310 case DW_OP_plus_uconst:
2311 case DW_OP_piece:
2312 fputc (',', asm_out_file);
2313 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2314 break;
2315
2316 case DW_OP_bit_piece:
2317 fputc (',', asm_out_file);
2318 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2319 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2320 break;
2321
2322 case DW_OP_consts:
2323 case DW_OP_breg0:
2324 case DW_OP_breg1:
2325 case DW_OP_breg2:
2326 case DW_OP_breg3:
2327 case DW_OP_breg4:
2328 case DW_OP_breg5:
2329 case DW_OP_breg6:
2330 case DW_OP_breg7:
2331 case DW_OP_breg8:
2332 case DW_OP_breg9:
2333 case DW_OP_breg10:
2334 case DW_OP_breg11:
2335 case DW_OP_breg12:
2336 case DW_OP_breg13:
2337 case DW_OP_breg14:
2338 case DW_OP_breg15:
2339 case DW_OP_breg16:
2340 case DW_OP_breg17:
2341 case DW_OP_breg18:
2342 case DW_OP_breg19:
2343 case DW_OP_breg20:
2344 case DW_OP_breg21:
2345 case DW_OP_breg22:
2346 case DW_OP_breg23:
2347 case DW_OP_breg24:
2348 case DW_OP_breg25:
2349 case DW_OP_breg26:
2350 case DW_OP_breg27:
2351 case DW_OP_breg28:
2352 case DW_OP_breg29:
2353 case DW_OP_breg30:
2354 case DW_OP_breg31:
2355 case DW_OP_fbreg:
2356 fputc (',', asm_out_file);
2357 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2358 break;
2359
2360 case DW_OP_bregx:
2361 {
2362 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2363 gcc_assert (size_of_uleb128 (r)
2364 == size_of_uleb128 (val1->v.val_unsigned));
2365 fputc (',', asm_out_file);
2366 dw2_asm_output_data_uleb128_raw (r);
2367 fputc (',', asm_out_file);
2368 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2369 }
2370 break;
2371
2372 case DW_OP_GNU_implicit_pointer:
2373 case DW_OP_GNU_entry_value:
2374 case DW_OP_GNU_const_type:
2375 case DW_OP_GNU_regval_type:
2376 case DW_OP_GNU_deref_type:
2377 case DW_OP_GNU_convert:
2378 case DW_OP_GNU_reinterpret:
2379 case DW_OP_GNU_parameter_ref:
2380 gcc_unreachable ();
2381 break;
2382
2383 default:
2384 /* Other codes have no operands. */
2385 break;
2386 }
2387 }
2388
2389 void
2390 output_loc_sequence_raw (dw_loc_descr_ref loc)
2391 {
2392 while (1)
2393 {
2394 enum dwarf_location_atom opc = loc->dw_loc_opc;
2395 /* Remap register numbers in reg/breg opcodes for unwind info. */
2396 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2397 {
2398 unsigned r = (opc - DW_OP_breg0);
2399 r = DWARF2_FRAME_REG_OUT (r, 1);
2400 gcc_assert (r <= 31);
2401 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2402 }
2403 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2404 {
2405 unsigned r = (opc - DW_OP_reg0);
2406 r = DWARF2_FRAME_REG_OUT (r, 1);
2407 gcc_assert (r <= 31);
2408 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2409 }
2410 /* Output the opcode. */
2411 fprintf (asm_out_file, "%#x", opc);
2412 output_loc_operands_raw (loc);
2413
2414 if (!loc->dw_loc_next)
2415 break;
2416 loc = loc->dw_loc_next;
2417
2418 fputc (',', asm_out_file);
2419 }
2420 }
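/* Illustrative use (assumed target mapping): for the one-descriptor list
   { DW_OP_breg6, 8 } on a target whose DWARF register 6 maps to itself,
   the code above prints the opcode as "0x76" followed by a comma and the
   raw SLEB128 encoding of 8, producing text suitable as the argument of
   a .cfi_escape directive.  */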
2421
2422 /* This function builds a dwarf location descriptor sequence from a
2423 dw_cfa_location, adding the given OFFSET to the result of the
2424 expression. */
2425
2426 struct dw_loc_descr_node *
2427 build_cfa_loc (dw_cfa_location *cfa, HOST_WIDE_INT offset)
2428 {
2429 struct dw_loc_descr_node *head, *tmp;
2430
2431 offset += cfa->offset;
2432
2433 if (cfa->indirect)
2434 {
2435 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2436 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2437 head->dw_loc_oprnd1.val_entry = NULL;
2438 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2439 add_loc_descr (&head, tmp);
2440 if (offset != 0)
2441 {
2442 tmp = new_loc_descr (DW_OP_plus_uconst, offset, 0);
2443 add_loc_descr (&head, tmp);
2444 }
2445 }
2446 else
2447 head = new_reg_loc_descr (cfa->reg, offset);
2448
2449 return head;
2450 }
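/* Sketch of the output (values assumed for illustration): for an indirect
   CFA saved at register 6 with base_offset 0, cfa->offset 16 and an
   incoming OFFSET of 8, the list built above is

       DW_OP_breg6 0; DW_OP_deref; DW_OP_plus_uconst 24

   i.e. load the saved pointer at the CFA base, then add the accumulated
   offset.  */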
2451
2452 /* This function builds a dwarf location descriptor sequence for
2453 the address at OFFSET from the CFA when the stack is aligned to
2454 ALIGNMENT bytes. */
2455
2456 struct dw_loc_descr_node *
2457 build_cfa_aligned_loc (dw_cfa_location *cfa,
2458 HOST_WIDE_INT offset, HOST_WIDE_INT alignment)
2459 {
2460 struct dw_loc_descr_node *head;
2461 unsigned int dwarf_fp
2462 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2463
2464 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2465 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2466 {
2467 head = new_reg_loc_descr (dwarf_fp, 0);
2468 add_loc_descr (&head, int_loc_descriptor (alignment));
2469 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2470 loc_descr_plus_const (&head, offset);
2471 }
2472 else
2473 head = new_reg_loc_descr (dwarf_fp, offset);
2474 return head;
2475 }
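/* Rough sketch of what the aligned case above computes: with the CFA
   defined as the hard frame pointer, the resulting list evaluates
   "(FP & ALIGNMENT) + OFFSET" -- push the frame pointer value, AND it
   with ALIGNMENT (pushed by int_loc_descriptor), then add OFFSET via
   loc_descr_plus_const; otherwise the address is simply FP + OFFSET.  */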
2476 \f
2477 /* And now, the support for symbolic debugging information. */
2478
2479 /* .debug_str support. */
2480
2481 static void dwarf2out_init (const char *);
2482 static void dwarf2out_finish (const char *);
2483 static void dwarf2out_early_finish (void);
2484 static void dwarf2out_assembly_start (void);
2485 static void dwarf2out_define (unsigned int, const char *);
2486 static void dwarf2out_undef (unsigned int, const char *);
2487 static void dwarf2out_start_source_file (unsigned, const char *);
2488 static void dwarf2out_end_source_file (unsigned);
2489 static void dwarf2out_function_decl (tree);
2490 static void dwarf2out_begin_block (unsigned, unsigned);
2491 static void dwarf2out_end_block (unsigned, unsigned);
2492 static bool dwarf2out_ignore_block (const_tree);
2493 static void dwarf2out_early_global_decl (tree);
2494 static void dwarf2out_late_global_decl (tree);
2495 static void dwarf2out_type_decl (tree, int);
2496 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool);
2497 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2498 dw_die_ref);
2499 static void dwarf2out_abstract_function (tree);
2500 static void dwarf2out_var_location (rtx_insn *);
2501 static void dwarf2out_size_function (tree);
2502 static void dwarf2out_begin_function (tree);
2503 static void dwarf2out_end_function (unsigned int);
2504 static void dwarf2out_register_main_translation_unit (tree unit);
2505 static void dwarf2out_set_name (tree, tree);
2506
2507 /* The debug hooks structure. */
2508
2509 const struct gcc_debug_hooks dwarf2_debug_hooks =
2510 {
2511 dwarf2out_init,
2512 dwarf2out_finish,
2513 dwarf2out_early_finish,
2514 dwarf2out_assembly_start,
2515 dwarf2out_define,
2516 dwarf2out_undef,
2517 dwarf2out_start_source_file,
2518 dwarf2out_end_source_file,
2519 dwarf2out_begin_block,
2520 dwarf2out_end_block,
2521 dwarf2out_ignore_block,
2522 dwarf2out_source_line,
2523 dwarf2out_begin_prologue,
2524 #if VMS_DEBUGGING_INFO
2525 dwarf2out_vms_end_prologue,
2526 dwarf2out_vms_begin_epilogue,
2527 #else
2528 debug_nothing_int_charstar,
2529 debug_nothing_int_charstar,
2530 #endif
2531 dwarf2out_end_epilogue,
2532 dwarf2out_begin_function,
2533 dwarf2out_end_function, /* end_function */
2534 dwarf2out_register_main_translation_unit,
2535 dwarf2out_function_decl, /* function_decl */
2536 dwarf2out_early_global_decl,
2537 dwarf2out_late_global_decl,
2538 dwarf2out_type_decl, /* type_decl */
2539 dwarf2out_imported_module_or_decl,
2540 debug_nothing_tree, /* deferred_inline_function */
2541 /* The DWARF 2 backend tries to reduce debugging bloat by not
2542 emitting the abstract description of inline functions until
2543 something tries to reference them. */
2544 dwarf2out_abstract_function, /* outlining_inline_function */
2545 debug_nothing_rtx_code_label, /* label */
2546 debug_nothing_int, /* handle_pch */
2547 dwarf2out_var_location,
2548 dwarf2out_size_function, /* size_function */
2549 dwarf2out_switch_text_section,
2550 dwarf2out_set_name,
2551 1, /* start_end_main_source_file */
2552 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2553 };
2554
2555 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2556 {
2557 dwarf2out_init,
2558 debug_nothing_charstar,
2559 debug_nothing_void,
2560 debug_nothing_void,
2561 debug_nothing_int_charstar,
2562 debug_nothing_int_charstar,
2563 debug_nothing_int_charstar,
2564 debug_nothing_int,
2565 debug_nothing_int_int, /* begin_block */
2566 debug_nothing_int_int, /* end_block */
2567 debug_true_const_tree, /* ignore_block */
2568 dwarf2out_source_line, /* source_line */
2569 debug_nothing_int_charstar, /* begin_prologue */
2570 debug_nothing_int_charstar, /* end_prologue */
2571 debug_nothing_int_charstar, /* begin_epilogue */
2572 debug_nothing_int_charstar, /* end_epilogue */
2573 debug_nothing_tree, /* begin_function */
2574 debug_nothing_int, /* end_function */
2575 debug_nothing_tree, /* register_main_translation_unit */
2576 debug_nothing_tree, /* function_decl */
2577 debug_nothing_tree, /* early_global_decl */
2578 debug_nothing_tree, /* late_global_decl */
2579 debug_nothing_tree_int, /* type_decl */
2580 debug_nothing_tree_tree_tree_bool, /* imported_module_or_decl */
2581 debug_nothing_tree, /* deferred_inline_function */
2582 debug_nothing_tree, /* outlining_inline_function */
2583 debug_nothing_rtx_code_label, /* label */
2584 debug_nothing_int, /* handle_pch */
2585 debug_nothing_rtx_insn, /* var_location */
2586 debug_nothing_tree, /* size_function */
2587 debug_nothing_void, /* switch_text_section */
2588 debug_nothing_tree_tree, /* set_name */
2589 0, /* start_end_main_source_file */
2590 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2591 };
2592 \f
2593 /* NOTE: In the comments in this file, many references are made to
2594 "Debugging Information Entries". This term is abbreviated as `DIE'
2595 throughout the remainder of this file. */
2596
2597 /* An internal representation of the DWARF output is built, and then
2598 walked to generate the DWARF debugging info. The walk of the internal
2599 representation is done after the entire program has been compiled.
2600 The types below are used to describe the internal representation. */
2601
2602 /* Whether to put type DIEs into their own section .debug_types instead
2603 of making them part of the .debug_info section. Only supported for
2604 DWARF version 4 or higher, and only when the user hasn't disabled
2605 them via -fno-debug-types-section. It is more efficient to put them
2606 in separate comdat sections, since the linker will then be able to
2607 remove duplicates. But not all tools support .debug_types sections
2608 yet. */
2609
2610 #define use_debug_types (dwarf_version >= 4 && flag_debug_types_section)
2611
2612 /* Various DIE's use offsets relative to the beginning of the
2613 .debug_info section to refer to each other. */
2614
2615 typedef long int dw_offset;
2616
2617 struct comdat_type_node;
2618
2619 /* The entries in the line_info table more-or-less mirror the opcodes
2620 that are used in the real dwarf line table. Arrays of these entries
2621 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2622 supported. */
2623
2624 enum dw_line_info_opcode {
2625 /* Emit DW_LNE_set_address; the operand is the label index. */
2626 LI_set_address,
2627
2628 /* Emit a row to the matrix with the given line. This may be done
2629 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2630 special opcodes. */
2631 LI_set_line,
2632
2633 /* Emit a DW_LNS_set_file. */
2634 LI_set_file,
2635
2636 /* Emit a DW_LNS_set_column. */
2637 LI_set_column,
2638
2639 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2640 LI_negate_stmt,
2641
2642 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2643 LI_set_prologue_end,
2644 LI_set_epilogue_begin,
2645
2646 /* Emit a DW_LNE_set_discriminator. */
2647 LI_set_discriminator
2648 };
2649
2650 typedef struct GTY(()) dw_line_info_struct {
2651 enum dw_line_info_opcode opcode;
2652 unsigned int val;
2653 } dw_line_info_entry;
2654
2655
2656 struct GTY(()) dw_line_info_table {
2657 /* The label that marks the end of this section. */
2658 const char *end_label;
2659
2660 /* The values for the last row of the matrix, as collected in the table.
2661 These are used to minimize the changes to the next row. */
2662 unsigned int file_num;
2663 unsigned int line_num;
2664 unsigned int column_num;
2665 int discrim_num;
2666 bool is_stmt;
2667 bool in_use;
2668
2669 vec<dw_line_info_entry, va_gc> *entries;
2670 };
2671
2672
2673 /* Each DIE attribute has a field specifying the attribute kind,
2674 a link to the next attribute in the chain, and an attribute value.
2675 Attributes are typically linked below the DIE they modify. */
2676
2677 typedef struct GTY(()) dw_attr_struct {
2678 enum dwarf_attribute dw_attr;
2679 dw_val_node dw_attr_val;
2680 }
2681 dw_attr_node;
2682
2683
2684 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
2685 The children of each node form a circular list linked by
2686 die_sib. die_child points to the node *before* the "first" child node. */
2687
2688 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
2689 union die_symbol_or_type_node
2690 {
2691 const char * GTY ((tag ("0"))) die_symbol;
2692 comdat_type_node *GTY ((tag ("1"))) die_type_node;
2693 }
2694 GTY ((desc ("%0.comdat_type_p"))) die_id;
2695 vec<dw_attr_node, va_gc> *die_attr;
2696 dw_die_ref die_parent;
2697 dw_die_ref die_child;
2698 dw_die_ref die_sib;
2699 dw_die_ref die_definition; /* ref from a specification to its definition */
2700 dw_offset die_offset;
2701 unsigned long die_abbrev;
2702 int die_mark;
2703 unsigned int decl_id;
2704 enum dwarf_tag die_tag;
2705 /* Die is used and must not be pruned as unused. */
2706 BOOL_BITFIELD die_perennial_p : 1;
2707 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
2708 /* Lots of spare bits. */
2709 }
2710 die_node;
2711
2712 /* Set to TRUE while dwarf2out_early_global_decl is running. */
2713 static bool early_dwarf;
2714 struct set_early_dwarf {
2715 bool saved;
2716 set_early_dwarf () : saved(early_dwarf) { early_dwarf = true; }
2717 ~set_early_dwarf () { early_dwarf = saved; }
2718 };
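/* Hypothetical usage sketch: a caller that needs to generate early DWARF
   for a decl can rely on the RAII guard,

     {
       set_early_dwarf s;
       dwarf2out_decl (decl);
     }

   and early_dwarf is restored to its previous value when S goes out of
   scope, even on early returns.  */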
2719
2720 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
2721 #define FOR_EACH_CHILD(die, c, expr) do { \
2722 c = die->die_child; \
2723 if (c) do { \
2724 c = c->die_sib; \
2725 expr; \
2726 } while (c != die->die_child); \
2727 } while (0)
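/* Hypothetical usage sketch: to dump every child of DIE,

     dw_die_ref c;
     FOR_EACH_CHILD (die, c, print_die (c, stderr));

   walks the circular die_sib list exactly once; the body is skipped
   entirely when DIE has no children (die_child is NULL).  */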
2728
2729 /* The pubname structure */
2730
2731 typedef struct GTY(()) pubname_struct {
2732 dw_die_ref die;
2733 const char *name;
2734 }
2735 pubname_entry;
2736
2737
2738 struct GTY(()) dw_ranges {
2739 /* If this is positive, it's a block number, otherwise it's a
2740 bitwise-negated index into dw_ranges_by_label. */
2741 int num;
2742 };
2743
2744 /* A structure to hold a macinfo entry. */
2745
2746 typedef struct GTY(()) macinfo_struct {
2747 unsigned char code;
2748 unsigned HOST_WIDE_INT lineno;
2749 const char *info;
2750 }
2751 macinfo_entry;
2752
2753
2754 struct GTY(()) dw_ranges_by_label {
2755 const char *begin;
2756 const char *end;
2757 };
2758
2759 /* The comdat type node structure. */
2760 struct GTY(()) comdat_type_node
2761 {
2762 dw_die_ref root_die;
2763 dw_die_ref type_die;
2764 dw_die_ref skeleton_die;
2765 char signature[DWARF_TYPE_SIGNATURE_SIZE];
2766 comdat_type_node *next;
2767 };
2768
2769 /* A list of DIEs for which we can't determine ancestry (parent_die
2770 field) just yet. Later in dwarf2out_finish we will fill in the
2771 missing bits. */
2772 typedef struct GTY(()) limbo_die_struct {
2773 dw_die_ref die;
2774 /* The tree for which this DIE was created. We use this to
2775 determine ancestry later. */
2776 tree created_for;
2777 struct limbo_die_struct *next;
2778 }
2779 limbo_die_node;
2780
2781 typedef struct skeleton_chain_struct
2782 {
2783 dw_die_ref old_die;
2784 dw_die_ref new_die;
2785 struct skeleton_chain_struct *parent;
2786 }
2787 skeleton_chain_node;
2788
2789 /* Define a macro which returns nonzero for a TYPE_DECL which was
2790 implicitly generated for a type.
2791
2792 Note that, unlike the C front-end (which generates a NULL named
2793 TYPE_DECL node for each complete tagged type, each array type,
2794 and each function type node created) the C++ front-end generates
2795 a _named_ TYPE_DECL node for each tagged type node created.
2796 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
2797 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
2798 front-end, but for each type, tagged or not. */
2799
2800 #define TYPE_DECL_IS_STUB(decl) \
2801 (DECL_NAME (decl) == NULL_TREE \
2802 || (DECL_ARTIFICIAL (decl) \
2803 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
2804 /* This is necessary for stub decls that \
2805 appear in nested inline functions. */ \
2806 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
2807 && (decl_ultimate_origin (decl) \
2808 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
2809
2810 /* Information concerning the compilation unit's programming
2811 language, and compiler version. */
2812
2813 /* Fixed size portion of the DWARF compilation unit header. */
2814 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
2815 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 3)
2816
2817 /* Fixed size portion of the DWARF comdat type unit header. */
2818 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
2819 (DWARF_COMPILE_UNIT_HEADER_SIZE + DWARF_TYPE_SIGNATURE_SIZE \
2820 + DWARF_OFFSET_SIZE)
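/* Worked example (assuming 32-bit DWARF, i.e. DWARF_INITIAL_LENGTH_SIZE 4
   and DWARF_OFFSET_SIZE 4): the compile unit header is 4 + 4 + 3 = 11
   bytes -- initial length, .debug_abbrev offset, plus a 2-byte version
   and a 1-byte address size -- and the comdat type unit header adds an
   8-byte type signature and a type DIE offset, giving 11 + 8 + 4 = 23
   bytes.  */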
2821
2822 /* Fixed size portion of public names info. */
2823 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
2824
2825 /* Fixed size portion of the address range info. */
2826 #define DWARF_ARANGES_HEADER_SIZE \
2827 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
2828 DWARF2_ADDR_SIZE * 2) \
2829 - DWARF_INITIAL_LENGTH_SIZE)
2830
2831 /* Size of padding portion in the address range info. It must be
2832 aligned to twice the pointer size. */
2833 #define DWARF_ARANGES_PAD_SIZE \
2834 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
2835 DWARF2_ADDR_SIZE * 2) \
2836 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
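/* Worked example (assuming 32-bit DWARF offsets and an 8-byte address
   size): the fixed fields occupy 4 + 4 + 4 = 12 bytes, which rounds up
   to 16 (twice the address size), so DWARF_ARANGES_HEADER_SIZE is
   16 - 4 = 12 and DWARF_ARANGES_PAD_SIZE is 16 - 12 = 4 bytes of
   padding before the first address/length pair.  */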
2837
2838 /* Use assembler line directives if available. */
2839 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
2840 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
2841 #define DWARF2_ASM_LINE_DEBUG_INFO 1
2842 #else
2843 #define DWARF2_ASM_LINE_DEBUG_INFO 0
2844 #endif
2845 #endif
2846
2847 /* Minimum line offset in a special line info. opcode.
2848 This value was chosen to give a reasonable range of values. */
2849 #define DWARF_LINE_BASE -10
2850
2851 /* First special line opcode - leave room for the standard opcodes. */
2852 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
2853
2854 /* Range of line offsets in a special line info. opcode. */
2855 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
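/* For reference, the standard DWARF special-opcode arithmetic these
   parameters feed into is roughly
     opcode = (line_delta - DWARF_LINE_BASE)
              + DWARF_LINE_RANGE * addr_advance + DWARF_LINE_OPCODE_BASE
   and a special opcode is usable only when the result fits in a single
   byte.  With DW_LNS_set_isa == 12 the opcode base here is 13 and the
   line range is 254 - 13 + 1 == 242.  */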
2856
2857 /* Flag that indicates the initial value of the is_stmt_start flag.
2858 In the present implementation, we do not mark any lines as
2859 the beginning of a source statement, because that information
2860 is not made available by the GCC front-end. */
2861 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
2862
2863 /* Maximum number of operations per instruction bundle. */
2864 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
2865 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
2866 #endif
2867
2868 /* This location is used by calc_die_sizes() to keep track of
2869 the offset of each DIE within the .debug_info section. */
2870 static unsigned long next_die_offset;
2871
2872 /* Record the root of the DIE's built for the current compilation unit. */
2873 static GTY(()) dw_die_ref single_comp_unit_die;
2874
2875 /* A list of type DIEs that have been separated into comdat sections. */
2876 static GTY(()) comdat_type_node *comdat_type_list;
2877
2878 /* A list of DIEs with a NULL parent waiting to be relocated. */
2879 static GTY(()) limbo_die_node *limbo_die_list;
2880
2881 /* A list of DIEs for which we may have to generate
2882 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
2883 static GTY(()) limbo_die_node *deferred_asm_name;
2884
2885 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
2886 {
2887 typedef const char *compare_type;
2888
2889 static hashval_t hash (dwarf_file_data *);
2890 static bool equal (dwarf_file_data *, const char *);
2891 };
2892
2893 /* Filenames referenced by this compilation unit. */
2894 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
2895
2896 struct decl_die_hasher : ggc_ptr_hash<die_node>
2897 {
2898 typedef tree compare_type;
2899
2900 static hashval_t hash (die_node *);
2901 static bool equal (die_node *, tree);
2902 };
2903 /* A hash table of references to DIE's that describe declarations.
2904 The key is a DECL_UID() which is a unique number identifying each decl. */
2905 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
2906
2907 struct block_die_hasher : ggc_ptr_hash<die_struct>
2908 {
2909 static hashval_t hash (die_struct *);
2910 static bool equal (die_struct *, die_struct *);
2911 };
2912
2913 /* A hash table of references to DIE's that describe COMMON blocks.
2914 The key is DECL_UID() ^ die_parent. */
2915 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
2916
2917 typedef struct GTY(()) die_arg_entry_struct {
2918 dw_die_ref die;
2919 tree arg;
2920 } die_arg_entry;
2921
2922
2923 /* Node of the variable location list. */
2924 struct GTY ((chain_next ("%h.next"))) var_loc_node {
2925 /* Either a NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
2926 an EXPR_LIST chain. For small bitsizes, the bitsize is encoded
2927 in the mode of the EXPR_LIST node and the first EXPR_LIST operand
2928 is either NOTE_INSN_VAR_LOCATION for a piece with a known
2929 location or NULL for padding. For larger bitsizes, the
2930 mode is 0 and the first operand is a CONCAT with the bitsize
2931 as the first CONCAT operand and NOTE_INSN_VAR_LOCATION or
2932 NULL as the second operand. */
2933 rtx GTY (()) loc;
2934 const char * GTY (()) label;
2935 struct var_loc_node * GTY (()) next;
2936 };
2937
2938 /* Variable location list. */
2939 struct GTY ((for_user)) var_loc_list_def {
2940 struct var_loc_node * GTY (()) first;
2941
2942 /* Pointer to the last or last-but-one element of the
2943 chained list. If the list is empty, both first and
2944 last are NULL. If the list contains just one node,
2945 or the last node is certainly not redundant, it points
2946 to the last node; otherwise it points to the last but one.
2947 Do not mark it for GC because it is marked through the chain. */
2948 struct var_loc_node * GTY ((skip ("%h"))) last;
2949
2950 /* Pointer to the last element before the section switch;
2951 if NULL, either sections weren't switched or first
2952 is after the section switch. */
2953 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
2954
2955 /* DECL_UID of the variable decl. */
2956 unsigned int decl_id;
2957 };
2958 typedef struct var_loc_list_def var_loc_list;
2959
2960 /* Call argument location list. */
2961 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
2962 rtx GTY (()) call_arg_loc_note;
2963 const char * GTY (()) label;
2964 tree GTY (()) block;
2965 bool tail_call_p;
2966 rtx GTY (()) symbol_ref;
2967 struct call_arg_loc_node * GTY (()) next;
2968 };
2969
2970
2971 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
2972 {
2973 typedef const_tree compare_type;
2974
2975 static hashval_t hash (var_loc_list *);
2976 static bool equal (var_loc_list *, const_tree);
2977 };
2978
2979 /* Table of decl location linked lists. */
2980 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
2981
2982 /* Head and tail of call_arg_loc chain. */
2983 static GTY (()) struct call_arg_loc_node *call_arg_locations;
2984 static struct call_arg_loc_node *call_arg_loc_last;
2985
2986 /* Number of call sites in the current function. */
2987 static int call_site_count = -1;
2988 /* Number of tail call sites in the current function. */
2989 static int tail_call_site_count = -1;
2990
2991 /* A cached location list. */
2992 struct GTY ((for_user)) cached_dw_loc_list_def {
2993 /* The DECL_UID of the decl that this entry describes. */
2994 unsigned int decl_id;
2995
2996 /* The cached location list. */
2997 dw_loc_list_ref loc_list;
2998 };
2999 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3000
3001 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3002 {
3003
3004 typedef const_tree compare_type;
3005
3006 static hashval_t hash (cached_dw_loc_list *);
3007 static bool equal (cached_dw_loc_list *, const_tree);
3008 };
3009
3010 /* Table of cached location lists. */
3011 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3012
3013 /* A pointer to the base of a list of references to DIE's that
3014 are uniquely identified by their tag, presence/absence of
3015 children DIE's, and list of attribute/value pairs. */
3016 static GTY((length ("abbrev_die_table_allocated")))
3017 dw_die_ref *abbrev_die_table;
3018
3019 /* Number of elements currently allocated for abbrev_die_table. */
3020 static GTY(()) unsigned abbrev_die_table_allocated;
3021
3022 /* Number of elements in abbrev_die_table currently in use. */
3023 static GTY(()) unsigned abbrev_die_table_in_use;
3024
3025 /* A hash map to remember the stack usage for DWARF procedures. The value
3026 stored is the stack size difference between before the DWARF procedure
3027 invocation and after it returned. In other words, for a DWARF procedure
3028 that consumes N stack slots and that pushes M ones, this stores M - N. */
3029 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
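/* For instance (illustrative): a DWARF procedure that pops its single
   argument and pushes one result would be recorded with M - N == 0,
   while one that merely pushes a constant would be recorded as +1.  */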
3030
3031 /* Size (in elements) of increments by which we may expand the
3032 abbrev_die_table. */
3033 #define ABBREV_DIE_TABLE_INCREMENT 256
3034
3035 /* A global counter for generating labels for line number data. */
3036 static unsigned int line_info_label_num;
3037
3038 /* The current table to which we should emit line number information
3039 for the current function. This will be set up at the beginning of
3040 assembly for the function. */
3041 static GTY(()) dw_line_info_table *cur_line_info_table;
3042
3043 /* The two default tables of line number info. */
3044 static GTY(()) dw_line_info_table *text_section_line_info;
3045 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3046
3047 /* The set of all non-default tables of line number info. */
3048 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3049
3050 /* A flag telling the pubnames/types export code whether there is an
3051 info section to refer to. */
3052 static bool info_section_emitted;
3053
3054 /* A pointer to the base of a table that contains a list of publicly
3055 accessible names. */
3056 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3057
3058 /* A pointer to the base of a table that contains a list of publicly
3059 accessible types. */
3060 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3061
3062 /* A pointer to the base of a table that contains a list of macro
3063 defines/undefines (and file start/end markers). */
3064 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3065
3066 /* True if .debug_macinfo or .debug_macros section is going to be
3067 emitted. */
3068 #define have_macinfo \
3069 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3070 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3071 && !macinfo_table->is_empty ())
3072
3073 /* Array of dies for which we should generate .debug_ranges info. */
3074 static GTY ((length ("ranges_table_allocated"))) dw_ranges *ranges_table;
3075
3076 /* Number of elements currently allocated for ranges_table. */
3077 static GTY(()) unsigned ranges_table_allocated;
3078
3079 /* Number of elements in ranges_table currently in use. */
3080 static GTY(()) unsigned ranges_table_in_use;
3081
3082 /* Array of pairs of labels referenced in ranges_table. */
3083 static GTY ((length ("ranges_by_label_allocated")))
3084 dw_ranges_by_label *ranges_by_label;
3085
3086 /* Number of elements currently allocated for ranges_by_label. */
3087 static GTY(()) unsigned ranges_by_label_allocated;
3088
3089 /* Number of elements in ranges_by_label currently in use. */
3090 static GTY(()) unsigned ranges_by_label_in_use;
3091
3092 /* Size (in elements) of increments by which we may expand the
3093 ranges_table. */
3094 #define RANGES_TABLE_INCREMENT 64
3095
3096 /* Whether we have location lists that need outputting. */
3097 static GTY(()) bool have_location_lists;
3098
3099 /* Unique label counter. */
3100 static GTY(()) unsigned int loclabel_num;
3101
3102 /* Unique label counter for point-of-call tables. */
3103 static GTY(()) unsigned int poc_label_num;
3104
3105 /* The last file entry emitted by maybe_emit_file(). */
3106 static GTY(()) struct dwarf_file_data * last_emitted_file;
3107
3108 /* Number of internal labels generated by gen_internal_sym(). */
3109 static GTY(()) int label_num;
3110
3111 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3112
3113 /* Instances of generic types for which we need to generate debug
3114 info that describes their generic parameters and arguments. That
3115 generation needs to happen once all types are properly laid out, so
3116 we do it at the end of compilation. */
3117 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3118
3119 /* Offset from the "steady-state frame pointer" to the frame base,
3120 within the current function. */
3121 static HOST_WIDE_INT frame_pointer_fb_offset;
3122 static bool frame_pointer_fb_offset_valid;
3123
3124 static vec<dw_die_ref> base_types;
3125
3126 /* Flags to represent a set of attribute classes for attributes that represent
3127 a scalar value (bounds, pointers, ...). */
3128 enum dw_scalar_form
3129 {
3130 dw_scalar_form_constant = 0x01,
3131 dw_scalar_form_exprloc = 0x02,
3132 dw_scalar_form_reference = 0x04
3133 };
3134
3135 /* Forward declarations for functions defined in this file. */
3136
3137 static int is_pseudo_reg (const_rtx);
3138 static tree type_main_variant (tree);
3139 static int is_tagged_type (const_tree);
3140 static const char *dwarf_tag_name (unsigned);
3141 static const char *dwarf_attr_name (unsigned);
3142 static const char *dwarf_form_name (unsigned);
3143 static tree decl_ultimate_origin (const_tree);
3144 static tree decl_class_context (tree);
3145 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3146 static inline enum dw_val_class AT_class (dw_attr_node *);
3147 static inline unsigned int AT_index (dw_attr_node *);
3148 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3149 static inline unsigned AT_flag (dw_attr_node *);
3150 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3151 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3152 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3153 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3154 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3155 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3156 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3157 unsigned int, unsigned char *);
3158 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3159 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3160 static inline const char *AT_string (dw_attr_node *);
3161 static enum dwarf_form AT_string_form (dw_attr_node *);
3162 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3163 static void add_AT_specification (dw_die_ref, dw_die_ref);
3164 static inline dw_die_ref AT_ref (dw_attr_node *);
3165 static inline int AT_ref_external (dw_attr_node *);
3166 static inline void set_AT_ref_external (dw_attr_node *, int);
3167 static void add_AT_fde_ref (dw_die_ref, enum dwarf_attribute, unsigned);
3168 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3169 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3170 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3171 dw_loc_list_ref);
3172 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3173 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3174 static void remove_addr_table_entry (addr_table_entry *);
3175 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3176 static inline rtx AT_addr (dw_attr_node *);
3177 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3178 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3179 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3180 static void add_AT_offset (dw_die_ref, enum dwarf_attribute,
3181 unsigned HOST_WIDE_INT);
3182 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3183 unsigned long, bool);
3184 static inline const char *AT_lbl (dw_attr_node *);
3185 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3186 static const char *get_AT_low_pc (dw_die_ref);
3187 static const char *get_AT_hi_pc (dw_die_ref);
3188 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3189 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3190 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3191 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3192 static bool is_cxx (void);
3193 static bool is_fortran (void);
3194 static bool is_ada (void);
3195 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3196 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3197 static void add_child_die (dw_die_ref, dw_die_ref);
3198 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3199 static dw_die_ref lookup_type_die (tree);
3200 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3201 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3202 static void equate_type_number_to_die (tree, dw_die_ref);
3203 static dw_die_ref lookup_decl_die (tree);
3204 static var_loc_list *lookup_decl_loc (const_tree);
3205 static void equate_decl_number_to_die (tree, dw_die_ref);
3206 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *);
3207 static void print_spaces (FILE *);
3208 static void print_die (dw_die_ref, FILE *);
3209 static dw_die_ref push_new_compile_unit (dw_die_ref, dw_die_ref);
3210 static dw_die_ref pop_compile_unit (dw_die_ref);
3211 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3212 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3213 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3214 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3215 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3216 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3217 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3218 struct md5_ctx *, int *);
3219 struct checksum_attributes;
3220 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3221 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3222 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3223 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3224 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3225 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3226 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3227 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3228 static int same_die_p_wrap (dw_die_ref, dw_die_ref);
3229 static void compute_section_prefix (dw_die_ref);
3230 static int is_type_die (dw_die_ref);
3231 static int is_comdat_die (dw_die_ref);
3232 static int is_symbol_die (dw_die_ref);
3233 static inline bool is_template_instantiation (dw_die_ref);
3234 static void assign_symbol_names (dw_die_ref);
3235 static void break_out_includes (dw_die_ref);
3236 static int is_declaration_die (dw_die_ref);
3237 static int should_move_die_to_comdat (dw_die_ref);
3238 static dw_die_ref clone_as_declaration (dw_die_ref);
3239 static dw_die_ref clone_die (dw_die_ref);
3240 static dw_die_ref clone_tree (dw_die_ref);
3241 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3242 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3243 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3244 static dw_die_ref generate_skeleton (dw_die_ref);
3245 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3246 dw_die_ref,
3247 dw_die_ref);
3248 static void break_out_comdat_types (dw_die_ref);
3249 static void copy_decls_for_unworthy_types (dw_die_ref);
3250
3251 static void add_sibling_attributes (dw_die_ref);
3252 static void output_location_lists (dw_die_ref);
3253 static int constant_size (unsigned HOST_WIDE_INT);
3254 static unsigned long size_of_die (dw_die_ref);
3255 static void calc_die_sizes (dw_die_ref);
3256 static void calc_base_type_die_sizes (void);
3257 static void mark_dies (dw_die_ref);
3258 static void unmark_dies (dw_die_ref);
3259 static void unmark_all_dies (dw_die_ref);
3260 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3261 static unsigned long size_of_aranges (void);
3262 static enum dwarf_form value_format (dw_attr_node *);
3263 static void output_value_format (dw_attr_node *);
3264 static void output_abbrev_section (void);
3265 static void output_die_abbrevs (unsigned long, dw_die_ref);
3266 static void output_die_symbol (dw_die_ref);
3267 static void output_die (dw_die_ref);
3268 static void output_compilation_unit_header (void);
3269 static void output_comp_unit (dw_die_ref, int);
3270 static void output_comdat_type_unit (comdat_type_node *);
3271 static const char *dwarf2_name (tree, int);
3272 static void add_pubname (tree, dw_die_ref);
3273 static void add_enumerator_pubname (const char *, dw_die_ref);
3274 static void add_pubname_string (const char *, dw_die_ref);
3275 static void add_pubtype (tree, dw_die_ref);
3276 static void output_pubnames (vec<pubname_entry, va_gc> *);
3277 static void output_aranges (void);
3278 static unsigned int add_ranges_num (int);
3279 static unsigned int add_ranges (const_tree);
3280 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3281 bool *, bool);
3282 static void output_ranges (void);
3283 static dw_line_info_table *new_line_info_table (void);
3284 static void output_line_info (bool);
3285 static void output_file_names (void);
3286 static dw_die_ref base_type_die (tree, bool);
3287 static int is_base_type (tree);
3288 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3289 static int decl_quals (const_tree);
3290 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3291 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3292 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3293 static int type_is_enum (const_tree);
3294 static unsigned int dbx_reg_number (const_rtx);
3295 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3296 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3297 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3298 enum var_init_status);
3299 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3300 enum var_init_status);
3301 static dw_loc_descr_ref based_loc_descr (rtx, HOST_WIDE_INT,
3302 enum var_init_status);
3303 static int is_based_loc (const_rtx);
3304 static bool resolve_one_addr (rtx *);
3305 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3306 enum var_init_status);
3307 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3308 enum var_init_status);
3309 struct loc_descr_context;
3310 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3311 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3312 static dw_loc_list_ref loc_list_from_tree (tree, int,
3313 const struct loc_descr_context *);
3314 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3315 const struct loc_descr_context *);
3316 static HOST_WIDE_INT ceiling (HOST_WIDE_INT, unsigned int);
3317 static tree field_type (const_tree);
3318 static unsigned int simple_type_align_in_bits (const_tree);
3319 static unsigned int simple_decl_align_in_bits (const_tree);
3320 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3321 struct vlr_context;
3322 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3323 HOST_WIDE_INT *);
3324 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3325 dw_loc_list_ref);
3326 static void add_data_member_location_attribute (dw_die_ref, tree,
3327 struct vlr_context *);
3328 static bool add_const_value_attribute (dw_die_ref, rtx);
3329 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3330 static void insert_wide_int (const wide_int &, unsigned char *, int);
3331 static void insert_float (const_rtx, unsigned char *);
3332 static rtx rtl_for_decl_location (tree);
3333 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3334 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3335 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3336 static void add_name_attribute (dw_die_ref, const char *);
3337 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3338 static void add_comp_dir_attribute (dw_die_ref);
3339 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3340 const struct loc_descr_context *);
3341 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3342 const struct loc_descr_context *);
3343 static void add_subscript_info (dw_die_ref, tree, bool);
3344 static void add_byte_size_attribute (dw_die_ref, tree);
3345 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3346 struct vlr_context *);
3347 static void add_bit_size_attribute (dw_die_ref, tree);
3348 static void add_prototyped_attribute (dw_die_ref, tree);
3349 static dw_die_ref add_abstract_origin_attribute (dw_die_ref, tree);
3350 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3351 static void add_src_coords_attributes (dw_die_ref, tree);
3352 static void add_name_and_src_coords_attributes (dw_die_ref, tree);
3353 static void add_discr_value (dw_die_ref, dw_discr_value *);
3354 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3355 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3356 static void push_decl_scope (tree);
3357 static void pop_decl_scope (void);
3358 static dw_die_ref scope_die_for (tree, dw_die_ref);
3359 static inline int local_scope_p (dw_die_ref);
3360 static inline int class_scope_p (dw_die_ref);
3361 static inline int class_or_namespace_scope_p (dw_die_ref);
3362 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3363 static void add_calling_convention_attribute (dw_die_ref, tree);
3364 static const char *type_tag (const_tree);
3365 static tree member_declared_type (const_tree);
3366 #if 0
3367 static const char *decl_start_label (tree);
3368 #endif
3369 static void gen_array_type_die (tree, dw_die_ref);
3370 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3371 #if 0
3372 static void gen_entry_point_die (tree, dw_die_ref);
3373 #endif
3374 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3375 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3376 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3377 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3378 static void gen_formal_types_die (tree, dw_die_ref);
3379 static void gen_subprogram_die (tree, dw_die_ref);
3380 static void gen_variable_die (tree, tree, dw_die_ref);
3381 static void gen_const_die (tree, dw_die_ref);
3382 static void gen_label_die (tree, dw_die_ref);
3383 static void gen_lexical_block_die (tree, dw_die_ref);
3384 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3385 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3386 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3387 static dw_die_ref gen_compile_unit_die (const char *);
3388 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3389 static void gen_member_die (tree, dw_die_ref);
3390 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3391 enum debug_info_usage);
3392 static void gen_subroutine_type_die (tree, dw_die_ref);
3393 static void gen_typedef_die (tree, dw_die_ref);
3394 static void gen_type_die (tree, dw_die_ref);
3395 static void gen_block_die (tree, dw_die_ref);
3396 static void decls_for_scope (tree, dw_die_ref);
3397 static bool is_naming_typedef_decl (const_tree);
3398 static inline dw_die_ref get_context_die (tree);
3399 static void gen_namespace_die (tree, dw_die_ref);
3400 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3401 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3402 static dw_die_ref force_decl_die (tree);
3403 static dw_die_ref force_type_die (tree);
3404 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3405 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3406 static struct dwarf_file_data * lookup_filename (const char *);
3407 static void retry_incomplete_types (void);
3408 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3409 static void gen_generic_params_dies (tree);
3410 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3411 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3412 static void splice_child_die (dw_die_ref, dw_die_ref);
3413 static int file_info_cmp (const void *, const void *);
3414 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *,
3415 const char *, const char *);
3416 static void output_loc_list (dw_loc_list_ref);
3417 static char *gen_internal_sym (const char *);
3418 static bool want_pubnames (void);
3419
3420 static void prune_unmark_dies (dw_die_ref);
3421 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3422 static void prune_unused_types_mark (dw_die_ref, int);
3423 static void prune_unused_types_walk (dw_die_ref);
3424 static void prune_unused_types_walk_attribs (dw_die_ref);
3425 static void prune_unused_types_prune (dw_die_ref);
3426 static void prune_unused_types (void);
3427 static int maybe_emit_file (struct dwarf_file_data *fd);
3428 static inline const char *AT_vms_delta1 (dw_attr_node *);
3429 static inline const char *AT_vms_delta2 (dw_attr_node *);
3430 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3431 const char *, const char *);
3432 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3433 static void gen_remaining_tmpl_value_param_die_attribute (void);
3434 static bool generic_type_p (tree);
3435 static void schedule_generic_params_dies_gen (tree t);
3436 static void gen_scheduled_generic_parms_dies (void);
3437
3438 static const char *comp_dir_string (void);
3439
3440 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3441
3442 /* enum for tracking thread-local variables whose address is really an offset
3443 relative to the TLS pointer, which will need link-time relocation, but will
3444 not need relocation by the DWARF consumer. */
3445
3446 enum dtprel_bool
3447 {
3448 dtprel_false = 0,
3449 dtprel_true = 1
3450 };
3451
3452 /* Return the operator to use for an address of a variable. For dtprel_true, we
3453 use DW_OP_const*. For regular variables, which need both link-time
3454 relocation and consumer-level relocation (e.g., to account for shared objects
3455 loaded at a random address), we use DW_OP_addr*. */
3456
3457 static inline enum dwarf_location_atom
3458 dw_addr_op (enum dtprel_bool dtprel)
3459 {
3460 if (dtprel == dtprel_true)
3461 return (dwarf_split_debug_info ? DW_OP_GNU_const_index
3462 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3463 else
3464 return dwarf_split_debug_info ? DW_OP_GNU_addr_index : DW_OP_addr;
3465 }
3466
3467 /* Return a pointer to a newly allocated address location description. If
3468 dwarf_split_debug_info is true, then record the address with the appropriate
3469 relocation. */
3470 static inline dw_loc_descr_ref
3471 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3472 {
3473 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3474
3475 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3476 ref->dw_loc_oprnd1.v.val_addr = addr;
3477 ref->dtprel = dtprel;
3478 if (dwarf_split_debug_info)
3479 ref->dw_loc_oprnd1.val_entry
3480 = add_addr_table_entry (addr,
3481 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3482 else
3483 ref->dw_loc_oprnd1.val_entry = NULL;
3484
3485 return ref;
3486 }
3487
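/* For illustration of the two helpers above: with !dwarf_split_debug_info a
   call such as

       new_addr_loc_descr (addr, dtprel_false)

   yields a single DW_OP_addr operation carrying ADDR directly (val_entry is
   NULL), while under -gsplit-dwarf the same call yields DW_OP_GNU_addr_index
   and registers ADDR in the .debug_addr table via add_addr_table_entry.  A
   dtprel_true request instead selects DW_OP_const4u/DW_OP_const8u (or
   DW_OP_GNU_const_index when splitting), since a TLS offset needs link-time
   but not consumer-side relocation.  */
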
3488 /* Section names used to hold DWARF debugging information. */
3489
3490 #ifndef DEBUG_INFO_SECTION
3491 #define DEBUG_INFO_SECTION ".debug_info"
3492 #endif
3493 #ifndef DEBUG_DWO_INFO_SECTION
3494 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3495 #endif
3496 #ifndef DEBUG_ABBREV_SECTION
3497 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3498 #endif
3499 #ifndef DEBUG_DWO_ABBREV_SECTION
3500 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3501 #endif
3502 #ifndef DEBUG_ARANGES_SECTION
3503 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3504 #endif
3505 #ifndef DEBUG_ADDR_SECTION
3506 #define DEBUG_ADDR_SECTION ".debug_addr"
3507 #endif
3508 #ifndef DEBUG_NORM_MACINFO_SECTION
3509 #define DEBUG_NORM_MACINFO_SECTION ".debug_macinfo"
3510 #endif
3511 #ifndef DEBUG_DWO_MACINFO_SECTION
3512 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
3513 #endif
3514 #ifndef DEBUG_MACINFO_SECTION
3515 #define DEBUG_MACINFO_SECTION \
3516 (!dwarf_split_debug_info \
3517 ? (DEBUG_NORM_MACINFO_SECTION) : (DEBUG_DWO_MACINFO_SECTION))
3518 #endif
3519 #ifndef DEBUG_NORM_MACRO_SECTION
3520 #define DEBUG_NORM_MACRO_SECTION ".debug_macro"
3521 #endif
3522 #ifndef DEBUG_DWO_MACRO_SECTION
3523 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
3524 #endif
3525 #ifndef DEBUG_MACRO_SECTION
3526 #define DEBUG_MACRO_SECTION \
3527 (!dwarf_split_debug_info \
3528 ? (DEBUG_NORM_MACRO_SECTION) : (DEBUG_DWO_MACRO_SECTION))
3529 #endif
3530 #ifndef DEBUG_LINE_SECTION
3531 #define DEBUG_LINE_SECTION ".debug_line"
3532 #endif
3533 #ifndef DEBUG_DWO_LINE_SECTION
3534 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
3535 #endif
3536 #ifndef DEBUG_LOC_SECTION
3537 #define DEBUG_LOC_SECTION ".debug_loc"
3538 #endif
3539 #ifndef DEBUG_DWO_LOC_SECTION
3540 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
3541 #endif
3542 #ifndef DEBUG_PUBNAMES_SECTION
3543 #define DEBUG_PUBNAMES_SECTION \
3544 ((debug_generate_pub_sections == 2) \
3545 ? ".debug_gnu_pubnames" : ".debug_pubnames")
3546 #endif
3547 #ifndef DEBUG_PUBTYPES_SECTION
3548 #define DEBUG_PUBTYPES_SECTION \
3549 ((debug_generate_pub_sections == 2) \
3550 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
3551 #endif
3552 #define DEBUG_NORM_STR_OFFSETS_SECTION ".debug_str_offsets"
3553 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
3554 #ifndef DEBUG_STR_OFFSETS_SECTION
3555 #define DEBUG_STR_OFFSETS_SECTION \
3556 (!dwarf_split_debug_info \
3557 ? (DEBUG_NORM_STR_OFFSETS_SECTION) : (DEBUG_DWO_STR_OFFSETS_SECTION))
3558 #endif
3559 #ifndef DEBUG_STR_DWO_SECTION
3560 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
3561 #endif
3562 #ifndef DEBUG_STR_SECTION
3563 #define DEBUG_STR_SECTION ".debug_str"
3564 #endif
3565 #ifndef DEBUG_RANGES_SECTION
3566 #define DEBUG_RANGES_SECTION ".debug_ranges"
3567 #endif
3568
3569 /* Standard ELF section names for compiled code and data. */
3570 #ifndef TEXT_SECTION_NAME
3571 #define TEXT_SECTION_NAME ".text"
3572 #endif
3573
3574 /* Section flags for .debug_macinfo/.debug_macro section. */
3575 #define DEBUG_MACRO_SECTION_FLAGS \
3576 (dwarf_split_debug_info ? SECTION_DEBUG | SECTION_EXCLUDE : SECTION_DEBUG)
3577
3578 /* Section flags for .debug_str section. */
3579 #define DEBUG_STR_SECTION_FLAGS \
3580 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
3581 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
3582 : SECTION_DEBUG)
3583
3584 /* Section flags for .debug_str.dwo section. */
3585 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
3586
3587 /* Labels we insert at the beginning of sections so that we can refer to
3588 them instead of the section names themselves. */
3589
3590 #ifndef TEXT_SECTION_LABEL
3591 #define TEXT_SECTION_LABEL "Ltext"
3592 #endif
3593 #ifndef COLD_TEXT_SECTION_LABEL
3594 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
3595 #endif
3596 #ifndef DEBUG_LINE_SECTION_LABEL
3597 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
3598 #endif
3599 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
3600 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
3601 #endif
3602 #ifndef DEBUG_INFO_SECTION_LABEL
3603 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
3604 #endif
3605 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
3606 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
3607 #endif
3608 #ifndef DEBUG_ABBREV_SECTION_LABEL
3609 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
3610 #endif
3611 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
3612 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
3613 #endif
3614 #ifndef DEBUG_ADDR_SECTION_LABEL
3615 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
3616 #endif
3617 #ifndef DEBUG_LOC_SECTION_LABEL
3618 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
3619 #endif
3620 #ifndef DEBUG_RANGES_SECTION_LABEL
3621 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
3622 #endif
3623 #ifndef DEBUG_MACINFO_SECTION_LABEL
3624 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
3625 #endif
3626 #ifndef DEBUG_MACRO_SECTION_LABEL
3627 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
3628 #endif
3629 #define SKELETON_COMP_DIE_ABBREV 1
3630 #define SKELETON_TYPE_DIE_ABBREV 2
3631
3632 /* Definitions of defaults for formats and names of various special
3633 (artificial) labels which may be generated within this file (when the -g
3634 option is used and DWARF2_DEBUGGING_INFO is in effect).
3635 If necessary, these may be overridden from within the tm.h file, but
3636 typically, overriding these defaults is unnecessary. */
3637
3638 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
3639 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3640 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3641 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
3642 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3643 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3644 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3645 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3646 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3647 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3648 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3649 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3650 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3651 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
3652
3653 #ifndef TEXT_END_LABEL
3654 #define TEXT_END_LABEL "Letext"
3655 #endif
3656 #ifndef COLD_END_LABEL
3657 #define COLD_END_LABEL "Letext_cold"
3658 #endif
3659 #ifndef BLOCK_BEGIN_LABEL
3660 #define BLOCK_BEGIN_LABEL "LBB"
3661 #endif
3662 #ifndef BLOCK_END_LABEL
3663 #define BLOCK_END_LABEL "LBE"
3664 #endif
3665 #ifndef LINE_CODE_LABEL
3666 #define LINE_CODE_LABEL "LM"
3667 #endif
3668
3669 \f
3670 /* Return the root of the DIEs built for the current compilation unit. */
3671 static dw_die_ref
3672 comp_unit_die (void)
3673 {
3674 if (!single_comp_unit_die)
3675 single_comp_unit_die = gen_compile_unit_die (NULL);
3676 return single_comp_unit_die;
3677 }
3678
3679 /* We allow a language front-end to designate a function that is to be
3680 called to "demangle" any name before it is put into a DIE. */
3681
3682 static const char *(*demangle_name_func) (const char *);
3683
3684 void
3685 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
3686 {
3687 demangle_name_func = func;
3688 }
3689
3690 /* Test if rtl node points to a pseudo register. */
3691
3692 static inline int
3693 is_pseudo_reg (const_rtx rtl)
3694 {
3695 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
3696 || (GET_CODE (rtl) == SUBREG
3697 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
3698 }
3699
3700 /* Return a reference to a type, with its const and volatile qualifiers
3701 removed. */
3702
3703 static inline tree
3704 type_main_variant (tree type)
3705 {
3706 type = TYPE_MAIN_VARIANT (type);
3707
3708 /* ??? There really should be only one main variant among any group of
3709 variants of a given type (and all of the MAIN_VARIANT values for all
3710 members of the group should point to that one type) but sometimes the C
3711 front-end messes this up for array types, so we work around that bug
3712 here. */
3713 if (TREE_CODE (type) == ARRAY_TYPE)
3714 while (type != TYPE_MAIN_VARIANT (type))
3715 type = TYPE_MAIN_VARIANT (type);
3716
3717 return type;
3718 }
3719
3720 /* Return nonzero if the given type node represents a tagged type. */
3721
3722 static inline int
3723 is_tagged_type (const_tree type)
3724 {
3725 enum tree_code code = TREE_CODE (type);
3726
3727 return (code == RECORD_TYPE || code == UNION_TYPE
3728 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
3729 }
3730
3731 /* Set LABEL to debug_info_section_label + the die_offset of DIE reference REF. */
3732
3733 static void
3734 get_ref_die_offset_label (char *label, dw_die_ref ref)
3735 {
3736 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
3737 }
3738
3739 /* Return die_offset of a DIE reference to a base type. */
3740
3741 static unsigned long int
3742 get_base_type_offset (dw_die_ref ref)
3743 {
3744 if (ref->die_offset)
3745 return ref->die_offset;
3746 if (comp_unit_die ()->die_abbrev)
3747 {
3748 calc_base_type_die_sizes ();
3749 gcc_assert (ref->die_offset);
3750 }
3751 return ref->die_offset;
3752 }
3753
3754 /* Return die_offset of a DIE reference other than base type. */
3755
3756 static unsigned long int
3757 get_ref_die_offset (dw_die_ref ref)
3758 {
3759 gcc_assert (ref->die_offset);
3760 return ref->die_offset;
3761 }
3762
3763 /* Convert a DIE tag into its string name. */
3764
3765 static const char *
3766 dwarf_tag_name (unsigned int tag)
3767 {
3768 const char *name = get_DW_TAG_name (tag);
3769
3770 if (name != NULL)
3771 return name;
3772
3773 return "DW_TAG_<unknown>";
3774 }
3775
3776 /* Convert a DWARF attribute code into its string name. */
3777
3778 static const char *
3779 dwarf_attr_name (unsigned int attr)
3780 {
3781 const char *name;
3782
3783 switch (attr)
3784 {
3785 #if VMS_DEBUGGING_INFO
3786 case DW_AT_HP_prologue:
3787 return "DW_AT_HP_prologue";
3788 #else
3789 case DW_AT_MIPS_loop_unroll_factor:
3790 return "DW_AT_MIPS_loop_unroll_factor";
3791 #endif
3792
3793 #if VMS_DEBUGGING_INFO
3794 case DW_AT_HP_epilogue:
3795 return "DW_AT_HP_epilogue";
3796 #else
3797 case DW_AT_MIPS_stride:
3798 return "DW_AT_MIPS_stride";
3799 #endif
3800 }
3801
3802 name = get_DW_AT_name (attr);
3803
3804 if (name != NULL)
3805 return name;
3806
3807 return "DW_AT_<unknown>";
3808 }
3809
3810 /* Convert a DWARF value form code into its string name. */
3811
3812 static const char *
3813 dwarf_form_name (unsigned int form)
3814 {
3815 const char *name = get_DW_FORM_name (form);
3816
3817 if (name != NULL)
3818 return name;
3819
3820 return "DW_FORM_<unknown>";
3821 }
3822 \f
3823 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
3824 instance of an inlined instance of a decl which is local to an inline
3825 function, so we have to trace all of the way back through the origin chain
3826 to find out what sort of node actually served as the original seed for the
3827 given block. */
3828
3829 static tree
3830 decl_ultimate_origin (const_tree decl)
3831 {
3832 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
3833 return NULL_TREE;
3834
3835 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
3836 we're trying to output the abstract instance of this function. */
3837 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
3838 return NULL_TREE;
3839
3840 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
3841 most distant ancestor, this should never happen. */
3842 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
3843
3844 return DECL_ABSTRACT_ORIGIN (decl);
3845 }
3846
3847 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
3848 of a virtual function may refer to a base class, so we check the 'this'
3849 parameter. */
3850
3851 static tree
3852 decl_class_context (tree decl)
3853 {
3854 tree context = NULL_TREE;
3855
3856 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
3857 context = DECL_CONTEXT (decl);
3858 else
3859 context = TYPE_MAIN_VARIANT
3860 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
3861
3862 if (context && !TYPE_P (context))
3863 context = NULL_TREE;
3864
3865 return context;
3866 }
3867 \f
3868 /* Add an attribute/value pair to a DIE. */
3869
3870 static inline void
3871 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
3872 {
3873 /* Maybe this should be an assert? */
3874 if (die == NULL)
3875 return;
3876
3877 vec_safe_reserve (die->die_attr, 1);
3878 vec_safe_push (die->die_attr, *attr);
3879 }
3880
3881 static inline enum dw_val_class
3882 AT_class (dw_attr_node *a)
3883 {
3884 return a->dw_attr_val.val_class;
3885 }
3886
3887 /* Return the index for any attribute that will be referenced with a
3888 DW_FORM_GNU_addr_index or DW_FORM_GNU_str_index. String indices
3889 are stored in dw_attr_val.v.val_str for reference-counting
3890 pruning. */
3891
3892 static inline unsigned int
3893 AT_index (dw_attr_node *a)
3894 {
3895 if (AT_class (a) == dw_val_class_str)
3896 return a->dw_attr_val.v.val_str->index;
3897 else if (a->dw_attr_val.val_entry != NULL)
3898 return a->dw_attr_val.val_entry->index;
3899 return NOT_INDEXED;
3900 }
3901
3902 /* Add a flag value attribute to a DIE. */
3903
3904 static inline void
3905 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
3906 {
3907 dw_attr_node attr;
3908
3909 attr.dw_attr = attr_kind;
3910 attr.dw_attr_val.val_class = dw_val_class_flag;
3911 attr.dw_attr_val.val_entry = NULL;
3912 attr.dw_attr_val.v.val_flag = flag;
3913 add_dwarf_attr (die, &attr);
3914 }
3915
3916 static inline unsigned
3917 AT_flag (dw_attr_node *a)
3918 {
3919 gcc_assert (a && AT_class (a) == dw_val_class_flag);
3920 return a->dw_attr_val.v.val_flag;
3921 }
3922
3923 /* Add a signed integer attribute value to a DIE. */
3924
3925 static inline void
3926 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
3927 {
3928 dw_attr_node attr;
3929
3930 attr.dw_attr = attr_kind;
3931 attr.dw_attr_val.val_class = dw_val_class_const;
3932 attr.dw_attr_val.val_entry = NULL;
3933 attr.dw_attr_val.v.val_int = int_val;
3934 add_dwarf_attr (die, &attr);
3935 }
3936
3937 static inline HOST_WIDE_INT
3938 AT_int (dw_attr_node *a)
3939 {
3940 gcc_assert (a && AT_class (a) == dw_val_class_const);
3941 return a->dw_attr_val.v.val_int;
3942 }
3943
3944 /* Add an unsigned integer attribute value to a DIE. */
3945
3946 static inline void
3947 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
3948 unsigned HOST_WIDE_INT unsigned_val)
3949 {
3950 dw_attr_node attr;
3951
3952 attr.dw_attr = attr_kind;
3953 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
3954 attr.dw_attr_val.val_entry = NULL;
3955 attr.dw_attr_val.v.val_unsigned = unsigned_val;
3956 add_dwarf_attr (die, &attr);
3957 }
3958
3959 static inline unsigned HOST_WIDE_INT
3960 AT_unsigned (dw_attr_node *a)
3961 {
3962 gcc_assert (a && AT_class (a) == dw_val_class_unsigned_const);
3963 return a->dw_attr_val.v.val_unsigned;
3964 }
3965
3966 /* Add an unsigned wide integer attribute value to a DIE. */
3967
3968 static inline void
3969 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
3970 const wide_int& w)
3971 {
3972 dw_attr_node attr;
3973
3974 attr.dw_attr = attr_kind;
3975 attr.dw_attr_val.val_class = dw_val_class_wide_int;
3976 attr.dw_attr_val.val_entry = NULL;
3977 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
3978 *attr.dw_attr_val.v.val_wide = w;
3979 add_dwarf_attr (die, &attr);
3980 }
3981
3982 /* Add an unsigned double integer attribute value to a DIE. */
3983
3984 static inline void
3985 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
3986 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
3987 {
3988 dw_attr_node attr;
3989
3990 attr.dw_attr = attr_kind;
3991 attr.dw_attr_val.val_class = dw_val_class_const_double;
3992 attr.dw_attr_val.val_entry = NULL;
3993 attr.dw_attr_val.v.val_double.high = high;
3994 attr.dw_attr_val.v.val_double.low = low;
3995 add_dwarf_attr (die, &attr);
3996 }
3997
3998 /* Add a floating-point or vector constant attribute value, given as a byte array, to a DIE. */
3999
4000 static inline void
4001 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4002 unsigned int length, unsigned int elt_size, unsigned char *array)
4003 {
4004 dw_attr_node attr;
4005
4006 attr.dw_attr = attr_kind;
4007 attr.dw_attr_val.val_class = dw_val_class_vec;
4008 attr.dw_attr_val.val_entry = NULL;
4009 attr.dw_attr_val.v.val_vec.length = length;
4010 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4011 attr.dw_attr_val.v.val_vec.array = array;
4012 add_dwarf_attr (die, &attr);
4013 }
4014
4015 /* Add an 8-byte data attribute value to a DIE. */
4016
4017 static inline void
4018 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4019 unsigned char data8[8])
4020 {
4021 dw_attr_node attr;
4022
4023 attr.dw_attr = attr_kind;
4024 attr.dw_attr_val.val_class = dw_val_class_data8;
4025 attr.dw_attr_val.val_entry = NULL;
4026 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4027 add_dwarf_attr (die, &attr);
4028 }
4029
4030 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4031 dwarf_split_debug_info, address attributes in dies destined for the
4032 final executable have force_direct set to avoid using indexed
4033 references. */
4034
4035 static inline void
4036 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4037 bool force_direct)
4038 {
4039 dw_attr_node attr;
4040 char * lbl_id;
4041
4042 lbl_id = xstrdup (lbl_low);
4043 attr.dw_attr = DW_AT_low_pc;
4044 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4045 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4046 if (dwarf_split_debug_info && !force_direct)
4047 attr.dw_attr_val.val_entry
4048 = add_addr_table_entry (lbl_id, ate_kind_label);
4049 else
4050 attr.dw_attr_val.val_entry = NULL;
4051 add_dwarf_attr (die, &attr);
4052
4053 attr.dw_attr = DW_AT_high_pc;
4054 if (dwarf_version < 4)
4055 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4056 else
4057 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4058 lbl_id = xstrdup (lbl_high);
4059 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4060 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4061 && dwarf_split_debug_info && !force_direct)
4062 attr.dw_attr_val.val_entry
4063 = add_addr_table_entry (lbl_id, ate_kind_label);
4064 else
4065 attr.dw_attr_val.val_entry = NULL;
4066 add_dwarf_attr (die, &attr);
4067 }
4068
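/* Sketch of what add_AT_low_high_pc produces: for dwarf_version < 4 both
   attributes carry label (address) values, i.e. DW_AT_low_pc = LBL_LOW and
   DW_AT_high_pc = LBL_HIGH.  For DWARF 4 and later DW_AT_high_pc instead
   gets class dw_val_class_high_pc, so that it can be emitted as the
   constant offset LBL_HIGH - LBL_LOW from DW_AT_low_pc, as DWARF 4
   permits.  */
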
4069 /* Hash and equality functions for debug_str_hash. */
4070
4071 hashval_t
4072 indirect_string_hasher::hash (indirect_string_node *x)
4073 {
4074 return htab_hash_string (x->str);
4075 }
4076
4077 bool
4078 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4079 {
4080 return strcmp (x1->str, x2) == 0;
4081 }
4082
4083 /* Add STR to the given string hash table. */
4084
4085 static struct indirect_string_node *
4086 find_AT_string_in_table (const char *str,
4087 hash_table<indirect_string_hasher> *table)
4088 {
4089 struct indirect_string_node *node;
4090
4091 indirect_string_node **slot
4092 = table->find_slot_with_hash (str, htab_hash_string (str), INSERT);
4093 if (*slot == NULL)
4094 {
4095 node = ggc_cleared_alloc<indirect_string_node> ();
4096 node->str = ggc_strdup (str);
4097 *slot = node;
4098 }
4099 else
4100 node = *slot;
4101
4102 node->refcount++;
4103 return node;
4104 }
4105
4106 /* Add STR to the indirect string hash table. */
4107
4108 static struct indirect_string_node *
4109 find_AT_string (const char *str)
4110 {
4111 if (! debug_str_hash)
4112 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4113
4114 return find_AT_string_in_table (str, debug_str_hash);
4115 }
4116
4117 /* Add a string attribute value to a DIE. */
4118
4119 static inline void
4120 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4121 {
4122 dw_attr_node attr;
4123 struct indirect_string_node *node;
4124
4125 node = find_AT_string (str);
4126
4127 attr.dw_attr = attr_kind;
4128 attr.dw_attr_val.val_class = dw_val_class_str;
4129 attr.dw_attr_val.val_entry = NULL;
4130 attr.dw_attr_val.v.val_str = node;
4131 add_dwarf_attr (die, &attr);
4132 }
4133
4134 static inline const char *
4135 AT_string (dw_attr_node *a)
4136 {
4137 gcc_assert (a && AT_class (a) == dw_val_class_str);
4138 return a->dw_attr_val.v.val_str->str;
4139 }
4140
4141 /* Call this function directly to bypass AT_string_form's logic to put
4142 the string inline in the die. */
4143
4144 static void
4145 set_indirect_string (struct indirect_string_node *node)
4146 {
4147 char label[32];
4148 /* Already indirect is a no-op. */
4149 if (node->form == DW_FORM_strp || node->form == DW_FORM_GNU_str_index)
4150 {
4151 gcc_assert (node->label);
4152 return;
4153 }
4154 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4155 ++dw2_string_counter;
4156 node->label = xstrdup (label);
4157
4158 if (!dwarf_split_debug_info)
4159 {
4160 node->form = DW_FORM_strp;
4161 node->index = NOT_INDEXED;
4162 }
4163 else
4164 {
4165 node->form = DW_FORM_GNU_str_index;
4166 node->index = NO_INDEX_ASSIGNED;
4167 }
4168 }
4169
4170 /* Find out whether a string should be output inline in DIE
4171 or out-of-line in .debug_str section. */
4172
4173 static enum dwarf_form
4174 find_string_form (struct indirect_string_node *node)
4175 {
4176 unsigned int len;
4177
4178 if (node->form)
4179 return node->form;
4180
4181 len = strlen (node->str) + 1;
4182
4183 /* If the string is shorter or equal to the size of the reference, it is
4184 always better to put it inline. */
4185 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4186 return node->form = DW_FORM_string;
4187
4188 /* If we cannot expect the linker to merge strings in .debug_str
4189 section, only put it into .debug_str if it is worthwhile even in this
4190 single module. */
4191 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4192 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4193 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4194 return node->form = DW_FORM_string;
4195
4196 set_indirect_string (node);
4197
4198 return node->form;
4199 }
4200
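/* A worked example of the heuristic above: with 4-byte DWARF offsets
   (DWARF_OFFSET_SIZE == 4), a string of at most three characters (four
   bytes counting the terminating NUL) is always kept inline as
   DW_FORM_string, since an indirect reference could not be smaller.
   Likewise, when the linker cannot merge .debug_str contents, a string
   referenced only once stays inline, because (len - 4) * 1 <= len always
   holds and the indirection would only add bytes.  */
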
4201 /* Find out whether the string referenced from the attribute should be
4202 output inline in DIE or out-of-line in .debug_str section. */
4203
4204 static enum dwarf_form
4205 AT_string_form (dw_attr_node *a)
4206 {
4207 gcc_assert (a && AT_class (a) == dw_val_class_str);
4208 return find_string_form (a->dw_attr_val.v.val_str);
4209 }
4210
4211 /* Add a DIE reference attribute value to a DIE. */
4212
4213 static inline void
4214 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4215 {
4216 dw_attr_node attr;
4217 gcc_checking_assert (targ_die != NULL);
4218
4219 /* With LTO we can end up trying to reference something we didn't create
4220 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4221 if (targ_die == NULL)
4222 return;
4223
4224 attr.dw_attr = attr_kind;
4225 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4226 attr.dw_attr_val.val_entry = NULL;
4227 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4228 attr.dw_attr_val.v.val_die_ref.external = 0;
4229 add_dwarf_attr (die, &attr);
4230 }
4231
4232 /* Change DIE reference REF to point to NEW_DIE instead. */
4233
4234 static inline void
4235 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4236 {
4237 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4238 ref->dw_attr_val.v.val_die_ref.die = new_die;
4239 ref->dw_attr_val.v.val_die_ref.external = 0;
4240 }
4241
4242 /* Add an AT_specification attribute to a DIE, and also make the back
4243 pointer from the specification to the definition. */
4244
4245 static inline void
4246 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4247 {
4248 add_AT_die_ref (die, DW_AT_specification, targ_die);
4249 gcc_assert (!targ_die->die_definition);
4250 targ_die->die_definition = die;
4251 }
4252
4253 static inline dw_die_ref
4254 AT_ref (dw_attr_node *a)
4255 {
4256 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4257 return a->dw_attr_val.v.val_die_ref.die;
4258 }
4259
4260 static inline int
4261 AT_ref_external (dw_attr_node *a)
4262 {
4263 if (a && AT_class (a) == dw_val_class_die_ref)
4264 return a->dw_attr_val.v.val_die_ref.external;
4265
4266 return 0;
4267 }
4268
4269 static inline void
4270 set_AT_ref_external (dw_attr_node *a, int i)
4271 {
4272 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4273 a->dw_attr_val.v.val_die_ref.external = i;
4274 }
4275
4276 /* Add an FDE reference attribute value to a DIE. */
4277
4278 static inline void
4279 add_AT_fde_ref (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int targ_fde)
4280 {
4281 dw_attr_node attr;
4282
4283 attr.dw_attr = attr_kind;
4284 attr.dw_attr_val.val_class = dw_val_class_fde_ref;
4285 attr.dw_attr_val.val_entry = NULL;
4286 attr.dw_attr_val.v.val_fde_index = targ_fde;
4287 add_dwarf_attr (die, &attr);
4288 }
4289
4290 /* Add a location description attribute value to a DIE. */
4291
4292 static inline void
4293 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4294 {
4295 dw_attr_node attr;
4296
4297 attr.dw_attr = attr_kind;
4298 attr.dw_attr_val.val_class = dw_val_class_loc;
4299 attr.dw_attr_val.val_entry = NULL;
4300 attr.dw_attr_val.v.val_loc = loc;
4301 add_dwarf_attr (die, &attr);
4302 }
4303
4304 static inline dw_loc_descr_ref
4305 AT_loc (dw_attr_node *a)
4306 {
4307 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4308 return a->dw_attr_val.v.val_loc;
4309 }
4310
4311 static inline void
4312 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4313 {
4314 dw_attr_node attr;
4315
4316 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4317 return;
4318
4319 attr.dw_attr = attr_kind;
4320 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4321 attr.dw_attr_val.val_entry = NULL;
4322 attr.dw_attr_val.v.val_loc_list = loc_list;
4323 add_dwarf_attr (die, &attr);
4324 have_location_lists = true;
4325 }
4326
4327 static inline dw_loc_list_ref
4328 AT_loc_list (dw_attr_node *a)
4329 {
4330 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4331 return a->dw_attr_val.v.val_loc_list;
4332 }
4333
4334 static inline dw_loc_list_ref *
4335 AT_loc_list_ptr (dw_attr_node *a)
4336 {
4337 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4338 return &a->dw_attr_val.v.val_loc_list;
4339 }
4340
4341 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4342 {
4343 static hashval_t hash (addr_table_entry *);
4344 static bool equal (addr_table_entry *, addr_table_entry *);
4345 };
4346
4347 /* Table of entries into the .debug_addr section. */
4348
4349 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4350
4351 /* Hash an address_table_entry. */
4352
4353 hashval_t
4354 addr_hasher::hash (addr_table_entry *a)
4355 {
4356 inchash::hash hstate;
4357 switch (a->kind)
4358 {
4359 case ate_kind_rtx:
4360 hstate.add_int (0);
4361 break;
4362 case ate_kind_rtx_dtprel:
4363 hstate.add_int (1);
4364 break;
4365 case ate_kind_label:
4366 return htab_hash_string (a->addr.label);
4367 default:
4368 gcc_unreachable ();
4369 }
4370 inchash::add_rtx (a->addr.rtl, hstate);
4371 return hstate.end ();
4372 }
4373
4374 /* Determine equality for two address_table_entries. */
4375
4376 bool
4377 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4378 {
4379 if (a1->kind != a2->kind)
4380 return 0;
4381 switch (a1->kind)
4382 {
4383 case ate_kind_rtx:
4384 case ate_kind_rtx_dtprel:
4385 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
4386 case ate_kind_label:
4387 return strcmp (a1->addr.label, a2->addr.label) == 0;
4388 default:
4389 gcc_unreachable ();
4390 }
4391 }
4392
4393 /* Initialize an addr_table_entry. */
4394
4395 void
4396 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
4397 {
4398 e->kind = kind;
4399 switch (kind)
4400 {
4401 case ate_kind_rtx:
4402 case ate_kind_rtx_dtprel:
4403 e->addr.rtl = (rtx) addr;
4404 break;
4405 case ate_kind_label:
4406 e->addr.label = (char *) addr;
4407 break;
4408 }
4409 e->refcount = 0;
4410 e->index = NO_INDEX_ASSIGNED;
4411 }
4412
4413 /* Add an entry for ADDR (of kind KIND) to the address table, creating the
4414 entry if it is not already present. Defer setting an index until output time. */
4415
4416 static addr_table_entry *
4417 add_addr_table_entry (void *addr, enum ate_kind kind)
4418 {
4419 addr_table_entry *node;
4420 addr_table_entry finder;
4421
4422 gcc_assert (dwarf_split_debug_info);
4423 if (! addr_index_table)
4424 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
4425 init_addr_table_entry (&finder, kind, addr);
4426 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
4427
4428 if (*slot == HTAB_EMPTY_ENTRY)
4429 {
4430 node = ggc_cleared_alloc<addr_table_entry> ();
4431 init_addr_table_entry (node, kind, addr);
4432 *slot = node;
4433 }
4434 else
4435 node = *slot;
4436
4437 node->refcount++;
4438 return node;
4439 }
4440
4441 /* Remove an entry from the addr table by decrementing its refcount.
4442 Strictly, decrementing the refcount would be enough, but the
4443 assertion that the entry is actually in the table has found
4444 bugs. */
4445
4446 static void
4447 remove_addr_table_entry (addr_table_entry *entry)
4448 {
4449 gcc_assert (dwarf_split_debug_info && addr_index_table);
4450 /* After an index is assigned, the table is frozen. */
4451 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
4452 entry->refcount--;
4453 }
4454
4455 /* Given a location list, remove all addresses it refers to from the
4456 address_table. */
4457
4458 static void
4459 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
4460 {
4461 for (; descr; descr = descr->dw_loc_next)
4462 if (descr->dw_loc_oprnd1.val_entry != NULL)
4463 {
4464 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
4465 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
4466 }
4467 }
4468
4469 /* A helper function for dwarf2out_finish called through
4470 htab_traverse. Assign an addr_table_entry its index. All entries
4471 must be collected into the table when this function is called,
4472 because the indexing code relies on htab_traverse to traverse nodes
4473 in the same order for each run. */
4474
4475 int
4476 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
4477 {
4478 addr_table_entry *node = *h;
4479
4480 /* Don't index unreferenced nodes. */
4481 if (node->refcount == 0)
4482 return 1;
4483
4484 gcc_assert (node->index == NO_INDEX_ASSIGNED);
4485 node->index = *index;
4486 *index += 1;
4487
4488 return 1;
4489 }
4490
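/* Taken together, the helpers above give .debug_addr entries a simple
   lifecycle: add_addr_table_entry interns an address or label and bumps its
   refcount, remove_addr_table_entry drops a reference again, and only
   entries still referenced when the table is walked for dwarf2out_finish
   receive an index from index_addr_table_entry.  Once indices have been
   assigned the table is effectively frozen, as the assertion in
   remove_addr_table_entry checks.  */
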
4491 /* Add an address constant attribute value to a DIE. When using
4492 dwarf_split_debug_info, address attributes in dies destined for the
4493 final executable should be direct references--setting the parameter
4494 force_direct ensures this behavior. */
4495
4496 static inline void
4497 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
4498 bool force_direct)
4499 {
4500 dw_attr_node attr;
4501
4502 attr.dw_attr = attr_kind;
4503 attr.dw_attr_val.val_class = dw_val_class_addr;
4504 attr.dw_attr_val.v.val_addr = addr;
4505 if (dwarf_split_debug_info && !force_direct)
4506 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
4507 else
4508 attr.dw_attr_val.val_entry = NULL;
4509 add_dwarf_attr (die, &attr);
4510 }
4511
4512 /* Get the RTX from an address DIE attribute. */
4513
4514 static inline rtx
4515 AT_addr (dw_attr_node *a)
4516 {
4517 gcc_assert (a && AT_class (a) == dw_val_class_addr);
4518 return a->dw_attr_val.v.val_addr;
4519 }
4520
4521 /* Add a file attribute value to a DIE. */
4522
4523 static inline void
4524 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
4525 struct dwarf_file_data *fd)
4526 {
4527 dw_attr_node attr;
4528
4529 attr.dw_attr = attr_kind;
4530 attr.dw_attr_val.val_class = dw_val_class_file;
4531 attr.dw_attr_val.val_entry = NULL;
4532 attr.dw_attr_val.v.val_file = fd;
4533 add_dwarf_attr (die, &attr);
4534 }
4535
4536 /* Get the dwarf_file_data from a file DIE attribute. */
4537
4538 static inline struct dwarf_file_data *
4539 AT_file (dw_attr_node *a)
4540 {
4541 gcc_assert (a && AT_class (a) == dw_val_class_file);
4542 return a->dw_attr_val.v.val_file;
4543 }
4544
4545 /* Add a vms delta attribute value to a DIE. */
4546
4547 static inline void
4548 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
4549 const char *lbl1, const char *lbl2)
4550 {
4551 dw_attr_node attr;
4552
4553 attr.dw_attr = attr_kind;
4554 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
4555 attr.dw_attr_val.val_entry = NULL;
4556 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
4557 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
4558 add_dwarf_attr (die, &attr);
4559 }
4560
4561 /* Add a label identifier attribute value to a DIE. */
4562
4563 static inline void
4564 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
4565 const char *lbl_id)
4566 {
4567 dw_attr_node attr;
4568
4569 attr.dw_attr = attr_kind;
4570 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4571 attr.dw_attr_val.val_entry = NULL;
4572 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
4573 if (dwarf_split_debug_info)
4574 attr.dw_attr_val.val_entry
4575 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
4576 ate_kind_label);
4577 add_dwarf_attr (die, &attr);
4578 }
4579
4580 /* Add a section offset attribute value to a DIE, an offset into the
4581 debug_line section. */
4582
4583 static inline void
4584 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
4585 const char *label)
4586 {
4587 dw_attr_node attr;
4588
4589 attr.dw_attr = attr_kind;
4590 attr.dw_attr_val.val_class = dw_val_class_lineptr;
4591 attr.dw_attr_val.val_entry = NULL;
4592 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
4593 add_dwarf_attr (die, &attr);
4594 }
4595
4596 /* Add a section offset attribute value to a DIE, an offset into the
4597 debug_macinfo section. */
4598
4599 static inline void
4600 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
4601 const char *label)
4602 {
4603 dw_attr_node attr;
4604
4605 attr.dw_attr = attr_kind;
4606 attr.dw_attr_val.val_class = dw_val_class_macptr;
4607 attr.dw_attr_val.val_entry = NULL;
4608 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
4609 add_dwarf_attr (die, &attr);
4610 }
4611
4612 /* Add an offset attribute value to a DIE. */
4613
4614 static inline void
4615 add_AT_offset (dw_die_ref die, enum dwarf_attribute attr_kind,
4616 unsigned HOST_WIDE_INT offset)
4617 {
4618 dw_attr_node attr;
4619
4620 attr.dw_attr = attr_kind;
4621 attr.dw_attr_val.val_class = dw_val_class_offset;
4622 attr.dw_attr_val.val_entry = NULL;
4623 attr.dw_attr_val.v.val_offset = offset;
4624 add_dwarf_attr (die, &attr);
4625 }
4626
4627 /* Add a range_list attribute value to a DIE. When using
4628 dwarf_split_debug_info, address attributes in dies destined for the
4629 final executable should be direct references--setting the parameter
4630 force_direct ensures this behavior. */
4631
4632 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
4633 #define RELOCATED_OFFSET (NULL)
4634
4635 static void
4636 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
4637 long unsigned int offset, bool force_direct)
4638 {
4639 dw_attr_node attr;
4640
4641 attr.dw_attr = attr_kind;
4642 attr.dw_attr_val.val_class = dw_val_class_range_list;
4643 /* For the range_list attribute, use val_entry to store whether the
4644 offset should follow split-debug-info or normal semantics. This
4645 value is read in output_range_list_offset. */
4646 if (dwarf_split_debug_info && !force_direct)
4647 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
4648 else
4649 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
4650 attr.dw_attr_val.v.val_offset = offset;
4651 add_dwarf_attr (die, &attr);
4652 }
4653
4654 /* Return the start label of a delta attribute. */
4655
4656 static inline const char *
4657 AT_vms_delta1 (dw_attr_node *a)
4658 {
4659 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
4660 return a->dw_attr_val.v.val_vms_delta.lbl1;
4661 }
4662
4663 /* Return the end label of a delta attribute. */
4664
4665 static inline const char *
4666 AT_vms_delta2 (dw_attr_node *a)
4667 {
4668 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
4669 return a->dw_attr_val.v.val_vms_delta.lbl2;
4670 }
4671
4672 static inline const char *
4673 AT_lbl (dw_attr_node *a)
4674 {
4675 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
4676 || AT_class (a) == dw_val_class_lineptr
4677 || AT_class (a) == dw_val_class_macptr
4678 || AT_class (a) == dw_val_class_high_pc));
4679 return a->dw_attr_val.v.val_lbl_id;
4680 }
4681
4682 /* Get the attribute of type ATTR_KIND, looking through DW_AT_specification and DW_AT_abstract_origin references if DIE itself lacks it. */
4683
4684 static dw_attr_node *
4685 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
4686 {
4687 dw_attr_node *a;
4688 unsigned ix;
4689 dw_die_ref spec = NULL;
4690
4691 if (! die)
4692 return NULL;
4693
4694 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4695 if (a->dw_attr == attr_kind)
4696 return a;
4697 else if (a->dw_attr == DW_AT_specification
4698 || a->dw_attr == DW_AT_abstract_origin)
4699 spec = AT_ref (a);
4700
4701 if (spec)
4702 return get_AT (spec, attr_kind);
4703
4704 return NULL;
4705 }
4706
4707 /* Returns the parent of the declaration of DIE. */
4708
4709 static dw_die_ref
4710 get_die_parent (dw_die_ref die)
4711 {
4712 dw_die_ref t;
4713
4714 if (!die)
4715 return NULL;
4716
4717 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
4718 || (t = get_AT_ref (die, DW_AT_specification)))
4719 die = t;
4720
4721 return die->die_parent;
4722 }
4723
4724 /* Return the "low pc" attribute value, typically associated with a subprogram
4725 DIE. Return null if the "low pc" attribute is either not present, or if it
4726 cannot be represented as an assembler label identifier. */
4727
4728 static inline const char *
4729 get_AT_low_pc (dw_die_ref die)
4730 {
4731 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
4732
4733 return a ? AT_lbl (a) : NULL;
4734 }
4735
4736 /* Return the "high pc" attribute value, typically associated with a subprogram
4737 DIE. Return null if the "high pc" attribute is either not present, or if it
4738 cannot be represented as an assembler label identifier. */
4739
4740 static inline const char *
4741 get_AT_hi_pc (dw_die_ref die)
4742 {
4743 dw_attr_node *a = get_AT (die, DW_AT_high_pc);
4744
4745 return a ? AT_lbl (a) : NULL;
4746 }
4747
4748 /* Return the value of the string attribute designated by ATTR_KIND, or
4749 NULL if it is not present. */
4750
4751 static inline const char *
4752 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
4753 {
4754 dw_attr_node *a = get_AT (die, attr_kind);
4755
4756 return a ? AT_string (a) : NULL;
4757 }
4758
4759 /* Return the value of the flag attribute designated by ATTR_KIND, or -1
4760 if it is not present. */
4761
4762 static inline int
4763 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
4764 {
4765 dw_attr_node *a = get_AT (die, attr_kind);
4766
4767 return a ? AT_flag (a) : 0;
4768 }
4769
4770 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
4771 if it is not present. */
4772
4773 static inline unsigned
4774 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
4775 {
4776 dw_attr_node *a = get_AT (die, attr_kind);
4777
4778 return a ? AT_unsigned (a) : 0;
4779 }
4780
4781 static inline dw_die_ref
4782 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
4783 {
4784 dw_attr_node *a = get_AT (die, attr_kind);
4785
4786 return a ? AT_ref (a) : NULL;
4787 }
4788
4789 static inline struct dwarf_file_data *
4790 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
4791 {
4792 dw_attr_node *a = get_AT (die, attr_kind);
4793
4794 return a ? AT_file (a) : NULL;
4795 }
4796
4797 /* Return TRUE if the language is C++. */
4798
4799 static inline bool
4800 is_cxx (void)
4801 {
4802 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
4803
4804 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
4805 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
4806 }
4807
4808 /* Return TRUE if the language is Java. */
4809
4810 static inline bool
4811 is_java (void)
4812 {
4813 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
4814
4815 return lang == DW_LANG_Java;
4816 }
4817
4818 /* Return TRUE if the language is Fortran. */
4819
4820 static inline bool
4821 is_fortran (void)
4822 {
4823 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
4824
4825 return (lang == DW_LANG_Fortran77
4826 || lang == DW_LANG_Fortran90
4827 || lang == DW_LANG_Fortran95
4828 || lang == DW_LANG_Fortran03
4829 || lang == DW_LANG_Fortran08);
4830 }
4831
4832 /* Return TRUE if the language is Ada. */
4833
4834 static inline bool
4835 is_ada (void)
4836 {
4837 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
4838
4839 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
4840 }
4841
4842 /* Remove the specified attribute if present. Return TRUE if removal
4843 was successful. */
4844
4845 static bool
4846 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
4847 {
4848 dw_attr_node *a;
4849 unsigned ix;
4850
4851 if (! die)
4852 return false;
4853
4854 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4855 if (a->dw_attr == attr_kind)
4856 {
4857 if (AT_class (a) == dw_val_class_str)
4858 if (a->dw_attr_val.v.val_str->refcount)
4859 a->dw_attr_val.v.val_str->refcount--;
4860
4861 /* vec::ordered_remove should help reduce the number of abbrevs
4862 that are needed. */
4863 die->die_attr->ordered_remove (ix);
4864 return true;
4865 }
4866 return false;
4867 }
4868
4869 /* Remove CHILD from its parent. PREV must have the property that
4870 PREV->DIE_SIB == CHILD. Does not alter CHILD. */
4871
4872 static void
4873 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
4874 {
4875 gcc_assert (child->die_parent == prev->die_parent);
4876 gcc_assert (prev->die_sib == child);
4877 if (prev == child)
4878 {
4879 gcc_assert (child->die_parent->die_child == child);
4880 prev = NULL;
4881 }
4882 else
4883 prev->die_sib = child->die_sib;
4884 if (child->die_parent->die_child == child)
4885 child->die_parent->die_child = prev;
4886 }
4887
4888 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
4889 PREV->DIE_SIB == OLD_CHILD. Does not alter OLD_CHILD. */
4890
4891 static void
4892 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
4893 {
4894 dw_die_ref parent = old_child->die_parent;
4895
4896 gcc_assert (parent == prev->die_parent);
4897 gcc_assert (prev->die_sib == old_child);
4898
4899 new_child->die_parent = parent;
4900 if (prev == old_child)
4901 {
4902 gcc_assert (parent->die_child == old_child);
4903 new_child->die_sib = new_child;
4904 }
4905 else
4906 {
4907 prev->die_sib = new_child;
4908 new_child->die_sib = old_child->die_sib;
4909 }
4910 if (old_child->die_parent->die_child == old_child)
4911 old_child->die_parent->die_child = new_child;
4912 }
4913
4914 /* Move all children from OLD_PARENT to NEW_PARENT. */
4915
4916 static void
4917 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
4918 {
4919 dw_die_ref c;
4920 new_parent->die_child = old_parent->die_child;
4921 old_parent->die_child = NULL;
4922 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
4923 }
4924
4925 /* Remove child DIE whose die_tag is TAG. Do nothing if no child
4926 matches TAG. */
4927
4928 static void
4929 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
4930 {
4931 dw_die_ref c;
4932
4933 c = die->die_child;
4934 if (c) do {
4935 dw_die_ref prev = c;
4936 c = c->die_sib;
4937 while (c->die_tag == tag)
4938 {
4939 remove_child_with_prev (c, prev);
4940 c->die_parent = NULL;
4941 /* Might have removed every child. */
4942 if (c == c->die_sib)
4943 return;
4944 c = c->die_sib;
4945 }
4946 } while (c != die->die_child);
4947 }
4948
4949 /* Add a CHILD_DIE as the last child of DIE. */
4950
4951 static void
4952 add_child_die (dw_die_ref die, dw_die_ref child_die)
4953 {
4954 /* FIXME this should probably be an assert. */
4955 if (! die || ! child_die)
4956 return;
4957 gcc_assert (die != child_die);
4958
4959 child_die->die_parent = die;
4960 if (die->die_child)
4961 {
4962 child_die->die_sib = die->die_child->die_sib;
4963 die->die_child->die_sib = child_die;
4964 }
4965 else
4966 child_die->die_sib = child_die;
4967 die->die_child = child_die;
4968 }
4969
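/* The children attached by add_child_die form a singly linked ring:
   DIE->die_child points at the most recently added (last) child, each
   child's die_sib points at the next child, and the last child's die_sib
   wraps around to the first.  After adding A, B and C to an empty DIE the
   links are roughly

       die->die_child = C,  A->die_sib = B,  B->die_sib = C,  C->die_sib = A

   which is why helpers such as remove_child_with_prev need the predecessor
   of the child being removed.  */
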
4970 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
4971
4972 static void
4973 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
4974 dw_die_ref after_die)
4975 {
4976 gcc_assert (die
4977 && child_die
4978 && after_die
4979 && die->die_child
4980 && die != child_die);
4981
4982 child_die->die_parent = die;
4983 child_die->die_sib = after_die->die_sib;
4984 after_die->die_sib = child_die;
4985 if (die->die_child == after_die)
4986 die->die_child = child_die;
4987 }
4988
4989 /* Unassociate CHILD from its parent, and make its parent be
4990 NEW_PARENT. */
4991
4992 static void
4993 reparent_child (dw_die_ref child, dw_die_ref new_parent)
4994 {
4995 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
4996 if (p->die_sib == child)
4997 {
4998 remove_child_with_prev (child, p);
4999 break;
5000 }
5001 add_child_die (new_parent, child);
5002 }
5003
5004 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5005 is the specification, to the end of PARENT's list of children.
5006 This is done by removing and re-adding it. */
5007
5008 static void
5009 splice_child_die (dw_die_ref parent, dw_die_ref child)
5010 {
5011 /* We want the declaration DIE from inside the class, not the
5012 specification DIE at toplevel. */
5013 if (child->die_parent != parent)
5014 {
5015 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5016
5017 if (tmp)
5018 child = tmp;
5019 }
5020
5021 gcc_assert (child->die_parent == parent
5022 || (child->die_parent
5023 == get_AT_ref (parent, DW_AT_specification)));
5024
5025 reparent_child (child, parent);
5026 }
5027
5028 /* Create and return a new die with a parent of PARENT_DIE. If
5029 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5030 associated tree T must be supplied to determine parenthood
5031 later. */
5032
5033 static inline dw_die_ref
5034 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5035 {
5036 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5037
5038 die->die_tag = tag_value;
5039
5040 if (parent_die != NULL)
5041 add_child_die (parent_die, die);
5042 else
5043 {
5044 limbo_die_node *limbo_node;
5045
5046 /* No DIEs created after early dwarf should end up in limbo,
5047 because the limbo list should not persist past LTO
5048 streaming. */
5049 if (tag_value != DW_TAG_compile_unit
5050 /* These are allowed because they're generated while
5051 breaking out COMDAT units late. */
5052 && tag_value != DW_TAG_type_unit
5053 && !early_dwarf
5054 /* Allow nested functions to live in limbo because they will
5055 only temporarily live there, as decls_for_scope will fix
5056 them up. */
5057 && (TREE_CODE (t) != FUNCTION_DECL
5058 || !decl_function_context (t))
5059 /* Same as nested functions above but for types. Types that
5060 are local to a function will be fixed in
5061 decls_for_scope. */
5062 && (!RECORD_OR_UNION_TYPE_P (t)
5063 || !TYPE_CONTEXT (t)
5064 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5065 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5066 especially in the ltrans stage, but once we implement LTO
5067 dwarf streaming, we should remove this exception. */
5068 && !in_lto_p)
5069 {
5070 fprintf (stderr, "symbol ended up in limbo too late:");
5071 debug_generic_stmt (t);
5072 gcc_unreachable ();
5073 }
5074
5075 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5076 limbo_node->die = die;
5077 limbo_node->created_for = t;
5078 limbo_node->next = limbo_die_list;
5079 limbo_die_list = limbo_node;
5080 }
5081
5082 return die;
5083 }
5084
5085 /* Return the DIE associated with the given type specifier. */
5086
5087 static inline dw_die_ref
5088 lookup_type_die (tree type)
5089 {
5090 return TYPE_SYMTAB_DIE (type);
5091 }
5092
5093 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5094 anonymous type named by the typedef TYPE_DIE, return the DIE of the
5095 anonymous type instead of the one for the naming typedef. */
5096
5097 static inline dw_die_ref
5098 strip_naming_typedef (tree type, dw_die_ref type_die)
5099 {
5100 if (type
5101 && TREE_CODE (type) == RECORD_TYPE
5102 && type_die
5103 && type_die->die_tag == DW_TAG_typedef
5104 && is_naming_typedef_decl (TYPE_NAME (type)))
5105 type_die = get_AT_ref (type_die, DW_AT_type);
5106 return type_die;
5107 }
5108
5109 /* Like lookup_type_die, but if type is an anonymous type named by a
5110 typedef[1], return the DIE of the anonymous type instead of the one
5111 for the naming typedef. This is because in gen_typedef_die we
5112 equated the anonymous struct named by the typedef with the DIE of
5113 the naming typedef. So by default, lookup_type_die on an anonymous
5114 struct yields the DIE of the naming typedef.
5115
5116 [1]: Read the comment of is_naming_typedef_decl to learn about what
5117 a naming typedef is. */
5118
5119 static inline dw_die_ref
5120 lookup_type_die_strip_naming_typedef (tree type)
5121 {
5122 dw_die_ref die = lookup_type_die (type);
5123 return strip_naming_typedef (type, die);
5124 }
5125
5126 /* Equate a DIE to a given type specifier. */
5127
5128 static inline void
5129 equate_type_number_to_die (tree type, dw_die_ref type_die)
5130 {
5131 TYPE_SYMTAB_DIE (type) = type_die;
5132 }
5133
5134 /* Returns a hash value for X (which really is a die_struct). */
5135
5136 inline hashval_t
5137 decl_die_hasher::hash (die_node *x)
5138 {
5139 return (hashval_t) x->decl_id;
5140 }
5141
5142 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5143
5144 inline bool
5145 decl_die_hasher::equal (die_node *x, tree y)
5146 {
5147 return (x->decl_id == DECL_UID (y));
5148 }
5149
5150 /* Return the DIE associated with a given declaration. */
5151
5152 static inline dw_die_ref
5153 lookup_decl_die (tree decl)
5154 {
5155 return decl_die_table->find_with_hash (decl, DECL_UID (decl));
5156 }
5157
5158 /* Returns a hash value for X (which really is a var_loc_list). */
5159
5160 inline hashval_t
5161 decl_loc_hasher::hash (var_loc_list *x)
5162 {
5163 return (hashval_t) x->decl_id;
5164 }
5165
5166 /* Return nonzero if decl_id of var_loc_list X is the same as
5167 UID of decl *Y. */
5168
5169 inline bool
5170 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
5171 {
5172 return (x->decl_id == DECL_UID (y));
5173 }
5174
5175 /* Return the var_loc list associated with a given declaration. */
5176
5177 static inline var_loc_list *
5178 lookup_decl_loc (const_tree decl)
5179 {
5180 if (!decl_loc_table)
5181 return NULL;
5182 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
5183 }
5184
5185 /* Returns a hash value for X (which really is a cached_dw_loc_list). */
5186
5187 inline hashval_t
5188 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
5189 {
5190 return (hashval_t) x->decl_id;
5191 }
5192
5193 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
5194 UID of decl *Y. */
5195
5196 inline bool
5197 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
5198 {
5199 return (x->decl_id == DECL_UID (y));
5200 }
5201
5202 /* Equate a DIE to a particular declaration. */
5203
5204 static void
5205 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
5206 {
5207 unsigned int decl_id = DECL_UID (decl);
5208
5209 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
5210 decl_die->decl_id = decl_id;
5211 }
5212
5213 /* Return how many bits PIECE EXPR_LIST covers. */
5214
5215 static HOST_WIDE_INT
5216 decl_piece_bitsize (rtx piece)
5217 {
5218 int ret = (int) GET_MODE (piece);
5219 if (ret)
5220 return ret;
5221 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
5222 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
5223 return INTVAL (XEXP (XEXP (piece, 0), 0));
5224 }
5225
5226 /* Return a pointer to the location note stored in PIECE EXPR_LIST. */
5227
5228 static rtx *
5229 decl_piece_varloc_ptr (rtx piece)
5230 {
5231 if ((int) GET_MODE (piece))
5232 return &XEXP (piece, 0);
5233 else
5234 return &XEXP (XEXP (piece, 0), 1);
5235 }
5236
5237 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
5238 Next is the chain of following piece nodes. */
5239
5240 static rtx_expr_list *
5241 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
5242 {
5243 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
5244 return alloc_EXPR_LIST (bitsize, loc_note, next);
5245 else
5246 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
5247 GEN_INT (bitsize),
5248 loc_note), next);
5249 }
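
/* Note on the encoding used by decl_piece_bitsize and decl_piece_node:
   a positive bitsize no larger than MAX_MACHINE_MODE is stashed directly
   in the EXPR_LIST's mode field; otherwise the piece is an EXPR_LIST with
   mode 0 whose operand is a CONCAT of (const_int bitsize) and the
   location note, which is what decl_piece_bitsize and
   decl_piece_varloc_ptr above decode.  */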
5250
5251 /* Return rtx that should be stored into loc field for
5252 LOC_NOTE and BITPOS/BITSIZE. */
5253
5254 static rtx
5255 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
5256 HOST_WIDE_INT bitsize)
5257 {
5258 if (bitsize != -1)
5259 {
5260 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
5261 if (bitpos != 0)
5262 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
5263 }
5264 return loc_note;
5265 }
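
/* For example, a location note that describes only bits 16..23 of a
   variable (BITPOS 16, BITSIZE 8) becomes a two piece list: a 16-bit
   piece with a NULL_RTX location for the uncovered low bits followed by
   an 8-bit piece holding LOC_NOTE.  A BITSIZE of -1 means the note
   covers the whole variable and LOC_NOTE is returned unchanged.  */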
5266
5267 /* This function either modifies location piece list *DEST in
5268 place (if SRC and INNER are NULL), or copies location piece list
5269 *SRC to *DEST while modifying it. Location BITPOS is modified
5270 to contain LOC_NOTE; any pieces overlapping it are removed (or,
5271 when copying, not copied) and if needed some padding around it
5272 is added. When modifying in place, DEST should point to the
5273 EXPR_LIST where earlier pieces cover PIECE_BITPOS bits; when
5274 copying, SRC points to the start of the whole list and INNER
5275 points to the EXPR_LIST where earlier pieces cover PIECE_BITPOS bits. */
5276
5277 static void
5278 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
5279 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
5280 HOST_WIDE_INT bitsize, rtx loc_note)
5281 {
5282 HOST_WIDE_INT diff;
5283 bool copy = inner != NULL;
5284
5285 if (copy)
5286 {
5287 /* First copy all nodes preceding the current bitpos. */
5288 while (src != inner)
5289 {
5290 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
5291 decl_piece_bitsize (*src), NULL_RTX);
5292 dest = &XEXP (*dest, 1);
5293 src = &XEXP (*src, 1);
5294 }
5295 }
5296 /* Add padding if needed. */
5297 if (bitpos != piece_bitpos)
5298 {
5299 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
5300 copy ? NULL_RTX : *dest);
5301 dest = &XEXP (*dest, 1);
5302 }
5303 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
5304 {
5305 gcc_assert (!copy);
5306 /* A piece with the correct bitpos and bitsize already exists;
5307 just update the location for it and return. */
5308 *decl_piece_varloc_ptr (*dest) = loc_note;
5309 return;
5310 }
5311 /* Add the piece that changed. */
5312 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
5313 dest = &XEXP (*dest, 1);
5314 /* Skip over pieces that overlap it. */
5315 diff = bitpos - piece_bitpos + bitsize;
5316 if (!copy)
5317 src = dest;
5318 while (diff > 0 && *src)
5319 {
5320 rtx piece = *src;
5321 diff -= decl_piece_bitsize (piece);
5322 if (copy)
5323 src = &XEXP (piece, 1);
5324 else
5325 {
5326 *src = XEXP (piece, 1);
5327 free_EXPR_LIST_node (piece);
5328 }
5329 }
5330 /* Add padding if needed. */
5331 if (diff < 0 && *src)
5332 {
5333 if (!copy)
5334 dest = src;
5335 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
5336 dest = &XEXP (*dest, 1);
5337 }
5338 if (!copy)
5339 return;
5340 /* Finally copy all nodes following it. */
5341 while (*src)
5342 {
5343 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
5344 decl_piece_bitsize (*src), NULL_RTX);
5345 dest = &XEXP (*dest, 1);
5346 src = &XEXP (*src, 1);
5347 }
5348 }
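
/* A worked example of the in-place mode (SRC and INNER NULL): if *DEST
   is a single 32-bit piece and a new note arrives for bits 8..15
   (BITPOS 8, PIECE_BITPOS 0, BITSIZE 8), the result is an 8-bit padding
   piece, the new 8-bit piece and, if any pieces follow, a 16-bit padding
   piece; the overlapping 32-bit piece is unlinked and freed.  */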
5349
5350 /* Add a variable location node to the linked list for DECL. */
5351
5352 static struct var_loc_node *
5353 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label)
5354 {
5355 unsigned int decl_id;
5356 var_loc_list *temp;
5357 struct var_loc_node *loc = NULL;
5358 HOST_WIDE_INT bitsize = -1, bitpos = -1;
5359
5360 if (TREE_CODE (decl) == VAR_DECL
5361 && DECL_HAS_DEBUG_EXPR_P (decl))
5362 {
5363 tree realdecl = DECL_DEBUG_EXPR (decl);
5364 if (handled_component_p (realdecl)
5365 || (TREE_CODE (realdecl) == MEM_REF
5366 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
5367 {
5368 HOST_WIDE_INT maxsize;
5369 bool reverse;
5370 tree innerdecl
5371 = get_ref_base_and_extent (realdecl, &bitpos, &bitsize, &maxsize,
5372 &reverse);
5373 if (!DECL_P (innerdecl)
5374 || DECL_IGNORED_P (innerdecl)
5375 || TREE_STATIC (innerdecl)
5376 || bitsize <= 0
5377 || bitpos + bitsize > 256
5378 || bitsize != maxsize)
5379 return NULL;
5380 decl = innerdecl;
5381 }
5382 }
5383
5384 decl_id = DECL_UID (decl);
5385 var_loc_list **slot
5386 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
5387 if (*slot == NULL)
5388 {
5389 temp = ggc_cleared_alloc<var_loc_list> ();
5390 temp->decl_id = decl_id;
5391 *slot = temp;
5392 }
5393 else
5394 temp = *slot;
5395
5396 /* For PARM_DECLs try to keep around the original incoming value,
5397 even if that means we'll emit a zero-range .debug_loc entry. */
5398 if (temp->last
5399 && temp->first == temp->last
5400 && TREE_CODE (decl) == PARM_DECL
5401 && NOTE_P (temp->first->loc)
5402 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
5403 && DECL_INCOMING_RTL (decl)
5404 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
5405 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
5406 == GET_CODE (DECL_INCOMING_RTL (decl))
5407 && prev_real_insn (temp->first->loc) == NULL_RTX
5408 && (bitsize != -1
5409 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
5410 NOTE_VAR_LOCATION_LOC (loc_note))
5411 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
5412 != NOTE_VAR_LOCATION_STATUS (loc_note))))
5413 {
5414 loc = ggc_cleared_alloc<var_loc_node> ();
5415 temp->first->next = loc;
5416 temp->last = loc;
5417 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
5418 }
5419 else if (temp->last)
5420 {
5421 struct var_loc_node *last = temp->last, *unused = NULL;
5422 rtx *piece_loc = NULL, last_loc_note;
5423 HOST_WIDE_INT piece_bitpos = 0;
5424 if (last->next)
5425 {
5426 last = last->next;
5427 gcc_assert (last->next == NULL);
5428 }
5429 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
5430 {
5431 piece_loc = &last->loc;
5432 do
5433 {
5434 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
5435 if (piece_bitpos + cur_bitsize > bitpos)
5436 break;
5437 piece_bitpos += cur_bitsize;
5438 piece_loc = &XEXP (*piece_loc, 1);
5439 }
5440 while (*piece_loc);
5441 }
5442 /* TEMP->LAST here is either a pointer to the last-but-one or to
5443 the last element in the chained list; LAST is a pointer to the
5444 last element. */
5445 if (label && strcmp (last->label, label) == 0)
5446 {
5447 /* For SRA optimized variables if there weren't any real
5448 insns since last note, just modify the last node. */
5449 if (piece_loc != NULL)
5450 {
5451 adjust_piece_list (piece_loc, NULL, NULL,
5452 bitpos, piece_bitpos, bitsize, loc_note);
5453 return NULL;
5454 }
5455 /* If the last note doesn't cover any instructions, remove it. */
5456 if (temp->last != last)
5457 {
5458 temp->last->next = NULL;
5459 unused = last;
5460 last = temp->last;
5461 gcc_assert (strcmp (last->label, label) != 0);
5462 }
5463 else
5464 {
5465 gcc_assert (temp->first == temp->last
5466 || (temp->first->next == temp->last
5467 && TREE_CODE (decl) == PARM_DECL));
5468 memset (temp->last, '\0', sizeof (*temp->last));
5469 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
5470 return temp->last;
5471 }
5472 }
5473 if (bitsize == -1 && NOTE_P (last->loc))
5474 last_loc_note = last->loc;
5475 else if (piece_loc != NULL
5476 && *piece_loc != NULL_RTX
5477 && piece_bitpos == bitpos
5478 && decl_piece_bitsize (*piece_loc) == bitsize)
5479 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
5480 else
5481 last_loc_note = NULL_RTX;
5482 /* If the current location is the same as the end of the list,
5483 and either both or neither of the locations is uninitialized,
5484 we have nothing to do. */
5485 if (last_loc_note == NULL_RTX
5486 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
5487 NOTE_VAR_LOCATION_LOC (loc_note)))
5488 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
5489 != NOTE_VAR_LOCATION_STATUS (loc_note))
5490 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
5491 == VAR_INIT_STATUS_UNINITIALIZED)
5492 || (NOTE_VAR_LOCATION_STATUS (loc_note)
5493 == VAR_INIT_STATUS_UNINITIALIZED))))
5494 {
5495 /* Add LOC to the end of list and update LAST. If the last
5496 element of the list has been removed above, reuse its
5497 memory for the new node, otherwise allocate a new one. */
5498 if (unused)
5499 {
5500 loc = unused;
5501 memset (loc, '\0', sizeof (*loc));
5502 }
5503 else
5504 loc = ggc_cleared_alloc<var_loc_node> ();
5505 if (bitsize == -1 || piece_loc == NULL)
5506 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
5507 else
5508 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
5509 bitpos, piece_bitpos, bitsize, loc_note);
5510 last->next = loc;
5511 /* Ensure TEMP->LAST will point either to the new last but one
5512 element of the chain, or to the last element in it. */
5513 if (last != temp->last)
5514 temp->last = last;
5515 }
5516 else if (unused)
5517 ggc_free (unused);
5518 }
5519 else
5520 {
5521 loc = ggc_cleared_alloc<var_loc_node> ();
5522 temp->first = loc;
5523 temp->last = loc;
5524 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
5525 }
5526 return loc;
5527 }
5528 \f
5529 /* Keep track of the number of spaces used to indent the
5530 output of the debugging routines that print the structure of
5531 the DIE internal representation. */
5532 static int print_indent;
5533
5534 /* Indent the line the number of spaces given by print_indent. */
5535
5536 static inline void
5537 print_spaces (FILE *outfile)
5538 {
5539 fprintf (outfile, "%*s", print_indent, "");
5540 }
5541
5542 /* Print a type signature in hex. */
5543
5544 static inline void
5545 print_signature (FILE *outfile, char *sig)
5546 {
5547 int i;
5548
5549 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
5550 fprintf (outfile, "%02x", sig[i] & 0xff);
5551 }
5552
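/* Print the discriminant value DISCR_VALUE to OUTFILE.  */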
5553 static inline void
5554 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
5555 {
5556 if (discr_value->pos)
5557 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
5558 else
5559 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
5560 }
5561
5562 static void print_loc_descr (dw_loc_descr_ref, FILE *);
5563
5564 /* Print the value associated with the VAL DWARF value node to OUTFILE. If
5565 RECURSE, output location descriptor operations. */
5566
5567 static void
5568 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
5569 {
5570 switch (val->val_class)
5571 {
5572 case dw_val_class_addr:
5573 fprintf (outfile, "address");
5574 break;
5575 case dw_val_class_offset:
5576 fprintf (outfile, "offset");
5577 break;
5578 case dw_val_class_loc:
5579 fprintf (outfile, "location descriptor");
5580 if (val->v.val_loc == NULL)
5581 fprintf (outfile, " -> <null>\n");
5582 else if (recurse)
5583 {
5584 fprintf (outfile, ":\n");
5585 print_indent += 4;
5586 print_loc_descr (val->v.val_loc, outfile);
5587 print_indent -= 4;
5588 }
5589 else
5590 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
5591 break;
5592 case dw_val_class_loc_list:
5593 fprintf (outfile, "location list -> label:%s",
5594 val->v.val_loc_list->ll_symbol);
5595 break;
5596 case dw_val_class_range_list:
5597 fprintf (outfile, "range list");
5598 break;
5599 case dw_val_class_const:
5600 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
5601 break;
5602 case dw_val_class_unsigned_const:
5603 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
5604 break;
5605 case dw_val_class_const_double:
5606 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
5607 HOST_WIDE_INT_PRINT_UNSIGNED")",
5608 val->v.val_double.high,
5609 val->v.val_double.low);
5610 break;
5611 case dw_val_class_wide_int:
5612 {
5613 int i = val->v.val_wide->get_len ();
5614 fprintf (outfile, "constant (");
5615 gcc_assert (i > 0);
5616 if (val->v.val_wide->elt (i - 1) == 0)
5617 fprintf (outfile, "0x");
5618 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
5619 val->v.val_wide->elt (--i));
5620 while (--i >= 0)
5621 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
5622 val->v.val_wide->elt (i));
5623 fprintf (outfile, ")");
5624 break;
5625 }
5626 case dw_val_class_vec:
5627 fprintf (outfile, "floating-point or vector constant");
5628 break;
5629 case dw_val_class_flag:
5630 fprintf (outfile, "%u", val->v.val_flag);
5631 break;
5632 case dw_val_class_die_ref:
5633 if (val->v.val_die_ref.die != NULL)
5634 {
5635 dw_die_ref die = val->v.val_die_ref.die;
5636
5637 if (die->comdat_type_p)
5638 {
5639 fprintf (outfile, "die -> signature: ");
5640 print_signature (outfile,
5641 die->die_id.die_type_node->signature);
5642 }
5643 else if (die->die_id.die_symbol)
5644 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
5645 else
5646 fprintf (outfile, "die -> %ld", die->die_offset);
5647 fprintf (outfile, " (%p)", (void *) die);
5648 }
5649 else
5650 fprintf (outfile, "die -> <null>");
5651 break;
5652 case dw_val_class_vms_delta:
5653 fprintf (outfile, "delta: @slotcount(%s-%s)",
5654 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
5655 break;
5656 case dw_val_class_lbl_id:
5657 case dw_val_class_lineptr:
5658 case dw_val_class_macptr:
5659 case dw_val_class_high_pc:
5660 fprintf (outfile, "label: %s", val->v.val_lbl_id);
5661 break;
5662 case dw_val_class_str:
5663 if (val->v.val_str->str != NULL)
5664 fprintf (outfile, "\"%s\"", val->v.val_str->str);
5665 else
5666 fprintf (outfile, "<null>");
5667 break;
5668 case dw_val_class_file:
5669 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
5670 val->v.val_file->emitted_number);
5671 break;
5672 case dw_val_class_data8:
5673 {
5674 int i;
5675
5676 for (i = 0; i < 8; i++)
5677 fprintf (outfile, "%02x", val->v.val_data8[i]);
5678 break;
5679 }
5680 case dw_val_class_discr_value:
5681 print_discr_value (outfile, &val->v.val_discr_value);
5682 break;
5683 case dw_val_class_discr_list:
5684 for (dw_discr_list_ref node = val->v.val_discr_list;
5685 node != NULL;
5686 node = node->dw_discr_next)
5687 {
5688 if (node->dw_discr_range)
5689 {
5690 print_discr_value (outfile, &node->dw_discr_lower_bound);
5691 fprintf (outfile, " .. ");
5692 print_discr_value (outfile, &node->dw_discr_upper_bound);
5693 }
5694 else
5695 print_discr_value (outfile, &node->dw_discr_lower_bound);
5696
5697 if (node->dw_discr_next != NULL)
5698 fprintf (outfile, " | ");
5699 }
5700 default:
5701 break;
5702 }
5703 }
5704
5705 /* Likewise, for a DIE attribute. */
5706
5707 static void
5708 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
5709 {
5710 print_dw_val (&a->dw_attr_val, recurse, outfile);
5711 }
5712
5713
5714 /* Print the list of operands in the LOC location description to OUTFILE. This
5715 routine is a debugging aid only. */
5716
5717 static void
5718 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
5719 {
5720 dw_loc_descr_ref l = loc;
5721
5722 if (loc == NULL)
5723 {
5724 print_spaces (outfile);
5725 fprintf (outfile, "<null>\n");
5726 return;
5727 }
5728
5729 for (l = loc; l != NULL; l = l->dw_loc_next)
5730 {
5731 print_spaces (outfile);
5732 fprintf (outfile, "(%p) %s",
5733 (void *) l,
5734 dwarf_stack_op_name (l->dw_loc_opc));
5735 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
5736 {
5737 fprintf (outfile, " ");
5738 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
5739 }
5740 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
5741 {
5742 fprintf (outfile, ", ");
5743 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
5744 }
5745 fprintf (outfile, "\n");
5746 }
5747 }
5748
5749 /* Print the information associated with a given DIE, and its children.
5750 This routine is a debugging aid only. */
5751
5752 static void
5753 print_die (dw_die_ref die, FILE *outfile)
5754 {
5755 dw_attr_node *a;
5756 dw_die_ref c;
5757 unsigned ix;
5758
5759 print_spaces (outfile);
5760 fprintf (outfile, "DIE %4ld: %s (%p)\n",
5761 die->die_offset, dwarf_tag_name (die->die_tag),
5762 (void*) die);
5763 print_spaces (outfile);
5764 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
5765 fprintf (outfile, " offset: %ld", die->die_offset);
5766 fprintf (outfile, " mark: %d\n", die->die_mark);
5767
5768 if (die->comdat_type_p)
5769 {
5770 print_spaces (outfile);
5771 fprintf (outfile, " signature: ");
5772 print_signature (outfile, die->die_id.die_type_node->signature);
5773 fprintf (outfile, "\n");
5774 }
5775
5776 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5777 {
5778 print_spaces (outfile);
5779 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
5780
5781 print_attribute (a, true, outfile);
5782 fprintf (outfile, "\n");
5783 }
5784
5785 if (die->die_child != NULL)
5786 {
5787 print_indent += 4;
5788 FOR_EACH_CHILD (die, c, print_die (c, outfile));
5789 print_indent -= 4;
5790 }
5791 if (print_indent == 0)
5792 fprintf (outfile, "\n");
5793 }
5794
5795 /* Print the list of operations in the LOC location description. */
5796
5797 DEBUG_FUNCTION void
5798 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
5799 {
5800 print_loc_descr (loc, stderr);
5801 }
5802
5803 /* Print the information collected for a given DIE. */
5804
5805 DEBUG_FUNCTION void
5806 debug_dwarf_die (dw_die_ref die)
5807 {
5808 print_die (die, stderr);
5809 }
5810
5811 DEBUG_FUNCTION void
5812 debug (die_struct &ref)
5813 {
5814 print_die (&ref, stderr);
5815 }
5816
5817 DEBUG_FUNCTION void
5818 debug (die_struct *ptr)
5819 {
5820 if (ptr)
5821 debug (*ptr);
5822 else
5823 fprintf (stderr, "<nil>\n");
5824 }
5825
5826
5827 /* Print all DWARF information collected for the compilation unit.
5828 This routine is a debugging aid only. */
5829
5830 DEBUG_FUNCTION void
5831 debug_dwarf (void)
5832 {
5833 print_indent = 0;
5834 print_die (comp_unit_die (), stderr);
5835 }
5836
5837 /* Sanity checks on DIEs. */
5838
5839 static void
5840 check_die (dw_die_ref die)
5841 {
5842 unsigned ix;
5843 dw_attr_node *a;
5844 bool inline_found = false;
5845 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
5846 int n_decl_line = 0, n_decl_file = 0;
5847 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5848 {
5849 switch (a->dw_attr)
5850 {
5851 case DW_AT_inline:
5852 if (a->dw_attr_val.v.val_unsigned)
5853 inline_found = true;
5854 break;
5855 case DW_AT_location:
5856 ++n_location;
5857 break;
5858 case DW_AT_low_pc:
5859 ++n_low_pc;
5860 break;
5861 case DW_AT_high_pc:
5862 ++n_high_pc;
5863 break;
5864 case DW_AT_artificial:
5865 ++n_artificial;
5866 break;
5867 case DW_AT_decl_line:
5868 ++n_decl_line;
5869 break;
5870 case DW_AT_decl_file:
5871 ++n_decl_file;
5872 break;
5873 default:
5874 break;
5875 }
5876 }
5877 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
5878 || n_decl_line > 1 || n_decl_file > 1)
5879 {
5880 fprintf (stderr, "Duplicate attributes in DIE:\n");
5881 debug_dwarf_die (die);
5882 gcc_unreachable ();
5883 }
5884 if (inline_found)
5885 {
5886 /* A debugging information entry that is a member of an abstract
5887 instance tree [that has DW_AT_inline] should not contain any
5888 attributes which describe aspects of the subroutine which vary
5889 between distinct inlined expansions or distinct out-of-line
5890 expansions. */
5891 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5892 gcc_assert (a->dw_attr != DW_AT_low_pc
5893 && a->dw_attr != DW_AT_high_pc
5894 && a->dw_attr != DW_AT_location
5895 && a->dw_attr != DW_AT_frame_base
5896 && a->dw_attr != DW_AT_GNU_all_call_sites);
5897 }
5898 }
5899 \f
5900 /* Start a new compilation unit DIE for an include file. OLD_UNIT is the CU
5901 for the enclosing include file, if any. BINCL_DIE is the DW_TAG_GNU_BINCL
5902 DIE that marks the start of the DIEs for this include file. */
5903
5904 static dw_die_ref
5905 push_new_compile_unit (dw_die_ref old_unit, dw_die_ref bincl_die)
5906 {
5907 const char *filename = get_AT_string (bincl_die, DW_AT_name);
5908 dw_die_ref new_unit = gen_compile_unit_die (filename);
5909
5910 new_unit->die_sib = old_unit;
5911 return new_unit;
5912 }
5913
5914 /* Close an include-file CU and reopen the enclosing one. */
5915
5916 static dw_die_ref
5917 pop_compile_unit (dw_die_ref old_unit)
5918 {
5919 dw_die_ref new_unit = old_unit->die_sib;
5920
5921 old_unit->die_sib = NULL;
5922 return new_unit;
5923 }
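
/* As an illustration, for a unit whose DW_AT_name is "foo.c" the prefix
   is built from the basename plus a trailing dot, run through
   clean_symbol_name, and then the first four checksum bytes are appended
   as hex digits, yielding something like "foo_c_1a2b3c4d".  */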
5924
5925 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
5926 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
5927 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
5928
5929 /* Calculate the checksum of a location expression. */
5930
5931 static inline void
5932 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
5933 {
5934 int tem;
5935 inchash::hash hstate;
5936 hashval_t hash;
5937
5938 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
5939 CHECKSUM (tem);
5940 hash_loc_operands (loc, hstate);
5941 hash = hstate.end();
5942 CHECKSUM (hash);
5943 }
5944
5945 /* Calculate the checksum of an attribute. */
5946
5947 static void
5948 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
5949 {
5950 dw_loc_descr_ref loc;
5951 rtx r;
5952
5953 CHECKSUM (at->dw_attr);
5954
5955 /* We don't care that this was compiled with a different compiler
5956 snapshot; if the output is the same, that's what matters. */
5957 if (at->dw_attr == DW_AT_producer)
5958 return;
5959
5960 switch (AT_class (at))
5961 {
5962 case dw_val_class_const:
5963 CHECKSUM (at->dw_attr_val.v.val_int);
5964 break;
5965 case dw_val_class_unsigned_const:
5966 CHECKSUM (at->dw_attr_val.v.val_unsigned);
5967 break;
5968 case dw_val_class_const_double:
5969 CHECKSUM (at->dw_attr_val.v.val_double);
5970 break;
5971 case dw_val_class_wide_int:
5972 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
5973 get_full_len (*at->dw_attr_val.v.val_wide)
5974 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
5975 break;
5976 case dw_val_class_vec:
5977 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
5978 (at->dw_attr_val.v.val_vec.length
5979 * at->dw_attr_val.v.val_vec.elt_size));
5980 break;
5981 case dw_val_class_flag:
5982 CHECKSUM (at->dw_attr_val.v.val_flag);
5983 break;
5984 case dw_val_class_str:
5985 CHECKSUM_STRING (AT_string (at));
5986 break;
5987
5988 case dw_val_class_addr:
5989 r = AT_addr (at);
5990 gcc_assert (GET_CODE (r) == SYMBOL_REF);
5991 CHECKSUM_STRING (XSTR (r, 0));
5992 break;
5993
5994 case dw_val_class_offset:
5995 CHECKSUM (at->dw_attr_val.v.val_offset);
5996 break;
5997
5998 case dw_val_class_loc:
5999 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6000 loc_checksum (loc, ctx);
6001 break;
6002
6003 case dw_val_class_die_ref:
6004 die_checksum (AT_ref (at), ctx, mark);
6005 break;
6006
6007 case dw_val_class_fde_ref:
6008 case dw_val_class_vms_delta:
6009 case dw_val_class_lbl_id:
6010 case dw_val_class_lineptr:
6011 case dw_val_class_macptr:
6012 case dw_val_class_high_pc:
6013 break;
6014
6015 case dw_val_class_file:
6016 CHECKSUM_STRING (AT_file (at)->filename);
6017 break;
6018
6019 case dw_val_class_data8:
6020 CHECKSUM (at->dw_attr_val.v.val_data8);
6021 break;
6022
6023 default:
6024 break;
6025 }
6026 }
6027
6028 /* Calculate the checksum of a DIE. */
6029
6030 static void
6031 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6032 {
6033 dw_die_ref c;
6034 dw_attr_node *a;
6035 unsigned ix;
6036
6037 /* To avoid infinite recursion. */
6038 if (die->die_mark)
6039 {
6040 CHECKSUM (die->die_mark);
6041 return;
6042 }
6043 die->die_mark = ++(*mark);
6044
6045 CHECKSUM (die->die_tag);
6046
6047 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6048 attr_checksum (a, ctx, mark);
6049
6050 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6051 }
6052
6053 #undef CHECKSUM
6054 #undef CHECKSUM_BLOCK
6055 #undef CHECKSUM_STRING
6056
6057 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6058 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6059 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6060 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
6061 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
6062 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
6063 #define CHECKSUM_ATTR(FOO) \
6064 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
6065
6066 /* Calculate the checksum of a number in signed LEB128 format. */
6067
6068 static void
6069 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
6070 {
6071 unsigned char byte;
6072 bool more;
6073
6074 while (1)
6075 {
6076 byte = (value & 0x7f);
6077 value >>= 7;
6078 more = !((value == 0 && (byte & 0x40) == 0)
6079 || (value == -1 && (byte & 0x40) != 0));
6080 if (more)
6081 byte |= 0x80;
6082 CHECKSUM (byte);
6083 if (!more)
6084 break;
6085 }
6086 }
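
/* The encoding matches standard signed LEB128: for example, 2 checksums
   as the single byte 0x02, -2 as 0x7e, and 127 as the two bytes
   0xff 0x00.  */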
6087
6088 /* Calculate the checksum of a number in unsigned LEB128 format. */
6089
6090 static void
6091 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
6092 {
6093 while (1)
6094 {
6095 unsigned char byte = (value & 0x7f);
6096 value >>= 7;
6097 if (value != 0)
6098 /* More bytes to follow. */
6099 byte |= 0x80;
6100 CHECKSUM (byte);
6101 if (value == 0)
6102 break;
6103 }
6104 }
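
/* Likewise standard unsigned LEB128: for example, 2 is the single byte
   0x02, 127 is 0x7f, and 128 is the two bytes 0x80 0x01.  */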
6105
6106 /* Checksum the context of the DIE. This adds the names of any
6107 surrounding namespaces or structures to the checksum. */
6108
6109 static void
6110 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
6111 {
6112 const char *name;
6113 dw_die_ref spec;
6114 int tag = die->die_tag;
6115
6116 if (tag != DW_TAG_namespace
6117 && tag != DW_TAG_structure_type
6118 && tag != DW_TAG_class_type)
6119 return;
6120
6121 name = get_AT_string (die, DW_AT_name);
6122
6123 spec = get_AT_ref (die, DW_AT_specification);
6124 if (spec != NULL)
6125 die = spec;
6126
6127 if (die->die_parent != NULL)
6128 checksum_die_context (die->die_parent, ctx);
6129
6130 CHECKSUM_ULEB128 ('C');
6131 CHECKSUM_ULEB128 (tag);
6132 if (name != NULL)
6133 CHECKSUM_STRING (name);
6134 }
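
/* For example, a type nested as N::Outer::T has its context checksummed
   outermost first: 'C', DW_TAG_namespace, "N", then 'C',
   DW_TAG_structure_type, "Outer", so nesting the same type differently
   yields a different signature.  */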
6135
6136 /* Calculate the checksum of a location expression. */
6137
6138 static inline void
6139 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6140 {
6141 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
6142 were emitted as a DW_FORM_sdata instead of a location expression. */
6143 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
6144 {
6145 CHECKSUM_ULEB128 (DW_FORM_sdata);
6146 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
6147 return;
6148 }
6149
6150 /* Otherwise, just checksum the raw location expression. */
6151 while (loc != NULL)
6152 {
6153 inchash::hash hstate;
6154 hashval_t hash;
6155
6156 CHECKSUM_ULEB128 (loc->dtprel);
6157 CHECKSUM_ULEB128 (loc->dw_loc_opc);
6158 hash_loc_operands (loc, hstate);
6159 hash = hstate.end ();
6160 CHECKSUM (hash);
6161 loc = loc->dw_loc_next;
6162 }
6163 }
6164
6165 /* Calculate the checksum of an attribute. */
6166
6167 static void
6168 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
6169 struct md5_ctx *ctx, int *mark)
6170 {
6171 dw_loc_descr_ref loc;
6172 rtx r;
6173
6174 if (AT_class (at) == dw_val_class_die_ref)
6175 {
6176 dw_die_ref target_die = AT_ref (at);
6177
6178 /* For pointer and reference types, we checksum only the (qualified)
6179 name of the target type (if there is a name). For friend entries,
6180 we checksum only the (qualified) name of the target type or function.
6181 This allows the checksum to remain the same whether the target type
6182 is complete or not. */
6183 if ((at->dw_attr == DW_AT_type
6184 && (tag == DW_TAG_pointer_type
6185 || tag == DW_TAG_reference_type
6186 || tag == DW_TAG_rvalue_reference_type
6187 || tag == DW_TAG_ptr_to_member_type))
6188 || (at->dw_attr == DW_AT_friend
6189 && tag == DW_TAG_friend))
6190 {
6191 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
6192
6193 if (name_attr != NULL)
6194 {
6195 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
6196
6197 if (decl == NULL)
6198 decl = target_die;
6199 CHECKSUM_ULEB128 ('N');
6200 CHECKSUM_ULEB128 (at->dw_attr);
6201 if (decl->die_parent != NULL)
6202 checksum_die_context (decl->die_parent, ctx);
6203 CHECKSUM_ULEB128 ('E');
6204 CHECKSUM_STRING (AT_string (name_attr));
6205 return;
6206 }
6207 }
6208
6209 /* For all other references to another DIE, we check to see if the
6210 target DIE has already been visited. If it has, we emit a
6211 backward reference; if not, we descend recursively. */
6212 if (target_die->die_mark > 0)
6213 {
6214 CHECKSUM_ULEB128 ('R');
6215 CHECKSUM_ULEB128 (at->dw_attr);
6216 CHECKSUM_ULEB128 (target_die->die_mark);
6217 }
6218 else
6219 {
6220 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
6221
6222 if (decl == NULL)
6223 decl = target_die;
6224 target_die->die_mark = ++(*mark);
6225 CHECKSUM_ULEB128 ('T');
6226 CHECKSUM_ULEB128 (at->dw_attr);
6227 if (decl->die_parent != NULL)
6228 checksum_die_context (decl->die_parent, ctx);
6229 die_checksum_ordered (target_die, ctx, mark);
6230 }
6231 return;
6232 }
6233
6234 CHECKSUM_ULEB128 ('A');
6235 CHECKSUM_ULEB128 (at->dw_attr);
6236
6237 switch (AT_class (at))
6238 {
6239 case dw_val_class_const:
6240 CHECKSUM_ULEB128 (DW_FORM_sdata);
6241 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
6242 break;
6243
6244 case dw_val_class_unsigned_const:
6245 CHECKSUM_ULEB128 (DW_FORM_sdata);
6246 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
6247 break;
6248
6249 case dw_val_class_const_double:
6250 CHECKSUM_ULEB128 (DW_FORM_block);
6251 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
6252 CHECKSUM (at->dw_attr_val.v.val_double);
6253 break;
6254
6255 case dw_val_class_wide_int:
6256 CHECKSUM_ULEB128 (DW_FORM_block);
6257 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
6258 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
6259 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6260 get_full_len (*at->dw_attr_val.v.val_wide)
6261 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6262 break;
6263
6264 case dw_val_class_vec:
6265 CHECKSUM_ULEB128 (DW_FORM_block);
6266 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
6267 * at->dw_attr_val.v.val_vec.elt_size);
6268 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6269 (at->dw_attr_val.v.val_vec.length
6270 * at->dw_attr_val.v.val_vec.elt_size));
6271 break;
6272
6273 case dw_val_class_flag:
6274 CHECKSUM_ULEB128 (DW_FORM_flag);
6275 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
6276 break;
6277
6278 case dw_val_class_str:
6279 CHECKSUM_ULEB128 (DW_FORM_string);
6280 CHECKSUM_STRING (AT_string (at));
6281 break;
6282
6283 case dw_val_class_addr:
6284 r = AT_addr (at);
6285 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6286 CHECKSUM_ULEB128 (DW_FORM_string);
6287 CHECKSUM_STRING (XSTR (r, 0));
6288 break;
6289
6290 case dw_val_class_offset:
6291 CHECKSUM_ULEB128 (DW_FORM_sdata);
6292 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
6293 break;
6294
6295 case dw_val_class_loc:
6296 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6297 loc_checksum_ordered (loc, ctx);
6298 break;
6299
6300 case dw_val_class_fde_ref:
6301 case dw_val_class_lbl_id:
6302 case dw_val_class_lineptr:
6303 case dw_val_class_macptr:
6304 case dw_val_class_high_pc:
6305 break;
6306
6307 case dw_val_class_file:
6308 CHECKSUM_ULEB128 (DW_FORM_string);
6309 CHECKSUM_STRING (AT_file (at)->filename);
6310 break;
6311
6312 case dw_val_class_data8:
6313 CHECKSUM (at->dw_attr_val.v.val_data8);
6314 break;
6315
6316 default:
6317 break;
6318 }
6319 }
6320
6321 struct checksum_attributes
6322 {
6323 dw_attr_node *at_name;
6324 dw_attr_node *at_type;
6325 dw_attr_node *at_friend;
6326 dw_attr_node *at_accessibility;
6327 dw_attr_node *at_address_class;
6328 dw_attr_node *at_allocated;
6329 dw_attr_node *at_artificial;
6330 dw_attr_node *at_associated;
6331 dw_attr_node *at_binary_scale;
6332 dw_attr_node *at_bit_offset;
6333 dw_attr_node *at_bit_size;
6334 dw_attr_node *at_bit_stride;
6335 dw_attr_node *at_byte_size;
6336 dw_attr_node *at_byte_stride;
6337 dw_attr_node *at_const_value;
6338 dw_attr_node *at_containing_type;
6339 dw_attr_node *at_count;
6340 dw_attr_node *at_data_location;
6341 dw_attr_node *at_data_member_location;
6342 dw_attr_node *at_decimal_scale;
6343 dw_attr_node *at_decimal_sign;
6344 dw_attr_node *at_default_value;
6345 dw_attr_node *at_digit_count;
6346 dw_attr_node *at_discr;
6347 dw_attr_node *at_discr_list;
6348 dw_attr_node *at_discr_value;
6349 dw_attr_node *at_encoding;
6350 dw_attr_node *at_endianity;
6351 dw_attr_node *at_explicit;
6352 dw_attr_node *at_is_optional;
6353 dw_attr_node *at_location;
6354 dw_attr_node *at_lower_bound;
6355 dw_attr_node *at_mutable;
6356 dw_attr_node *at_ordering;
6357 dw_attr_node *at_picture_string;
6358 dw_attr_node *at_prototyped;
6359 dw_attr_node *at_small;
6360 dw_attr_node *at_segment;
6361 dw_attr_node *at_string_length;
6362 dw_attr_node *at_threads_scaled;
6363 dw_attr_node *at_upper_bound;
6364 dw_attr_node *at_use_location;
6365 dw_attr_node *at_use_UTF8;
6366 dw_attr_node *at_variable_parameter;
6367 dw_attr_node *at_virtuality;
6368 dw_attr_node *at_visibility;
6369 dw_attr_node *at_vtable_elem_location;
6370 };
6371
6372 /* Collect the attributes that we will want to use for the checksum. */
6373
6374 static void
6375 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
6376 {
6377 dw_attr_node *a;
6378 unsigned ix;
6379
6380 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6381 {
6382 switch (a->dw_attr)
6383 {
6384 case DW_AT_name:
6385 attrs->at_name = a;
6386 break;
6387 case DW_AT_type:
6388 attrs->at_type = a;
6389 break;
6390 case DW_AT_friend:
6391 attrs->at_friend = a;
6392 break;
6393 case DW_AT_accessibility:
6394 attrs->at_accessibility = a;
6395 break;
6396 case DW_AT_address_class:
6397 attrs->at_address_class = a;
6398 break;
6399 case DW_AT_allocated:
6400 attrs->at_allocated = a;
6401 break;
6402 case DW_AT_artificial:
6403 attrs->at_artificial = a;
6404 break;
6405 case DW_AT_associated:
6406 attrs->at_associated = a;
6407 break;
6408 case DW_AT_binary_scale:
6409 attrs->at_binary_scale = a;
6410 break;
6411 case DW_AT_bit_offset:
6412 attrs->at_bit_offset = a;
6413 break;
6414 case DW_AT_bit_size:
6415 attrs->at_bit_size = a;
6416 break;
6417 case DW_AT_bit_stride:
6418 attrs->at_bit_stride = a;
6419 break;
6420 case DW_AT_byte_size:
6421 attrs->at_byte_size = a;
6422 break;
6423 case DW_AT_byte_stride:
6424 attrs->at_byte_stride = a;
6425 break;
6426 case DW_AT_const_value:
6427 attrs->at_const_value = a;
6428 break;
6429 case DW_AT_containing_type:
6430 attrs->at_containing_type = a;
6431 break;
6432 case DW_AT_count:
6433 attrs->at_count = a;
6434 break;
6435 case DW_AT_data_location:
6436 attrs->at_data_location = a;
6437 break;
6438 case DW_AT_data_member_location:
6439 attrs->at_data_member_location = a;
6440 break;
6441 case DW_AT_decimal_scale:
6442 attrs->at_decimal_scale = a;
6443 break;
6444 case DW_AT_decimal_sign:
6445 attrs->at_decimal_sign = a;
6446 break;
6447 case DW_AT_default_value:
6448 attrs->at_default_value = a;
6449 break;
6450 case DW_AT_digit_count:
6451 attrs->at_digit_count = a;
6452 break;
6453 case DW_AT_discr:
6454 attrs->at_discr = a;
6455 break;
6456 case DW_AT_discr_list:
6457 attrs->at_discr_list = a;
6458 break;
6459 case DW_AT_discr_value:
6460 attrs->at_discr_value = a;
6461 break;
6462 case DW_AT_encoding:
6463 attrs->at_encoding = a;
6464 break;
6465 case DW_AT_endianity:
6466 attrs->at_endianity = a;
6467 break;
6468 case DW_AT_explicit:
6469 attrs->at_explicit = a;
6470 break;
6471 case DW_AT_is_optional:
6472 attrs->at_is_optional = a;
6473 break;
6474 case DW_AT_location:
6475 attrs->at_location = a;
6476 break;
6477 case DW_AT_lower_bound:
6478 attrs->at_lower_bound = a;
6479 break;
6480 case DW_AT_mutable:
6481 attrs->at_mutable = a;
6482 break;
6483 case DW_AT_ordering:
6484 attrs->at_ordering = a;
6485 break;
6486 case DW_AT_picture_string:
6487 attrs->at_picture_string = a;
6488 break;
6489 case DW_AT_prototyped:
6490 attrs->at_prototyped = a;
6491 break;
6492 case DW_AT_small:
6493 attrs->at_small = a;
6494 break;
6495 case DW_AT_segment:
6496 attrs->at_segment = a;
6497 break;
6498 case DW_AT_string_length:
6499 attrs->at_string_length = a;
6500 break;
6501 case DW_AT_threads_scaled:
6502 attrs->at_threads_scaled = a;
6503 break;
6504 case DW_AT_upper_bound:
6505 attrs->at_upper_bound = a;
6506 break;
6507 case DW_AT_use_location:
6508 attrs->at_use_location = a;
6509 break;
6510 case DW_AT_use_UTF8:
6511 attrs->at_use_UTF8 = a;
6512 break;
6513 case DW_AT_variable_parameter:
6514 attrs->at_variable_parameter = a;
6515 break;
6516 case DW_AT_virtuality:
6517 attrs->at_virtuality = a;
6518 break;
6519 case DW_AT_visibility:
6520 attrs->at_visibility = a;
6521 break;
6522 case DW_AT_vtable_elem_location:
6523 attrs->at_vtable_elem_location = a;
6524 break;
6525 default:
6526 break;
6527 }
6528 }
6529 }
6530
6531 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
6532
6533 static void
6534 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6535 {
6536 dw_die_ref c;
6537 dw_die_ref decl;
6538 struct checksum_attributes attrs;
6539
6540 CHECKSUM_ULEB128 ('D');
6541 CHECKSUM_ULEB128 (die->die_tag);
6542
6543 memset (&attrs, 0, sizeof (attrs));
6544
6545 decl = get_AT_ref (die, DW_AT_specification);
6546 if (decl != NULL)
6547 collect_checksum_attributes (&attrs, decl);
6548 collect_checksum_attributes (&attrs, die);
6549
6550 CHECKSUM_ATTR (attrs.at_name);
6551 CHECKSUM_ATTR (attrs.at_accessibility);
6552 CHECKSUM_ATTR (attrs.at_address_class);
6553 CHECKSUM_ATTR (attrs.at_allocated);
6554 CHECKSUM_ATTR (attrs.at_artificial);
6555 CHECKSUM_ATTR (attrs.at_associated);
6556 CHECKSUM_ATTR (attrs.at_binary_scale);
6557 CHECKSUM_ATTR (attrs.at_bit_offset);
6558 CHECKSUM_ATTR (attrs.at_bit_size);
6559 CHECKSUM_ATTR (attrs.at_bit_stride);
6560 CHECKSUM_ATTR (attrs.at_byte_size);
6561 CHECKSUM_ATTR (attrs.at_byte_stride);
6562 CHECKSUM_ATTR (attrs.at_const_value);
6563 CHECKSUM_ATTR (attrs.at_containing_type);
6564 CHECKSUM_ATTR (attrs.at_count);
6565 CHECKSUM_ATTR (attrs.at_data_location);
6566 CHECKSUM_ATTR (attrs.at_data_member_location);
6567 CHECKSUM_ATTR (attrs.at_decimal_scale);
6568 CHECKSUM_ATTR (attrs.at_decimal_sign);
6569 CHECKSUM_ATTR (attrs.at_default_value);
6570 CHECKSUM_ATTR (attrs.at_digit_count);
6571 CHECKSUM_ATTR (attrs.at_discr);
6572 CHECKSUM_ATTR (attrs.at_discr_list);
6573 CHECKSUM_ATTR (attrs.at_discr_value);
6574 CHECKSUM_ATTR (attrs.at_encoding);
6575 CHECKSUM_ATTR (attrs.at_endianity);
6576 CHECKSUM_ATTR (attrs.at_explicit);
6577 CHECKSUM_ATTR (attrs.at_is_optional);
6578 CHECKSUM_ATTR (attrs.at_location);
6579 CHECKSUM_ATTR (attrs.at_lower_bound);
6580 CHECKSUM_ATTR (attrs.at_mutable);
6581 CHECKSUM_ATTR (attrs.at_ordering);
6582 CHECKSUM_ATTR (attrs.at_picture_string);
6583 CHECKSUM_ATTR (attrs.at_prototyped);
6584 CHECKSUM_ATTR (attrs.at_small);
6585 CHECKSUM_ATTR (attrs.at_segment);
6586 CHECKSUM_ATTR (attrs.at_string_length);
6587 CHECKSUM_ATTR (attrs.at_threads_scaled);
6588 CHECKSUM_ATTR (attrs.at_upper_bound);
6589 CHECKSUM_ATTR (attrs.at_use_location);
6590 CHECKSUM_ATTR (attrs.at_use_UTF8);
6591 CHECKSUM_ATTR (attrs.at_variable_parameter);
6592 CHECKSUM_ATTR (attrs.at_virtuality);
6593 CHECKSUM_ATTR (attrs.at_visibility);
6594 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
6595 CHECKSUM_ATTR (attrs.at_type);
6596 CHECKSUM_ATTR (attrs.at_friend);
6597
6598 /* Checksum the child DIEs. */
6599 c = die->die_child;
6600 if (c) do {
6601 dw_attr_node *name_attr;
6602
6603 c = c->die_sib;
6604 name_attr = get_AT (c, DW_AT_name);
6605 if (is_template_instantiation (c))
6606 {
6607 /* Ignore instantiations of member type and function templates. */
6608 }
6609 else if (name_attr != NULL
6610 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
6611 {
6612 /* Use a shallow checksum for named nested types and member
6613 functions. */
6614 CHECKSUM_ULEB128 ('S');
6615 CHECKSUM_ULEB128 (c->die_tag);
6616 CHECKSUM_STRING (AT_string (name_attr));
6617 }
6618 else
6619 {
6620 /* Use a deep checksum for other children. */
6621 /* Mark this DIE so it gets processed when unmarking. */
6622 if (c->die_mark == 0)
6623 c->die_mark = -1;
6624 die_checksum_ordered (c, ctx, mark);
6625 }
6626 } while (c != die->die_child);
6627
6628 CHECKSUM_ULEB128 (0);
6629 }
6630
6631 /* Add a type name and tag to a hash. */
6632 static void
6633 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
6634 {
6635 CHECKSUM_ULEB128 (tag);
6636 CHECKSUM_STRING (name);
6637 }
6638
6639 #undef CHECKSUM
6640 #undef CHECKSUM_STRING
6641 #undef CHECKSUM_ATTR
6642 #undef CHECKSUM_LEB128
6643 #undef CHECKSUM_ULEB128
6644
6645 /* Generate the type signature for DIE. This is computed by generating an
6646 MD5 checksum over the DIE's tag, its relevant attributes, and its
6647 children. Attributes that are references to other DIEs are processed
6648 by recursion, using the MARK field to prevent infinite recursion.
6649 If the DIE is nested inside a namespace or another type, we also
6650 need to include that context in the signature. The lower 64 bits
6651 of the resulting MD5 checksum comprise the signature. */
6652
6653 static void
6654 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
6655 {
6656 int mark;
6657 const char *name;
6658 unsigned char checksum[16];
6659 struct md5_ctx ctx;
6660 dw_die_ref decl;
6661 dw_die_ref parent;
6662
6663 name = get_AT_string (die, DW_AT_name);
6664 decl = get_AT_ref (die, DW_AT_specification);
6665 parent = get_die_parent (die);
6666
6667 /* First, compute a signature for just the type name (and its surrounding
6668 context, if any). This is stored in the type unit DIE for link-time
6669 ODR (one-definition rule) checking. */
6670
6671 if (is_cxx () && name != NULL)
6672 {
6673 md5_init_ctx (&ctx);
6674
6675 /* Checksum the names of surrounding namespaces and structures. */
6676 if (parent != NULL)
6677 checksum_die_context (parent, &ctx);
6678
6679 /* Checksum the current DIE. */
6680 die_odr_checksum (die->die_tag, name, &ctx);
6681 md5_finish_ctx (&ctx, checksum);
6682
6683 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
6684 }
6685
6686 /* Next, compute the complete type signature. */
6687
6688 md5_init_ctx (&ctx);
6689 mark = 1;
6690 die->die_mark = mark;
6691
6692 /* Checksum the names of surrounding namespaces and structures. */
6693 if (parent != NULL)
6694 checksum_die_context (parent, &ctx);
6695
6696 /* Checksum the DIE and its children. */
6697 die_checksum_ordered (die, &ctx, &mark);
6698 unmark_all_dies (die);
6699 md5_finish_ctx (&ctx, checksum);
6700
6701 /* Store the signature in the type node and link the type DIE and the
6702 type node together. */
6703 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
6704 DWARF_TYPE_SIGNATURE_SIZE);
6705 die->comdat_type_p = true;
6706 die->die_id.die_type_node = type_node;
6707 type_node->type_die = die;
6708
6709 /* If the DIE is a specification, link its declaration to the type node
6710 as well. */
6711 if (decl != NULL)
6712 {
6713 decl->comdat_type_p = true;
6714 decl->die_id.die_type_node = type_node;
6715 }
6716 }
6717
6718 /* Do the location expressions look the same? */
6719 static inline int
6720 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
6721 {
6722 return loc1->dw_loc_opc == loc2->dw_loc_opc
6723 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
6724 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
6725 }
6726
6727 /* Do the values look the same? */
6728 static int
6729 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
6730 {
6731 dw_loc_descr_ref loc1, loc2;
6732 rtx r1, r2;
6733
6734 if (v1->val_class != v2->val_class)
6735 return 0;
6736
6737 switch (v1->val_class)
6738 {
6739 case dw_val_class_const:
6740 return v1->v.val_int == v2->v.val_int;
6741 case dw_val_class_unsigned_const:
6742 return v1->v.val_unsigned == v2->v.val_unsigned;
6743 case dw_val_class_const_double:
6744 return v1->v.val_double.high == v2->v.val_double.high
6745 && v1->v.val_double.low == v2->v.val_double.low;
6746 case dw_val_class_wide_int:
6747 return *v1->v.val_wide == *v2->v.val_wide;
6748 case dw_val_class_vec:
6749 if (v1->v.val_vec.length != v2->v.val_vec.length
6750 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
6751 return 0;
6752 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
6753 v1->v.val_vec.length * v1->v.val_vec.elt_size))
6754 return 0;
6755 return 1;
6756 case dw_val_class_flag:
6757 return v1->v.val_flag == v2->v.val_flag;
6758 case dw_val_class_str:
6759 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
6760
6761 case dw_val_class_addr:
6762 r1 = v1->v.val_addr;
6763 r2 = v2->v.val_addr;
6764 if (GET_CODE (r1) != GET_CODE (r2))
6765 return 0;
6766 return rtx_equal_p (r1, r2);
6767
6768 case dw_val_class_offset:
6769 return v1->v.val_offset == v2->v.val_offset;
6770
6771 case dw_val_class_loc:
6772 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
6773 loc1 && loc2;
6774 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
6775 if (!same_loc_p (loc1, loc2, mark))
6776 return 0;
6777 return !loc1 && !loc2;
6778
6779 case dw_val_class_die_ref:
6780 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
6781
6782 case dw_val_class_fde_ref:
6783 case dw_val_class_vms_delta:
6784 case dw_val_class_lbl_id:
6785 case dw_val_class_lineptr:
6786 case dw_val_class_macptr:
6787 case dw_val_class_high_pc:
6788 return 1;
6789
6790 case dw_val_class_file:
6791 return v1->v.val_file == v2->v.val_file;
6792
6793 case dw_val_class_data8:
6794 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
6795
6796 default:
6797 return 1;
6798 }
6799 }
6800
6801 /* Do the attributes look the same? */
6802
6803 static int
6804 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
6805 {
6806 if (at1->dw_attr != at2->dw_attr)
6807 return 0;
6808
6809 /* We don't care that this was compiled with a different compiler
6810 snapshot; if the output is the same, that's what matters. */
6811 if (at1->dw_attr == DW_AT_producer)
6812 return 1;
6813
6814 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
6815 }
6816
6817 /* Do the dies look the same? */
6818
6819 static int
6820 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
6821 {
6822 dw_die_ref c1, c2;
6823 dw_attr_node *a1;
6824 unsigned ix;
6825
6826 /* To avoid infinite recursion. */
6827 if (die1->die_mark)
6828 return die1->die_mark == die2->die_mark;
6829 die1->die_mark = die2->die_mark = ++(*mark);
6830
6831 if (die1->die_tag != die2->die_tag)
6832 return 0;
6833
6834 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
6835 return 0;
6836
6837 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
6838 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
6839 return 0;
6840
6841 c1 = die1->die_child;
6842 c2 = die2->die_child;
6843 if (! c1)
6844 {
6845 if (c2)
6846 return 0;
6847 }
6848 else
6849 for (;;)
6850 {
6851 if (!same_die_p (c1, c2, mark))
6852 return 0;
6853 c1 = c1->die_sib;
6854 c2 = c2->die_sib;
6855 if (c1 == die1->die_child)
6856 {
6857 if (c2 == die2->die_child)
6858 break;
6859 else
6860 return 0;
6861 }
6862 }
6863
6864 return 1;
6865 }
6866
6867 /* Do the dies look the same? Wrapper around same_die_p. */
6868
6869 static int
6870 same_die_p_wrap (dw_die_ref die1, dw_die_ref die2)
6871 {
6872 int mark = 0;
6873 int ret = same_die_p (die1, die2, &mark);
6874
6875 unmark_all_dies (die1);
6876 unmark_all_dies (die2);
6877
6878 return ret;
6879 }
6880
6881 /* The prefix to attach to symbols on DIEs in the current comdat debug
6882 info section. */
6883 static const char *comdat_symbol_id;
6884
6885 /* The index of the current symbol within the current comdat CU. */
6886 static unsigned int comdat_symbol_number;
6887
6888 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
6889 children, and set comdat_symbol_id accordingly. */
6890
6891 static void
6892 compute_section_prefix (dw_die_ref unit_die)
6893 {
6894 const char *die_name = get_AT_string (unit_die, DW_AT_name);
6895 const char *base = die_name ? lbasename (die_name) : "anonymous";
6896 char *name = XALLOCAVEC (char, strlen (base) + 64);
6897 char *p;
6898 int i, mark;
6899 unsigned char checksum[16];
6900 struct md5_ctx ctx;
6901
6902 /* Compute the checksum of the DIE, then append part of it as hex digits to
6903 the base filename of the unit. */
6904
6905 md5_init_ctx (&ctx);
6906 mark = 0;
6907 die_checksum (unit_die, &ctx, &mark);
6908 unmark_all_dies (unit_die);
6909 md5_finish_ctx (&ctx, checksum);
6910
6911 sprintf (name, "%s.", base);
6912 clean_symbol_name (name);
6913
6914 p = name + strlen (name);
6915 for (i = 0; i < 4; i++)
6916 {
6917 sprintf (p, "%.2x", checksum[i]);
6918 p += 2;
6919 }
6920
6921 comdat_symbol_id = unit_die->die_id.die_symbol = xstrdup (name);
6922 comdat_symbol_number = 0;
6923 }
6924
6925 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
6926
6927 static int
6928 is_type_die (dw_die_ref die)
6929 {
6930 switch (die->die_tag)
6931 {
6932 case DW_TAG_array_type:
6933 case DW_TAG_class_type:
6934 case DW_TAG_interface_type:
6935 case DW_TAG_enumeration_type:
6936 case DW_TAG_pointer_type:
6937 case DW_TAG_reference_type:
6938 case DW_TAG_rvalue_reference_type:
6939 case DW_TAG_string_type:
6940 case DW_TAG_structure_type:
6941 case DW_TAG_subroutine_type:
6942 case DW_TAG_union_type:
6943 case DW_TAG_ptr_to_member_type:
6944 case DW_TAG_set_type:
6945 case DW_TAG_subrange_type:
6946 case DW_TAG_base_type:
6947 case DW_TAG_const_type:
6948 case DW_TAG_file_type:
6949 case DW_TAG_packed_type:
6950 case DW_TAG_volatile_type:
6951 case DW_TAG_typedef:
6952 return 1;
6953 default:
6954 return 0;
6955 }
6956 }
6957
6958 /* Returns 1 iff C is the sort of DIE that should go into a COMDAT CU.
6959 Basically, we want to choose the bits that are likely to be shared between
6960 compilations (types) and leave out the bits that are specific to individual
6961 compilations (functions). */
6962
6963 static int
6964 is_comdat_die (dw_die_ref c)
6965 {
6966 /* I think we want to leave base types and __vtbl_ptr_type in the main CU, as
6967 we do for stabs. The advantage is a greater likelihood of sharing between
6968 objects that don't include headers in the same order (and therefore would
6969 put the base types in a different comdat). jason 8/28/00 */
6970
6971 if (c->die_tag == DW_TAG_base_type)
6972 return 0;
6973
6974 if (c->die_tag == DW_TAG_pointer_type
6975 || c->die_tag == DW_TAG_reference_type
6976 || c->die_tag == DW_TAG_rvalue_reference_type
6977 || c->die_tag == DW_TAG_const_type
6978 || c->die_tag == DW_TAG_volatile_type)
6979 {
6980 dw_die_ref t = get_AT_ref (c, DW_AT_type);
6981
6982 return t ? is_comdat_die (t) : 0;
6983 }
6984
6985 return is_type_die (c);
6986 }
6987
6988 /* Returns 1 iff C is the sort of DIE that might be referred to from another
6989 compilation unit. */
6990
6991 static int
6992 is_symbol_die (dw_die_ref c)
6993 {
6994 return (is_type_die (c)
6995 || is_declaration_die (c)
6996 || c->die_tag == DW_TAG_namespace
6997 || c->die_tag == DW_TAG_module);
6998 }
6999
7000 /* Returns true iff C is a compile-unit DIE. */
7001
7002 static inline bool
7003 is_cu_die (dw_die_ref c)
7004 {
7005 return c && c->die_tag == DW_TAG_compile_unit;
7006 }
7007
7008 /* Returns true iff C is a unit DIE of some sort. */
7009
7010 static inline bool
7011 is_unit_die (dw_die_ref c)
7012 {
7013 return c && (c->die_tag == DW_TAG_compile_unit
7014 || c->die_tag == DW_TAG_partial_unit
7015 || c->die_tag == DW_TAG_type_unit);
7016 }
7017
7018 /* Returns true iff C is a namespace DIE. */
7019
7020 static inline bool
7021 is_namespace_die (dw_die_ref c)
7022 {
7023 return c && c->die_tag == DW_TAG_namespace;
7024 }
7025
7026 /* Returns true iff C is a class or structure DIE. */
7027
7028 static inline bool
7029 is_class_die (dw_die_ref c)
7030 {
7031 return c && (c->die_tag == DW_TAG_class_type
7032 || c->die_tag == DW_TAG_structure_type);
7033 }
7034
7035 /* Return non-zero if this DIE is a template parameter. */
7036
7037 static inline bool
7038 is_template_parameter (dw_die_ref die)
7039 {
7040 switch (die->die_tag)
7041 {
7042 case DW_TAG_template_type_param:
7043 case DW_TAG_template_value_param:
7044 case DW_TAG_GNU_template_template_param:
7045 case DW_TAG_GNU_template_parameter_pack:
7046 return true;
7047 default:
7048 return false;
7049 }
7050 }
7051
7052 /* Return non-zero if this DIE represents a template instantiation. */
7053
7054 static inline bool
7055 is_template_instantiation (dw_die_ref die)
7056 {
7057 dw_die_ref c;
7058
7059 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7060 return false;
7061 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7062 return false;
7063 }
7064
7065 static char *
7066 gen_internal_sym (const char *prefix)
7067 {
7068 char buf[256];
7069
7070 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7071 return xstrdup (buf);
7072 }
7073
7074 /* Assign symbols to all worthy DIEs under DIE. */
7075
7076 static void
7077 assign_symbol_names (dw_die_ref die)
7078 {
7079 dw_die_ref c;
7080
7081 if (is_symbol_die (die) && !die->comdat_type_p)
7082 {
7083 if (comdat_symbol_id)
7084 {
7085 char *p = XALLOCAVEC (char, strlen (comdat_symbol_id) + 64);
7086
7087 sprintf (p, "%s.%s.%x", DIE_LABEL_PREFIX,
7088 comdat_symbol_id, comdat_symbol_number++);
7089 die->die_id.die_symbol = xstrdup (p);
7090 }
7091 else
7092 die->die_id.die_symbol = gen_internal_sym ("LDIE");
7093 }
7094
7095 FOR_EACH_CHILD (die, c, assign_symbol_names (c));
7096 }
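
/* For example, a symbol DIE inside a comdat unit gets a label of the
   form "<DIE_LABEL_PREFIX>.<comdat_symbol_id>.<n>" with n a hexadecimal
   counter, while a symbol DIE outside any comdat unit gets an internal
   "LDIE" label from gen_internal_sym.  */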
7097
7098 struct cu_hash_table_entry
7099 {
7100 dw_die_ref cu;
7101 unsigned min_comdat_num, max_comdat_num;
7102 struct cu_hash_table_entry *next;
7103 };
7104
7105 /* Helpers to manipulate hash table of CUs. */
7106
7107 struct cu_hash_table_entry_hasher : pointer_hash <cu_hash_table_entry>
7108 {
7109 typedef die_struct *compare_type;
7110 static inline hashval_t hash (const cu_hash_table_entry *);
7111 static inline bool equal (const cu_hash_table_entry *, const die_struct *);
7112 static inline void remove (cu_hash_table_entry *);
7113 };
7114
7115 inline hashval_t
7116 cu_hash_table_entry_hasher::hash (const cu_hash_table_entry *entry)
7117 {
7118 return htab_hash_string (entry->cu->die_id.die_symbol);
7119 }
7120
7121 inline bool
7122 cu_hash_table_entry_hasher::equal (const cu_hash_table_entry *entry1,
7123 const die_struct *entry2)
7124 {
7125 return !strcmp (entry1->cu->die_id.die_symbol, entry2->die_id.die_symbol);
7126 }
7127
7128 inline void
7129 cu_hash_table_entry_hasher::remove (cu_hash_table_entry *entry)
7130 {
7131 struct cu_hash_table_entry *next;
7132
7133 while (entry)
7134 {
7135 next = entry->next;
7136 free (entry);
7137 entry = next;
7138 }
7139 }
7140
7141 typedef hash_table<cu_hash_table_entry_hasher> cu_hash_type;
7142
7143 /* Check whether we have already seen this CU and set up SYM_NUM
7144 accordingly. */
7145 static int
7146 check_duplicate_cu (dw_die_ref cu, cu_hash_type *htable, unsigned int *sym_num)
7147 {
7148 struct cu_hash_table_entry dummy;
7149 struct cu_hash_table_entry **slot, *entry, *last = &dummy;
7150
7151 dummy.max_comdat_num = 0;
7152
7153 slot = htable->find_slot_with_hash (cu,
7154 htab_hash_string (cu->die_id.die_symbol),
7155 INSERT);
7156 entry = *slot;
7157
7158 for (; entry; last = entry, entry = entry->next)
7159 {
7160 if (same_die_p_wrap (cu, entry->cu))
7161 break;
7162 }
7163
7164 if (entry)
7165 {
7166 *sym_num = entry->min_comdat_num;
7167 return 1;
7168 }
7169
7170 entry = XCNEW (struct cu_hash_table_entry);
7171 entry->cu = cu;
7172 entry->min_comdat_num = *sym_num = last->max_comdat_num;
7173 entry->next = *slot;
7174 *slot = entry;
7175
7176 return 0;
7177 }
7178
7179 /* Record SYM_NUM in the record for CU in HTABLE. */
7180 static void
7181 record_comdat_symbol_number (dw_die_ref cu, cu_hash_type *htable,
7182 unsigned int sym_num)
7183 {
7184 struct cu_hash_table_entry **slot, *entry;
7185
7186 slot = htable->find_slot_with_hash (cu,
7187 htab_hash_string (cu->die_id.die_symbol),
7188 NO_INSERT);
7189 entry = *slot;
7190
7191 entry->max_comdat_num = sym_num;
7192 }
7193
7194 /* Traverse the DIE (which is always comp_unit_die), and set up
7195 additional compilation units for each of the include files we see
7196 bracketed by BINCL/EINCL. */
7197
7198 static void
7199 break_out_includes (dw_die_ref die)
7200 {
7201 dw_die_ref c;
7202 dw_die_ref unit = NULL;
7203 limbo_die_node *node, **pnode;
7204
7205 c = die->die_child;
7206 if (c) do {
7207 dw_die_ref prev = c;
7208 c = c->die_sib;
7209 while (c->die_tag == DW_TAG_GNU_BINCL || c->die_tag == DW_TAG_GNU_EINCL
7210 || (unit && is_comdat_die (c)))
7211 {
7212 dw_die_ref next = c->die_sib;
7213
7214 /* This DIE is for a secondary CU; remove it from the main one. */
7215 remove_child_with_prev (c, prev);
7216
7217 if (c->die_tag == DW_TAG_GNU_BINCL)
7218 unit = push_new_compile_unit (unit, c);
7219 else if (c->die_tag == DW_TAG_GNU_EINCL)
7220 unit = pop_compile_unit (unit);
7221 else
7222 add_child_die (unit, c);
7223 c = next;
7224 if (c == die->die_child)
7225 break;
7226 }
7227 } while (c != die->die_child);
7228
7229 #if 0
7230 /* We can only use this in debugging, since the frontend doesn't check
7231 to make sure that we leave every include file we enter. */
7232 gcc_assert (!unit);
7233 #endif
7234
7235 assign_symbol_names (die);
7236 cu_hash_type cu_hash_table (10);
7237 for (node = limbo_die_list, pnode = &limbo_die_list;
7238 node;
7239 node = node->next)
7240 {
7241 int is_dupl;
7242
7243 compute_section_prefix (node->die);
7244 is_dupl = check_duplicate_cu (node->die, &cu_hash_table,
7245 &comdat_symbol_number);
7246 assign_symbol_names (node->die);
7247 if (is_dupl)
7248 *pnode = node->next;
7249 else
7250 {
7251 pnode = &node->next;
7252 record_comdat_symbol_number (node->die, &cu_hash_table,
7253 comdat_symbol_number);
7254 }
7255 }
7256 }
7257
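/* A rough sketch of the transformation above, with a hypothetical header
   "foo.h" bracketed by the BINCL/EINCL marker DIEs:

       DW_TAG_compile_unit
         DW_TAG_GNU_BINCL           <- entering foo.h
         ...comdat-eligible DIEs for foo.h...
         DW_TAG_GNU_EINCL           <- leaving foo.h
         ...DIEs for the main source file...

   The marker DIEs and the comdat-eligible DIEs between them are removed
   from the main CU and collected into a separate compile-unit DIE, which
   is then given a comdat section prefix and deduplicated against
   already-seen copies via check_duplicate_cu.  */
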
7258 /* Return non-zero if this DIE is a declaration. */
7259
7260 static int
7261 is_declaration_die (dw_die_ref die)
7262 {
7263 dw_attr_node *a;
7264 unsigned ix;
7265
7266 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7267 if (a->dw_attr == DW_AT_declaration)
7268 return 1;
7269
7270 return 0;
7271 }
7272
7273 /* Return non-zero if this DIE is nested inside a subprogram. */
7274
7275 static int
7276 is_nested_in_subprogram (dw_die_ref die)
7277 {
7278 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
7279
7280 if (decl == NULL)
7281 decl = die;
7282 return local_scope_p (decl);
7283 }
7284
7285 /* Return non-zero if this DIE contains a defining declaration of a
7286 subprogram. */
7287
7288 static int
7289 contains_subprogram_definition (dw_die_ref die)
7290 {
7291 dw_die_ref c;
7292
7293 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
7294 return 1;
7295 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
7296 return 0;
7297 }
7298
7299 /* Return non-zero if this is a type DIE that should be moved to a
7300 COMDAT .debug_types section. */
7301
7302 static int
7303 should_move_die_to_comdat (dw_die_ref die)
7304 {
7305 switch (die->die_tag)
7306 {
7307 case DW_TAG_class_type:
7308 case DW_TAG_structure_type:
7309 case DW_TAG_enumeration_type:
7310 case DW_TAG_union_type:
7311 /* Don't move declarations, inlined instances, types nested in a
7312 subprogram, or types that contain subprogram definitions. */
7313 if (is_declaration_die (die)
7314 || get_AT (die, DW_AT_abstract_origin)
7315 || is_nested_in_subprogram (die)
7316 || contains_subprogram_definition (die))
7317 return 0;
7318 return 1;
7319 case DW_TAG_array_type:
7320 case DW_TAG_interface_type:
7321 case DW_TAG_pointer_type:
7322 case DW_TAG_reference_type:
7323 case DW_TAG_rvalue_reference_type:
7324 case DW_TAG_string_type:
7325 case DW_TAG_subroutine_type:
7326 case DW_TAG_ptr_to_member_type:
7327 case DW_TAG_set_type:
7328 case DW_TAG_subrange_type:
7329 case DW_TAG_base_type:
7330 case DW_TAG_const_type:
7331 case DW_TAG_file_type:
7332 case DW_TAG_packed_type:
7333 case DW_TAG_volatile_type:
7334 case DW_TAG_typedef:
7335 default:
7336 return 0;
7337 }
7338 }
7339
7340 /* Make a clone of DIE. */
7341
7342 static dw_die_ref
7343 clone_die (dw_die_ref die)
7344 {
7345 dw_die_ref clone;
7346 dw_attr_node *a;
7347 unsigned ix;
7348
7349 clone = ggc_cleared_alloc<die_node> ();
7350 clone->die_tag = die->die_tag;
7351
7352 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7353 add_dwarf_attr (clone, a);
7354
7355 return clone;
7356 }
7357
7358 /* Make a clone of the tree rooted at DIE. */
7359
7360 static dw_die_ref
7361 clone_tree (dw_die_ref die)
7362 {
7363 dw_die_ref c;
7364 dw_die_ref clone = clone_die (die);
7365
7366 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
7367
7368 return clone;
7369 }
7370
7371 /* Make a clone of DIE as a declaration. */
7372
7373 static dw_die_ref
7374 clone_as_declaration (dw_die_ref die)
7375 {
7376 dw_die_ref clone;
7377 dw_die_ref decl;
7378 dw_attr_node *a;
7379 unsigned ix;
7380
7381 /* If the DIE is already a declaration, just clone it. */
7382 if (is_declaration_die (die))
7383 return clone_die (die);
7384
7385 /* If the DIE is a specification, just clone its declaration DIE. */
7386 decl = get_AT_ref (die, DW_AT_specification);
7387 if (decl != NULL)
7388 {
7389 clone = clone_die (decl);
7390 if (die->comdat_type_p)
7391 add_AT_die_ref (clone, DW_AT_signature, die);
7392 return clone;
7393 }
7394
7395 clone = ggc_cleared_alloc<die_node> ();
7396 clone->die_tag = die->die_tag;
7397
7398 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7399 {
7400 /* We don't want to copy over all attributes.
7401 For example we don't want DW_AT_byte_size because otherwise we will no
7402 longer have a declaration and GDB will treat it as a definition. */
7403
7404 switch (a->dw_attr)
7405 {
7406 case DW_AT_abstract_origin:
7407 case DW_AT_artificial:
7408 case DW_AT_containing_type:
7409 case DW_AT_external:
7410 case DW_AT_name:
7411 case DW_AT_type:
7412 case DW_AT_virtuality:
7413 case DW_AT_linkage_name:
7414 case DW_AT_MIPS_linkage_name:
7415 add_dwarf_attr (clone, a);
7416 break;
7417 case DW_AT_byte_size:
7418 default:
7419 break;
7420 }
7421 }
7422
7423 if (die->comdat_type_p)
7424 add_AT_die_ref (clone, DW_AT_signature, die);
7425
7426 add_AT_flag (clone, DW_AT_declaration, 1);
7427 return clone;
7428 }
7429
7430
7431 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
7432
7433 struct decl_table_entry
7434 {
7435 dw_die_ref orig;
7436 dw_die_ref copy;
7437 };
7438
7439 /* Helpers to manipulate hash table of copied declarations. */
7440
7441 /* Hashtable helpers. */
7442
7443 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
7444 {
7445 typedef die_struct *compare_type;
7446 static inline hashval_t hash (const decl_table_entry *);
7447 static inline bool equal (const decl_table_entry *, const die_struct *);
7448 };
7449
7450 inline hashval_t
7451 decl_table_entry_hasher::hash (const decl_table_entry *entry)
7452 {
7453 return htab_hash_pointer (entry->orig);
7454 }
7455
7456 inline bool
7457 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
7458 const die_struct *entry2)
7459 {
7460 return entry1->orig == entry2;
7461 }
7462
7463 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
7464
7465 /* Copy DIE and its ancestors, up to, but not including, the compile unit
7466 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
7467 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
7468 to check if the ancestor has already been copied into UNIT. */
7469
7470 static dw_die_ref
7471 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
7472 decl_hash_type *decl_table)
7473 {
7474 dw_die_ref parent = die->die_parent;
7475 dw_die_ref new_parent = unit;
7476 dw_die_ref copy;
7477 decl_table_entry **slot = NULL;
7478 struct decl_table_entry *entry = NULL;
7479
7480 if (decl_table)
7481 {
7482 /* Check if the entry has already been copied to UNIT. */
7483 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
7484 INSERT);
7485 if (*slot != HTAB_EMPTY_ENTRY)
7486 {
7487 entry = *slot;
7488 return entry->copy;
7489 }
7490
7491 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
7492 entry = XCNEW (struct decl_table_entry);
7493 entry->orig = die;
7494 entry->copy = NULL;
7495 *slot = entry;
7496 }
7497
7498 if (parent != NULL)
7499 {
7500 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
7501 if (spec != NULL)
7502 parent = spec;
7503 if (!is_unit_die (parent))
7504 new_parent = copy_ancestor_tree (unit, parent, decl_table);
7505 }
7506
7507 copy = clone_as_declaration (die);
7508 add_child_die (new_parent, copy);
7509
7510 if (decl_table)
7511 {
7512 /* Record the pointer to the copy. */
7513 entry->copy = copy;
7514 }
7515
7516 return copy;
7517 }
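
/* For example (names are hypothetical): copying a member subprogram DIE for
   "ns::C::f" into a type unit first recreates its context as declarations,
   so the unit ends up containing roughly

       DW_TAG_type_unit
         DW_TAG_namespace "ns"        (declaration clone)
           DW_TAG_class_type "C"      (declaration clone)
             DW_TAG_subprogram "f"    (declaration clone of the DIE itself)

   and DECL_TABLE, when given, remembers each original->copy pair so that a
   context reached through several DIEs is cloned only once.  */
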
7518 /* Copy the declaration context to the new type unit DIE. This includes
7519 any surrounding namespace or type declarations. If the DIE has an
7520 AT_specification attribute, it also includes attributes and children
7521 attached to the specification, and returns a pointer to the original
7522 parent of the declaration DIE. Returns NULL otherwise. */
7523
7524 static dw_die_ref
7525 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
7526 {
7527 dw_die_ref decl;
7528 dw_die_ref new_decl;
7529 dw_die_ref orig_parent = NULL;
7530
7531 decl = get_AT_ref (die, DW_AT_specification);
7532 if (decl == NULL)
7533 decl = die;
7534 else
7535 {
7536 unsigned ix;
7537 dw_die_ref c;
7538 dw_attr_node *a;
7539
7540 /* The original DIE will be changed to a declaration, and must
7541 be moved to be a child of the original declaration DIE. */
7542 orig_parent = decl->die_parent;
7543
7544 /* Copy the type node pointer from the new DIE to the original
7545 declaration DIE so we can forward references later. */
7546 decl->comdat_type_p = true;
7547 decl->die_id.die_type_node = die->die_id.die_type_node;
7548
7549 remove_AT (die, DW_AT_specification);
7550
7551 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
7552 {
7553 if (a->dw_attr != DW_AT_name
7554 && a->dw_attr != DW_AT_declaration
7555 && a->dw_attr != DW_AT_external)
7556 add_dwarf_attr (die, a);
7557 }
7558
7559 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
7560 }
7561
7562 if (decl->die_parent != NULL
7563 && !is_unit_die (decl->die_parent))
7564 {
7565 new_decl = copy_ancestor_tree (unit, decl, NULL);
7566 if (new_decl != NULL)
7567 {
7568 remove_AT (new_decl, DW_AT_signature);
7569 add_AT_specification (die, new_decl);
7570 }
7571 }
7572
7573 return orig_parent;
7574 }
7575
7576 /* Generate the skeleton ancestor tree for the given NODE, then clone
7577 the DIE and add the clone into the tree. */
7578
7579 static void
7580 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
7581 {
7582 if (node->new_die != NULL)
7583 return;
7584
7585 node->new_die = clone_as_declaration (node->old_die);
7586
7587 if (node->parent != NULL)
7588 {
7589 generate_skeleton_ancestor_tree (node->parent);
7590 add_child_die (node->parent->new_die, node->new_die);
7591 }
7592 }
7593
7594 /* Generate a skeleton tree of DIEs containing any declarations that are
7595 found in the original tree. We traverse the tree looking for declaration
7596 DIEs, and construct the skeleton from the bottom up whenever we find one. */
7597
7598 static void
7599 generate_skeleton_bottom_up (skeleton_chain_node *parent)
7600 {
7601 skeleton_chain_node node;
7602 dw_die_ref c;
7603 dw_die_ref first;
7604 dw_die_ref prev = NULL;
7605 dw_die_ref next = NULL;
7606
7607 node.parent = parent;
7608
7609 first = c = parent->old_die->die_child;
7610 if (c)
7611 next = c->die_sib;
7612 if (c) do {
7613 if (prev == NULL || prev->die_sib == c)
7614 prev = c;
7615 c = next;
7616 next = (c == first ? NULL : c->die_sib);
7617 node.old_die = c;
7618 node.new_die = NULL;
7619 if (is_declaration_die (c))
7620 {
7621 if (is_template_instantiation (c))
7622 {
7623 /* Instantiated templates do not need to be cloned into the
7624 type unit. Just move the DIE and its children back to
7625 the skeleton tree (in the main CU). */
7626 remove_child_with_prev (c, prev);
7627 add_child_die (parent->new_die, c);
7628 c = prev;
7629 }
7630 else
7631 {
7632 /* Clone the existing DIE, move the original to the skeleton
7633 tree (which is in the main CU), and put the clone, with
7634 all the original's children, where the original came from
7635 (which is about to be moved to the type unit). */
7636 dw_die_ref clone = clone_die (c);
7637 move_all_children (c, clone);
7638
7639 /* If the original has a DW_AT_object_pointer attribute,
7640 it would now point to a child DIE just moved to the
7641 cloned tree, so we need to remove that attribute from
7642 the original. */
7643 remove_AT (c, DW_AT_object_pointer);
7644
7645 replace_child (c, clone, prev);
7646 generate_skeleton_ancestor_tree (parent);
7647 add_child_die (parent->new_die, c);
7648 node.new_die = c;
7649 c = clone;
7650 }
7651 }
7652 generate_skeleton_bottom_up (&node);
7653 } while (next != NULL);
7654 }
7655
7656 /* Wrapper function for generate_skeleton_bottom_up. */
7657
7658 static dw_die_ref
7659 generate_skeleton (dw_die_ref die)
7660 {
7661 skeleton_chain_node node;
7662
7663 node.old_die = die;
7664 node.new_die = NULL;
7665 node.parent = NULL;
7666
7667 /* If this type definition is nested inside another type,
7668 and is not an instantiation of a template, always leave
7669 at least a declaration in its place. */
7670 if (die->die_parent != NULL
7671 && is_type_die (die->die_parent)
7672 && !is_template_instantiation (die))
7673 node.new_die = clone_as_declaration (die);
7674
7675 generate_skeleton_bottom_up (&node);
7676 return node.new_die;
7677 }
7678
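/* A small illustration (names hypothetical): if a class "C" about to move to
   a type unit contains a member declaration that must stay visible from the
   main CU,

       DW_TAG_class_type "C"
         DW_TAG_subprogram "m"     (declaration)

   the skeleton left behind in the main CU is a declaration clone of "C"
   holding the original "m" DIE (so existing references to "m" stay valid),
   while the tree destined for the type unit keeps "C" with a fresh clone of
   "m" in the original's place.  */
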
7679 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
7680 declaration. The original DIE is moved to a new compile unit so that
7681 existing references to it follow it to the new location. If any of the
7682 original DIE's descendants is a declaration, we need to replace the
7683 original DIE with a skeleton tree and move the declarations back into the
7684 skeleton tree. */
7685
7686 static dw_die_ref
7687 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
7688 dw_die_ref prev)
7689 {
7690 dw_die_ref skeleton, orig_parent;
7691
7692 /* Copy the declaration context to the type unit DIE. If the returned
7693 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
7694 that DIE. */
7695 orig_parent = copy_declaration_context (unit, child);
7696
7697 skeleton = generate_skeleton (child);
7698 if (skeleton == NULL)
7699 remove_child_with_prev (child, prev);
7700 else
7701 {
7702 skeleton->comdat_type_p = true;
7703 skeleton->die_id.die_type_node = child->die_id.die_type_node;
7704
7705 /* If the original DIE was a specification, we need to put
7706 the skeleton under the parent DIE of the declaration.
7707 This leaves the original declaration in the tree, but
7708 it will be pruned later since there are no longer any
7709 references to it. */
7710 if (orig_parent != NULL)
7711 {
7712 remove_child_with_prev (child, prev);
7713 add_child_die (orig_parent, skeleton);
7714 }
7715 else
7716 replace_child (child, skeleton, prev);
7717 }
7718
7719 return skeleton;
7720 }
7721
7722 static void
7723 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
7724 comdat_type_node *type_node,
7725 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
7726
7727 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
7728 procedure, put it under TYPE_NODE and return the copy. Continue looking for
7729 DWARF procedure references in the DW_AT_location attribute. */
7730
7731 static dw_die_ref
7732 copy_dwarf_procedure (dw_die_ref die,
7733 comdat_type_node *type_node,
7734 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
7735 {
7736 /* We do this for COMDAT sections, which are DWARFv4 specific, so
7737 DWARF procedures are always DW_TAG_dwarf_procedure DIEs (unlike
7738 DW_TAG_variable in DWARFv3). */
7739 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
7740
7741 /* DWARF procedures are not supposed to have children... */
7742 gcc_assert (die->die_child == NULL);
7743
7744 /* ... and they are supposed to have only one attribute: DW_AT_location. */
7745 gcc_assert (vec_safe_length (die->die_attr) == 1
7746 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
7747
7748 /* Do not copy DWARF procedures more than once. */
7749 bool existed;
7750 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
7751 if (existed)
7752 return die_copy;
7753
7754 die_copy = clone_die (die);
7755 add_child_die (type_node->root_die, die_copy);
7756 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
7757 return die_copy;
7758 }
7759
7760 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
7761 procedures in DIE's attributes. */
7762
7763 static void
7764 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
7765 comdat_type_node *type_node,
7766 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
7767 {
7768 dw_attr_node *a;
7769 unsigned i;
7770
7771 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
7772 {
7773 dw_loc_descr_ref loc;
7774
7775 if (a->dw_attr_val.val_class != dw_val_class_loc)
7776 continue;
7777
7778 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
7779 {
7780 switch (loc->dw_loc_opc)
7781 {
7782 case DW_OP_call2:
7783 case DW_OP_call4:
7784 case DW_OP_call_ref:
7785 gcc_assert (loc->dw_loc_oprnd1.val_class
7786 == dw_val_class_die_ref);
7787 loc->dw_loc_oprnd1.v.val_die_ref.die
7788 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
7789 type_node,
7790 copied_dwarf_procs);
7791 break;

7792 default:
7793 break;
7794 }
7795 }
7796 }
7797 }
7798
7799 /* Copy DWARF procedures that are referenced by the DIE tree to TREE_NODE and
7800 rewrite references to point to the copies.
7801
7802 References are looked for in DIE's attributes and recursively in the
7803 attributes of all its children that are location descriptions.
7804 COPIED_DWARF_PROCS is a mapping from old DWARF procedures to their copies;
7805 it is used to avoid copying the same DWARF procedure twice under TYPE_NODE. */
7806
7807 static void
7808 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
7809 comdat_type_node *type_node,
7810 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
7811 {
7812 dw_die_ref c;
7813
7814 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
7815 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
7816 type_node,
7817 copied_dwarf_procs));
7818 }
7819
7820 /* Traverse the DIE and set up additional .debug_types sections for each
7821 type worthy of being placed in a COMDAT section. */
7822
7823 static void
7824 break_out_comdat_types (dw_die_ref die)
7825 {
7826 dw_die_ref c;
7827 dw_die_ref first;
7828 dw_die_ref prev = NULL;
7829 dw_die_ref next = NULL;
7830 dw_die_ref unit = NULL;
7831
7832 first = c = die->die_child;
7833 if (c)
7834 next = c->die_sib;
7835 if (c) do {
7836 if (prev == NULL || prev->die_sib == c)
7837 prev = c;
7838 c = next;
7839 next = (c == first ? NULL : c->die_sib);
7840 if (should_move_die_to_comdat (c))
7841 {
7842 dw_die_ref replacement;
7843 comdat_type_node *type_node;
7844
7845 /* Break out nested types into their own type units. */
7846 break_out_comdat_types (c);
7847
7848 /* Create a new type unit DIE as the root for the new tree, and
7849 add it to the list of comdat types. */
7850 unit = new_die (DW_TAG_type_unit, NULL, NULL);
7851 add_AT_unsigned (unit, DW_AT_language,
7852 get_AT_unsigned (comp_unit_die (), DW_AT_language));
7853 type_node = ggc_cleared_alloc<comdat_type_node> ();
7854 type_node->root_die = unit;
7855 type_node->next = comdat_type_list;
7856 comdat_type_list = type_node;
7857
7858 /* Generate the type signature. */
7859 generate_type_signature (c, type_node);
7860
7861 /* Copy the declaration context, attributes, and children of the
7862 declaration into the new type unit DIE, then remove this DIE
7863 from the main CU (or replace it with a skeleton if necessary). */
7864 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
7865 type_node->skeleton_die = replacement;
7866
7867 /* Add the DIE to the new compunit. */
7868 add_child_die (unit, c);
7869
7870 /* Types can reference DWARF procedures for type size or data location
7871 expressions. Calls in DWARF expressions cannot target procedures
7872 that are not in the same section. So we must copy DWARF procedures
7873 along with this type and then rewrite references to them. */
7874 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
7875 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
7876
7877 if (replacement != NULL)
7878 c = replacement;
7879 }
7880 else if (c->die_tag == DW_TAG_namespace
7881 || c->die_tag == DW_TAG_class_type
7882 || c->die_tag == DW_TAG_structure_type
7883 || c->die_tag == DW_TAG_union_type)
7884 {
7885 /* Look for nested types that can be broken out. */
7886 break_out_comdat_types (c);
7887 }
7888 } while (next != NULL);
7889 }
7890
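/* After this pass, a type picked by should_move_die_to_comdat is split
   roughly as follows (illustrative, not a literal dump):

       main CU (.debug_info):    a skeleton declaration DIE whose
                                 comdat_type_node carries the type signature,
                                 keeping existing references resolvable;
       type unit (.debug_types): a DW_TAG_type_unit rooted at
                                 type_node->root_die with the full definition,
                                 its declaration context, and copies of any
                                 DWARF procedures it refers to.  */
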
7891 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
7892 Enter all the cloned children into the hash table decl_table. */
7893
7894 static dw_die_ref
7895 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
7896 {
7897 dw_die_ref c;
7898 dw_die_ref clone;
7899 struct decl_table_entry *entry;
7900 decl_table_entry **slot;
7901
7902 if (die->die_tag == DW_TAG_subprogram)
7903 clone = clone_as_declaration (die);
7904 else
7905 clone = clone_die (die);
7906
7907 slot = decl_table->find_slot_with_hash (die,
7908 htab_hash_pointer (die), INSERT);
7909
7910 /* Assert that DIE isn't in the hash table yet. If it were already there,
7911 its ancestors would necessarily be there as well, and therefore
7912 clone_tree_partial wouldn't have been called. */
7913 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
7914
7915 entry = XCNEW (struct decl_table_entry);
7916 entry->orig = die;
7917 entry->copy = clone;
7918 *slot = entry;
7919
7920 if (die->die_tag != DW_TAG_subprogram)
7921 FOR_EACH_CHILD (die, c,
7922 add_child_die (clone, clone_tree_partial (c, decl_table)));
7923
7924 return clone;
7925 }
7926
7927 /* Walk the DIE and its children, looking for references to incomplete
7928 or trivial types that are unmarked (i.e., that are not in the current
7929 type_unit). */
7930
7931 static void
7932 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
7933 {
7934 dw_die_ref c;
7935 dw_attr_node *a;
7936 unsigned ix;
7937
7938 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7939 {
7940 if (AT_class (a) == dw_val_class_die_ref)
7941 {
7942 dw_die_ref targ = AT_ref (a);
7943 decl_table_entry **slot;
7944 struct decl_table_entry *entry;
7945
7946 if (targ->die_mark != 0 || targ->comdat_type_p)
7947 continue;
7948
7949 slot = decl_table->find_slot_with_hash (targ,
7950 htab_hash_pointer (targ),
7951 INSERT);
7952
7953 if (*slot != HTAB_EMPTY_ENTRY)
7954 {
7955 /* TARG has already been copied, so we just need to
7956 modify the reference to point to the copy. */
7957 entry = *slot;
7958 a->dw_attr_val.v.val_die_ref.die = entry->copy;
7959 }
7960 else
7961 {
7962 dw_die_ref parent = unit;
7963 dw_die_ref copy = clone_die (targ);
7964
7965 /* Record in DECL_TABLE that TARG has been copied.
7966 Need to do this now, before the recursive call,
7967 because DECL_TABLE may be expanded and SLOT
7968 would no longer be a valid pointer. */
7969 entry = XCNEW (struct decl_table_entry);
7970 entry->orig = targ;
7971 entry->copy = copy;
7972 *slot = entry;
7973
7974 /* If TARG is not a declaration DIE, we need to copy its
7975 children. */
7976 if (!is_declaration_die (targ))
7977 {
7978 FOR_EACH_CHILD (
7979 targ, c,
7980 add_child_die (copy,
7981 clone_tree_partial (c, decl_table)));
7982 }
7983
7984 /* Make sure the cloned tree is marked as part of the
7985 type unit. */
7986 mark_dies (copy);
7987
7988 /* If TARG has surrounding context, copy its ancestor tree
7989 into the new type unit. */
7990 if (targ->die_parent != NULL
7991 && !is_unit_die (targ->die_parent))
7992 parent = copy_ancestor_tree (unit, targ->die_parent,
7993 decl_table);
7994
7995 add_child_die (parent, copy);
7996 a->dw_attr_val.v.val_die_ref.die = copy;
7997
7998 /* Make sure the newly-copied DIE is walked. If it was
7999 installed in a previously-added context, it won't
8000 get visited otherwise. */
8001 if (parent != unit)
8002 {
8003 /* Find the highest point of the newly-added tree,
8004 mark each node along the way, and walk from there. */
8005 parent->die_mark = 1;
8006 while (parent->die_parent
8007 && parent->die_parent->die_mark == 0)
8008 {
8009 parent = parent->die_parent;
8010 parent->die_mark = 1;
8011 }
8012 copy_decls_walk (unit, parent, decl_table);
8013 }
8014 }
8015 }
8016 }
8017
8018 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8019 }
8020
8021 /* Copy declarations for "unworthy" types into the new comdat section.
8022 Incomplete types, modified types, and certain other types aren't broken
8023 out into comdat sections of their own, so they don't have a signature,
8024 and we need to copy the declaration into the same section so that we
8025 don't have an external reference. */
8026
8027 static void
8028 copy_decls_for_unworthy_types (dw_die_ref unit)
8029 {
8030 mark_dies (unit);
8031 decl_hash_type decl_table (10);
8032 copy_decls_walk (unit, unit, &decl_table);
8033 unmark_dies (unit);
8034 }
8035
8036 /* Traverse the DIE and add a sibling attribute if it may have the
8037 effect of speeding up access to siblings. To save some space,
8038 avoid generating sibling attributes for DIEs without children. */
8039
8040 static void
8041 add_sibling_attributes (dw_die_ref die)
8042 {
8043 dw_die_ref c;
8044
8045 if (! die->die_child)
8046 return;
8047
8048 if (die->die_parent && die != die->die_parent->die_child)
8049 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8050
8051 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8052 }
8053
8054 /* Output all location lists for the DIE and its children. */
8055
8056 static void
8057 output_location_lists (dw_die_ref die)
8058 {
8059 dw_die_ref c;
8060 dw_attr_node *a;
8061 unsigned ix;
8062
8063 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8064 if (AT_class (a) == dw_val_class_loc_list)
8065 output_loc_list (AT_loc_list (a));
8066
8067 FOR_EACH_CHILD (die, c, output_location_lists (c));
8068 }
8069
8070 /* We want to limit the number of external references, because they are
8071 larger than local references: a relocation takes multiple words, and
8072 even a sig8 reference is always eight bytes, whereas a local reference
8073 can be as small as one byte (though DW_FORM_ref is usually 4 in GCC).
8074 So if we encounter multiple external references to the same type DIE, we
8075 make a local typedef stub for it and redirect all references there.
8076
8077 This is the element of the hash table for keeping track of these
8078 references. */
8079
8080 struct external_ref
8081 {
8082 dw_die_ref type;
8083 dw_die_ref stub;
8084 unsigned n_refs;
8085 };
8086
8087 /* Hashtable helpers. */
8088
8089 struct external_ref_hasher : free_ptr_hash <external_ref>
8090 {
8091 static inline hashval_t hash (const external_ref *);
8092 static inline bool equal (const external_ref *, const external_ref *);
8093 };
8094
8095 inline hashval_t
8096 external_ref_hasher::hash (const external_ref *r)
8097 {
8098 dw_die_ref die = r->type;
8099 hashval_t h = 0;
8100
8101 /* We can't use the address of the DIE for hashing, because
8102 that will make the order of the stub DIEs non-deterministic. */
8103 if (! die->comdat_type_p)
8104 /* We have a symbol; use it to compute a hash. */
8105 h = htab_hash_string (die->die_id.die_symbol);
8106 else
8107 {
8108 /* We have a type signature; use a subset of the bits as the hash.
8109 The 8-byte signature is at least as large as hashval_t. */
8110 comdat_type_node *type_node = die->die_id.die_type_node;
8111 memcpy (&h, type_node->signature, sizeof (h));
8112 }
8113 return h;
8114 }
8115
8116 inline bool
8117 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8118 {
8119 return r1->type == r2->type;
8120 }
8121
8122 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8123
8124 /* Return a pointer to the external_ref for references to DIE. */
8125
8126 static struct external_ref *
8127 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8128 {
8129 struct external_ref ref, *ref_p;
8130 external_ref **slot;
8131
8132 ref.type = die;
8133 slot = map->find_slot (&ref, INSERT);
8134 if (*slot != HTAB_EMPTY_ENTRY)
8135 return *slot;
8136
8137 ref_p = XCNEW (struct external_ref);
8138 ref_p->type = die;
8139 *slot = ref_p;
8140 return ref_p;
8141 }
8142
8143 /* Subroutine of optimize_external_refs, below.
8144
8145 If we see a type skeleton, record it as our stub. If we see external
8146 references, remember how many we've seen. */
8147
8148 static void
8149 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8150 {
8151 dw_die_ref c;
8152 dw_attr_node *a;
8153 unsigned ix;
8154 struct external_ref *ref_p;
8155
8156 if (is_type_die (die)
8157 && (c = get_AT_ref (die, DW_AT_signature)))
8158 {
8159 /* This is a local skeleton; use it for local references. */
8160 ref_p = lookup_external_ref (map, c);
8161 ref_p->stub = die;
8162 }
8163
8164 /* Scan the DIE references, and remember any that refer to DIEs from
8165 other CUs (i.e. those which are not marked). */
8166 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8167 if (AT_class (a) == dw_val_class_die_ref
8168 && (c = AT_ref (a))->die_mark == 0
8169 && is_type_die (c))
8170 {
8171 ref_p = lookup_external_ref (map, c);
8172 ref_p->n_refs++;
8173 }
8174
8175 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8176 }
8177
8178 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8179 points to an external_ref, DATA is the CU we're processing. If we don't
8180 already have a local stub, and we have multiple refs, build a stub. */
8181
8182 int
8183 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8184 {
8185 struct external_ref *ref_p = *slot;
8186
8187 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
8188 {
8189 /* We have multiple references to this type, so build a small stub.
8190 Both of these forms are a bit dodgy from the perspective of the
8191 DWARF standard, since technically they should have names. */
8192 dw_die_ref cu = data;
8193 dw_die_ref type = ref_p->type;
8194 dw_die_ref stub = NULL;
8195
8196 if (type->comdat_type_p)
8197 {
8198 /* If we refer to this type via sig8, use AT_signature. */
8199 stub = new_die (type->die_tag, cu, NULL_TREE);
8200 add_AT_die_ref (stub, DW_AT_signature, type);
8201 }
8202 else
8203 {
8204 /* Otherwise, use a typedef with no name. */
8205 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
8206 add_AT_die_ref (stub, DW_AT_type, type);
8207 }
8208
8209 stub->die_mark++;
8210 ref_p->stub = stub;
8211 }
8212 return 1;
8213 }
8214
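/* An example of the stub built above (hypothetical): if this CU refers three
   times to a type DIE "S" that lives in another CU, each reference would
   otherwise need DW_FORM_ref_addr (or a sig8 reference).  Instead we add one
   local, unnamed stub

       DW_TAG_typedef             (no DW_AT_name)
         DW_AT_type -> S

   or, for a comdat type, a DIE with S's tag and only DW_AT_signature, and
   build_abbrev_table later redirects those references to the stub so they
   can use the cheaper intra-CU reference form.  */
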
8215 /* DIE is a unit; look through all the DIE references to see if there are
8216 any external references to types, and if so, create local stubs for
8217 them which will be applied in build_abbrev_table. This is useful because
8218 references to local DIEs are smaller. */
8219
8220 static external_ref_hash_type *
8221 optimize_external_refs (dw_die_ref die)
8222 {
8223 external_ref_hash_type *map = new external_ref_hash_type (10);
8224 optimize_external_refs_1 (die, map);
8225 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
8226 return map;
8227 }
8228
8229 /* The format of each DIE (and its attribute value pairs) is encoded in an
8230 abbreviation table. This routine builds the abbreviation table and assigns
8231 a unique abbreviation id for each abbreviation entry. The children of each
8232 die are visited recursively. */
8233
8234 static void
8235 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
8236 {
8237 unsigned long abbrev_id;
8238 unsigned int n_alloc;
8239 dw_die_ref c;
8240 dw_attr_node *a;
8241 unsigned ix;
8242
8243 /* Scan the DIE references, and replace any that refer to
8244 DIEs from other CUs (i.e. those which are not marked) with
8245 the local stubs we built in optimize_external_refs. */
8246 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8247 if (AT_class (a) == dw_val_class_die_ref
8248 && (c = AT_ref (a))->die_mark == 0)
8249 {
8250 struct external_ref *ref_p;
8251 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
8252
8253 ref_p = lookup_external_ref (extern_map, c);
8254 if (ref_p->stub && ref_p->stub != die)
8255 change_AT_die_ref (a, ref_p->stub);
8256 else
8257 /* We aren't changing this reference, so mark it external. */
8258 set_AT_ref_external (a, 1);
8259 }
8260
8261 for (abbrev_id = 1; abbrev_id < abbrev_die_table_in_use; ++abbrev_id)
8262 {
8263 dw_die_ref abbrev = abbrev_die_table[abbrev_id];
8264 dw_attr_node *die_a, *abbrev_a;
8265 unsigned ix;
8266 bool ok = true;
8267
8268 if (abbrev->die_tag != die->die_tag)
8269 continue;
8270 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
8271 continue;
8272
8273 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
8274 continue;
8275
8276 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
8277 {
8278 abbrev_a = &(*abbrev->die_attr)[ix];
8279 if ((abbrev_a->dw_attr != die_a->dw_attr)
8280 || (value_format (abbrev_a) != value_format (die_a)))
8281 {
8282 ok = false;
8283 break;
8284 }
8285 }
8286 if (ok)
8287 break;
8288 }
8289
8290 if (abbrev_id >= abbrev_die_table_in_use)
8291 {
8292 if (abbrev_die_table_in_use >= abbrev_die_table_allocated)
8293 {
8294 n_alloc = abbrev_die_table_allocated + ABBREV_DIE_TABLE_INCREMENT;
8295 abbrev_die_table = GGC_RESIZEVEC (dw_die_ref, abbrev_die_table,
8296 n_alloc);
8297
8298 memset (&abbrev_die_table[abbrev_die_table_allocated], 0,
8299 (n_alloc - abbrev_die_table_allocated) * sizeof (dw_die_ref));
8300 abbrev_die_table_allocated = n_alloc;
8301 }
8302
8303 ++abbrev_die_table_in_use;
8304 abbrev_die_table[abbrev_id] = die;
8305 }
8306
8307 die->die_abbrev = abbrev_id;
8308 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
8309 }
8310 \f
8311 /* Return the power-of-two number of bytes necessary to represent VALUE. */
8312
8313 static int
8314 constant_size (unsigned HOST_WIDE_INT value)
8315 {
8316 int log;
8317
8318 if (value == 0)
8319 log = 0;
8320 else
8321 log = floor_log2 (value);
8322
8323 log = log / 8;
8324 log = 1 << (floor_log2 (log) + 1);
8325
8326 return log;
8327 }
8328
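/* Worked examples for constant_size, assuming GCC's usual floor_log2
   behavior (floor_log2 (0) == -1):

       0x0     .. 0xff         -> 1 byte
       0x100   .. 0xffff       -> 2 bytes
       0x10000 .. 0xffffffff   -> 4 bytes
       anything larger         -> 8 bytes

   so the result is always 1, 2, 4 or 8, matching the fixed-size
   DW_FORM_data* and DW_FORM_block* encodings chosen in value_format.  */
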
8329 /* Return the size of a DIE as it is represented in the
8330 .debug_info section. */
8331
8332 static unsigned long
8333 size_of_die (dw_die_ref die)
8334 {
8335 unsigned long size = 0;
8336 dw_attr_node *a;
8337 unsigned ix;
8338 enum dwarf_form form;
8339
8340 size += size_of_uleb128 (die->die_abbrev);
8341 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8342 {
8343 switch (AT_class (a))
8344 {
8345 case dw_val_class_addr:
8346 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
8347 {
8348 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
8349 size += size_of_uleb128 (AT_index (a));
8350 }
8351 else
8352 size += DWARF2_ADDR_SIZE;
8353 break;
8354 case dw_val_class_offset:
8355 size += DWARF_OFFSET_SIZE;
8356 break;
8357 case dw_val_class_loc:
8358 {
8359 unsigned long lsize = size_of_locs (AT_loc (a));
8360
8361 /* Block length. */
8362 if (dwarf_version >= 4)
8363 size += size_of_uleb128 (lsize);
8364 else
8365 size += constant_size (lsize);
8366 size += lsize;
8367 }
8368 break;
8369 case dw_val_class_loc_list:
8370 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
8371 {
8372 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
8373 size += size_of_uleb128 (AT_index (a));
8374 }
8375 else
8376 size += DWARF_OFFSET_SIZE;
8377 break;
8378 case dw_val_class_range_list:
8379 size += DWARF_OFFSET_SIZE;
8380 break;
8381 case dw_val_class_const:
8382 size += size_of_sleb128 (AT_int (a));
8383 break;
8384 case dw_val_class_unsigned_const:
8385 {
8386 int csize = constant_size (AT_unsigned (a));
8387 if (dwarf_version == 3
8388 && a->dw_attr == DW_AT_data_member_location
8389 && csize >= 4)
8390 size += size_of_uleb128 (AT_unsigned (a));
8391 else
8392 size += csize;
8393 }
8394 break;
8395 case dw_val_class_const_double:
8396 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
8397 if (HOST_BITS_PER_WIDE_INT >= 64)
8398 size++; /* block */
8399 break;
8400 case dw_val_class_wide_int:
8401 size += (get_full_len (*a->dw_attr_val.v.val_wide)
8402 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
8403 if (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT
8404 > 64)
8405 size++; /* block */
8406 break;
8407 case dw_val_class_vec:
8408 size += constant_size (a->dw_attr_val.v.val_vec.length
8409 * a->dw_attr_val.v.val_vec.elt_size)
8410 + a->dw_attr_val.v.val_vec.length
8411 * a->dw_attr_val.v.val_vec.elt_size; /* block */
8412 break;
8413 case dw_val_class_flag:
8414 if (dwarf_version >= 4)
8415 /* Currently all add_AT_flag calls pass in 1 as last argument,
8416 so DW_FORM_flag_present can be used. If that ever changes,
8417 we'll need to use DW_FORM_flag and have some optimization
8418 in build_abbrev_table that will change those to
8419 DW_FORM_flag_present if it is set to 1 in all DIEs using
8420 the same abbrev entry. */
8421 gcc_assert (a->dw_attr_val.v.val_flag == 1);
8422 else
8423 size += 1;
8424 break;
8425 case dw_val_class_die_ref:
8426 if (AT_ref_external (a))
8427 {
8428 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
8429 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
8430 is sized by target address length, whereas in DWARF3
8431 it's always sized as an offset. */
8432 if (use_debug_types)
8433 size += DWARF_TYPE_SIGNATURE_SIZE;
8434 else if (dwarf_version == 2)
8435 size += DWARF2_ADDR_SIZE;
8436 else
8437 size += DWARF_OFFSET_SIZE;
8438 }
8439 else
8440 size += DWARF_OFFSET_SIZE;
8441 break;
8442 case dw_val_class_fde_ref:
8443 size += DWARF_OFFSET_SIZE;
8444 break;
8445 case dw_val_class_lbl_id:
8446 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
8447 {
8448 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
8449 size += size_of_uleb128 (AT_index (a));
8450 }
8451 else
8452 size += DWARF2_ADDR_SIZE;
8453 break;
8454 case dw_val_class_lineptr:
8455 case dw_val_class_macptr:
8456 size += DWARF_OFFSET_SIZE;
8457 break;
8458 case dw_val_class_str:
8459 form = AT_string_form (a);
8460 if (form == DW_FORM_strp)
8461 size += DWARF_OFFSET_SIZE;
8462 else if (form == DW_FORM_GNU_str_index)
8463 size += size_of_uleb128 (AT_index (a));
8464 else
8465 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
8466 break;
8467 case dw_val_class_file:
8468 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
8469 break;
8470 case dw_val_class_data8:
8471 size += 8;
8472 break;
8473 case dw_val_class_vms_delta:
8474 size += DWARF_OFFSET_SIZE;
8475 break;
8476 case dw_val_class_high_pc:
8477 size += DWARF2_ADDR_SIZE;
8478 break;
8479 case dw_val_class_discr_value:
8480 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
8481 break;
8482 case dw_val_class_discr_list:
8483 {
8484 unsigned block_size = size_of_discr_list (AT_discr_list (a));
8485
8486 /* This is a block, so we have the block length and then its
8487 data. */
8488 size += constant_size (block_size) + block_size;
8489 }
8490 break;
8491 default:
8492 gcc_unreachable ();
8493 }
8494 }
8495
8496 return size;
8497 }
8498
8499 /* Size the debugging information associated with a given DIE. Visits the
8500 DIE's children recursively. Updates the global variable next_die_offset
8501 each time through. Uses the current value of next_die_offset to update the
8502 die_offset field in each DIE. */
8503
8504 static void
8505 calc_die_sizes (dw_die_ref die)
8506 {
8507 dw_die_ref c;
8508
8509 gcc_assert (die->die_offset == 0
8510 || (unsigned long int) die->die_offset == next_die_offset);
8511 die->die_offset = next_die_offset;
8512 next_die_offset += size_of_die (die);
8513
8514 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
8515
8516 if (die->die_child != NULL)
8517 /* Count the null byte used to terminate sibling lists. */
8518 next_die_offset += 1;
8519 }
8520
8521 /* Size just the base type children at the start of the CU.
8522 This is needed because build_abbrev needs to size locs
8523 and sizing of type based stack ops needs to know die_offset
8524 values for the base types. */
8525
8526 static void
8527 calc_base_type_die_sizes (void)
8528 {
8529 unsigned long die_offset = DWARF_COMPILE_UNIT_HEADER_SIZE;
8530 unsigned int i;
8531 dw_die_ref base_type;
8532 #if ENABLE_ASSERT_CHECKING
8533 dw_die_ref prev = comp_unit_die ()->die_child;
8534 #endif
8535
8536 die_offset += size_of_die (comp_unit_die ());
8537 for (i = 0; base_types.iterate (i, &base_type); i++)
8538 {
8539 #if ENABLE_ASSERT_CHECKING
8540 gcc_assert (base_type->die_offset == 0
8541 && prev->die_sib == base_type
8542 && base_type->die_child == NULL
8543 && base_type->die_abbrev);
8544 prev = base_type;
8545 #endif
8546 base_type->die_offset = die_offset;
8547 die_offset += size_of_die (base_type);
8548 }
8549 }
8550
8551 /* Set the marks for a die and its children. We do this so
8552 that we know whether or not a reference needs to use FORM_ref_addr; only
8553 DIEs in the same CU will be marked. We used to clear out the offset
8554 and use that as the flag, but ran into ordering problems. */
8555
8556 static void
8557 mark_dies (dw_die_ref die)
8558 {
8559 dw_die_ref c;
8560
8561 gcc_assert (!die->die_mark);
8562
8563 die->die_mark = 1;
8564 FOR_EACH_CHILD (die, c, mark_dies (c));
8565 }
8566
8567 /* Clear the marks for a die and its children. */
8568
8569 static void
8570 unmark_dies (dw_die_ref die)
8571 {
8572 dw_die_ref c;
8573
8574 if (! use_debug_types)
8575 gcc_assert (die->die_mark);
8576
8577 die->die_mark = 0;
8578 FOR_EACH_CHILD (die, c, unmark_dies (c));
8579 }
8580
8581 /* Clear the marks for a die, its children and referred dies. */
8582
8583 static void
8584 unmark_all_dies (dw_die_ref die)
8585 {
8586 dw_die_ref c;
8587 dw_attr_node *a;
8588 unsigned ix;
8589
8590 if (!die->die_mark)
8591 return;
8592 die->die_mark = 0;
8593
8594 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
8595
8596 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8597 if (AT_class (a) == dw_val_class_die_ref)
8598 unmark_all_dies (AT_ref (a));
8599 }
8600
8601 /* Calculate if the entry should appear in the final output file. It may be
8602 from a pruned type. */
8603
8604 static bool
8605 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
8606 {
8607 /* By limiting gnu pubnames to definitions only, gold can generate a
8608 gdb index without entries for declarations, which don't include
8609 enough information to be useful. */
8610 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
8611 return false;
8612
8613 if (table == pubname_table)
8614 {
8615 /* Enumerator names are part of the pubname table, but the
8616 parent DW_TAG_enumeration_type die may have been pruned.
8617 Don't output them if that is the case. */
8618 if (p->die->die_tag == DW_TAG_enumerator &&
8619 (p->die->die_parent == NULL
8620 || !p->die->die_parent->die_perennial_p))
8621 return false;
8622
8623 /* Everything else in the pubname table is included. */
8624 return true;
8625 }
8626
8627 /* The pubtypes table shouldn't include types that have been
8628 pruned. */
8629 return (p->die->die_offset != 0
8630 || !flag_eliminate_unused_debug_types);
8631 }
8632
8633 /* Return the size of the .debug_pubnames or .debug_pubtypes table
8634 generated for the compilation unit. */
8635
8636 static unsigned long
8637 size_of_pubnames (vec<pubname_entry, va_gc> *names)
8638 {
8639 unsigned long size;
8640 unsigned i;
8641 pubname_entry *p;
8642 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
8643
8644 size = DWARF_PUBNAMES_HEADER_SIZE;
8645 FOR_EACH_VEC_ELT (*names, i, p)
8646 if (include_pubname_in_output (names, p))
8647 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
8648
8649 size += DWARF_OFFSET_SIZE;
8650 return size;
8651 }
8652
8653 /* Return the size of the information in the .debug_aranges section. */
8654
8655 static unsigned long
8656 size_of_aranges (void)
8657 {
8658 unsigned long size;
8659
8660 size = DWARF_ARANGES_HEADER_SIZE;
8661
8662 /* Count the address/length pair for this compilation unit. */
8663 if (text_section_used)
8664 size += 2 * DWARF2_ADDR_SIZE;
8665 if (cold_text_section_used)
8666 size += 2 * DWARF2_ADDR_SIZE;
8667 if (have_multiple_function_sections)
8668 {
8669 unsigned fde_idx;
8670 dw_fde_ref fde;
8671
8672 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
8673 {
8674 if (DECL_IGNORED_P (fde->decl))
8675 continue;
8676 if (!fde->in_std_section)
8677 size += 2 * DWARF2_ADDR_SIZE;
8678 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
8679 size += 2 * DWARF2_ADDR_SIZE;
8680 }
8681 }
8682
8683 /* Count the two zero words used to terminate the address range table. */
8684 size += 2 * DWARF2_ADDR_SIZE;
8685 return size;
8686 }
8687 \f
8688 /* Select the encoding of an attribute value. */
8689
8690 static enum dwarf_form
8691 value_format (dw_attr_node *a)
8692 {
8693 switch (AT_class (a))
8694 {
8695 case dw_val_class_addr:
8696 /* Only very few attributes allow DW_FORM_addr. */
8697 switch (a->dw_attr)
8698 {
8699 case DW_AT_low_pc:
8700 case DW_AT_high_pc:
8701 case DW_AT_entry_pc:
8702 case DW_AT_trampoline:
8703 return (AT_index (a) == NOT_INDEXED
8704 ? DW_FORM_addr : DW_FORM_GNU_addr_index);
8705 default:
8706 break;
8707 }
8708 switch (DWARF2_ADDR_SIZE)
8709 {
8710 case 1:
8711 return DW_FORM_data1;
8712 case 2:
8713 return DW_FORM_data2;
8714 case 4:
8715 return DW_FORM_data4;
8716 case 8:
8717 return DW_FORM_data8;
8718 default:
8719 gcc_unreachable ();
8720 }
8721 case dw_val_class_range_list:
8722 case dw_val_class_loc_list:
8723 if (dwarf_version >= 4)
8724 return DW_FORM_sec_offset;
8725 /* FALLTHRU */
8726 case dw_val_class_vms_delta:
8727 case dw_val_class_offset:
8728 switch (DWARF_OFFSET_SIZE)
8729 {
8730 case 4:
8731 return DW_FORM_data4;
8732 case 8:
8733 return DW_FORM_data8;
8734 default:
8735 gcc_unreachable ();
8736 }
8737 case dw_val_class_loc:
8738 if (dwarf_version >= 4)
8739 return DW_FORM_exprloc;
8740 switch (constant_size (size_of_locs (AT_loc (a))))
8741 {
8742 case 1:
8743 return DW_FORM_block1;
8744 case 2:
8745 return DW_FORM_block2;
8746 case 4:
8747 return DW_FORM_block4;
8748 default:
8749 gcc_unreachable ();
8750 }
8751 case dw_val_class_const:
8752 return DW_FORM_sdata;
8753 case dw_val_class_unsigned_const:
8754 switch (constant_size (AT_unsigned (a)))
8755 {
8756 case 1:
8757 return DW_FORM_data1;
8758 case 2:
8759 return DW_FORM_data2;
8760 case 4:
8761 /* In DWARF3 DW_AT_data_member_location with
8762 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
8763 constant, so we need to use DW_FORM_udata if we need
8764 a large constant. */
8765 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
8766 return DW_FORM_udata;
8767 return DW_FORM_data4;
8768 case 8:
8769 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
8770 return DW_FORM_udata;
8771 return DW_FORM_data8;
8772 default:
8773 gcc_unreachable ();
8774 }
8775 case dw_val_class_const_double:
8776 switch (HOST_BITS_PER_WIDE_INT)
8777 {
8778 case 8:
8779 return DW_FORM_data2;
8780 case 16:
8781 return DW_FORM_data4;
8782 case 32:
8783 return DW_FORM_data8;
8784 case 64:
8785 default:
8786 return DW_FORM_block1;
8787 }
8788 case dw_val_class_wide_int:
8789 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
8790 {
8791 case 8:
8792 return DW_FORM_data1;
8793 case 16:
8794 return DW_FORM_data2;
8795 case 32:
8796 return DW_FORM_data4;
8797 case 64:
8798 return DW_FORM_data8;
8799 default:
8800 return DW_FORM_block1;
8801 }
8802 case dw_val_class_vec:
8803 switch (constant_size (a->dw_attr_val.v.val_vec.length
8804 * a->dw_attr_val.v.val_vec.elt_size))
8805 {
8806 case 1:
8807 return DW_FORM_block1;
8808 case 2:
8809 return DW_FORM_block2;
8810 case 4:
8811 return DW_FORM_block4;
8812 default:
8813 gcc_unreachable ();
8814 }
8815 case dw_val_class_flag:
8816 if (dwarf_version >= 4)
8817 {
8818 /* Currently all add_AT_flag calls pass in 1 as last argument,
8819 so DW_FORM_flag_present can be used. If that ever changes,
8820 we'll need to use DW_FORM_flag and have some optimization
8821 in build_abbrev_table that will change those to
8822 DW_FORM_flag_present if it is set to 1 in all DIEs using
8823 the same abbrev entry. */
8824 gcc_assert (a->dw_attr_val.v.val_flag == 1);
8825 return DW_FORM_flag_present;
8826 }
8827 return DW_FORM_flag;
8828 case dw_val_class_die_ref:
8829 if (AT_ref_external (a))
8830 return use_debug_types ? DW_FORM_ref_sig8 : DW_FORM_ref_addr;
8831 else
8832 return DW_FORM_ref;
8833 case dw_val_class_fde_ref:
8834 return DW_FORM_data;
8835 case dw_val_class_lbl_id:
8836 return (AT_index (a) == NOT_INDEXED
8837 ? DW_FORM_addr : DW_FORM_GNU_addr_index);
8838 case dw_val_class_lineptr:
8839 case dw_val_class_macptr:
8840 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
8841 case dw_val_class_str:
8842 return AT_string_form (a);
8843 case dw_val_class_file:
8844 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
8845 {
8846 case 1:
8847 return DW_FORM_data1;
8848 case 2:
8849 return DW_FORM_data2;
8850 case 4:
8851 return DW_FORM_data4;
8852 default:
8853 gcc_unreachable ();
8854 }
8855
8856 case dw_val_class_data8:
8857 return DW_FORM_data8;
8858
8859 case dw_val_class_high_pc:
8860 switch (DWARF2_ADDR_SIZE)
8861 {
8862 case 1:
8863 return DW_FORM_data1;
8864 case 2:
8865 return DW_FORM_data2;
8866 case 4:
8867 return DW_FORM_data4;
8868 case 8:
8869 return DW_FORM_data8;
8870 default:
8871 gcc_unreachable ();
8872 }
8873
8874 case dw_val_class_discr_value:
8875 return (a->dw_attr_val.v.val_discr_value.pos
8876 ? DW_FORM_udata
8877 : DW_FORM_sdata);
8878 case dw_val_class_discr_list:
8879 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
8880 {
8881 case 1:
8882 return DW_FORM_block1;
8883 case 2:
8884 return DW_FORM_block2;
8885 case 4:
8886 return DW_FORM_block4;
8887 default:
8888 gcc_unreachable ();
8889 }
8890
8891 default:
8892 gcc_unreachable ();
8893 }
8894 }
8895
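/* A few examples of the choices made above (not exhaustive): an unsigned
   constant attribute with value 300 sizes to two bytes and is emitted as
   DW_FORM_data2; DW_AT_low_pc stays DW_FORM_addr (or DW_FORM_GNU_addr_index
   when indexed for split DWARF); a location expression becomes
   DW_FORM_exprloc for DWARF 4 and later, and DW_FORM_block1/2/4 before.  */
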
8896 /* Output the encoding of an attribute value. */
8897
8898 static void
8899 output_value_format (dw_attr_node *a)
8900 {
8901 enum dwarf_form form = value_format (a);
8902
8903 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
8904 }
8905
8906 /* Given a die and id, produce the appropriate abbreviations. */
8907
8908 static void
8909 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
8910 {
8911 unsigned ix;
8912 dw_attr_node *a_attr;
8913
8914 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
8915 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
8916 dwarf_tag_name (abbrev->die_tag));
8917
8918 if (abbrev->die_child != NULL)
8919 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
8920 else
8921 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
8922
8923 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
8924 {
8925 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
8926 dwarf_attr_name (a_attr->dw_attr));
8927 output_value_format (a_attr);
8928 }
8929
8930 dw2_asm_output_data (1, 0, NULL);
8931 dw2_asm_output_data (1, 0, NULL);
8932 }
8933
8934
8935 /* Output the .debug_abbrev section which defines the DIE abbreviation
8936 table. */
8937
8938 static void
8939 output_abbrev_section (void)
8940 {
8941 unsigned long abbrev_id;
8942
8943 for (abbrev_id = 1; abbrev_id < abbrev_die_table_in_use; ++abbrev_id)
8944 output_die_abbrevs (abbrev_id, abbrev_die_table[abbrev_id]);
8945
8946 /* Terminate the table. */
8947 dw2_asm_output_data (1, 0, NULL);
8948 }
8949
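/* The resulting .debug_abbrev stream for one hypothetical abbreviation looks
   roughly like this (values are ULEB128 except the 1-byte children flag):

       2                            abbrev code
       DW_TAG_variable              tag
       DW_children_no               children flag
       DW_AT_name   DW_FORM_strp    attribute/form pairs...
       DW_AT_type   DW_FORM_ref4
       0 0                          end of this abbreviation
       ...                          more abbreviations
       0                            end of the table  */
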
8950 /* Output a symbol we can use to refer to this DIE from another CU. */
8951
8952 static inline void
8953 output_die_symbol (dw_die_ref die)
8954 {
8955 const char *sym = die->die_id.die_symbol;
8956
8957 gcc_assert (!die->comdat_type_p);
8958
8959 if (sym == 0)
8960 return;
8961
8962 if (strncmp (sym, DIE_LABEL_PREFIX, sizeof (DIE_LABEL_PREFIX) - 1) == 0)
8963 /* We make these global, not weak; if the target doesn't support
8964 .linkonce, it doesn't support combining the sections, so debugging
8965 will break. */
8966 targetm.asm_out.globalize_label (asm_out_file, sym);
8967
8968 ASM_OUTPUT_LABEL (asm_out_file, sym);
8969 }
8970
8971 /* Return a new location list, given the begin and end range, and the
8972 expression. */
8973
8974 static inline dw_loc_list_ref
8975 new_loc_list (dw_loc_descr_ref expr, const char *begin, const char *end,
8976 const char *section)
8977 {
8978 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
8979
8980 retlist->begin = begin;
8981 retlist->begin_entry = NULL;
8982 retlist->end = end;
8983 retlist->expr = expr;
8984 retlist->section = section;
8985
8986 return retlist;
8987 }
8988
8989 /* Generate a new internal symbol for this location list node, if it
8990 hasn't got one yet. */
8991
8992 static inline void
8993 gen_llsym (dw_loc_list_ref list)
8994 {
8995 gcc_assert (!list->ll_symbol);
8996 list->ll_symbol = gen_internal_sym ("LLST");
8997 }
8998
8999 /* Output the location list given to us. */
9000
9001 static void
9002 output_loc_list (dw_loc_list_ref list_head)
9003 {
9004 dw_loc_list_ref curr = list_head;
9005
9006 if (list_head->emitted)
9007 return;
9008 list_head->emitted = true;
9009
9010 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
9011
9012 /* Walk the location list, and output each range + expression. */
9013 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
9014 {
9015 unsigned long size;
9016 /* Don't output an entry that starts and ends at the same address. */
9017 if (strcmp (curr->begin, curr->end) == 0 && !curr->force)
9018 continue;
9019 size = size_of_locs (curr->expr);
9020 /* If the expression is too large, drop it on the floor. We could
9021 perhaps put it into DW_TAG_dwarf_procedure and refer to that
9022 in the expression, but >= 64KB expressions for a single value
9023 in a single range are unlikely to be very useful. */
9024 if (size > 0xffff)
9025 continue;
9026 if (dwarf_split_debug_info)
9027 {
9028 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
9029 "Location list start/length entry (%s)",
9030 list_head->ll_symbol);
9031 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
9032 "Location list range start index (%s)",
9033 curr->begin);
9034 /* The length field is 4 bytes. If we ever need to support
9035 an 8-byte length, we can add a new DW_LLE code or fall back
9036 to DW_LLE_GNU_start_end_entry. */
9037 dw2_asm_output_delta (4, curr->end, curr->begin,
9038 "Location list range length (%s)",
9039 list_head->ll_symbol);
9040 }
9041 else if (!have_multiple_function_sections)
9042 {
9043 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
9044 "Location list begin address (%s)",
9045 list_head->ll_symbol);
9046 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
9047 "Location list end address (%s)",
9048 list_head->ll_symbol);
9049 }
9050 else
9051 {
9052 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
9053 "Location list begin address (%s)",
9054 list_head->ll_symbol);
9055 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
9056 "Location list end address (%s)",
9057 list_head->ll_symbol);
9058 }
9059
9060 /* Output the block length for this list of location operations. */
9061 gcc_assert (size <= 0xffff);
9062 dw2_asm_output_data (2, size, "%s", "Location expression size");
9063
9064 output_loc_sequence (curr->expr, -1);
9065 }
9066
9067 if (dwarf_split_debug_info)
9068 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
9069 "Location list terminator (%s)",
9070 list_head->ll_symbol);
9071 else
9072 {
9073 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
9074 "Location list terminator begin (%s)",
9075 list_head->ll_symbol);
9076 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
9077 "Location list terminator end (%s)",
9078 list_head->ll_symbol);
9079 }
9080 }
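
/* For illustration only: in the common case (no split debug info and a
   single function section), one entry of the list emitted above, for a
   made-up range [.LVL1, .LVL2) whose expression is the one-byte
   DW_OP_reg0, looks like this with 8-byte addresses:

       8 bytes   .LVL1 - <section label>   begin address offset
       8 bytes   .LVL2 - <section label>   end address offset
       2 bytes   1                         location expression size
       1 byte    DW_OP_reg0                the location expression

   and the whole list ends with the pair of zero addresses written by
   the terminator code above.  The label names here are invented; the
   real ones come from the var-location notes.  */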
9081
9082 /* Output a range_list offset into the debug_ranges section. Emit a
9083 relocated reference if val_entry is NULL; otherwise, emit an
9084 indirect reference. */
9085
9086 static void
9087 output_range_list_offset (dw_attr_node *a)
9088 {
9089 const char *name = dwarf_attr_name (a->dw_attr);
9090
9091 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
9092 {
9093 char *p = strchr (ranges_section_label, '\0');
9094 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX, a->dw_attr_val.v.val_offset);
9095 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
9096 debug_ranges_section, "%s", name);
9097 *p = '\0';
9098 }
9099 else
9100 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
9101 "%s (offset from %s)", name, ranges_section_label);
9102 }
9103
9104 /* Output the offset into the debug_loc section. */
9105
9106 static void
9107 output_loc_list_offset (dw_attr_node *a)
9108 {
9109 char *sym = AT_loc_list (a)->ll_symbol;
9110
9111 gcc_assert (sym);
9112 if (dwarf_split_debug_info)
9113 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
9114 "%s", dwarf_attr_name (a->dw_attr));
9115 else
9116 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
9117 "%s", dwarf_attr_name (a->dw_attr));
9118 }
9119
9120 /* Output an attribute's index or value appropriately. */
9121
9122 static void
9123 output_attr_index_or_value (dw_attr_node *a)
9124 {
9125 const char *name = dwarf_attr_name (a->dw_attr);
9126
9127 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9128 {
9129 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
9130 return;
9131 }
9132 switch (AT_class (a))
9133 {
9134 case dw_val_class_addr:
9135 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
9136 break;
9137 case dw_val_class_high_pc:
9138 case dw_val_class_lbl_id:
9139 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
9140 break;
9141 case dw_val_class_loc_list:
9142 output_loc_list_offset (a);
9143 break;
9144 default:
9145 gcc_unreachable ();
9146 }
9147 }
9148
9149 /* Output a type signature. */
9150
9151 static inline void
9152 output_signature (const char *sig, const char *name)
9153 {
9154 int i;
9155
9156 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
9157 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
9158 }
9159
9160 /* Output a discriminant value. */
9161
9162 static inline void
9163 output_discr_value (dw_discr_value *discr_value, const char *name)
9164 {
9165 if (discr_value->pos)
9166 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
9167 else
9168 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
9169 }
9170
9171 /* Output the DIE and its attributes. Called recursively to generate
9172 the definitions of each child DIE. */
9173
9174 static void
9175 output_die (dw_die_ref die)
9176 {
9177 dw_attr_node *a;
9178 dw_die_ref c;
9179 unsigned long size;
9180 unsigned ix;
9181
9182 /* If someone in another CU might refer to us, set up a symbol for
9183 them to point to. */
9184 if (! die->comdat_type_p && die->die_id.die_symbol)
9185 output_die_symbol (die);
9186
9187 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
9188 (unsigned long)die->die_offset,
9189 dwarf_tag_name (die->die_tag));
9190
9191 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9192 {
9193 const char *name = dwarf_attr_name (a->dw_attr);
9194
9195 switch (AT_class (a))
9196 {
9197 case dw_val_class_addr:
9198 output_attr_index_or_value (a);
9199 break;
9200
9201 case dw_val_class_offset:
9202 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
9203 "%s", name);
9204 break;
9205
9206 case dw_val_class_range_list:
9207 output_range_list_offset (a);
9208 break;
9209
9210 case dw_val_class_loc:
9211 size = size_of_locs (AT_loc (a));
9212
9213 /* Output the block length for this list of location operations. */
9214 if (dwarf_version >= 4)
9215 dw2_asm_output_data_uleb128 (size, "%s", name);
9216 else
9217 dw2_asm_output_data (constant_size (size), size, "%s", name);
9218
9219 output_loc_sequence (AT_loc (a), -1);
9220 break;
9221
9222 case dw_val_class_const:
9223 /* ??? It would be slightly more efficient to use a scheme like is
9224 used for unsigned constants below, but gdb 4.x does not sign
9225 extend. Gdb 5.x does sign extend. */
9226 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
9227 break;
9228
9229 case dw_val_class_unsigned_const:
9230 {
9231 int csize = constant_size (AT_unsigned (a));
9232 if (dwarf_version == 3
9233 && a->dw_attr == DW_AT_data_member_location
9234 && csize >= 4)
9235 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
9236 else
9237 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
9238 }
9239 break;
9240
9241 case dw_val_class_const_double:
9242 {
9243 unsigned HOST_WIDE_INT first, second;
9244
9245 if (HOST_BITS_PER_WIDE_INT >= 64)
9246 dw2_asm_output_data (1,
9247 HOST_BITS_PER_DOUBLE_INT
9248 / HOST_BITS_PER_CHAR,
9249 NULL);
9250
9251 if (WORDS_BIG_ENDIAN)
9252 {
9253 first = a->dw_attr_val.v.val_double.high;
9254 second = a->dw_attr_val.v.val_double.low;
9255 }
9256 else
9257 {
9258 first = a->dw_attr_val.v.val_double.low;
9259 second = a->dw_attr_val.v.val_double.high;
9260 }
9261
9262 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
9263 first, "%s", name);
9264 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
9265 second, NULL);
9266 }
9267 break;
9268
9269 case dw_val_class_wide_int:
9270 {
9271 int i;
9272 int len = get_full_len (*a->dw_attr_val.v.val_wide);
9273 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
9274 if (len * HOST_BITS_PER_WIDE_INT > 64)
9275 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide) * l,
9276 NULL);
9277
9278 if (WORDS_BIG_ENDIAN)
9279 for (i = len - 1; i >= 0; --i)
9280 {
9281 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
9282 "%s", name);
9283 name = "";
9284 }
9285 else
9286 for (i = 0; i < len; ++i)
9287 {
9288 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
9289 "%s", name);
9290 name = "";
9291 }
9292 }
9293 break;
9294
9295 case dw_val_class_vec:
9296 {
9297 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
9298 unsigned int len = a->dw_attr_val.v.val_vec.length;
9299 unsigned int i;
9300 unsigned char *p;
9301
9302 dw2_asm_output_data (constant_size (len * elt_size),
9303 len * elt_size, "%s", name);
9304 if (elt_size > sizeof (HOST_WIDE_INT))
9305 {
9306 elt_size /= 2;
9307 len *= 2;
9308 }
9309 for (i = 0, p = a->dw_attr_val.v.val_vec.array;
9310 i < len;
9311 i++, p += elt_size)
9312 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
9313 "fp or vector constant word %u", i);
9314 break;
9315 }
9316
9317 case dw_val_class_flag:
9318 if (dwarf_version >= 4)
9319 {
9320 /* Currently all add_AT_flag calls pass in 1 as last argument,
9321 so DW_FORM_flag_present can be used. If that ever changes,
9322 we'll need to use DW_FORM_flag and have some optimization
9323 in build_abbrev_table that will change those to
9324 DW_FORM_flag_present if it is set to 1 in all DIEs using
9325 the same abbrev entry. */
9326 gcc_assert (AT_flag (a) == 1);
9327 if (flag_debug_asm)
9328 fprintf (asm_out_file, "\t\t\t%s %s\n",
9329 ASM_COMMENT_START, name);
9330 break;
9331 }
9332 dw2_asm_output_data (1, AT_flag (a), "%s", name);
9333 break;
9334
9335 case dw_val_class_loc_list:
9336 output_attr_index_or_value (a);
9337 break;
9338
9339 case dw_val_class_die_ref:
9340 if (AT_ref_external (a))
9341 {
9342 if (AT_ref (a)->comdat_type_p)
9343 {
9344 comdat_type_node *type_node =
9345 AT_ref (a)->die_id.die_type_node;
9346
9347 gcc_assert (type_node);
9348 output_signature (type_node->signature, name);
9349 }
9350 else
9351 {
9352 const char *sym = AT_ref (a)->die_id.die_symbol;
9353 int size;
9354
9355 gcc_assert (sym);
9356 /* In DWARF2, DW_FORM_ref_addr is sized by target address
9357 length, whereas in DWARF3 it's always sized as an
9358 offset. */
9359 if (dwarf_version == 2)
9360 size = DWARF2_ADDR_SIZE;
9361 else
9362 size = DWARF_OFFSET_SIZE;
9363 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
9364 name);
9365 }
9366 }
9367 else
9368 {
9369 gcc_assert (AT_ref (a)->die_offset);
9370 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
9371 "%s", name);
9372 }
9373 break;
9374
9375 case dw_val_class_fde_ref:
9376 {
9377 char l1[20];
9378
9379 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
9380 a->dw_attr_val.v.val_fde_index * 2);
9381 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
9382 "%s", name);
9383 }
9384 break;
9385
9386 case dw_val_class_vms_delta:
9387 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
9388 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
9389 AT_vms_delta2 (a), AT_vms_delta1 (a),
9390 "%s", name);
9391 #else
9392 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
9393 AT_vms_delta2 (a), AT_vms_delta1 (a),
9394 "%s", name);
9395 #endif
9396 break;
9397
9398 case dw_val_class_lbl_id:
9399 output_attr_index_or_value (a);
9400 break;
9401
9402 case dw_val_class_lineptr:
9403 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
9404 debug_line_section, "%s", name);
9405 break;
9406
9407 case dw_val_class_macptr:
9408 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
9409 debug_macinfo_section, "%s", name);
9410 break;
9411
9412 case dw_val_class_str:
9413 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
9414 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
9415 a->dw_attr_val.v.val_str->label,
9416 debug_str_section,
9417 "%s: \"%s\"", name, AT_string (a));
9418 else if (a->dw_attr_val.v.val_str->form == DW_FORM_GNU_str_index)
9419 dw2_asm_output_data_uleb128 (AT_index (a),
9420 "%s: \"%s\"", name, AT_string (a));
9421 else
9422 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
9423 break;
9424
9425 case dw_val_class_file:
9426 {
9427 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
9428
9429 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
9430 a->dw_attr_val.v.val_file->filename);
9431 break;
9432 }
9433
9434 case dw_val_class_data8:
9435 {
9436 int i;
9437
9438 for (i = 0; i < 8; i++)
9439 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
9440 i == 0 ? "%s" : NULL, name);
9441 break;
9442 }
9443
9444 case dw_val_class_high_pc:
9445 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
9446 get_AT_low_pc (die), "DW_AT_high_pc");
9447 break;
9448
9449 case dw_val_class_discr_value:
9450 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
9451 break;
9452
9453 case dw_val_class_discr_list:
9454 {
9455 dw_discr_list_ref list = AT_discr_list (a);
9456 const int size = size_of_discr_list (list);
9457
9458 /* This is a block, so output its length first. */
9459 dw2_asm_output_data (constant_size (size), size,
9460 "%s: block size", name);
9461
9462 for (; list != NULL; list = list->dw_discr_next)
9463 {
9464 /* One byte for the discriminant value descriptor, and then as
9465 many LEB128 numbers as required. */
9466 if (list->dw_discr_range)
9467 dw2_asm_output_data (1, DW_DSC_range,
9468 "%s: DW_DSC_range", name);
9469 else
9470 dw2_asm_output_data (1, DW_DSC_label,
9471 "%s: DW_DSC_label", name);
9472
9473 output_discr_value (&list->dw_discr_lower_bound, name);
9474 if (list->dw_discr_range)
9475 output_discr_value (&list->dw_discr_upper_bound, name);
9476 }
9477 break;
9478 }
9479
9480 default:
9481 gcc_unreachable ();
9482 }
9483 }
9484
9485 FOR_EACH_CHILD (die, c, output_die (c));
9486
9487 /* Add null byte to terminate sibling list. */
9488 if (die->die_child != NULL)
9489 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
9490 (unsigned long) die->die_offset);
9491 }
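
/* For illustration only: a DIE that uses the hypothetical abbreviation
   sketched after output_abbrev_section (DW_AT_name/DW_FORM_strp,
   DW_AT_type/DW_FORM_ref4, no children) is encoded as:

       uleb128                   2   abbrev code
       DWARF_OFFSET_SIZE bytes       offset of the name in .debug_str
       4 bytes                       CU-relative offset of the type DIE

   A DIE that does have children is followed by the encodings of those
   children and then by the single 0 byte emitted above to terminate
   the sibling list.  */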
9492
9493 /* Output the compilation unit that appears at the beginning of the
9494 .debug_info section, and precedes the DIE descriptions. */
9495
9496 static void
9497 output_compilation_unit_header (void)
9498 {
9499 /* We don't support actual DWARFv5 units yet; we just use some
9500 DWARFv5 draft DIE tags in DWARFv4 format. */
9501 int ver = dwarf_version < 5 ? dwarf_version : 4;
9502
9503 if (!XCOFF_DEBUGGING_INFO)
9504 {
9505 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
9506 dw2_asm_output_data (4, 0xffffffff,
9507 "Initial length escape value indicating 64-bit DWARF extension");
9508 dw2_asm_output_data (DWARF_OFFSET_SIZE,
9509 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
9510 "Length of Compilation Unit Info");
9511 }
9512
9513 dw2_asm_output_data (2, ver, "DWARF version number");
9514 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
9515 debug_abbrev_section,
9516 "Offset Into Abbrev. Section");
9517 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
9518 }
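
/* For illustration only: with 32-bit DWARF (4-byte offsets) the header
   emitted above occupies 11 bytes:

       4 bytes   unit length (not counting these 4 bytes)
       2 bytes   version
       4 bytes   offset of this unit's abbrevs in .debug_abbrev
       1 byte    address size

   which is why DIE offsets are computed starting from
   DWARF_COMPILE_UNIT_HEADER_SIZE.  */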
9519
9520 /* Output the compilation unit DIE and its children. */
9521
9522 static void
9523 output_comp_unit (dw_die_ref die, int output_if_empty)
9524 {
9525 const char *secname, *oldsym;
9526 char *tmp;
9527
9528 /* Unless we are outputting the main CU, we may throw away empty ones. */
9529 if (!output_if_empty && die->die_child == NULL)
9530 return;
9531
9532 /* Even if there are no children of this DIE, we must output the information
9533 about the compilation unit. Otherwise, on an empty translation unit, we
9534 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
9535 will then complain when examining the file. First mark all the DIEs in
9536 this CU so we know which get local refs. */
9537 mark_dies (die);
9538
9539 external_ref_hash_type *extern_map = optimize_external_refs (die);
9540
9541 build_abbrev_table (die, extern_map);
9542
9543 delete extern_map;
9544
9545 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
9546 next_die_offset = DWARF_COMPILE_UNIT_HEADER_SIZE;
9547 calc_die_sizes (die);
9548
9549 oldsym = die->die_id.die_symbol;
9550 if (oldsym)
9551 {
9552 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
9553
9554 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
9555 secname = tmp;
9556 die->die_id.die_symbol = NULL;
9557 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
9558 }
9559 else
9560 {
9561 switch_to_section (debug_info_section);
9562 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
9563 info_section_emitted = true;
9564 }
9565
9566 /* Output debugging information. */
9567 output_compilation_unit_header ();
9568 output_die (die);
9569
9570 /* Leave the marks on the main CU, so we can check them in
9571 output_pubnames. */
9572 if (oldsym)
9573 {
9574 unmark_dies (die);
9575 die->die_id.die_symbol = oldsym;
9576 }
9577 }
9578
9579 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
9580 and .debug_pubtypes. This is configured per-target, but can be
9581 overridden by the -gpubnames or -gno-pubnames options. */
9582
9583 static inline bool
9584 want_pubnames (void)
9585 {
9586 if (debug_info_level <= DINFO_LEVEL_TERSE)
9587 return false;
9588 if (debug_generate_pub_sections != -1)
9589 return debug_generate_pub_sections;
9590 return targetm.want_debug_pub_sections;
9591 }
9592
9593 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
9594
9595 static void
9596 add_AT_pubnames (dw_die_ref die)
9597 {
9598 if (want_pubnames ())
9599 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
9600 }
9601
9602 /* Add a string attribute value to a skeleton DIE. */
9603
9604 static inline void
9605 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
9606 const char *str)
9607 {
9608 dw_attr_node attr;
9609 struct indirect_string_node *node;
9610
9611 if (! skeleton_debug_str_hash)
9612 skeleton_debug_str_hash
9613 = hash_table<indirect_string_hasher>::create_ggc (10);
9614
9615 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
9616 find_string_form (node);
9617 if (node->form == DW_FORM_GNU_str_index)
9618 node->form = DW_FORM_strp;
9619
9620 attr.dw_attr = attr_kind;
9621 attr.dw_attr_val.val_class = dw_val_class_str;
9622 attr.dw_attr_val.val_entry = NULL;
9623 attr.dw_attr_val.v.val_str = node;
9624 add_dwarf_attr (die, &attr);
9625 }
9626
9627 /* Helper function to generate top-level dies for skeleton debug_info and
9628 debug_types. */
9629
9630 static void
9631 add_top_level_skeleton_die_attrs (dw_die_ref die)
9632 {
9633 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
9634 const char *comp_dir = comp_dir_string ();
9635
9636 add_skeleton_AT_string (die, DW_AT_GNU_dwo_name, dwo_file_name);
9637 if (comp_dir != NULL)
9638 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
9639 add_AT_pubnames (die);
9640 add_AT_lineptr (die, DW_AT_GNU_addr_base, debug_addr_section_label);
9641 }
9642
9643 /* Output skeleton debug sections that point to the dwo file. */
9644
9645 static void
9646 output_skeleton_debug_sections (dw_die_ref comp_unit)
9647 {
9648 /* We don't support actual DWARFv5 units yet; we just use some
9649 DWARFv5 draft DIE tags in DWARFv4 format. */
9650 int ver = dwarf_version < 5 ? dwarf_version : 4;
9651
9652 /* These attributes will be found in the full debug_info section. */
9653 remove_AT (comp_unit, DW_AT_producer);
9654 remove_AT (comp_unit, DW_AT_language);
9655
9656 switch_to_section (debug_skeleton_info_section);
9657 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
9658
9659 /* Produce the skeleton compilation-unit header. This one differs enough from
9660 a normal CU header that it's better not to call
9661 output_compilation_unit_header. */
9662 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
9663 dw2_asm_output_data (4, 0xffffffff,
9664 "Initial length escape value indicating 64-bit DWARF extension");
9665
9666 dw2_asm_output_data (DWARF_OFFSET_SIZE,
9667 DWARF_COMPILE_UNIT_HEADER_SIZE
9668 - DWARF_INITIAL_LENGTH_SIZE
9669 + size_of_die (comp_unit),
9670 "Length of Compilation Unit Info");
9671 dw2_asm_output_data (2, ver, "DWARF version number");
9672 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
9673 debug_abbrev_section,
9674 "Offset Into Abbrev. Section");
9675 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
9676
9677 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
9678 output_die (comp_unit);
9679
9680 /* Build the skeleton debug_abbrev section. */
9681 switch_to_section (debug_skeleton_abbrev_section);
9682 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
9683
9684 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
9685
9686 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
9687 }
9688
9689 /* Output a comdat type unit DIE and its children. */
9690
9691 static void
9692 output_comdat_type_unit (comdat_type_node *node)
9693 {
9694 const char *secname;
9695 char *tmp;
9696 int i;
9697 #if defined (OBJECT_FORMAT_ELF)
9698 tree comdat_key;
9699 #endif
9700
9701 /* First mark all the DIEs in this CU so we know which get local refs. */
9702 mark_dies (node->root_die);
9703
9704 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
9705
9706 build_abbrev_table (node->root_die, extern_map);
9707
9708 delete extern_map;
9709 extern_map = NULL;
9710
9711 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
9712 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
9713 calc_die_sizes (node->root_die);
9714
9715 #if defined (OBJECT_FORMAT_ELF)
9716 if (!dwarf_split_debug_info)
9717 secname = ".debug_types";
9718 else
9719 secname = ".debug_types.dwo";
9720
9721 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
9722 sprintf (tmp, "wt.");
9723 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
9724 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
9725 comdat_key = get_identifier (tmp);
9726 targetm.asm_out.named_section (secname,
9727 SECTION_DEBUG | SECTION_LINKONCE,
9728 comdat_key);
9729 #else
9730 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
9731 sprintf (tmp, ".gnu.linkonce.wt.");
9732 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
9733 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
9734 secname = tmp;
9735 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
9736 #endif
9737
9738 /* Output debugging information. */
9739 output_compilation_unit_header ();
9740 output_signature (node->signature, "Type Signature");
9741 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
9742 "Offset to Type DIE");
9743 output_die (node->root_die);
9744
9745 unmark_dies (node->root_die);
9746 }
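
/* For illustration only: for made-up signature bytes
   01 23 45 67 89 ab cd ef, the ELF path above puts the unit into
   .debug_types (or .debug_types.dwo) grouped under the comdat key
   "wt.0123456789abcdef", while the non-ELF path emits it into a section
   named ".gnu.linkonce.wt.0123456789abcdef" so the linker can still
   merge duplicate type units by name.  */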
9747
9748 /* Return the DWARF2/3 pubname associated with a decl. */
9749
9750 static const char *
9751 dwarf2_name (tree decl, int scope)
9752 {
9753 if (DECL_NAMELESS (decl))
9754 return NULL;
9755 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
9756 }
9757
9758 /* Add a new entry to .debug_pubnames if appropriate. */
9759
9760 static void
9761 add_pubname_string (const char *str, dw_die_ref die)
9762 {
9763 pubname_entry e;
9764
9765 e.die = die;
9766 e.name = xstrdup (str);
9767 vec_safe_push (pubname_table, e);
9768 }
9769
9770 static void
9771 add_pubname (tree decl, dw_die_ref die)
9772 {
9773 if (!want_pubnames ())
9774 return;
9775
9776 /* Don't add items to the table when we expect that the consumer will have
9777 just read the enclosing die. For example, if the consumer is looking at a
9778 class_member, it will either be inside the class already, or will have just
9779 looked up the class to find the member. Either way, searching the class is
9780 faster than searching the index. */
9781 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
9782 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
9783 {
9784 const char *name = dwarf2_name (decl, 1);
9785
9786 if (name)
9787 add_pubname_string (name, die);
9788 }
9789 }
9790
9791 /* Add an enumerator to the pubnames section. */
9792
9793 static void
9794 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
9795 {
9796 pubname_entry e;
9797
9798 gcc_assert (scope_name);
9799 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
9800 e.die = die;
9801 vec_safe_push (pubname_table, e);
9802 }
9803
9804 /* Add a new entry to .debug_pubtypes if appropriate. */
9805
9806 static void
9807 add_pubtype (tree decl, dw_die_ref die)
9808 {
9809 pubname_entry e;
9810
9811 if (!want_pubnames ())
9812 return;
9813
9814 if ((TREE_PUBLIC (decl)
9815 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
9816 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
9817 {
9818 tree scope = NULL;
9819 const char *scope_name = "";
9820 const char *sep = is_cxx () ? "::" : ".";
9821 const char *name;
9822
9823 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
9824 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
9825 {
9826 scope_name = lang_hooks.dwarf_name (scope, 1);
9827 if (scope_name != NULL && scope_name[0] != '\0')
9828 scope_name = concat (scope_name, sep, NULL);
9829 else
9830 scope_name = "";
9831 }
9832
9833 if (TYPE_P (decl))
9834 name = type_tag (decl);
9835 else
9836 name = lang_hooks.dwarf_name (decl, 1);
9837
9838 /* If we don't have a name for the type, there's no point in adding
9839 it to the table. */
9840 if (name != NULL && name[0] != '\0')
9841 {
9842 e.die = die;
9843 e.name = concat (scope_name, name, NULL);
9844 vec_safe_push (pubtype_table, e);
9845 }
9846
9847 /* Although it might be more consistent to add the pubinfo for the
9848 enumerators as their dies are created, they should only be added if the
9849 enum type meets the criteria above. So rather than re-check the parent
9850 enum type whenever an enumerator die is created, just output them all
9851 here. This isn't protected by the name conditional because anonymous
9852 enums don't have names. */
9853 if (die->die_tag == DW_TAG_enumeration_type)
9854 {
9855 dw_die_ref c;
9856
9857 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
9858 }
9859 }
9860 }
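
/* For illustration only: for a C++ struct S defined inside namespace N,
   the code above computes scope_name "N::" and name "S", so the
   .debug_pubtypes entry is recorded as "N::S"; for a non-C++ language
   the separator is "." instead.  Enumerators of a qualifying enum get
   entries of the form "<scope><enumerator name>" via
   add_enumerator_pubname.  */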
9861
9862 /* Output a single entry in the pubnames table. */
9863
9864 static void
9865 output_pubname (dw_offset die_offset, pubname_entry *entry)
9866 {
9867 dw_die_ref die = entry->die;
9868 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
9869
9870 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
9871
9872 if (debug_generate_pub_sections == 2)
9873 {
9874 /* This logic follows gdb's method for determining the value of the flag
9875 byte. */
9876 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
9877 switch (die->die_tag)
9878 {
9879 case DW_TAG_typedef:
9880 case DW_TAG_base_type:
9881 case DW_TAG_subrange_type:
9882 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
9883 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
9884 break;
9885 case DW_TAG_enumerator:
9886 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
9887 GDB_INDEX_SYMBOL_KIND_VARIABLE);
9888 if (!is_cxx () && !is_java ())
9889 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
9890 break;
9891 case DW_TAG_subprogram:
9892 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
9893 GDB_INDEX_SYMBOL_KIND_FUNCTION);
9894 if (!is_ada ())
9895 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
9896 break;
9897 case DW_TAG_constant:
9898 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
9899 GDB_INDEX_SYMBOL_KIND_VARIABLE);
9900 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
9901 break;
9902 case DW_TAG_variable:
9903 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
9904 GDB_INDEX_SYMBOL_KIND_VARIABLE);
9905 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
9906 break;
9907 case DW_TAG_namespace:
9908 case DW_TAG_imported_declaration:
9909 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
9910 break;
9911 case DW_TAG_class_type:
9912 case DW_TAG_interface_type:
9913 case DW_TAG_structure_type:
9914 case DW_TAG_union_type:
9915 case DW_TAG_enumeration_type:
9916 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
9917 if (!is_cxx () && !is_java ())
9918 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
9919 break;
9920 default:
9921 /* An unusual tag. Leave the flag-byte empty. */
9922 break;
9923 }
9924 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
9925 "GDB-index flags");
9926 }
9927
9928 dw2_asm_output_nstring (entry->name, -1, "external name");
9929 }
9930
9931
9932 /* Output the public names table used to speed up access to externally
9933 visible names; or the public types table used to find type definitions. */
9934
9935 static void
9936 output_pubnames (vec<pubname_entry, va_gc> *names)
9937 {
9938 unsigned i;
9939 unsigned long pubnames_length = size_of_pubnames (names);
9940 pubname_entry *pub;
9941
9942 if (!XCOFF_DEBUGGING_INFO)
9943 {
9944 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
9945 dw2_asm_output_data (4, 0xffffffff,
9946 "Initial length escape value indicating 64-bit DWARF extension");
9947 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
9948 "Pub Info Length");
9949 }
9950
9951 /* Version number for pubnames/pubtypes is independent of dwarf version. */
9952 dw2_asm_output_data (2, 2, "DWARF Version");
9953
9954 if (dwarf_split_debug_info)
9955 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
9956 debug_skeleton_info_section,
9957 "Offset of Compilation Unit Info");
9958 else
9959 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
9960 debug_info_section,
9961 "Offset of Compilation Unit Info");
9962 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
9963 "Compilation Unit Length");
9964
9965 FOR_EACH_VEC_ELT (*names, i, pub)
9966 {
9967 if (include_pubname_in_output (names, pub))
9968 {
9969 dw_offset die_offset = pub->die->die_offset;
9970
9971 /* We shouldn't see pubnames for DIEs outside of the main CU. */
9972 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
9973 gcc_assert (pub->die->die_mark);
9974
9975 /* If we're putting types in their own .debug_types sections,
9976 the .debug_pubtypes table will still point to the compile
9977 unit (not the type unit), so we want to use the offset of
9978 the skeleton DIE (if there is one). */
9979 if (pub->die->comdat_type_p && names == pubtype_table)
9980 {
9981 comdat_type_node *type_node = pub->die->die_id.die_type_node;
9982
9983 if (type_node != NULL)
9984 die_offset = (type_node->skeleton_die != NULL
9985 ? type_node->skeleton_die->die_offset
9986 : comp_unit_die ()->die_offset);
9987 }
9988
9989 output_pubname (die_offset, pub);
9990 }
9991 }
9992
9993 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
9994 }
9995
9996 /* Output public names and types tables if necessary. */
9997
9998 static void
9999 output_pubtables (void)
10000 {
10001 if (!want_pubnames () || !info_section_emitted)
10002 return;
10003
10004 switch_to_section (debug_pubnames_section);
10005 output_pubnames (pubname_table);
10006 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
10007 It shouldn't hurt to emit it always, since pure DWARF2 consumers
10008 simply won't look for the section. */
10009 switch_to_section (debug_pubtypes_section);
10010 output_pubnames (pubtype_table);
10011 }
10012
10013
10014 /* Output the information that goes into the .debug_aranges table.
10015 Namely, define the beginning and ending address range of the
10016 text section generated for this compilation unit. */
10017
10018 static void
10019 output_aranges (void)
10020 {
10021 unsigned i;
10022 unsigned long aranges_length = size_of_aranges ();
10023
10024 if (!XCOFF_DEBUGGING_INFO)
10025 {
10026 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10027 dw2_asm_output_data (4, 0xffffffff,
10028 "Initial length escape value indicating 64-bit DWARF extension");
10029 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
10030 "Length of Address Ranges Info");
10031 }
10032
10033 /* Version number for aranges is still 2, even up to DWARF5. */
10034 dw2_asm_output_data (2, 2, "DWARF Version");
10035 if (dwarf_split_debug_info)
10036 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
10037 debug_skeleton_info_section,
10038 "Offset of Compilation Unit Info");
10039 else
10040 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
10041 debug_info_section,
10042 "Offset of Compilation Unit Info");
10043 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
10044 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
10045
10046 /* We need to align to twice the pointer size here. */
10047 if (DWARF_ARANGES_PAD_SIZE)
10048 {
10049 /* Pad using 2-byte words so that the padding is correct for any
10050 pointer size. */
10051 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
10052 2 * DWARF2_ADDR_SIZE);
10053 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
10054 dw2_asm_output_data (2, 0, NULL);
10055 }
10056
10057 /* We must not output these entries if the sections were not used;
10058 otherwise the length will be 0 and the address may end up as 0
10059 if the section is discarded by ld --gc-sections, leaving an
10060 invalid (0, 0) entry that can be confused with the
10061 terminator. */
10062 if (text_section_used)
10063 {
10064 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
10065 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
10066 text_section_label, "Length");
10067 }
10068 if (cold_text_section_used)
10069 {
10070 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
10071 "Address");
10072 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
10073 cold_text_section_label, "Length");
10074 }
10075
10076 if (have_multiple_function_sections)
10077 {
10078 unsigned fde_idx;
10079 dw_fde_ref fde;
10080
10081 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
10082 {
10083 if (DECL_IGNORED_P (fde->decl))
10084 continue;
10085 if (!fde->in_std_section)
10086 {
10087 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
10088 "Address");
10089 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
10090 fde->dw_fde_begin, "Length");
10091 }
10092 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
10093 {
10094 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
10095 "Address");
10096 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
10097 fde->dw_fde_second_begin, "Length");
10098 }
10099 }
10100 }
10101
10102 /* Output the terminator words. */
10103 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
10104 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
10105 }
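
/* For illustration only: with 32-bit DWARF and 4-byte addresses the
   aranges header emitted above occupies 4 (length) + 2 (version)
   + 4 (CU offset) + 1 (address size) + 1 (segment size) = 12 bytes.
   The address/length tuples must start at a multiple of
   2 * DWARF2_ADDR_SIZE = 8, so DWARF_ARANGES_PAD_SIZE is 4 and two
   2-byte zero words of padding precede the first tuple.  */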
10106
10107 /* Add a new entry to .debug_ranges. Return the offset at which it
10108 was placed. */
10109
10110 static unsigned int
10111 add_ranges_num (int num)
10112 {
10113 unsigned int in_use = ranges_table_in_use;
10114
10115 if (in_use == ranges_table_allocated)
10116 {
10117 ranges_table_allocated += RANGES_TABLE_INCREMENT;
10118 ranges_table = GGC_RESIZEVEC (dw_ranges, ranges_table,
10119 ranges_table_allocated);
10120 memset (ranges_table + ranges_table_in_use, 0,
10121 RANGES_TABLE_INCREMENT * sizeof (dw_ranges));
10122 }
10123
10124 ranges_table[in_use].num = num;
10125 ranges_table_in_use = in_use + 1;
10126
10127 return in_use * 2 * DWARF2_ADDR_SIZE;
10128 }
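
/* For illustration only: each .debug_ranges slot ultimately becomes a
   pair of addresses, so the offset returned above for slot N is
   N * 2 * DWARF2_ADDR_SIZE; e.g. with 8-byte addresses the third slot
   (N == 2) is referenced at offset 32 within the section.  */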
10129
10130 /* Add a new entry to .debug_ranges corresponding to a block, or a
10131 range terminator if BLOCK is NULL. */
10132
10133 static unsigned int
10134 add_ranges (const_tree block)
10135 {
10136 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0);
10137 }
10138
10139 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
10140 When using dwarf_split_debug_info, address attributes in dies destined
10141 for the final executable should be direct references--setting the
10142 parameter force_direct ensures this behavior. */
10143
10144 static void
10145 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
10146 bool *added, bool force_direct)
10147 {
10148 unsigned int in_use = ranges_by_label_in_use;
10149 unsigned int offset;
10150
10151 if (in_use == ranges_by_label_allocated)
10152 {
10153 ranges_by_label_allocated += RANGES_TABLE_INCREMENT;
10154 ranges_by_label = GGC_RESIZEVEC (dw_ranges_by_label, ranges_by_label,
10155 ranges_by_label_allocated);
10156 memset (ranges_by_label + ranges_by_label_in_use, 0,
10157 RANGES_TABLE_INCREMENT * sizeof (dw_ranges_by_label));
10158 }
10159
10160 ranges_by_label[in_use].begin = begin;
10161 ranges_by_label[in_use].end = end;
10162 ranges_by_label_in_use = in_use + 1;
10163
10164 offset = add_ranges_num (-(int)in_use - 1);
10165 if (!*added)
10166 {
10167 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
10168 *added = true;
10169 }
10170 }
10171
10172 static void
10173 output_ranges (void)
10174 {
10175 unsigned i;
10176 static const char *const start_fmt = "Offset %#x";
10177 const char *fmt = start_fmt;
10178
10179 for (i = 0; i < ranges_table_in_use; i++)
10180 {
10181 int block_num = ranges_table[i].num;
10182
10183 if (block_num > 0)
10184 {
10185 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
10186 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
10187
10188 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
10189 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
10190
10191 /* If all code is in the text section, then the compilation
10192 unit base address defaults to DW_AT_low_pc, which is the
10193 base of the text section. */
10194 if (!have_multiple_function_sections)
10195 {
10196 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
10197 text_section_label,
10198 fmt, i * 2 * DWARF2_ADDR_SIZE);
10199 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
10200 text_section_label, NULL);
10201 }
10202
10203 /* Otherwise, the compilation unit base address is zero,
10204 which allows us to use absolute addresses, and not worry
10205 about whether the target supports cross-section
10206 arithmetic. */
10207 else
10208 {
10209 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
10210 fmt, i * 2 * DWARF2_ADDR_SIZE);
10211 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
10212 }
10213
10214 fmt = NULL;
10215 }
10216
10217 /* Negative block_num stands for an index into ranges_by_label. */
10218 else if (block_num < 0)
10219 {
10220 int lab_idx = - block_num - 1;
10221
10222 if (!have_multiple_function_sections)
10223 {
10224 gcc_unreachable ();
10225 #if 0
10226 /* If we ever use add_ranges_by_labels () for a single
10227 function section, all we have to do is take out
10228 the #if 0 above. */
10229 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
10230 ranges_by_label[lab_idx].begin,
10231 text_section_label,
10232 fmt, i * 2 * DWARF2_ADDR_SIZE);
10233 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
10234 ranges_by_label[lab_idx].end,
10235 text_section_label, NULL);
10236 #endif
10237 }
10238 else
10239 {
10240 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
10241 ranges_by_label[lab_idx].begin,
10242 fmt, i * 2 * DWARF2_ADDR_SIZE);
10243 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
10244 ranges_by_label[lab_idx].end,
10245 NULL);
10246 }
10247 }
10248 else
10249 {
10250 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
10251 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
10252 fmt = start_fmt;
10253 }
10254 }
10255 }
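
/* For illustration only: for a block in a single-text-section
   compilation, the entry emitted above is a pair of DWARF2_ADDR_SIZE
   deltas (block begin label - text section label, block end label -
   text section label), relying on the CU's DW_AT_low_pc as the base
   address; with multiple function sections the same entry is written
   as two absolute addresses instead.  Each range list is closed by the
   (0, 0) pair emitted in the final else branch.  */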
10256
10257 /* Data structure containing information about input files. */
10258 struct file_info
10259 {
10260 const char *path; /* Complete file name. */
10261 const char *fname; /* File name part. */
10262 int length; /* Length of entire string. */
10263 struct dwarf_file_data * file_idx; /* Index in input file table. */
10264 int dir_idx; /* Index in directory table. */
10265 };
10266
10267 /* Data structure containing information about directories with source
10268 files. */
10269 struct dir_info
10270 {
10271 const char *path; /* Path including directory name. */
10272 int length; /* Path length. */
10273 int prefix; /* Index of directory entry which is a prefix. */
10274 int count; /* Number of files in this directory. */
10275 int dir_idx; /* Index of directory used as base. */
10276 };
10277
10278 /* Callback function for file_info comparison. We sort by looking at
10279 the directories in the path. */
10280
10281 static int
10282 file_info_cmp (const void *p1, const void *p2)
10283 {
10284 const struct file_info *const s1 = (const struct file_info *) p1;
10285 const struct file_info *const s2 = (const struct file_info *) p2;
10286 const unsigned char *cp1;
10287 const unsigned char *cp2;
10288
10289 /* Take care of file names without directories. We need to make sure that
10290 we return consistent values to qsort since some implementations will
10291 get confused if we return the same value when identical operands are
10292 passed in opposite orders. So if neither has a directory, return 0 and
10293 otherwise return 1 or -1 depending on which one has the directory. */
10294 if ((s1->path == s1->fname || s2->path == s2->fname))
10295 return (s2->path == s2->fname) - (s1->path == s1->fname);
10296
10297 cp1 = (const unsigned char *) s1->path;
10298 cp2 = (const unsigned char *) s2->path;
10299
10300 while (1)
10301 {
10302 ++cp1;
10303 ++cp2;
10304 /* Reached the end of the first path? If so, handle like above. */
10305 if ((cp1 == (const unsigned char *) s1->fname)
10306 || (cp2 == (const unsigned char *) s2->fname))
10307 return ((cp2 == (const unsigned char *) s2->fname)
10308 - (cp1 == (const unsigned char *) s1->fname));
10309
10310 /* Character of current path component the same? */
10311 else if (*cp1 != *cp2)
10312 return *cp1 - *cp2;
10313 }
10314 }
10315
10316 struct file_name_acquire_data
10317 {
10318 struct file_info *files;
10319 int used_files;
10320 int max_files;
10321 };
10322
10323 /* Traversal function for the hash table. */
10324
10325 int
10326 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
10327 {
10328 struct dwarf_file_data *d = *slot;
10329 struct file_info *fi;
10330 const char *f;
10331
10332 gcc_assert (fnad->max_files >= d->emitted_number);
10333
10334 if (! d->emitted_number)
10335 return 1;
10336
10337 gcc_assert (fnad->max_files != fnad->used_files);
10338
10339 fi = fnad->files + fnad->used_files++;
10340
10341 /* Skip all leading "./". */
10342 f = d->filename;
10343 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
10344 f += 2;
10345
10346 /* Create a new array entry. */
10347 fi->path = f;
10348 fi->length = strlen (f);
10349 fi->file_idx = d;
10350
10351 /* Search for the file name part. */
10352 f = strrchr (f, DIR_SEPARATOR);
10353 #if defined (DIR_SEPARATOR_2)
10354 {
10355 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
10356
10357 if (g != NULL)
10358 {
10359 if (f == NULL || f < g)
10360 f = g;
10361 }
10362 }
10363 #endif
10364
10365 fi->fname = f == NULL ? fi->path : f + 1;
10366 return 1;
10367 }
10368
10369 /* Output the directory table and the file name table. We try to minimize
10370 the total amount of memory needed. A heuristic is used to avoid large
10371 slowdowns with many input files. */
10372
10373 static void
10374 output_file_names (void)
10375 {
10376 struct file_name_acquire_data fnad;
10377 int numfiles;
10378 struct file_info *files;
10379 struct dir_info *dirs;
10380 int *saved;
10381 int *savehere;
10382 int *backmap;
10383 int ndirs;
10384 int idx_offset;
10385 int i;
10386
10387 if (!last_emitted_file)
10388 {
10389 dw2_asm_output_data (1, 0, "End directory table");
10390 dw2_asm_output_data (1, 0, "End file name table");
10391 return;
10392 }
10393
10394 numfiles = last_emitted_file->emitted_number;
10395
10396 /* Allocate the various arrays we need. */
10397 files = XALLOCAVEC (struct file_info, numfiles);
10398 dirs = XALLOCAVEC (struct dir_info, numfiles);
10399
10400 fnad.files = files;
10401 fnad.used_files = 0;
10402 fnad.max_files = numfiles;
10403 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
10404 gcc_assert (fnad.used_files == fnad.max_files);
10405
10406 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
10407
10408 /* Find all the different directories used. */
10409 dirs[0].path = files[0].path;
10410 dirs[0].length = files[0].fname - files[0].path;
10411 dirs[0].prefix = -1;
10412 dirs[0].count = 1;
10413 dirs[0].dir_idx = 0;
10414 files[0].dir_idx = 0;
10415 ndirs = 1;
10416
10417 for (i = 1; i < numfiles; i++)
10418 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
10419 && memcmp (dirs[ndirs - 1].path, files[i].path,
10420 dirs[ndirs - 1].length) == 0)
10421 {
10422 /* Same directory as last entry. */
10423 files[i].dir_idx = ndirs - 1;
10424 ++dirs[ndirs - 1].count;
10425 }
10426 else
10427 {
10428 int j;
10429
10430 /* This is a new directory. */
10431 dirs[ndirs].path = files[i].path;
10432 dirs[ndirs].length = files[i].fname - files[i].path;
10433 dirs[ndirs].count = 1;
10434 dirs[ndirs].dir_idx = ndirs;
10435 files[i].dir_idx = ndirs;
10436
10437 /* Search for a prefix. */
10438 dirs[ndirs].prefix = -1;
10439 for (j = 0; j < ndirs; j++)
10440 if (dirs[j].length < dirs[ndirs].length
10441 && dirs[j].length > 1
10442 && (dirs[ndirs].prefix == -1
10443 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
10444 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
10445 dirs[ndirs].prefix = j;
10446
10447 ++ndirs;
10448 }
10449
10450 /* Now to the actual work. We have to find a subset of the directories
10451 which allows expressing the file names, via references to the directory
10452 table, with the fewest characters. We do not do an exhaustive search
10453 where we would have to check every combination of every single possible
10454 prefix. Instead we use a heuristic which provides nearly optimal results
10455 in most cases and is never far off. */
10456 saved = XALLOCAVEC (int, ndirs);
10457 savehere = XALLOCAVEC (int, ndirs);
10458
10459 memset (saved, '\0', ndirs * sizeof (saved[0]));
10460 for (i = 0; i < ndirs; i++)
10461 {
10462 int j;
10463 int total;
10464
10465 /* We can always save some space for the current directory. But this
10466 does not mean it will be enough to justify adding the directory. */
10467 savehere[i] = dirs[i].length;
10468 total = (savehere[i] - saved[i]) * dirs[i].count;
10469
10470 for (j = i + 1; j < ndirs; j++)
10471 {
10472 savehere[j] = 0;
10473 if (saved[j] < dirs[i].length)
10474 {
10475 /* Determine whether the dirs[i] path is a prefix of the
10476 dirs[j] path. */
10477 int k;
10478
10479 k = dirs[j].prefix;
10480 while (k != -1 && k != (int) i)
10481 k = dirs[k].prefix;
10482
10483 if (k == (int) i)
10484 {
10485 /* Yes it is. We can possibly save some memory by
10486 writing the filenames in dirs[j] relative to
10487 dirs[i]. */
10488 savehere[j] = dirs[i].length;
10489 total += (savehere[j] - saved[j]) * dirs[j].count;
10490 }
10491 }
10492 }
10493
10494 /* Check whether we can save enough to justify adding the dirs[i]
10495 directory. */
10496 if (total > dirs[i].length + 1)
10497 {
10498 /* It's worthwhile adding. */
10499 for (j = i; j < ndirs; j++)
10500 if (savehere[j] > 0)
10501 {
10502 /* Remember how much we saved for this directory so far. */
10503 saved[j] = savehere[j];
10504
10505 /* Remember the prefix directory. */
10506 dirs[j].dir_idx = i;
10507 }
10508 }
10509 }
10510
10511 /* Emit the directory name table. */
10512 idx_offset = dirs[0].length > 0 ? 1 : 0;
10513 for (i = 1 - idx_offset; i < ndirs; i++)
10514 dw2_asm_output_nstring (dirs[i].path,
10515 dirs[i].length
10516 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
10517 "Directory Entry: %#x", i + idx_offset);
10518
10519 dw2_asm_output_data (1, 0, "End directory table");
10520
10521 /* We have to emit them in the order of emitted_number since that's
10522 used in the debug info generation. To do this efficiently we
10523 generate a back-mapping of the indices first. */
10524 backmap = XALLOCAVEC (int, numfiles);
10525 for (i = 0; i < numfiles; i++)
10526 backmap[files[i].file_idx->emitted_number - 1] = i;
10527
10528 /* Now write all the file names. */
10529 for (i = 0; i < numfiles; i++)
10530 {
10531 int file_idx = backmap[i];
10532 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
10533
10534 #ifdef VMS_DEBUGGING_INFO
10535 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
10536
10537 /* Setting these fields can lead to debugger miscomparisons,
10538 but VMS Debug requires them to be set correctly. */
10539
10540 int ver;
10541 long long cdt;
10542 long siz;
10543 int maxfilelen = strlen (files[file_idx].path)
10544 + dirs[dir_idx].length
10545 + MAX_VMS_VERSION_LEN + 1;
10546 char *filebuf = XALLOCAVEC (char, maxfilelen);
10547
10548 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
10549 snprintf (filebuf, maxfilelen, "%s;%d",
10550 files[file_idx].path + dirs[dir_idx].length, ver);
10551
10552 dw2_asm_output_nstring
10553 (filebuf, -1, "File Entry: %#x", (unsigned) i + 1);
10554
10555 /* Include directory index. */
10556 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
10557
10558 /* Modification time. */
10559 dw2_asm_output_data_uleb128
10560 ((vms_file_stats_name (files[file_idx].path, &cdt, 0, 0, 0) == 0)
10561 ? cdt : 0,
10562 NULL);
10563
10564 /* File length in bytes. */
10565 dw2_asm_output_data_uleb128
10566 ((vms_file_stats_name (files[file_idx].path, 0, &siz, 0, 0) == 0)
10567 ? siz : 0,
10568 NULL);
10569 #else
10570 dw2_asm_output_nstring (files[file_idx].path + dirs[dir_idx].length, -1,
10571 "File Entry: %#x", (unsigned) i + 1);
10572
10573 /* Include directory index. */
10574 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
10575
10576 /* Modification time. */
10577 dw2_asm_output_data_uleb128 (0, NULL);
10578
10579 /* File length in bytes. */
10580 dw2_asm_output_data_uleb128 (0, NULL);
10581 #endif /* VMS_DEBUGGING_INFO */
10582 }
10583
10584 dw2_asm_output_data (1, 0, "End file name table");
10585 }
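
/* A made-up example of the heuristic above: suppose dirs[1] is
   "/usr/include/" (length 13) with 4 files and is a prefix of dirs[2]
   "/usr/include/sys/" (length 17) with 2 files, and nothing has been
   saved for either yet.  Considering i == 1 gives savehere[1] == 13 and
   savehere[2] == 13, so total == 13 * 4 + 13 * 2 == 78 > 13 + 1, and
   dirs[1] is worth emitting: both directories record saved[] == 13 and
   dir_idx == 1, so their file names are written relative to
   "/usr/include/".  When i == 2 is considered later, the extra saving
   (17 - 13) * 2 == 8 does not exceed 17 + 1, so dirs[2] is not used as
   a base of its own.  */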
10586
10587
10588 /* Output one line number table into the .debug_line section. */
10589
10590 static void
10591 output_one_line_info_table (dw_line_info_table *table)
10592 {
10593 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
10594 unsigned int current_line = 1;
10595 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
10596 dw_line_info_entry *ent;
10597 size_t i;
10598
10599 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
10600 {
10601 switch (ent->opcode)
10602 {
10603 case LI_set_address:
10604 /* ??? Unfortunately, we have little choice here currently, and
10605 must always use the most general form. GCC does not know the
10606 address delta itself, so we can't use DW_LNS_advance_pc. Many
10607 ports do have length attributes which will give an upper bound
10608 on the address range. We could perhaps use length attributes
10609 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
10610 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
10611
10612 /* This can handle any delta. This takes
10613 3+DWARF2_ADDR_SIZE bytes. */
10614 dw2_asm_output_data (1, 0, "set address %s", line_label);
10615 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
10616 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
10617 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
10618 break;
10619
10620 case LI_set_line:
10621 if (ent->val == current_line)
10622 {
10623 /* We still need to start a new row, so output a copy insn. */
10624 dw2_asm_output_data (1, DW_LNS_copy,
10625 "copy line %u", current_line);
10626 }
10627 else
10628 {
10629 int line_offset = ent->val - current_line;
10630 int line_delta = line_offset - DWARF_LINE_BASE;
10631
10632 current_line = ent->val;
10633 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
10634 {
10635 /* This can handle deltas from -10 to 234, using the current
10636 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
10637 This takes 1 byte. */
10638 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
10639 "line %u", current_line);
10640 }
10641 else
10642 {
10643 /* This can handle any delta. This takes at least 4 bytes,
10644 depending on the value being encoded. */
10645 dw2_asm_output_data (1, DW_LNS_advance_line,
10646 "advance to line %u", current_line);
10647 dw2_asm_output_data_sleb128 (line_offset, NULL);
10648 dw2_asm_output_data (1, DW_LNS_copy, NULL);
10649 }
10650 }
10651 break;
10652
10653 case LI_set_file:
10654 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
10655 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
10656 break;
10657
10658 case LI_set_column:
10659 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
10660 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
10661 break;
10662
10663 case LI_negate_stmt:
10664 current_is_stmt = !current_is_stmt;
10665 dw2_asm_output_data (1, DW_LNS_negate_stmt,
10666 "is_stmt %d", current_is_stmt);
10667 break;
10668
10669 case LI_set_prologue_end:
10670 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
10671 "set prologue end");
10672 break;
10673
10674 case LI_set_epilogue_begin:
10675 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
10676 "set epilogue begin");
10677 break;
10678
10679 case LI_set_discriminator:
10680 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
10681 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
10682 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
10683 dw2_asm_output_data_uleb128 (ent->val, NULL);
10684 break;
10685 }
10686 }
10687
10688 /* Emit debug info for the address of the end of the table. */
10689 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
10690 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
10691 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
10692 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
10693
10694 dw2_asm_output_data (1, 0, "end sequence");
10695 dw2_asm_output_data_uleb128 (1, NULL);
10696 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
10697 }
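
/* For illustration only: in the LI_set_line case above, advancing from
   line 7 to line 10 gives line_offset == 3 and, with the
   DWARF_LINE_BASE of -10 mentioned in the comment there, line_delta ==
   13, so the single special opcode DWARF_LINE_OPCODE_BASE + 13 is
   emitted.  Because GCC always pins the address with
   DW_LNE_set_address, these special opcodes advance only the line;
   their address advance is zero.  */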
10698
10699 /* Output the source line number correspondence information. This
10700 information goes into the .debug_line section. */
10701
10702 static void
10703 output_line_info (bool prologue_only)
10704 {
10705 char l1[20], l2[20], p1[20], p2[20];
10706 /* We don't support DWARFv5 line tables yet. */
10707 int ver = dwarf_version < 5 ? dwarf_version : 4;
10708 bool saw_one = false;
10709 int opc;
10710
10711 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, 0);
10712 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, 0);
10713 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, 0);
10714 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, 0);
10715
10716 if (!XCOFF_DEBUGGING_INFO)
10717 {
10718 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10719 dw2_asm_output_data (4, 0xffffffff,
10720 "Initial length escape value indicating 64-bit DWARF extension");
10721 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
10722 "Length of Source Line Info");
10723 }
10724
10725 ASM_OUTPUT_LABEL (asm_out_file, l1);
10726
10727 dw2_asm_output_data (2, ver, "DWARF Version");
10728 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
10729 ASM_OUTPUT_LABEL (asm_out_file, p1);
10730
10731 /* Define the architecture-dependent minimum instruction length (in bytes).
10732 In this implementation of DWARF, this field is used for information
10733 purposes only. Since GCC generates assembly language, we have no
10734 a priori knowledge of how many instruction bytes are generated for each
10735 source line, and therefore can use only the DW_LNE_set_address and
10736 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
10737 this as '1', which is "correct enough" for all architectures,
10738 and don't let the target override. */
10739 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
10740
10741 if (ver >= 4)
10742 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
10743 "Maximum Operations Per Instruction");
10744 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
10745 "Default is_stmt_start flag");
10746 dw2_asm_output_data (1, DWARF_LINE_BASE,
10747 "Line Base Value (Special Opcodes)");
10748 dw2_asm_output_data (1, DWARF_LINE_RANGE,
10749 "Line Range Value (Special Opcodes)");
10750 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
10751 "Special Opcode Base");
10752
10753 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
10754 {
10755 int n_op_args;
10756 switch (opc)
10757 {
10758 case DW_LNS_advance_pc:
10759 case DW_LNS_advance_line:
10760 case DW_LNS_set_file:
10761 case DW_LNS_set_column:
10762 case DW_LNS_fixed_advance_pc:
10763 case DW_LNS_set_isa:
10764 n_op_args = 1;
10765 break;
10766 default:
10767 n_op_args = 0;
10768 break;
10769 }
10770
10771 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
10772 opc, n_op_args);
10773 }
10774
10775 /* Write out the information about the files we use. */
10776 output_file_names ();
10777 ASM_OUTPUT_LABEL (asm_out_file, p2);
10778 if (prologue_only)
10779 {
10780 /* Output the marker for the end of the line number info. */
10781 ASM_OUTPUT_LABEL (asm_out_file, l2);
10782 return;
10783 }
10784
10785 if (separate_line_info)
10786 {
10787 dw_line_info_table *table;
10788 size_t i;
10789
10790 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
10791 if (table->in_use)
10792 {
10793 output_one_line_info_table (table);
10794 saw_one = true;
10795 }
10796 }
10797 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
10798 {
10799 output_one_line_info_table (cold_text_section_line_info);
10800 saw_one = true;
10801 }
10802
10803 /* ??? Some Darwin linkers crash on a .debug_line section with no
10804 sequences. Further, merely a DW_LNE_end_sequence entry is not
10805 sufficient -- the address column must also be initialized.
10806 Make sure to output at least one set_address/end_sequence pair,
10807 choosing .text since that section is always present. */
10808 if (text_section_line_info->in_use || !saw_one)
10809 output_one_line_info_table (text_section_line_info);
10810
10811 /* Output the marker for the end of the line number info. */
10812 ASM_OUTPUT_LABEL (asm_out_file, l2);
10813 }
10814 \f
10815 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
10816
10817 static inline bool
10818 need_endianity_attribute_p (bool reverse)
10819 {
10820 return reverse && (dwarf_version >= 3 || !dwarf_strict);
10821 }
10822
10823 /* Given a pointer to a tree node for some base type, return a pointer to
10824 a DIE that describes the given type. REVERSE is true if the type is
10825 to be interpreted in the reverse storage order wrt the target order.
10826
10827 This routine must only be called for GCC type nodes that correspond to
10828 Dwarf base (fundamental) types. */
10829
10830 static dw_die_ref
10831 base_type_die (tree type, bool reverse)
10832 {
10833 dw_die_ref base_type_result;
10834 enum dwarf_type encoding;
10835 bool fpt_used = false;
10836 struct fixed_point_type_info fpt_info;
10837 tree type_bias = NULL_TREE;
10838
10839 if (TREE_CODE (type) == ERROR_MARK || TREE_CODE (type) == VOID_TYPE)
10840 return 0;
10841
10842 /* If this is a subtype that should not be emitted as a subrange type,
10843 use the base type. See subrange_type_for_debug_p. */
10844 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
10845 type = TREE_TYPE (type);
10846
10847 switch (TREE_CODE (type))
10848 {
10849 case INTEGER_TYPE:
10850 if ((dwarf_version >= 4 || !dwarf_strict)
10851 && TYPE_NAME (type)
10852 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
10853 && DECL_IS_BUILTIN (TYPE_NAME (type))
10854 && DECL_NAME (TYPE_NAME (type)))
10855 {
10856 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
10857 if (strcmp (name, "char16_t") == 0
10858 || strcmp (name, "char32_t") == 0)
10859 {
10860 encoding = DW_ATE_UTF;
10861 break;
10862 }
10863 }
10864 if ((dwarf_version >= 3 || !dwarf_strict)
10865 && lang_hooks.types.get_fixed_point_type_info)
10866 {
10867 memset (&fpt_info, 0, sizeof (fpt_info));
10868 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
10869 {
10870 fpt_used = true;
10871 encoding = ((TYPE_UNSIGNED (type))
10872 ? DW_ATE_unsigned_fixed
10873 : DW_ATE_signed_fixed);
10874 break;
10875 }
10876 }
10877 if (TYPE_STRING_FLAG (type))
10878 {
10879 if (TYPE_UNSIGNED (type))
10880 encoding = DW_ATE_unsigned_char;
10881 else
10882 encoding = DW_ATE_signed_char;
10883 }
10884 else if (TYPE_UNSIGNED (type))
10885 encoding = DW_ATE_unsigned;
10886 else
10887 encoding = DW_ATE_signed;
10888
10889 if (!dwarf_strict
10890 && lang_hooks.types.get_type_bias)
10891 type_bias = lang_hooks.types.get_type_bias (type);
10892 break;
10893
10894 case REAL_TYPE:
10895 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
10896 {
10897 if (dwarf_version >= 3 || !dwarf_strict)
10898 encoding = DW_ATE_decimal_float;
10899 else
10900 encoding = DW_ATE_lo_user;
10901 }
10902 else
10903 encoding = DW_ATE_float;
10904 break;
10905
10906 case FIXED_POINT_TYPE:
10907 if (!(dwarf_version >= 3 || !dwarf_strict))
10908 encoding = DW_ATE_lo_user;
10909 else if (TYPE_UNSIGNED (type))
10910 encoding = DW_ATE_unsigned_fixed;
10911 else
10912 encoding = DW_ATE_signed_fixed;
10913 break;
10914
10915 /* Dwarf2 doesn't know anything about complex ints, so use
10916 a user-defined type for it. */
10917 case COMPLEX_TYPE:
10918 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
10919 encoding = DW_ATE_complex_float;
10920 else
10921 encoding = DW_ATE_lo_user;
10922 break;
10923
10924 case BOOLEAN_TYPE:
10925 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
10926 encoding = DW_ATE_boolean;
10927 break;
10928
10929 default:
10930 /* No other TREE_CODEs are Dwarf fundamental types. */
10931 gcc_unreachable ();
10932 }
10933
10934 base_type_result = new_die (DW_TAG_base_type, comp_unit_die (), type);
10935
10936 add_AT_unsigned (base_type_result, DW_AT_byte_size,
10937 int_size_in_bytes (type));
10938 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
10939
10940 if (need_endianity_attribute_p (reverse))
10941 add_AT_unsigned (base_type_result, DW_AT_endianity,
10942 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
10943
10944 if (fpt_used)
10945 {
10946 switch (fpt_info.scale_factor_kind)
10947 {
10948 case fixed_point_scale_factor_binary:
10949 add_AT_int (base_type_result, DW_AT_binary_scale,
10950 fpt_info.scale_factor.binary);
10951 break;
10952
10953 case fixed_point_scale_factor_decimal:
10954 add_AT_int (base_type_result, DW_AT_decimal_scale,
10955 fpt_info.scale_factor.decimal);
10956 break;
10957
10958 case fixed_point_scale_factor_arbitrary:
10959 /* Arbitrary scale factors cannot be described in standard DWARF,
10960 yet. */
10961 if (!dwarf_strict)
10962 {
10963 /* Describe the scale factor as a rational constant. */
10964 const dw_die_ref scale_factor
10965 = new_die (DW_TAG_constant, comp_unit_die (), type);
10966
10967 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
10968 fpt_info.scale_factor.arbitrary.numerator);
10969 add_AT_int (scale_factor, DW_AT_GNU_denominator,
10970 fpt_info.scale_factor.arbitrary.denominator);
10971
10972 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
10973 }
10974 break;
10975
10976 default:
10977 gcc_unreachable ();
10978 }
10979 }
10980
10981 if (type_bias)
10982 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
10983 dw_scalar_form_constant
10984 | dw_scalar_form_exprloc
10985 | dw_scalar_form_reference,
10986 NULL);
10987
10988 add_pubtype (type, base_type_result);
10989
10990 return base_type_result;
10991 }
10992
10993 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
10994 named 'auto' in its type: return true for it, false otherwise. */
10995
10996 static inline bool
10997 is_cxx_auto (tree type)
10998 {
10999 if (is_cxx ())
11000 {
11001 tree name = TYPE_IDENTIFIER (type);
11002 if (name == get_identifier ("auto")
11003 || name == get_identifier ("decltype(auto)"))
11004 return true;
11005 }
11006 return false;
11007 }
11008
11009 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
11010 given input type is a Dwarf "fundamental" type. Otherwise return zero. */
11011
11012 static inline int
11013 is_base_type (tree type)
11014 {
11015 switch (TREE_CODE (type))
11016 {
11017 case ERROR_MARK:
11018 case VOID_TYPE:
11019 case INTEGER_TYPE:
11020 case REAL_TYPE:
11021 case FIXED_POINT_TYPE:
11022 case COMPLEX_TYPE:
11023 case BOOLEAN_TYPE:
11024 case POINTER_BOUNDS_TYPE:
11025 return 1;
11026
11027 case ARRAY_TYPE:
11028 case RECORD_TYPE:
11029 case UNION_TYPE:
11030 case QUAL_UNION_TYPE:
11031 case ENUMERAL_TYPE:
11032 case FUNCTION_TYPE:
11033 case METHOD_TYPE:
11034 case POINTER_TYPE:
11035 case REFERENCE_TYPE:
11036 case NULLPTR_TYPE:
11037 case OFFSET_TYPE:
11038 case LANG_TYPE:
11039 case VECTOR_TYPE:
11040 return 0;
11041
11042 default:
11043 if (is_cxx_auto (type))
11044 return 0;
11045 gcc_unreachable ();
11046 }
11047
11048 return 0;
11049 }
11050
11051 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
11052 node, return the size in bits for the type if it is a constant, or else
11053 return the alignment for the type if the type's size is not constant, or
11054 else return BITS_PER_WORD if the type actually turns out to be an
11055 ERROR_MARK node. */
11056
11057 static inline unsigned HOST_WIDE_INT
11058 simple_type_size_in_bits (const_tree type)
11059 {
11060 if (TREE_CODE (type) == ERROR_MARK)
11061 return BITS_PER_WORD;
11062 else if (TYPE_SIZE (type) == NULL_TREE)
11063 return 0;
11064 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
11065 return tree_to_uhwi (TYPE_SIZE (type));
11066 else
11067 return TYPE_ALIGN (type);
11068 }
11069
11070 /* Similarly, but return an offset_int instead of UHWI. */
11071
11072 static inline offset_int
11073 offset_int_type_size_in_bits (const_tree type)
11074 {
11075 if (TREE_CODE (type) == ERROR_MARK)
11076 return BITS_PER_WORD;
11077 else if (TYPE_SIZE (type) == NULL_TREE)
11078 return 0;
11079 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
11080 return wi::to_offset (TYPE_SIZE (type));
11081 else
11082 return TYPE_ALIGN (type);
11083 }
11084
11085 /* Given a pointer to a tree node for a subrange type, return a pointer
11086 to a DIE that describes the given type. */
11087
11088 static dw_die_ref
11089 subrange_type_die (tree type, tree low, tree high, tree bias,
11090 dw_die_ref context_die)
11091 {
11092 dw_die_ref subrange_die;
11093 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
11094
11095 if (context_die == NULL)
11096 context_die = comp_unit_die ();
11097
11098 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
11099
11100 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
11101 {
11102 /* The sizes of the subrange type and its base type do not match,
11103 so we need to generate a size attribute for the subrange type. */
11104 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
11105 }
11106
11107 if (low)
11108 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
11109 if (high)
11110 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
11111 if (bias && !dwarf_strict)
11112 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
11113 dw_scalar_form_constant
11114 | dw_scalar_form_exprloc
11115 | dw_scalar_form_reference,
11116 NULL);
11117
11118 return subrange_die;
11119 }
11120
11121 /* Returns the (const and/or volatile) cv_qualifiers associated with
11122 the decl node. This will normally be augmented with the
11123 cv_qualifiers of the underlying type in add_type_attribute. */
11124
11125 static int
11126 decl_quals (const_tree decl)
11127 {
11128 return ((TREE_READONLY (decl)
11129 /* The C++ front-end correctly marks reference-typed
11130 variables as readonly, but from a language (and debug
11131 info) standpoint they are not const-qualified. */
11132 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
11133 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
11134 | (TREE_THIS_VOLATILE (decl)
11135 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
11136 }
11137
11138 /* Determine the TYPE whose qualifiers match the largest strict subset
11139 of the given TYPE_QUALS, and return its qualifiers. Ignore all
11140 qualifiers outside QUAL_MASK. */
11141
11142 static int
11143 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
11144 {
11145 tree t;
11146 int best_rank = 0, best_qual = 0, max_rank;
11147
11148 type_quals &= qual_mask;
11149 max_rank = popcount_hwi (type_quals) - 1;
11150
11151 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
11152 t = TYPE_NEXT_VARIANT (t))
11153 {
11154 int q = TYPE_QUALS (t) & qual_mask;
11155
11156 if ((q & type_quals) == q && q != type_quals
11157 && check_base_type (t, type))
11158 {
11159 int rank = popcount_hwi (q);
11160
11161 if (rank > best_rank)
11162 {
11163 best_rank = rank;
11164 best_qual = q;
11165 }
11166 }
11167 }
11168
11169 return best_qual;
11170 }
11171
11172 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
11173 static const dwarf_qual_info_t dwarf_qual_info[] =
11174 {
11175 { TYPE_QUAL_CONST, DW_TAG_const_type },
11176 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
11177 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
11178 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
11179 };
11180 static const unsigned int dwarf_qual_info_size
11181 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
11182
11183 /* If DIE is a qualified DIE of some base DIE with the same parent,
11184 return the base DIE, otherwise return NULL. Set MASK to the
11185 qualifiers added compared to the returned DIE. */
11186
11187 static dw_die_ref
11188 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
11189 {
11190 unsigned int i;
11191 for (i = 0; i < dwarf_qual_info_size; i++)
11192 if (die->die_tag == dwarf_qual_info[i].t)
11193 break;
11194 if (i == dwarf_qual_info_size)
11195 return NULL;
11196 if (vec_safe_length (die->die_attr) != 1)
11197 return NULL;
11198 dw_die_ref type = get_AT_ref (die, DW_AT_type);
11199 if (type == NULL || type->die_parent != die->die_parent)
11200 return NULL;
11201 *mask |= dwarf_qual_info[i].q;
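/* Recurse to strip further qualifier DIEs, at most DEPTH more levels.  */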
11202 if (depth)
11203 {
11204 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
11205 if (ret)
11206 return ret;
11207 }
11208 return type;
11209 }
11210
11211 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
11212 entry that chains the modifiers specified by CV_QUALS in front of the
11213 given type. REVERSE is true if the type is to be interpreted in the
11214 reverse storage order wrt the target order. */
11215
11216 static dw_die_ref
11217 modified_type_die (tree type, int cv_quals, bool reverse,
11218 dw_die_ref context_die)
11219 {
11220 enum tree_code code = TREE_CODE (type);
11221 dw_die_ref mod_type_die;
11222 dw_die_ref sub_die = NULL;
11223 tree item_type = NULL;
11224 tree qualified_type;
11225 tree name, low, high;
11226 dw_die_ref mod_scope;
11227 /* Only these cv-qualifiers are currently handled. */
11228 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
11229 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC);
11230
11231 if (code == ERROR_MARK)
11232 return NULL;
11233
11234 if (lang_hooks.types.get_debug_type)
11235 {
11236 tree debug_type = lang_hooks.types.get_debug_type (type);
11237
11238 if (debug_type != NULL_TREE && debug_type != type)
11239 return modified_type_die (debug_type, cv_quals, reverse, context_die);
11240 }
11241
11242 cv_quals &= cv_qual_mask;
11243
11244 /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
11245 modifier tag (and not an attribute) that old consumers won't be able
11246 to handle. */
11247 if (dwarf_version < 3)
11248 cv_quals &= ~TYPE_QUAL_RESTRICT;
11249
11250 /* Likewise for DW_TAG_atomic_type for DWARFv5. */
11251 if (dwarf_version < 5)
11252 cv_quals &= ~TYPE_QUAL_ATOMIC;
11253
11254 /* See if we already have the appropriately qualified variant of
11255 this type. */
11256 qualified_type = get_qualified_type (type, cv_quals);
11257
11258 if (qualified_type == sizetype
11259 && TYPE_NAME (qualified_type)
11260 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
11261 {
11262 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
11263
11264 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
11265 && TYPE_PRECISION (t)
11266 == TYPE_PRECISION (qualified_type)
11267 && TYPE_UNSIGNED (t)
11268 == TYPE_UNSIGNED (qualified_type));
11269 qualified_type = t;
11270 }
11271
11272 /* If we do, then we can just use its DIE, if it exists. */
11273 if (qualified_type)
11274 {
11275 mod_type_die = lookup_type_die (qualified_type);
11276
11277 /* DW_AT_endianity doesn't come from a qualifier on the type. */
11278 if (mod_type_die
11279 && (!need_endianity_attribute_p (reverse)
11280 || !is_base_type (type)
11281 || get_AT_unsigned (mod_type_die, DW_AT_endianity)))
11282 return mod_type_die;
11283 }
11284
11285 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
11286
11287 /* Handle C typedef types. */
11288 if (name && TREE_CODE (name) == TYPE_DECL && DECL_ORIGINAL_TYPE (name)
11289 && !DECL_ARTIFICIAL (name))
11290 {
11291 tree dtype = TREE_TYPE (name);
11292
11293 if (qualified_type == dtype)
11294 {
11295 /* For a named type, use the typedef. */
11296 gen_type_die (qualified_type, context_die);
11297 return lookup_type_die (qualified_type);
11298 }
11299 else
11300 {
11301 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
11302 dquals &= cv_qual_mask;
11303 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
11304 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
11305 /* cv-unqualified version of named type. Just use
11306 the unnamed type to which it refers. */
11307 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
11308 reverse, context_die);
11309 /* Else cv-qualified version of named type; fall through. */
11310 }
11311 }
11312
11313 mod_scope = scope_die_for (type, context_die);
11314
11315 if (cv_quals)
11316 {
11317 int sub_quals = 0, first_quals = 0;
11318 unsigned i;
11319 dw_die_ref first = NULL, last = NULL;
11320
11321 /* Determine a lesser qualified type that most closely matches
11322 this one. Then generate DW_TAG_* entries for the remaining
11323 qualifiers. */
11324 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
11325 cv_qual_mask);
11326 if (sub_quals && use_debug_types)
11327 {
11328 bool needed = false;
11329 /* If emitting type units, make sure the order of qualifiers
11330 is canonical. Thus, start from unqualified type if
11331 an earlier qualifier is missing in sub_quals, but some later
11332 one is present there. */
11333 for (i = 0; i < dwarf_qual_info_size; i++)
11334 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
11335 needed = true;
11336 else if (needed && (dwarf_qual_info[i].q & cv_quals))
11337 {
11338 sub_quals = 0;
11339 break;
11340 }
11341 }
11342 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
11343 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
11344 {
11345 /* As not all intermediate qualified DIEs have corresponding
11346 tree types, ensure that qualified DIEs in the same scope
11347 as their DW_AT_type are emitted after their DW_AT_type,
11348 only with other qualified DIEs for the same type possibly
11349 in between them. Determine the range of such qualified
11350 DIEs now (first being the base type, last being the corresponding
11351 last qualified DIE for it). */
11352 unsigned int count = 0;
11353 first = qualified_die_p (mod_type_die, &first_quals,
11354 dwarf_qual_info_size);
11355 if (first == NULL)
11356 first = mod_type_die;
11357 gcc_assert ((first_quals & ~sub_quals) == 0);
11358 for (count = 0, last = first;
11359 count < (1U << dwarf_qual_info_size);
11360 count++, last = last->die_sib)
11361 {
11362 int quals = 0;
11363 if (last == mod_scope->die_child)
11364 break;
11365 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
11366 != first)
11367 break;
11368 }
11369 }
11370
11371 for (i = 0; i < dwarf_qual_info_size; i++)
11372 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
11373 {
11374 dw_die_ref d;
11375 if (first && first != last)
11376 {
11377 for (d = first->die_sib; ; d = d->die_sib)
11378 {
11379 int quals = 0;
11380 qualified_die_p (d, &quals, dwarf_qual_info_size);
11381 if (quals == (first_quals | dwarf_qual_info[i].q))
11382 break;
11383 if (d == last)
11384 {
11385 d = NULL;
11386 break;
11387 }
11388 }
11389 if (d)
11390 {
11391 mod_type_die = d;
11392 continue;
11393 }
11394 }
11395 if (first)
11396 {
11397 d = ggc_cleared_alloc<die_node> ();
11398 d->die_tag = dwarf_qual_info[i].t;
11399 add_child_die_after (mod_scope, d, last);
11400 last = d;
11401 }
11402 else
11403 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
11404 if (mod_type_die)
11405 add_AT_die_ref (d, DW_AT_type, mod_type_die);
11406 mod_type_die = d;
11407 first_quals |= dwarf_qual_info[i].q;
11408 }
11409 }
11410 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
11411 {
11412 dwarf_tag tag = DW_TAG_pointer_type;
11413 if (code == REFERENCE_TYPE)
11414 {
11415 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
11416 tag = DW_TAG_rvalue_reference_type;
11417 else
11418 tag = DW_TAG_reference_type;
11419 }
11420 mod_type_die = new_die (tag, mod_scope, type);
11421
11422 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
11423 simple_type_size_in_bits (type) / BITS_PER_UNIT);
11424 item_type = TREE_TYPE (type);
11425
11426 addr_space_t as = TYPE_ADDR_SPACE (item_type);
11427 if (!ADDR_SPACE_GENERIC_P (as))
11428 {
11429 int action = targetm.addr_space.debug (as);
11430 if (action >= 0)
11431 {
11432 /* Positive values indicate an address_class. */
11433 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
11434 }
11435 else
11436 {
11437 /* Negative values indicate an (inverted) segment base reg. */
11438 dw_loc_descr_ref d
11439 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
11440 add_AT_loc (mod_type_die, DW_AT_segment, d);
11441 }
11442 }
11443 }
11444 else if (code == INTEGER_TYPE
11445 && TREE_TYPE (type) != NULL_TREE
11446 && subrange_type_for_debug_p (type, &low, &high))
11447 {
11448 tree bias = NULL_TREE;
11449 if (lang_hooks.types.get_type_bias)
11450 bias = lang_hooks.types.get_type_bias (type);
11451 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
11452 item_type = TREE_TYPE (type);
11453 }
11454 else if (is_base_type (type))
11455 mod_type_die = base_type_die (type, reverse);
11456 else
11457 {
11458 gen_type_die (type, context_die);
11459
11460 /* We have to get the type_main_variant here (and pass that to the
11461 `lookup_type_die' routine) because the ..._TYPE node we have
11462 might simply be a *copy* of some original type node (where the
11463 copy was created to help us keep track of typedef names) and
11464 that copy might have a different TYPE_UID from the original
11465 ..._TYPE node. */
11466 if (TREE_CODE (type) != VECTOR_TYPE)
11467 return lookup_type_die (type_main_variant (type));
11468 else
11469 /* Vectors have the debugging information in the type,
11470 not the main variant. */
11471 return lookup_type_die (type);
11472 }
11473
11474 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
11475 don't output a DW_TAG_typedef, since there isn't one in the
11476 user's program; just attach a DW_AT_name to the type.
11477 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
11478 if the base type already has the same name. */
11479 if (name
11480 && ((TREE_CODE (name) != TYPE_DECL
11481 && (qualified_type == TYPE_MAIN_VARIANT (type)
11482 || (cv_quals == TYPE_UNQUALIFIED)))
11483 || (TREE_CODE (name) == TYPE_DECL
11484 && TREE_TYPE (name) == qualified_type
11485 && DECL_NAME (name))))
11486 {
11487 if (TREE_CODE (name) == TYPE_DECL)
11488 /* Could just call add_name_and_src_coords_attributes here,
11489 but since this is a builtin type it doesn't have any
11490 useful source coordinates anyway. */
11491 name = DECL_NAME (name);
11492 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
11493 }
11494 /* This probably indicates a bug. */
11495 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
11496 {
11497 name = TYPE_IDENTIFIER (type);
11498 add_name_attribute (mod_type_die,
11499 name ? IDENTIFIER_POINTER (name) : "__unknown__");
11500 }
11501
11502 if (qualified_type)
11503 equate_type_number_to_die (qualified_type, mod_type_die);
11504
11505 if (item_type)
11506 /* We must do this after the equate_type_number_to_die call, in case
11507 this is a recursive type. This ensures that the modified_type_die
11508 recursion will terminate even if the type is recursive. Recursive
11509 types are possible in Ada. */
11510 sub_die = modified_type_die (item_type,
11511 TYPE_QUALS_NO_ADDR_SPACE (item_type),
11512 reverse,
11513 context_die);
11514
11515 if (sub_die != NULL)
11516 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
11517
11518 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
11519 if (TYPE_ARTIFICIAL (type))
11520 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
11521
11522 return mod_type_die;
11523 }
11524
11525 /* Generate DIEs for the generic parameters of T.
11526 T must be either a generic type or a generic function.
11527 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
11528
11529 static void
11530 gen_generic_params_dies (tree t)
11531 {
11532 tree parms, args;
11533 int parms_num, i;
11534 dw_die_ref die = NULL;
11535 int non_default;
11536
11537 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
11538 return;
11539
11540 if (TYPE_P (t))
11541 die = lookup_type_die (t);
11542 else if (DECL_P (t))
11543 die = lookup_decl_die (t);
11544
11545 gcc_assert (die);
11546
11547 parms = lang_hooks.get_innermost_generic_parms (t);
11548 if (!parms)
11549 /* T has no generic parameter. It means T is neither a generic type
11550 nor a generic function. End of story. */
11551 return;
11552
11553 parms_num = TREE_VEC_LENGTH (parms);
11554 args = lang_hooks.get_innermost_generic_args (t);
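/* The chain of ARGS may carry, as an INTEGER_CST, the number of arguments
   that were explicitly given; arguments past that index are defaulted.  */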
11555 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
11556 non_default = int_cst_value (TREE_CHAIN (args));
11557 else
11558 non_default = TREE_VEC_LENGTH (args);
11559 for (i = 0; i < parms_num; i++)
11560 {
11561 tree parm, arg, arg_pack_elems;
11562 dw_die_ref parm_die;
11563
11564 parm = TREE_VEC_ELT (parms, i);
11565 arg = TREE_VEC_ELT (args, i);
11566 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
11567 gcc_assert (parm && TREE_VALUE (parm) && arg);
11568
11569 if (parm && TREE_VALUE (parm) && arg)
11570 {
11571 /* If PARM represents a template parameter pack,
11572 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
11573 by DW_TAG_template_*_parameter DIEs for the argument
11574 pack elements of ARG. Note that ARG would then be
11575 an argument pack. */
11576 if (arg_pack_elems)
11577 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
11578 arg_pack_elems,
11579 die);
11580 else
11581 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
11582 true /* emit name */, die);
11583 if (i >= non_default)
11584 add_AT_flag (parm_die, DW_AT_default_value, 1);
11585 }
11586 }
11587 }
11588
11589 /* Create and return a DIE for PARM which should be
11590 the representation of a generic type parameter.
11591 For instance, in the C++ front end, PARM would be a template parameter.
11592 ARG is the argument to PARM.
11593 EMIT_NAME_P, if true, means the DIE will have a DW_AT_name attribute set
11594 to the name of PARM.
11595 PARENT_DIE is the parent DIE to which the newly created DIE should be
11596 added as a child node. */
11597
11598 static dw_die_ref
11599 generic_parameter_die (tree parm, tree arg,
11600 bool emit_name_p,
11601 dw_die_ref parent_die)
11602 {
11603 dw_die_ref tmpl_die = NULL;
11604 const char *name = NULL;
11605
11606 if (!parm || !DECL_NAME (parm) || !arg)
11607 return NULL;
11608
11609 /* We support non-type generic parameters and arguments,
11610 type generic parameters and arguments, as well as
11611 generic generic parameters (a.k.a. template template parameters in C++)
11612 and arguments. */
11613 if (TREE_CODE (parm) == PARM_DECL)
11614 /* PARM is a nontype generic parameter */
11615 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
11616 else if (TREE_CODE (parm) == TYPE_DECL)
11617 /* PARM is a type generic parameter. */
11618 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
11619 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
11620 /* PARM is a generic generic parameter.
11621 Its DIE is a GNU extension. It shall have a
11622 DW_AT_name attribute to represent the name of the template template
11623 parameter, and a DW_AT_GNU_template_name attribute to represent the
11624 name of the template template argument. */
11625 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
11626 parent_die, parm);
11627 else
11628 gcc_unreachable ();
11629
11630 if (tmpl_die)
11631 {
11632 tree tmpl_type;
11633
11634 /* If PARM is a generic parameter pack, it means we are
11635 emitting debug info for a template argument pack element.
11636 In other words, ARG is a template argument pack element.
11637 In that case, we don't emit any DW_AT_name attribute for
11638 the die. */
11639 if (emit_name_p)
11640 {
11641 name = IDENTIFIER_POINTER (DECL_NAME (parm));
11642 gcc_assert (name);
11643 add_AT_string (tmpl_die, DW_AT_name, name);
11644 }
11645
11646 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
11647 {
11648 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
11649 TMPL_DIE should have a DW_AT_type attribute that is set
11650 to the type of the argument to PARM, which is ARG.
11651 If PARM is a type generic parameter, TMPL_DIE should have a
11652 DW_AT_type attribute that is set to ARG. */
11653 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
11654 add_type_attribute (tmpl_die, tmpl_type,
11655 (TREE_THIS_VOLATILE (tmpl_type)
11656 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
11657 false, parent_die);
11658 }
11659 else
11660 {
11661 /* So TMPL_DIE is a DIE representing a
11662 generic generic parameter, a.k.a. a template template
11663 parameter in C++, and ARG is a template. */
11664
11665 /* The DW_AT_GNU_template_name attribute of the DIE must be set
11666 to the name of the argument. */
11667 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
11668 if (name)
11669 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
11670 }
11671
11672 if (TREE_CODE (parm) == PARM_DECL)
11673 /* So PARM is a non-type generic parameter.
11674 DWARF3 5.6.8 says we must set a DW_AT_const_value
11675 attribute of TMPL_DIE whose value represents the value
11676 of ARG.
11677 We must be careful here:
11678 the value of ARG might reference some function decls.
11679 We might currently be emitting debug info for a generic
11680 type, and since types are emitted before function decls, we don't
11681 know if the function decls referenced by ARG will actually be
11682 emitted after cgraph computations.
11683 So we must defer the generation of the DW_AT_const_value to
11684 after cgraph is ready. */
11685 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
11686 }
11687
11688 return tmpl_die;
11689 }
11690
11691 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
11692 PARM_PACK, which must be a template parameter pack. The returned DIE
11693 will be a child DIE of PARENT_DIE. */
11694
11695 static dw_die_ref
11696 template_parameter_pack_die (tree parm_pack,
11697 tree parm_pack_args,
11698 dw_die_ref parent_die)
11699 {
11700 dw_die_ref die;
11701 int j;
11702
11703 gcc_assert (parent_die && parm_pack);
11704
11705 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
11706 add_name_and_src_coords_attributes (die, parm_pack);
11707 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
11708 generic_parameter_die (parm_pack,
11709 TREE_VEC_ELT (parm_pack_args, j),
11710 false /* Don't emit DW_AT_name */,
11711 die);
11712 return die;
11713 }
11714
11715 /* Given a pointer to an arbitrary ..._TYPE tree node, return true if it is
11716 an enumerated type. */
11717
11718 static inline int
11719 type_is_enum (const_tree type)
11720 {
11721 return TREE_CODE (type) == ENUMERAL_TYPE;
11722 }
11723
11724 /* Return the DBX register number described by a given RTL node. */
11725
11726 static unsigned int
11727 dbx_reg_number (const_rtx rtl)
11728 {
11729 unsigned regno = REGNO (rtl);
11730
11731 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
11732
11733 #ifdef LEAF_REG_REMAP
11734 if (crtl->uses_only_leaf_regs)
11735 {
11736 int leaf_reg = LEAF_REG_REMAP (regno);
11737 if (leaf_reg != -1)
11738 regno = (unsigned) leaf_reg;
11739 }
11740 #endif
11741
11742 regno = DBX_REGISTER_NUMBER (regno);
11743 gcc_assert (regno != INVALID_REGNUM);
11744 return regno;
11745 }
11746
11747 /* Optionally add a DW_OP_piece term to a location description expression.
11748 DW_OP_piece is only added if the location description expression does not
11749 already end with DW_OP_piece. */
11750
11751 static void
11752 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
11753 {
11754 dw_loc_descr_ref loc;
11755
11756 if (*list_head != NULL)
11757 {
11758 /* Find the end of the chain. */
11759 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
11760 ;
11761
11762 if (loc->dw_loc_opc != DW_OP_piece)
11763 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
11764 }
11765 }
11766
11767 /* Return a location descriptor that designates a machine register or
11768 zero if there is none. */
11769
11770 static dw_loc_descr_ref
11771 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
11772 {
11773 rtx regs;
11774
11775 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
11776 return 0;
11777
11778 /* We only use "frame base" when we're sure we're talking about the
11779 post-prologue local stack frame. We do this by *not* running
11780 register elimination until this point, and recognizing the special
11781 argument pointer and soft frame pointer rtx's.
11782 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
11783 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
11784 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
11785 {
11786 dw_loc_descr_ref result = NULL;
11787
11788 if (dwarf_version >= 4 || !dwarf_strict)
11789 {
11790 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
11791 initialized);
11792 if (result)
11793 add_loc_descr (&result,
11794 new_loc_descr (DW_OP_stack_value, 0, 0));
11795 }
11796 return result;
11797 }
11798
11799 regs = targetm.dwarf_register_span (rtl);
11800
11801 if (REG_NREGS (rtl) > 1 || regs)
11802 return multiple_reg_loc_descriptor (rtl, regs, initialized);
11803 else
11804 {
11805 unsigned int dbx_regnum = dbx_reg_number (rtl);
11806 if (dbx_regnum == IGNORED_DWARF_REGNUM)
11807 return 0;
11808 return one_reg_loc_descriptor (dbx_regnum, initialized);
11809 }
11810 }
11811
11812 /* Return a location descriptor that designates a machine register for
11813 a given hard register number. */
11814
11815 static dw_loc_descr_ref
11816 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
11817 {
11818 dw_loc_descr_ref reg_loc_descr;
11819
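/* DW_OP_reg0 through DW_OP_reg31 encode the register number in the opcode
   itself; larger numbers need DW_OP_regx with a ULEB128 operand.  */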
11820 if (regno <= 31)
11821 reg_loc_descr
11822 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
11823 else
11824 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
11825
11826 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
11827 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
11828
11829 return reg_loc_descr;
11830 }
11831
11832 /* Given an RTL of a register, return a location descriptor that
11833 designates a value that spans more than one register. */
11834
11835 static dw_loc_descr_ref
11836 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
11837 enum var_init_status initialized)
11838 {
11839 int size, i;
11840 dw_loc_descr_ref loc_result = NULL;
11841
11842 /* Simple, contiguous registers. */
11843 if (regs == NULL_RTX)
11844 {
11845 unsigned reg = REGNO (rtl);
11846 int nregs;
11847
11848 #ifdef LEAF_REG_REMAP
11849 if (crtl->uses_only_leaf_regs)
11850 {
11851 int leaf_reg = LEAF_REG_REMAP (reg);
11852 if (leaf_reg != -1)
11853 reg = (unsigned) leaf_reg;
11854 }
11855 #endif
11856
11857 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
11858 nregs = REG_NREGS (rtl);
11859
11860 size = GET_MODE_SIZE (GET_MODE (rtl)) / nregs;
11861
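/* Describe each hard register covering the value as a separate
   DW_OP_reg/DW_OP_piece pair.  */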
11862 loc_result = NULL;
11863 while (nregs--)
11864 {
11865 dw_loc_descr_ref t;
11866
11867 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
11868 VAR_INIT_STATUS_INITIALIZED);
11869 add_loc_descr (&loc_result, t);
11870 add_loc_descr_op_piece (&loc_result, size);
11871 ++reg;
11872 }
11873 return loc_result;
11874 }
11875
11876 /* Now onto stupid register sets in non-contiguous locations. */
11877
11878 gcc_assert (GET_CODE (regs) == PARALLEL);
11879
11880 size = GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0)));
11881 loc_result = NULL;
11882
11883 for (i = 0; i < XVECLEN (regs, 0); ++i)
11884 {
11885 dw_loc_descr_ref t;
11886
11887 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
11888 VAR_INIT_STATUS_INITIALIZED);
11889 add_loc_descr (&loc_result, t);
11890 add_loc_descr_op_piece (&loc_result, size);
11891 }
11892
11893 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
11894 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
11895 return loc_result;
11896 }
11897
11898 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
11899
11900 /* Return a location descriptor that designates a constant i,
11901 as a compound operation from constant (i >> shift), constant shift
11902 and DW_OP_shl. */
11903
11904 static dw_loc_descr_ref
11905 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
11906 {
11907 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
11908 add_loc_descr (&ret, int_loc_descriptor (shift));
11909 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
11910 return ret;
11911 }
11912
11913 /* Return a location descriptor that designates a constant. */
11914
11915 static dw_loc_descr_ref
11916 int_loc_descriptor (HOST_WIDE_INT i)
11917 {
11918 enum dwarf_location_atom op;
11919
11920 /* Pick the smallest representation of a constant, rather than just
11921 defaulting to the LEB encoding. */
11922 if (i >= 0)
11923 {
11924 int clz = clz_hwi (i);
11925 int ctz = ctz_hwi (i);
11926 if (i <= 31)
11927 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
11928 else if (i <= 0xff)
11929 op = DW_OP_const1u;
11930 else if (i <= 0xffff)
11931 op = DW_OP_const2u;
11932 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
11933 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
11934 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
11935 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
11936 while DW_OP_const4u is 5 bytes. */
11937 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
11938 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
11939 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
11940 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
11941 while DW_OP_const4u is 5 bytes. */
11942 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
11943 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
11944 op = DW_OP_const4u;
11945 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
11946 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
11947 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes,
11948 while DW_OP_constu of constant >= 0x100000000 takes at least
11949 6 bytes. */
11950 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
11951 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
11952 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
11953 >= HOST_BITS_PER_WIDE_INT)
11954 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
11955 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes,
11956 while DW_OP_constu takes in this case at least 6 bytes. */
11957 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
11958 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
11959 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
11960 && size_of_uleb128 (i) > 6)
11961 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
11962 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
11963 else
11964 op = DW_OP_constu;
11965 }
11966 else
11967 {
11968 if (i >= -0x80)
11969 op = DW_OP_const1s;
11970 else if (i >= -0x8000)
11971 op = DW_OP_const2s;
11972 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
11973 {
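/* If encoding the negated constant followed by DW_OP_neg is shorter than
   the 5-byte DW_OP_const4s, prefer that.  */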
11974 if (size_of_int_loc_descriptor (i) < 5)
11975 {
11976 dw_loc_descr_ref ret = int_loc_descriptor (-i);
11977 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
11978 return ret;
11979 }
11980 op = DW_OP_const4s;
11981 }
11982 else
11983 {
11984 if (size_of_int_loc_descriptor (i)
11985 < (unsigned long) 1 + size_of_sleb128 (i))
11986 {
11987 dw_loc_descr_ref ret = int_loc_descriptor (-i);
11988 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
11989 return ret;
11990 }
11991 op = DW_OP_consts;
11992 }
11993 }
11994
11995 return new_loc_descr (op, i, 0);
11996 }
11997
11998 /* Likewise, for unsigned constants. */
11999
12000 static dw_loc_descr_ref
12001 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
12002 {
12003 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
12004 const unsigned HOST_WIDE_INT max_uint
12005 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
12006
12007 /* If possible, use the clever signed constants handling. */
12008 if (i <= max_int)
12009 return int_loc_descriptor ((HOST_WIDE_INT) i);
12010
12011 /* Here, we are left with positive numbers that cannot be represented as
12012 HOST_WIDE_INT, i.e.:
12013 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
12014
12015 Using a DW_OP_const4u/const8u operation to encode them consumes a lot of
12016 bytes, whereas it may be better to output a negative integer: thanks to
12017 integer wrapping, we know that:
12018 x = x - 2 ** (8 * DWARF2_ADDR_SIZE)
12019 = x - 2 * (max (HOST_WIDE_INT) + 1)
12020 So numbers close to max (unsigned HOST_WIDE_INT) could be represented as
12021 small negative integers. Let's try that in cases where it will clearly
12022 improve the encoding: there is no gain turning DW_OP_const4u into
12023 DW_OP_const4s. */
12024 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
12025 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
12026 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
12027 {
12028 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
12029
12030 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
12031 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
12032 const HOST_WIDE_INT second_shift
12033 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
12034
12035 /* So we finally have:
12036 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
12037 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
12038 return int_loc_descriptor (second_shift);
12039 }
12040
12041 /* Last chance: fallback to a simple constant operation. */
12042 return new_loc_descr
12043 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
12044 ? DW_OP_const4u
12045 : DW_OP_const8u,
12046 i, 0);
12047 }
12048
12049 /* Generate and return a location description that computes the unsigned
12050 comparison of the two stack top entries (a OP b where b is the top-most
12051 entry and a is the second one). The KIND of comparison can be LT_EXPR,
12052 LE_EXPR, GT_EXPR or GE_EXPR. */
12053
12054 static dw_loc_descr_ref
12055 uint_comparison_loc_list (enum tree_code kind)
12056 {
12057 enum dwarf_location_atom op, flip_op;
12058 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
12059
12060 switch (kind)
12061 {
12062 case LT_EXPR:
12063 op = DW_OP_lt;
12064 break;
12065 case LE_EXPR:
12066 op = DW_OP_le;
12067 break;
12068 case GT_EXPR:
12069 op = DW_OP_gt;
12070 break;
12071 case GE_EXPR:
12072 op = DW_OP_ge;
12073 break;
12074 default:
12075 gcc_unreachable ();
12076 }
12077
12078 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
12079 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
12080
12081 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
12082 possible to perform unsigned comparisons: we just have to distinguish
12083 two cases:
12084
12085 1. when a and b have the same sign (as signed integers); then we should
12086 return: a OP(signed) b;
12087
12088 2. when a is a negative signed integer while b is a positive one, then a
12089 is a greater unsigned integer than b; likewise when a and b's roles
12090 are flipped.
12091
12092 So first, compare the sign of the two operands. */
12093 ret = new_loc_descr (DW_OP_over, 0, 0);
12094 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
12095 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
12096 /* If they have different signs (i.e. they have different sign bits), then
12097 the stack top value now has the sign bit set and thus it's smaller than
12098 zero. */
12099 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
12100 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
12101 add_loc_descr (&ret, bra_node);
12102
12103 /* We are in case 1. At this point, we know both operands have the same
12104 sign, so it's safe to use the built-in signed comparison. */
12105 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
12106 add_loc_descr (&ret, jmp_node);
12107
12108 /* We are in case 2. Here, we know both operands do not have the same sign,
12109 so we have to flip the signed comparison. */
12110 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
12111 tmp = new_loc_descr (flip_op, 0, 0);
12112 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
12113 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
12114 add_loc_descr (&ret, tmp);
12115
12116 /* This dummy operation is necessary to make the two branches join. */
12117 tmp = new_loc_descr (DW_OP_nop, 0, 0);
12118 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
12119 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
12120 add_loc_descr (&ret, tmp);
12121
12122 return ret;
12123 }
12124
12125 /* Likewise, but takes the location description lists (might be destructive on
12126 them). Return NULL if either is NULL or if concatenation fails. */
12127
12128 static dw_loc_list_ref
12129 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
12130 enum tree_code kind)
12131 {
12132 if (left == NULL || right == NULL)
12133 return NULL;
12134
12135 add_loc_list (&left, right);
12136 if (left == NULL)
12137 return NULL;
12138
12139 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
12140 return left;
12141 }
12142
12143 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
12144 without actually allocating it. */
12145
12146 static unsigned long
12147 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
12148 {
12149 return size_of_int_loc_descriptor (i >> shift)
12150 + size_of_int_loc_descriptor (shift)
12151 + 1;
12152 }
12153
12154 /* Return size_of_locs (int_loc_descriptor (i)) without
12155 actually allocating it. */
12156
12157 static unsigned long
12158 size_of_int_loc_descriptor (HOST_WIDE_INT i)
12159 {
12160 unsigned long s;
12161
12162 if (i >= 0)
12163 {
12164 int clz, ctz;
12165 if (i <= 31)
12166 return 1;
12167 else if (i <= 0xff)
12168 return 2;
12169 else if (i <= 0xffff)
12170 return 3;
12171 clz = clz_hwi (i);
12172 ctz = ctz_hwi (i);
12173 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
12174 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
12175 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
12176 - clz - 5);
12177 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
12178 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
12179 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
12180 - clz - 8);
12181 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
12182 return 5;
12183 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
12184 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
12185 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
12186 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
12187 - clz - 8);
12188 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
12189 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
12190 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
12191 - clz - 16);
12192 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
12193 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
12194 && s > 6)
12195 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
12196 - clz - 32);
12197 else
12198 return 1 + s;
12199 }
12200 else
12201 {
12202 if (i >= -0x80)
12203 return 2;
12204 else if (i >= -0x8000)
12205 return 3;
12206 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
12207 {
12208 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
12209 {
12210 s = size_of_int_loc_descriptor (-i) + 1;
12211 if (s < 5)
12212 return s;
12213 }
12214 return 5;
12215 }
12216 else
12217 {
12218 unsigned long r = 1 + size_of_sleb128 (i);
12219 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
12220 {
12221 s = size_of_int_loc_descriptor (-i) + 1;
12222 if (s < r)
12223 return s;
12224 }
12225 return r;
12226 }
12227 }
12228 }
12229
12230 /* Return a location description representing the "address" of an integer
12231 value. This can appear only as a top-level expression. */
12232
12233 static dw_loc_descr_ref
12234 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
12235 {
12236 int litsize;
12237 dw_loc_descr_ref loc_result = NULL;
12238
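/* DW_OP_stack_value and DW_OP_implicit_value are DWARF 4 additions, so only
   use them for DWARF 4+ or when strict DWARF is not requested.  */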
12239 if (!(dwarf_version >= 4 || !dwarf_strict))
12240 return NULL;
12241
12242 litsize = size_of_int_loc_descriptor (i);
12243 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
12244 is more compact. For DW_OP_stack_value we need:
12245 litsize + 1 (DW_OP_stack_value)
12246 and for DW_OP_implicit_value:
12247 1 (DW_OP_implicit_value) + 1 (length) + size. */
12248 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
12249 {
12250 loc_result = int_loc_descriptor (i);
12251 add_loc_descr (&loc_result,
12252 new_loc_descr (DW_OP_stack_value, 0, 0));
12253 return loc_result;
12254 }
12255
12256 loc_result = new_loc_descr (DW_OP_implicit_value,
12257 size, 0);
12258 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
12259 loc_result->dw_loc_oprnd2.v.val_int = i;
12260 return loc_result;
12261 }
12262
12263 /* Return a location descriptor that designates a base+offset location. */
12264
12265 static dw_loc_descr_ref
12266 based_loc_descr (rtx reg, HOST_WIDE_INT offset,
12267 enum var_init_status initialized)
12268 {
12269 unsigned int regno;
12270 dw_loc_descr_ref result;
12271 dw_fde_ref fde = cfun->fde;
12272
12273 /* We only use "frame base" when we're sure we're talking about the
12274 post-prologue local stack frame. We do this by *not* running
12275 register elimination until this point, and recognizing the special
12276 argument pointer and soft frame pointer rtx's. */
12277 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
12278 {
12279 rtx elim = (ira_use_lra_p
12280 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
12281 : eliminate_regs (reg, VOIDmode, NULL_RTX));
12282
12283 if (elim != reg)
12284 {
12285 if (GET_CODE (elim) == PLUS)
12286 {
12287 offset += INTVAL (XEXP (elim, 1));
12288 elim = XEXP (elim, 0);
12289 }
12290 gcc_assert ((SUPPORTS_STACK_ALIGNMENT
12291 && (elim == hard_frame_pointer_rtx
12292 || elim == stack_pointer_rtx))
12293 || elim == (frame_pointer_needed
12294 ? hard_frame_pointer_rtx
12295 : stack_pointer_rtx));
12296
12297 /* If drap register is used to align stack, use frame
12298 pointer + offset to access stack variables. If stack
12299 is aligned without drap, use stack pointer + offset to
12300 access stack variables. */
12301 if (crtl->stack_realign_tried
12302 && reg == frame_pointer_rtx)
12303 {
12304 int base_reg
12305 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
12306 ? HARD_FRAME_POINTER_REGNUM
12307 : REGNO (elim));
12308 return new_reg_loc_descr (base_reg, offset);
12309 }
12310
12311 gcc_assert (frame_pointer_fb_offset_valid);
12312 offset += frame_pointer_fb_offset;
12313 return new_loc_descr (DW_OP_fbreg, offset, 0);
12314 }
12315 }
12316
12317 regno = REGNO (reg);
12318 #ifdef LEAF_REG_REMAP
12319 if (crtl->uses_only_leaf_regs)
12320 {
12321 int leaf_reg = LEAF_REG_REMAP (regno);
12322 if (leaf_reg != -1)
12323 regno = (unsigned) leaf_reg;
12324 }
12325 #endif
12326 regno = DWARF_FRAME_REGNUM (regno);
12327
12328 if (!optimize && fde
12329 && (fde->drap_reg == regno || fde->vdrap_reg == regno))
12330 {
12331 /* Use cfa+offset to represent the location of arguments passed
12332 on the stack when drap is used to align stack.
12333 Only do this when not optimizing; for optimized code var-tracking
12334 is supposed to track where the arguments live and the register
12335 used as vdrap or drap in some spot might be used for something
12336 else in another part of the routine. */
12337 return new_loc_descr (DW_OP_fbreg, offset, 0);
12338 }
12339
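/* DW_OP_breg0 through DW_OP_breg31 encode the register number in the opcode;
   otherwise fall back to DW_OP_bregx.  */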
12340 if (regno <= 31)
12341 result = new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + regno),
12342 offset, 0);
12343 else
12344 result = new_loc_descr (DW_OP_bregx, regno, offset);
12345
12346 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
12347 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
12348
12349 return result;
12350 }
12351
12352 /* Return true if this RTL expression describes a base+offset calculation. */
12353
12354 static inline int
12355 is_based_loc (const_rtx rtl)
12356 {
12357 return (GET_CODE (rtl) == PLUS
12358 && ((REG_P (XEXP (rtl, 0))
12359 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
12360 && CONST_INT_P (XEXP (rtl, 1)))));
12361 }
12362
12363 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
12364 failed. */
12365
12366 static dw_loc_descr_ref
12367 tls_mem_loc_descriptor (rtx mem)
12368 {
12369 tree base;
12370 dw_loc_descr_ref loc_result;
12371
12372 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
12373 return NULL;
12374
12375 base = get_base_address (MEM_EXPR (mem));
12376 if (base == NULL
12377 || TREE_CODE (base) != VAR_DECL
12378 || !DECL_THREAD_LOCAL_P (base))
12379 return NULL;
12380
12381 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
12382 if (loc_result == NULL)
12383 return NULL;
12384
12385 if (MEM_OFFSET (mem))
12386 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
12387
12388 return loc_result;
12389 }
12390
12391 /* Output debug info about the reason why we failed to expand an expression
12392 as a DWARF expression. */
12393
12394 static void
12395 expansion_failed (tree expr, rtx rtl, char const *reason)
12396 {
12397 if (dump_file && (dump_flags & TDF_DETAILS))
12398 {
12399 fprintf (dump_file, "Failed to expand as dwarf: ");
12400 if (expr)
12401 print_generic_expr (dump_file, expr, dump_flags);
12402 if (rtl)
12403 {
12404 fprintf (dump_file, "\n");
12405 print_rtl (dump_file, rtl);
12406 }
12407 fprintf (dump_file, "\nReason: %s\n", reason);
12408 }
12409 }
12410
12411 /* Helper function for const_ok_for_output. */
12412
12413 static bool
12414 const_ok_for_output_1 (rtx rtl)
12415 {
12416 if (GET_CODE (rtl) == UNSPEC)
12417 {
12418 /* If delegitimize_address couldn't do anything with the UNSPEC, assume
12419 we can't express it in the debug info. */
12420 /* Don't complain about TLS UNSPECs, those are just too hard to
12421 delegitimize. Note this could be a non-decl SYMBOL_REF such as
12422 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
12423 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
12424 if (flag_checking
12425 && (XVECLEN (rtl, 0) == 0
12426 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
12427 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
12428 inform (current_function_decl
12429 ? DECL_SOURCE_LOCATION (current_function_decl)
12430 : UNKNOWN_LOCATION,
12431 #if NUM_UNSPEC_VALUES > 0
12432 "non-delegitimized UNSPEC %s (%d) found in variable location",
12433 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
12434 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
12435 XINT (rtl, 1));
12436 #else
12437 "non-delegitimized UNSPEC %d found in variable location",
12438 XINT (rtl, 1));
12439 #endif
12440 expansion_failed (NULL_TREE, rtl,
12441 "UNSPEC hasn't been delegitimized.\n");
12442 return false;
12443 }
12444
12445 if (targetm.const_not_ok_for_debug_p (rtl))
12446 {
12447 expansion_failed (NULL_TREE, rtl,
12448 "Expression rejected for debug by the backend.\n");
12449 return false;
12450 }
12451
12452 /* FIXME: Refer to PR60655. It is possible for simplification
12453 of rtl expressions in var tracking to produce such expressions.
12454 We should really identify / validate expressions
12455 enclosed in CONST that can be handled by assemblers on various
12456 targets and only handle legitimate cases here. */
12457 if (GET_CODE (rtl) != SYMBOL_REF)
12458 {
12459 if (GET_CODE (rtl) == NOT)
12460 return false;
12461 return true;
12462 }
12463
12464 if (CONSTANT_POOL_ADDRESS_P (rtl))
12465 {
12466 bool marked;
12467 get_pool_constant_mark (rtl, &marked);
12468 /* If all references to this pool constant were optimized away,
12469 it was not output and thus we can't represent it. */
12470 if (!marked)
12471 {
12472 expansion_failed (NULL_TREE, rtl,
12473 "Constant was removed from constant pool.\n");
12474 return false;
12475 }
12476 }
12477
12478 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
12479 return false;
12480
12481 /* Avoid references to external symbols in debug info: on several targets
12482 the linker might even refuse to link when linking a shared library,
12483 and in many other cases the relocations for .debug_info/.debug_loc are
12484 dropped, so the address becomes zero anyway. Hidden symbols, guaranteed
12485 to be defined within the same shared library or executable, are fine. */
12486 if (SYMBOL_REF_EXTERNAL_P (rtl))
12487 {
12488 tree decl = SYMBOL_REF_DECL (rtl);
12489
12490 if (decl == NULL || !targetm.binds_local_p (decl))
12491 {
12492 expansion_failed (NULL_TREE, rtl,
12493 "Symbol not defined in current TU.\n");
12494 return false;
12495 }
12496 }
12497
12498 return true;
12499 }
12500
12501 /* Return true if constant RTL can be emitted in DW_OP_addr or
12502 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
12503 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
12504
12505 static bool
12506 const_ok_for_output (rtx rtl)
12507 {
12508 if (GET_CODE (rtl) == SYMBOL_REF)
12509 return const_ok_for_output_1 (rtl);
12510
12511 if (GET_CODE (rtl) == CONST)
12512 {
12513 subrtx_var_iterator::array_type array;
12514 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
12515 if (!const_ok_for_output_1 (*iter))
12516 return false;
12517 return true;
12518 }
12519
12520 return true;
12521 }
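/* For instance (a rough sketch of the checks above): a SYMBOL_REF for a
   non-TLS variable defined and binding locally in the current TU is
   accepted, while a leftover UNSPEC, a TLS symbol, an optimized-away
   constant pool entry or an external symbol that does not bind locally is
   rejected and the location is simply dropped.  */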
12522
12523 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
12524 if possible, NULL otherwise. */
12525
12526 static dw_die_ref
12527 base_type_for_mode (machine_mode mode, bool unsignedp)
12528 {
12529 dw_die_ref type_die;
12530 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
12531
12532 if (type == NULL)
12533 return NULL;
12534 switch (TREE_CODE (type))
12535 {
12536 case INTEGER_TYPE:
12537 case REAL_TYPE:
12538 break;
12539 default:
12540 return NULL;
12541 }
12542 type_die = lookup_type_die (type);
12543 if (!type_die)
12544 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
12545 comp_unit_die ());
12546 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
12547 return NULL;
12548 return type_die;
12549 }
12550
12551 /* For OP, a descriptor assumed to be in unsigned MODE, convert it to an
12552 unsigned type matching MODE or, if MODE is no wider than
12553 DWARF2_ADDR_SIZE, back to the untyped generic type. Return NULL if the
12554 conversion is not possible. */
12555
12556 static dw_loc_descr_ref
12557 convert_descriptor_to_mode (machine_mode mode, dw_loc_descr_ref op)
12558 {
12559 machine_mode outer_mode = mode;
12560 dw_die_ref type_die;
12561 dw_loc_descr_ref cvt;
12562
12563 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
12564 {
12565 add_loc_descr (&op, new_loc_descr (DW_OP_GNU_convert, 0, 0));
12566 return op;
12567 }
12568 type_die = base_type_for_mode (outer_mode, 1);
12569 if (type_die == NULL)
12570 return NULL;
12571 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
12572 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
12573 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
12574 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
12575 add_loc_descr (&op, cvt);
12576 return op;
12577 }
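/* For example (a sketch): when MODE is no wider than DWARF2_ADDR_SIZE this
   appends a bare DW_OP_GNU_convert converting OP back to the untyped
   generic type; for a wider mode such as TImode on a 64-bit target the
   convert instead references the DW_TAG_base_type DIE of the matching
   unsigned type, assuming the language hook can provide such a type.  */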
12578
12579 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
12580
12581 static dw_loc_descr_ref
12582 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
12583 dw_loc_descr_ref op1)
12584 {
12585 dw_loc_descr_ref ret = op0;
12586 add_loc_descr (&ret, op1);
12587 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
12588 if (STORE_FLAG_VALUE != 1)
12589 {
12590 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
12591 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
12592 }
12593 return ret;
12594 }
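/* For example (a sketch): with the usual STORE_FLAG_VALUE of 1 a signed
   less-than test becomes simply
	<op0> <op1> DW_OP_lt
   leaving 1 or 0 on the stack; a target with a different STORE_FLAG_VALUE
   additionally multiplies that flag by the flag value.  */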
12595
12596 /* Return location descriptor for signed comparison OP RTL. */
12597
12598 static dw_loc_descr_ref
12599 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
12600 machine_mode mem_mode)
12601 {
12602 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
12603 dw_loc_descr_ref op0, op1;
12604 int shift;
12605
12606 if (op_mode == VOIDmode)
12607 op_mode = GET_MODE (XEXP (rtl, 1));
12608 if (op_mode == VOIDmode)
12609 return NULL;
12610
12611 if (dwarf_strict
12612 && (!SCALAR_INT_MODE_P (op_mode)
12613 || GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE))
12614 return NULL;
12615
12616 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
12617 VAR_INIT_STATUS_INITIALIZED);
12618 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
12619 VAR_INIT_STATUS_INITIALIZED);
12620
12621 if (op0 == NULL || op1 == NULL)
12622 return NULL;
12623
12624 if (!SCALAR_INT_MODE_P (op_mode)
12625 || GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
12626 return compare_loc_descriptor (op, op0, op1);
12627
12628 if (GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
12629 {
12630 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
12631 dw_loc_descr_ref cvt;
12632
12633 if (type_die == NULL)
12634 return NULL;
12635 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
12636 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
12637 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
12638 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
12639 add_loc_descr (&op0, cvt);
12640 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
12641 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
12642 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
12643 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
12644 add_loc_descr (&op1, cvt);
12645 return compare_loc_descriptor (op, op0, op1);
12646 }
12647
12648 shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
12649 /* For eq/ne, if the operands are known to be zero-extended,
12650 there is no need to do the fancy shifting up. */
12651 if (op == DW_OP_eq || op == DW_OP_ne)
12652 {
12653 dw_loc_descr_ref last0, last1;
12654 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
12655 ;
12656 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
12657 ;
12658 /* deref_size zero extends, and for constants we can check
12659 whether they are zero extended or not. */
12660 if (((last0->dw_loc_opc == DW_OP_deref_size
12661 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
12662 || (CONST_INT_P (XEXP (rtl, 0))
12663 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
12664 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
12665 && ((last1->dw_loc_opc == DW_OP_deref_size
12666 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
12667 || (CONST_INT_P (XEXP (rtl, 1))
12668 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
12669 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
12670 return compare_loc_descriptor (op, op0, op1);
12671
12672 /* EQ/NE comparison against constant in narrower type than
12673 DWARF2_ADDR_SIZE can be performed either as
12674 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
12675 DW_OP_{eq,ne}
12676 or
12677 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
12678 DW_OP_{eq,ne}. Pick whatever is shorter. */
12679 if (CONST_INT_P (XEXP (rtl, 1))
12680 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
12681 && (size_of_int_loc_descriptor (shift) + 1
12682 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1)) << shift)
12683 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
12684 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
12685 & GET_MODE_MASK (op_mode))))
12686 {
12687 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
12688 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
12689 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
12690 & GET_MODE_MASK (op_mode));
12691 return compare_loc_descriptor (op, op0, op1);
12692 }
12693 }
12694 add_loc_descr (&op0, int_loc_descriptor (shift));
12695 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
12696 if (CONST_INT_P (XEXP (rtl, 1)))
12697 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) << shift);
12698 else
12699 {
12700 add_loc_descr (&op1, int_loc_descriptor (shift));
12701 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
12702 }
12703 return compare_loc_descriptor (op, op0, op1);
12704 }
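/* For example (a sketch, assuming DWARF2_ADDR_SIZE == 8): a signed GT
   comparison of a QImode register with (const_int 5) shifts both operands
   into the top byte so that the full-width signed comparison matches the
   QImode one, roughly  <reg> <56> DW_OP_shl <5 << 56> DW_OP_gt,  while an
   EQ/NE test against a constant may instead be masked with GET_MODE_MASK
   when that encoding is shorter.  */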
12705
12706 /* Return location descriptor for unsigned comparison OP RTL. */
12707
12708 static dw_loc_descr_ref
12709 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
12710 machine_mode mem_mode)
12711 {
12712 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
12713 dw_loc_descr_ref op0, op1;
12714
12715 if (op_mode == VOIDmode)
12716 op_mode = GET_MODE (XEXP (rtl, 1));
12717 if (op_mode == VOIDmode)
12718 return NULL;
12719 if (!SCALAR_INT_MODE_P (op_mode))
12720 return NULL;
12721
12722 if (dwarf_strict && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
12723 return NULL;
12724
12725 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
12726 VAR_INIT_STATUS_INITIALIZED);
12727 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
12728 VAR_INIT_STATUS_INITIALIZED);
12729
12730 if (op0 == NULL || op1 == NULL)
12731 return NULL;
12732
12733 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
12734 {
12735 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
12736 dw_loc_descr_ref last0, last1;
12737 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
12738 ;
12739 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
12740 ;
12741 if (CONST_INT_P (XEXP (rtl, 0)))
12742 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
12743 /* deref_size zero extends, so no need to mask it again. */
12744 else if (last0->dw_loc_opc != DW_OP_deref_size
12745 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
12746 {
12747 add_loc_descr (&op0, int_loc_descriptor (mask));
12748 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
12749 }
12750 if (CONST_INT_P (XEXP (rtl, 1)))
12751 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
12752 /* deref_size zero extends, so no need to mask it again. */
12753 else if (last1->dw_loc_opc != DW_OP_deref_size
12754 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
12755 {
12756 add_loc_descr (&op1, int_loc_descriptor (mask));
12757 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
12758 }
12759 }
12760 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
12761 {
12762 HOST_WIDE_INT bias = 1;
12763 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
12764 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
12765 if (CONST_INT_P (XEXP (rtl, 1)))
12766 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
12767 + INTVAL (XEXP (rtl, 1)));
12768 else
12769 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
12770 bias, 0));
12771 }
12772 return compare_loc_descriptor (op, op0, op1);
12773 }
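/* For example (a sketch, assuming DWARF2_ADDR_SIZE == 8): an unsigned
   DImode comparison is reduced to a signed one by adding the bias 1 << 63
   to both operands with DW_OP_plus_uconst, which flips the sign bit while
   preserving the unsigned order; narrower modes are instead masked down to
   GET_MODE_MASK first, after which the generic signed comparison is
   already the right one.  */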
12774
12775 /* Return location descriptor for {U,S}{MIN,MAX}. */
12776
12777 static dw_loc_descr_ref
12778 minmax_loc_descriptor (rtx rtl, machine_mode mode,
12779 machine_mode mem_mode)
12780 {
12781 enum dwarf_location_atom op;
12782 dw_loc_descr_ref op0, op1, ret;
12783 dw_loc_descr_ref bra_node, drop_node;
12784
12785 if (dwarf_strict
12786 && (!SCALAR_INT_MODE_P (mode)
12787 || GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE))
12788 return NULL;
12789
12790 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
12791 VAR_INIT_STATUS_INITIALIZED);
12792 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
12793 VAR_INIT_STATUS_INITIALIZED);
12794
12795 if (op0 == NULL || op1 == NULL)
12796 return NULL;
12797
12798 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
12799 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
12800 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
12801 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
12802 {
12803 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
12804 {
12805 HOST_WIDE_INT mask = GET_MODE_MASK (mode);
12806 add_loc_descr (&op0, int_loc_descriptor (mask));
12807 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
12808 add_loc_descr (&op1, int_loc_descriptor (mask));
12809 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
12810 }
12811 else if (GET_MODE_SIZE (mode) == DWARF2_ADDR_SIZE)
12812 {
12813 HOST_WIDE_INT bias = 1;
12814 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
12815 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
12816 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
12817 }
12818 }
12819 else if (SCALAR_INT_MODE_P (mode)
12820 && GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
12821 {
12822 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (mode)) * BITS_PER_UNIT;
12823 add_loc_descr (&op0, int_loc_descriptor (shift));
12824 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
12825 add_loc_descr (&op1, int_loc_descriptor (shift));
12826 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
12827 }
12828 else if (SCALAR_INT_MODE_P (mode)
12829 && GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE)
12830 {
12831 dw_die_ref type_die = base_type_for_mode (mode, 0);
12832 dw_loc_descr_ref cvt;
12833 if (type_die == NULL)
12834 return NULL;
12835 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
12836 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
12837 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
12838 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
12839 add_loc_descr (&op0, cvt);
12840 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
12841 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
12842 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
12843 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
12844 add_loc_descr (&op1, cvt);
12845 }
12846
12847 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
12848 op = DW_OP_lt;
12849 else
12850 op = DW_OP_gt;
12851 ret = op0;
12852 add_loc_descr (&ret, op1);
12853 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
12854 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
12855 add_loc_descr (&ret, bra_node);
12856 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
12857 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
12858 add_loc_descr (&ret, drop_node);
12859 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
12860 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
12861 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
12862 && SCALAR_INT_MODE_P (mode)
12863 && GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE)
12864 ret = convert_descriptor_to_mode (mode, ret);
12865 return ret;
12866 }
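/* A sketch of the selection idiom above: once both operands are pushed and
   adjusted, the stack holds  a, b, flag  where flag is the DW_OP_lt (MIN)
   or DW_OP_gt (MAX) result for the adjusted copies.  DW_OP_bra pops the
   flag and, when it is set, jumps past the DW_OP_swap straight to
   DW_OP_drop, leaving a on the stack; otherwise the swap runs first and
   the drop leaves b.  */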
12867
12868 /* Helper function for mem_loc_descriptor. Perform the binary operation OP
12869 with both arguments converted to the type given by TYPE_DIE, then convert
12870 the result back to unsigned MODE (or untyped if MODE is narrow). */
12871
12872 static dw_loc_descr_ref
12873 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
12874 machine_mode mode, machine_mode mem_mode)
12875 {
12876 dw_loc_descr_ref cvt, op0, op1;
12877
12878 if (type_die == NULL)
12879 return NULL;
12880 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
12881 VAR_INIT_STATUS_INITIALIZED);
12882 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
12883 VAR_INIT_STATUS_INITIALIZED);
12884 if (op0 == NULL || op1 == NULL)
12885 return NULL;
12886 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
12887 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
12888 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
12889 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
12890 add_loc_descr (&op0, cvt);
12891 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
12892 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
12893 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
12894 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
12895 add_loc_descr (&op1, cvt);
12896 add_loc_descr (&op0, op1);
12897 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
12898 return convert_descriptor_to_mode (mode, op0);
12899 }
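/* For example (a sketch): an unsigned SImode division on a 32-bit target
   could be emitted through this helper by converting both operands with
   DW_OP_GNU_convert to the DIE of the 32-bit unsigned base type, applying
   DW_OP_div (which then divides according to that type rather than as
   signed generic values), and converting the result back via
   convert_descriptor_to_mode.  */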
12900
12901 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
12902 const0 is DW_OP_lit0 or corresponding typed constant,
12903 const1 is DW_OP_lit1 or corresponding typed constant
12904 and constMSB is constant with just the MSB bit set
12905 for the mode):
12906 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
12907 L1: const0 DW_OP_swap
12908 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
12909 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
12910 L3: DW_OP_drop
12911 L4: DW_OP_nop
12912
12913 CTZ is similar:
12914 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
12915 L1: const0 DW_OP_swap
12916 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
12917 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
12918 L3: DW_OP_drop
12919 L4: DW_OP_nop
12920
12921 FFS is similar:
12922 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
12923 L1: const1 DW_OP_swap
12924 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
12925 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
12926 L3: DW_OP_drop
12927 L4: DW_OP_nop */
12928
12929 static dw_loc_descr_ref
12930 clz_loc_descriptor (rtx rtl, machine_mode mode,
12931 machine_mode mem_mode)
12932 {
12933 dw_loc_descr_ref op0, ret, tmp;
12934 HOST_WIDE_INT valv;
12935 dw_loc_descr_ref l1jump, l1label;
12936 dw_loc_descr_ref l2jump, l2label;
12937 dw_loc_descr_ref l3jump, l3label;
12938 dw_loc_descr_ref l4jump, l4label;
12939 rtx msb;
12940
12941 if (!SCALAR_INT_MODE_P (mode)
12942 || GET_MODE (XEXP (rtl, 0)) != mode)
12943 return NULL;
12944
12945 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
12946 VAR_INIT_STATUS_INITIALIZED);
12947 if (op0 == NULL)
12948 return NULL;
12949 ret = op0;
12950 if (GET_CODE (rtl) == CLZ)
12951 {
12952 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
12953 valv = GET_MODE_BITSIZE (mode);
12954 }
12955 else if (GET_CODE (rtl) == FFS)
12956 valv = 0;
12957 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
12958 valv = GET_MODE_BITSIZE (mode);
12959 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
12960 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
12961 add_loc_descr (&ret, l1jump);
12962 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
12963 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
12964 VAR_INIT_STATUS_INITIALIZED);
12965 if (tmp == NULL)
12966 return NULL;
12967 add_loc_descr (&ret, tmp);
12968 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
12969 add_loc_descr (&ret, l4jump);
12970 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
12971 ? const1_rtx : const0_rtx,
12972 mode, mem_mode,
12973 VAR_INIT_STATUS_INITIALIZED);
12974 if (l1label == NULL)
12975 return NULL;
12976 add_loc_descr (&ret, l1label);
12977 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
12978 l2label = new_loc_descr (DW_OP_dup, 0, 0);
12979 add_loc_descr (&ret, l2label);
12980 if (GET_CODE (rtl) != CLZ)
12981 msb = const1_rtx;
12982 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
12983 msb = GEN_INT (HOST_WIDE_INT_1U
12984 << (GET_MODE_BITSIZE (mode) - 1));
12985 else
12986 msb = immed_wide_int_const
12987 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
12988 GET_MODE_PRECISION (mode)), mode);
12989 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
12990 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
12991 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
12992 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
12993 else
12994 tmp = mem_loc_descriptor (msb, mode, mem_mode,
12995 VAR_INIT_STATUS_INITIALIZED);
12996 if (tmp == NULL)
12997 return NULL;
12998 add_loc_descr (&ret, tmp);
12999 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
13000 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
13001 add_loc_descr (&ret, l3jump);
13002 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
13003 VAR_INIT_STATUS_INITIALIZED);
13004 if (tmp == NULL)
13005 return NULL;
13006 add_loc_descr (&ret, tmp);
13007 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
13008 ? DW_OP_shl : DW_OP_shr, 0, 0));
13009 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
13010 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
13011 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
13012 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
13013 add_loc_descr (&ret, l2jump);
13014 l3label = new_loc_descr (DW_OP_drop, 0, 0);
13015 add_loc_descr (&ret, l3label);
13016 l4label = new_loc_descr (DW_OP_nop, 0, 0);
13017 add_loc_descr (&ret, l4label);
13018 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
13019 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
13020 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
13021 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
13022 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
13023 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
13024 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
13025 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
13026 return ret;
13027 }
13028
13029 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
13030 const1 is DW_OP_lit1 or corresponding typed constant):
13031 const0 DW_OP_swap
13032 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
13033 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
13034 L2: DW_OP_drop
13035
13036 PARITY is similar:
13037 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
13038 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
13039 L2: DW_OP_drop */
13040
13041 static dw_loc_descr_ref
13042 popcount_loc_descriptor (rtx rtl, machine_mode mode,
13043 machine_mode mem_mode)
13044 {
13045 dw_loc_descr_ref op0, ret, tmp;
13046 dw_loc_descr_ref l1jump, l1label;
13047 dw_loc_descr_ref l2jump, l2label;
13048
13049 if (!SCALAR_INT_MODE_P (mode)
13050 || GET_MODE (XEXP (rtl, 0)) != mode)
13051 return NULL;
13052
13053 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
13054 VAR_INIT_STATUS_INITIALIZED);
13055 if (op0 == NULL)
13056 return NULL;
13057 ret = op0;
13058 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
13059 VAR_INIT_STATUS_INITIALIZED);
13060 if (tmp == NULL)
13061 return NULL;
13062 add_loc_descr (&ret, tmp);
13063 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
13064 l1label = new_loc_descr (DW_OP_dup, 0, 0);
13065 add_loc_descr (&ret, l1label);
13066 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
13067 add_loc_descr (&ret, l2jump);
13068 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
13069 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
13070 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
13071 VAR_INIT_STATUS_INITIALIZED);
13072 if (tmp == NULL)
13073 return NULL;
13074 add_loc_descr (&ret, tmp);
13075 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
13076 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
13077 ? DW_OP_plus : DW_OP_xor, 0, 0));
13078 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
13079 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
13080 VAR_INIT_STATUS_INITIALIZED);
13081 add_loc_descr (&ret, tmp);
13082 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13083 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
13084 add_loc_descr (&ret, l1jump);
13085 l2label = new_loc_descr (DW_OP_drop, 0, 0);
13086 add_loc_descr (&ret, l2label);
13087 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
13088 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
13089 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
13090 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
13091 return ret;
13092 }
13093
13094 /* BSWAP (constS is initial shift count, either 56 or 24):
13095 constS const0
13096 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
13097 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
13098 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
13099 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
13100 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
13101
13102 static dw_loc_descr_ref
13103 bswap_loc_descriptor (rtx rtl, machine_mode mode,
13104 machine_mode mem_mode)
13105 {
13106 dw_loc_descr_ref op0, ret, tmp;
13107 dw_loc_descr_ref l1jump, l1label;
13108 dw_loc_descr_ref l2jump, l2label;
13109
13110 if (!SCALAR_INT_MODE_P (mode)
13111 || BITS_PER_UNIT != 8
13112 || (GET_MODE_BITSIZE (mode) != 32
13113 && GET_MODE_BITSIZE (mode) != 64))
13114 return NULL;
13115
13116 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
13117 VAR_INIT_STATUS_INITIALIZED);
13118 if (op0 == NULL)
13119 return NULL;
13120
13121 ret = op0;
13122 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
13123 mode, mem_mode,
13124 VAR_INIT_STATUS_INITIALIZED);
13125 if (tmp == NULL)
13126 return NULL;
13127 add_loc_descr (&ret, tmp);
13128 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
13129 VAR_INIT_STATUS_INITIALIZED);
13130 if (tmp == NULL)
13131 return NULL;
13132 add_loc_descr (&ret, tmp);
13133 l1label = new_loc_descr (DW_OP_pick, 2, 0);
13134 add_loc_descr (&ret, l1label);
13135 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
13136 mode, mem_mode,
13137 VAR_INIT_STATUS_INITIALIZED);
13138 add_loc_descr (&ret, tmp);
13139 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
13140 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
13141 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13142 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
13143 VAR_INIT_STATUS_INITIALIZED);
13144 if (tmp == NULL)
13145 return NULL;
13146 add_loc_descr (&ret, tmp);
13147 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
13148 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
13149 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13150 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
13151 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
13152 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
13153 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
13154 VAR_INIT_STATUS_INITIALIZED);
13155 add_loc_descr (&ret, tmp);
13156 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
13157 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
13158 add_loc_descr (&ret, l2jump);
13159 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
13160 VAR_INIT_STATUS_INITIALIZED);
13161 add_loc_descr (&ret, tmp);
13162 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
13163 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
13164 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
13165 add_loc_descr (&ret, l1jump);
13166 l2label = new_loc_descr (DW_OP_drop, 0, 0);
13167 add_loc_descr (&ret, l2label);
13168 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
13169 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
13170 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
13171 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
13172 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
13173 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
13174 return ret;
13175 }
13176
13177 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
13178 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
13179 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
13180 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
13181
13182 ROTATERT is similar:
13183 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
13184 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
13185 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
13186
13187 static dw_loc_descr_ref
13188 rotate_loc_descriptor (rtx rtl, machine_mode mode,
13189 machine_mode mem_mode)
13190 {
13191 rtx rtlop1 = XEXP (rtl, 1);
13192 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
13193 int i;
13194
13195 if (!SCALAR_INT_MODE_P (mode))
13196 return NULL;
13197
13198 if (GET_MODE (rtlop1) != VOIDmode
13199 && GET_MODE_BITSIZE (GET_MODE (rtlop1)) < GET_MODE_BITSIZE (mode))
13200 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
13201 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
13202 VAR_INIT_STATUS_INITIALIZED);
13203 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
13204 VAR_INIT_STATUS_INITIALIZED);
13205 if (op0 == NULL || op1 == NULL)
13206 return NULL;
13207 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
13208 for (i = 0; i < 2; i++)
13209 {
13210 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
13211 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
13212 mode, mem_mode,
13213 VAR_INIT_STATUS_INITIALIZED);
13214 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
13215 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
13216 ? DW_OP_const4u
13217 : HOST_BITS_PER_WIDE_INT == 64
13218 ? DW_OP_const8u : DW_OP_constu,
13219 GET_MODE_MASK (mode), 0);
13220 else
13221 mask[i] = NULL;
13222 if (mask[i] == NULL)
13223 return NULL;
13224 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
13225 }
13226 ret = op0;
13227 add_loc_descr (&ret, op1);
13228 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
13229 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
13230 if (GET_CODE (rtl) == ROTATERT)
13231 {
13232 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13233 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
13234 GET_MODE_BITSIZE (mode), 0));
13235 }
13236 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13237 if (mask[0] != NULL)
13238 add_loc_descr (&ret, mask[0]);
13239 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
13240 if (mask[1] != NULL)
13241 {
13242 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
13243 add_loc_descr (&ret, mask[1]);
13244 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
13245 }
13246 if (GET_CODE (rtl) == ROTATE)
13247 {
13248 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13249 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
13250 GET_MODE_BITSIZE (mode), 0));
13251 }
13252 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13253 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
13254 return ret;
13255 }
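/* For example (a sketch, assuming DWARF2_ADDR_SIZE == 8): an SImode ROTATE
   instantiates the template above with constMASK == 0xffffffff and
   BITSIZE == 32, whereas a DImode rotate omits the masking steps because
   the mode already fills the DWARF address size.  */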
13256
13257 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
13258 for DEBUG_PARAMETER_REF RTL. */
13259
13260 static dw_loc_descr_ref
13261 parameter_ref_descriptor (rtx rtl)
13262 {
13263 dw_loc_descr_ref ret;
13264 dw_die_ref ref;
13265
13266 if (dwarf_strict)
13267 return NULL;
13268 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
13269 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
13270 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
13271 if (ref)
13272 {
13273 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
13274 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
13275 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
13276 }
13277 else
13278 {
13279 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
13280 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
13281 }
13282 return ret;
13283 }
13284
13285 /* The following routine converts the RTL for a variable or parameter
13286 (resident in memory) into an equivalent Dwarf representation of a
13287 mechanism for getting the address of that same variable onto the top of a
13288 hypothetical "address evaluation" stack.
13289
13290 When creating memory location descriptors, we are effectively transforming
13291 the RTL for a memory-resident object into its Dwarf postfix expression
13292 equivalent. This routine recursively descends an RTL tree, turning
13293 it into Dwarf postfix code as it goes.
13294
13295 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
13296
13297 MEM_MODE is the mode of the memory reference, needed to handle some
13298 autoincrement addressing modes.
13299
13300 Return 0 if we can't represent the location. */
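/* For example (a sketch): an address such as
   (plus:DI (reg:DI <frame pointer>) (const_int -16)) on a 64-bit target is
   normally folded by the PLUS handling below into a single base-register
   operation, DW_OP_breg<n> -16 (or a frame-base relative DW_OP_fbreg),
   rather than an explicit DW_OP_plus.  */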
13301
13302 dw_loc_descr_ref
13303 mem_loc_descriptor (rtx rtl, machine_mode mode,
13304 machine_mode mem_mode,
13305 enum var_init_status initialized)
13306 {
13307 dw_loc_descr_ref mem_loc_result = NULL;
13308 enum dwarf_location_atom op;
13309 dw_loc_descr_ref op0, op1;
13310 rtx inner = NULL_RTX;
13311
13312 if (mode == VOIDmode)
13313 mode = GET_MODE (rtl);
13314
13315 /* Note that for a dynamically sized array, the location we will generate a
13316 description of here will be the lowest numbered location which is
13317 actually within the array. That's *not* necessarily the same as the
13318 zeroth element of the array. */
13319
13320 rtl = targetm.delegitimize_address (rtl);
13321
13322 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
13323 return NULL;
13324
13325 switch (GET_CODE (rtl))
13326 {
13327 case POST_INC:
13328 case POST_DEC:
13329 case POST_MODIFY:
13330 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
13331
13332 case SUBREG:
13333 /* The case of a subreg may arise when we have a local (register)
13334 variable or a formal (register) parameter which doesn't quite fill
13335 up an entire register. For now, just assume that it is
13336 legitimate to make the Dwarf info refer to the whole register which
13337 contains the given subreg. */
13338 if (!subreg_lowpart_p (rtl))
13339 break;
13340 inner = SUBREG_REG (rtl);
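/* FALLTHRU */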
13341 case TRUNCATE:
13342 if (inner == NULL_RTX)
13343 inner = XEXP (rtl, 0);
13344 if (SCALAR_INT_MODE_P (mode)
13345 && SCALAR_INT_MODE_P (GET_MODE (inner))
13346 && (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE
13347 #ifdef POINTERS_EXTEND_UNSIGNED
13348 || (mode == Pmode && mem_mode != VOIDmode)
13349 #endif
13350 )
13351 && GET_MODE_SIZE (GET_MODE (inner)) <= DWARF2_ADDR_SIZE)
13352 {
13353 mem_loc_result = mem_loc_descriptor (inner,
13354 GET_MODE (inner),
13355 mem_mode, initialized);
13356 break;
13357 }
13358 if (dwarf_strict)
13359 break;
13360 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (inner)))
13361 break;
13362 if (GET_MODE_SIZE (mode) != GET_MODE_SIZE (GET_MODE (inner))
13363 && (!SCALAR_INT_MODE_P (mode)
13364 || !SCALAR_INT_MODE_P (GET_MODE (inner))))
13365 break;
13366 else
13367 {
13368 dw_die_ref type_die;
13369 dw_loc_descr_ref cvt;
13370
13371 mem_loc_result = mem_loc_descriptor (inner,
13372 GET_MODE (inner),
13373 mem_mode, initialized);
13374 if (mem_loc_result == NULL)
13375 break;
13376 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
13377 if (type_die == NULL)
13378 {
13379 mem_loc_result = NULL;
13380 break;
13381 }
13382 if (GET_MODE_SIZE (mode)
13383 != GET_MODE_SIZE (GET_MODE (inner)))
13384 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
13385 else
13386 cvt = new_loc_descr (DW_OP_GNU_reinterpret, 0, 0);
13387 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
13388 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
13389 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
13390 add_loc_descr (&mem_loc_result, cvt);
13391 if (SCALAR_INT_MODE_P (mode)
13392 && GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
13393 {
13394 /* Convert it to untyped afterwards. */
13395 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
13396 add_loc_descr (&mem_loc_result, cvt);
13397 }
13398 }
13399 break;
13400
13401 case REG:
13402 if (! SCALAR_INT_MODE_P (mode)
13403 || (GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE
13404 && rtl != arg_pointer_rtx
13405 && rtl != frame_pointer_rtx
13406 #ifdef POINTERS_EXTEND_UNSIGNED
13407 && (mode != Pmode || mem_mode == VOIDmode)
13408 #endif
13409 ))
13410 {
13411 dw_die_ref type_die;
13412 unsigned int dbx_regnum;
13413
13414 if (dwarf_strict)
13415 break;
13416 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13417 break;
13418 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
13419 if (type_die == NULL)
13420 break;
13421
13422 dbx_regnum = dbx_reg_number (rtl);
13423 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13424 break;
13425 mem_loc_result = new_loc_descr (DW_OP_GNU_regval_type,
13426 dbx_regnum, 0);
13427 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
13428 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
13429 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
13430 break;
13431 }
13432 /* Whenever a register number forms a part of the description of the
13433 method for calculating the (dynamic) address of a memory resident
13434 object, DWARF rules require the register number be referred to as
13435 a "base register". This distinction is not based in any way upon
13436 what category of register the hardware believes the given register
13437 belongs to. This is strictly DWARF terminology we're dealing with
13438 here. Note that in cases where the location of a memory-resident
13439 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
13440 OP_CONST (0)) the actual DWARF location descriptor that we generate
13441 may just be OP_BASEREG (basereg). This may look deceptively like
13442 the object in question was allocated to a register (rather than in
13443 memory) so DWARF consumers need to be aware of the subtle
13444 distinction between OP_REG and OP_BASEREG. */
13445 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
13446 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
13447 else if (stack_realign_drap
13448 && crtl->drap_reg
13449 && crtl->args.internal_arg_pointer == rtl
13450 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
13451 {
13452 /* If RTL is internal_arg_pointer, which has been optimized
13453 out, use DRAP instead. */
13454 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
13455 VAR_INIT_STATUS_INITIALIZED);
13456 }
13457 break;
13458
13459 case SIGN_EXTEND:
13460 case ZERO_EXTEND:
13461 if (!SCALAR_INT_MODE_P (mode))
13462 break;
13463 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
13464 mem_mode, VAR_INIT_STATUS_INITIALIZED);
13465 if (op0 == 0)
13466 break;
13467 else if (GET_CODE (rtl) == ZERO_EXTEND
13468 && GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE
13469 && GET_MODE_BITSIZE (GET_MODE (XEXP (rtl, 0)))
13470 < HOST_BITS_PER_WIDE_INT
13471 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
13472 to expand zero extend as two shifts instead of
13473 masking. */
13474 && GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0))) <= 4)
13475 {
13476 machine_mode imode = GET_MODE (XEXP (rtl, 0));
13477 mem_loc_result = op0;
13478 add_loc_descr (&mem_loc_result,
13479 int_loc_descriptor (GET_MODE_MASK (imode)));
13480 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
13481 }
13482 else if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
13483 {
13484 int shift = DWARF2_ADDR_SIZE
13485 - GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0)));
13486 shift *= BITS_PER_UNIT;
13487 if (GET_CODE (rtl) == SIGN_EXTEND)
13488 op = DW_OP_shra;
13489 else
13490 op = DW_OP_shr;
13491 mem_loc_result = op0;
13492 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
13493 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
13494 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
13495 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
13496 }
13497 else if (!dwarf_strict)
13498 {
13499 dw_die_ref type_die1, type_die2;
13500 dw_loc_descr_ref cvt;
13501
13502 type_die1 = base_type_for_mode (GET_MODE (XEXP (rtl, 0)),
13503 GET_CODE (rtl) == ZERO_EXTEND);
13504 if (type_die1 == NULL)
13505 break;
13506 type_die2 = base_type_for_mode (mode, 1);
13507 if (type_die2 == NULL)
13508 break;
13509 mem_loc_result = op0;
13510 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
13511 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
13512 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
13513 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
13514 add_loc_descr (&mem_loc_result, cvt);
13515 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
13516 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
13517 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
13518 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
13519 add_loc_descr (&mem_loc_result, cvt);
13520 }
13521 break;
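/* For example (a sketch, assuming DWARF2_ADDR_SIZE == 8): zero extension
   from QImode is done by masking,  <x> <0xff> DW_OP_and,  while sign
   extension from SImode uses the two-shift form
   <x> <32> DW_OP_shl <32> DW_OP_shra.  */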
13522
13523 case MEM:
13524 {
13525 rtx new_rtl = avoid_constant_pool_reference (rtl);
13526 if (new_rtl != rtl)
13527 {
13528 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
13529 initialized);
13530 if (mem_loc_result != NULL)
13531 return mem_loc_result;
13532 }
13533 }
13534 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
13535 get_address_mode (rtl), mode,
13536 VAR_INIT_STATUS_INITIALIZED);
13537 if (mem_loc_result == NULL)
13538 mem_loc_result = tls_mem_loc_descriptor (rtl);
13539 if (mem_loc_result != NULL)
13540 {
13541 if (GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE
13542 || !SCALAR_INT_MODE_P (mode))
13543 {
13544 dw_die_ref type_die;
13545 dw_loc_descr_ref deref;
13546
13547 if (dwarf_strict)
13548 return NULL;
13549 type_die
13550 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
13551 if (type_die == NULL)
13552 return NULL;
13553 deref = new_loc_descr (DW_OP_GNU_deref_type,
13554 GET_MODE_SIZE (mode), 0);
13555 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
13556 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
13557 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
13558 add_loc_descr (&mem_loc_result, deref);
13559 }
13560 else if (GET_MODE_SIZE (mode) == DWARF2_ADDR_SIZE)
13561 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
13562 else
13563 add_loc_descr (&mem_loc_result,
13564 new_loc_descr (DW_OP_deref_size,
13565 GET_MODE_SIZE (mode), 0));
13566 }
13567 break;
13568
13569 case LO_SUM:
13570 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
13571
13572 case LABEL_REF:
13573 /* Some ports can transform a symbol ref into a label ref, because
13574 the symbol ref is too far away and has to be dumped into a constant
13575 pool. */
13576 case CONST:
13577 case SYMBOL_REF:
13578 if (!SCALAR_INT_MODE_P (mode)
13579 || (GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE
13580 #ifdef POINTERS_EXTEND_UNSIGNED
13581 && (mode != Pmode || mem_mode == VOIDmode)
13582 #endif
13583 ))
13584 break;
13585 if (GET_CODE (rtl) == SYMBOL_REF
13586 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
13587 {
13588 dw_loc_descr_ref temp;
13589
13590 /* If this is not defined, we have no way to emit the data. */
13591 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
13592 break;
13593
13594 temp = new_addr_loc_descr (rtl, dtprel_true);
13595
13596 mem_loc_result = new_loc_descr (DW_OP_GNU_push_tls_address, 0, 0);
13597 add_loc_descr (&mem_loc_result, temp);
13598
13599 break;
13600 }
13601
13602 if (!const_ok_for_output (rtl))
13603 {
13604 if (GET_CODE (rtl) == CONST)
13605 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
13606 initialized);
13607 break;
13608 }
13609
13610 symref:
13611 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
13612 vec_safe_push (used_rtx_array, rtl);
13613 break;
13614
13615 case CONCAT:
13616 case CONCATN:
13617 case VAR_LOCATION:
13618 case DEBUG_IMPLICIT_PTR:
13619 expansion_failed (NULL_TREE, rtl,
13620 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
13621 return 0;
13622
13623 case ENTRY_VALUE:
13624 if (dwarf_strict)
13625 return NULL;
13626 if (REG_P (ENTRY_VALUE_EXP (rtl)))
13627 {
13628 if (!SCALAR_INT_MODE_P (mode)
13629 || GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE)
13630 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
13631 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
13632 else
13633 {
13634 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
13635 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13636 return NULL;
13637 op0 = one_reg_loc_descriptor (dbx_regnum,
13638 VAR_INIT_STATUS_INITIALIZED);
13639 }
13640 }
13641 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
13642 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
13643 {
13644 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
13645 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
13646 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
13647 return NULL;
13648 }
13649 else
13650 gcc_unreachable ();
13651 if (op0 == NULL)
13652 return NULL;
13653 mem_loc_result = new_loc_descr (DW_OP_GNU_entry_value, 0, 0);
13654 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
13655 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
13656 break;
13657
13658 case DEBUG_PARAMETER_REF:
13659 mem_loc_result = parameter_ref_descriptor (rtl);
13660 break;
13661
13662 case PRE_MODIFY:
13663 /* Extract the PLUS expression nested inside and fall into
13664 PLUS code below. */
13665 rtl = XEXP (rtl, 1);
13666 goto plus;
13667
13668 case PRE_INC:
13669 case PRE_DEC:
13670 /* Turn these into a PLUS expression and fall into the PLUS code
13671 below. */
13672 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
13673 gen_int_mode (GET_CODE (rtl) == PRE_INC
13674 ? GET_MODE_UNIT_SIZE (mem_mode)
13675 : -GET_MODE_UNIT_SIZE (mem_mode),
13676 mode));
13677
13678 /* ... fall through ... */
13679
13680 case PLUS:
13681 plus:
13682 if (is_based_loc (rtl)
13683 && (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE
13684 || XEXP (rtl, 0) == arg_pointer_rtx
13685 || XEXP (rtl, 0) == frame_pointer_rtx)
13686 && SCALAR_INT_MODE_P (mode))
13687 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
13688 INTVAL (XEXP (rtl, 1)),
13689 VAR_INIT_STATUS_INITIALIZED);
13690 else
13691 {
13692 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
13693 VAR_INIT_STATUS_INITIALIZED);
13694 if (mem_loc_result == 0)
13695 break;
13696
13697 if (CONST_INT_P (XEXP (rtl, 1))
13698 && GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
13699 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
13700 else
13701 {
13702 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
13703 VAR_INIT_STATUS_INITIALIZED);
13704 if (op1 == 0)
13705 return NULL;
13706 add_loc_descr (&mem_loc_result, op1);
13707 add_loc_descr (&mem_loc_result,
13708 new_loc_descr (DW_OP_plus, 0, 0));
13709 }
13710 }
13711 break;
13712
13713 /* If a pseudo-reg is optimized away, it is possible for it to
13714 be replaced with a MEM containing a multiply or shift. */
13715 case MINUS:
13716 op = DW_OP_minus;
13717 goto do_binop;
13718
13719 case MULT:
13720 op = DW_OP_mul;
13721 goto do_binop;
13722
13723 case DIV:
13724 if (!dwarf_strict
13725 && SCALAR_INT_MODE_P (mode)
13726 && GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE)
13727 {
13728 mem_loc_result = typed_binop (DW_OP_div, rtl,
13729 base_type_for_mode (mode, 0),
13730 mode, mem_mode);
13731 break;
13732 }
13733 op = DW_OP_div;
13734 goto do_binop;
13735
13736 case UMOD:
13737 op = DW_OP_mod;
13738 goto do_binop;
13739
13740 case ASHIFT:
13741 op = DW_OP_shl;
13742 goto do_shift;
13743
13744 case ASHIFTRT:
13745 op = DW_OP_shra;
13746 goto do_shift;
13747
13748 case LSHIFTRT:
13749 op = DW_OP_shr;
13750 goto do_shift;
13751
13752 do_shift:
13753 if (!SCALAR_INT_MODE_P (mode))
13754 break;
13755 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
13756 VAR_INIT_STATUS_INITIALIZED);
13757 {
13758 rtx rtlop1 = XEXP (rtl, 1);
13759 if (GET_MODE (rtlop1) != VOIDmode
13760 && GET_MODE_BITSIZE (GET_MODE (rtlop1))
13761 < GET_MODE_BITSIZE (mode))
13762 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
13763 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
13764 VAR_INIT_STATUS_INITIALIZED);
13765 }
13766
13767 if (op0 == 0 || op1 == 0)
13768 break;
13769
13770 mem_loc_result = op0;
13771 add_loc_descr (&mem_loc_result, op1);
13772 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
13773 break;
13774
13775 case AND:
13776 op = DW_OP_and;
13777 goto do_binop;
13778
13779 case IOR:
13780 op = DW_OP_or;
13781 goto do_binop;
13782
13783 case XOR:
13784 op = DW_OP_xor;
13785 goto do_binop;
13786
13787 do_binop:
13788 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
13789 VAR_INIT_STATUS_INITIALIZED);
13790 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
13791 VAR_INIT_STATUS_INITIALIZED);
13792
13793 if (op0 == 0 || op1 == 0)
13794 break;
13795
13796 mem_loc_result = op0;
13797 add_loc_descr (&mem_loc_result, op1);
13798 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
13799 break;
13800
13801 case MOD:
13802 if (GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE && !dwarf_strict)
13803 {
13804 mem_loc_result = typed_binop (DW_OP_mod, rtl,
13805 base_type_for_mode (mode, 0),
13806 mode, mem_mode);
13807 break;
13808 }
13809
13810 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
13811 VAR_INIT_STATUS_INITIALIZED);
13812 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
13813 VAR_INIT_STATUS_INITIALIZED);
13814
13815 if (op0 == 0 || op1 == 0)
13816 break;
13817
13818 mem_loc_result = op0;
13819 add_loc_descr (&mem_loc_result, op1);
13820 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
13821 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
13822 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
13823 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
13824 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
13825 break;
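/* A sketch of the expansion above: with a and b on the stack, the two
   DW_OP_over operations leave  a b a b,  DW_OP_div computes a/b,
   DW_OP_mul forms b*(a/b) and DW_OP_minus yields a - b*(a/b), i.e. the
   remainder, with the signedness of DW_OP_div.  */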
13826
13827 case UDIV:
13828 if (!dwarf_strict && SCALAR_INT_MODE_P (mode))
13829 {
13830 if (GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE)
13831 {
13832 op = DW_OP_div;
13833 goto do_binop;
13834 }
13835 mem_loc_result = typed_binop (DW_OP_div, rtl,
13836 base_type_for_mode (mode, 1),
13837 mode, mem_mode);
13838 }
13839 break;
13840
13841 case NOT:
13842 op = DW_OP_not;
13843 goto do_unop;
13844
13845 case ABS:
13846 op = DW_OP_abs;
13847 goto do_unop;
13848
13849 case NEG:
13850 op = DW_OP_neg;
13851 goto do_unop;
13852
13853 do_unop:
13854 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
13855 VAR_INIT_STATUS_INITIALIZED);
13856
13857 if (op0 == 0)
13858 break;
13859
13860 mem_loc_result = op0;
13861 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
13862 break;
13863
13864 case CONST_INT:
13865 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE
13866 #ifdef POINTERS_EXTEND_UNSIGNED
13867 || (mode == Pmode
13868 && mem_mode != VOIDmode
13869 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
13870 #endif
13871 )
13872 {
13873 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
13874 break;
13875 }
13876 if (!dwarf_strict
13877 && (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT
13878 || GET_MODE_BITSIZE (mode) == HOST_BITS_PER_DOUBLE_INT))
13879 {
13880 dw_die_ref type_die = base_type_for_mode (mode, 1);
13881 machine_mode amode;
13882 if (type_die == NULL)
13883 return NULL;
13884 amode = mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT,
13885 MODE_INT, 0);
13886 if (INTVAL (rtl) >= 0
13887 && amode != BLKmode
13888 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
13889 /* const DW_OP_GNU_convert <XXX> vs.
13890 DW_OP_GNU_const_type <XXX, 1, const>. */
13891 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
13892 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (mode))
13893 {
13894 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
13895 op0 = new_loc_descr (DW_OP_GNU_convert, 0, 0);
13896 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
13897 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
13898 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
13899 add_loc_descr (&mem_loc_result, op0);
13900 return mem_loc_result;
13901 }
13902 mem_loc_result = new_loc_descr (DW_OP_GNU_const_type, 0,
13903 INTVAL (rtl));
13904 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
13905 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
13906 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
13907 if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
13908 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
13909 else
13910 {
13911 mem_loc_result->dw_loc_oprnd2.val_class
13912 = dw_val_class_const_double;
13913 mem_loc_result->dw_loc_oprnd2.v.val_double
13914 = double_int::from_shwi (INTVAL (rtl));
13915 }
13916 }
13917 break;
13918
13919 case CONST_DOUBLE:
13920 if (!dwarf_strict)
13921 {
13922 dw_die_ref type_die;
13923
13924 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
13925 CONST_DOUBLE rtx could represent either a large integer
13926 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
13927 the value is always a floating point constant.
13928
13929 When it is an integer, a CONST_DOUBLE is used whenever
13930 the constant requires 2 HWIs to be adequately represented.
13931 We output CONST_DOUBLEs as blocks. */
13932 if (mode == VOIDmode
13933 || (GET_MODE (rtl) == VOIDmode
13934 && GET_MODE_BITSIZE (mode) != HOST_BITS_PER_DOUBLE_INT))
13935 break;
13936 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
13937 if (type_die == NULL)
13938 return NULL;
13939 mem_loc_result = new_loc_descr (DW_OP_GNU_const_type, 0, 0);
13940 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
13941 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
13942 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
13943 #if TARGET_SUPPORTS_WIDE_INT == 0
13944 if (!SCALAR_FLOAT_MODE_P (mode))
13945 {
13946 mem_loc_result->dw_loc_oprnd2.val_class
13947 = dw_val_class_const_double;
13948 mem_loc_result->dw_loc_oprnd2.v.val_double
13949 = rtx_to_double_int (rtl);
13950 }
13951 else
13952 #endif
13953 {
13954 unsigned int length = GET_MODE_SIZE (mode);
13955 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
13956
13957 insert_float (rtl, array);
13958 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
13959 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
13960 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
13961 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
13962 }
13963 }
13964 break;
13965
13966 case CONST_WIDE_INT:
13967 if (!dwarf_strict)
13968 {
13969 dw_die_ref type_die;
13970
13971 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
13972 if (type_die == NULL)
13973 return NULL;
13974 mem_loc_result = new_loc_descr (DW_OP_GNU_const_type, 0, 0);
13975 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
13976 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
13977 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
13978 mem_loc_result->dw_loc_oprnd2.val_class
13979 = dw_val_class_wide_int;
13980 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
13981 *mem_loc_result->dw_loc_oprnd2.v.val_wide = std::make_pair (rtl, mode);
13982 }
13983 break;
13984
13985 case EQ:
13986 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
13987 break;
13988
13989 case GE:
13990 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
13991 break;
13992
13993 case GT:
13994 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
13995 break;
13996
13997 case LE:
13998 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
13999 break;
14000
14001 case LT:
14002 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
14003 break;
14004
14005 case NE:
14006 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
14007 break;
14008
14009 case GEU:
14010 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
14011 break;
14012
14013 case GTU:
14014 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
14015 break;
14016
14017 case LEU:
14018 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
14019 break;
14020
14021 case LTU:
14022 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
14023 break;
14024
14025 case UMIN:
14026 case UMAX:
14027 if (!SCALAR_INT_MODE_P (mode))
14028 break;
14029 /* FALLTHRU */
14030 case SMIN:
14031 case SMAX:
14032 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
14033 break;
14034
14035 case ZERO_EXTRACT:
14036 case SIGN_EXTRACT:
14037 if (CONST_INT_P (XEXP (rtl, 1))
14038 && CONST_INT_P (XEXP (rtl, 2))
14039 && ((unsigned) INTVAL (XEXP (rtl, 1))
14040 + (unsigned) INTVAL (XEXP (rtl, 2))
14041 <= GET_MODE_BITSIZE (mode))
14042 && SCALAR_INT_MODE_P (mode)
14043 && GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE
14044 && GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0))) <= DWARF2_ADDR_SIZE)
14045 {
14046 int shift, size;
14047 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
14048 mem_mode, VAR_INIT_STATUS_INITIALIZED);
14049 if (op0 == 0)
14050 break;
14051 if (GET_CODE (rtl) == SIGN_EXTRACT)
14052 op = DW_OP_shra;
14053 else
14054 op = DW_OP_shr;
14055 mem_loc_result = op0;
14056 size = INTVAL (XEXP (rtl, 1));
14057 shift = INTVAL (XEXP (rtl, 2));
14058 if (BITS_BIG_ENDIAN)
14059 shift = GET_MODE_BITSIZE (GET_MODE (XEXP (rtl, 0)))
14060 - shift - size;
14061 if (shift + size != (int) DWARF2_ADDR_SIZE)
14062 {
14063 add_loc_descr (&mem_loc_result,
14064 int_loc_descriptor (DWARF2_ADDR_SIZE
14065 - shift - size));
14066 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
14067 }
14068 if (size != (int) DWARF2_ADDR_SIZE)
14069 {
14070 add_loc_descr (&mem_loc_result,
14071 int_loc_descriptor (DWARF2_ADDR_SIZE - size));
14072 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
14073 }
14074 }
14075 break;
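/* For example (a sketch, assuming DWARF2_ADDR_SIZE == 8 and
   !BITS_BIG_ENDIAN): (zero_extract:DI x (const_int 8) (const_int 4))
   becomes  <x> <52> DW_OP_shl <56> DW_OP_shr:  the field is moved to the
   top of the address-sized word and shifted back down, with DW_OP_shra
   used instead for SIGN_EXTRACT.  */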
14076
14077 case IF_THEN_ELSE:
14078 {
14079 dw_loc_descr_ref op2, bra_node, drop_node;
14080 op0 = mem_loc_descriptor (XEXP (rtl, 0),
14081 GET_MODE (XEXP (rtl, 0)) == VOIDmode
14082 ? word_mode : GET_MODE (XEXP (rtl, 0)),
14083 mem_mode, VAR_INIT_STATUS_INITIALIZED);
14084 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14085 VAR_INIT_STATUS_INITIALIZED);
14086 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
14087 VAR_INIT_STATUS_INITIALIZED);
14088 if (op0 == NULL || op1 == NULL || op2 == NULL)
14089 break;
14090
14091 mem_loc_result = op1;
14092 add_loc_descr (&mem_loc_result, op2);
14093 add_loc_descr (&mem_loc_result, op0);
14094 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14095 add_loc_descr (&mem_loc_result, bra_node);
14096 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
14097 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
14098 add_loc_descr (&mem_loc_result, drop_node);
14099 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14100 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
14101 }
14102 break;
14103
14104 case FLOAT_EXTEND:
14105 case FLOAT_TRUNCATE:
14106 case FLOAT:
14107 case UNSIGNED_FLOAT:
14108 case FIX:
14109 case UNSIGNED_FIX:
14110 if (!dwarf_strict)
14111 {
14112 dw_die_ref type_die;
14113 dw_loc_descr_ref cvt;
14114
14115 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
14116 mem_mode, VAR_INIT_STATUS_INITIALIZED);
14117 if (op0 == NULL)
14118 break;
14119 if (SCALAR_INT_MODE_P (GET_MODE (XEXP (rtl, 0)))
14120 && (GET_CODE (rtl) == FLOAT
14121 || GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0)))
14122 <= DWARF2_ADDR_SIZE))
14123 {
14124 type_die = base_type_for_mode (GET_MODE (XEXP (rtl, 0)),
14125 GET_CODE (rtl) == UNSIGNED_FLOAT);
14126 if (type_die == NULL)
14127 break;
14128 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
14129 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14130 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14131 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14132 add_loc_descr (&op0, cvt);
14133 }
14134 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
14135 if (type_die == NULL)
14136 break;
14137 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
14138 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14139 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14140 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14141 add_loc_descr (&op0, cvt);
14142 if (SCALAR_INT_MODE_P (mode)
14143 && (GET_CODE (rtl) == FIX
14144 || GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE))
14145 {
14146 op0 = convert_descriptor_to_mode (mode, op0);
14147 if (op0 == NULL)
14148 break;
14149 }
14150 mem_loc_result = op0;
14151 }
14152 break;
14153
14154 case CLZ:
14155 case CTZ:
14156 case FFS:
14157 mem_loc_result = clz_loc_descriptor (rtl, mode, mem_mode);
14158 break;
14159
14160 case POPCOUNT:
14161 case PARITY:
14162 mem_loc_result = popcount_loc_descriptor (rtl, mode, mem_mode);
14163 break;
14164
14165 case BSWAP:
14166 mem_loc_result = bswap_loc_descriptor (rtl, mode, mem_mode);
14167 break;
14168
14169 case ROTATE:
14170 case ROTATERT:
14171 mem_loc_result = rotate_loc_descriptor (rtl, mode, mem_mode);
14172 break;
14173
14174 case COMPARE:
14175 /* In theory, we could implement the above. */
14176 /* DWARF cannot represent the unsigned compare operations
14177 natively. */
14178 case SS_MULT:
14179 case US_MULT:
14180 case SS_DIV:
14181 case US_DIV:
14182 case SS_PLUS:
14183 case US_PLUS:
14184 case SS_MINUS:
14185 case US_MINUS:
14186 case SS_NEG:
14187 case US_NEG:
14188 case SS_ABS:
14189 case SS_ASHIFT:
14190 case US_ASHIFT:
14191 case SS_TRUNCATE:
14192 case US_TRUNCATE:
14193 case UNORDERED:
14194 case ORDERED:
14195 case UNEQ:
14196 case UNGE:
14197 case UNGT:
14198 case UNLE:
14199 case UNLT:
14200 case LTGT:
14201 case FRACT_CONVERT:
14202 case UNSIGNED_FRACT_CONVERT:
14203 case SAT_FRACT:
14204 case UNSIGNED_SAT_FRACT:
14205 case SQRT:
14206 case ASM_OPERANDS:
14207 case VEC_MERGE:
14208 case VEC_SELECT:
14209 case VEC_CONCAT:
14210 case VEC_DUPLICATE:
14211 case UNSPEC:
14212 case HIGH:
14213 case FMA:
14214 case STRICT_LOW_PART:
14215 case CONST_VECTOR:
14216 case CONST_FIXED:
14217 case CLRSB:
14218 case CLOBBER:
14219 /* If delegitimize_address couldn't do anything with the UNSPEC, we
14220 can't express it in the debug info. This can happen e.g. with some
14221 TLS UNSPECs. */
14222 break;
14223
14224 case CONST_STRING:
14225 resolve_one_addr (&rtl);
14226 goto symref;
14227
14228 default:
14229 if (flag_checking)
14230 {
14231 print_rtl (stderr, rtl);
14232 gcc_unreachable ();
14233 }
14234 break;
14235 }
14236
14237 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
14238 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14239
14240 return mem_loc_result;
14241 }
14242
14243 /* Return a descriptor that describes the concatenation of two locations.
14244 This is typically a complex variable. */
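/* Illustrative sketch only (sizes assume a 64-bit floating-point mode): for a
   complex double whose real and imaginary halves live in two registers, the
   resulting descriptor is roughly
     <loc for real part> DW_OP_piece 8 <loc for imag part> DW_OP_piece 8
   where each DW_OP_piece operand is the byte size (GET_MODE_SIZE) of the
   piece that precedes it.  */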
14245
14246 static dw_loc_descr_ref
14247 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
14248 {
14249 dw_loc_descr_ref cc_loc_result = NULL;
14250 dw_loc_descr_ref x0_ref
14251 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
14252 dw_loc_descr_ref x1_ref
14253 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
14254
14255 if (x0_ref == 0 || x1_ref == 0)
14256 return 0;
14257
14258 cc_loc_result = x0_ref;
14259 add_loc_descr_op_piece (&cc_loc_result, GET_MODE_SIZE (GET_MODE (x0)));
14260
14261 add_loc_descr (&cc_loc_result, x1_ref);
14262 add_loc_descr_op_piece (&cc_loc_result, GET_MODE_SIZE (GET_MODE (x1)));
14263
14264 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14265 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14266
14267 return cc_loc_result;
14268 }
14269
14270 /* Return a descriptor that describes the concatenation of N
14271 locations. */
14272
14273 static dw_loc_descr_ref
14274 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
14275 {
14276 unsigned int i;
14277 dw_loc_descr_ref cc_loc_result = NULL;
14278 unsigned int n = XVECLEN (concatn, 0);
14279
14280 for (i = 0; i < n; ++i)
14281 {
14282 dw_loc_descr_ref ref;
14283 rtx x = XVECEXP (concatn, 0, i);
14284
14285 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
14286 if (ref == NULL)
14287 return NULL;
14288
14289 add_loc_descr (&cc_loc_result, ref);
14290 add_loc_descr_op_piece (&cc_loc_result, GET_MODE_SIZE (GET_MODE (x)));
14291 }
14292
14293 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
14294 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14295
14296 return cc_loc_result;
14297 }
14298
14299 /* Helper function for loc_descriptor. Return DW_OP_GNU_implicit_pointer
14300 for DEBUG_IMPLICIT_PTR RTL. */
14301
14302 static dw_loc_descr_ref
14303 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
14304 {
14305 dw_loc_descr_ref ret;
14306 dw_die_ref ref;
14307
14308 if (dwarf_strict)
14309 return NULL;
14310 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
14311 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
14312 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
14313 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
14314 ret = new_loc_descr (DW_OP_GNU_implicit_pointer, 0, offset);
14315 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
14316 if (ref)
14317 {
14318 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14319 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
14320 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
14321 }
14322 else
14323 {
14324 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
14325 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
14326 }
14327 return ret;
14328 }
14329
14330 /* Output a proper Dwarf location descriptor for a variable or parameter
14331 which is either allocated in a register or in a memory location. For a
14332 register, we just generate an OP_REG and the register number. For a
14333 memory location we provide a Dwarf postfix expression describing how to
14334 generate the (dynamic) address of the object onto the address stack.
14335
14336 MODE is mode of the decl if this loc_descriptor is going to be used in
14337 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
14338 allowed, VOIDmode otherwise.
14339
14340 If we don't know how to describe it, return 0. */
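/* Rough illustration of the two shapes described above: a variable held
   entirely in hardware register N comes out as the single operation
   DW_OP_regN (or DW_OP_regx N), while a variable in memory comes out as an
   address computation such as DW_OP_fbreg <offset> for a slot at a constant
   offset from the frame base.  */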
14341
14342 static dw_loc_descr_ref
14343 loc_descriptor (rtx rtl, machine_mode mode,
14344 enum var_init_status initialized)
14345 {
14346 dw_loc_descr_ref loc_result = NULL;
14347
14348 switch (GET_CODE (rtl))
14349 {
14350 case SUBREG:
14351 /* The case of a subreg may arise when we have a local (register)
14352 variable or a formal (register) parameter which doesn't quite fill
14353 up an entire register. For now, just assume that it is
14354 legitimate to make the Dwarf info refer to the whole register which
14355 contains the given subreg. */
14356 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
14357 loc_result = loc_descriptor (SUBREG_REG (rtl),
14358 GET_MODE (SUBREG_REG (rtl)), initialized);
14359 else
14360 goto do_default;
14361 break;
14362
14363 case REG:
14364 loc_result = reg_loc_descriptor (rtl, initialized);
14365 break;
14366
14367 case MEM:
14368 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
14369 GET_MODE (rtl), initialized);
14370 if (loc_result == NULL)
14371 loc_result = tls_mem_loc_descriptor (rtl);
14372 if (loc_result == NULL)
14373 {
14374 rtx new_rtl = avoid_constant_pool_reference (rtl);
14375 if (new_rtl != rtl)
14376 loc_result = loc_descriptor (new_rtl, mode, initialized);
14377 }
14378 break;
14379
14380 case CONCAT:
14381 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
14382 initialized);
14383 break;
14384
14385 case CONCATN:
14386 loc_result = concatn_loc_descriptor (rtl, initialized);
14387 break;
14388
14389 case VAR_LOCATION:
14390 /* Single part. */
14391 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
14392 {
14393 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
14394 if (GET_CODE (loc) == EXPR_LIST)
14395 loc = XEXP (loc, 0);
14396 loc_result = loc_descriptor (loc, mode, initialized);
14397 break;
14398 }
14399
14400 rtl = XEXP (rtl, 1);
14401 /* FALLTHRU */
14402
14403 case PARALLEL:
14404 {
14405 rtvec par_elems = XVEC (rtl, 0);
14406 int num_elem = GET_NUM_ELEM (par_elems);
14407 machine_mode mode;
14408 int i;
14409
14410 /* Create the first one, so we have something to add to. */
14411 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
14412 VOIDmode, initialized);
14413 if (loc_result == NULL)
14414 return NULL;
14415 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
14416 add_loc_descr_op_piece (&loc_result, GET_MODE_SIZE (mode));
14417 for (i = 1; i < num_elem; i++)
14418 {
14419 dw_loc_descr_ref temp;
14420
14421 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
14422 VOIDmode, initialized);
14423 if (temp == NULL)
14424 return NULL;
14425 add_loc_descr (&loc_result, temp);
14426 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
14427 add_loc_descr_op_piece (&loc_result, GET_MODE_SIZE (mode));
14428 }
14429 }
14430 break;
14431
14432 case CONST_INT:
14433 if (mode != VOIDmode && mode != BLKmode)
14434 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (mode),
14435 INTVAL (rtl));
14436 break;
14437
14438 case CONST_DOUBLE:
14439 if (mode == VOIDmode)
14440 mode = GET_MODE (rtl);
14441
14442 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
14443 {
14444 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
14445
14446 /* Note that a CONST_DOUBLE rtx could represent either an integer
14447 or a floating-point constant. A CONST_DOUBLE is used whenever
14448 the constant requires more than one word in order to be
14449 adequately represented. We output CONST_DOUBLEs as blocks. */
14450 loc_result = new_loc_descr (DW_OP_implicit_value,
14451 GET_MODE_SIZE (mode), 0);
14452 #if TARGET_SUPPORTS_WIDE_INT == 0
14453 if (!SCALAR_FLOAT_MODE_P (mode))
14454 {
14455 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
14456 loc_result->dw_loc_oprnd2.v.val_double
14457 = rtx_to_double_int (rtl);
14458 }
14459 else
14460 #endif
14461 {
14462 unsigned int length = GET_MODE_SIZE (mode);
14463 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
14464
14465 insert_float (rtl, array);
14466 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
14467 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
14468 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
14469 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
14470 }
14471 }
14472 break;
14473
14474 case CONST_WIDE_INT:
14475 if (mode == VOIDmode)
14476 mode = GET_MODE (rtl);
14477
14478 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
14479 {
14480 loc_result = new_loc_descr (DW_OP_implicit_value,
14481 GET_MODE_SIZE (mode), 0);
14482 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
14483 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
14484 *loc_result->dw_loc_oprnd2.v.val_wide = std::make_pair (rtl, mode);
14485 }
14486 break;
14487
14488 case CONST_VECTOR:
14489 if (mode == VOIDmode)
14490 mode = GET_MODE (rtl);
14491
14492 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
14493 {
14494 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
14495 unsigned int length = CONST_VECTOR_NUNITS (rtl);
14496 unsigned char *array
14497 = ggc_vec_alloc<unsigned char> (length * elt_size);
14498 unsigned int i;
14499 unsigned char *p;
14500 machine_mode imode = GET_MODE_INNER (mode);
14501
14502 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
14503 switch (GET_MODE_CLASS (mode))
14504 {
14505 case MODE_VECTOR_INT:
14506 for (i = 0, p = array; i < length; i++, p += elt_size)
14507 {
14508 rtx elt = CONST_VECTOR_ELT (rtl, i);
14509 insert_wide_int (std::make_pair (elt, imode), p, elt_size);
14510 }
14511 break;
14512
14513 case MODE_VECTOR_FLOAT:
14514 for (i = 0, p = array; i < length; i++, p += elt_size)
14515 {
14516 rtx elt = CONST_VECTOR_ELT (rtl, i);
14517 insert_float (elt, p);
14518 }
14519 break;
14520
14521 default:
14522 gcc_unreachable ();
14523 }
14524
14525 loc_result = new_loc_descr (DW_OP_implicit_value,
14526 length * elt_size, 0);
14527 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
14528 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
14529 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
14530 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
14531 }
14532 break;
14533
14534 case CONST:
14535 if (mode == VOIDmode
14536 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
14537 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
14538 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
14539 {
14540 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
14541 break;
14542 }
14543 /* FALLTHROUGH */
14544 case SYMBOL_REF:
14545 if (!const_ok_for_output (rtl))
14546 break;
14547 case LABEL_REF:
14548 if (mode != VOIDmode && GET_MODE_SIZE (mode) == DWARF2_ADDR_SIZE
14549 && (dwarf_version >= 4 || !dwarf_strict))
14550 {
14551 loc_result = new_addr_loc_descr (rtl, dtprel_false);
14552 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
14553 vec_safe_push (used_rtx_array, rtl);
14554 }
14555 break;
14556
14557 case DEBUG_IMPLICIT_PTR:
14558 loc_result = implicit_ptr_descriptor (rtl, 0);
14559 break;
14560
14561 case PLUS:
14562 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
14563 && CONST_INT_P (XEXP (rtl, 1)))
14564 {
14565 loc_result
14566 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
14567 break;
14568 }
14569 /* FALLTHRU */
14570 do_default:
14571 default:
14572 if ((SCALAR_INT_MODE_P (mode)
14573 && GET_MODE (rtl) == mode
14574 && GET_MODE_SIZE (GET_MODE (rtl)) <= DWARF2_ADDR_SIZE
14575 && dwarf_version >= 4)
14576 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
14577 {
14578 /* Value expression. */
14579 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
14580 if (loc_result)
14581 add_loc_descr (&loc_result,
14582 new_loc_descr (DW_OP_stack_value, 0, 0));
14583 }
14584 break;
14585 }
14586
14587 return loc_result;
14588 }
14589
14590 /* We need to figure out what section we should use as the base for the
14591 address ranges where a given location is valid.
14592 1. If this particular DECL has a section associated with it, use that.
14593 2. If this function has a section associated with it, use that.
14594 3. Otherwise, use the text section.
14595 XXX: If you split a variable across multiple sections, we won't notice. */
14596
14597 static const char *
14598 secname_for_decl (const_tree decl)
14599 {
14600 const char *secname;
14601
14602 if (VAR_OR_FUNCTION_DECL_P (decl)
14603 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
14604 && DECL_SECTION_NAME (decl))
14605 secname = DECL_SECTION_NAME (decl);
14606 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
14607 secname = DECL_SECTION_NAME (current_function_decl);
14608 else if (cfun && in_cold_section_p)
14609 secname = crtl->subsections.cold_section_label;
14610 else
14611 secname = text_section_label;
14612
14613 return secname;
14614 }
14615
14616 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
14617
14618 static bool
14619 decl_by_reference_p (tree decl)
14620 {
14621 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
14622 || TREE_CODE (decl) == VAR_DECL)
14623 && DECL_BY_REFERENCE (decl));
14624 }
14625
14626 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
14627 for VARLOC. */
14628
14629 static dw_loc_descr_ref
14630 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
14631 enum var_init_status initialized)
14632 {
14633 int have_address = 0;
14634 dw_loc_descr_ref descr;
14635 machine_mode mode;
14636
14637 if (want_address != 2)
14638 {
14639 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
14640 /* Single part. */
14641 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
14642 {
14643 varloc = PAT_VAR_LOCATION_LOC (varloc);
14644 if (GET_CODE (varloc) == EXPR_LIST)
14645 varloc = XEXP (varloc, 0);
14646 mode = GET_MODE (varloc);
14647 if (MEM_P (varloc))
14648 {
14649 rtx addr = XEXP (varloc, 0);
14650 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
14651 mode, initialized);
14652 if (descr)
14653 have_address = 1;
14654 else
14655 {
14656 rtx x = avoid_constant_pool_reference (varloc);
14657 if (x != varloc)
14658 descr = mem_loc_descriptor (x, mode, VOIDmode,
14659 initialized);
14660 }
14661 }
14662 else
14663 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
14664 }
14665 else
14666 return 0;
14667 }
14668 else
14669 {
14670 if (GET_CODE (varloc) == VAR_LOCATION)
14671 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
14672 else
14673 mode = DECL_MODE (loc);
14674 descr = loc_descriptor (varloc, mode, initialized);
14675 have_address = 1;
14676 }
14677
14678 if (!descr)
14679 return 0;
14680
14681 if (want_address == 2 && !have_address
14682 && (dwarf_version >= 4 || !dwarf_strict))
14683 {
14684 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
14685 {
14686 expansion_failed (loc, NULL_RTX,
14687 "DWARF address size mismatch");
14688 return 0;
14689 }
14690 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
14691 have_address = 1;
14692 }
14693 /* Show if we can't fill the request for an address. */
14694 if (want_address && !have_address)
14695 {
14696 expansion_failed (loc, NULL_RTX,
14697 "Want address and only have value");
14698 return 0;
14699 }
14700
14701 /* If we've got an address and don't want one, dereference. */
14702 if (!want_address && have_address)
14703 {
14704 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
14705 enum dwarf_location_atom op;
14706
14707 if (size > DWARF2_ADDR_SIZE || size == -1)
14708 {
14709 expansion_failed (loc, NULL_RTX,
14710 "DWARF address size mismatch");
14711 return 0;
14712 }
14713 else if (size == DWARF2_ADDR_SIZE)
14714 op = DW_OP_deref;
14715 else
14716 op = DW_OP_deref_size;
14717
14718 add_loc_descr (&descr, new_loc_descr (op, size, 0));
14719 }
14720
14721 return descr;
14722 }
14723
14724 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
14725 if it is not possible. */
14726
14727 static dw_loc_descr_ref
14728 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
14729 {
14730 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
14731 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
14732 else if (dwarf_version >= 3 || !dwarf_strict)
14733 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
14734 else
14735 return NULL;
14736 }
14737
14738 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
14739 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
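/* Illustration only (assumes 32-bit int): for a struct { int i; int j; }
   where I was scalarized into a register and J was optimized away, the
   resulting expression is roughly
     DW_OP_reg<N> DW_OP_piece 4 DW_OP_piece 4
   i.e. a located piece for I followed by an empty piece standing for the
   missing bits of J.  */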
14740
14741 static dw_loc_descr_ref
14742 dw_sra_loc_expr (tree decl, rtx loc)
14743 {
14744 rtx p;
14745 unsigned HOST_WIDE_INT padsize = 0;
14746 dw_loc_descr_ref descr, *descr_tail;
14747 unsigned HOST_WIDE_INT decl_size;
14748 rtx varloc;
14749 enum var_init_status initialized;
14750
14751 if (DECL_SIZE (decl) == NULL
14752 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
14753 return NULL;
14754
14755 decl_size = tree_to_uhwi (DECL_SIZE (decl));
14756 descr = NULL;
14757 descr_tail = &descr;
14758
14759 for (p = loc; p; p = XEXP (p, 1))
14760 {
14761 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
14762 rtx loc_note = *decl_piece_varloc_ptr (p);
14763 dw_loc_descr_ref cur_descr;
14764 dw_loc_descr_ref *tail, last = NULL;
14765 unsigned HOST_WIDE_INT opsize = 0;
14766
14767 if (loc_note == NULL_RTX
14768 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
14769 {
14770 padsize += bitsize;
14771 continue;
14772 }
14773 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
14774 varloc = NOTE_VAR_LOCATION (loc_note);
14775 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
14776 if (cur_descr == NULL)
14777 {
14778 padsize += bitsize;
14779 continue;
14780 }
14781
14782 /* Check that cur_descr either doesn't use
14783 DW_OP_*piece operations, or their sum is equal
14784 to bitsize. Otherwise we can't embed it. */
14785 for (tail = &cur_descr; *tail != NULL;
14786 tail = &(*tail)->dw_loc_next)
14787 if ((*tail)->dw_loc_opc == DW_OP_piece)
14788 {
14789 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
14790 * BITS_PER_UNIT;
14791 last = *tail;
14792 }
14793 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
14794 {
14795 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
14796 last = *tail;
14797 }
14798
14799 if (last != NULL && opsize != bitsize)
14800 {
14801 padsize += bitsize;
14802 /* Discard the current piece of the descriptor and release any
14803 addr_table entries it uses. */
14804 remove_loc_list_addr_table_entries (cur_descr);
14805 continue;
14806 }
14807
14808 /* If there is a hole, add DW_OP_*piece after empty DWARF
14809 expression, which means that those bits are optimized out. */
14810 if (padsize)
14811 {
14812 if (padsize > decl_size)
14813 {
14814 remove_loc_list_addr_table_entries (cur_descr);
14815 goto discard_descr;
14816 }
14817 decl_size -= padsize;
14818 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
14819 if (*descr_tail == NULL)
14820 {
14821 remove_loc_list_addr_table_entries (cur_descr);
14822 goto discard_descr;
14823 }
14824 descr_tail = &(*descr_tail)->dw_loc_next;
14825 padsize = 0;
14826 }
14827 *descr_tail = cur_descr;
14828 descr_tail = tail;
14829 if (bitsize > decl_size)
14830 goto discard_descr;
14831 decl_size -= bitsize;
14832 if (last == NULL)
14833 {
14834 HOST_WIDE_INT offset = 0;
14835 if (GET_CODE (varloc) == VAR_LOCATION
14836 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
14837 {
14838 varloc = PAT_VAR_LOCATION_LOC (varloc);
14839 if (GET_CODE (varloc) == EXPR_LIST)
14840 varloc = XEXP (varloc, 0);
14841 }
14842 do
14843 {
14844 if (GET_CODE (varloc) == CONST
14845 || GET_CODE (varloc) == SIGN_EXTEND
14846 || GET_CODE (varloc) == ZERO_EXTEND)
14847 varloc = XEXP (varloc, 0);
14848 else if (GET_CODE (varloc) == SUBREG)
14849 varloc = SUBREG_REG (varloc);
14850 else
14851 break;
14852 }
14853 while (1);
14854 /* The DW_OP_bit_piece offset should be zero for register
14855 or implicit location descriptions and empty location
14856 descriptions, but for memory addresses it needs big-endian
14857 adjustment. */
14858 if (MEM_P (varloc))
14859 {
14860 unsigned HOST_WIDE_INT memsize
14861 = MEM_SIZE (varloc) * BITS_PER_UNIT;
14862 if (memsize != bitsize)
14863 {
14864 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
14865 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
14866 goto discard_descr;
14867 if (memsize < bitsize)
14868 goto discard_descr;
14869 if (BITS_BIG_ENDIAN)
14870 offset = memsize - bitsize;
14871 }
14872 }
14873
14874 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
14875 if (*descr_tail == NULL)
14876 goto discard_descr;
14877 descr_tail = &(*descr_tail)->dw_loc_next;
14878 }
14879 }
14880
14881 /* If there were any non-empty expressions, add padding till the end of
14882 the decl. */
14883 if (descr != NULL && decl_size != 0)
14884 {
14885 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
14886 if (*descr_tail == NULL)
14887 goto discard_descr;
14888 }
14889 return descr;
14890
14891 discard_descr:
14892 /* Discard the descriptor and release any addr_table entries it uses. */
14893 remove_loc_list_addr_table_entries (descr);
14894 return NULL;
14895 }
14896
14897 /* Return the dwarf representation of the location list LOC_LIST of
14898 DECL. WANT_ADDRESS has the same meaning as in loc_list_from_tree
14899 function. */
14900
14901 static dw_loc_list_ref
14902 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
14903 {
14904 const char *endname, *secname;
14905 rtx varloc;
14906 enum var_init_status initialized;
14907 struct var_loc_node *node;
14908 dw_loc_descr_ref descr;
14909 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
14910 dw_loc_list_ref list = NULL;
14911 dw_loc_list_ref *listp = &list;
14912
14913 /* Now that we know what section we are using for a base,
14914 actually construct the list of locations.
14915 The first location information is what is passed to the
14916 function that creates the location list, and the remaining
14917 locations just get added on to that list.
14918 Note that we only know the start address for a location
14919 (i.e. location changes), so to build the range, we use
14920 the range [current location start, next location start].
14921 This means we have to special case the last node, and generate
14922 a range of [last location start, end of function label]. */
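/* For illustration, with hypothetical label names: given location notes at
   .LVL1, .LVL2 and .LVL3 in a function that ends at .LFE0, the emitted
   ranges are [.LVL1, .LVL2), [.LVL2, .LVL3) and [.LVL3, .LFE0).  */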
14923
14924 secname = secname_for_decl (decl);
14925
14926 for (node = loc_list->first; node; node = node->next)
14927 if (GET_CODE (node->loc) == EXPR_LIST
14928 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
14929 {
14930 if (GET_CODE (node->loc) == EXPR_LIST)
14931 {
14932 /* This requires DW_OP_{,bit_}piece, which is not usable
14933 inside DWARF expressions. */
14934 if (want_address != 2)
14935 continue;
14936 descr = dw_sra_loc_expr (decl, node->loc);
14937 if (descr == NULL)
14938 continue;
14939 }
14940 else
14941 {
14942 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
14943 varloc = NOTE_VAR_LOCATION (node->loc);
14944 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
14945 }
14946 if (descr)
14947 {
14948 bool range_across_switch = false;
14949 /* If section switch happens in between node->label
14950 and node->next->label (or end of function) and
14951 we can't emit it as a single entry list,
14952 emit two ranges, first one ending at the end
14953 of first partition and second one starting at the
14954 beginning of second partition. */
14955 if (node == loc_list->last_before_switch
14956 && (node != loc_list->first || loc_list->first->next)
14957 && current_function_decl)
14958 {
14959 endname = cfun->fde->dw_fde_end;
14960 range_across_switch = true;
14961 }
14962 /* The variable has a location between NODE->LABEL and
14963 NODE->NEXT->LABEL. */
14964 else if (node->next)
14965 endname = node->next->label;
14966 /* If the variable has a location at the last label
14967 it keeps its location until the end of function. */
14968 else if (!current_function_decl)
14969 endname = text_end_label;
14970 else
14971 {
14972 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
14973 current_function_funcdef_no);
14974 endname = ggc_strdup (label_id);
14975 }
14976
14977 *listp = new_loc_list (descr, node->label, endname, secname);
14978 if (TREE_CODE (decl) == PARM_DECL
14979 && node == loc_list->first
14980 && NOTE_P (node->loc)
14981 && strcmp (node->label, endname) == 0)
14982 (*listp)->force = true;
14983 listp = &(*listp)->dw_loc_next;
14984
14985 if (range_across_switch)
14986 {
14987 if (GET_CODE (node->loc) == EXPR_LIST)
14988 descr = dw_sra_loc_expr (decl, node->loc);
14989 else
14990 {
14991 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
14992 varloc = NOTE_VAR_LOCATION (node->loc);
14993 descr = dw_loc_list_1 (decl, varloc, want_address,
14994 initialized);
14995 }
14996 gcc_assert (descr);
14997 /* The variable has a location between NODE->LABEL and
14998 NODE->NEXT->LABEL. */
14999 if (node->next)
15000 endname = node->next->label;
15001 else
15002 endname = cfun->fde->dw_fde_second_end;
15003 *listp = new_loc_list (descr,
15004 cfun->fde->dw_fde_second_begin,
15005 endname, secname);
15006 listp = &(*listp)->dw_loc_next;
15007 }
15008 }
15009 }
15010
15011 /* Try to avoid the overhead of a location list by emitting a location
15012 expression instead, but only if we didn't have more than one
15013 location entry in the first place. If some entries were not
15014 representable, we don't want to pretend that the single entry that
15015 was representable applies to the entire scope in which the variable is
15016 available. */
15017 if (list && loc_list->first->next)
15018 gen_llsym (list);
15019
15020 return list;
15021 }
15022
15023 /* Return true if the loc_list has only a single element and thus can be
15024 represented as a location description. */
15025
15026 static bool
15027 single_element_loc_list_p (dw_loc_list_ref list)
15028 {
15029 gcc_assert (!list->dw_loc_next || list->ll_symbol);
15030 return !list->ll_symbol;
15031 }
15032
15033 /* To each location in list LIST add loc descr REF. */
15034
15035 static void
15036 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
15037 {
15038 dw_loc_descr_ref copy;
15039 add_loc_descr (&list->expr, ref);
15040 list = list->dw_loc_next;
15041 while (list)
15042 {
15043 copy = ggc_alloc<dw_loc_descr_node> ();
15044 memcpy (copy, ref, sizeof (dw_loc_descr_node));
15045 add_loc_descr (&list->expr, copy);
15046 while (copy->dw_loc_next)
15047 {
15048 dw_loc_descr_ref new_copy = ggc_alloc<dw_loc_descr_node> ();
15049 memcpy (new_copy, copy->dw_loc_next, sizeof (dw_loc_descr_node));
15050 copy->dw_loc_next = new_copy;
15051 copy = new_copy;
15052 }
15053 list = list->dw_loc_next;
15054 }
15055 }
15056
15057 /* Given two lists RET and LIST,
15058 produce a location list that is the result of adding the expression in LIST
15059 to the expression in RET at each position in the program.
15060 Might be destructive on both RET and LIST.
15061
15062 TODO: We handle only the simple cases of RET or LIST having at most one
15063 element. The general case would involve sorting the lists in program order
15064 and merging them, which will need some additional work.
15065 Adding that will improve the quality of debug info, especially for SRA-ed
15066 structures. */
15067
15068 static void
15069 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
15070 {
15071 if (!list)
15072 return;
15073 if (!*ret)
15074 {
15075 *ret = list;
15076 return;
15077 }
15078 if (!list->dw_loc_next)
15079 {
15080 add_loc_descr_to_each (*ret, list->expr);
15081 return;
15082 }
15083 if (!(*ret)->dw_loc_next)
15084 {
15085 add_loc_descr_to_each (list, (*ret)->expr);
15086 *ret = list;
15087 return;
15088 }
15089 expansion_failed (NULL_TREE, NULL_RTX,
15090 "Don't know how to merge two non-trivial"
15091 " location lists.\n");
15092 *ret = NULL;
15093 return;
15094 }
15095
15096 /* LOC is a constant expression. Try our luck: look it up in the constant
15097 pool and return the loc_descr of its address. */
15098
15099 static dw_loc_descr_ref
15100 cst_pool_loc_descr (tree loc)
15101 {
15102 /* Get an RTL for this, if something has been emitted. */
15103 rtx rtl = lookup_constant_def (loc);
15104
15105 if (!rtl || !MEM_P (rtl))
15106 {
15107 gcc_assert (!rtl);
15108 return 0;
15109 }
15110 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
15111
15112 /* TODO: We might get more coverage if we were actually delaying expansion
15113 of all expressions till the end of compilation, when constant pools are
15114 fully populated. */
15115 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
15116 {
15117 expansion_failed (loc, NULL_RTX,
15118 "CST value in constant pool but not marked.");
15119 return 0;
15120 }
15121 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
15122 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
15123 }
15124
15125 /* Return dw_loc_list representing address of addr_expr LOC
15126 by looking for inner INDIRECT_REF expression and turning
15127 it into simple arithmetic.
15128
15129 See loc_list_from_tree for the meaning of CONTEXT. */
15130
15131 static dw_loc_list_ref
15132 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
15133 const loc_descr_context *context)
15134 {
15135 tree obj, offset;
15136 HOST_WIDE_INT bitsize, bitpos, bytepos;
15137 machine_mode mode;
15138 int unsignedp, reversep, volatilep = 0;
15139 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
15140
15141 obj = get_inner_reference (TREE_OPERAND (loc, 0),
15142 &bitsize, &bitpos, &offset, &mode,
15143 &unsignedp, &reversep, &volatilep);
15144 STRIP_NOPS (obj);
15145 if (bitpos % BITS_PER_UNIT)
15146 {
15147 expansion_failed (loc, NULL_RTX, "bitfield access");
15148 return 0;
15149 }
15150 if (!INDIRECT_REF_P (obj))
15151 {
15152 expansion_failed (obj,
15153 NULL_RTX, "no indirect ref in inner reference");
15154 return 0;
15155 }
15156 if (!offset && !bitpos)
15157 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
15158 context);
15159 else if (toplev
15160 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
15161 && (dwarf_version >= 4 || !dwarf_strict))
15162 {
15163 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
15164 if (!list_ret)
15165 return 0;
15166 if (offset)
15167 {
15168 /* Variable offset. */
15169 list_ret1 = loc_list_from_tree (offset, 0, context);
15170 if (list_ret1 == 0)
15171 return 0;
15172 add_loc_list (&list_ret, list_ret1);
15173 if (!list_ret)
15174 return 0;
15175 add_loc_descr_to_each (list_ret,
15176 new_loc_descr (DW_OP_plus, 0, 0));
15177 }
15178 bytepos = bitpos / BITS_PER_UNIT;
15179 if (bytepos > 0)
15180 add_loc_descr_to_each (list_ret,
15181 new_loc_descr (DW_OP_plus_uconst,
15182 bytepos, 0));
15183 else if (bytepos < 0)
15184 loc_list_plus_const (list_ret, bytepos);
15185 add_loc_descr_to_each (list_ret,
15186 new_loc_descr (DW_OP_stack_value, 0, 0));
15187 }
15188 return list_ret;
15189 }
15190
15191 /* Set LOC to the next operation that is not a DW_OP_nop operation. In case
15192 all operations from LOC are nops, move to the last one. Insert into NOPS all
15193 operations that are skipped. */
15194
15195 static void
15196 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
15197 hash_set<dw_loc_descr_ref> &nops)
15198 {
15199 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
15200 {
15201 nops.add (loc);
15202 loc = loc->dw_loc_next;
15203 }
15204 }
15205
15206 /* Helper for loc_descr_without_nops: free the location description operation
15207 P. */
15208
15209 bool
15210 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
15211 {
15212 ggc_free (loc);
15213 return true;
15214 }
15215
15216 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
15217 finishes LOC. */
15218
15219 static void
15220 loc_descr_without_nops (dw_loc_descr_ref &loc)
15221 {
15222 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
15223 return;
15224
15225 /* Set of all DW_OP_nop operations we remove. */
15226 hash_set<dw_loc_descr_ref> nops;
15227
15228 /* First, strip all prefix NOP operations in order to keep the head of the
15229 operations list. */
15230 loc_descr_to_next_no_nop (loc, nops);
15231
15232 for (dw_loc_descr_ref cur = loc; cur != NULL;)
15233 {
15234 /* For control flow operations: strip "prefix" nops in destination
15235 labels. */
15236 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
15237 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
15238 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
15239 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
15240
15241 /* Do the same for the operations that follow, then move to the next
15242 iteration. */
15243 if (cur->dw_loc_next != NULL)
15244 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
15245 cur = cur->dw_loc_next;
15246 }
15247
15248 nops.traverse<void *, free_loc_descr> (NULL);
15249 }
15250
15251
15252 struct dwarf_procedure_info;
15253
15254 /* Helper structure for location descriptions generation. */
15255 struct loc_descr_context
15256 {
15257 /* The type that is implicitly referenced by DW_OP_push_object_address, or
15258 NULL_TREE if DW_OP_push_object_address is invalid for this location
15259 description. This is used when processing PLACEHOLDER_EXPR nodes. */
15260 tree context_type;
15261 /* The ..._DECL node that should be translated as a
15262 DW_OP_push_object_address operation. */
15263 tree base_decl;
15264 /* Information about the DWARF procedure we are currently generating. NULL if
15265 we are not generating a DWARF procedure. */
15266 struct dwarf_procedure_info *dpi;
15267 };
15268
15269 /* DWARF procedures generation
15270
15271 DWARF expressions (aka. location descriptions) are used to encode variable
15272 things such as sizes or offsets. Such computations can have redundant parts
15273 that can be factorized in order to reduce the size of the output debug
15274 information. This is the whole point of DWARF procedures.
15275
15276 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
15277 already factorized into functions ("size functions") in order to handle very
15278 big and complex types. Such functions are quite simple: they have integral
15279 arguments, they return an integral result and their body contains only a
15280 return statement with arithmetic expressions. This is the only kind of
15281 function we are interested in translating into DWARF procedures, here.
15282
15283 DWARF expressions and DWARF procedure are executed using a stack, so we have
15284 to define some calling convention for them to interact. Let's say that:
15285
15286 - Before calling a DWARF procedure, DWARF expressions must push on the stack
15287 all arguments in reverse order (right-to-left) so that when the DWARF
15288 procedure execution starts, the first argument is the top of the stack.
15289
15290 - Then, when returning, the DWARF procedure must have consumed all arguments
15291 on the stack, must have pushed the result and touched nothing else.
15292
15293 - Each integral argument and the result are of integral types that can be
15294 held in a single stack slot.
15295
15296 - We call "frame offset" the number of stack slots that are "under DWARF
15297 procedure control": it includes the arguments slots, the temporaries and
15298 the result slot. Thus, it is equal to the number of arguments when the
15299 procedure execution starts and must be equal to one (the result) when it
15300 returns. */
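/* Sketch of the convention, using a hypothetical two-argument size function
   F (A, B): the caller emits
     <push B> <push A> DW_OP_call4 <DIE of the DWARF procedure for F>
   so the frame offset is 2 when the procedure starts executing. The
   procedure body computes the result and is followed by an epilogue of
   DW_OP_swap / DW_OP_drop pairs that discards the arguments, leaving a
   frame offset of 1 (just the result) on return.  */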
15301
15302 /* Helper structure used when generating operations for a DWARF procedure. */
15303 struct dwarf_procedure_info
15304 {
15305 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
15306 currently translated. */
15307 tree fndecl;
15308 /* The number of arguments FNDECL takes. */
15309 unsigned args_count;
15310 };
15311
15312 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
15313 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
15314 equate it to this DIE. */
15315
15316 static dw_die_ref
15317 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
15318 dw_die_ref parent_die)
15319 {
15320 const bool dwarf_proc_supported = dwarf_version >= 4;
15321 dw_die_ref dwarf_proc_die;
15322
15323 if ((dwarf_version < 3 && dwarf_strict)
15324 || location == NULL)
15325 return NULL;
15326
15327 dwarf_proc_die = new_die (dwarf_proc_supported
15328 ? DW_TAG_dwarf_procedure
15329 : DW_TAG_variable,
15330 parent_die,
15331 fndecl);
15332 if (fndecl)
15333 equate_decl_number_to_die (fndecl, dwarf_proc_die);
15334 if (!dwarf_proc_supported)
15335 add_AT_flag (dwarf_proc_die, DW_AT_artificial, 1);
15336 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
15337 return dwarf_proc_die;
15338 }
15339
15340 /* Return whether TYPE is a supported type as a DWARF procedure argument
15341 type or return type (we handle only scalar types and pointer types that
15342 aren't wider than the DWARF expression evaluation stack). */
15343
15344 static bool
15345 is_handled_procedure_type (tree type)
15346 {
15347 return ((INTEGRAL_TYPE_P (type)
15348 || TREE_CODE (type) == OFFSET_TYPE
15349 || TREE_CODE (type) == POINTER_TYPE)
15350 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
15351 }
15352
15353 /* Helper for resolve_args_picking: do the same but stop when coming across
15354 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
15355 offset *before* evaluating the corresponding operation. */
15356
15357 static bool
15358 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
15359 struct dwarf_procedure_info *dpi,
15360 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
15361 {
15362 /* The "frame_offset" identifier is already used to name a macro... */
15363 unsigned frame_offset_ = initial_frame_offset;
15364 dw_loc_descr_ref l;
15365
15366 for (l = loc; l != NULL;)
15367 {
15368 bool existed;
15369 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
15370
15371 /* If we already met this node, there is nothing to compute anymore. */
15372 if (existed)
15373 {
15374 /* Make sure that the stack size is consistent wherever the execution
15375 flow comes from. */
15376 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
15377 break;
15378 }
15379 l_frame_offset = frame_offset_;
15380
15381 /* If needed, relocate the picking offset with respect to the frame
15382 offset. */
15383 if (l->dw_loc_opc == DW_OP_pick && l->frame_offset_rel)
15384 {
15385 /* frame_offset_ is the size of the current stack frame, including
15386 incoming arguments. Besides, the arguments are pushed
15387 right-to-left. Thus, in order to access the Nth argument from
15388 this operation node, the picking has to skip temporaries *plus*
15389 one stack slot per argument (0 for the first one, 1 for the second
15390 one, etc.).
15391
15392 The targeted argument number (N) is already set as the operand,
15393 and the number of temporaries can be computed with:
15394 frame_offset_ - dpi->args_count. */
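/* Worked example with made-up numbers: if args_count is 2 and the current
   frame_offset_ is 5, there are 5 - 2 = 3 temporaries above the arguments,
   so accessing argument N requires DW_OP_pick (N + 3), i.e. the operand is
   bumped by frame_offset_ - dpi->args_count as done below.  */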
15395 l->dw_loc_oprnd1.v.val_unsigned += frame_offset_ - dpi->args_count;
15396
15397 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
15398 if (l->dw_loc_oprnd1.v.val_unsigned > 255)
15399 return false;
15400 }
15401
15402 /* Update frame_offset according to the effect the current operation has
15403 on the stack. */
15404 switch (l->dw_loc_opc)
15405 {
15406 case DW_OP_deref:
15407 case DW_OP_swap:
15408 case DW_OP_rot:
15409 case DW_OP_abs:
15410 case DW_OP_neg:
15411 case DW_OP_not:
15412 case DW_OP_plus_uconst:
15413 case DW_OP_skip:
15414 case DW_OP_reg0:
15415 case DW_OP_reg1:
15416 case DW_OP_reg2:
15417 case DW_OP_reg3:
15418 case DW_OP_reg4:
15419 case DW_OP_reg5:
15420 case DW_OP_reg6:
15421 case DW_OP_reg7:
15422 case DW_OP_reg8:
15423 case DW_OP_reg9:
15424 case DW_OP_reg10:
15425 case DW_OP_reg11:
15426 case DW_OP_reg12:
15427 case DW_OP_reg13:
15428 case DW_OP_reg14:
15429 case DW_OP_reg15:
15430 case DW_OP_reg16:
15431 case DW_OP_reg17:
15432 case DW_OP_reg18:
15433 case DW_OP_reg19:
15434 case DW_OP_reg20:
15435 case DW_OP_reg21:
15436 case DW_OP_reg22:
15437 case DW_OP_reg23:
15438 case DW_OP_reg24:
15439 case DW_OP_reg25:
15440 case DW_OP_reg26:
15441 case DW_OP_reg27:
15442 case DW_OP_reg28:
15443 case DW_OP_reg29:
15444 case DW_OP_reg30:
15445 case DW_OP_reg31:
15446 case DW_OP_bregx:
15447 case DW_OP_piece:
15448 case DW_OP_deref_size:
15449 case DW_OP_nop:
15450 case DW_OP_form_tls_address:
15451 case DW_OP_bit_piece:
15452 case DW_OP_implicit_value:
15453 case DW_OP_stack_value:
15454 break;
15455
15456 case DW_OP_addr:
15457 case DW_OP_const1u:
15458 case DW_OP_const1s:
15459 case DW_OP_const2u:
15460 case DW_OP_const2s:
15461 case DW_OP_const4u:
15462 case DW_OP_const4s:
15463 case DW_OP_const8u:
15464 case DW_OP_const8s:
15465 case DW_OP_constu:
15466 case DW_OP_consts:
15467 case DW_OP_dup:
15468 case DW_OP_over:
15469 case DW_OP_pick:
15470 case DW_OP_lit0:
15471 case DW_OP_lit1:
15472 case DW_OP_lit2:
15473 case DW_OP_lit3:
15474 case DW_OP_lit4:
15475 case DW_OP_lit5:
15476 case DW_OP_lit6:
15477 case DW_OP_lit7:
15478 case DW_OP_lit8:
15479 case DW_OP_lit9:
15480 case DW_OP_lit10:
15481 case DW_OP_lit11:
15482 case DW_OP_lit12:
15483 case DW_OP_lit13:
15484 case DW_OP_lit14:
15485 case DW_OP_lit15:
15486 case DW_OP_lit16:
15487 case DW_OP_lit17:
15488 case DW_OP_lit18:
15489 case DW_OP_lit19:
15490 case DW_OP_lit20:
15491 case DW_OP_lit21:
15492 case DW_OP_lit22:
15493 case DW_OP_lit23:
15494 case DW_OP_lit24:
15495 case DW_OP_lit25:
15496 case DW_OP_lit26:
15497 case DW_OP_lit27:
15498 case DW_OP_lit28:
15499 case DW_OP_lit29:
15500 case DW_OP_lit30:
15501 case DW_OP_lit31:
15502 case DW_OP_breg0:
15503 case DW_OP_breg1:
15504 case DW_OP_breg2:
15505 case DW_OP_breg3:
15506 case DW_OP_breg4:
15507 case DW_OP_breg5:
15508 case DW_OP_breg6:
15509 case DW_OP_breg7:
15510 case DW_OP_breg8:
15511 case DW_OP_breg9:
15512 case DW_OP_breg10:
15513 case DW_OP_breg11:
15514 case DW_OP_breg12:
15515 case DW_OP_breg13:
15516 case DW_OP_breg14:
15517 case DW_OP_breg15:
15518 case DW_OP_breg16:
15519 case DW_OP_breg17:
15520 case DW_OP_breg18:
15521 case DW_OP_breg19:
15522 case DW_OP_breg20:
15523 case DW_OP_breg21:
15524 case DW_OP_breg22:
15525 case DW_OP_breg23:
15526 case DW_OP_breg24:
15527 case DW_OP_breg25:
15528 case DW_OP_breg26:
15529 case DW_OP_breg27:
15530 case DW_OP_breg28:
15531 case DW_OP_breg29:
15532 case DW_OP_breg30:
15533 case DW_OP_breg31:
15534 case DW_OP_fbreg:
15535 case DW_OP_push_object_address:
15536 case DW_OP_call_frame_cfa:
15537 ++frame_offset_;
15538 break;
15539
15540 case DW_OP_drop:
15541 case DW_OP_xderef:
15542 case DW_OP_and:
15543 case DW_OP_div:
15544 case DW_OP_minus:
15545 case DW_OP_mod:
15546 case DW_OP_mul:
15547 case DW_OP_or:
15548 case DW_OP_plus:
15549 case DW_OP_shl:
15550 case DW_OP_shr:
15551 case DW_OP_shra:
15552 case DW_OP_xor:
15553 case DW_OP_bra:
15554 case DW_OP_eq:
15555 case DW_OP_ge:
15556 case DW_OP_gt:
15557 case DW_OP_le:
15558 case DW_OP_lt:
15559 case DW_OP_ne:
15560 case DW_OP_regx:
15561 case DW_OP_xderef_size:
15562 --frame_offset_;
15563 break;
15564
15565 case DW_OP_call2:
15566 case DW_OP_call4:
15567 case DW_OP_call_ref:
15568 {
15569 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
15570 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
15571
15572 if (stack_usage == NULL)
15573 return false;
15574 frame_offset_ += *stack_usage;
15575 break;
15576 }
15577
15578 case DW_OP_GNU_push_tls_address:
15579 case DW_OP_GNU_uninit:
15580 case DW_OP_GNU_encoded_addr:
15581 case DW_OP_GNU_implicit_pointer:
15582 case DW_OP_GNU_entry_value:
15583 case DW_OP_GNU_const_type:
15584 case DW_OP_GNU_regval_type:
15585 case DW_OP_GNU_deref_type:
15586 case DW_OP_GNU_convert:
15587 case DW_OP_GNU_reinterpret:
15588 case DW_OP_GNU_parameter_ref:
15589 /* loc_list_from_tree will probably not output these operations for
15590 size functions, so assume they will not appear here. */
15591 /* Fall through... */
15592
15593 default:
15594 gcc_unreachable ();
15595 }
15596
15597 /* Now, follow the control flow (except subroutine calls). */
15598 switch (l->dw_loc_opc)
15599 {
15600 case DW_OP_bra:
15601 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
15602 frame_offsets))
15603 return false;
15604 /* Fall through... */
15605
15606 case DW_OP_skip:
15607 l = l->dw_loc_oprnd1.v.val_loc;
15608 break;
15609
15610 case DW_OP_stack_value:
15611 return true;
15612
15613 default:
15614 l = l->dw_loc_next;
15615 break;
15616 }
15617 }
15618
15619 return true;
15620 }
15621
15622 /* Make a DFS over operations reachable through LOC (i.e. follow branch
15623 operations) in order to resolve the operand of DW_OP_pick operations that
15624 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
15625 offset *before* LOC is executed. Return true if all relocations were
15626 successful. */
15627
15628 static bool
15629 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
15630 struct dwarf_procedure_info *dpi)
15631 {
15632 /* Associate to all visited operations the frame offset *before* evaluating
15633 this operation. */
15634 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
15635
15636 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
15637 frame_offsets);
15638 }
15639
15640 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
15641 Return NULL if it is not possible. */
15642
15643 static dw_die_ref
15644 function_to_dwarf_procedure (tree fndecl)
15645 {
15646 struct loc_descr_context ctx;
15647 struct dwarf_procedure_info dpi;
15648 dw_die_ref dwarf_proc_die;
15649 tree tree_body = DECL_SAVED_TREE (fndecl);
15650 dw_loc_descr_ref loc_body, epilogue;
15651
15652 tree cursor;
15653 unsigned i;
15654
15655 /* Do not generate multiple DWARF procedures for the same function
15656 declaration. */
15657 dwarf_proc_die = lookup_decl_die (fndecl);
15658 if (dwarf_proc_die != NULL)
15659 return dwarf_proc_die;
15660
15661 /* DWARF procedures are available starting with the DWARFv3 standard, but
15662 it's the DWARFv4 standard that introduces the DW_TAG_dwarf_procedure
15663 DIE. */
15664 if (dwarf_version < 3 && dwarf_strict)
15665 return NULL;
15666
15667 /* We handle only functions for which we still have a body, that return a
15668 supported type and that take arguments with supported types. Note that
15669 there is no point translating functions that return nothing. */
15670 if (tree_body == NULL_TREE
15671 || DECL_RESULT (fndecl) == NULL_TREE
15672 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
15673 return NULL;
15674
15675 for (cursor = DECL_ARGUMENTS (fndecl);
15676 cursor != NULL_TREE;
15677 cursor = TREE_CHAIN (cursor))
15678 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
15679 return NULL;
15680
15681 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
15682 if (TREE_CODE (tree_body) != RETURN_EXPR)
15683 return NULL;
15684 tree_body = TREE_OPERAND (tree_body, 0);
15685 if (TREE_CODE (tree_body) != MODIFY_EXPR
15686 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
15687 return NULL;
15688 tree_body = TREE_OPERAND (tree_body, 1);
15689
15690 /* Try to translate the body expression itself. Note that this will probably
15691 cause an infinite recursion if its call graph has a cycle. This is very
15692 unlikely for size functions, however, so don't bother with such things at
15693 the moment. */
15694 ctx.context_type = NULL_TREE;
15695 ctx.base_decl = NULL_TREE;
15696 ctx.dpi = &dpi;
15697 dpi.fndecl = fndecl;
15698 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
15699 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
15700 if (!loc_body)
15701 return NULL;
15702
15703 /* After evaluating all operands in "loc_body", we should still have on the
15704 stack all arguments plus the desired function result (top of the stack).
15705 Generate code in order to keep only the result in our stack frame. */
15706 epilogue = NULL;
15707 for (i = 0; i < dpi.args_count; ++i)
15708 {
15709 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
15710 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
15711 op_couple->dw_loc_next->dw_loc_next = epilogue;
15712 epilogue = op_couple;
15713 }
15714 add_loc_descr (&loc_body, epilogue);
15715 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
15716 return NULL;
15717
15718 /* Trailing nops from loc_descriptor_from_tree (if any) cannot be removed
15719 because they are considered useful. Now that there is an epilogue, they
15720 are not useful anymore, so give it another try. */
15721 loc_descr_without_nops (loc_body);
15722
15723 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
15724 a DW_TAG_dwarf_procedure, so we may have a conflict here. It's unlikely,
15725 though, given that size functions do not come from source, so they should
15726 not have a dedicated DW_TAG_subprogram DIE. */
15727 dwarf_proc_die
15728 = new_dwarf_proc_die (loc_body, fndecl,
15729 get_context_die (DECL_CONTEXT (fndecl)));
15730
15731 /* The called DWARF procedure consumes one stack slot per argument and
15732 returns one stack slot. */
15733 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
15734
15735 return dwarf_proc_die;
15736 }
15737
15738
15739 /* Generate Dwarf location list representing LOC.
15740 If WANT_ADDRESS is 0, an expression computing the value of LOC will be returned.
15741 If WANT_ADDRESS is 1, an expression computing the address of LOC will be returned.
15742 If WANT_ADDRESS is 2, an expression computing an address usable in a location
15743 description will be returned (i.e. DW_OP_reg can be used
15744 to refer to register values).
15745
15746 CONTEXT provides information to customize the location descriptions
15747 generation. Its context_type field specifies what type is implicitly
15748 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
15749 will not be generated.
15750
15751 Its DPI field determines whether we are generating a DWARF expression for a
15752 DWARF procedure, so PARM_DECL references are processed specifically.
15753
15754 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
15755 and dpi fields were null. */
15756
15757 static dw_loc_list_ref
15758 loc_list_from_tree_1 (tree loc, int want_address,
15759 const struct loc_descr_context *context)
15760 {
15761 dw_loc_descr_ref ret = NULL, ret1 = NULL;
15762 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
15763 int have_address = 0;
15764 enum dwarf_location_atom op;
15765
15766 /* ??? Most of the time we do not take proper care for sign/zero
15767 extending the values properly. Hopefully this won't be a real
15768 problem... */
15769
15770 if (context != NULL
15771 && context->base_decl == loc
15772 && want_address == 0)
15773 {
15774 if (dwarf_version >= 3 || !dwarf_strict)
15775 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
15776 NULL, NULL, NULL);
15777 else
15778 return NULL;
15779 }
15780
15781 switch (TREE_CODE (loc))
15782 {
15783 case ERROR_MARK:
15784 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
15785 return 0;
15786
15787 case PLACEHOLDER_EXPR:
15788 /* This case involves extracting fields from an object to determine the
15789 position of other fields. It is supposed to appear only as the first
15790 operand of COMPONENT_REF nodes and to reference precisely the type
15791 that the context allows. */
15792 if (context != NULL
15793 && TREE_TYPE (loc) == context->context_type
15794 && want_address >= 1)
15795 {
15796 if (dwarf_version >= 3 || !dwarf_strict)
15797 {
15798 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
15799 have_address = 1;
15800 break;
15801 }
15802 else
15803 return NULL;
15804 }
15805 else
15806 expansion_failed (loc, NULL_RTX,
15807 "PLACEHOLDER_EXPR for an unexpected type");
15808 break;
15809
15810 case CALL_EXPR:
15811 {
15812 const int nargs = call_expr_nargs (loc);
15813 tree callee = get_callee_fndecl (loc);
15814 int i;
15815 dw_die_ref dwarf_proc;
15816
15817 if (callee == NULL_TREE)
15818 goto call_expansion_failed;
15819
15820 /* We handle only functions that return an integer. */
15821 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
15822 goto call_expansion_failed;
15823
15824 dwarf_proc = function_to_dwarf_procedure (callee);
15825 if (dwarf_proc == NULL)
15826 goto call_expansion_failed;
15827
15828 /* Evaluate arguments right-to-left so that the first argument will
15829 be the top-most one on the stack. */
15830 for (i = nargs - 1; i >= 0; --i)
15831 {
15832 dw_loc_descr_ref loc_descr
15833 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
15834 context);
15835
15836 if (loc_descr == NULL)
15837 goto call_expansion_failed;
15838
15839 add_loc_descr (&ret, loc_descr);
15840 }
15841
15842 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
15843 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15844 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
15845 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
15846 add_loc_descr (&ret, ret1);
15847 break;
15848
15849 call_expansion_failed:
15850 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
15851 /* There are no opcodes for these operations. */
15852 return 0;
15853 }
15854
15855 case PREINCREMENT_EXPR:
15856 case PREDECREMENT_EXPR:
15857 case POSTINCREMENT_EXPR:
15858 case POSTDECREMENT_EXPR:
15859 expansion_failed (loc, NULL_RTX, "PRE/POST INCREMENT/DECREMENT");
15860 /* There are no opcodes for these operations. */
15861 return 0;
15862
15863 case ADDR_EXPR:
15864 /* If we already want an address, see if there is INDIRECT_REF inside
15865 e.g. for &this->field. */
15866 if (want_address)
15867 {
15868 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
15869 (loc, want_address == 2, context);
15870 if (list_ret)
15871 have_address = 1;
15872 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
15873 && (ret = cst_pool_loc_descr (loc)))
15874 have_address = 1;
15875 }
15876 /* Otherwise, process the argument and look for the address. */
15877 if (!list_ret && !ret)
15878 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
15879 else
15880 {
15881 if (want_address)
15882 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
15883 return NULL;
15884 }
15885 break;
15886
15887 case VAR_DECL:
15888 if (DECL_THREAD_LOCAL_P (loc))
15889 {
15890 rtx rtl;
15891 enum dwarf_location_atom tls_op;
15892 enum dtprel_bool dtprel = dtprel_false;
15893
15894 if (targetm.have_tls)
15895 {
15896 /* If this is not defined, we have no way to emit the
15897 data. */
15898 if (!targetm.asm_out.output_dwarf_dtprel)
15899 return 0;
15900
15901 /* The way DW_OP_GNU_push_tls_address is specified, we
15902 can only look up addresses of objects in the current
15903 module. We used DW_OP_addr as first op, but that's
15904 wrong, because DW_OP_addr is relocated by the debug
15905 info consumer, while DW_OP_GNU_push_tls_address
15906 operand shouldn't be. */
15907 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
15908 return 0;
15909 dtprel = dtprel_true;
15910 tls_op = DW_OP_GNU_push_tls_address;
15911 }
15912 else
15913 {
15914 if (!targetm.emutls.debug_form_tls_address
15915 || !(dwarf_version >= 3 || !dwarf_strict))
15916 return 0;
15917 /* We stuffed the control variable into the DECL_VALUE_EXPR
15918 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
15919 	       no longer appear in gimple code.  We used the control
15920 	       variable specifically so that we could pick it up here.  */
15921 loc = DECL_VALUE_EXPR (loc);
15922 tls_op = DW_OP_form_tls_address;
15923 }
15924
15925 rtl = rtl_for_decl_location (loc);
15926 if (rtl == NULL_RTX)
15927 return 0;
15928
15929 if (!MEM_P (rtl))
15930 return 0;
15931 rtl = XEXP (rtl, 0);
15932 if (! CONSTANT_P (rtl))
15933 return 0;
15934
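	  /* The resulting expression first pushes the address operand
	     (emitted DTP-relative for native TLS, or the address of the
	     emutls control variable otherwise) and then applies TLS_OP,
	     which turns it into the address of the object in the running
	     thread.  */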
15935 ret = new_addr_loc_descr (rtl, dtprel);
15936 ret1 = new_loc_descr (tls_op, 0, 0);
15937 add_loc_descr (&ret, ret1);
15938
15939 have_address = 1;
15940 break;
15941 }
15942 /* FALLTHRU */
15943
15944 case PARM_DECL:
15945 if (context != NULL && context->dpi != NULL
15946 && DECL_CONTEXT (loc) == context->dpi->fndecl)
15947 {
15948 /* We are generating code for a DWARF procedure and we want to access
15949 one of its arguments: find the appropriate argument offset and let
15950 the resolve_args_picking pass compute the offset that complies
15951 with the stack frame size. */
15952 unsigned i = 0;
15953 tree cursor;
15954
15955 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
15956 cursor != NULL_TREE && cursor != loc;
15957 cursor = TREE_CHAIN (cursor), ++i)
15958 ;
15959 /* If we are translating a DWARF procedure, all referenced parameters
15960 must belong to the current function. */
15961 gcc_assert (cursor != NULL_TREE);
15962
15963 ret = new_loc_descr (DW_OP_pick, i, 0);
15964 ret->frame_offset_rel = 1;
15965 break;
15966 }
15967 /* FALLTHRU */
15968
15969 case RESULT_DECL:
15970 if (DECL_HAS_VALUE_EXPR_P (loc))
15971 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
15972 want_address, context);
15973 /* FALLTHRU */
15974
15975 case FUNCTION_DECL:
15976 {
15977 rtx rtl;
15978 var_loc_list *loc_list = lookup_decl_loc (loc);
15979
15980 if (loc_list && loc_list->first)
15981 {
15982 list_ret = dw_loc_list (loc_list, loc, want_address);
15983 have_address = want_address != 0;
15984 break;
15985 }
15986 rtl = rtl_for_decl_location (loc);
15987 if (rtl == NULL_RTX)
15988 {
15989 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
15990 return 0;
15991 }
15992 else if (CONST_INT_P (rtl))
15993 {
15994 HOST_WIDE_INT val = INTVAL (rtl);
15995 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
15996 val &= GET_MODE_MASK (DECL_MODE (loc));
15997 ret = int_loc_descriptor (val);
15998 }
15999 else if (GET_CODE (rtl) == CONST_STRING)
16000 {
16001 expansion_failed (loc, NULL_RTX, "CONST_STRING");
16002 return 0;
16003 }
16004 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
16005 ret = new_addr_loc_descr (rtl, dtprel_false);
16006 else
16007 {
16008 machine_mode mode, mem_mode;
16009
16010 /* Certain constructs can only be represented at top-level. */
16011 if (want_address == 2)
16012 {
16013 ret = loc_descriptor (rtl, VOIDmode,
16014 VAR_INIT_STATUS_INITIALIZED);
16015 have_address = 1;
16016 }
16017 else
16018 {
16019 mode = GET_MODE (rtl);
16020 mem_mode = VOIDmode;
16021 if (MEM_P (rtl))
16022 {
16023 mem_mode = mode;
16024 mode = get_address_mode (rtl);
16025 rtl = XEXP (rtl, 0);
16026 have_address = 1;
16027 }
16028 ret = mem_loc_descriptor (rtl, mode, mem_mode,
16029 VAR_INIT_STATUS_INITIALIZED);
16030 }
16031 if (!ret)
16032 expansion_failed (loc, rtl,
16033 "failed to produce loc descriptor for rtl");
16034 }
16035 }
16036 break;
16037
16038 case MEM_REF:
16039 if (!integer_zerop (TREE_OPERAND (loc, 1)))
16040 {
16041 have_address = 1;
16042 goto do_plus;
16043 }
16044 /* Fallthru. */
16045 case INDIRECT_REF:
16046 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
16047 have_address = 1;
16048 break;
16049
16050 case TARGET_MEM_REF:
16051 case SSA_NAME:
16052 case DEBUG_EXPR_DECL:
16053 return NULL;
16054
16055 case COMPOUND_EXPR:
16056 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
16057 context);
16058
16059 CASE_CONVERT:
16060 case VIEW_CONVERT_EXPR:
16061 case SAVE_EXPR:
16062 case MODIFY_EXPR:
16063 case NON_LVALUE_EXPR:
16064 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
16065 context);
16066
16067 case COMPONENT_REF:
16068 case BIT_FIELD_REF:
16069 case ARRAY_REF:
16070 case ARRAY_RANGE_REF:
16071 case REALPART_EXPR:
16072 case IMAGPART_EXPR:
16073 {
16074 tree obj, offset;
16075 HOST_WIDE_INT bitsize, bitpos, bytepos;
16076 machine_mode mode;
16077 int unsignedp, reversep, volatilep = 0;
16078
16079 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
16080 &unsignedp, &reversep, &volatilep);
16081
16082 gcc_assert (obj != loc);
16083
16084 list_ret = loc_list_from_tree_1 (obj,
16085 want_address == 2
16086 && !bitpos && !offset ? 2 : 1,
16087 context);
16088 	/* TODO: We can extract the value of a small expression via shifting
16089 	   even for a nonzero bitpos.  */
16090 if (list_ret == 0)
16091 return 0;
16092 if (bitpos % BITS_PER_UNIT != 0 || bitsize % BITS_PER_UNIT != 0)
16093 {
16094 expansion_failed (loc, NULL_RTX,
16095 "bitfield access");
16096 return 0;
16097 }
16098
16099 if (offset != NULL_TREE)
16100 {
16101 /* Variable offset. */
16102 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
16103 if (list_ret1 == 0)
16104 return 0;
16105 add_loc_list (&list_ret, list_ret1);
16106 if (!list_ret)
16107 return 0;
16108 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
16109 }
16110
16111 bytepos = bitpos / BITS_PER_UNIT;
16112 if (bytepos > 0)
16113 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst, bytepos, 0));
16114 else if (bytepos < 0)
16115 loc_list_plus_const (list_ret, bytepos);
16116
16117 have_address = 1;
16118 break;
16119 }
16120
16121 case INTEGER_CST:
16122 if ((want_address || !tree_fits_shwi_p (loc))
16123 && (ret = cst_pool_loc_descr (loc)))
16124 have_address = 1;
16125 else if (want_address == 2
16126 && tree_fits_shwi_p (loc)
16127 && (ret = address_of_int_loc_descriptor
16128 (int_size_in_bytes (TREE_TYPE (loc)),
16129 tree_to_shwi (loc))))
16130 have_address = 1;
16131 else if (tree_fits_shwi_p (loc))
16132 ret = int_loc_descriptor (tree_to_shwi (loc));
16133 else if (tree_fits_uhwi_p (loc))
16134 ret = uint_loc_descriptor (tree_to_uhwi (loc));
16135 else
16136 {
16137 expansion_failed (loc, NULL_RTX,
16138 "Integer operand is not host integer");
16139 return 0;
16140 }
16141 break;
16142
16143 case CONSTRUCTOR:
16144 case REAL_CST:
16145 case STRING_CST:
16146 case COMPLEX_CST:
16147 if ((ret = cst_pool_loc_descr (loc)))
16148 have_address = 1;
16149 else if (TREE_CODE (loc) == CONSTRUCTOR)
16150 {
16151 tree type = TREE_TYPE (loc);
16152 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
16153 unsigned HOST_WIDE_INT offset = 0;
16154 unsigned HOST_WIDE_INT cnt;
16155 constructor_elt *ce;
16156
16157 if (TREE_CODE (type) == RECORD_TYPE)
16158 {
16159 /* This is very limited, but it's enough to output
16160 pointers to member functions, as long as the
16161 referenced function is defined in the current
16162 translation unit. */
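	    /* Roughly: each handled field value is followed by a DW_OP_piece
	       covering the field's size, while gaps between fields and any
	       trailing padding are described by bare DW_OP_piece operations
	       with no preceding value.  */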
16163 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
16164 {
16165 tree val = ce->value;
16166
16167 tree field = ce->index;
16168
16169 if (val)
16170 STRIP_NOPS (val);
16171
16172 if (!field || DECL_BIT_FIELD (field))
16173 {
16174 expansion_failed (loc, NULL_RTX,
16175 "bitfield in record type constructor");
16176 size = offset = (unsigned HOST_WIDE_INT)-1;
16177 ret = NULL;
16178 break;
16179 }
16180
16181 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
16182 unsigned HOST_WIDE_INT pos = int_byte_position (field);
16183 gcc_assert (pos + fieldsize <= size);
16184 if (pos < offset)
16185 {
16186 expansion_failed (loc, NULL_RTX,
16187 "out-of-order fields in record constructor");
16188 size = offset = (unsigned HOST_WIDE_INT)-1;
16189 ret = NULL;
16190 break;
16191 }
16192 if (pos > offset)
16193 {
16194 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
16195 add_loc_descr (&ret, ret1);
16196 offset = pos;
16197 }
16198 if (val && fieldsize != 0)
16199 {
16200 ret1 = loc_descriptor_from_tree (val, want_address, context);
16201 if (!ret1)
16202 {
16203 expansion_failed (loc, NULL_RTX,
16204 "unsupported expression in field");
16205 size = offset = (unsigned HOST_WIDE_INT)-1;
16206 ret = NULL;
16207 break;
16208 }
16209 add_loc_descr (&ret, ret1);
16210 }
16211 if (fieldsize)
16212 {
16213 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
16214 add_loc_descr (&ret, ret1);
16215 offset = pos + fieldsize;
16216 }
16217 }
16218
16219 if (offset != size)
16220 {
16221 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
16222 add_loc_descr (&ret, ret1);
16223 offset = size;
16224 }
16225
16226 have_address = !!want_address;
16227 }
16228 else
16229 expansion_failed (loc, NULL_RTX,
16230 "constructor of non-record type");
16231 }
16232 else
16233 /* We can construct small constants here using int_loc_descriptor. */
16234 expansion_failed (loc, NULL_RTX,
16235 "constructor or constant not in constant pool");
16236 break;
16237
16238 case TRUTH_AND_EXPR:
16239 case TRUTH_ANDIF_EXPR:
16240 case BIT_AND_EXPR:
16241 op = DW_OP_and;
16242 goto do_binop;
16243
16244 case TRUTH_XOR_EXPR:
16245 case BIT_XOR_EXPR:
16246 op = DW_OP_xor;
16247 goto do_binop;
16248
16249 case TRUTH_OR_EXPR:
16250 case TRUTH_ORIF_EXPR:
16251 case BIT_IOR_EXPR:
16252 op = DW_OP_or;
16253 goto do_binop;
16254
16255 case FLOOR_DIV_EXPR:
16256 case CEIL_DIV_EXPR:
16257 case ROUND_DIV_EXPR:
16258 case TRUNC_DIV_EXPR:
16259 case EXACT_DIV_EXPR:
16260 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
16261 return 0;
16262 op = DW_OP_div;
16263 goto do_binop;
16264
16265 case MINUS_EXPR:
16266 op = DW_OP_minus;
16267 goto do_binop;
16268
16269 case FLOOR_MOD_EXPR:
16270 case CEIL_MOD_EXPR:
16271 case ROUND_MOD_EXPR:
16272 case TRUNC_MOD_EXPR:
16273 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
16274 {
16275 op = DW_OP_mod;
16276 goto do_binop;
16277 }
16278 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
16279 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
16280 if (list_ret == 0 || list_ret1 == 0)
16281 return 0;
16282
16283 add_loc_list (&list_ret, list_ret1);
16284 if (list_ret == 0)
16285 return 0;
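      /* Open-code the signed modulus as op0 - (op0 / op1) * op1: with op0
	 and op1 on the stack (op1 on top), the two DW_OP_over operations
	 duplicate both operands, DW_OP_div and DW_OP_mul compute
	 (op0 / op1) * op1, and DW_OP_minus subtracts that from op0.  */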
16286 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
16287 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
16288 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
16289 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
16290 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
16291 break;
16292
16293 case MULT_EXPR:
16294 op = DW_OP_mul;
16295 goto do_binop;
16296
16297 case LSHIFT_EXPR:
16298 op = DW_OP_shl;
16299 goto do_binop;
16300
16301 case RSHIFT_EXPR:
16302 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
16303 goto do_binop;
16304
16305 case POINTER_PLUS_EXPR:
16306 case PLUS_EXPR:
16307 do_plus:
16308 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
16309 {
16310 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
16311 smarter to encode their opposite. The DW_OP_plus_uconst operation
16312 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
16313 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
16314 bytes, Y being the size of the operation that pushes the opposite
16315 of the addend. So let's choose the smallest representation. */
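	  /* For instance, with 64-bit addresses an addend of -1 would need a
	     10-byte ULEB128 operand for DW_OP_plus_uconst (11 bytes in
	     total), whereas pushing the literal 1 and applying DW_OP_minus
	     takes only 2 bytes.  */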
16316 const tree tree_addend = TREE_OPERAND (loc, 1);
16317 offset_int wi_addend;
16318 HOST_WIDE_INT shwi_addend;
16319 dw_loc_descr_ref loc_naddend;
16320
16321 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
16322 if (list_ret == 0)
16323 return 0;
16324
16325 /* Try to get the literal to push. It is the opposite of the addend,
16326 so as we rely on wrapping during DWARF evaluation, first decode
16327 the literal as a "DWARF-sized" signed number. */
16328 wi_addend = wi::to_offset (tree_addend);
16329 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
16330 shwi_addend = wi_addend.to_shwi ();
16331 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
16332 ? int_loc_descriptor (-shwi_addend)
16333 : NULL;
16334
16335 if (loc_naddend != NULL
16336 && ((unsigned) size_of_uleb128 (shwi_addend)
16337 > size_of_loc_descr (loc_naddend)))
16338 {
16339 add_loc_descr_to_each (list_ret, loc_naddend);
16340 add_loc_descr_to_each (list_ret,
16341 new_loc_descr (DW_OP_minus, 0, 0));
16342 }
16343 else
16344 {
16345 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
16346 {
16347 loc_naddend = loc_cur;
16348 loc_cur = loc_cur->dw_loc_next;
16349 ggc_free (loc_naddend);
16350 }
16351 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
16352 }
16353 break;
16354 }
16355
16356 op = DW_OP_plus;
16357 goto do_binop;
16358
16359 case LE_EXPR:
16360 op = DW_OP_le;
16361 goto do_comp_binop;
16362
16363 case GE_EXPR:
16364 op = DW_OP_ge;
16365 goto do_comp_binop;
16366
16367 case LT_EXPR:
16368 op = DW_OP_lt;
16369 goto do_comp_binop;
16370
16371 case GT_EXPR:
16372 op = DW_OP_gt;
16373 goto do_comp_binop;
16374
16375 do_comp_binop:
16376 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
16377 {
16378 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
16379 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
16380 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
16381 TREE_CODE (loc));
16382 break;
16383 }
16384 else
16385 goto do_binop;
16386
16387 case EQ_EXPR:
16388 op = DW_OP_eq;
16389 goto do_binop;
16390
16391 case NE_EXPR:
16392 op = DW_OP_ne;
16393 goto do_binop;
16394
16395 do_binop:
16396 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
16397 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
16398 if (list_ret == 0 || list_ret1 == 0)
16399 return 0;
16400
16401 add_loc_list (&list_ret, list_ret1);
16402 if (list_ret == 0)
16403 return 0;
16404 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
16405 break;
16406
16407 case TRUTH_NOT_EXPR:
16408 case BIT_NOT_EXPR:
16409 op = DW_OP_not;
16410 goto do_unop;
16411
16412 case ABS_EXPR:
16413 op = DW_OP_abs;
16414 goto do_unop;
16415
16416 case NEGATE_EXPR:
16417 op = DW_OP_neg;
16418 goto do_unop;
16419
16420 do_unop:
16421 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
16422 if (list_ret == 0)
16423 return 0;
16424
16425 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
16426 break;
16427
16428 case MIN_EXPR:
16429 case MAX_EXPR:
16430 {
16431 const enum tree_code code =
16432 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
16433
16434 loc = build3 (COND_EXPR, TREE_TYPE (loc),
16435 build2 (code, integer_type_node,
16436 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
16437 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
16438 }
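      /* I.e. MIN_EXPR <a, b> becomes (a > b) ? b : a and MAX_EXPR <a, b>
	 becomes (a < b) ? b : a, so the COND_EXPR handling below covers
	 both.  */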
16439
16440 /* ... fall through ... */
16441
16442 case COND_EXPR:
16443 {
16444 dw_loc_descr_ref lhs
16445 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
16446 dw_loc_list_ref rhs
16447 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
16448 dw_loc_descr_ref bra_node, jump_node, tmp;
16449
16450 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
16451 if (list_ret == 0 || lhs == 0 || rhs == 0)
16452 return 0;
16453
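	/* Lay the expression out as:
	     <condition>  DW_OP_bra <then>
	     <else>       DW_OP_skip <end>
	     <then>:      <then-value>
	     <end>:       DW_OP_nop
	   i.e. branch to the THEN expression when the condition is non-zero,
	   otherwise fall through to the ELSE expression and skip over the
	   THEN part.  */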
16454 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16455 add_loc_descr_to_each (list_ret, bra_node);
16456
16457 add_loc_list (&list_ret, rhs);
16458 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
16459 add_loc_descr_to_each (list_ret, jump_node);
16460
16461 add_loc_descr_to_each (list_ret, lhs);
16462 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16463 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
16464
16465 /* ??? Need a node to point the skip at. Use a nop. */
16466 tmp = new_loc_descr (DW_OP_nop, 0, 0);
16467 add_loc_descr_to_each (list_ret, tmp);
16468 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16469 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
16470 }
16471 break;
16472
16473 case FIX_TRUNC_EXPR:
16474 return 0;
16475
16476 default:
16477 /* Leave front-end specific codes as simply unknown. This comes
16478 up, for instance, with the C STMT_EXPR. */
16479 if ((unsigned int) TREE_CODE (loc)
16480 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
16481 {
16482 expansion_failed (loc, NULL_RTX,
16483 "language specific tree node");
16484 return 0;
16485 }
16486
16487       /* Otherwise this is a generic code; we should just list all of
16488 these explicitly. We forgot one. */
16489 if (flag_checking)
16490 gcc_unreachable ();
16491
16492 /* In a release build, we want to degrade gracefully: better to
16493 generate incomplete debugging information than to crash. */
16494 return NULL;
16495 }
16496
16497 if (!ret && !list_ret)
16498 return 0;
16499
16500 if (want_address == 2 && !have_address
16501 && (dwarf_version >= 4 || !dwarf_strict))
16502 {
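      /* DW_OP_stack_value is new in DWARF 4 (hence the version check above);
	 it marks the value just computed as being the object's actual value
	 rather than its location.  */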
16503 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16504 {
16505 expansion_failed (loc, NULL_RTX,
16506 "DWARF address size mismatch");
16507 return 0;
16508 }
16509 if (ret)
16510 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
16511 else
16512 add_loc_descr_to_each (list_ret,
16513 new_loc_descr (DW_OP_stack_value, 0, 0));
16514 have_address = 1;
16515 }
16516 /* Show if we can't fill the request for an address. */
16517 if (want_address && !have_address)
16518 {
16519 expansion_failed (loc, NULL_RTX,
16520 "Want address and only have value");
16521 return 0;
16522 }
16523
16524 gcc_assert (!ret || !list_ret);
16525
16526 /* If we've got an address and don't want one, dereference. */
16527 if (!want_address && have_address)
16528 {
16529 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16530
16531 if (size > DWARF2_ADDR_SIZE || size == -1)
16532 {
16533 expansion_failed (loc, NULL_RTX,
16534 "DWARF address size mismatch");
16535 return 0;
16536 }
16537 else if (size == DWARF2_ADDR_SIZE)
16538 op = DW_OP_deref;
16539 else
16540 op = DW_OP_deref_size;
16541
16542 if (ret)
16543 add_loc_descr (&ret, new_loc_descr (op, size, 0));
16544 else
16545 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
16546 }
16547 if (ret)
16548 list_ret = new_loc_list (ret, NULL, NULL, NULL);
16549
16550 return list_ret;
16551 }
16552
16553 /* Likewise, but strip useless DW_OP_nop operations in the resulting
16554 expressions. */
16555
16556 static dw_loc_list_ref
16557 loc_list_from_tree (tree loc, int want_address,
16558 const struct loc_descr_context *context)
16559 {
16560 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
16561
16562 for (dw_loc_list_ref loc_cur = result;
16563 loc_cur != NULL; loc_cur =
16564 loc_cur->dw_loc_next)
16565 loc_descr_without_nops (loc_cur->expr);
16566 return result;
16567 }
16568
16569 /* Same as above but return only single location expression. */
16570 static dw_loc_descr_ref
16571 loc_descriptor_from_tree (tree loc, int want_address,
16572 const struct loc_descr_context *context)
16573 {
16574 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
16575 if (!ret)
16576 return NULL;
16577 if (ret->dw_loc_next)
16578 {
16579 expansion_failed (loc, NULL_RTX,
16580 "Location list where only loc descriptor needed");
16581 return NULL;
16582 }
16583 return ret->expr;
16584 }
16585
16586 /* Given a value, round it up to the lowest multiple of `boundary'
16587 which is not less than the value itself. */
16588
16589 static inline HOST_WIDE_INT
16590 ceiling (HOST_WIDE_INT value, unsigned int boundary)
16591 {
16592 return (((value + boundary - 1) / boundary) * boundary);
16593 }
16594
16595 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
16596 pointer to the declared type for the relevant field variable, or return
16597 `integer_type_node' if the given node turns out to be an
16598 ERROR_MARK node. */
16599
16600 static inline tree
16601 field_type (const_tree decl)
16602 {
16603 tree type;
16604
16605 if (TREE_CODE (decl) == ERROR_MARK)
16606 return integer_type_node;
16607
16608 type = DECL_BIT_FIELD_TYPE (decl);
16609 if (type == NULL_TREE)
16610 type = TREE_TYPE (decl);
16611
16612 return type;
16613 }
16614
16615 /* Given a pointer to a tree node, return the alignment in bits for
16616 it, or else return BITS_PER_WORD if the node actually turns out to
16617 be an ERROR_MARK node. */
16618
16619 static inline unsigned
16620 simple_type_align_in_bits (const_tree type)
16621 {
16622 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
16623 }
16624
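/* Likewise, but return the alignment of the given declaration.  */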
16625 static inline unsigned
16626 simple_decl_align_in_bits (const_tree decl)
16627 {
16628 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
16629 }
16630
16631 /* Return the result of rounding T up to ALIGN. */
16632
16633 static inline offset_int
16634 round_up_to_align (const offset_int &t, unsigned int align)
16635 {
16636 return wi::udiv_trunc (t + align - 1, align) * align;
16637 }
16638
16639 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
16640 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
16641 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
16642 if we fail to return the size in one of these two forms. */
16643
16644 static dw_loc_descr_ref
16645 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
16646 {
16647 tree tree_size;
16648 struct loc_descr_context ctx;
16649
16650   /* Prefer to return a constant integer, if possible.  */
16651 *cst_size = int_size_in_bytes (type);
16652 if (*cst_size != -1)
16653 return NULL;
16654
16655 ctx.context_type = const_cast<tree> (type);
16656 ctx.base_decl = NULL_TREE;
16657 ctx.dpi = NULL;
16658
16659 type = TYPE_MAIN_VARIANT (type);
16660 tree_size = TYPE_SIZE_UNIT (type);
16661 return ((tree_size != NULL_TREE)
16662 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
16663 : NULL);
16664 }
16665
16666 /* Helper structure for RECORD_TYPE processing. */
16667 struct vlr_context
16668 {
16669 /* Root RECORD_TYPE. It is needed to generate data member location
16670 descriptions in variable-length records (VLR), but also to cope with
16671 variants, which are composed of nested structures multiplexed with
16672 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
16673 function processing a FIELD_DECL, it is required to be non null. */
16674 tree struct_type;
16675 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
16676 QUAL_UNION_TYPE), this holds an expression that computes the offset for
16677 this variant part as part of the root record (in storage units). For
16678 regular records, it must be NULL_TREE. */
16679 tree variant_part_offset;
16680 };
16681
16682 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
16683 addressed byte of the "containing object" for the given FIELD_DECL. If
16684 possible, return a native constant through CST_OFFSET (in which case NULL is
16685 returned); otherwise return a DWARF expression that computes the offset.
16686
16687 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
16688 that offset is, either because the argument turns out to be a pointer to an
16689 ERROR_MARK node, or because the offset expression is too complex for us.
16690
16691 CTX is required: see the comment for VLR_CONTEXT. */
16692
16693 static dw_loc_descr_ref
16694 field_byte_offset (const_tree decl, struct vlr_context *ctx,
16695 HOST_WIDE_INT *cst_offset)
16696 {
16697 offset_int object_offset_in_bits;
16698 offset_int object_offset_in_bytes;
16699 offset_int bitpos_int;
16700 bool is_byte_offset_cst, is_bit_offset_cst;
16701 tree tree_result;
16702 dw_loc_list_ref loc_result;
16703
16704 *cst_offset = 0;
16705
16706 if (TREE_CODE (decl) == ERROR_MARK)
16707 return NULL;
16708 else
16709 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
16710
16711 is_bit_offset_cst = TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST;
16712 is_byte_offset_cst = TREE_CODE (DECL_FIELD_OFFSET (decl)) != INTEGER_CST;
16713
16714 /* We cannot handle variable bit offsets at the moment, so abort if it's the
16715 case. */
16716 if (is_bit_offset_cst)
16717 return NULL;
16718
16719 #ifdef PCC_BITFIELD_TYPE_MATTERS
16720   /* We used to handle only constant offsets in all cases.  Now, we handle
16721      dynamic byte offsets properly only when PCC_BITFIELD_TYPE_MATTERS
16722      does not apply.  */
16723 if (PCC_BITFIELD_TYPE_MATTERS && is_byte_offset_cst && is_bit_offset_cst)
16724 {
16725 tree type;
16726 tree field_size_tree;
16727 offset_int deepest_bitpos;
16728 offset_int field_size_in_bits;
16729 unsigned int type_align_in_bits;
16730 unsigned int decl_align_in_bits;
16731 offset_int type_size_in_bits;
16732
16733 bitpos_int = wi::to_offset (bit_position (decl));
16734 type = field_type (decl);
16735 type_size_in_bits = offset_int_type_size_in_bits (type);
16736 type_align_in_bits = simple_type_align_in_bits (type);
16737
16738 field_size_tree = DECL_SIZE (decl);
16739
16740 /* The size could be unspecified if there was an error, or for
16741 a flexible array member. */
16742 if (!field_size_tree)
16743 field_size_tree = bitsize_zero_node;
16744
16745 /* If the size of the field is not constant, use the type size. */
16746 if (TREE_CODE (field_size_tree) == INTEGER_CST)
16747 field_size_in_bits = wi::to_offset (field_size_tree);
16748 else
16749 field_size_in_bits = type_size_in_bits;
16750
16751 decl_align_in_bits = simple_decl_align_in_bits (decl);
16752
16753 /* The GCC front-end doesn't make any attempt to keep track of the
16754 starting bit offset (relative to the start of the containing
16755 structure type) of the hypothetical "containing object" for a
16756 bit-field. Thus, when computing the byte offset value for the
16757 start of the "containing object" of a bit-field, we must deduce
16758 this information on our own. This can be rather tricky to do in
16759 some cases. For example, handling the following structure type
16760 definition when compiling for an i386/i486 target (which only
16761 aligns long long's to 32-bit boundaries) can be very tricky:
16762
16763 struct S { int field1; long long field2:31; };
16764
16765 Fortunately, there is a simple rule-of-thumb which can be used
16766 in such cases. When compiling for an i386/i486, GCC will
16767 allocate 8 bytes for the structure shown above. It decides to
16768 do this based upon one simple rule for bit-field allocation.
16769 GCC allocates each "containing object" for each bit-field at
16770 the first (i.e. lowest addressed) legitimate alignment boundary
16771 (based upon the required minimum alignment for the declared
16772 type of the field) which it can possibly use, subject to the
16773 condition that there is still enough available space remaining
16774 in the containing object (when allocated at the selected point)
16775 to fully accommodate all of the bits of the bit-field itself.
16776
16777 This simple rule makes it obvious why GCC allocates 8 bytes for
16778 each object of the structure type shown above. When looking
16779 for a place to allocate the "containing object" for `field2',
16780 the compiler simply tries to allocate a 64-bit "containing
16781 object" at each successive 32-bit boundary (starting at zero)
16782 	 until it finds a place to allocate that 64-bit field such that
16783 at least 31 contiguous (and previously unallocated) bits remain
16784 within that selected 64 bit field. (As it turns out, for the
16785 example above, the compiler finds it is OK to allocate the
16786 "containing object" 64-bit field at bit-offset zero within the
16787 structure type.)
16788
16789 Here we attempt to work backwards from the limited set of facts
16790 we're given, and we try to deduce from those facts, where GCC
16791 must have believed that the containing object started (within
16792 the structure type). The value we deduce is then used (by the
16793 callers of this routine) to generate DW_AT_location and
16794 DW_AT_bit_offset attributes for fields (both bit-fields and, in
16795 the case of DW_AT_location, regular fields as well). */
16796
16797 /* Figure out the bit-distance from the start of the structure to
16798 the "deepest" bit of the bit-field. */
16799 deepest_bitpos = bitpos_int + field_size_in_bits;
16800
16801 /* This is the tricky part. Use some fancy footwork to deduce
16802 where the lowest addressed bit of the containing object must
16803 be. */
16804 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
16805
16806 /* Round up to type_align by default. This works best for
16807 bitfields. */
16808 object_offset_in_bits
16809 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
16810
16811 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
16812 {
16813 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
16814
16815 /* Round up to decl_align instead. */
16816 object_offset_in_bits
16817 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
16818 }
16819 }
16820 #endif /* PCC_BITFIELD_TYPE_MATTERS */
16821
16822 tree_result = byte_position (decl);
16823 if (ctx->variant_part_offset != NULL_TREE)
16824 tree_result = fold (build2 (PLUS_EXPR, TREE_TYPE (tree_result),
16825 ctx->variant_part_offset, tree_result));
16826
16827   /* If the byte offset is a constant, it's simpler to handle a native
16828 constant rather than a DWARF expression. */
16829 if (TREE_CODE (tree_result) == INTEGER_CST)
16830 {
16831 *cst_offset = wi::to_offset (tree_result).to_shwi ();
16832 return NULL;
16833 }
16834 struct loc_descr_context loc_ctx = {
16835 ctx->struct_type, /* context_type */
16836 NULL_TREE, /* base_decl */
16837 NULL /* dpi */
16838 };
16839 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
16840
16841 /* We want a DWARF expression: abort if we only have a location list with
16842 multiple elements. */
16843 if (!loc_result || !single_element_loc_list_p (loc_result))
16844 return NULL;
16845 else
16846 return loc_result->expr;
16847 }
16848 \f
16849 /* The following routines define various Dwarf attributes and any data
16850 associated with them. */
16851
16852 /* Add a location description attribute value to a DIE.
16853
16854 This emits location attributes suitable for whole variables and
16855 whole parameters. Note that the location attributes for struct fields are
16856 generated by the routine `data_member_location_attribute' below. */
16857
16858 static inline void
16859 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
16860 dw_loc_list_ref descr)
16861 {
16862 if (descr == 0)
16863 return;
16864 if (single_element_loc_list_p (descr))
16865 add_AT_loc (die, attr_kind, descr->expr);
16866 else
16867 add_AT_loc_list (die, attr_kind, descr);
16868 }
16869
16870 /* Add DW_AT_accessibility attribute to DIE if needed. */
16871
16872 static void
16873 add_accessibility_attribute (dw_die_ref die, tree decl)
16874 {
16875 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
16876 children, otherwise the default is DW_ACCESS_public. In DWARF2
16877 the default has always been DW_ACCESS_public. */
16878 if (TREE_PROTECTED (decl))
16879 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
16880 else if (TREE_PRIVATE (decl))
16881 {
16882 if (dwarf_version == 2
16883 || die->die_parent == NULL
16884 || die->die_parent->die_tag != DW_TAG_class_type)
16885 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
16886 }
16887 else if (dwarf_version > 2
16888 && die->die_parent
16889 && die->die_parent->die_tag == DW_TAG_class_type)
16890 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
16891 }
16892
16893 /* Attach the specialized form of location attribute used for data members of
16894 struct and union types. In the special case of a FIELD_DECL node which
16895 represents a bit-field, the "offset" part of this special location
16896 descriptor must indicate the distance in bytes from the lowest-addressed
16897 byte of the containing struct or union type to the lowest-addressed byte of
16898 the "containing object" for the bit-field. (See the `field_byte_offset'
16899 function above).
16900
16901 For any given bit-field, the "containing object" is a hypothetical object
16902 (of some integral or enum type) within which the given bit-field lives. The
16903 type of this hypothetical "containing object" is always the same as the
16904 declared type of the individual bit-field itself (for GCC anyway... the
16905 DWARF spec doesn't actually mandate this). Note that it is the size (in
16906 bytes) of the hypothetical "containing object" which will be given in the
16907 DW_AT_byte_size attribute for this bit-field. (See the
16908 `byte_size_attribute' function below.) It is also used when calculating the
16909 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
16910 function below.)
16911
16912 CTX is required: see the comment for VLR_CONTEXT. */
16913
16914 static void
16915 add_data_member_location_attribute (dw_die_ref die,
16916 tree decl,
16917 struct vlr_context *ctx)
16918 {
16919 HOST_WIDE_INT offset;
16920 dw_loc_descr_ref loc_descr = 0;
16921
16922 if (TREE_CODE (decl) == TREE_BINFO)
16923 {
16924 /* We're working on the TAG_inheritance for a base class. */
16925 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
16926 {
16927 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
16928 aren't at a fixed offset from all (sub)objects of the same
16929 type. We need to extract the appropriate offset from our
16930 vtable. The following dwarf expression means
16931
16932 BaseAddr = ObAddr + *((*ObAddr) - Offset)
16933
16934 This is specific to the V3 ABI, of course. */
16935
16936 dw_loc_descr_ref tmp;
16937
16938 /* Make a copy of the object address. */
16939 tmp = new_loc_descr (DW_OP_dup, 0, 0);
16940 add_loc_descr (&loc_descr, tmp);
16941
16942 /* Extract the vtable address. */
16943 tmp = new_loc_descr (DW_OP_deref, 0, 0);
16944 add_loc_descr (&loc_descr, tmp);
16945
16946 /* Calculate the address of the offset. */
16947 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
16948 gcc_assert (offset < 0);
16949
16950 tmp = int_loc_descriptor (-offset);
16951 add_loc_descr (&loc_descr, tmp);
16952 tmp = new_loc_descr (DW_OP_minus, 0, 0);
16953 add_loc_descr (&loc_descr, tmp);
16954
16955 /* Extract the offset. */
16956 tmp = new_loc_descr (DW_OP_deref, 0, 0);
16957 add_loc_descr (&loc_descr, tmp);
16958
16959 /* Add it to the object address. */
16960 tmp = new_loc_descr (DW_OP_plus, 0, 0);
16961 add_loc_descr (&loc_descr, tmp);
16962 }
16963 else
16964 offset = tree_to_shwi (BINFO_OFFSET (decl));
16965 }
16966 else
16967 {
16968 loc_descr = field_byte_offset (decl, ctx, &offset);
16969
16970 /* If loc_descr is available then we know the field offset is dynamic.
16971 However, GDB does not handle dynamic field offsets very well at the
16972 moment. */
16973 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
16974 {
16975 loc_descr = NULL;
16976 offset = 0;
16977 }
16978
16979       /* Data member location evaluation starts with the base address on the
16980 stack. Compute the field offset and add it to this base address. */
16981 else if (loc_descr != NULL)
16982 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
16983 }
16984
16985 if (! loc_descr)
16986 {
16987 if (dwarf_version > 2)
16988 {
16989 /* Don't need to output a location expression, just the constant. */
16990 if (offset < 0)
16991 add_AT_int (die, DW_AT_data_member_location, offset);
16992 else
16993 add_AT_unsigned (die, DW_AT_data_member_location, offset);
16994 return;
16995 }
16996 else
16997 {
16998 enum dwarf_location_atom op;
16999
17000 /* The DWARF2 standard says that we should assume that the structure
17001 address is already on the stack, so we can specify a structure
17002 field address by using DW_OP_plus_uconst. */
17003 op = DW_OP_plus_uconst;
17004 loc_descr = new_loc_descr (op, offset, 0);
17005 }
17006 }
17007
17008 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
17009 }
17010
17011 /* Writes integer values to dw_vec_const array. */
17012
17013 static void
17014 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
17015 {
17016 while (size != 0)
17017 {
17018 *dest++ = val & 0xff;
17019 val >>= 8;
17020 --size;
17021 }
17022 }
17023
17024 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
17025
17026 static HOST_WIDE_INT
17027 extract_int (const unsigned char *src, unsigned int size)
17028 {
17029 HOST_WIDE_INT val = 0;
17030
17031 src += size;
17032 while (size != 0)
17033 {
17034 val <<= 8;
17035 val |= *--src & 0xff;
17036 --size;
17037 }
17038 return val;
17039 }
17040
17041 /* Writes wide_int values to dw_vec_const array. */
17042
17043 static void
17044 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
17045 {
17046 int i;
17047
17048 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
17049 {
17050 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
17051 return;
17052 }
17053
17054 /* We'd have to extend this code to support odd sizes. */
17055 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
17056
17057 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
17058
17059 if (WORDS_BIG_ENDIAN)
17060 for (i = n - 1; i >= 0; i--)
17061 {
17062 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
17063 dest += sizeof (HOST_WIDE_INT);
17064 }
17065 else
17066 for (i = 0; i < n; i++)
17067 {
17068 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
17069 dest += sizeof (HOST_WIDE_INT);
17070 }
17071 }
17072
17073 /* Writes floating point values to dw_vec_const array. */
17074
17075 static void
17076 insert_float (const_rtx rtl, unsigned char *array)
17077 {
17078 long val[4];
17079 int i;
17080
17081 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), GET_MODE (rtl));
17082
17083 /* real_to_target puts 32-bit pieces in each long. Pack them. */
17084 for (i = 0; i < GET_MODE_SIZE (GET_MODE (rtl)) / 4; i++)
17085 {
17086 insert_int (val[i], 4, array);
17087 array += 4;
17088 }
17089 }
17090
17091 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
17092 does not have a "location" either in memory or in a register. These
17093 things can arise in GNU C when a constant is passed as an actual parameter
17094 to an inlined function. They can also arise in C++ where declared
17095 constants do not necessarily get memory "homes". */
17096
17097 static bool
17098 add_const_value_attribute (dw_die_ref die, rtx rtl)
17099 {
17100 switch (GET_CODE (rtl))
17101 {
17102 case CONST_INT:
17103 {
17104 HOST_WIDE_INT val = INTVAL (rtl);
17105
17106 if (val < 0)
17107 add_AT_int (die, DW_AT_const_value, val);
17108 else
17109 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
17110 }
17111 return true;
17112
17113 case CONST_WIDE_INT:
17114 {
17115 wide_int w1 = std::make_pair (rtl, MAX_MODE_INT);
17116 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
17117 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
17118 wide_int w = wi::zext (w1, prec);
17119 add_AT_wide (die, DW_AT_const_value, w);
17120 }
17121 return true;
17122
17123 case CONST_DOUBLE:
17124 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
17125 floating-point constant. A CONST_DOUBLE is used whenever the
17126 constant requires more than one word in order to be adequately
17127 represented. */
17128 {
17129 machine_mode mode = GET_MODE (rtl);
17130
17131 if (TARGET_SUPPORTS_WIDE_INT == 0 && !SCALAR_FLOAT_MODE_P (mode))
17132 add_AT_double (die, DW_AT_const_value,
17133 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
17134 else
17135 {
17136 unsigned int length = GET_MODE_SIZE (mode);
17137 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
17138
17139 insert_float (rtl, array);
17140 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
17141 }
17142 }
17143 return true;
17144
17145 case CONST_VECTOR:
17146 {
17147 machine_mode mode = GET_MODE (rtl);
17148 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
17149 unsigned int length = CONST_VECTOR_NUNITS (rtl);
17150 unsigned char *array
17151 = ggc_vec_alloc<unsigned char> (length * elt_size);
17152 unsigned int i;
17153 unsigned char *p;
17154 machine_mode imode = GET_MODE_INNER (mode);
17155
17156 switch (GET_MODE_CLASS (mode))
17157 {
17158 case MODE_VECTOR_INT:
17159 for (i = 0, p = array; i < length; i++, p += elt_size)
17160 {
17161 rtx elt = CONST_VECTOR_ELT (rtl, i);
17162 insert_wide_int (std::make_pair (elt, imode), p, elt_size);
17163 }
17164 break;
17165
17166 case MODE_VECTOR_FLOAT:
17167 for (i = 0, p = array; i < length; i++, p += elt_size)
17168 {
17169 rtx elt = CONST_VECTOR_ELT (rtl, i);
17170 insert_float (elt, p);
17171 }
17172 break;
17173
17174 default:
17175 gcc_unreachable ();
17176 }
17177
17178 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
17179 }
17180 return true;
17181
17182 case CONST_STRING:
17183 if (dwarf_version >= 4 || !dwarf_strict)
17184 {
17185 dw_loc_descr_ref loc_result;
17186 resolve_one_addr (&rtl);
17187 rtl_addr:
17188 loc_result = new_addr_loc_descr (rtl, dtprel_false);
17189 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
17190 add_AT_loc (die, DW_AT_location, loc_result);
17191 vec_safe_push (used_rtx_array, rtl);
17192 return true;
17193 }
17194 return false;
17195
17196 case CONST:
17197 if (CONSTANT_P (XEXP (rtl, 0)))
17198 return add_const_value_attribute (die, XEXP (rtl, 0));
17199 /* FALLTHROUGH */
17200 case SYMBOL_REF:
17201 if (!const_ok_for_output (rtl))
17202 return false;
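      /* FALLTHROUGH */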
17203 case LABEL_REF:
17204 if (dwarf_version >= 4 || !dwarf_strict)
17205 goto rtl_addr;
17206 return false;
17207
17208 case PLUS:
17209 /* In cases where an inlined instance of an inline function is passed
17210 the address of an `auto' variable (which is local to the caller) we
17211 can get a situation where the DECL_RTL of the artificial local
17212 variable (for the inlining) which acts as a stand-in for the
17213 corresponding formal parameter (of the inline function) will look
17214 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
17215 exactly a compile-time constant expression, but it isn't the address
17216 of the (artificial) local variable either. Rather, it represents the
17217 *value* which the artificial local variable always has during its
17218 lifetime. We currently have no way to represent such quasi-constant
17219 values in Dwarf, so for now we just punt and generate nothing. */
17220 return false;
17221
17222 case HIGH:
17223 case CONST_FIXED:
17224 return false;
17225
17226 case MEM:
17227 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
17228 && MEM_READONLY_P (rtl)
17229 && GET_MODE (rtl) == BLKmode)
17230 {
17231 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
17232 return true;
17233 }
17234 return false;
17235
17236 default:
17237 /* No other kinds of rtx should be possible here. */
17238 gcc_unreachable ();
17239 }
17240 return false;
17241 }
17242
17243 /* Determine whether the evaluation of EXPR references any variables
17244 or functions which aren't otherwise used (and therefore may not be
17245 output). */
17246 static tree
17247 reference_to_unused (tree * tp, int * walk_subtrees,
17248 void * data ATTRIBUTE_UNUSED)
17249 {
17250 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
17251 *walk_subtrees = 0;
17252
17253 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
17254 && ! TREE_ASM_WRITTEN (*tp))
17255 return *tp;
17256 /* ??? The C++ FE emits debug information for using decls, so
17257 putting gcc_unreachable here falls over. See PR31899. For now
17258 be conservative. */
17259 else if (!symtab->global_info_ready
17260 && (TREE_CODE (*tp) == VAR_DECL || TREE_CODE (*tp) == FUNCTION_DECL))
17261 return *tp;
17262 else if (TREE_CODE (*tp) == VAR_DECL)
17263 {
17264 varpool_node *node = varpool_node::get (*tp);
17265 if (!node || !node->definition)
17266 return *tp;
17267 }
17268 else if (TREE_CODE (*tp) == FUNCTION_DECL
17269 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
17270 {
17271 /* The call graph machinery must have finished analyzing,
17272 optimizing and gimplifying the CU by now.
17273 So if *TP has no call graph node associated
17274 to it, it means *TP will not be emitted. */
17275 if (!cgraph_node::get (*tp))
17276 return *tp;
17277 }
17278 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
17279 return *tp;
17280
17281 return NULL_TREE;
17282 }
17283
17284 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
17285 for use in a later add_const_value_attribute call. */
17286
17287 static rtx
17288 rtl_for_decl_init (tree init, tree type)
17289 {
17290 rtx rtl = NULL_RTX;
17291
17292 STRIP_NOPS (init);
17293
17294 /* If a variable is initialized with a string constant without embedded
17295 zeros, build CONST_STRING. */
17296 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
17297 {
17298 tree enttype = TREE_TYPE (type);
17299 tree domain = TYPE_DOMAIN (type);
17300 machine_mode mode = TYPE_MODE (enttype);
17301
17302 if (GET_MODE_CLASS (mode) == MODE_INT && GET_MODE_SIZE (mode) == 1
17303 && domain
17304 && integer_zerop (TYPE_MIN_VALUE (domain))
17305 && compare_tree_int (TYPE_MAX_VALUE (domain),
17306 TREE_STRING_LENGTH (init) - 1) == 0
17307 && ((size_t) TREE_STRING_LENGTH (init)
17308 == strlen (TREE_STRING_POINTER (init)) + 1))
17309 {
17310 rtl = gen_rtx_CONST_STRING (VOIDmode,
17311 ggc_strdup (TREE_STRING_POINTER (init)));
17312 rtl = gen_rtx_MEM (BLKmode, rtl);
17313 MEM_READONLY_P (rtl) = 1;
17314 }
17315 }
17316 /* Other aggregates, and complex values, could be represented using
17317 CONCAT: FIXME! */
17318 else if (AGGREGATE_TYPE_P (type)
17319 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
17320 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
17321 || TREE_CODE (type) == COMPLEX_TYPE)
17322 ;
17323 /* Vectors only work if their mode is supported by the target.
17324 FIXME: generic vectors ought to work too. */
17325 else if (TREE_CODE (type) == VECTOR_TYPE
17326 && !VECTOR_MODE_P (TYPE_MODE (type)))
17327 ;
17328 /* If the initializer is something that we know will expand into an
17329 immediate RTL constant, expand it now. We must be careful not to
17330 reference variables which won't be output. */
17331 else if (initializer_constant_valid_p (init, type)
17332 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
17333 {
17334 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
17335 possible. */
17336 if (TREE_CODE (type) == VECTOR_TYPE)
17337 switch (TREE_CODE (init))
17338 {
17339 case VECTOR_CST:
17340 break;
17341 case CONSTRUCTOR:
17342 if (TREE_CONSTANT (init))
17343 {
17344 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
17345 bool constant_p = true;
17346 tree value;
17347 unsigned HOST_WIDE_INT ix;
17348
17349 /* Even when ctor is constant, it might contain non-*_CST
17350 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
17351 belong into VECTOR_CST nodes. */
17352 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
17353 if (!CONSTANT_CLASS_P (value))
17354 {
17355 constant_p = false;
17356 break;
17357 }
17358
17359 if (constant_p)
17360 {
17361 init = build_vector_from_ctor (type, elts);
17362 break;
17363 }
17364 }
17365 /* FALLTHRU */
17366
17367 default:
17368 return NULL;
17369 }
17370
17371 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
17372
17373 /* If expand_expr returns a MEM, it wasn't immediate. */
17374 gcc_assert (!rtl || !MEM_P (rtl));
17375 }
17376
17377 return rtl;
17378 }
17379
17380 /* Generate RTL for the variable DECL to represent its location. */
17381
17382 static rtx
17383 rtl_for_decl_location (tree decl)
17384 {
17385 rtx rtl;
17386
17387 /* Here we have to decide where we are going to say the parameter "lives"
17388 (as far as the debugger is concerned). We only have a couple of
17389 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
17390
17391 DECL_RTL normally indicates where the parameter lives during most of the
17392 activation of the function. If optimization is enabled however, this
17393 could be either NULL or else a pseudo-reg. Both of those cases indicate
17394 that the parameter doesn't really live anywhere (as far as the code
17395 generation parts of GCC are concerned) during most of the function's
17396 activation. That will happen (for example) if the parameter is never
17397 referenced within the function.
17398
17399 We could just generate a location descriptor here for all non-NULL
17400 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
17401 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
17402 where DECL_RTL is NULL or is a pseudo-reg.
17403
17404 Note however that we can only get away with using DECL_INCOMING_RTL as
17405 a backup substitute for DECL_RTL in certain limited cases. In cases
17406 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
17407 we can be sure that the parameter was passed using the same type as it is
17408 declared to have within the function, and that its DECL_INCOMING_RTL
17409 points us to a place where a value of that type is passed.
17410
17411 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
17412 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
17413 because in these cases DECL_INCOMING_RTL points us to a value of some
17414 type which is *different* from the type of the parameter itself. Thus,
17415 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
17416 such cases, the debugger would end up (for example) trying to fetch a
17417 `float' from a place which actually contains the first part of a
17418 `double'. That would lead to really incorrect and confusing
17419 output at debug-time.
17420
17421 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
17422 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
17423 are a couple of exceptions however. On little-endian machines we can
17424 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
17425 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
17426 an integral type that is smaller than TREE_TYPE (decl). These cases arise
17427 when (on a little-endian machine) a non-prototyped function has a
17428 parameter declared to be of type `short' or `char'. In such cases,
17429 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
17430 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
17431 passed `int' value. If the debugger then uses that address to fetch
17432 a `short' or a `char' (on a little-endian machine) the result will be
17433 the correct data, so we allow for such exceptional cases below.
17434
17435 Note that our goal here is to describe the place where the given formal
17436 parameter lives during most of the function's activation (i.e. between the
17437 end of the prologue and the start of the epilogue). We'll do that as best
17438 as we can. Note however that if the given formal parameter is modified
17439 sometime during the execution of the function, then a stack backtrace (at
17440 debug-time) will show the function as having been called with the *new*
17441 value rather than the value which was originally passed in. This happens
17442 rarely enough that it is not a major problem, but it *is* a problem, and
17443 I'd like to fix it.
17444
17445 A future version of dwarf2out.c may generate two additional attributes for
17446 any given DW_TAG_formal_parameter DIE which will describe the "passed
17447 type" and the "passed location" for the given formal parameter in addition
17448 to the attributes we now generate to indicate the "declared type" and the
17449 "active location" for each parameter. This additional set of attributes
17450 could be used by debuggers for stack backtraces. Separately, note that
17451 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
17452 This happens (for example) for inlined-instances of inline function formal
17453 parameters which are never referenced. This really shouldn't be
17454 happening. All PARM_DECL nodes should get valid non-NULL
17455 DECL_INCOMING_RTL values. FIXME. */
17456
17457 /* Use DECL_RTL as the "location" unless we find something better. */
17458 rtl = DECL_RTL_IF_SET (decl);
17459
17460 /* When generating abstract instances, ignore everything except
17461 constants, symbols living in memory, and symbols living in
17462 fixed registers. */
17463 if (! reload_completed)
17464 {
17465 if (rtl
17466 && (CONSTANT_P (rtl)
17467 || (MEM_P (rtl)
17468 && CONSTANT_P (XEXP (rtl, 0)))
17469 || (REG_P (rtl)
17470 && TREE_CODE (decl) == VAR_DECL
17471 && TREE_STATIC (decl))))
17472 {
17473 rtl = targetm.delegitimize_address (rtl);
17474 return rtl;
17475 }
17476 rtl = NULL_RTX;
17477 }
17478 else if (TREE_CODE (decl) == PARM_DECL)
17479 {
17480 if (rtl == NULL_RTX
17481 || is_pseudo_reg (rtl)
17482 || (MEM_P (rtl)
17483 && is_pseudo_reg (XEXP (rtl, 0))
17484 && DECL_INCOMING_RTL (decl)
17485 && MEM_P (DECL_INCOMING_RTL (decl))
17486 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
17487 {
17488 tree declared_type = TREE_TYPE (decl);
17489 tree passed_type = DECL_ARG_TYPE (decl);
17490 machine_mode dmode = TYPE_MODE (declared_type);
17491 machine_mode pmode = TYPE_MODE (passed_type);
17492
17493 /* This decl represents a formal parameter which was optimized out.
17494 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
17495 all cases where (rtl == NULL_RTX) just below. */
17496 if (dmode == pmode)
17497 rtl = DECL_INCOMING_RTL (decl);
17498 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
17499 && SCALAR_INT_MODE_P (dmode)
17500 && GET_MODE_SIZE (dmode) <= GET_MODE_SIZE (pmode)
17501 && DECL_INCOMING_RTL (decl))
17502 {
17503 rtx inc = DECL_INCOMING_RTL (decl);
17504 if (REG_P (inc))
17505 rtl = inc;
17506 else if (MEM_P (inc))
17507 {
17508 if (BYTES_BIG_ENDIAN)
17509 rtl = adjust_address_nv (inc, dmode,
17510 GET_MODE_SIZE (pmode)
17511 - GET_MODE_SIZE (dmode));
17512 else
17513 rtl = inc;
17514 }
17515 }
17516 }
17517
17518 /* If the parm was passed in registers, but lives on the stack, then
17519 make a big endian correction if the mode of the type of the
17520 parameter is not the same as the mode of the rtl. */
17521 /* ??? This is the same series of checks that are made in dbxout.c before
17522 we reach the big endian correction code there. It isn't clear if all
17523 of these checks are necessary here, but keeping them all is the safe
17524 thing to do. */
17525 else if (MEM_P (rtl)
17526 && XEXP (rtl, 0) != const0_rtx
17527 && ! CONSTANT_P (XEXP (rtl, 0))
17528 /* Not passed in memory. */
17529 && !MEM_P (DECL_INCOMING_RTL (decl))
17530 /* Not passed by invisible reference. */
17531 && (!REG_P (XEXP (rtl, 0))
17532 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
17533 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
17534 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
17535 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
17536 #endif
17537 )
17538 /* Big endian correction check. */
17539 && BYTES_BIG_ENDIAN
17540 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
17541 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl)))
17542 < UNITS_PER_WORD))
17543 {
17544 machine_mode addr_mode = get_address_mode (rtl);
17545 int offset = (UNITS_PER_WORD
17546 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
17547
17548 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
17549 plus_constant (addr_mode, XEXP (rtl, 0), offset));
17550 }
17551 }
17552 else if (TREE_CODE (decl) == VAR_DECL
17553 && rtl
17554 && MEM_P (rtl)
17555 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl))
17556 && BYTES_BIG_ENDIAN)
17557 {
17558 machine_mode addr_mode = get_address_mode (rtl);
17559 int rsize = GET_MODE_SIZE (GET_MODE (rtl));
17560 int dsize = GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl)));
17561
17562 /* If a variable is declared "register" yet is smaller than
17563 a register, then if we store the variable to memory, it
17564 looks like we're storing a register-sized value, when in
17565 fact we are not. We need to adjust the offset of the
17566 storage location to reflect the actual value's bytes,
17567 else gdb will not be able to display it. */
17568 if (rsize > dsize)
17569 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
17570 plus_constant (addr_mode, XEXP (rtl, 0),
17571 rsize - dsize));
17572 }
17573
17574 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
17575 and will have been substituted directly into all expressions that use it.
17576 C does not have such a concept, but C++ and other languages do. */
17577 if (!rtl && TREE_CODE (decl) == VAR_DECL && DECL_INITIAL (decl))
17578 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
17579
17580 if (rtl)
17581 rtl = targetm.delegitimize_address (rtl);
17582
17583 /* If we don't look past the constant pool, we risk emitting a
17584 reference to a constant pool entry that isn't referenced from
17585 code, and thus is not emitted. */
17586 if (rtl)
17587 rtl = avoid_constant_pool_reference (rtl);
17588
17589	 /* Try harder to get an rtl.  If this symbol ends up not being emitted
17590 in the current CU, resolve_addr will remove the expression referencing
17591 it. */
17592 if (rtl == NULL_RTX
17593 && TREE_CODE (decl) == VAR_DECL
17594 && !DECL_EXTERNAL (decl)
17595 && TREE_STATIC (decl)
17596 && DECL_NAME (decl)
17597 && !DECL_HARD_REGISTER (decl)
17598 && DECL_MODE (decl) != VOIDmode)
17599 {
17600 rtl = make_decl_rtl_for_debug (decl);
17601 if (!MEM_P (rtl)
17602 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
17603 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
17604 rtl = NULL_RTX;
17605 }
17606
17607 return rtl;
17608 }
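
/* Illustrative sketch, not from the original sources: on a 32-bit
   big-endian target a `short' parameter is typically promoted and passed
   as an `int', so DECL_INCOMING_RTL is a 4-byte stack slot while the
   declared mode is HImode.  In that case the adjust_address_nv call above
   offsets the slot by GET_MODE_SIZE (SImode) - GET_MODE_SIZE (HImode) == 2,
   so the emitted location points at the bytes that actually hold the
   value rather than at the padding.  */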
17609
17610 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
17611 returned. If so, the decl for the COMMON block is returned, and the
17612 value is the offset into the common block for the symbol. */
17613
17614 static tree
17615 fortran_common (tree decl, HOST_WIDE_INT *value)
17616 {
17617 tree val_expr, cvar;
17618 machine_mode mode;
17619 HOST_WIDE_INT bitsize, bitpos;
17620 tree offset;
17621 int unsignedp, reversep, volatilep = 0;
17622
17623 /* If the decl isn't a VAR_DECL, or if it isn't static, or if
17624 it does not have a value (the offset into the common area), or if it
17625 is thread local (as opposed to global) then it isn't common, and shouldn't
17626 be handled as such. */
17627 if (TREE_CODE (decl) != VAR_DECL
17628 || !TREE_STATIC (decl)
17629 || !DECL_HAS_VALUE_EXPR_P (decl)
17630 || !is_fortran ())
17631 return NULL_TREE;
17632
17633 val_expr = DECL_VALUE_EXPR (decl);
17634 if (TREE_CODE (val_expr) != COMPONENT_REF)
17635 return NULL_TREE;
17636
17637 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
17638 &unsignedp, &reversep, &volatilep);
17639
17640 if (cvar == NULL_TREE
17641 || TREE_CODE (cvar) != VAR_DECL
17642 || DECL_ARTIFICIAL (cvar)
17643 || !TREE_PUBLIC (cvar))
17644 return NULL_TREE;
17645
17646 *value = 0;
17647 if (offset != NULL)
17648 {
17649 if (!tree_fits_shwi_p (offset))
17650 return NULL_TREE;
17651 *value = tree_to_shwi (offset);
17652 }
17653 if (bitpos != 0)
17654 *value += bitpos / BITS_PER_UNIT;
17655
17656 return cvar;
17657 }
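
/* Illustrative example (an assumption about typical Fortran front-end
   output, not a quote from it): for
     COMMON /blk/ x, y
   the decl for Y usually carries a DECL_VALUE_EXPR of the form blk.y, a
   COMPONENT_REF into the artificial variable that represents the block.
   fortran_common would then return the decl for that BLK variable and set
   *VALUE to Y's byte offset within the block, e.g. 4 when X is a
   default-kind REAL.  */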
17658
17659 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
17660 data attribute for a variable or a parameter. We generate the
17661 DW_AT_const_value attribute only in those cases where the given variable
17662 or parameter does not have a true "location" either in memory or in a
17663 register. This can happen (for example) when a constant is passed as an
17664 actual argument in a call to an inline function. (It's possible that
17665 these things can crop up in other ways also.) Note that one type of
17666 constant value which can be passed into an inlined function is a constant
17667 pointer. This can happen for example if an actual argument in an inlined
17668 function call evaluates to a compile-time constant address.
17669
17670 CACHE_P is true if it is worth caching the location list for DECL,
17671 so that future calls can reuse it rather than regenerate it from scratch.
17672 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
17673 since we will need to refer to them each time the function is inlined. */
17674
17675 static bool
17676 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
17677 {
17678 rtx rtl;
17679 dw_loc_list_ref list;
17680 var_loc_list *loc_list;
17681 cached_dw_loc_list *cache;
17682
17683 if (early_dwarf)
17684 return false;
17685
17686 if (TREE_CODE (decl) == ERROR_MARK)
17687 return false;
17688
17689 if (get_AT (die, DW_AT_location)
17690 || get_AT (die, DW_AT_const_value))
17691 return true;
17692
17693 gcc_assert (TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL
17694 || TREE_CODE (decl) == RESULT_DECL);
17695
17696 /* Try to get some constant RTL for this decl, and use that as the value of
17697 the location. */
17698
17699 rtl = rtl_for_decl_location (decl);
17700 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
17701 && add_const_value_attribute (die, rtl))
17702 return true;
17703
17704	 /* See if we have a single-element location list that is equivalent to
17705	 a constant value.  In that case it is better to use add_const_value_attribute
17706	 rather than expanding the constant value equivalent. */
17707 loc_list = lookup_decl_loc (decl);
17708 if (loc_list
17709 && loc_list->first
17710 && loc_list->first->next == NULL
17711 && NOTE_P (loc_list->first->loc)
17712 && NOTE_VAR_LOCATION (loc_list->first->loc)
17713 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
17714 {
17715 struct var_loc_node *node;
17716
17717 node = loc_list->first;
17718 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
17719 if (GET_CODE (rtl) == EXPR_LIST)
17720 rtl = XEXP (rtl, 0);
17721 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
17722 && add_const_value_attribute (die, rtl))
17723 return true;
17724 }
17725 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
17726 list several times. See if we've already cached the contents. */
17727 list = NULL;
17728 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
17729 cache_p = false;
17730 if (cache_p)
17731 {
17732 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
17733 if (cache)
17734 list = cache->loc_list;
17735 }
17736 if (list == NULL)
17737 {
17738 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
17739 NULL);
17740 /* It is usually worth caching this result if the decl is from
17741 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
17742 if (cache_p && list && list->dw_loc_next)
17743 {
17744 cached_dw_loc_list **slot
17745 = cached_dw_loc_list_table->find_slot_with_hash (decl,
17746 DECL_UID (decl),
17747 INSERT);
17748 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
17749 cache->decl_id = DECL_UID (decl);
17750 cache->loc_list = list;
17751 *slot = cache;
17752 }
17753 }
17754 if (list)
17755 {
17756 add_AT_location_description (die, DW_AT_location, list);
17757 return true;
17758 }
17759 /* None of that worked, so it must not really have a location;
17760 try adding a constant value attribute from the DECL_INITIAL. */
17761 return tree_add_const_value_attribute_for_decl (die, decl);
17762 }
17763
17764 /* Helper function for tree_add_const_value_attribute. Natively encode
17765 initializer INIT into an array. Return true if successful. */
17766
17767 static bool
17768 native_encode_initializer (tree init, unsigned char *array, int size)
17769 {
17770 tree type;
17771
17772 if (init == NULL_TREE)
17773 return false;
17774
17775 STRIP_NOPS (init);
17776 switch (TREE_CODE (init))
17777 {
17778 case STRING_CST:
17779 type = TREE_TYPE (init);
17780 if (TREE_CODE (type) == ARRAY_TYPE)
17781 {
17782 tree enttype = TREE_TYPE (type);
17783 machine_mode mode = TYPE_MODE (enttype);
17784
17785 if (GET_MODE_CLASS (mode) != MODE_INT || GET_MODE_SIZE (mode) != 1)
17786 return false;
17787 if (int_size_in_bytes (type) != size)
17788 return false;
17789 if (size > TREE_STRING_LENGTH (init))
17790 {
17791 memcpy (array, TREE_STRING_POINTER (init),
17792 TREE_STRING_LENGTH (init));
17793 memset (array + TREE_STRING_LENGTH (init),
17794 '\0', size - TREE_STRING_LENGTH (init));
17795 }
17796 else
17797 memcpy (array, TREE_STRING_POINTER (init), size);
17798 return true;
17799 }
17800 return false;
17801 case CONSTRUCTOR:
17802 type = TREE_TYPE (init);
17803 if (int_size_in_bytes (type) != size)
17804 return false;
17805 if (TREE_CODE (type) == ARRAY_TYPE)
17806 {
17807 HOST_WIDE_INT min_index;
17808 unsigned HOST_WIDE_INT cnt;
17809 int curpos = 0, fieldsize;
17810 constructor_elt *ce;
17811
17812 if (TYPE_DOMAIN (type) == NULL_TREE
17813 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
17814 return false;
17815
17816 fieldsize = int_size_in_bytes (TREE_TYPE (type));
17817 if (fieldsize <= 0)
17818 return false;
17819
17820 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
17821 memset (array, '\0', size);
17822 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
17823 {
17824 tree val = ce->value;
17825 tree index = ce->index;
17826 int pos = curpos;
17827 if (index && TREE_CODE (index) == RANGE_EXPR)
17828 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
17829 * fieldsize;
17830 else if (index)
17831 pos = (tree_to_shwi (index) - min_index) * fieldsize;
17832
17833 if (val)
17834 {
17835 STRIP_NOPS (val);
17836 if (!native_encode_initializer (val, array + pos, fieldsize))
17837 return false;
17838 }
17839 curpos = pos + fieldsize;
17840 if (index && TREE_CODE (index) == RANGE_EXPR)
17841 {
17842 int count = tree_to_shwi (TREE_OPERAND (index, 1))
17843 - tree_to_shwi (TREE_OPERAND (index, 0));
17844 while (count-- > 0)
17845 {
17846 if (val)
17847 memcpy (array + curpos, array + pos, fieldsize);
17848 curpos += fieldsize;
17849 }
17850 }
17851 gcc_assert (curpos <= size);
17852 }
17853 return true;
17854 }
17855 else if (TREE_CODE (type) == RECORD_TYPE
17856 || TREE_CODE (type) == UNION_TYPE)
17857 {
17858 tree field = NULL_TREE;
17859 unsigned HOST_WIDE_INT cnt;
17860 constructor_elt *ce;
17861
17862 if (int_size_in_bytes (type) != size)
17863 return false;
17864
17865 if (TREE_CODE (type) == RECORD_TYPE)
17866 field = TYPE_FIELDS (type);
17867
17868 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
17869 {
17870 tree val = ce->value;
17871 int pos, fieldsize;
17872
17873 if (ce->index != 0)
17874 field = ce->index;
17875
17876 if (val)
17877 STRIP_NOPS (val);
17878
17879 if (field == NULL_TREE || DECL_BIT_FIELD (field))
17880 return false;
17881
17882 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
17883 && TYPE_DOMAIN (TREE_TYPE (field))
17884 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
17885 return false;
17886 else if (DECL_SIZE_UNIT (field) == NULL_TREE
17887 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
17888 return false;
17889 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
17890 pos = int_byte_position (field);
17891 gcc_assert (pos + fieldsize <= size);
17892 if (val && fieldsize != 0
17893 && !native_encode_initializer (val, array + pos, fieldsize))
17894 return false;
17895 }
17896 return true;
17897 }
17898 return false;
17899 case VIEW_CONVERT_EXPR:
17900 case NON_LVALUE_EXPR:
17901 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
17902 default:
17903 return native_encode_expr (init, array, size) == size;
17904 }
17905 }
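
/* Rough illustration, not part of the sources: encoding the initializer of
     int a[3] = { 1, 2, 3 };
   on a little-endian target with 4-byte int fills a 12-byte array with
   01 00 00 00 02 00 00 00 03 00 00 00.  A GNU C range designator such as
     int b[4] = { [0 ... 3] = 7 };
   is handled by the RANGE_EXPR case above, which encodes the value once
   and then copies that fieldsize-byte chunk into each following slot.  */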
17906
17907 /* Attach a DW_AT_const_value attribute to DIE. The value of the
17908 attribute is the const value T. */
17909
17910 static bool
17911 tree_add_const_value_attribute (dw_die_ref die, tree t)
17912 {
17913 tree init;
17914 tree type = TREE_TYPE (t);
17915 rtx rtl;
17916
17917 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
17918 return false;
17919
17920 init = t;
17921 gcc_assert (!DECL_P (init));
17922
17923 rtl = rtl_for_decl_init (init, type);
17924 if (rtl)
17925 return add_const_value_attribute (die, rtl);
17926 /* If the host and target are sane, try harder. */
17927 else if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
17928 && initializer_constant_valid_p (init, type))
17929 {
17930 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
17931 if (size > 0 && (int) size == size)
17932 {
17933 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
17934
17935 if (native_encode_initializer (init, array, size))
17936 {
17937 add_AT_vec (die, DW_AT_const_value, size, 1, array);
17938 return true;
17939 }
17940 ggc_free (array);
17941 }
17942 }
17943 return false;
17944 }
17945
17946 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
17947 attribute is the const value of T, where T is an integral constant
17948 variable with static storage duration
17949 (so it can't be a PARM_DECL or a RESULT_DECL). */
17950
17951 static bool
17952 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
17953 {
17954
17955 if (!decl
17956 || (TREE_CODE (decl) != VAR_DECL
17957 && TREE_CODE (decl) != CONST_DECL)
17958 || (TREE_CODE (decl) == VAR_DECL
17959 && !TREE_STATIC (decl)))
17960 return false;
17961
17962 if (TREE_READONLY (decl)
17963 && ! TREE_THIS_VOLATILE (decl)
17964 && DECL_INITIAL (decl))
17965 /* OK */;
17966 else
17967 return false;
17968
17969 /* Don't add DW_AT_const_value if abstract origin already has one. */
17970 if (get_AT (var_die, DW_AT_const_value))
17971 return false;
17972
17973 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
17974 }
17975
17976 /* Convert the CFI instructions for the current function into a
17977	 location list.  This is used for DW_AT_frame_base when we are targeting
17978 a dwarf2 consumer that does not support the dwarf3
17979 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
17980 expressions. */
17981
17982 static dw_loc_list_ref
17983 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
17984 {
17985 int ix;
17986 dw_fde_ref fde;
17987 dw_loc_list_ref list, *list_tail;
17988 dw_cfi_ref cfi;
17989 dw_cfa_location last_cfa, next_cfa;
17990 const char *start_label, *last_label, *section;
17991 dw_cfa_location remember;
17992
17993 fde = cfun->fde;
17994 gcc_assert (fde != NULL);
17995
17996 section = secname_for_decl (current_function_decl);
17997 list_tail = &list;
17998 list = NULL;
17999
18000 memset (&next_cfa, 0, sizeof (next_cfa));
18001 next_cfa.reg = INVALID_REGNUM;
18002 remember = next_cfa;
18003
18004 start_label = fde->dw_fde_begin;
18005
18006 /* ??? Bald assumption that the CIE opcode list does not contain
18007 advance opcodes. */
18008 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
18009 lookup_cfa_1 (cfi, &next_cfa, &remember);
18010
18011 last_cfa = next_cfa;
18012 last_label = start_label;
18013
18014 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
18015 {
18016 /* If the first partition contained no CFI adjustments, the
18017 CIE opcodes apply to the whole first partition. */
18018 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
18019 fde->dw_fde_begin, fde->dw_fde_end, section);
18020	 list_tail = &(*list_tail)->dw_loc_next;
18021 start_label = last_label = fde->dw_fde_second_begin;
18022 }
18023
18024 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
18025 {
18026 switch (cfi->dw_cfi_opc)
18027 {
18028 case DW_CFA_set_loc:
18029 case DW_CFA_advance_loc1:
18030 case DW_CFA_advance_loc2:
18031 case DW_CFA_advance_loc4:
18032 if (!cfa_equal_p (&last_cfa, &next_cfa))
18033 {
18034 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
18035 start_label, last_label, section);
18036
18037 list_tail = &(*list_tail)->dw_loc_next;
18038 last_cfa = next_cfa;
18039 start_label = last_label;
18040 }
18041 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
18042 break;
18043
18044 case DW_CFA_advance_loc:
18045 /* The encoding is complex enough that we should never emit this. */
18046 gcc_unreachable ();
18047
18048 default:
18049 lookup_cfa_1 (cfi, &next_cfa, &remember);
18050 break;
18051 }
18052 if (ix + 1 == fde->dw_fde_switch_cfi_index)
18053 {
18054 if (!cfa_equal_p (&last_cfa, &next_cfa))
18055 {
18056 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
18057 start_label, last_label, section);
18058
18059 list_tail = &(*list_tail)->dw_loc_next;
18060 last_cfa = next_cfa;
18061 start_label = last_label;
18062 }
18063 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
18064 start_label, fde->dw_fde_end, section);
18065 list_tail = &(*list_tail)->dw_loc_next;
18066 start_label = last_label = fde->dw_fde_second_begin;
18067 }
18068 }
18069
18070 if (!cfa_equal_p (&last_cfa, &next_cfa))
18071 {
18072 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
18073 start_label, last_label, section);
18074 list_tail = &(*list_tail)->dw_loc_next;
18075 start_label = last_label;
18076 }
18077
18078 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
18079 start_label,
18080 fde->dw_fde_second_begin
18081 ? fde->dw_fde_second_end : fde->dw_fde_end,
18082 section);
18083
18084 if (list && list->dw_loc_next)
18085 gen_llsym (list);
18086
18087 return list;
18088 }
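
/* Illustrative only (register numbers assume an x86-64-like target): the
   resulting DW_AT_frame_base location list might look like
     [func_begin, L1)  DW_OP_breg7 (rsp) + 8
     [L1, L2)          DW_OP_breg7 (rsp) + 16
     [L2, func_end)    DW_OP_breg6 (rbp) + 16
   i.e. one entry per range over which the CFA rule is unchanged, each
   entry built by build_cfa_loc from the recorded CFI.  */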
18089
18090 /* Compute a displacement from the "steady-state frame pointer" to the
18091 frame base (often the same as the CFA), and store it in
18092 frame_pointer_fb_offset. OFFSET is added to the displacement
18093 before the latter is negated. */
18094
18095 static void
18096 compute_frame_pointer_to_fb_displacement (HOST_WIDE_INT offset)
18097 {
18098 rtx reg, elim;
18099
18100 #ifdef FRAME_POINTER_CFA_OFFSET
18101 reg = frame_pointer_rtx;
18102 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
18103 #else
18104 reg = arg_pointer_rtx;
18105 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
18106 #endif
18107
18108 elim = (ira_use_lra_p
18109 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
18110 : eliminate_regs (reg, VOIDmode, NULL_RTX));
18111 if (GET_CODE (elim) == PLUS)
18112 {
18113 offset += INTVAL (XEXP (elim, 1));
18114 elim = XEXP (elim, 0);
18115 }
18116
18117 frame_pointer_fb_offset = -offset;
18118
18119 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
18120	 in which to eliminate.  This is because its stack pointer isn't
18121 directly accessible as a register within the ISA. To work around
18122 this, assume that while we cannot provide a proper value for
18123 frame_pointer_fb_offset, we won't need one either. */
18124 frame_pointer_fb_offset_valid
18125 = ((SUPPORTS_STACK_ALIGNMENT
18126 && (elim == hard_frame_pointer_rtx
18127 || elim == stack_pointer_rtx))
18128 || elim == (frame_pointer_needed
18129 ? hard_frame_pointer_rtx
18130 : stack_pointer_rtx));
18131 }
18132
18133 /* Generate a DW_AT_name attribute given some string value to be included as
18134 the value of the attribute. */
18135
18136 static void
18137 add_name_attribute (dw_die_ref die, const char *name_string)
18138 {
18139 if (name_string != NULL && *name_string != 0)
18140 {
18141 if (demangle_name_func)
18142 name_string = (*demangle_name_func) (name_string);
18143
18144 add_AT_string (die, DW_AT_name, name_string);
18145 }
18146 }
18147
18148 /* Retrieve the descriptive type of TYPE, if any, make sure it has a
18149 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
18150 of TYPE accordingly.
18151
18152 ??? This is a temporary measure until after we're able to generate
18153 regular DWARF for the complex Ada type system. */
18154
18155 static void
18156 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
18157 dw_die_ref context_die)
18158 {
18159 tree dtype;
18160 dw_die_ref dtype_die;
18161
18162 if (!lang_hooks.types.descriptive_type)
18163 return;
18164
18165 dtype = lang_hooks.types.descriptive_type (type);
18166 if (!dtype)
18167 return;
18168
18169 dtype_die = lookup_type_die (dtype);
18170 if (!dtype_die)
18171 {
18172 gen_type_die (dtype, context_die);
18173 dtype_die = lookup_type_die (dtype);
18174 gcc_assert (dtype_die);
18175 }
18176
18177 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
18178 }
18179
18180 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
18181
18182 static const char *
18183 comp_dir_string (void)
18184 {
18185 const char *wd;
18186 char *wd1;
18187 static const char *cached_wd = NULL;
18188
18189 if (cached_wd != NULL)
18190 return cached_wd;
18191
18192 wd = get_src_pwd ();
18193 if (wd == NULL)
18194 return NULL;
18195
18196 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
18197 {
18198 int wdlen;
18199
18200 wdlen = strlen (wd);
18201 wd1 = ggc_vec_alloc<char> (wdlen + 2);
18202 strcpy (wd1, wd);
18203 wd1 [wdlen] = DIR_SEPARATOR;
18204 wd1 [wdlen + 1] = 0;
18205 wd = wd1;
18206 }
18207
18208 cached_wd = remap_debug_filename (wd);
18209 return cached_wd;
18210 }
18211
18212 /* Generate a DW_AT_comp_dir attribute for DIE. */
18213
18214 static void
18215 add_comp_dir_attribute (dw_die_ref die)
18216 {
18217 const char * wd = comp_dir_string ();
18218 if (wd != NULL)
18219 add_AT_string (die, DW_AT_comp_dir, wd);
18220 }
18221
18222 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
18223	 pointer computation, ...), output a representation for that value according
18224 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
18225 loc_list_from_tree for the meaning of CONTEXT. */
18226
18227 static void
18228 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
18229 int forms, const struct loc_descr_context *context)
18230 {
18231 dw_die_ref context_die, decl_die;
18232 dw_loc_list_ref list;
18233
18234 bool strip_conversions = true;
18235
18236 while (strip_conversions)
18237 switch (TREE_CODE (value))
18238 {
18239 case ERROR_MARK:
18240 case SAVE_EXPR:
18241 return;
18242
18243 CASE_CONVERT:
18244 case VIEW_CONVERT_EXPR:
18245 value = TREE_OPERAND (value, 0);
18246 break;
18247
18248 default:
18249 strip_conversions = false;
18250 break;
18251 }
18252
18253 /* If possible and permitted, output the attribute as a constant. */
18254 if ((forms & dw_scalar_form_constant) != 0
18255 && TREE_CODE (value) == INTEGER_CST)
18256 {
18257 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
18258
18259 /* If HOST_WIDE_INT is big enough then represent the bound as
18260 a constant value. We need to choose a form based on
18261 whether the type is signed or unsigned. We cannot just
18262 call add_AT_unsigned if the value itself is positive
18263 (add_AT_unsigned might add the unsigned value encoded as
18264	 DW_FORM_data[1248]).  Some DWARF consumers will look up the
18265 bounds type and then sign extend any unsigned values found
18266 for signed types. This is needed only for
18267 DW_AT_{lower,upper}_bound, since for most other attributes,
18268 consumers will treat DW_FORM_data[1248] as unsigned values,
18269 regardless of the underlying type. */
18270 if (prec <= HOST_BITS_PER_WIDE_INT
18271 || tree_fits_uhwi_p (value))
18272 {
18273 if (TYPE_UNSIGNED (TREE_TYPE (value)))
18274 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
18275 else
18276 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
18277 }
18278 else
18279 /* Otherwise represent the bound as an unsigned value with
18280 the precision of its type. The precision and signedness
18281 of the type will be necessary to re-interpret it
18282 unambiguously. */
18283 add_AT_wide (die, attr, value);
18284 return;
18285 }
18286
18287 /* Otherwise, if it's possible and permitted too, output a reference to
18288 another DIE. */
18289 if ((forms & dw_scalar_form_reference) != 0)
18290 {
18291 tree decl = NULL_TREE;
18292
18293 /* Some type attributes reference an outer type. For instance, the upper
18294 bound of an array may reference an embedding record (this happens in
18295 Ada). */
18296 if (TREE_CODE (value) == COMPONENT_REF
18297 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
18298 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
18299 decl = TREE_OPERAND (value, 1);
18300
18301 else if (TREE_CODE (value) == VAR_DECL
18302 || TREE_CODE (value) == PARM_DECL
18303 || TREE_CODE (value) == RESULT_DECL)
18304 decl = value;
18305
18306 if (decl != NULL_TREE)
18307 {
18308 dw_die_ref decl_die = lookup_decl_die (decl);
18309
18310 /* ??? Can this happen, or should the variable have been bound
18311 first? Probably it can, since I imagine that we try to create
18312 the types of parameters in the order in which they exist in
18313 the list, and won't have created a forward reference to a
18314 later parameter. */
18315 if (decl_die != NULL)
18316 {
18317 add_AT_die_ref (die, attr, decl_die);
18318 return;
18319 }
18320 }
18321 }
18322
18323 /* Last chance: try to create a stack operation procedure to evaluate the
18324 value. Do nothing if even that is not possible or permitted. */
18325 if ((forms & dw_scalar_form_exprloc) == 0)
18326 return;
18327
18328 list = loc_list_from_tree (value, 2, context);
18329 if (list == NULL || single_element_loc_list_p (list))
18330 {
18331	 /* If this attribute is neither a reference nor a constant, it is
18332	 a DWARF expression rather than a location description.  For that
18333 loc_list_from_tree (value, 0, &context) is needed. */
18334 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
18335 if (list2 && single_element_loc_list_p (list2))
18336 {
18337 add_AT_loc (die, attr, list2->expr);
18338 return;
18339 }
18340 }
18341
18342 /* If that failed to give a single element location list, fall back to
18343 outputting this as a reference... still if permitted. */
18344 if (list == NULL || (forms & dw_scalar_form_reference) == 0)
18345 return;
18346
18347 if (current_function_decl == 0)
18348 context_die = comp_unit_die ();
18349 else
18350 context_die = lookup_decl_die (current_function_decl);
18351
18352 decl_die = new_die (DW_TAG_variable, context_die, value);
18353 add_AT_flag (decl_die, DW_AT_artificial, 1);
18354 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
18355 context_die);
18356 add_AT_location_description (decl_die, DW_AT_location, list);
18357 add_AT_die_ref (die, attr, decl_die);
18358 }
18359
18360 /* Return the default for DW_AT_lower_bound, or -1 if there is not any
18361 default. */
18362
18363 static int
18364 lower_bound_default (void)
18365 {
18366 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
18367 {
18368 case DW_LANG_C:
18369 case DW_LANG_C89:
18370 case DW_LANG_C99:
18371 case DW_LANG_C11:
18372 case DW_LANG_C_plus_plus:
18373 case DW_LANG_C_plus_plus_11:
18374 case DW_LANG_C_plus_plus_14:
18375 case DW_LANG_ObjC:
18376 case DW_LANG_ObjC_plus_plus:
18377 case DW_LANG_Java:
18378 return 0;
18379 case DW_LANG_Fortran77:
18380 case DW_LANG_Fortran90:
18381 case DW_LANG_Fortran95:
18382 case DW_LANG_Fortran03:
18383 case DW_LANG_Fortran08:
18384 return 1;
18385 case DW_LANG_UPC:
18386 case DW_LANG_D:
18387 case DW_LANG_Python:
18388 return dwarf_version >= 4 ? 0 : -1;
18389 case DW_LANG_Ada95:
18390 case DW_LANG_Ada83:
18391 case DW_LANG_Cobol74:
18392 case DW_LANG_Cobol85:
18393 case DW_LANG_Pascal83:
18394 case DW_LANG_Modula2:
18395 case DW_LANG_PLI:
18396 return dwarf_version >= 4 ? 1 : -1;
18397 default:
18398 return -1;
18399 }
18400 }
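
/* For illustration (standard DWARF defaults, not specific to this file):
   with DW_AT_language DW_LANG_C99 the default lower bound is 0, so
     int a[5];
   needs only DW_AT_upper_bound 4 in its DW_TAG_subrange_type, whereas a
   Fortran array declared as
     integer :: a(5)
   defaults to a lower bound of 1 and gets DW_AT_upper_bound 5.  */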
18401
18402 /* Given a tree node describing an array bound (either lower or upper) output
18403 a representation for that bound. */
18404
18405 static void
18406 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
18407 tree bound, const struct loc_descr_context *context)
18408 {
18409 int dflt;
18410
18411 while (1)
18412 switch (TREE_CODE (bound))
18413 {
18414 /* Strip all conversions. */
18415 CASE_CONVERT:
18416 case VIEW_CONVERT_EXPR:
18417 bound = TREE_OPERAND (bound, 0);
18418 break;
18419
18420 /* All fixed-bounds are represented by INTEGER_CST nodes. Lower bounds
18421 are even omitted when they are the default. */
18422 case INTEGER_CST:
18423 /* If the value for this bound is the default one, we can even omit the
18424 attribute. */
18425 if (bound_attr == DW_AT_lower_bound
18426 && tree_fits_shwi_p (bound)
18427 && (dflt = lower_bound_default ()) != -1
18428 && tree_to_shwi (bound) == dflt)
18429 return;
18430
18431 /* FALLTHRU */
18432
18433 default:
18434	 /* Because of the complex interactions there can be with other GNAT
18435	 encodings, GDB isn't yet ready to handle a proper DWARF description
18436	 for self-referential subrange bounds: let the GNAT encodings do the
18437	 magic in such a case. */
18438 if (gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
18439 && contains_placeholder_p (bound))
18440 return;
18441
18442 add_scalar_info (subrange_die, bound_attr, bound,
18443 dw_scalar_form_constant
18444 | dw_scalar_form_exprloc
18445 | dw_scalar_form_reference,
18446 context);
18447 return;
18448 }
18449 }
18450
18451 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
18452 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
18453 Note that the block of subscript information for an array type also
18454 includes information about the element type of the given array type.
18455
18456 This function reuses previously set type and bound information if
18457 available. */
18458
18459 static void
18460 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
18461 {
18462 unsigned dimension_number;
18463 tree lower, upper;
18464 dw_die_ref child = type_die->die_child;
18465
18466 for (dimension_number = 0;
18467 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
18468 type = TREE_TYPE (type), dimension_number++)
18469 {
18470 tree domain = TYPE_DOMAIN (type);
18471
18472 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
18473 break;
18474
18475 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
18476 and (in GNU C only) variable bounds. Handle all three forms
18477 here. */
18478
18479 /* Find and reuse a previously generated DW_TAG_subrange_type if
18480 available.
18481
18482 For multi-dimensional arrays, as we iterate through the
18483 various dimensions in the enclosing for loop above, we also
18484 iterate through the DIE children and pick at each
18485 DW_TAG_subrange_type previously generated (if available).
18486 Each child DW_TAG_subrange_type DIE describes the range of
18487 the current dimension. At this point we should have as many
18488 DW_TAG_subrange_type's as we have dimensions in the
18489 array. */
18490 dw_die_ref subrange_die = NULL;
18491 if (child)
18492 while (1)
18493 {
18494 child = child->die_sib;
18495 if (child->die_tag == DW_TAG_subrange_type)
18496 subrange_die = child;
18497 if (child == type_die->die_child)
18498 {
18499 /* If we wrapped around, stop looking next time. */
18500 child = NULL;
18501 break;
18502 }
18503 if (child->die_tag == DW_TAG_subrange_type)
18504 break;
18505 }
18506 if (!subrange_die)
18507 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
18508
18509 if (domain)
18510 {
18511 /* We have an array type with specified bounds. */
18512 lower = TYPE_MIN_VALUE (domain);
18513 upper = TYPE_MAX_VALUE (domain);
18514
18515 /* Define the index type. */
18516 if (TREE_TYPE (domain)
18517 && !get_AT (subrange_die, DW_AT_type))
18518 {
18519 /* ??? This is probably an Ada unnamed subrange type. Ignore the
18520 TREE_TYPE field. We can't emit debug info for this
18521 because it is an unnamed integral type. */
18522 if (TREE_CODE (domain) == INTEGER_TYPE
18523 && TYPE_NAME (domain) == NULL_TREE
18524 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
18525 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
18526 ;
18527 else
18528 add_type_attribute (subrange_die, TREE_TYPE (domain),
18529 TYPE_UNQUALIFIED, false, type_die);
18530 }
18531
18532 /* ??? If upper is NULL, the array has unspecified length,
18533 but it does have a lower bound. This happens with Fortran
18534	 dimension arr(N:*).
18535 Since the debugger is definitely going to need to know N
18536 to produce useful results, go ahead and output the lower
18537 bound solo, and hope the debugger can cope. */
18538
18539 if (!get_AT (subrange_die, DW_AT_lower_bound))
18540 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
18541 if (upper && !get_AT (subrange_die, DW_AT_upper_bound))
18542 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
18543 }
18544
18545 /* Otherwise we have an array type with an unspecified length. The
18546 DWARF-2 spec does not say how to handle this; let's just leave out the
18547 bounds. */
18548 }
18549 }
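
/* Illustrative sketch: for the C declaration
     int m[2][3];
   COLLAPSE_P is true, so a single DW_TAG_array_type DIE gets two
   DW_TAG_subrange_type children with DW_AT_upper_bound 1 and 2.  For Ada,
   COLLAPSE_P is false and the inner dimension would instead be described
   by a separate array type DIE for the element type.  */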
18550
18551 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
18552
18553 static void
18554 add_byte_size_attribute (dw_die_ref die, tree tree_node)
18555 {
18556 dw_die_ref decl_die;
18557 HOST_WIDE_INT size;
18558 dw_loc_descr_ref size_expr = NULL;
18559
18560 switch (TREE_CODE (tree_node))
18561 {
18562 case ERROR_MARK:
18563 size = 0;
18564 break;
18565 case ENUMERAL_TYPE:
18566 case RECORD_TYPE:
18567 case UNION_TYPE:
18568 case QUAL_UNION_TYPE:
18569 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
18570 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
18571 {
18572 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
18573 return;
18574 }
18575 size_expr = type_byte_size (tree_node, &size);
18576 break;
18577 case FIELD_DECL:
18578 /* For a data member of a struct or union, the DW_AT_byte_size is
18579 generally given as the number of bytes normally allocated for an
18580 object of the *declared* type of the member itself. This is true
18581 even for bit-fields. */
18582 size = int_size_in_bytes (field_type (tree_node));
18583 break;
18584 default:
18585 gcc_unreachable ();
18586 }
18587
18588 /* Support for dynamically-sized objects was introduced by DWARFv3.
18589 At the moment, GDB does not handle variable byte sizes very well,
18590 though. */
18591 if ((dwarf_version >= 3 || !dwarf_strict)
18592 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
18593 && size_expr != NULL)
18594 add_AT_loc (die, DW_AT_byte_size, size_expr);
18595
18596 /* Note that `size' might be -1 when we get to this point. If it is, that
18597 indicates that the byte size of the entity in question is variable and
18598 that we could not generate a DWARF expression that computes it. */
18599 if (size >= 0)
18600 add_AT_unsigned (die, DW_AT_byte_size, size);
18601 }
18602
18603 /* For a FIELD_DECL node which represents a bit-field, output an attribute
18604 which specifies the distance in bits from the highest order bit of the
18605 "containing object" for the bit-field to the highest order bit of the
18606 bit-field itself.
18607
18608 For any given bit-field, the "containing object" is a hypothetical object
18609 (of some integral or enum type) within which the given bit-field lives. The
18610 type of this hypothetical "containing object" is always the same as the
18611 declared type of the individual bit-field itself. The determination of the
18612 exact location of the "containing object" for a bit-field is rather
18613 complicated. It's handled by the `field_byte_offset' function (above).
18614
18615 CTX is required: see the comment for VLR_CONTEXT.
18616
18617 Note that it is the size (in bytes) of the hypothetical "containing object"
18618 which will be given in the DW_AT_byte_size attribute for this bit-field.
18619 (See `byte_size_attribute' above). */
18620
18621 static inline void
18622 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
18623 {
18624 HOST_WIDE_INT object_offset_in_bytes;
18625 tree original_type = DECL_BIT_FIELD_TYPE (decl);
18626 HOST_WIDE_INT bitpos_int;
18627 HOST_WIDE_INT highest_order_object_bit_offset;
18628 HOST_WIDE_INT highest_order_field_bit_offset;
18629 HOST_WIDE_INT bit_offset;
18630
18631 field_byte_offset (decl, ctx, &object_offset_in_bytes);
18632
18633 /* Must be a field and a bit field. */
18634 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
18635
18636 /* We can't yet handle bit-fields whose offsets are variable, so if we
18637 encounter such things, just return without generating any attribute
18638 whatsoever. Likewise for variable or too large size. */
18639 if (! tree_fits_shwi_p (bit_position (decl))
18640 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
18641 return;
18642
18643 bitpos_int = int_bit_position (decl);
18644
18645 /* Note that the bit offset is always the distance (in bits) from the
18646 highest-order bit of the "containing object" to the highest-order bit of
18647 the bit-field itself. Since the "high-order end" of any object or field
18648 is different on big-endian and little-endian machines, the computation
18649 below must take account of these differences. */
18650 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
18651 highest_order_field_bit_offset = bitpos_int;
18652
18653 if (! BYTES_BIG_ENDIAN)
18654 {
18655 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
18656 highest_order_object_bit_offset +=
18657 simple_type_size_in_bits (original_type);
18658 }
18659
18660 bit_offset
18661 = (! BYTES_BIG_ENDIAN
18662 ? highest_order_object_bit_offset - highest_order_field_bit_offset
18663 : highest_order_field_bit_offset - highest_order_object_bit_offset);
18664
18665 if (bit_offset < 0)
18666 add_AT_int (die, DW_AT_bit_offset, bit_offset);
18667 else
18668 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
18669 }
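
/* Rough worked example (illustrative assumptions about layout): for
     struct s { int f : 5; };
   on a 32-bit little-endian target with the field allocated at bit
   position 0 we get object_offset_in_bytes == 0, bitpos_int == 0 and
   DECL_SIZE == 5, so
     highest_order_field_bit_offset  = 0 + 5  = 5
     highest_order_object_bit_offset = 0 + 32 = 32
   and DW_AT_bit_offset becomes 32 - 5 = 27, i.e. the distance in bits from
   the most significant bit of the containing int to the most significant
   bit of the field.  */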
18670
18671 /* For a FIELD_DECL node which represents a bit field, output an attribute
18672 which specifies the length in bits of the given field. */
18673
18674 static inline void
18675 add_bit_size_attribute (dw_die_ref die, tree decl)
18676 {
18677 /* Must be a field and a bit field. */
18678 gcc_assert (TREE_CODE (decl) == FIELD_DECL
18679 && DECL_BIT_FIELD_TYPE (decl));
18680
18681 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
18682 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
18683 }
18684
18685 /* If the compiled language is ANSI C, then add a 'prototyped'
18686	 attribute if argument types are given for the function's parameters. */
18687
18688 static inline void
18689 add_prototyped_attribute (dw_die_ref die, tree func_type)
18690 {
18691 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
18692 {
18693 case DW_LANG_C:
18694 case DW_LANG_C89:
18695 case DW_LANG_C99:
18696 case DW_LANG_C11:
18697 case DW_LANG_ObjC:
18698 if (prototype_p (func_type))
18699 add_AT_flag (die, DW_AT_prototyped, 1);
18700 break;
18701 default:
18702 break;
18703 }
18704 }
18705
18706 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
18707 by looking in the type declaration, the object declaration equate table or
18708 the block mapping. */
18709
18710 static inline dw_die_ref
18711 add_abstract_origin_attribute (dw_die_ref die, tree origin)
18712 {
18713 dw_die_ref origin_die = NULL;
18714
18715 if (TREE_CODE (origin) != FUNCTION_DECL
18716 && TREE_CODE (origin) != BLOCK)
18717 {
18718 /* We may have gotten separated from the block for the inlined
18719 function, if we're in an exception handler or some such; make
18720 sure that the abstract function has been written out.
18721
18722 Doing this for nested functions is wrong, however; functions are
18723 distinct units, and our context might not even be inline. */
18724 tree fn = origin;
18725
18726 if (TYPE_P (fn))
18727 fn = TYPE_STUB_DECL (fn);
18728
18729 fn = decl_function_context (fn);
18730 if (fn)
18731 dwarf2out_abstract_function (fn);
18732 }
18733
18734 if (DECL_P (origin))
18735 origin_die = lookup_decl_die (origin);
18736 else if (TYPE_P (origin))
18737 origin_die = lookup_type_die (origin);
18738 else if (TREE_CODE (origin) == BLOCK)
18739 origin_die = BLOCK_DIE (origin);
18740
18741 /* XXX: Functions that are never lowered don't always have correct block
18742	 trees (in the case of Java, and in some other languages, they simply
18743	 have no block tree).  For these functions, there is nothing we can really do to
18744 output correct debug info for inlined functions in all cases. Rather
18745 than die, we'll just produce deficient debug info now, in that we will
18746 have variables without a proper abstract origin. In the future, when all
18747 functions are lowered, we should re-add a gcc_assert (origin_die)
18748 here. */
18749
18750 if (origin_die)
18751 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
18752 return origin_die;
18753 }
18754
18755 /* We do not currently support the pure_virtual attribute. */
18756
18757 static inline void
18758 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
18759 {
18760 if (DECL_VINDEX (func_decl))
18761 {
18762 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
18763
18764 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
18765 add_AT_loc (die, DW_AT_vtable_elem_location,
18766 new_loc_descr (DW_OP_constu,
18767 tree_to_shwi (DECL_VINDEX (func_decl)),
18768 0));
18769
18770 /* GNU extension: Record what type this method came from originally. */
18771 if (debug_info_level > DINFO_LEVEL_TERSE
18772 && DECL_CONTEXT (func_decl))
18773 add_AT_die_ref (die, DW_AT_containing_type,
18774 lookup_type_die (DECL_CONTEXT (func_decl)));
18775 }
18776 }
18777 \f
18778 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
18779	 given decl.  This used to be a vendor extension until DWARF 4
18780 standardized it. */
18781
18782 static void
18783 add_linkage_attr (dw_die_ref die, tree decl)
18784 {
18785 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
18786
18787 /* Mimic what assemble_name_raw does with a leading '*'. */
18788 if (name[0] == '*')
18789 name = &name[1];
18790
18791 if (dwarf_version >= 4)
18792 add_AT_string (die, DW_AT_linkage_name, name);
18793 else
18794 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
18795 }
18796
18797 /* Add source coordinate attributes for the given decl. */
18798
18799 static void
18800 add_src_coords_attributes (dw_die_ref die, tree decl)
18801 {
18802 expanded_location s;
18803
18804 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
18805 return;
18806 s = expand_location (DECL_SOURCE_LOCATION (decl));
18807 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
18808 add_AT_unsigned (die, DW_AT_decl_line, s.line);
18809 }
18810
18811 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
18812
18813 static void
18814 add_linkage_name_raw (dw_die_ref die, tree decl)
18815 {
18816 /* Defer until we have an assembler name set. */
18817 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
18818 {
18819 limbo_die_node *asm_name;
18820
18821 asm_name = ggc_cleared_alloc<limbo_die_node> ();
18822 asm_name->die = die;
18823 asm_name->created_for = decl;
18824 asm_name->next = deferred_asm_name;
18825 deferred_asm_name = asm_name;
18826 }
18827 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
18828 add_linkage_attr (die, decl);
18829 }
18830
18831 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
18832
18833 static void
18834 add_linkage_name (dw_die_ref die, tree decl)
18835 {
18836 if (debug_info_level > DINFO_LEVEL_NONE
18837 && (TREE_CODE (decl) == FUNCTION_DECL || TREE_CODE (decl) == VAR_DECL)
18838 && TREE_PUBLIC (decl)
18839 && !(TREE_CODE (decl) == VAR_DECL && DECL_REGISTER (decl))
18840 && die->die_tag != DW_TAG_member)
18841 add_linkage_name_raw (die, decl);
18842 }
18843
18844 /* Add a DW_AT_name attribute and source coordinate attribute for the
18845 given decl, but only if it actually has a name. */
18846
18847 static void
18848 add_name_and_src_coords_attributes (dw_die_ref die, tree decl)
18849 {
18850 tree decl_name;
18851
18852 decl_name = DECL_NAME (decl);
18853 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
18854 {
18855 const char *name = dwarf2_name (decl, 0);
18856 if (name)
18857 add_name_attribute (die, name);
18858 if (! DECL_ARTIFICIAL (decl))
18859 add_src_coords_attributes (die, decl);
18860
18861 add_linkage_name (die, decl);
18862 }
18863
18864 #ifdef VMS_DEBUGGING_INFO
18865 /* Get the function's name, as described by its RTL. This may be different
18866 from the DECL_NAME name used in the source file. */
18867 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
18868 {
18869 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
18870 XEXP (DECL_RTL (decl), 0), false);
18871 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
18872 }
18873 #endif /* VMS_DEBUGGING_INFO */
18874 }
18875
18876 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
18877
18878 static void
18879 add_discr_value (dw_die_ref die, dw_discr_value *value)
18880 {
18881 dw_attr_node attr;
18882
18883 attr.dw_attr = DW_AT_discr_value;
18884 attr.dw_attr_val.val_class = dw_val_class_discr_value;
18885 attr.dw_attr_val.val_entry = NULL;
18886 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
18887 if (value->pos)
18888 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
18889 else
18890 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
18891 add_dwarf_attr (die, &attr);
18892 }
18893
18894 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
18895
18896 static void
18897 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
18898 {
18899 dw_attr_node attr;
18900
18901 attr.dw_attr = DW_AT_discr_list;
18902 attr.dw_attr_val.val_class = dw_val_class_discr_list;
18903 attr.dw_attr_val.val_entry = NULL;
18904 attr.dw_attr_val.v.val_discr_list = discr_list;
18905 add_dwarf_attr (die, &attr);
18906 }
18907
18908 static inline dw_discr_list_ref
18909 AT_discr_list (dw_attr_node *attr)
18910 {
18911 return attr->dw_attr_val.v.val_discr_list;
18912 }
18913
18914 #ifdef VMS_DEBUGGING_INFO
18915	 /* Output the debug main pointer die for VMS.  */
18916
18917 void
18918 dwarf2out_vms_debug_main_pointer (void)
18919 {
18920 char label[MAX_ARTIFICIAL_LABEL_BYTES];
18921 dw_die_ref die;
18922
18923 /* Allocate the VMS debug main subprogram die. */
18924 die = ggc_cleared_alloc<die_node> ();
18925 die->die_tag = DW_TAG_subprogram;
18926 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
18927 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
18928 current_function_funcdef_no);
18929 add_AT_lbl_id (die, DW_AT_entry_pc, label);
18930
18931 /* Make it the first child of comp_unit_die (). */
18932 die->die_parent = comp_unit_die ();
18933 if (comp_unit_die ()->die_child)
18934 {
18935 die->die_sib = comp_unit_die ()->die_child->die_sib;
18936 comp_unit_die ()->die_child->die_sib = die;
18937 }
18938 else
18939 {
18940 die->die_sib = die;
18941 comp_unit_die ()->die_child = die;
18942 }
18943 }
18944 #endif /* VMS_DEBUGGING_INFO */
18945
18946 /* Push a new declaration scope. */
18947
18948 static void
18949 push_decl_scope (tree scope)
18950 {
18951 vec_safe_push (decl_scope_table, scope);
18952 }
18953
18954 /* Pop a declaration scope. */
18955
18956 static inline void
18957 pop_decl_scope (void)
18958 {
18959 decl_scope_table->pop ();
18960 }
18961
18962 /* walk_tree helper function for uses_local_type, below. */
18963
18964 static tree
18965 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
18966 {
18967 if (!TYPE_P (*tp))
18968 *walk_subtrees = 0;
18969 else
18970 {
18971 tree name = TYPE_NAME (*tp);
18972 if (name && DECL_P (name) && decl_function_context (name))
18973 return *tp;
18974 }
18975 return NULL_TREE;
18976 }
18977
18978 /* If TYPE involves a function-local type (including a local typedef to a
18979 non-local type), returns that type; otherwise returns NULL_TREE. */
18980
18981 static tree
18982 uses_local_type (tree type)
18983 {
18984 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
18985 return used;
18986 }
18987
18988 /* Return the DIE for the scope that immediately contains this type.
18989 Non-named types that do not involve a function-local type get global
18990 scope. Named types nested in namespaces or other types get their
18991 containing scope. All other types (i.e. function-local named types) get
18992 the current active scope. */
18993
18994 static dw_die_ref
18995 scope_die_for (tree t, dw_die_ref context_die)
18996 {
18997 dw_die_ref scope_die = NULL;
18998 tree containing_scope;
18999
19000 /* Non-types always go in the current scope. */
19001 gcc_assert (TYPE_P (t));
19002
19003 /* Use the scope of the typedef, rather than the scope of the type
19004 it refers to. */
19005 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
19006 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
19007 else
19008 containing_scope = TYPE_CONTEXT (t);
19009
19010 /* Use the containing namespace if there is one. */
19011 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
19012 {
19013 if (context_die == lookup_decl_die (containing_scope))
19014 /* OK */;
19015 else if (debug_info_level > DINFO_LEVEL_TERSE)
19016 context_die = get_context_die (containing_scope);
19017 else
19018 containing_scope = NULL_TREE;
19019 }
19020
19021 /* Ignore function type "scopes" from the C frontend. They mean that
19022 a tagged type is local to a parmlist of a function declarator, but
19023 that isn't useful to DWARF. */
19024 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
19025 containing_scope = NULL_TREE;
19026
19027 if (SCOPE_FILE_SCOPE_P (containing_scope))
19028 {
19029 /* If T uses a local type keep it local as well, to avoid references
19030 to function-local DIEs from outside the function. */
19031 if (current_function_decl && uses_local_type (t))
19032 scope_die = context_die;
19033 else
19034 scope_die = comp_unit_die ();
19035 }
19036 else if (TYPE_P (containing_scope))
19037 {
19038 /* For types, we can just look up the appropriate DIE. */
19039 if (debug_info_level > DINFO_LEVEL_TERSE)
19040 scope_die = get_context_die (containing_scope);
19041 else
19042 {
19043 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
19044 if (scope_die == NULL)
19045 scope_die = comp_unit_die ();
19046 }
19047 }
19048 else
19049 scope_die = context_die;
19050
19051 return scope_die;
19052 }
19053
19054 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
19055
19056 static inline int
19057 local_scope_p (dw_die_ref context_die)
19058 {
19059 for (; context_die; context_die = context_die->die_parent)
19060 if (context_die->die_tag == DW_TAG_inlined_subroutine
19061 || context_die->die_tag == DW_TAG_subprogram)
19062 return 1;
19063
19064 return 0;
19065 }
19066
19067 /* Returns nonzero if CONTEXT_DIE is a class. */
19068
19069 static inline int
19070 class_scope_p (dw_die_ref context_die)
19071 {
19072 return (context_die
19073 && (context_die->die_tag == DW_TAG_structure_type
19074 || context_die->die_tag == DW_TAG_class_type
19075 || context_die->die_tag == DW_TAG_interface_type
19076 || context_die->die_tag == DW_TAG_union_type));
19077 }
19078
19079 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
19080 whether or not to treat a DIE in this context as a declaration. */
19081
19082 static inline int
19083 class_or_namespace_scope_p (dw_die_ref context_die)
19084 {
19085 return (class_scope_p (context_die)
19086 || (context_die && context_die->die_tag == DW_TAG_namespace));
19087 }
19088
19089 /* Many forms of DIEs require a "type description" attribute. This
19090 routine locates the proper "type descriptor" die for the type given
19091 by 'type' plus any additional qualifiers given by 'cv_quals', and
19092 adds a DW_AT_type attribute below the given die. */
19093
19094 static void
19095 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
19096 bool reverse, dw_die_ref context_die)
19097 {
19098 enum tree_code code = TREE_CODE (type);
19099 dw_die_ref type_die = NULL;
19100
19101 /* ??? If this type is an unnamed subrange type of an integral, floating-point
19102 or fixed-point type, use the inner type. This is because we have no
19103 support for unnamed types in base_type_die. This can happen if this is
19104	 an Ada subrange type.  The correct solution is to emit a subrange type DIE. */
19105 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
19106 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
19107 type = TREE_TYPE (type), code = TREE_CODE (type);
19108
19109 if (code == ERROR_MARK
19110 /* Handle a special case. For functions whose return type is void, we
19111 generate *no* type attribute. (Note that no object may have type
19112 `void', so this only applies to function return types). */
19113 || code == VOID_TYPE)
19114 return;
19115
19116 type_die = modified_type_die (type,
19117 cv_quals | TYPE_QUALS_NO_ADDR_SPACE (type),
19118 reverse,
19119 context_die);
19120
19121 if (type_die != NULL)
19122 add_AT_die_ref (object_die, DW_AT_type, type_die);
19123 }
19124
19125 /* Given an object die, add the calling convention attribute for the
19126 function call type. */
19127 static void
19128 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
19129 {
19130 enum dwarf_calling_convention value = DW_CC_normal;
19131
19132 value = ((enum dwarf_calling_convention)
19133 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
19134
19135 if (is_fortran ()
19136 && !strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)), "MAIN__"))
19137 {
19138 /* DWARF 2 doesn't provide a way to identify a program's source-level
19139 entry point. DW_AT_calling_convention attributes are only meant
19140 to describe functions' calling conventions. However, lacking a
19141 better way to signal the Fortran main program, we used this for
19142 a long time, following existing custom. Now, DWARF 4 has
19143 DW_AT_main_subprogram, which we add below, but some tools still
19144 rely on the old way, which we thus keep. */
19145 value = DW_CC_program;
19146
19147 if (dwarf_version >= 4 || !dwarf_strict)
19148 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
19149 }
19150
19151	 /* Only add the attribute if the backend requests it and the value
19152	 is not DW_CC_normal. */
19153 if (value && (value != DW_CC_normal))
19154 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
19155 }
19156
19157 /* Given a tree pointer to a struct, class, union, or enum type node, return
19158 a pointer to the (string) tag name for the given type, or zero if the type
19159 was declared without a tag. */
19160
19161 static const char *
19162 type_tag (const_tree type)
19163 {
19164 const char *name = 0;
19165
19166 if (TYPE_NAME (type) != 0)
19167 {
19168 tree t = 0;
19169
19170 /* Find the IDENTIFIER_NODE for the type name. */
19171 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
19172 && !TYPE_NAMELESS (type))
19173 t = TYPE_NAME (type);
19174
19175 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
19176 a TYPE_DECL node, regardless of whether or not a `typedef' was
19177 involved. */
19178 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
19179 && ! DECL_IGNORED_P (TYPE_NAME (type)))
19180 {
19181 /* We want to be extra verbose. Don't call dwarf_name if
19182 DECL_NAME isn't set. The default hook for decl_printable_name
19183 doesn't like that, and in this context it's correct to return
19184 0, instead of "<anonymous>" or the like. */
19185 if (DECL_NAME (TYPE_NAME (type))
19186 && !DECL_NAMELESS (TYPE_NAME (type)))
19187 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
19188 }
19189
19190 /* Now get the name as a string, or invent one. */
19191 if (!name && t != 0)
19192 name = IDENTIFIER_POINTER (t);
19193 }
19194
19195 return (name == 0 || *name == '\0') ? 0 : name;
19196 }
19197
19198 /* Return the type associated with a data member, make a special check
19199 for bit field types. */
19200
19201 static inline tree
19202 member_declared_type (const_tree member)
19203 {
19204 return (DECL_BIT_FIELD_TYPE (member)
19205 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
19206 }
19207
19208 /* Get the decl's label, as described by its RTL. This may be different
19209 from the DECL_NAME name used in the source file. */
19210
19211 #if 0
19212 static const char *
19213 decl_start_label (tree decl)
19214 {
19215 rtx x;
19216 const char *fnname;
19217
19218 x = DECL_RTL (decl);
19219 gcc_assert (MEM_P (x));
19220
19221 x = XEXP (x, 0);
19222 gcc_assert (GET_CODE (x) == SYMBOL_REF);
19223
19224 fnname = XSTR (x, 0);
19225 return fnname;
19226 }
19227 #endif
19228 \f
19229 /* For variable-length arrays that have been previously generated, but
19230 may be incomplete due to missing subscript info, fill the subscript
19231 info. Return TRUE if this is one of those cases. */
19232 static bool
19233 fill_variable_array_bounds (tree type)
19234 {
19235 if (TREE_ASM_WRITTEN (type)
19236 && TREE_CODE (type) == ARRAY_TYPE
19237 && variably_modified_type_p (type, NULL))
19238 {
19239 dw_die_ref array_die = lookup_type_die (type);
19240 if (!array_die)
19241 return false;
19242 add_subscript_info (array_die, type, !is_ada ());
19243 return true;
19244 }
19245 return false;
19246 }
19247
19248 /* These routines generate the internal representation of the DIEs for
19249 the compilation unit. Debugging information is collected by walking
19250 the declaration trees passed in from dwarf2out_decl(). */
19251
19252 static void
19253 gen_array_type_die (tree type, dw_die_ref context_die)
19254 {
19255 dw_die_ref array_die;
19256
19257 /* GNU compilers represent multidimensional array types as sequences of one
19258 dimensional array types whose element types are themselves array types.
19259 We sometimes squish that down to a single array_type DIE with multiple
19260 subscripts in the Dwarf debugging info. The draft Dwarf specification
19261 says that we are allowed to do this kind of compression in C, because
19262 there is no difference between an array of arrays and a multidimensional
19263 array. We don't do this for Ada, to remain as close as possible to the
19264 actual representation, which is especially important given the language's
19265 flexibility with respect to arrays of variable size. */
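/* Illustrative sketch, added for clarity (not part of the original comment):
   for a C declaration such as

       int m[2][3];

   the tree representation is ARRAY_TYPE (ARRAY_TYPE (int)), and with
   collapse_nested_arrays we emit roughly a single

       DW_TAG_array_type
         DW_TAG_subrange_type   (DW_AT_upper_bound 1)
         DW_TAG_subrange_type   (DW_AT_upper_bound 2)

   whose DW_AT_type refers to int, whereas with collapse_nested_arrays
   false (Ada) each nested ARRAY_TYPE keeps its own array DIE.  */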
19266
19267 bool collapse_nested_arrays = !is_ada ();
19268
19269 if (fill_variable_array_bounds (type))
19270 return;
19271
19272 dw_die_ref scope_die = scope_die_for (type, context_die);
19273 tree element_type;
19274
19275 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
19276 DW_TAG_string_type doesn't have DW_AT_type attribute). */
19277 if (TYPE_STRING_FLAG (type)
19278 && TREE_CODE (type) == ARRAY_TYPE
19279 && is_fortran ()
19280 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
19281 {
19282 HOST_WIDE_INT size;
19283
19284 array_die = new_die (DW_TAG_string_type, scope_die, type);
19285 add_name_attribute (array_die, type_tag (type));
19286 equate_type_number_to_die (type, array_die);
19287 size = int_size_in_bytes (type);
19288 if (size >= 0)
19289 add_AT_unsigned (array_die, DW_AT_byte_size, size);
19290 else if (TYPE_DOMAIN (type) != NULL_TREE
19291 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE
19292 && DECL_P (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
19293 {
19294 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
19295 dw_loc_list_ref loc = loc_list_from_tree (szdecl, 2, NULL);
19296
19297 size = int_size_in_bytes (TREE_TYPE (szdecl));
19298 if (loc && size > 0)
19299 {
19300 add_AT_location_description (array_die, DW_AT_string_length, loc);
19301 if (size != DWARF2_ADDR_SIZE)
19302 add_AT_unsigned (array_die, DW_AT_byte_size, size);
19303 }
19304 }
19305 return;
19306 }
19307
19308 array_die = new_die (DW_TAG_array_type, scope_die, type);
19309 add_name_attribute (array_die, type_tag (type));
19310 equate_type_number_to_die (type, array_die);
19311
19312 if (TREE_CODE (type) == VECTOR_TYPE)
19313 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
19314
19315 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
19316 if (is_fortran ()
19317 && TREE_CODE (type) == ARRAY_TYPE
19318 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
19319 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
19320 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
19321
19322 #if 0
19323 /* We default the array ordering. SDB will probably do
19324 the right things even if DW_AT_ordering is not present. It's not even
19325 an issue until we start to get into multidimensional arrays anyway. If
19326 SDB is ever caught doing the Wrong Thing for multi-dimensional arrays,
19327 then we'll have to put the DW_AT_ordering attribute back in. (But if
19328 and when we find out that we need to put these in, we will only do so
19329 for multidimensional arrays.) */
19330 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
19331 #endif
19332
19333 if (TREE_CODE (type) == VECTOR_TYPE)
19334 {
19335 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
19336 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
19337 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
19338 add_bound_info (subrange_die, DW_AT_upper_bound,
19339 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
19340 }
19341 else
19342 add_subscript_info (array_die, type, collapse_nested_arrays);
19343
19344 /* Add representation of the type of the elements of this array type and
19345 emit the corresponding DIE if we haven't done it already. */
19346 element_type = TREE_TYPE (type);
19347 if (collapse_nested_arrays)
19348 while (TREE_CODE (element_type) == ARRAY_TYPE)
19349 {
19350 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
19351 break;
19352 element_type = TREE_TYPE (element_type);
19353 }
19354
19355 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
19356 TREE_CODE (type) == ARRAY_TYPE
19357 && TYPE_REVERSE_STORAGE_ORDER (type),
19358 context_die);
19359
19360 add_gnat_descriptive_type_attribute (array_die, type, context_die);
19361 if (TYPE_ARTIFICIAL (type))
19362 add_AT_flag (array_die, DW_AT_artificial, 1);
19363
19364 if (get_AT (array_die, DW_AT_name))
19365 add_pubtype (type, array_die);
19366 }
19367
19368 /* This routine generates a DIE for an array with a hidden descriptor;
19369 the details are filled into *info by a langhook. */
19370
19371 static void
19372 gen_descr_array_type_die (tree type, struct array_descr_info *info,
19373 dw_die_ref context_die)
19374 {
19375 const dw_die_ref scope_die = scope_die_for (type, context_die);
19376 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
19377 const struct loc_descr_context context = { type, info->base_decl, NULL };
19378 int dim;
19379
19380 add_name_attribute (array_die, type_tag (type));
19381 equate_type_number_to_die (type, array_die);
19382
19383 if (info->ndimensions > 1)
19384 switch (info->ordering)
19385 {
19386 case array_descr_ordering_row_major:
19387 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
19388 break;
19389 case array_descr_ordering_column_major:
19390 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
19391 break;
19392 default:
19393 break;
19394 }
19395
19396 if (dwarf_version >= 3 || !dwarf_strict)
19397 {
19398 if (info->data_location)
19399 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
19400 dw_scalar_form_exprloc, &context);
19401 if (info->associated)
19402 add_scalar_info (array_die, DW_AT_associated, info->associated,
19403 dw_scalar_form_constant
19404 | dw_scalar_form_exprloc
19405 | dw_scalar_form_reference, &context);
19406 if (info->allocated)
19407 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
19408 dw_scalar_form_constant
19409 | dw_scalar_form_exprloc
19410 | dw_scalar_form_reference, &context);
19411 if (info->stride)
19412 {
19413 const enum dwarf_attribute attr
19414 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
19415 const int forms
19416 = (info->stride_in_bits)
19417 ? dw_scalar_form_constant
19418 : (dw_scalar_form_constant
19419 | dw_scalar_form_exprloc
19420 | dw_scalar_form_reference);
19421
19422 add_scalar_info (array_die, attr, info->stride, forms, &context);
19423 }
19424 }
19425
19426 add_gnat_descriptive_type_attribute (array_die, type, context_die);
19427
19428 for (dim = 0; dim < info->ndimensions; dim++)
19429 {
19430 dw_die_ref subrange_die
19431 = new_die (DW_TAG_subrange_type, array_die, NULL);
19432
19433 if (info->dimen[dim].bounds_type)
19434 add_type_attribute (subrange_die,
19435 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
19436 false, context_die);
19437 if (info->dimen[dim].lower_bound)
19438 add_bound_info (subrange_die, DW_AT_lower_bound,
19439 info->dimen[dim].lower_bound, &context);
19440 if (info->dimen[dim].upper_bound)
19441 add_bound_info (subrange_die, DW_AT_upper_bound,
19442 info->dimen[dim].upper_bound, &context);
19443 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
19444 add_scalar_info (subrange_die, DW_AT_byte_stride,
19445 info->dimen[dim].stride,
19446 dw_scalar_form_constant
19447 | dw_scalar_form_exprloc
19448 | dw_scalar_form_reference,
19449 &context);
19450 }
19451
19452 gen_type_die (info->element_type, context_die);
19453 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
19454 TREE_CODE (type) == ARRAY_TYPE
19455 && TYPE_REVERSE_STORAGE_ORDER (type),
19456 context_die);
19457
19458 if (get_AT (array_die, DW_AT_name))
19459 add_pubtype (type, array_die);
19460 }
19461
19462 #if 0
19463 static void
19464 gen_entry_point_die (tree decl, dw_die_ref context_die)
19465 {
19466 tree origin = decl_ultimate_origin (decl);
19467 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
19468
19469 if (origin != NULL)
19470 add_abstract_origin_attribute (decl_die, origin);
19471 else
19472 {
19473 add_name_and_src_coords_attributes (decl_die, decl);
19474 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
19475 TYPE_UNQUALIFIED, false, context_die);
19476 }
19477
19478 if (DECL_ABSTRACT_P (decl))
19479 equate_decl_number_to_die (decl, decl_die);
19480 else
19481 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
19482 }
19483 #endif
19484
19485 /* Walk through the list of incomplete types again, trying once more to
19486 emit full debugging info for them. */
19487
19488 static void
19489 retry_incomplete_types (void)
19490 {
19491 set_early_dwarf s;
19492 int i;
19493
19494 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
19495 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
19496 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
19497 vec_safe_truncate (incomplete_types, 0);
19498 }
19499
19500 /* Determine what tag to use for a record type. */
19501
19502 static enum dwarf_tag
19503 record_type_tag (tree type)
19504 {
19505 if (! lang_hooks.types.classify_record)
19506 return DW_TAG_structure_type;
19507
19508 switch (lang_hooks.types.classify_record (type))
19509 {
19510 case RECORD_IS_STRUCT:
19511 return DW_TAG_structure_type;
19512
19513 case RECORD_IS_CLASS:
19514 return DW_TAG_class_type;
19515
19516 case RECORD_IS_INTERFACE:
19517 if (dwarf_version >= 3 || !dwarf_strict)
19518 return DW_TAG_interface_type;
19519 return DW_TAG_structure_type;
19520
19521 default:
19522 gcc_unreachable ();
19523 }
19524 }
19525
19526 /* Generate a DIE to represent an enumeration type. Note that these DIEs
19527 include all of the information about the enumeration values also. Each
19528 enumerated type name/value is listed as a child of the enumerated type
19529 DIE. */
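/* Illustration, added for clarity (not part of the original comment): for

       enum color { RED, GREEN = 5 };

   we emit roughly

       DW_TAG_enumeration_type  "color"
         DW_TAG_enumerator  "RED"    DW_AT_const_value 0
         DW_TAG_enumerator  "GREEN"  DW_AT_const_value 5  */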
19530
19531 static dw_die_ref
19532 gen_enumeration_type_die (tree type, dw_die_ref context_die)
19533 {
19534 dw_die_ref type_die = lookup_type_die (type);
19535
19536 if (type_die == NULL)
19537 {
19538 type_die = new_die (DW_TAG_enumeration_type,
19539 scope_die_for (type, context_die), type);
19540 equate_type_number_to_die (type, type_die);
19541 add_name_attribute (type_die, type_tag (type));
19542 if (dwarf_version >= 4 || !dwarf_strict)
19543 {
19544 if (ENUM_IS_SCOPED (type))
19545 add_AT_flag (type_die, DW_AT_enum_class, 1);
19546 if (ENUM_IS_OPAQUE (type))
19547 add_AT_flag (type_die, DW_AT_declaration, 1);
19548 }
19549 }
19550 else if (! TYPE_SIZE (type))
19551 return type_die;
19552 else
19553 remove_AT (type_die, DW_AT_declaration);
19554
19555 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
19556 given enum type is incomplete, do not generate the DW_AT_byte_size
19557 attribute or the DW_AT_element_list attribute. */
19558 if (TYPE_SIZE (type))
19559 {
19560 tree link;
19561
19562 TREE_ASM_WRITTEN (type) = 1;
19563 add_byte_size_attribute (type_die, type);
19564 if (dwarf_version >= 3 || !dwarf_strict)
19565 {
19566 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
19567 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
19568 context_die);
19569 }
19570 if (TYPE_STUB_DECL (type) != NULL_TREE)
19571 {
19572 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
19573 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
19574 }
19575
19576 /* If the first reference to this type was as the return type of an
19577 inline function, then it may not have a parent. Fix this now. */
19578 if (type_die->die_parent == NULL)
19579 add_child_die (scope_die_for (type, context_die), type_die);
19580
19581 for (link = TYPE_VALUES (type);
19582 link != NULL; link = TREE_CHAIN (link))
19583 {
19584 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
19585 tree value = TREE_VALUE (link);
19586
19587 add_name_attribute (enum_die,
19588 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
19589
19590 if (TREE_CODE (value) == CONST_DECL)
19591 value = DECL_INITIAL (value);
19592
19593 if (simple_type_size_in_bits (TREE_TYPE (value))
19594 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
19595 {
19596 /* For constant forms created by add_AT_unsigned, DWARF
19597 consumers (GDB, elfutils, etc.) always zero-extend
19598 the value. Only when the actual value is negative
19599 do we need to use add_AT_int to generate a constant
19600 form that can represent negative values. */
19601 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
19602 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
19603 add_AT_unsigned (enum_die, DW_AT_const_value,
19604 (unsigned HOST_WIDE_INT) val);
19605 else
19606 add_AT_int (enum_die, DW_AT_const_value, val);
19607 }
19608 else
19609 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
19610 that here. TODO: This should be re-worked to use correct
19611 signed/unsigned double tags for all cases. */
19612 add_AT_wide (enum_die, DW_AT_const_value, value);
19613 }
19614
19615 add_gnat_descriptive_type_attribute (type_die, type, context_die);
19616 if (TYPE_ARTIFICIAL (type))
19617 add_AT_flag (type_die, DW_AT_artificial, 1);
19618 }
19619 else
19620 add_AT_flag (type_die, DW_AT_declaration, 1);
19621
19622 add_pubtype (type, type_die);
19623
19624 return type_die;
19625 }
19626
19627 /* Generate a DIE to represent either a real live formal parameter decl or to
19628 represent just the type of some formal parameter position in some function
19629 type.
19630
19631 Note that this routine is a bit unusual because its argument may be a
19632 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
19633 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
19634 node. If it's the former then this function is being called to output a
19635 DIE to represent a formal parameter object (or some inlining thereof). If
19636 it's the latter, then this function is only being called to output a
19637 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
19638 argument type of some subprogram type.
19639 If EMIT_NAME_P is true, name and source coordinate attributes
19640 are emitted. */
19641
19642 static dw_die_ref
19643 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
19644 dw_die_ref context_die)
19645 {
19646 tree node_or_origin = node ? node : origin;
19647 tree ultimate_origin;
19648 dw_die_ref parm_die = NULL;
19649
19650 if (TREE_CODE_CLASS (TREE_CODE (node_or_origin)) == tcc_declaration)
19651 {
19652 parm_die = lookup_decl_die (node);
19653
19654 /* If the contexts differ, we may not be talking about the same
19655 thing. */
19656 if (parm_die && parm_die->die_parent != context_die)
19657 {
19658 if (!DECL_ABSTRACT_P (node))
19659 {
19660 /* This can happen when creating an inlined instance, in
19661 which case we need to create a new DIE that will get
19662 annotated with DW_AT_abstract_origin. */
19663 parm_die = NULL;
19664 }
19665 else
19666 {
19667 /* FIXME: Reuse DIE even with a differing context.
19668
19669 This can happen when calling
19670 dwarf2out_abstract_function to build debug info for
19671 the abstract instance of a function for which we have
19672 already generated a DIE in
19673 dwarf2out_early_global_decl.
19674
19675 Once we remove dwarf2out_abstract_function, we should
19676 have a call to gcc_unreachable here. */
19677 }
19678 }
19679
19680 if (parm_die && parm_die->die_parent == NULL)
19681 {
19682 /* Check that parm_die already has the right attributes that
19683 we would have added below. If any attributes are
19684 missing, fall through to add them. */
19685 if (! DECL_ABSTRACT_P (node_or_origin)
19686 && !get_AT (parm_die, DW_AT_location)
19687 && !get_AT (parm_die, DW_AT_const_value))
19688 /* We are missing location info, and are about to add it. */
19689 ;
19690 else
19691 {
19692 add_child_die (context_die, parm_die);
19693 return parm_die;
19694 }
19695 }
19696 }
19697
19698 /* If we have a previously generated DIE, use it, unless this is a
19699 concrete instance (origin != NULL), in which case we need a new
19700 DIE with a corresponding DW_AT_abstract_origin. */
19701 bool reusing_die;
19702 if (parm_die && origin == NULL)
19703 reusing_die = true;
19704 else
19705 {
19706 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
19707 reusing_die = false;
19708 }
19709
19710 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
19711 {
19712 case tcc_declaration:
19713 ultimate_origin = decl_ultimate_origin (node_or_origin);
19714 if (node || ultimate_origin)
19715 origin = ultimate_origin;
19716
19717 if (reusing_die)
19718 goto add_location;
19719
19720 if (origin != NULL)
19721 add_abstract_origin_attribute (parm_die, origin);
19722 else if (emit_name_p)
19723 add_name_and_src_coords_attributes (parm_die, node);
19724 if (origin == NULL
19725 || (! DECL_ABSTRACT_P (node_or_origin)
19726 && variably_modified_type_p (TREE_TYPE (node_or_origin),
19727 decl_function_context
19728 (node_or_origin))))
19729 {
19730 tree type = TREE_TYPE (node_or_origin);
19731 if (decl_by_reference_p (node_or_origin))
19732 add_type_attribute (parm_die, TREE_TYPE (type),
19733 TYPE_UNQUALIFIED,
19734 false, context_die);
19735 else
19736 add_type_attribute (parm_die, type,
19737 decl_quals (node_or_origin),
19738 false, context_die);
19739 }
19740 if (origin == NULL && DECL_ARTIFICIAL (node))
19741 add_AT_flag (parm_die, DW_AT_artificial, 1);
19742 add_location:
19743 if (node && node != origin)
19744 equate_decl_number_to_die (node, parm_die);
19745 if (! DECL_ABSTRACT_P (node_or_origin))
19746 add_location_or_const_value_attribute (parm_die, node_or_origin,
19747 node == NULL);
19748
19749 break;
19750
19751 case tcc_type:
19752 /* We were called with some kind of a ..._TYPE node. */
19753 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
19754 context_die);
19755 break;
19756
19757 default:
19758 gcc_unreachable ();
19759 }
19760
19761 return parm_die;
19762 }
19763
19764 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
19765 children DW_TAG_formal_parameter DIEs representing the arguments of the
19766 parameter pack.
19767
19768 PARM_PACK must be a function parameter pack.
19769 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
19770 must point to the subsequent arguments of the function PACK_ARG belongs to.
19771 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
19772 If NEXT_ARG is non NULL, *NEXT_ARG is set to the function argument
19773 following the last one for which a DIE was generated. */
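/* Illustrative sketch, added for clarity (not part of the original comment):
   for a C++ instantiation such as

       template <typename... T> void f (int first, T... rest);
       // ... f (1, 2.0, "x") ...

   the arguments expanded from the pack `rest' get DW_TAG_formal_parameter
   DIEs as children of one DW_TAG_GNU_formal_parameter_pack DIE, while
   `first' keeps an ordinary DW_TAG_formal_parameter DIE.  */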
19774
19775 static dw_die_ref
19776 gen_formal_parameter_pack_die (tree parm_pack,
19777 tree pack_arg,
19778 dw_die_ref subr_die,
19779 tree *next_arg)
19780 {
19781 tree arg;
19782 dw_die_ref parm_pack_die;
19783
19784 gcc_assert (parm_pack
19785 && lang_hooks.function_parameter_pack_p (parm_pack)
19786 && subr_die);
19787
19788 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
19789 add_src_coords_attributes (parm_pack_die, parm_pack);
19790
19791 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
19792 {
19793 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
19794 parm_pack))
19795 break;
19796 gen_formal_parameter_die (arg, NULL,
19797 false /* Don't emit name attribute. */,
19798 parm_pack_die);
19799 }
19800 if (next_arg)
19801 *next_arg = arg;
19802 return parm_pack_die;
19803 }
19804
19805 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
19806 at the end of an (ANSI prototyped) formal parameters list. */
19807
19808 static void
19809 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
19810 {
19811 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
19812 }
19813
19814 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
19815 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
19816 parameters as specified in some function type specification (except for
19817 those which appear as part of a function *definition*). */
19818
19819 static void
19820 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
19821 {
19822 tree link;
19823 tree formal_type = NULL;
19824 tree first_parm_type;
19825 tree arg;
19826
19827 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
19828 {
19829 arg = DECL_ARGUMENTS (function_or_method_type);
19830 function_or_method_type = TREE_TYPE (function_or_method_type);
19831 }
19832 else
19833 arg = NULL_TREE;
19834
19835 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
19836
19837 /* Make our first pass over the list of formal parameter types and output a
19838 DW_TAG_formal_parameter DIE for each one. */
19839 for (link = first_parm_type; link; )
19840 {
19841 dw_die_ref parm_die;
19842
19843 formal_type = TREE_VALUE (link);
19844 if (formal_type == void_type_node)
19845 break;
19846
19847 /* Output a (nameless) DIE to represent the formal parameter itself. */
19848 if (!POINTER_BOUNDS_TYPE_P (formal_type))
19849 {
19850 parm_die = gen_formal_parameter_die (formal_type, NULL,
19851 true /* Emit name attribute. */,
19852 context_die);
19853 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
19854 && link == first_parm_type)
19855 {
19856 add_AT_flag (parm_die, DW_AT_artificial, 1);
19857 if (dwarf_version >= 3 || !dwarf_strict)
19858 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
19859 }
19860 else if (arg && DECL_ARTIFICIAL (arg))
19861 add_AT_flag (parm_die, DW_AT_artificial, 1);
19862 }
19863
19864 link = TREE_CHAIN (link);
19865 if (arg)
19866 arg = DECL_CHAIN (arg);
19867 }
19868
19869 /* If this function type has an ellipsis, add a
19870 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
19871 if (formal_type != void_type_node)
19872 gen_unspecified_parameters_die (function_or_method_type, context_die);
19873
19874 /* Make our second (and final) pass over the list of formal parameter types
19875 and output DIEs to represent those types (as necessary). */
19876 for (link = TYPE_ARG_TYPES (function_or_method_type);
19877 link && TREE_VALUE (link);
19878 link = TREE_CHAIN (link))
19879 gen_type_die (TREE_VALUE (link), context_die);
19880 }
19881
19882 /* We want to generate the DIE for TYPE so that we can generate the
19883 die for MEMBER, which has been defined; we will need to refer back
19884 to the member declaration nested within TYPE. If we're trying to
19885 generate minimal debug info for TYPE, processing TYPE won't do the
19886 trick; we need to attach the member declaration by hand. */
19887
19888 static void
19889 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
19890 {
19891 gen_type_die (type, context_die);
19892
19893 /* If we're trying to avoid duplicate debug info, we may not have
19894 emitted the member decl for this function. Emit it now. */
19895 if (TYPE_STUB_DECL (type)
19896 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
19897 && ! lookup_decl_die (member))
19898 {
19899 dw_die_ref type_die;
19900 gcc_assert (!decl_ultimate_origin (member));
19901
19902 push_decl_scope (type);
19903 type_die = lookup_type_die_strip_naming_typedef (type);
19904 if (TREE_CODE (member) == FUNCTION_DECL)
19905 gen_subprogram_die (member, type_die);
19906 else if (TREE_CODE (member) == FIELD_DECL)
19907 {
19908 /* Ignore the nameless fields that are used to skip bits but handle
19909 C++ anonymous unions and structs. */
19910 if (DECL_NAME (member) != NULL_TREE
19911 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
19912 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
19913 {
19914 struct vlr_context vlr_ctx = {
19915 DECL_CONTEXT (member), /* struct_type */
19916 NULL_TREE /* variant_part_offset */
19917 };
19918 gen_type_die (member_declared_type (member), type_die);
19919 gen_field_die (member, &vlr_ctx, type_die);
19920 }
19921 }
19922 else
19923 gen_variable_die (member, NULL_TREE, type_die);
19924
19925 pop_decl_scope ();
19926 }
19927 }
19928 \f
19929 /* Forward declare these functions, because they are mutually recursive
19930 with their set_block_* pairing functions. */
19931 static void set_decl_origin_self (tree);
19932 static void set_decl_abstract_flags (tree, vec<tree> &);
19933
19934 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
19935 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
19936 that it points to the node itself, thus indicating that the node is its
19937 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
19938 the given node is NULL, recursively descend the decl/block tree which
19939 it is the root of, and for each other ..._DECL or BLOCK node contained
19940 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
19941 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
19942 values to point to themselves. */
19943
19944 static void
19945 set_block_origin_self (tree stmt)
19946 {
19947 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
19948 {
19949 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
19950
19951 {
19952 tree local_decl;
19953
19954 for (local_decl = BLOCK_VARS (stmt);
19955 local_decl != NULL_TREE;
19956 local_decl = DECL_CHAIN (local_decl))
19957 /* Do not recurse on nested functions since the inlining status
19958 of parent and child can be different as per the DWARF spec. */
19959 if (TREE_CODE (local_decl) != FUNCTION_DECL
19960 && !DECL_EXTERNAL (local_decl))
19961 set_decl_origin_self (local_decl);
19962 }
19963
19964 {
19965 tree subblock;
19966
19967 for (subblock = BLOCK_SUBBLOCKS (stmt);
19968 subblock != NULL_TREE;
19969 subblock = BLOCK_CHAIN (subblock))
19970 set_block_origin_self (subblock); /* Recurse. */
19971 }
19972 }
19973 }
19974
19975 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
19976 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
19977 node so that it points to the node itself, thus indicating that the
19978 node represents its own (abstract) origin. Additionally, if the
19979 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
19980 the decl/block tree of which the given node is the root, and for
19981 each other ..._DECL or BLOCK node contained therein whose
19982 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
19983 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
19984 point to themselves. */
19985
19986 static void
19987 set_decl_origin_self (tree decl)
19988 {
19989 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
19990 {
19991 DECL_ABSTRACT_ORIGIN (decl) = decl;
19992 if (TREE_CODE (decl) == FUNCTION_DECL)
19993 {
19994 tree arg;
19995
19996 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
19997 DECL_ABSTRACT_ORIGIN (arg) = arg;
19998 if (DECL_INITIAL (decl) != NULL_TREE
19999 && DECL_INITIAL (decl) != error_mark_node)
20000 set_block_origin_self (DECL_INITIAL (decl));
20001 }
20002 }
20003 }
20004 \f
20005 /* Given a pointer to some BLOCK node, set the BLOCK_ABSTRACT flag to 1
20006 and if it wasn't 1 before, push it onto the abstract_vec vector.
20007 Do the same for all local decls and all local sub-blocks
20008 (recursively). */
20009
20010 static void
20011 set_block_abstract_flags (tree stmt, vec<tree> &abstract_vec)
20012 {
20013 tree local_decl;
20014 tree subblock;
20015 unsigned int i;
20016
20017 if (!BLOCK_ABSTRACT (stmt))
20018 {
20019 abstract_vec.safe_push (stmt);
20020 BLOCK_ABSTRACT (stmt) = 1;
20021 }
20022
20023 for (local_decl = BLOCK_VARS (stmt);
20024 local_decl != NULL_TREE;
20025 local_decl = DECL_CHAIN (local_decl))
20026 if (! DECL_EXTERNAL (local_decl))
20027 set_decl_abstract_flags (local_decl, abstract_vec);
20028
20029 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
20030 {
20031 local_decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
20032 if ((TREE_CODE (local_decl) == VAR_DECL && !TREE_STATIC (local_decl))
20033 || TREE_CODE (local_decl) == PARM_DECL)
20034 set_decl_abstract_flags (local_decl, abstract_vec);
20035 }
20036
20037 for (subblock = BLOCK_SUBBLOCKS (stmt);
20038 subblock != NULL_TREE;
20039 subblock = BLOCK_CHAIN (subblock))
20040 set_block_abstract_flags (subblock, abstract_vec);
20041 }
20042
20043 /* Given a pointer to some ..._DECL node, set the DECL_ABSTRACT_P flag on it
20044 to 1 and, if it wasn't 1 before, push it onto the abstract_vec vector.
20045 In the case where the decl is a FUNCTION_DECL, also set the abstract
20046 flags for all of the parameters, local vars, local
20047 blocks and sub-blocks (recursively). */
20048
20049 static void
20050 set_decl_abstract_flags (tree decl, vec<tree> &abstract_vec)
20051 {
20052 if (!DECL_ABSTRACT_P (decl))
20053 {
20054 abstract_vec.safe_push (decl);
20055 DECL_ABSTRACT_P (decl) = 1;
20056 }
20057
20058 if (TREE_CODE (decl) == FUNCTION_DECL)
20059 {
20060 tree arg;
20061
20062 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
20063 if (!DECL_ABSTRACT_P (arg))
20064 {
20065 abstract_vec.safe_push (arg);
20066 DECL_ABSTRACT_P (arg) = 1;
20067 }
20068 if (DECL_INITIAL (decl) != NULL_TREE
20069 && DECL_INITIAL (decl) != error_mark_node)
20070 set_block_abstract_flags (DECL_INITIAL (decl), abstract_vec);
20071 }
20072 }
20073
20074 /* Generate the DWARF2 info for the "abstract" instance of a function which we
20075 may later generate inlined and/or out-of-line instances of.
20076
20077 FIXME: In the early-dwarf world, this function, and most of the
20078 DECL_ABSTRACT code should be obsoleted. The early DIE _is_
20079 the abstract instance. All we would need to do is annotate
20080 the early DIE with the appropriate DW_AT_inline in late
20081 dwarf (perhaps in gen_inlined_subroutine_die).
20082
20083 However, we can't do this yet, because LTO streaming of DIEs
20084 has not been implemented yet. */
20085
20086 static void
20087 dwarf2out_abstract_function (tree decl)
20088 {
20089 dw_die_ref old_die;
20090 tree save_fn;
20091 tree context;
20092 hash_table<decl_loc_hasher> *old_decl_loc_table;
20093 hash_table<dw_loc_list_hasher> *old_cached_dw_loc_list_table;
20094 int old_call_site_count, old_tail_call_site_count;
20095 struct call_arg_loc_node *old_call_arg_locations;
20096
20097 /* Make sure we have the actual abstract inline, not a clone. */
20098 decl = DECL_ORIGIN (decl);
20099
20100 old_die = lookup_decl_die (decl);
20101 if (old_die && get_AT (old_die, DW_AT_inline))
20102 /* We've already generated the abstract instance. */
20103 return;
20104
20105 /* We can be called recursively when seeing a block defining an inlined
20106 subroutine DIE. Be sure not to clobber the outer location table nor use
20107 it, or we would get locations in abstract instances. */
20108 old_decl_loc_table = decl_loc_table;
20109 decl_loc_table = NULL;
20110 old_cached_dw_loc_list_table = cached_dw_loc_list_table;
20111 cached_dw_loc_list_table = NULL;
20112 old_call_arg_locations = call_arg_locations;
20113 call_arg_locations = NULL;
20114 old_call_site_count = call_site_count;
20115 call_site_count = -1;
20116 old_tail_call_site_count = tail_call_site_count;
20117 tail_call_site_count = -1;
20118
20119 /* Be sure we've emitted the in-class declaration DIE (if any) first, so
20120 we don't get confused by DECL_ABSTRACT_P. */
20121 if (debug_info_level > DINFO_LEVEL_TERSE)
20122 {
20123 context = decl_class_context (decl);
20124 if (context)
20125 gen_type_die_for_member
20126 (context, decl, decl_function_context (decl) ? NULL : comp_unit_die ());
20127 }
20128
20129 /* Pretend we've just finished compiling this function. */
20130 save_fn = current_function_decl;
20131 current_function_decl = decl;
20132
20133 auto_vec<tree, 64> abstract_vec;
20134 set_decl_abstract_flags (decl, abstract_vec);
20135 dwarf2out_decl (decl);
20136 unsigned int i;
20137 tree t;
20138 FOR_EACH_VEC_ELT (abstract_vec, i, t)
20139 if (TREE_CODE (t) == BLOCK)
20140 BLOCK_ABSTRACT (t) = 0;
20141 else
20142 DECL_ABSTRACT_P (t) = 0;
20143
20144 current_function_decl = save_fn;
20145 decl_loc_table = old_decl_loc_table;
20146 cached_dw_loc_list_table = old_cached_dw_loc_list_table;
20147 call_arg_locations = old_call_arg_locations;
20148 call_site_count = old_call_site_count;
20149 tail_call_site_count = old_tail_call_site_count;
20150 }
20151
20152 /* Helper function of premark_used_types() which gets called through
20153 htab_traverse.
20154
20155 Marks the DIE of a given type in *SLOT as perennial, so it never gets
20156 marked as unused by prune_unused_types. */
20157
20158 bool
20159 premark_used_types_helper (tree const &type, void *)
20160 {
20161 dw_die_ref die;
20162
20163 die = lookup_type_die (type);
20164 if (die != NULL)
20165 die->die_perennial_p = 1;
20166 return true;
20167 }
20168
20169 /* Helper function of premark_types_used_by_global_vars which gets called
20170 through htab_traverse.
20171
20172 Marks the DIE of a given type in *SLOT as perennial, so it never gets
20173 marked as unused by prune_unused_types. The DIE of the type is marked
20174 only if the global variable using the type will actually be emitted. */
20175
20176 int
20177 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
20178 void *)
20179 {
20180 struct types_used_by_vars_entry *entry;
20181 dw_die_ref die;
20182
20183 entry = (struct types_used_by_vars_entry *) *slot;
20184 gcc_assert (entry->type != NULL
20185 && entry->var_decl != NULL);
20186 die = lookup_type_die (entry->type);
20187 if (die)
20188 {
20189 /* Ask cgraph if the global variable really is to be emitted.
20190 If yes, then we'll keep the DIE of ENTRY->TYPE. */
20191 varpool_node *node = varpool_node::get (entry->var_decl);
20192 if (node && node->definition)
20193 {
20194 die->die_perennial_p = 1;
20195 /* Keep the parent DIEs as well. */
20196 while ((die = die->die_parent) && die->die_perennial_p == 0)
20197 die->die_perennial_p = 1;
20198 }
20199 }
20200 return 1;
20201 }
20202
20203 /* Mark all members of used_types_hash as perennial. */
20204
20205 static void
20206 premark_used_types (struct function *fun)
20207 {
20208 if (fun && fun->used_types_hash)
20209 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
20210 }
20211
20212 /* Mark all members of types_used_by_vars_entry as perennial. */
20213
20214 static void
20215 premark_types_used_by_global_vars (void)
20216 {
20217 if (types_used_by_vars_hash)
20218 types_used_by_vars_hash
20219 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
20220 }
20221
20222 /* Generate a DW_TAG_GNU_call_site DIE in function DECL under SUBR_DIE
20223 for CA_LOC call arg loc node. */
20224
20225 static dw_die_ref
20226 gen_call_site_die (tree decl, dw_die_ref subr_die,
20227 struct call_arg_loc_node *ca_loc)
20228 {
20229 dw_die_ref stmt_die = NULL, die;
20230 tree block = ca_loc->block;
20231
20232 while (block
20233 && block != DECL_INITIAL (decl)
20234 && TREE_CODE (block) == BLOCK)
20235 {
20236 stmt_die = BLOCK_DIE (block);
20237 if (stmt_die)
20238 break;
20239 block = BLOCK_SUPERCONTEXT (block);
20240 }
20241 if (stmt_die == NULL)
20242 stmt_die = subr_die;
20243 die = new_die (DW_TAG_GNU_call_site, stmt_die, NULL_TREE);
20244 add_AT_lbl_id (die, DW_AT_low_pc, ca_loc->label);
20245 if (ca_loc->tail_call_p)
20246 add_AT_flag (die, DW_AT_GNU_tail_call, 1);
20247 if (ca_loc->symbol_ref)
20248 {
20249 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
20250 if (tdie)
20251 add_AT_die_ref (die, DW_AT_abstract_origin, tdie);
20252 else
20253 add_AT_addr (die, DW_AT_abstract_origin, ca_loc->symbol_ref, false);
20254 }
20255 return die;
20256 }
20257
20258 /* Generate a DIE to represent a declared function (either file-scope or
20259 block-local). */
20260
20261 static void
20262 gen_subprogram_die (tree decl, dw_die_ref context_die)
20263 {
20264 tree origin = decl_ultimate_origin (decl);
20265 dw_die_ref subr_die;
20266 dw_die_ref old_die = lookup_decl_die (decl);
20267
20268 /* This function gets called multiple times for different stages of
20269 the debug process. For example, for func() in this code:
20270
20271 namespace S
20272 {
20273 void func() { ... }
20274 }
20275
20276 ...we get called 4 times. Twice in early debug and twice in
20277 late debug:
20278
20279 Early debug
20280 -----------
20281
20282 1. Once while generating func() within the namespace. This is
20283 the declaration. The declaration bit below is set, as the
20284 context is the namespace.
20285
20286 A new DIE will be generated with DW_AT_declaration set.
20287
20288 2. Once for func() itself. This is the specification. The
20289 declaration bit below is clear as the context is the CU.
20290
20291 We will use the cached DIE from (1) to create a new DIE with
20292 DW_AT_specification pointing to the declaration in (1).
20293
20294 Late debug via rest_of_handle_final()
20295 -------------------------------------
20296
20297 3. Once generating func() within the namespace. This is also the
20298 declaration, as in (1), but this time we will early exit below
20299 as we have a cached DIE and a declaration needs no additional
20300 annotations (no locations), as the source declaration line
20301 info is enough.
20302
20303 4. Once for func() itself. As in (2), this is the specification,
20304 but this time we will re-use the cached DIE, and just annotate
20305 it with the location information that should now be available.
20306
20307 For something without namespaces, but with abstract instances, we
20308 are also called multiple times:
20309
20310 class Base
20311 {
20312 public:
20313 Base (); // constructor declaration (1)
20314 };
20315
20316 Base::Base () { } // constructor specification (2)
20317
20318 Early debug
20319 -----------
20320
20321 1. Once for the Base() constructor by virtue of it being a
20322 member of the Base class. This is done via
20323 rest_of_type_compilation.
20324
20325 This is a declaration, so a new DIE will be created with
20326 DW_AT_declaration.
20327
20328 2. Once for the Base() constructor definition, but this time
20329 while generating the abstract instance of the base
20330 constructor (__base_ctor) which is being generated via early
20331 debug of reachable functions.
20332
20333 Even though we have a cached version of the declaration (1),
20334 we will create a DW_AT_specification of the declaration DIE
20335 in (1).
20336
20337 3. Once for the __base_ctor itself, but this time, we generate
20338 a DW_AT_abstract_origin version of the DW_AT_specification in
20339 (2).
20340
20341 Late debug via rest_of_handle_final
20342 -----------------------------------
20343
20344 4. One final time for the __base_ctor (which will have a cached
20345 DIE with DW_AT_abstract_origin created in (3)). This time,
20346 we will just annotate the location information now
20347 available.
20348 */
20349 int declaration = (current_function_decl != decl
20350 || class_or_namespace_scope_p (context_die));
20351
20352 premark_used_types (DECL_STRUCT_FUNCTION (decl));
20353
20354 /* Now that the C++ front end lazily declares artificial member fns, we
20355 might need to retrofit the declaration into its class. */
20356 if (!declaration && !origin && !old_die
20357 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
20358 && !class_or_namespace_scope_p (context_die)
20359 && debug_info_level > DINFO_LEVEL_TERSE)
20360 old_die = force_decl_die (decl);
20361
20362 /* An inlined instance, tag a new DIE with DW_AT_abstract_origin. */
20363 if (origin != NULL)
20364 {
20365 gcc_assert (!declaration || local_scope_p (context_die));
20366
20367 /* Fixup die_parent for the abstract instance of a nested
20368 inline function. */
20369 if (old_die && old_die->die_parent == NULL)
20370 add_child_die (context_die, old_die);
20371
20372 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
20373 {
20374 /* If we have a DW_AT_abstract_origin we have a working
20375 cached version. */
20376 subr_die = old_die;
20377 }
20378 else
20379 {
20380 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
20381 add_abstract_origin_attribute (subr_die, origin);
20382 /* This is where the actual code for a cloned function is.
20383 Let's emit a linkage name attribute for it. This helps
20384 debuggers to, e.g., set breakpoints in
20385 constructors/destructors when the user asks "break
20386 K::K". */
20387 add_linkage_name (subr_die, decl);
20388 }
20389 }
20390 /* A cached copy, possibly from early dwarf generation. Reuse as
20391 much as possible. */
20392 else if (old_die)
20393 {
20394 /* A declaration that has been previously dumped needs no
20395 additional information. */
20396 if (declaration)
20397 return;
20398
20399 if (!get_AT_flag (old_die, DW_AT_declaration)
20400 /* We can have a normal definition following an inline one in the
20401 case of redefinition of GNU C extern inlines.
20402 It seems reasonable to use AT_specification in this case. */
20403 && !get_AT (old_die, DW_AT_inline))
20404 {
20405 /* Detect and ignore this case, where we are trying to output
20406 something we have already output. */
20407 if (get_AT (old_die, DW_AT_low_pc)
20408 || get_AT (old_die, DW_AT_ranges))
20409 return;
20410
20411 /* If we have no location information, this must be a
20412 partially generated DIE from early dwarf generation.
20413 Fall through and generate it. */
20414 }
20415
20416 /* If the definition comes from the same place as the declaration,
20417 maybe use the old DIE. We always want the DIE for this function
20418 that has the *_pc attributes to be under comp_unit_die so the
20419 debugger can find it. We also need to do this for abstract
20420 instances of inlines, since the spec requires the out-of-line copy
20421 to have the same parent. For local class methods, this doesn't
20422 apply; we just use the old DIE. */
20423 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
20424 struct dwarf_file_data * file_index = lookup_filename (s.file);
20425 if ((is_cu_die (old_die->die_parent)
20426 /* This condition fixes the inconsistency/ICE with the
20427 following Fortran test (or some derivative thereof) while
20428 building libgfortran:
20429
20430 module some_m
20431 contains
20432 logical function funky (FLAG)
20433 funky = .true.
20434 end function
20435 end module
20436 */
20437 || (old_die->die_parent
20438 && old_die->die_parent->die_tag == DW_TAG_module)
20439 || context_die == NULL)
20440 && (DECL_ARTIFICIAL (decl)
20441 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
20442 && (get_AT_unsigned (old_die, DW_AT_decl_line)
20443 == (unsigned) s.line))))
20444 {
20445 subr_die = old_die;
20446
20447 /* Clear out the declaration attribute, but leave the
20448 parameters so they can be augmented with location
20449 information later. Unless this was a declaration, in
20450 which case, wipe out the nameless parameters and recreate
20451 them further down. */
20452 if (remove_AT (subr_die, DW_AT_declaration))
20453 {
20454
20455 remove_AT (subr_die, DW_AT_object_pointer);
20456 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
20457 }
20458 }
20459 /* Make a specification pointing to the previously built
20460 declaration. */
20461 else
20462 {
20463 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
20464 add_AT_specification (subr_die, old_die);
20465 add_pubname (decl, subr_die);
20466 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
20467 add_AT_file (subr_die, DW_AT_decl_file, file_index);
20468 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
20469 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
20470
20471 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
20472 emit the real type on the definition die. */
20473 if (is_cxx() && debug_info_level > DINFO_LEVEL_TERSE)
20474 {
20475 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
20476 if (die == auto_die || die == decltype_auto_die)
20477 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
20478 TYPE_UNQUALIFIED, false, context_die);
20479 }
20480 }
20481 }
20482 /* Create a fresh DIE for anything else. */
20483 else
20484 {
20485 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
20486
20487 if (TREE_PUBLIC (decl))
20488 add_AT_flag (subr_die, DW_AT_external, 1);
20489
20490 add_name_and_src_coords_attributes (subr_die, decl);
20491 add_pubname (decl, subr_die);
20492 if (debug_info_level > DINFO_LEVEL_TERSE)
20493 {
20494 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
20495 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
20496 TYPE_UNQUALIFIED, false, context_die);
20497 }
20498
20499 add_pure_or_virtual_attribute (subr_die, decl);
20500 if (DECL_ARTIFICIAL (decl))
20501 add_AT_flag (subr_die, DW_AT_artificial, 1);
20502
20503 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
20504 add_AT_flag (subr_die, DW_AT_noreturn, 1);
20505
20506 add_accessibility_attribute (subr_die, decl);
20507 }
20508
20509 /* Unless we have an existing non-declaration DIE, equate the new
20510 DIE. */
20511 if (!old_die || is_declaration_die (old_die))
20512 equate_decl_number_to_die (decl, subr_die);
20513
20514 if (declaration)
20515 {
20516 if (!old_die || !get_AT (old_die, DW_AT_inline))
20517 {
20518 add_AT_flag (subr_die, DW_AT_declaration, 1);
20519
20520 /* If this is an explicit function declaration then generate
20521 a DW_AT_explicit attribute. */
20522 if (lang_hooks.decls.function_decl_explicit_p (decl)
20523 && (dwarf_version >= 3 || !dwarf_strict))
20524 add_AT_flag (subr_die, DW_AT_explicit, 1);
20525
20526 /* If this is a C++11 deleted special function member then generate
20527 a DW_AT_GNU_deleted attribute. */
20528 if (lang_hooks.decls.function_decl_deleted_p (decl)
20529 && (! dwarf_strict))
20530 add_AT_flag (subr_die, DW_AT_GNU_deleted, 1);
20531 }
20532 }
20533 /* Tag abstract instances with DW_AT_inline. */
20534 else if (DECL_ABSTRACT_P (decl))
20535 {
20536 if (DECL_DECLARED_INLINE_P (decl))
20537 {
20538 if (cgraph_function_possibly_inlined_p (decl))
20539 add_AT_unsigned (subr_die, DW_AT_inline, DW_INL_declared_inlined);
20540 else
20541 add_AT_unsigned (subr_die, DW_AT_inline, DW_INL_declared_not_inlined);
20542 }
20543 else
20544 {
20545 if (cgraph_function_possibly_inlined_p (decl))
20546 add_AT_unsigned (subr_die, DW_AT_inline, DW_INL_inlined);
20547 else
20548 add_AT_unsigned (subr_die, DW_AT_inline, DW_INL_not_inlined);
20549 }
20550
20551 if (DECL_DECLARED_INLINE_P (decl)
20552 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
20553 add_AT_flag (subr_die, DW_AT_artificial, 1);
20554 }
20555 /* For non DECL_EXTERNALs, if range information is available, fill
20556 the DIE with it. */
20557 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
20558 {
20559 HOST_WIDE_INT cfa_fb_offset;
20560
20561 struct function *fun = DECL_STRUCT_FUNCTION (decl);
20562
20563 if (!flag_reorder_blocks_and_partition)
20564 {
20565 dw_fde_ref fde = fun->fde;
20566 if (fde->dw_fde_begin)
20567 {
20568 /* We have already generated the labels. */
20569 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
20570 fde->dw_fde_end, false);
20571 }
20572 else
20573 {
20574 /* Create start/end labels and add the range. */
20575 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
20576 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
20577 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
20578 current_function_funcdef_no);
20579 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
20580 current_function_funcdef_no);
20581 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
20582 false);
20583 }
20584
20585 #if VMS_DEBUGGING_INFO
20586 /* HP OpenVMS Industry Standard 64: DWARF Extensions
20587 Section 2.3 Prologue and Epilogue Attributes:
20588 When a breakpoint is set on entry to a function, it is generally
20589 desirable for execution to be suspended, not on the very first
20590 instruction of the function, but rather at a point after the
20591 function's frame has been set up, after any language defined local
20592 declaration processing has been completed, and before execution of
20593 the first statement of the function begins. Debuggers generally
20594 cannot properly determine where this point is. Similarly for a
20595 breakpoint set on exit from a function. The prologue and epilogue
20596 attributes allow a compiler to communicate the location(s) to use. */
20597
20598 {
20599 if (fde->dw_fde_vms_end_prologue)
20600 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
20601 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
20602
20603 if (fde->dw_fde_vms_begin_epilogue)
20604 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
20605 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
20606 }
20607 #endif
20608
20609 }
20610 else
20611 {
20612 /* Generate pubnames entries for the split function code ranges. */
20613 dw_fde_ref fde = fun->fde;
20614
20615 if (fde->dw_fde_second_begin)
20616 {
20617 if (dwarf_version >= 3 || !dwarf_strict)
20618 {
20619 /* We should use ranges for non-contiguous code section
20620 addresses. Use the actual code range for the initial
20621 section, since the HOT/COLD labels might precede an
20622 alignment offset. */
20623 bool range_list_added = false;
20624 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
20625 fde->dw_fde_end, &range_list_added,
20626 false);
20627 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
20628 fde->dw_fde_second_end,
20629 &range_list_added, false);
20630 if (range_list_added)
20631 add_ranges (NULL);
20632 }
20633 else
20634 {
20635 /* There is no real support in DW2 for this, so we make
20636 a work-around. First, emit the pub name for the segment
20637 containing the function label. Then make and emit a
20638 simplified subprogram DIE for the second segment with the
20639 name prefixed by __second_sect_of_. We use the same
20640 linkage name for the second die so that gdb will find both
20641 sections when given "b foo". */
20642 const char *name = NULL;
20643 tree decl_name = DECL_NAME (decl);
20644 dw_die_ref seg_die;
20645
20646 /* Do the 'primary' section. */
20647 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
20648 fde->dw_fde_end, false);
20649
20650 /* Build a minimal DIE for the secondary section. */
20651 seg_die = new_die (DW_TAG_subprogram,
20652 subr_die->die_parent, decl);
20653
20654 if (TREE_PUBLIC (decl))
20655 add_AT_flag (seg_die, DW_AT_external, 1);
20656
20657 if (decl_name != NULL
20658 && IDENTIFIER_POINTER (decl_name) != NULL)
20659 {
20660 name = dwarf2_name (decl, 1);
20661 if (! DECL_ARTIFICIAL (decl))
20662 add_src_coords_attributes (seg_die, decl);
20663
20664 add_linkage_name (seg_die, decl);
20665 }
20666 gcc_assert (name != NULL);
20667 add_pure_or_virtual_attribute (seg_die, decl);
20668 if (DECL_ARTIFICIAL (decl))
20669 add_AT_flag (seg_die, DW_AT_artificial, 1);
20670
20671 name = concat ("__second_sect_of_", name, NULL);
20672 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
20673 fde->dw_fde_second_end, false);
20674 add_name_attribute (seg_die, name);
20675 if (want_pubnames ())
20676 add_pubname_string (name, seg_die);
20677 }
20678 }
20679 else
20680 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
20681 false);
20682 }
20683
20684 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
20685
20686 /* We define the "frame base" as the function's CFA. This is more
20687 convenient for several reasons: (1) It's stable across the prologue
20688 and epilogue, which makes it better than just a frame pointer,
20689 (2) With dwarf3, there exists a one-byte encoding that allows us
20690 to reference the .debug_frame data by proxy, but failing that,
20691 (3) We can at least reuse the code inspection and interpretation
20692 code that determines the CFA position at various points in the
20693 function. */
20694 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
20695 {
20696 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
20697 add_AT_loc (subr_die, DW_AT_frame_base, op);
20698 }
20699 else
20700 {
20701 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
20702 if (list->dw_loc_next)
20703 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
20704 else
20705 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
20706 }
20707
20708 /* Compute a displacement from the "steady-state frame pointer" to
20709 the CFA. The former is what all stack slots and argument slots
20710 will reference in the rtl; the latter is what we've told the
20711 debugger about. We'll need to adjust all frame_base references
20712 by this displacement. */
20713 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
20714
20715 if (fun->static_chain_decl)
20716 {
20717 /* DWARF requires here a location expression that computes the
20718 address of the enclosing subprogram's frame base. The machinery
20719 in tree-nested.c is supposed to store this specific address in the
20720 last field of the FRAME record. */
20721 const tree frame_type
20722 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
20723 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
20724
20725 tree fb_expr
20726 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
20727 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
20728 fb_expr, fb_decl, NULL_TREE);
20729
20730 add_AT_location_description (subr_die, DW_AT_static_link,
20731 loc_list_from_tree (fb_expr, 0, NULL));
20732 }
20733 }
20734
20735 /* Generate child DIEs for template parameters. */
20736 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
20737 gen_generic_params_dies (decl);
20738
20739 /* Now output descriptions of the arguments for this function. This gets
20740 (unnecessarily?) complex because the DECL_ARGUMENTS list
20741 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
20742 `...' at the end of the formal parameter list. In order to find out if
20743 there was a trailing ellipsis or not, we must instead look at the type
20744 associated with the FUNCTION_DECL. This will be a node of type
20745 FUNCTION_TYPE. If the chain of type nodes hanging off of this
20746 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
20747 an ellipsis at the end. */
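/* A short sketch, added for clarity (not part of the original comment):

       int f (int a, ...);   TYPE_ARG_TYPES: int          (variadic)
       int g (int a);        TYPE_ARG_TYPES: int, void    (not variadic)

   i.e. the argument-type chain of the prototyped, non-variadic g is
   terminated by void_type_node, while the chain of f is not.  */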
20748
20749 /* In the case where we are describing a mere function declaration, all we
20750 need to do here (and all we *can* do here) is to describe the *types* of
20751 its formal parameters. */
20752 if (debug_info_level <= DINFO_LEVEL_TERSE)
20753 ;
20754 else if (declaration)
20755 gen_formal_types_die (decl, subr_die);
20756 else
20757 {
20758 /* Generate DIEs to represent all known formal parameters. */
20759 tree parm = DECL_ARGUMENTS (decl);
20760 tree generic_decl = early_dwarf
20761 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
20762 tree generic_decl_parm = generic_decl
20763 ? DECL_ARGUMENTS (generic_decl)
20764 : NULL;
20765
20766 /* Now we want to walk the list of parameters of the function and
20767 emit their relevant DIEs.
20768
20769 We consider the case of DECL being an instance of a generic function
20770 as well as it being a normal function.
20771
20772 If DECL is an instance of a generic function we walk the
20773 parameters of the generic function declaration _and_ the parameters of
20774 DECL itself. This is useful because we want to emit specific DIEs for
20775 function parameter packs and those are declared as part of the
20776 generic function declaration. In that particular case,
20777 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
20778 That DIE has child DIEs representing the set of arguments
20779 of the pack. Note that the set of pack arguments can be empty.
20780 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
20781 child DIEs.
20782
20783 Otherwise, we just consider the parameters of DECL. */
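/* A hedged sketch of the generic-function case (illustrative names):

     template <typename... T> void f (T... args) {}
     template void f<int, char> (int, char);

   For the instantiation, the pack `args' yields a
   DW_TAG_GNU_formal_parameter_pack DIE whose children are the
   DW_TAG_formal_parameter DIEs for the int and char arguments; for an
   empty pack that DIE simply has no children.  */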
20784 while (generic_decl_parm || parm)
20785 {
20786 if (generic_decl_parm
20787 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
20788 gen_formal_parameter_pack_die (generic_decl_parm,
20789 parm, subr_die,
20790 &parm);
20791 else if (parm && !POINTER_BOUNDS_P (parm))
20792 {
20793 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
20794
20795 if (parm == DECL_ARGUMENTS (decl)
20796 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
20797 && parm_die
20798 && (dwarf_version >= 3 || !dwarf_strict))
20799 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
20800
20801 parm = DECL_CHAIN (parm);
20802 }
20803 else if (parm)
20804 parm = DECL_CHAIN (parm);
20805
20806 if (generic_decl_parm)
20807 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
20808 }
20809
20810 /* Decide whether we need an unspecified_parameters DIE at the end.
20811 There are two cases to do this for: 1) the ANSI `...' declaration,
20812 which is detectable when the end of the arg list is not a
20813 void_type_node; 2) an unprototyped function declaration (not a
20814 definition), which just means that we have no info about the
20815 parameters at all. */
20816 if (early_dwarf)
20817 {
20818 if (prototype_p (TREE_TYPE (decl)))
20819 {
20820 /* This is the prototyped case; check for a trailing `...'. */
20821 if (stdarg_p (TREE_TYPE (decl)))
20822 gen_unspecified_parameters_die (decl, subr_die);
20823 }
20824 else if (DECL_INITIAL (decl) == NULL_TREE)
20825 gen_unspecified_parameters_die (decl, subr_die);
20826 }
20827 }
20828
20829 if (subr_die != old_die)
20830 /* Add the calling convention attribute if requested. */
20831 add_calling_convention_attribute (subr_die, decl);
20832
20833 /* Output Dwarf info for all of the stuff within the body of the function
20834 (if it has one - it may be just a declaration).
20835
20836 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
20837 a function. This BLOCK actually represents the outermost binding contour
20838 for the function, i.e. the contour in which the function's formal
20839 parameters and labels get declared. Curiously, it appears that the front
20840 end doesn't actually put the PARM_DECL nodes for the current function onto
20841 the BLOCK_VARS list for this outer scope; instead, they are strung off of
20842 the DECL_ARGUMENTS list for the function.
20843
20844 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
20845 the LABEL_DECL nodes for the function however, and we output DWARF info
20846 for those in decls_for_scope. Just within the `outer_scope' there will be
20847 a BLOCK node representing the function's outermost pair of curly braces,
20848 and any blocks used for the base and member initializers of a C++
20849 constructor function. */
20850 tree outer_scope = DECL_INITIAL (decl);
20851 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
20852 {
20853 int call_site_note_count = 0;
20854 int tail_call_site_note_count = 0;
20855
20856 /* Emit a DW_TAG_variable DIE for a named return value. */
20857 if (DECL_NAME (DECL_RESULT (decl)))
20858 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
20859
20860 /* The first time through decls_for_scope we will generate the
20861 DIEs for the locals. The second time, we fill in the
20862 location info. */
20863 decls_for_scope (outer_scope, subr_die);
20864
20865 if (call_arg_locations && !dwarf_strict)
20866 {
20867 struct call_arg_loc_node *ca_loc;
20868 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
20869 {
20870 dw_die_ref die = NULL;
20871 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
20872 rtx arg, next_arg;
20873
20874 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
20875 ? NOTE_VAR_LOCATION (ca_loc->call_arg_loc_note)
20876 : NULL_RTX);
20877 arg; arg = next_arg)
20878 {
20879 dw_loc_descr_ref reg, val;
20880 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
20881 dw_die_ref cdie, tdie = NULL;
20882
20883 next_arg = XEXP (arg, 1);
20884 if (REG_P (XEXP (XEXP (arg, 0), 0))
20885 && next_arg
20886 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
20887 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
20888 && REGNO (XEXP (XEXP (arg, 0), 0))
20889 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
20890 next_arg = XEXP (next_arg, 1);
20891 if (mode == VOIDmode)
20892 {
20893 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
20894 if (mode == VOIDmode)
20895 mode = GET_MODE (XEXP (arg, 0));
20896 }
20897 if (mode == VOIDmode || mode == BLKmode)
20898 continue;
20899 /* Get dynamic information about the call target only if we
20900 have no static information: we cannot generate both
20901 DW_AT_abstract_origin and DW_AT_GNU_call_site_target
20902 attributes. */
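/* Illustrative example (hypothetical code): for a direct call such as
     foo (x);
   the callee is known statically, so the DW_TAG_GNU_call_site DIE gets a
   DW_AT_abstract_origin pointing at foo's DIE. For an indirect call such as
     (*fp) (x);
   there is no static callee, so instead we describe where the target address
   lived at the call via DW_AT_GNU_call_site_target (or
   DW_AT_GNU_call_site_target_clobbered if that location is clobbered by the
   call itself).  */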
20903 if (ca_loc->symbol_ref == NULL_RTX)
20904 {
20905 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
20906 {
20907 tloc = XEXP (XEXP (arg, 0), 1);
20908 continue;
20909 }
20910 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
20911 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
20912 {
20913 tlocc = XEXP (XEXP (arg, 0), 1);
20914 continue;
20915 }
20916 }
20917 reg = NULL;
20918 if (REG_P (XEXP (XEXP (arg, 0), 0)))
20919 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
20920 VAR_INIT_STATUS_INITIALIZED);
20921 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
20922 {
20923 rtx mem = XEXP (XEXP (arg, 0), 0);
20924 reg = mem_loc_descriptor (XEXP (mem, 0),
20925 get_address_mode (mem),
20926 GET_MODE (mem),
20927 VAR_INIT_STATUS_INITIALIZED);
20928 }
20929 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
20930 == DEBUG_PARAMETER_REF)
20931 {
20932 tree tdecl
20933 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
20934 tdie = lookup_decl_die (tdecl);
20935 if (tdie == NULL)
20936 continue;
20937 }
20938 else
20939 continue;
20940 if (reg == NULL
20941 && GET_CODE (XEXP (XEXP (arg, 0), 0))
20942 != DEBUG_PARAMETER_REF)
20943 continue;
20944 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
20945 VOIDmode,
20946 VAR_INIT_STATUS_INITIALIZED);
20947 if (val == NULL)
20948 continue;
20949 if (die == NULL)
20950 die = gen_call_site_die (decl, subr_die, ca_loc);
20951 cdie = new_die (DW_TAG_GNU_call_site_parameter, die,
20952 NULL_TREE);
20953 if (reg != NULL)
20954 add_AT_loc (cdie, DW_AT_location, reg);
20955 else if (tdie != NULL)
20956 add_AT_die_ref (cdie, DW_AT_abstract_origin, tdie);
20957 add_AT_loc (cdie, DW_AT_GNU_call_site_value, val);
20958 if (next_arg != XEXP (arg, 1))
20959 {
20960 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
20961 if (mode == VOIDmode)
20962 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
20963 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
20964 0), 1),
20965 mode, VOIDmode,
20966 VAR_INIT_STATUS_INITIALIZED);
20967 if (val != NULL)
20968 add_AT_loc (cdie, DW_AT_GNU_call_site_data_value, val);
20969 }
20970 }
20971 if (die == NULL
20972 && (ca_loc->symbol_ref || tloc))
20973 die = gen_call_site_die (decl, subr_die, ca_loc);
20974 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
20975 {
20976 dw_loc_descr_ref tval = NULL;
20977
20978 if (tloc != NULL_RTX)
20979 tval = mem_loc_descriptor (tloc,
20980 GET_MODE (tloc) == VOIDmode
20981 ? Pmode : GET_MODE (tloc),
20982 VOIDmode,
20983 VAR_INIT_STATUS_INITIALIZED);
20984 if (tval)
20985 add_AT_loc (die, DW_AT_GNU_call_site_target, tval);
20986 else if (tlocc != NULL_RTX)
20987 {
20988 tval = mem_loc_descriptor (tlocc,
20989 GET_MODE (tlocc) == VOIDmode
20990 ? Pmode : GET_MODE (tlocc),
20991 VOIDmode,
20992 VAR_INIT_STATUS_INITIALIZED);
20993 if (tval)
20994 add_AT_loc (die, DW_AT_GNU_call_site_target_clobbered,
20995 tval);
20996 }
20997 }
20998 if (die != NULL)
20999 {
21000 call_site_note_count++;
21001 if (ca_loc->tail_call_p)
21002 tail_call_site_note_count++;
21003 }
21004 }
21005 }
21006 call_arg_locations = NULL;
21007 call_arg_loc_last = NULL;
21008 if (tail_call_site_count >= 0
21009 && tail_call_site_count == tail_call_site_note_count
21010 && !dwarf_strict)
21011 {
21012 if (call_site_count >= 0
21013 && call_site_count == call_site_note_count)
21014 add_AT_flag (subr_die, DW_AT_GNU_all_call_sites, 1);
21015 else
21016 add_AT_flag (subr_die, DW_AT_GNU_all_tail_call_sites, 1);
21017 }
21018 call_site_count = -1;
21019 tail_call_site_count = -1;
21020 }
21021 }
21022
21023 /* Returns a hash value for X (which really is a die_struct). */
21024
21025 hashval_t
21026 block_die_hasher::hash (die_struct *d)
21027 {
21028 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
21029 }
21030
21031 /* Return nonzero if the decl_id and die_parent of die_struct X are the same
21032 as the decl_id and die_parent of die_struct Y. */
21033
21034 bool
21035 block_die_hasher::equal (die_struct *x, die_struct *y)
21036 {
21037 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
21038 }
21039
21040 /* Return TRUE if DECL, which may have been previously generated as
21041 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
21042 true if decl (or its origin) is either an extern declaration or a
21043 class/namespace scoped declaration.
21044
21045 The declare_in_namespace support causes us to get two DIEs for one
21046 variable, both of which are declarations. We want to avoid
21047 considering one to be a specification, so we must test for
21048 DECLARATION and DW_AT_declaration. */
21049 static inline bool
21050 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
21051 {
21052 return (old_die && TREE_STATIC (decl) && !declaration
21053 && get_AT_flag (old_die, DW_AT_declaration) == 1);
21054 }
21055
21056 /* Return true if DECL is a local static. */
21057
21058 static inline bool
21059 local_function_static (tree decl)
21060 {
21061 gcc_assert (TREE_CODE (decl) == VAR_DECL);
21062 return TREE_STATIC (decl)
21063 && DECL_CONTEXT (decl)
21064 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
21065 }
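/* A minimal example of such a decl (illustrative names):

     int counter (void)
     {
       static int n;   // TREE_STATIC, and DECL_CONTEXT is the FUNCTION_DECL
       return ++n;
     }

   Here `n' is a local function static in the sense tested above.  */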
21066
21067 /* Generate a DIE to represent a declared data object.
21068 Either DECL or ORIGIN must be non-null. */
21069
21070 static void
21071 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
21072 {
21073 HOST_WIDE_INT off = 0;
21074 tree com_decl;
21075 tree decl_or_origin = decl ? decl : origin;
21076 tree ultimate_origin;
21077 dw_die_ref var_die;
21078 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
21079 dw_die_ref origin_die = NULL;
21080 bool declaration = (DECL_EXTERNAL (decl_or_origin)
21081 || class_or_namespace_scope_p (context_die));
21082 bool specialization_p = false;
21083
21084 ultimate_origin = decl_ultimate_origin (decl_or_origin);
21085 if (decl || ultimate_origin)
21086 origin = ultimate_origin;
21087 com_decl = fortran_common (decl_or_origin, &off);
21088
21089 /* A symbol in a Fortran common block gets emitted as a child of the common
21090 block DIE, in the form of a data member. */
21091 if (com_decl)
21092 {
21093 dw_die_ref com_die;
21094 dw_loc_list_ref loc;
21095 die_node com_die_arg;
21096
21097 var_die = lookup_decl_die (decl_or_origin);
21098 if (var_die)
21099 {
21100 if (get_AT (var_die, DW_AT_location) == NULL)
21101 {
21102 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
21103 if (loc)
21104 {
21105 if (off)
21106 {
21107 /* Optimize the common case. */
21108 if (single_element_loc_list_p (loc)
21109 && loc->expr->dw_loc_opc == DW_OP_addr
21110 && loc->expr->dw_loc_next == NULL
21111 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
21112 == SYMBOL_REF)
21113 {
21114 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
21115 loc->expr->dw_loc_oprnd1.v.val_addr
21116 = plus_constant (GET_MODE (x), x, off);
21117 }
21118 else
21119 loc_list_plus_const (loc, off);
21120 }
21121 add_AT_location_description (var_die, DW_AT_location, loc);
21122 remove_AT (var_die, DW_AT_declaration);
21123 }
21124 }
21125 return;
21126 }
21127
21128 if (common_block_die_table == NULL)
21129 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
21130
21131 com_die_arg.decl_id = DECL_UID (com_decl);
21132 com_die_arg.die_parent = context_die;
21133 com_die = common_block_die_table->find (&com_die_arg);
21134 loc = loc_list_from_tree (com_decl, 2, NULL);
21135 if (com_die == NULL)
21136 {
21137 const char *cnam
21138 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
21139 die_node **slot;
21140
21141 com_die = new_die (DW_TAG_common_block, context_die, decl);
21142 add_name_and_src_coords_attributes (com_die, com_decl);
21143 if (loc)
21144 {
21145 add_AT_location_description (com_die, DW_AT_location, loc);
21146 /* Avoid sharing the same loc descriptor between
21147 DW_TAG_common_block and DW_TAG_variable. */
21148 loc = loc_list_from_tree (com_decl, 2, NULL);
21149 }
21150 else if (DECL_EXTERNAL (decl_or_origin))
21151 add_AT_flag (com_die, DW_AT_declaration, 1);
21152 if (want_pubnames ())
21153 add_pubname_string (cnam, com_die); /* ??? needed? */
21154 com_die->decl_id = DECL_UID (com_decl);
21155 slot = common_block_die_table->find_slot (com_die, INSERT);
21156 *slot = com_die;
21157 }
21158 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
21159 {
21160 add_AT_location_description (com_die, DW_AT_location, loc);
21161 loc = loc_list_from_tree (com_decl, 2, NULL);
21162 remove_AT (com_die, DW_AT_declaration);
21163 }
21164 var_die = new_die (DW_TAG_variable, com_die, decl);
21165 add_name_and_src_coords_attributes (var_die, decl_or_origin);
21166 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
21167 decl_quals (decl_or_origin), false,
21168 context_die);
21169 add_AT_flag (var_die, DW_AT_external, 1);
21170 if (loc)
21171 {
21172 if (off)
21173 {
21174 /* Optimize the common case. */
21175 if (single_element_loc_list_p (loc)
21176 && loc->expr->dw_loc_opc == DW_OP_addr
21177 && loc->expr->dw_loc_next == NULL
21178 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
21179 {
21180 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
21181 loc->expr->dw_loc_oprnd1.v.val_addr
21182 = plus_constant (GET_MODE (x), x, off);
21183 }
21184 else
21185 loc_list_plus_const (loc, off);
21186 }
21187 add_AT_location_description (var_die, DW_AT_location, loc);
21188 }
21189 else if (DECL_EXTERNAL (decl_or_origin))
21190 add_AT_flag (var_die, DW_AT_declaration, 1);
21191 if (decl)
21192 equate_decl_number_to_die (decl, var_die);
21193 return;
21194 }
21195
21196 if (old_die)
21197 {
21198 if (declaration)
21199 {
21200 /* A declaration that has been previously dumped needs no
21201 further annotations, since it doesn't need location info on
21202 the second pass. */
21203 return;
21204 }
21205 else if (decl_will_get_specification_p (old_die, decl, declaration)
21206 && !get_AT (old_die, DW_AT_specification))
21207 {
21208 /* Fall through so we can make a new variable DIE along with a
21209 DW_AT_specification. */
21210 }
21211 else if (origin && old_die->die_parent != context_die)
21212 {
21213 /* If we will be creating an inlined instance, we need a
21214 new DIE that will get annotated with
21215 DW_AT_abstract_origin. Clear things so we can get a
21216 new DIE. */
21217 gcc_assert (!DECL_ABSTRACT_P (decl));
21218 old_die = NULL;
21219 }
21220 else
21221 {
21222 /* If a DIE was dumped early, it still needs location info.
21223 Skip to where we fill the location bits. */
21224 var_die = old_die;
21225 goto gen_variable_die_location;
21226 }
21227 }
21228
21229 /* For static data members, the declaration in the class is supposed
21230 to have DW_TAG_member tag; the specification should still be
21231 DW_TAG_variable referencing the DW_TAG_member DIE. */
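/* For example (a sketch with illustrative names):

     struct S { static int x; };   // in-class declaration: DW_TAG_member
                                   // with DW_AT_declaration set
     int S::x;                     // definition: DW_TAG_variable whose
                                   // DW_AT_specification refers to the
                                   // DW_TAG_member DIE above.  */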
21232 if (declaration && class_scope_p (context_die))
21233 var_die = new_die (DW_TAG_member, context_die, decl);
21234 else
21235 var_die = new_die (DW_TAG_variable, context_die, decl);
21236
21237 if (origin != NULL)
21238 origin_die = add_abstract_origin_attribute (var_die, origin);
21239
21240 /* Loop unrolling can create multiple blocks that refer to the same
21241 static variable, so we must test for the DW_AT_declaration flag.
21242
21243 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
21244 copy decls and set the DECL_ABSTRACT_P flag on them instead of
21245 sharing them.
21246
21247 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
21248 else if (decl_will_get_specification_p (old_die, decl, declaration))
21249 {
21250 /* This is a definition of a C++ class level static. */
21251 add_AT_specification (var_die, old_die);
21252 specialization_p = true;
21253 if (DECL_NAME (decl))
21254 {
21255 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
21256 struct dwarf_file_data * file_index = lookup_filename (s.file);
21257
21258 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
21259 add_AT_file (var_die, DW_AT_decl_file, file_index);
21260
21261 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
21262 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
21263
21264 if (old_die->die_tag == DW_TAG_member)
21265 add_linkage_name (var_die, decl);
21266 }
21267 }
21268 else
21269 add_name_and_src_coords_attributes (var_die, decl);
21270
21271 if ((origin == NULL && !specialization_p)
21272 || (origin != NULL
21273 && !DECL_ABSTRACT_P (decl_or_origin)
21274 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
21275 decl_function_context
21276 (decl_or_origin))))
21277 {
21278 tree type = TREE_TYPE (decl_or_origin);
21279
21280 if (decl_by_reference_p (decl_or_origin))
21281 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
21282 context_die);
21283 else
21284 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
21285 context_die);
21286 }
21287
21288 if (origin == NULL && !specialization_p)
21289 {
21290 if (TREE_PUBLIC (decl))
21291 add_AT_flag (var_die, DW_AT_external, 1);
21292
21293 if (DECL_ARTIFICIAL (decl))
21294 add_AT_flag (var_die, DW_AT_artificial, 1);
21295
21296 add_accessibility_attribute (var_die, decl);
21297 }
21298
21299 if (declaration)
21300 add_AT_flag (var_die, DW_AT_declaration, 1);
21301
21302 if (decl && (DECL_ABSTRACT_P (decl)
21303 || !old_die || is_declaration_die (old_die)))
21304 equate_decl_number_to_die (decl, var_die);
21305
21306 gen_variable_die_location:
21307 if (! declaration
21308 && (! DECL_ABSTRACT_P (decl_or_origin)
21309 /* Local static vars are shared between all clones/inlines,
21310 so emit DW_AT_location on the abstract DIE if DECL_RTL is
21311 already set. */
21312 || (TREE_CODE (decl_or_origin) == VAR_DECL
21313 && TREE_STATIC (decl_or_origin)
21314 && DECL_RTL_SET_P (decl_or_origin)))
21315 /* When abstract origin already has DW_AT_location attribute, no need
21316 to add it again. */
21317 && (origin_die == NULL || get_AT (origin_die, DW_AT_location) == NULL))
21318 {
21319 if (early_dwarf)
21320 add_pubname (decl_or_origin, var_die);
21321 else
21322 add_location_or_const_value_attribute (var_die, decl_or_origin,
21323 decl == NULL);
21324 }
21325 else
21326 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
21327 }
21328
21329 /* Generate a DIE to represent a named constant. */
21330
21331 static void
21332 gen_const_die (tree decl, dw_die_ref context_die)
21333 {
21334 dw_die_ref const_die;
21335 tree type = TREE_TYPE (decl);
21336
21337 const_die = lookup_decl_die (decl);
21338 if (const_die)
21339 return;
21340
21341 const_die = new_die (DW_TAG_constant, context_die, decl);
21342 equate_decl_number_to_die (decl, const_die);
21343 add_name_and_src_coords_attributes (const_die, decl);
21344 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
21345 if (TREE_PUBLIC (decl))
21346 add_AT_flag (const_die, DW_AT_external, 1);
21347 if (DECL_ARTIFICIAL (decl))
21348 add_AT_flag (const_die, DW_AT_artificial, 1);
21349 tree_add_const_value_attribute_for_decl (const_die, decl);
21350 }
21351
21352 /* Generate a DIE to represent a label identifier. */
21353
21354 static void
21355 gen_label_die (tree decl, dw_die_ref context_die)
21356 {
21357 tree origin = decl_ultimate_origin (decl);
21358 dw_die_ref lbl_die = lookup_decl_die (decl);
21359 rtx insn;
21360 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21361
21362 if (!lbl_die)
21363 {
21364 lbl_die = new_die (DW_TAG_label, context_die, decl);
21365 equate_decl_number_to_die (decl, lbl_die);
21366
21367 if (origin != NULL)
21368 add_abstract_origin_attribute (lbl_die, origin);
21369 else
21370 add_name_and_src_coords_attributes (lbl_die, decl);
21371 }
21372
21373 if (DECL_ABSTRACT_P (decl))
21374 equate_decl_number_to_die (decl, lbl_die);
21375 else
21376 {
21377 insn = DECL_RTL_IF_SET (decl);
21378
21379 /* Deleted labels are programmer specified labels which have been
21380 eliminated because of various optimizations. We still emit them
21381 here so that it is possible to put breakpoints on them. */
21382 if (insn
21383 && (LABEL_P (insn)
21384 || ((NOTE_P (insn)
21385 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
21386 {
21387 /* When optimization is enabled (via -O) some parts of the compiler
21388 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns that
21389 represent source-level labels explicitly declared by the user.
21390 That really shouldn't be happening, so catch it if it ever
21391 does. */
21392 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
21393
21394 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
21395 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
21396 }
21397 else if (insn
21398 && NOTE_P (insn)
21399 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
21400 && CODE_LABEL_NUMBER (insn) != -1)
21401 {
21402 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
21403 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
21404 }
21405 }
21406 }
21407
21408 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
21409 attributes to the DIE for a block STMT, to describe where the inlined
21410 function was called from. This is similar to add_src_coords_attributes. */
21411
21412 static inline void
21413 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
21414 {
21415 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
21416
21417 if (dwarf_version >= 3 || !dwarf_strict)
21418 {
21419 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
21420 add_AT_unsigned (die, DW_AT_call_line, s.line);
21421 }
21422 }
21423
21424
21425 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
21426 Add low_pc and high_pc attributes to the DIE for a block STMT. */
21427
21428 static inline void
21429 add_high_low_attributes (tree stmt, dw_die_ref die)
21430 {
21431 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21432
21433 if (BLOCK_FRAGMENT_CHAIN (stmt)
21434 && (dwarf_version >= 3 || !dwarf_strict))
21435 {
21436 tree chain, superblock = NULL_TREE;
21437 dw_die_ref pdie;
21438 dw_attr_node *attr = NULL;
21439
21440 if (inlined_function_outer_scope_p (stmt))
21441 {
21442 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
21443 BLOCK_NUMBER (stmt));
21444 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21445 }
21446
21447 /* Optimize duplicate .debug_ranges lists or even tails of
21448 lists. If this BLOCK has the same ranges as its supercontext,
21449 look up the DW_AT_ranges attribute in the supercontext (and
21450 recursively so), verify that the ranges_table contains the
21451 right values and use it instead of adding a new .debug_ranges entry. */
21452 for (chain = stmt, pdie = die;
21453 BLOCK_SAME_RANGE (chain);
21454 chain = BLOCK_SUPERCONTEXT (chain))
21455 {
21456 dw_attr_node *new_attr;
21457
21458 pdie = pdie->die_parent;
21459 if (pdie == NULL)
21460 break;
21461 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
21462 break;
21463 new_attr = get_AT (pdie, DW_AT_ranges);
21464 if (new_attr == NULL
21465 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
21466 break;
21467 attr = new_attr;
21468 superblock = BLOCK_SUPERCONTEXT (chain);
21469 }
21470 if (attr != NULL
21471 && (ranges_table[attr->dw_attr_val.v.val_offset
21472 / 2 / DWARF2_ADDR_SIZE].num
21473 == BLOCK_NUMBER (superblock))
21474 && BLOCK_FRAGMENT_CHAIN (superblock))
21475 {
21476 unsigned long off = attr->dw_attr_val.v.val_offset
21477 / 2 / DWARF2_ADDR_SIZE;
21478 unsigned long supercnt = 0, thiscnt = 0;
21479 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
21480 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
21481 {
21482 ++supercnt;
21483 gcc_checking_assert (ranges_table[off + supercnt].num
21484 == BLOCK_NUMBER (chain));
21485 }
21486 gcc_checking_assert (ranges_table[off + supercnt + 1].num == 0);
21487 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
21488 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
21489 ++thiscnt;
21490 gcc_assert (supercnt >= thiscnt);
21491 add_AT_range_list (die, DW_AT_ranges,
21492 ((off + supercnt - thiscnt)
21493 * 2 * DWARF2_ADDR_SIZE),
21494 false);
21495 return;
21496 }
21497
21498 add_AT_range_list (die, DW_AT_ranges, add_ranges (stmt), false);
21499
21500 chain = BLOCK_FRAGMENT_CHAIN (stmt);
21501 do
21502 {
21503 add_ranges (chain);
21504 chain = BLOCK_FRAGMENT_CHAIN (chain);
21505 }
21506 while (chain);
21507 add_ranges (NULL);
21508 }
21509 else
21510 {
21511 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
21512 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
21513 BLOCK_NUMBER (stmt));
21514 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
21515 BLOCK_NUMBER (stmt));
21516 add_AT_low_high_pc (die, label, label_high, false);
21517 }
21518 }
21519
21520 /* Generate a DIE for a lexical block. */
21521
21522 static void
21523 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
21524 {
21525 dw_die_ref old_die = BLOCK_DIE (stmt);
21526 dw_die_ref stmt_die = NULL;
21527 if (!old_die)
21528 {
21529 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
21530 BLOCK_DIE (stmt) = stmt_die;
21531 }
21532
21533 if (BLOCK_ABSTRACT (stmt))
21534 {
21535 if (old_die)
21536 {
21537 /* This must have been generated early and it won't even
21538 need location information since it's a DW_AT_inline
21539 function. */
21540 if (flag_checking)
21541 for (dw_die_ref c = context_die; c; c = c->die_parent)
21542 if (c->die_tag == DW_TAG_inlined_subroutine
21543 || c->die_tag == DW_TAG_subprogram)
21544 {
21545 gcc_assert (get_AT (c, DW_AT_inline));
21546 break;
21547 }
21548 return;
21549 }
21550 }
21551 else if (BLOCK_ABSTRACT_ORIGIN (stmt))
21552 {
21553 /* If this is an inlined instance, create a new lexical die for
21554 anything below to attach DW_AT_abstract_origin to. */
21555 if (old_die)
21556 {
21557 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
21558 BLOCK_DIE (stmt) = stmt_die;
21559 old_die = NULL;
21560 }
21561
21562 tree origin = block_ultimate_origin (stmt);
21563 if (origin != NULL_TREE && origin != stmt)
21564 add_abstract_origin_attribute (stmt_die, origin);
21565 }
21566
21567 if (old_die)
21568 stmt_die = old_die;
21569
21570 /* A non-abstract block whose blocks have already been reordered
21571 should have the instruction range for this block. If so, set the
21572 high/low attributes. */
21573 if (!early_dwarf && !BLOCK_ABSTRACT (stmt) && TREE_ASM_WRITTEN (stmt))
21574 {
21575 gcc_assert (stmt_die);
21576 add_high_low_attributes (stmt, stmt_die);
21577 }
21578
21579 decls_for_scope (stmt, stmt_die);
21580 }
21581
21582 /* Generate a DIE for an inlined subprogram. */
21583
21584 static void
21585 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
21586 {
21587 tree decl;
21588
21589 /* The instance of function that is effectively being inlined shall not
21590 be abstract. */
21591 gcc_assert (! BLOCK_ABSTRACT (stmt));
21592
21593 decl = block_ultimate_origin (stmt);
21594
21595 /* Make sure any inlined functions are known to be inlineable. */
21596 gcc_checking_assert (DECL_ABSTRACT_P (decl)
21597 || cgraph_function_possibly_inlined_p (decl));
21598
21599 /* Emit info for the abstract instance first, if we haven't yet. We
21600 must emit this even if the block is abstract, otherwise when we
21601 emit the block below (or elsewhere), we may end up trying to emit
21602 a die whose origin die hasn't been emitted, and crashing. */
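/* A hedged sketch (illustrative code): for

     static inline int sq (int i) { return i * i; }
     int use (int i) { return sq (i); }

   when sq is inlined into use, sq gets an abstract DW_TAG_subprogram DIE
   carrying DW_AT_inline, and the inlined body inside use is described by a
   DW_TAG_inlined_subroutine DIE whose DW_AT_abstract_origin refers to that
   abstract DIE, with DW_AT_call_file/DW_AT_call_line added below (for
   DWARF 3 or non-strict DWARF).  */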
21603 dwarf2out_abstract_function (decl);
21604
21605 if (! BLOCK_ABSTRACT (stmt))
21606 {
21607 dw_die_ref subr_die
21608 = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
21609
21610 if (call_arg_locations)
21611 BLOCK_DIE (stmt) = subr_die;
21612 add_abstract_origin_attribute (subr_die, decl);
21613 if (TREE_ASM_WRITTEN (stmt))
21614 add_high_low_attributes (stmt, subr_die);
21615 add_call_src_coords_attributes (stmt, subr_die);
21616
21617 decls_for_scope (stmt, subr_die);
21618 }
21619 }
21620
21621 /* Generate a DIE for a field in a record or structure. CTX is required: see
21622 the comment for VLR_CONTEXT. */
21623
21624 static void
21625 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
21626 {
21627 dw_die_ref decl_die;
21628
21629 if (TREE_TYPE (decl) == error_mark_node)
21630 return;
21631
21632 decl_die = new_die (DW_TAG_member, context_die, decl);
21633 add_name_and_src_coords_attributes (decl_die, decl);
21634 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
21635 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
21636 context_die);
21637
21638 if (DECL_BIT_FIELD_TYPE (decl))
21639 {
21640 add_byte_size_attribute (decl_die, decl);
21641 add_bit_size_attribute (decl_die, decl);
21642 add_bit_offset_attribute (decl_die, decl, ctx);
21643 }
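/* For instance (an illustrative layout; exact offsets depend on the ABI):

     struct flags { unsigned a : 3; unsigned b : 5; };

   both members have DECL_BIT_FIELD_TYPE set, so their DW_TAG_member DIEs
   get DW_AT_byte_size, DW_AT_bit_size (3 and 5 here) and DW_AT_bit_offset
   in addition to the usual data member location.  */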
21644
21645 /* If we have a variant part offset, then we are supposed to process a member
21646 of a QUAL_UNION_TYPE, which is how we represent variant parts in
21647 trees. */
21648 gcc_assert (ctx->variant_part_offset == NULL_TREE
21649 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
21650 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
21651 add_data_member_location_attribute (decl_die, decl, ctx);
21652
21653 if (DECL_ARTIFICIAL (decl))
21654 add_AT_flag (decl_die, DW_AT_artificial, 1);
21655
21656 add_accessibility_attribute (decl_die, decl);
21657
21658 /* Equate decl number to die, so that we can look up this decl later on. */
21659 equate_decl_number_to_die (decl, decl_die);
21660 }
21661
21662 #if 0
21663 /* Don't generate either pointer_type DIEs or reference_type DIEs here.
21664 Use modified_type_die instead.
21665 We keep this code here just in case these types of DIEs may be needed to
21666 represent certain things in other languages (e.g. Pascal) someday. */
21667
21668 static void
21669 gen_pointer_type_die (tree type, dw_die_ref context_die)
21670 {
21671 dw_die_ref ptr_die
21672 = new_die (DW_TAG_pointer_type, scope_die_for (type, context_die), type);
21673
21674 equate_type_number_to_die (type, ptr_die);
21675 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
21676 context_die);
21677 add_AT_unsigned (ptr_die, DW_AT_byte_size, PTR_SIZE);
21678 }
21679
21680 /* Don't generate either pointer_type DIEs or reference_type DIEs here.
21681 Use modified_type_die instead.
21682 We keep this code here just in case these types of DIEs may be needed to
21683 represent certain things in other languages (e.g. Pascal) someday. */
21684
21685 static void
21686 gen_reference_type_die (tree type, dw_die_ref context_die)
21687 {
21688 dw_die_ref ref_die, scope_die = scope_die_for (type, context_die);
21689
21690 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
21691 ref_die = new_die (DW_TAG_rvalue_reference_type, scope_die, type);
21692 else
21693 ref_die = new_die (DW_TAG_reference_type, scope_die, type);
21694
21695 equate_type_number_to_die (type, ref_die);
21696 add_type_attribute (ref_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
21697 context_die);
21698 add_AT_unsigned (ref_die, DW_AT_byte_size, PTR_SIZE);
21699 }
21700 #endif
21701
21702 /* Generate a DIE for a pointer to a member type. */
21703
21704 static void
21705 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
21706 {
21707 dw_die_ref ptr_die
21708 = new_die (DW_TAG_ptr_to_member_type,
21709 scope_die_for (type, context_die), type);
21710
21711 equate_type_number_to_die (type, ptr_die);
21712 add_AT_die_ref (ptr_die, DW_AT_containing_type,
21713 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
21714 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
21715 context_die);
21716 }
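/* For example (illustrative names):

     struct S { int m; };
     int S::*pm = &S::m;

   the type of `pm' is emitted as a DW_TAG_ptr_to_member_type DIE whose
   DW_AT_containing_type refers to the DIE for S and whose DW_AT_type
   refers to the DIE for int.  */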
21717
21718 static char *producer_string;
21719
21720 /* Return a heap allocated producer string including command line options
21721 if -grecord-gcc-switches. */
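/* For example, with -g -O2 -grecord-gcc-switches the result might look like
   "GNU C++14 <version> -O2 -g"; the exact version string and the set of
   recorded switches are, of course, build- and command-line-dependent.  */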
21722
21723 static char *
21724 gen_producer_string (void)
21725 {
21726 size_t j;
21727 auto_vec<const char *> switches;
21728 const char *language_string = lang_hooks.name;
21729 char *producer, *tail;
21730 const char *p;
21731 size_t len = dwarf_record_gcc_switches ? 0 : 3;
21732 size_t plen = strlen (language_string) + 1 + strlen (version_string);
21733
21734 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
21735 switch (save_decoded_options[j].opt_index)
21736 {
21737 case OPT_o:
21738 case OPT_d:
21739 case OPT_dumpbase:
21740 case OPT_dumpdir:
21741 case OPT_auxbase:
21742 case OPT_auxbase_strip:
21743 case OPT_quiet:
21744 case OPT_version:
21745 case OPT_v:
21746 case OPT_w:
21747 case OPT_L:
21748 case OPT_D:
21749 case OPT_I:
21750 case OPT_U:
21751 case OPT_SPECIAL_unknown:
21752 case OPT_SPECIAL_ignore:
21753 case OPT_SPECIAL_program_name:
21754 case OPT_SPECIAL_input_file:
21755 case OPT_grecord_gcc_switches:
21756 case OPT_gno_record_gcc_switches:
21757 case OPT__output_pch_:
21758 case OPT_fdiagnostics_show_location_:
21759 case OPT_fdiagnostics_show_option:
21760 case OPT_fdiagnostics_show_caret:
21761 case OPT_fdiagnostics_color_:
21762 case OPT_fverbose_asm:
21763 case OPT____:
21764 case OPT__sysroot_:
21765 case OPT_nostdinc:
21766 case OPT_nostdinc__:
21767 case OPT_fpreprocessed:
21768 case OPT_fltrans_output_list_:
21769 case OPT_fresolution_:
21770 case OPT_fdebug_prefix_map_:
21771 /* Ignore these. */
21772 continue;
21773 default:
21774 if (cl_options[save_decoded_options[j].opt_index].flags
21775 & CL_NO_DWARF_RECORD)
21776 continue;
21777 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
21778 == '-');
21779 switch (save_decoded_options[j].canonical_option[0][1])
21780 {
21781 case 'M':
21782 case 'i':
21783 case 'W':
21784 continue;
21785 case 'f':
21786 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
21787 "dump", 4) == 0)
21788 continue;
21789 break;
21790 default:
21791 break;
21792 }
21793 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
21794 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
21795 break;
21796 }
21797
21798 producer = XNEWVEC (char, plen + 1 + len + 1);
21799 tail = producer;
21800 sprintf (tail, "%s %s", language_string, version_string);
21801 tail += plen;
21802
21803 FOR_EACH_VEC_ELT (switches, j, p)
21804 {
21805 len = strlen (p);
21806 *tail = ' ';
21807 memcpy (tail + 1, p, len);
21808 tail += len + 1;
21809 }
21810
21811 *tail = '\0';
21812 return producer;
21813 }
21814
21815 /* Given a C and/or C++ language/version string, return the "highest".
21816 C++ is assumed to be "higher" than C in this case. Used for merging
21817 LTO translation unit languages. */
21818 static const char *
21819 highest_c_language (const char *lang1, const char *lang2)
21820 {
21821 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
21822 return "GNU C++14";
21823 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
21824 return "GNU C++11";
21825 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
21826 return "GNU C++98";
21827
21828 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
21829 return "GNU C11";
21830 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
21831 return "GNU C99";
21832 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
21833 return "GNU C89";
21834
21835 gcc_unreachable ();
21836 }
21837
21838
21839 /* Generate the DIE for the compilation unit. */
21840
21841 static dw_die_ref
21842 gen_compile_unit_die (const char *filename)
21843 {
21844 dw_die_ref die;
21845 const char *language_string = lang_hooks.name;
21846 int language;
21847
21848 die = new_die (DW_TAG_compile_unit, NULL, NULL);
21849
21850 if (filename)
21851 {
21852 add_name_attribute (die, filename);
21853 /* Don't add cwd for <built-in>. */
21854 if (!IS_ABSOLUTE_PATH (filename) && filename[0] != '<')
21855 add_comp_dir_attribute (die);
21856 }
21857
21858 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
21859
21860 /* If our producer is LTO try to figure out a common language to use
21861 from the global list of translation units. */
21862 if (strcmp (language_string, "GNU GIMPLE") == 0)
21863 {
21864 unsigned i;
21865 tree t;
21866 const char *common_lang = NULL;
21867
21868 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
21869 {
21870 if (!TRANSLATION_UNIT_LANGUAGE (t))
21871 continue;
21872 if (!common_lang)
21873 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
21874 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
21875 ;
21876 else if (strncmp (common_lang, "GNU C", 5) == 0
21877 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
21878 /* Mixing C and C++ is ok, use C++ in that case. */
21879 common_lang = highest_c_language (common_lang,
21880 TRANSLATION_UNIT_LANGUAGE (t));
21881 else
21882 {
21883 /* Fall back to C. */
21884 common_lang = NULL;
21885 break;
21886 }
21887 }
21888
21889 if (common_lang)
21890 language_string = common_lang;
21891 }
21892
21893 language = DW_LANG_C;
21894 if (strncmp (language_string, "GNU C", 5) == 0
21895 && ISDIGIT (language_string[5]))
21896 {
21897 language = DW_LANG_C89;
21898 if (dwarf_version >= 3 || !dwarf_strict)
21899 {
21900 if (strcmp (language_string, "GNU C89") != 0)
21901 language = DW_LANG_C99;
21902
21903 if (dwarf_version >= 5 /* || !dwarf_strict */)
21904 if (strcmp (language_string, "GNU C11") == 0)
21905 language = DW_LANG_C11;
21906 }
21907 }
21908 else if (strncmp (language_string, "GNU C++", 7) == 0)
21909 {
21910 language = DW_LANG_C_plus_plus;
21911 if (dwarf_version >= 5 /* || !dwarf_strict */)
21912 {
21913 if (strcmp (language_string, "GNU C++11") == 0)
21914 language = DW_LANG_C_plus_plus_11;
21915 else if (strcmp (language_string, "GNU C++14") == 0)
21916 language = DW_LANG_C_plus_plus_14;
21917 }
21918 }
21919 else if (strcmp (language_string, "GNU F77") == 0)
21920 language = DW_LANG_Fortran77;
21921 else if (strcmp (language_string, "GNU Pascal") == 0)
21922 language = DW_LANG_Pascal83;
21923 else if (dwarf_version >= 3 || !dwarf_strict)
21924 {
21925 if (strcmp (language_string, "GNU Ada") == 0)
21926 language = DW_LANG_Ada95;
21927 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
21928 {
21929 language = DW_LANG_Fortran95;
21930 if (dwarf_version >= 5 /* || !dwarf_strict */)
21931 {
21932 if (strcmp (language_string, "GNU Fortran2003") == 0)
21933 language = DW_LANG_Fortran03;
21934 else if (strcmp (language_string, "GNU Fortran2008") == 0)
21935 language = DW_LANG_Fortran08;
21936 }
21937 }
21938 else if (strcmp (language_string, "GNU Java") == 0)
21939 language = DW_LANG_Java;
21940 else if (strcmp (language_string, "GNU Objective-C") == 0)
21941 language = DW_LANG_ObjC;
21942 else if (strcmp (language_string, "GNU Objective-C++") == 0)
21943 language = DW_LANG_ObjC_plus_plus;
21944 else if (dwarf_version >= 5 || !dwarf_strict)
21945 {
21946 if (strcmp (language_string, "GNU Go") == 0)
21947 language = DW_LANG_Go;
21948 }
21949 }
21950 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
21951 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
21952 language = DW_LANG_Fortran90;
21953
21954 add_AT_unsigned (die, DW_AT_language, language);
21955
21956 switch (language)
21957 {
21958 case DW_LANG_Fortran77:
21959 case DW_LANG_Fortran90:
21960 case DW_LANG_Fortran95:
21961 case DW_LANG_Fortran03:
21962 case DW_LANG_Fortran08:
21963 /* Fortran has case insensitive identifiers and the front-end
21964 lowercases everything. */
21965 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
21966 break;
21967 default:
21968 /* The default DW_ID_case_sensitive doesn't need to be specified. */
21969 break;
21970 }
21971 return die;
21972 }
21973
21974 /* Generate the DIE for a base class. */
21975
21976 static void
21977 gen_inheritance_die (tree binfo, tree access, tree type,
21978 dw_die_ref context_die)
21979 {
21980 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
21981 struct vlr_context ctx = { type, NULL };
21982
21983 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
21984 context_die);
21985 add_data_member_location_attribute (die, binfo, &ctx);
21986
21987 if (BINFO_VIRTUAL_P (binfo))
21988 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
21989
21990 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
21991 children, otherwise the default is DW_ACCESS_public. In DWARF2
21992 the default has always been DW_ACCESS_private. */
21993 if (access == access_public_node)
21994 {
21995 if (dwarf_version == 2
21996 || context_die->die_tag == DW_TAG_class_type)
21997 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
21998 }
21999 else if (access == access_protected_node)
22000 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
22001 else if (dwarf_version > 2
22002 && context_die->die_tag != DW_TAG_class_type)
22003 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
22004 }
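/* An illustrative case (assuming DWARF 3 or later): for

     struct B {};
     struct D : private B {};

   the DW_TAG_inheritance DIE under D gets DW_AT_accessibility
   DW_ACCESS_private, since private is not the default in a non-class
   context; a public base of a struct needs no accessibility attribute
   there.  */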
22005
22006 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
22007 structure. */
22008 static bool
22009 is_variant_part (tree decl)
22010 {
22011 return (TREE_CODE (decl) == FIELD_DECL
22012 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
22013 }
22014
22015 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
22016 return the FIELD_DECL. Return NULL_TREE otherwise. */
22017
22018 static tree
22019 analyze_discr_in_predicate (tree operand, tree struct_type)
22020 {
22021 bool continue_stripping = true;
22022 while (continue_stripping)
22023 switch (TREE_CODE (operand))
22024 {
22025 CASE_CONVERT:
22026 operand = TREE_OPERAND (operand, 0);
22027 break;
22028 default:
22029 continue_stripping = false;
22030 break;
22031 }
22032
22033 /* Match field access to members of struct_type only. */
22034 if (TREE_CODE (operand) == COMPONENT_REF
22035 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
22036 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
22037 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
22038 return TREE_OPERAND (operand, 1);
22039 else
22040 return NULL_TREE;
22041 }
22042
22043 /* Check that SRC is a constant integer that can be represented as a native
22044 integer constant (either signed or unsigned). If so, store it into DEST and
22045 return true. Return false otherwise. */
22046
22047 static bool
22048 get_discr_value (tree src, dw_discr_value *dest)
22049 {
22050 bool is_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
22051
22052 if (TREE_CODE (src) != INTEGER_CST
22053 || !(is_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
22054 return false;
22055
22056 dest->pos = is_unsigned;
22057 if (is_unsigned)
22058 dest->v.uval = tree_to_uhwi (src);
22059 else
22060 dest->v.sval = tree_to_shwi (src);
22061
22062 return true;
22063 }
22064
22065 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
22066 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
22067 store NULL_TREE in DISCR_DECL. Otherwise:
22068
22069 - store the discriminant field in STRUCT_TYPE that controls the variant
22070 part to *DISCR_DECL
22071
22072 - put in *DISCR_LISTS_P an array where for each variant, the item
22073 represents the corresponding matching list of discriminant values.
22074
22075 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
22076 the above array.
22077
22078 Note that when the array is allocated (i.e. when the analysis is
22079 successful), it is up to the caller to free the array. */
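/* A worked sketch (hypothetical tree, for a language with variant records
   such as Ada): if a variant's DECL_QUALIFIER is, conceptually,

     <struct>.d == 1 || (<struct>.d >= 3 && <struct>.d <= 5)

   the analysis below records `d' as the discriminant and builds for that
   variant a discriminant list containing the single value 1 and the range
   [3 .. 5]; the default variant (DECL_QUALIFIER == boolean_true_node) gets
   no list at all.  */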
22080
22081 static void
22082 analyze_variants_discr (tree variant_part_decl,
22083 tree struct_type,
22084 tree *discr_decl,
22085 dw_discr_list_ref **discr_lists_p,
22086 unsigned *discr_lists_length)
22087 {
22088 tree variant_part_type = TREE_TYPE (variant_part_decl);
22089 tree variant;
22090 dw_discr_list_ref *discr_lists;
22091 unsigned i;
22092
22093 /* Compute how many variants there are in this variant part. */
22094 *discr_lists_length = 0;
22095 for (variant = TYPE_FIELDS (variant_part_type);
22096 variant != NULL_TREE;
22097 variant = DECL_CHAIN (variant))
22098 ++*discr_lists_length;
22099
22100 *discr_decl = NULL_TREE;
22101 *discr_lists_p
22102 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
22103 sizeof (**discr_lists_p));
22104 discr_lists = *discr_lists_p;
22105
22106 /* And then analyze all variants to extract discriminant information for all
22107 of them. This analysis is conservative: as soon as we detect something we
22108 do not support, abort everything and pretend we found nothing. */
22109 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
22110 variant != NULL_TREE;
22111 variant = DECL_CHAIN (variant), ++i)
22112 {
22113 tree match_expr = DECL_QUALIFIER (variant);
22114
22115 /* Now, try to analyze the predicate and deduce a discriminant for
22116 it. */
22117 if (match_expr == boolean_true_node)
22118 /* Typically happens for the default variant: it matches all cases that
22119 previous variants rejected. Don't output any matching value for
22120 this one. */
22121 continue;
22122
22123 /* The following loop tries to iterate over each discriminant
22124 possibility: single values or ranges. */
22125 while (match_expr != NULL_TREE)
22126 {
22127 tree next_round_match_expr;
22128 tree candidate_discr = NULL_TREE;
22129 dw_discr_list_ref new_node = NULL;
22130
22131 /* Possibilities are matched one after the other by nested
22132 TRUTH_ORIF_EXPR expressions. Process the current possibility and
22133 continue with the rest at next iteration. */
22134 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
22135 {
22136 next_round_match_expr = TREE_OPERAND (match_expr, 0);
22137 match_expr = TREE_OPERAND (match_expr, 1);
22138 }
22139 else
22140 next_round_match_expr = NULL_TREE;
22141
22142 if (match_expr == boolean_false_node)
22143 /* This sub-expression matches nothing: just wait for the next
22144 one. */
22145 ;
22146
22147 else if (TREE_CODE (match_expr) == EQ_EXPR)
22148 {
22149 /* We are matching: <discr_field> == <integer_cst>
22150 This sub-expression matches a single value. */
22151 tree integer_cst = TREE_OPERAND (match_expr, 1);
22152
22153 candidate_discr
22154 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
22155 struct_type);
22156
22157 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
22158 if (!get_discr_value (integer_cst,
22159 &new_node->dw_discr_lower_bound))
22160 goto abort;
22161 new_node->dw_discr_range = false;
22162 }
22163
22164 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
22165 {
22166 /* We are matching:
22167 <discr_field> > <integer_cst>
22168 && <discr_field> < <integer_cst>.
22169 This sub-expression matches the range of values between the
22170 two matched integer constants. Note that comparisons can be
22171 inclusive or exclusive. */
22172 tree candidate_discr_1, candidate_discr_2;
22173 tree lower_cst, upper_cst;
22174 bool lower_cst_included, upper_cst_included;
22175 tree lower_op = TREE_OPERAND (match_expr, 0);
22176 tree upper_op = TREE_OPERAND (match_expr, 1);
22177
22178 /* When the comparison is exclusive, the integer constant is not
22179 the discriminant range bound we are looking for: we will have
22180 to increment or decrement it. */
22181 if (TREE_CODE (lower_op) == GE_EXPR)
22182 lower_cst_included = true;
22183 else if (TREE_CODE (lower_op) == GT_EXPR)
22184 lower_cst_included = false;
22185 else
22186 goto abort;
22187
22188 if (TREE_CODE (upper_op) == LE_EXPR)
22189 upper_cst_included = true;
22190 else if (TREE_CODE (upper_op) == LT_EXPR)
22191 upper_cst_included = false;
22192 else
22193 goto abort;
22194
22195 /* Extract the discriminant from the first operand and check it
22196 is consistent with the same analysis in the second
22197 operand. */
22198 candidate_discr_1
22199 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
22200 struct_type);
22201 candidate_discr_2
22202 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
22203 struct_type);
22204 if (candidate_discr_1 == candidate_discr_2)
22205 candidate_discr = candidate_discr_1;
22206 else
22207 goto abort;
22208
22209 /* Extract bounds from both. */
22210 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
22211 lower_cst = TREE_OPERAND (lower_op, 1);
22212 upper_cst = TREE_OPERAND (upper_op, 1);
22213
22214 if (!lower_cst_included)
22215 lower_cst
22216 = fold (build2 (PLUS_EXPR, TREE_TYPE (lower_cst),
22217 lower_cst,
22218 build_int_cst (TREE_TYPE (lower_cst), 1)));
22219 if (!upper_cst_included)
22220 upper_cst
22221 = fold (build2 (MINUS_EXPR, TREE_TYPE (upper_cst),
22222 upper_cst,
22223 build_int_cst (TREE_TYPE (upper_cst), 1)));
22224
22225 if (!get_discr_value (lower_cst,
22226 &new_node->dw_discr_lower_bound)
22227 || !get_discr_value (upper_cst,
22228 &new_node->dw_discr_upper_bound))
22229 goto abort;
22230
22231 new_node->dw_discr_range = true;
22232 }
22233
22234 else
22235 /* Unsupported sub-expression: we cannot determine the set of
22236 matching discriminant values. Abort everything. */
22237 goto abort;
22238
22239 /* If the discriminant info is not consistent with what we saw so
22240 far, consider the analysis failed and abort everything. */
22241 if (candidate_discr == NULL_TREE
22242 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
22243 goto abort;
22244 else
22245 *discr_decl = candidate_discr;
22246
22247 if (new_node != NULL)
22248 {
22249 new_node->dw_discr_next = discr_lists[i];
22250 discr_lists[i] = new_node;
22251 }
22252 match_expr = next_round_match_expr;
22253 }
22254 }
22255
22256 /* If we reach this point, we could match everything we were interested
22257 in. */
22258 return;
22259
22260 abort:
22261 /* Clean up all data structures and return no result. */
22262 free (*discr_lists_p);
22263 *discr_lists_p = NULL;
22264 *discr_decl = NULL_TREE;
22265 }
22266
22267 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
22268 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
22269 under CONTEXT_DIE.
22270
22271 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
22272 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
22273 this type, which are record types, represent the available variants and each
22274 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
22275 values are inferred from these attributes.
22276
22277 In trees, the offsets for the fields inside these sub-records are relative
22278 to the variant part itself, whereas the corresponding DIEs should have
22279 offset attributes that are relative to the embedding record base address.
22280 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
22281 must be an expression that computes the offset of the variant part to
22282 describe in DWARF. */
22283
22284 static void
22285 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
22286 dw_die_ref context_die)
22287 {
22288 const tree variant_part_type = TREE_TYPE (variant_part_decl);
22289 tree variant_part_offset = vlr_ctx->variant_part_offset;
22290 struct loc_descr_context ctx = {
22291 vlr_ctx->struct_type, /* context_type */
22292 NULL_TREE, /* base_decl */
22293 NULL /* dpi */
22294 };
22295
22296 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
22297 NULL_TREE if there is no such field. */
22298 tree discr_decl = NULL_TREE;
22299 dw_discr_list_ref *discr_lists;
22300 unsigned discr_lists_length = 0;
22301 unsigned i;
22302
22303 dw_die_ref dwarf_proc_die = NULL;
22304 dw_die_ref variant_part_die
22305 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
22306
22307 equate_decl_number_to_die (variant_part_decl, variant_part_die);
22308
22309 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
22310 &discr_decl, &discr_lists, &discr_lists_length);
22311
22312 if (discr_decl != NULL_TREE)
22313 {
22314 dw_die_ref discr_die = lookup_decl_die (discr_decl);
22315
22316 if (discr_die)
22317 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
22318 else
22319 /* We have no DIE for the discriminant, so just discard all
22320 discriminant information in the output. */
22321 discr_decl = NULL_TREE;
22322 }
22323
22324 /* If the offset for this variant part is more complex than a constant,
22325 create a DWARF procedure for it so that we will not have to generate DWARF
22326 expressions for it for each member. */
22327 if (TREE_CODE (variant_part_offset) != INTEGER_CST
22328 && (dwarf_version >= 3 || !dwarf_strict))
22329 {
22330 const tree dwarf_proc_fndecl
22331 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
22332 build_function_type (TREE_TYPE (variant_part_offset),
22333 NULL_TREE));
22334 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
22335 const dw_loc_descr_ref dwarf_proc_body
22336 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
22337
22338 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
22339 dwarf_proc_fndecl, context_die);
22340 if (dwarf_proc_die != NULL)
22341 variant_part_offset = dwarf_proc_call;
22342 }
22343
22344 /* Output DIEs for all variants. */
22345 i = 0;
22346 for (tree variant = TYPE_FIELDS (variant_part_type);
22347 variant != NULL_TREE;
22348 variant = DECL_CHAIN (variant), ++i)
22349 {
22350 tree variant_type = TREE_TYPE (variant);
22351 dw_die_ref variant_die;
22352
22353 /* All variants (i.e. members of a variant part) are supposed to be
22354 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
22355 under these records. */
22356 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
22357
22358 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
22359 equate_decl_number_to_die (variant, variant_die);
22360
22361 /* Output discriminant values this variant matches, if any. */
22362 if (discr_decl == NULL || discr_lists[i] == NULL)
22363 /* In case we have no discriminant information at all, this is
22364 probably the default variant: as the standard says, don't
22365 output any discriminant value/list attribute. */
22366 ;
22367 else if (discr_lists[i]->dw_discr_next == NULL
22368 && !discr_lists[i]->dw_discr_range)
22369 /* If there is only one accepted value, don't bother outputting a
22370 list. */
22371 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
22372 else
22373 add_discr_list (variant_die, discr_lists[i]);
22374
22375 for (tree member = TYPE_FIELDS (variant_type);
22376 member != NULL_TREE;
22377 member = DECL_CHAIN (member))
22378 {
22379 struct vlr_context vlr_sub_ctx = {
22380 vlr_ctx->struct_type, /* struct_type */
22381 NULL /* variant_part_offset */
22382 };
22383 if (is_variant_part (member))
22384 {
22385 /* All offsets for fields inside variant parts are relative to
22386 the top-level embedding RECORD_TYPE's base address. On the
22387 other hand, offsets in GCC's types are relative to the
22388 nested-most variant part. So we have to sum offsets each time
22389 we recurse. */
22390
22391 vlr_sub_ctx.variant_part_offset
22392 = fold (build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
22393 variant_part_offset, byte_position (member)));
22394 gen_variant_part (member, &vlr_sub_ctx, variant_die);
22395 }
22396 else
22397 {
22398 vlr_sub_ctx.variant_part_offset = variant_part_offset;
22399 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
22400 }
22401 }
22402 }
22403
22404 free (discr_lists);
22405 }
22406
22407 /* Generate a DIE for a class member. */
22408
22409 static void
22410 gen_member_die (tree type, dw_die_ref context_die)
22411 {
22412 tree member;
22413 tree binfo = TYPE_BINFO (type);
22414 dw_die_ref child;
22415
22416 /* If this is not an incomplete type, output descriptions of each of its
22417 members. Note that as we output the DIEs necessary to represent the
22418 members of this record or union type, we will also be trying to output
22419 DIEs to represent the *types* of those members. However the `type'
22420 function (above) will specifically avoid generating type DIEs for member
22421 types *within* the list of member DIEs for this (containing) type except
22422 for those types (of members) which are explicitly marked as also being
22423     members of this (containing) type themselves.  The g++ front end can
22424 force any given type to be treated as a member of some other (containing)
22425 type by setting the TYPE_CONTEXT of the given (member) type to point to
22426 the TREE node representing the appropriate (containing) type. */
22427
22428 /* First output info about the base classes. */
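  /* For instance (illustrative only), with "struct D : public B { ... };"
     each base such as B gets a DW_TAG_inheritance child under D's DIE,
     carrying the access specifier recorded in BINFO_BASE_ACCESSES.  */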
22429 if (binfo)
22430 {
22431 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
22432 int i;
22433 tree base;
22434
22435 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
22436 gen_inheritance_die (base,
22437 (accesses ? (*accesses)[i] : access_public_node),
22438 type,
22439 context_die);
22440 }
22441
22442 /* Now output info about the data members and type members. */
22443 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
22444 {
22445 struct vlr_context vlr_ctx = { type, NULL_TREE };
22446
22447 /* If we thought we were generating minimal debug info for TYPE
22448 and then changed our minds, some of the member declarations
22449 may have already been defined. Don't define them again, but
22450 do put them in the right order. */
22451
22452 child = lookup_decl_die (member);
22453 if (child)
22454 splice_child_die (context_die, child);
22455
22456 /* Do not generate standard DWARF for variant parts if we are generating
22457 the corresponding GNAT encodings: DIEs generated for both would
22458 conflict in our mappings. */
22459 else if (is_variant_part (member)
22460 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
22461 {
22462 vlr_ctx.variant_part_offset = byte_position (member);
22463 gen_variant_part (member, &vlr_ctx, context_die);
22464 }
22465 else
22466 {
22467 vlr_ctx.variant_part_offset = NULL_TREE;
22468 gen_decl_die (member, NULL, &vlr_ctx, context_die);
22469 }
22470 }
22471
22472 /* We do not keep type methods in type variants. */
22473 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
22474 /* Now output info about the function members (if any). */
22475 if (TYPE_METHODS (type) != error_mark_node)
22476 for (member = TYPE_METHODS (type); member; member = DECL_CHAIN (member))
22477 {
22478 /* Don't include clones in the member list. */
22479 if (DECL_ABSTRACT_ORIGIN (member))
22480 continue;
22481 /* Nor constructors for anonymous classes. */
22482 if (DECL_ARTIFICIAL (member)
22483 && dwarf2_name (member, 0) == NULL)
22484 continue;
22485
22486 child = lookup_decl_die (member);
22487 if (child)
22488 splice_child_die (context_die, child);
22489 else
22490 gen_decl_die (member, NULL, NULL, context_die);
22491 }
22492 }
22493
22494 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
22495 is set, we pretend that the type was never defined, so we only get the
22496 member DIEs needed by later specification DIEs. */
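/* As a hedged sketch (names, sizes and offsets are illustrative and depend on
   the target and flags), a complete definition such as

     struct point { int x; int y; };

   is expected to come out roughly as

     DW_TAG_structure_type
       DW_AT_name "point", DW_AT_byte_size 8
       DW_TAG_member "x", DW_AT_data_member_location 0
       DW_TAG_member "y", DW_AT_data_member_location 4

   with the member children produced by gen_member_die.  */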
22497
22498 static void
22499 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
22500 enum debug_info_usage usage)
22501 {
22502 if (TREE_ASM_WRITTEN (type))
22503 {
22504       /* Fill in the bounds of variable-length fields in late dwarf if
22505 still incomplete. */
22506 if (!early_dwarf && variably_modified_type_p (type, NULL))
22507 for (tree member = TYPE_FIELDS (type);
22508 member;
22509 member = DECL_CHAIN (member))
22510 fill_variable_array_bounds (TREE_TYPE (member));
22511 return;
22512 }
22513
22514 dw_die_ref type_die = lookup_type_die (type);
22515 dw_die_ref scope_die = 0;
22516 int nested = 0;
22517 int complete = (TYPE_SIZE (type)
22518 && (! TYPE_STUB_DECL (type)
22519 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
22520 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
22521 complete = complete && should_emit_struct_debug (type, usage);
22522
22523 if (type_die && ! complete)
22524 return;
22525
22526 if (TYPE_CONTEXT (type) != NULL_TREE
22527 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
22528 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
22529 nested = 1;
22530
22531 scope_die = scope_die_for (type, context_die);
22532
22533   /* Generate child DIEs for template parameters.  */
22534 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
22535 schedule_generic_params_dies_gen (type);
22536
22537 if (! type_die || (nested && is_cu_die (scope_die)))
22538 /* First occurrence of type or toplevel definition of nested class. */
22539 {
22540 dw_die_ref old_die = type_die;
22541
22542 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
22543 ? record_type_tag (type) : DW_TAG_union_type,
22544 scope_die, type);
22545 equate_type_number_to_die (type, type_die);
22546 if (old_die)
22547 add_AT_specification (type_die, old_die);
22548 else
22549 add_name_attribute (type_die, type_tag (type));
22550 }
22551 else
22552 remove_AT (type_die, DW_AT_declaration);
22553
22554 /* If this type has been completed, then give it a byte_size attribute and
22555 then give a list of members. */
22556 if (complete && !ns_decl)
22557 {
22558 /* Prevent infinite recursion in cases where the type of some member of
22559 this type is expressed in terms of this type itself. */
22560 TREE_ASM_WRITTEN (type) = 1;
22561 add_byte_size_attribute (type_die, type);
22562 if (TYPE_STUB_DECL (type) != NULL_TREE)
22563 {
22564 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
22565 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
22566 }
22567
22568 /* If the first reference to this type was as the return type of an
22569 inline function, then it may not have a parent. Fix this now. */
22570 if (type_die->die_parent == NULL)
22571 add_child_die (scope_die, type_die);
22572
22573 push_decl_scope (type);
22574 gen_member_die (type, type_die);
22575 pop_decl_scope ();
22576
22577 add_gnat_descriptive_type_attribute (type_die, type, context_die);
22578 if (TYPE_ARTIFICIAL (type))
22579 add_AT_flag (type_die, DW_AT_artificial, 1);
22580
22581 /* GNU extension: Record what type our vtable lives in. */
22582 if (TYPE_VFIELD (type))
22583 {
22584 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
22585
22586 gen_type_die (vtype, context_die);
22587 add_AT_die_ref (type_die, DW_AT_containing_type,
22588 lookup_type_die (vtype));
22589 }
22590 }
22591 else
22592 {
22593 add_AT_flag (type_die, DW_AT_declaration, 1);
22594
22595 /* We don't need to do this for function-local types. */
22596 if (TYPE_STUB_DECL (type)
22597 && ! decl_function_context (TYPE_STUB_DECL (type)))
22598 vec_safe_push (incomplete_types, type);
22599 }
22600
22601 if (get_AT (type_die, DW_AT_name))
22602 add_pubtype (type, type_die);
22603 }
22604
22605 /* Generate a DIE for a subroutine _type_. */
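/* For example (a sketch, assuming a typical C target), the function type
   behind "int (*fp) (float)" is expected to produce a DW_TAG_subroutine_type
   whose DW_AT_type refers to "int", with one DW_TAG_formal_parameter child
   referring to "float".  */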
22606
22607 static void
22608 gen_subroutine_type_die (tree type, dw_die_ref context_die)
22609 {
22610 tree return_type = TREE_TYPE (type);
22611 dw_die_ref subr_die
22612 = new_die (DW_TAG_subroutine_type,
22613 scope_die_for (type, context_die), type);
22614
22615 equate_type_number_to_die (type, subr_die);
22616 add_prototyped_attribute (subr_die, type);
22617 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
22618 context_die);
22619 gen_formal_types_die (type, subr_die);
22620
22621 if (get_AT (subr_die, DW_AT_name))
22622 add_pubtype (type, subr_die);
22623 }
22624
22625 /* Generate a DIE for a type definition. */
22626
22627 static void
22628 gen_typedef_die (tree decl, dw_die_ref context_die)
22629 {
22630 dw_die_ref type_die;
22631 tree origin;
22632
22633 if (TREE_ASM_WRITTEN (decl))
22634 {
22635 if (DECL_ORIGINAL_TYPE (decl))
22636 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
22637 return;
22638 }
22639
22640 TREE_ASM_WRITTEN (decl) = 1;
22641 type_die = new_die (DW_TAG_typedef, context_die, decl);
22642 origin = decl_ultimate_origin (decl);
22643 if (origin != NULL)
22644 add_abstract_origin_attribute (type_die, origin);
22645 else
22646 {
22647 tree type;
22648
22649 add_name_and_src_coords_attributes (type_die, decl);
22650 if (DECL_ORIGINAL_TYPE (decl))
22651 {
22652 type = DECL_ORIGINAL_TYPE (decl);
22653
22654 if (type == error_mark_node)
22655 return;
22656
22657 gcc_assert (type != TREE_TYPE (decl));
22658 equate_type_number_to_die (TREE_TYPE (decl), type_die);
22659 }
22660 else
22661 {
22662 type = TREE_TYPE (decl);
22663
22664 if (type == error_mark_node)
22665 return;
22666
22667 if (is_naming_typedef_decl (TYPE_NAME (type)))
22668 {
22669 /* Here, we are in the case of decl being a typedef naming
22670 	     an anonymous type, e.g.:
22671 typedef struct {...} foo;
22672 In that case TREE_TYPE (decl) is not a typedef variant
22673 type and TYPE_NAME of the anonymous type is set to the
22674 TYPE_DECL of the typedef. This construct is emitted by
22675 the C++ FE.
22676
22677 TYPE is the anonymous struct named by the typedef
22678 DECL. As we need the DW_AT_type attribute of the
22679 DW_TAG_typedef to point to the DIE of TYPE, let's
22680 generate that DIE right away. add_type_attribute
22681 called below will then pick (via lookup_type_die) that
22682 anonymous struct DIE. */
22683 if (!TREE_ASM_WRITTEN (type))
22684 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
22685
22686 /* This is a GNU Extension. We are adding a
22687 DW_AT_linkage_name attribute to the DIE of the
22688 anonymous struct TYPE. The value of that attribute
22689 is the name of the typedef decl naming the anonymous
22690 struct. This greatly eases the work of consumers of
22691 this debug info. */
22692 add_linkage_name_raw (lookup_type_die (type), decl);
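	      /* A hedged sketch of the intended result for
		 "typedef struct {...} foo;": the DIE of the anonymous
		 structure carries the linkage name "foo" added just above,
		 and the DW_TAG_typedef named "foo" has its DW_AT_type
		 pointing at that structure DIE.  */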
22693 }
22694 }
22695
22696 add_type_attribute (type_die, type, decl_quals (decl), false,
22697 context_die);
22698
22699 if (is_naming_typedef_decl (decl))
22700 	/* We want all subsequent calls to lookup_type_die with
22701 	   TYPE as the argument to yield the DW_TAG_typedef we have just
22702 created. */
22703 equate_type_number_to_die (type, type_die);
22704
22705 add_accessibility_attribute (type_die, decl);
22706 }
22707
22708 if (DECL_ABSTRACT_P (decl))
22709 equate_decl_number_to_die (decl, type_die);
22710
22711 if (get_AT (type_die, DW_AT_name))
22712 add_pubtype (decl, type_die);
22713 }
22714
22715 /* Generate a DIE for a struct, class, enum or union type. */
22716
22717 static void
22718 gen_tagged_type_die (tree type,
22719 dw_die_ref context_die,
22720 enum debug_info_usage usage)
22721 {
22722 int need_pop;
22723
22724 if (type == NULL_TREE
22725 || !is_tagged_type (type))
22726 return;
22727
22728 if (TREE_ASM_WRITTEN (type))
22729 need_pop = 0;
22730 /* If this is a nested type whose containing class hasn't been written
22731 out yet, writing it out will cover this one, too. This does not apply
22732 to instantiations of member class templates; they need to be added to
22733 the containing class as they are generated. FIXME: This hurts the
22734 idea of combining type decls from multiple TUs, since we can't predict
22735 what set of template instantiations we'll get. */
22736 else if (TYPE_CONTEXT (type)
22737 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
22738 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
22739 {
22740 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
22741
22742 if (TREE_ASM_WRITTEN (type))
22743 return;
22744
22745 /* If that failed, attach ourselves to the stub. */
22746 push_decl_scope (TYPE_CONTEXT (type));
22747 context_die = lookup_type_die (TYPE_CONTEXT (type));
22748 need_pop = 1;
22749 }
22750 else if (TYPE_CONTEXT (type) != NULL_TREE
22751 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
22752 {
22753 /* If this type is local to a function that hasn't been written
22754 out yet, use a NULL context for now; it will be fixed up in
22755 decls_for_scope. */
22756 context_die = lookup_decl_die (TYPE_CONTEXT (type));
22757 /* A declaration DIE doesn't count; nested types need to go in the
22758 specification. */
22759 if (context_die && is_declaration_die (context_die))
22760 context_die = NULL;
22761 need_pop = 0;
22762 }
22763 else
22764 {
22765 context_die = declare_in_namespace (type, context_die);
22766 need_pop = 0;
22767 }
22768
22769 if (TREE_CODE (type) == ENUMERAL_TYPE)
22770 {
22771 /* This might have been written out by the call to
22772 declare_in_namespace. */
22773 if (!TREE_ASM_WRITTEN (type))
22774 gen_enumeration_type_die (type, context_die);
22775 }
22776 else
22777 gen_struct_or_union_type_die (type, context_die, usage);
22778
22779 if (need_pop)
22780 pop_decl_scope ();
22781
22782 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
22783 it up if it is ever completed. gen_*_type_die will set it for us
22784 when appropriate. */
22785 }
22786
22787 /* Generate a type description DIE. */
22788
22789 static void
22790 gen_type_die_with_usage (tree type, dw_die_ref context_die,
22791 enum debug_info_usage usage)
22792 {
22793 struct array_descr_info info;
22794
22795 if (type == NULL_TREE || type == error_mark_node)
22796 return;
22797
22798 if (flag_checking && type)
22799 verify_type (type);
22800
22801 if (TYPE_NAME (type) != NULL_TREE
22802 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
22803 && is_redundant_typedef (TYPE_NAME (type))
22804 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
22805 /* The DECL of this type is a typedef we don't want to emit debug
22806 info for but we want debug info for its underlying typedef.
22807      This can happen, e.g., for the injected-class-name of a C++
22808 type. */
22809 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
22810
22811 /* If TYPE is a typedef type variant, let's generate debug info
22812 for the parent typedef which TYPE is a type of. */
22813 if (typedef_variant_p (type))
22814 {
22815 if (TREE_ASM_WRITTEN (type))
22816 return;
22817
22818 /* Prevent broken recursion; we can't hand off to the same type. */
22819 gcc_assert (DECL_ORIGINAL_TYPE (TYPE_NAME (type)) != type);
22820
22821 /* Give typedefs the right scope. */
22822 context_die = scope_die_for (type, context_die);
22823
22824 TREE_ASM_WRITTEN (type) = 1;
22825
22826 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
22827 return;
22828 }
22829
22830 /* If type is an anonymous tagged type named by a typedef, let's
22831 generate debug info for the typedef. */
22832 if (is_naming_typedef_decl (TYPE_NAME (type)))
22833 {
22834 /* Use the DIE of the containing namespace as the parent DIE of
22835 the type description DIE we want to generate. */
22836 if (DECL_CONTEXT (TYPE_NAME (type))
22837 && TREE_CODE (DECL_CONTEXT (TYPE_NAME (type))) == NAMESPACE_DECL)
22838 context_die = get_context_die (DECL_CONTEXT (TYPE_NAME (type)));
22839
22840 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
22841 return;
22842 }
22843
22844 /* We are going to output a DIE to represent the unqualified version
22845 of this type (i.e. without any const or volatile qualifiers) so
22846 get the main variant (i.e. the unqualified version) of this type
22847 now. (Vectors and arrays are special because the debugging info is in the
22848 cloned type itself). */
22849 if (TREE_CODE (type) != VECTOR_TYPE
22850 && TREE_CODE (type) != ARRAY_TYPE)
22851 type = type_main_variant (type);
22852
22853 /* If this is an array type with hidden descriptor, handle it first. */
22854 if (!TREE_ASM_WRITTEN (type)
22855 && lang_hooks.types.get_array_descr_info)
22856 {
22857 memset (&info, 0, sizeof (info));
22858 if (lang_hooks.types.get_array_descr_info (type, &info))
22859 {
22860 /* Fortran sometimes emits array types with no dimension. */
22861 gcc_assert (info.ndimensions >= 0
22862 && (info.ndimensions
22863 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
22864 gen_descr_array_type_die (type, &info, context_die);
22865 TREE_ASM_WRITTEN (type) = 1;
22866 return;
22867 }
22868 }
22869
22870 if (TREE_ASM_WRITTEN (type))
22871 {
22872 /* Variable-length types may be incomplete even if
22873 TREE_ASM_WRITTEN. For such types, fall through to
22874 gen_array_type_die() and possibly fill in
22875 DW_AT_{upper,lower}_bound attributes. */
22876 if ((TREE_CODE (type) != ARRAY_TYPE
22877 && TREE_CODE (type) != RECORD_TYPE
22878 && TREE_CODE (type) != UNION_TYPE
22879 && TREE_CODE (type) != QUAL_UNION_TYPE)
22880 || !variably_modified_type_p (type, NULL))
22881 return;
22882 }
22883
22884 switch (TREE_CODE (type))
22885 {
22886 case ERROR_MARK:
22887 break;
22888
22889 case POINTER_TYPE:
22890 case REFERENCE_TYPE:
22891 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
22892 ensures that the gen_type_die recursion will terminate even if the
22893 type is recursive. Recursive types are possible in Ada. */
22894 /* ??? We could perhaps do this for all types before the switch
22895 statement. */
22896 TREE_ASM_WRITTEN (type) = 1;
22897
22898 /* For these types, all that is required is that we output a DIE (or a
22899 set of DIEs) to represent the "basis" type. */
22900 gen_type_die_with_usage (TREE_TYPE (type), context_die,
22901 DINFO_USAGE_IND_USE);
22902 break;
22903
22904 case OFFSET_TYPE:
22905 /* This code is used for C++ pointer-to-data-member types.
22906 Output a description of the relevant class type. */
22907 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
22908 DINFO_USAGE_IND_USE);
22909
22910 /* Output a description of the type of the object pointed to. */
22911 gen_type_die_with_usage (TREE_TYPE (type), context_die,
22912 DINFO_USAGE_IND_USE);
22913
22914 /* Now output a DIE to represent this pointer-to-data-member type
22915 itself. */
22916 gen_ptr_to_mbr_type_die (type, context_die);
22917 break;
22918
22919 case FUNCTION_TYPE:
22920 /* Force out return type (in case it wasn't forced out already). */
22921 gen_type_die_with_usage (TREE_TYPE (type), context_die,
22922 DINFO_USAGE_DIR_USE);
22923 gen_subroutine_type_die (type, context_die);
22924 break;
22925
22926 case METHOD_TYPE:
22927 /* Force out return type (in case it wasn't forced out already). */
22928 gen_type_die_with_usage (TREE_TYPE (type), context_die,
22929 DINFO_USAGE_DIR_USE);
22930 gen_subroutine_type_die (type, context_die);
22931 break;
22932
22933 case ARRAY_TYPE:
22934 case VECTOR_TYPE:
22935 gen_array_type_die (type, context_die);
22936 break;
22937
22938 case ENUMERAL_TYPE:
22939 case RECORD_TYPE:
22940 case UNION_TYPE:
22941 case QUAL_UNION_TYPE:
22942 gen_tagged_type_die (type, context_die, usage);
22943 return;
22944
22945 case VOID_TYPE:
22946 case INTEGER_TYPE:
22947 case REAL_TYPE:
22948 case FIXED_POINT_TYPE:
22949 case COMPLEX_TYPE:
22950 case BOOLEAN_TYPE:
22951 case POINTER_BOUNDS_TYPE:
22952 /* No DIEs needed for fundamental types. */
22953 break;
22954
22955 case NULLPTR_TYPE:
22956 case LANG_TYPE:
22957 /* Just use DW_TAG_unspecified_type. */
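	/* E.g. (illustrative), C++'s decltype(nullptr) reaches here and is
	   described by name alone, with no size or encoding attributes.  */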
22958 {
22959 dw_die_ref type_die = lookup_type_die (type);
22960 if (type_die == NULL)
22961 {
22962 tree name = TYPE_IDENTIFIER (type);
22963 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
22964 type);
22965 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
22966 equate_type_number_to_die (type, type_die);
22967 }
22968 }
22969 break;
22970
22971 default:
22972 if (is_cxx_auto (type))
22973 {
22974 tree name = TYPE_IDENTIFIER (type);
22975 dw_die_ref *die = (name == get_identifier ("auto")
22976 ? &auto_die : &decltype_auto_die);
22977 if (!*die)
22978 {
22979 *die = new_die (DW_TAG_unspecified_type,
22980 comp_unit_die (), NULL_TREE);
22981 add_name_attribute (*die, IDENTIFIER_POINTER (name));
22982 }
22983 equate_type_number_to_die (type, *die);
22984 break;
22985 }
22986 gcc_unreachable ();
22987 }
22988
22989 TREE_ASM_WRITTEN (type) = 1;
22990 }
22991
22992 static void
22993 gen_type_die (tree type, dw_die_ref context_die)
22994 {
22995 if (type != error_mark_node)
22996 {
22997 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
22998 if (flag_checking)
22999 {
23000 dw_die_ref die = lookup_type_die (type);
23001 if (die)
23002 check_die (die);
23003 }
23004 }
23005 }
23006
23007 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
23008 things which are local to the given block. */
23009
23010 static void
23011 gen_block_die (tree stmt, dw_die_ref context_die)
23012 {
23013 int must_output_die = 0;
23014 bool inlined_func;
23015
23016 /* Ignore blocks that are NULL. */
23017 if (stmt == NULL_TREE)
23018 return;
23019
23020 inlined_func = inlined_function_outer_scope_p (stmt);
23021
23022 /* If the block is one fragment of a non-contiguous block, do not
23023 process the variables, since they will have been done by the
23024 origin block. Do process subblocks. */
23025 if (BLOCK_FRAGMENT_ORIGIN (stmt))
23026 {
23027 tree sub;
23028
23029 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
23030 gen_block_die (sub, context_die);
23031
23032 return;
23033 }
23034
23035 /* Determine if we need to output any Dwarf DIEs at all to represent this
23036 block. */
23037 if (inlined_func)
23038 /* The outer scopes for inlinings *must* always be represented. We
23039 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
23040 must_output_die = 1;
23041 else
23042 {
23043 /* Determine if this block directly contains any "significant"
23044 local declarations which we will need to output DIEs for. */
23045 if (debug_info_level > DINFO_LEVEL_TERSE)
23046 /* We are not in terse mode so *any* local declaration counts
23047 as being a "significant" one. */
23048 must_output_die = ((BLOCK_VARS (stmt) != NULL
23049 || BLOCK_NUM_NONLOCALIZED_VARS (stmt))
23050 && (TREE_USED (stmt)
23051 || TREE_ASM_WRITTEN (stmt)
23052 || BLOCK_ABSTRACT (stmt)));
23053 else if ((TREE_USED (stmt)
23054 || TREE_ASM_WRITTEN (stmt)
23055 || BLOCK_ABSTRACT (stmt))
23056 && !dwarf2out_ignore_block (stmt))
23057 must_output_die = 1;
23058 }
23059
23060 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
23061 DIE for any block which contains no significant local declarations at
23062 all. Rather, in such cases we just call `decls_for_scope' so that any
23063 needed Dwarf info for any sub-blocks will get properly generated. Note
23064 that in terse mode, our definition of what constitutes a "significant"
23065 local declaration gets restricted to include only inlined function
23066 instances and local (nested) function definitions. */
23067 if (must_output_die)
23068 {
23069 if (inlined_func)
23070 {
23071 /* If STMT block is abstract, that means we have been called
23072 indirectly from dwarf2out_abstract_function.
23073 	     That function rightfully marks the descendant blocks (of
23074 the abstract function it is dealing with) as being abstract,
23075 precisely to prevent us from emitting any
23076 	     DW_TAG_inlined_subroutine DIE as a descendant
23077 of an abstract function instance. So in that case, we should
23078 not call gen_inlined_subroutine_die.
23079
23080 Later though, when cgraph asks dwarf2out to emit info
23081 for the concrete instance of the function decl into which
23082 	     the concrete instance of STMT got inlined, the latter will lead
23083 to the generation of a DW_TAG_inlined_subroutine DIE. */
23084 if (! BLOCK_ABSTRACT (stmt))
23085 gen_inlined_subroutine_die (stmt, context_die);
23086 }
23087 else
23088 gen_lexical_block_die (stmt, context_die);
23089 }
23090 else
23091 decls_for_scope (stmt, context_die);
23092 }
23093
23094 /* Process variable DECL (or variable with origin ORIGIN) within
23095 block STMT and add it to CONTEXT_DIE. */
23096 static void
23097 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
23098 {
23099 dw_die_ref die;
23100 tree decl_or_origin = decl ? decl : origin;
23101
23102 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
23103 die = lookup_decl_die (decl_or_origin);
23104 else if (TREE_CODE (decl_or_origin) == TYPE_DECL
23105 && TYPE_DECL_IS_STUB (decl_or_origin))
23106 die = lookup_type_die (TREE_TYPE (decl_or_origin));
23107 else
23108 die = NULL;
23109
23110 if (die != NULL && die->die_parent == NULL)
23111 add_child_die (context_die, die);
23112 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
23113 {
23114 if (early_dwarf)
23115 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
23116 stmt, context_die);
23117 }
23118 else
23119 gen_decl_die (decl, origin, NULL, context_die);
23120 }
23121
23122 /* Generate all of the decls declared within a given scope and (recursively)
23123 all of its sub-blocks. */
23124
23125 static void
23126 decls_for_scope (tree stmt, dw_die_ref context_die)
23127 {
23128 tree decl;
23129 unsigned int i;
23130 tree subblocks;
23131
23132 /* Ignore NULL blocks. */
23133 if (stmt == NULL_TREE)
23134 return;
23135
23136 /* Output the DIEs to represent all of the data objects and typedefs
23137 declared directly within this block but not within any nested
23138 sub-blocks. Also, nested function and tag DIEs have been
23139 generated with a parent of NULL; fix that up now. We don't
23140 have to do this if we're at -g1. */
23141 if (debug_info_level > DINFO_LEVEL_TERSE)
23142 {
23143 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
23144 process_scope_var (stmt, decl, NULL_TREE, context_die);
23145 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
23146 process_scope_var (stmt, NULL, BLOCK_NONLOCALIZED_VAR (stmt, i),
23147 context_die);
23148 }
23149
23150 /* Even if we're at -g1, we need to process the subblocks in order to get
23151 inlined call information. */
23152
23153 /* Output the DIEs to represent all sub-blocks (and the items declared
23154 therein) of this block. */
23155 for (subblocks = BLOCK_SUBBLOCKS (stmt);
23156 subblocks != NULL;
23157 subblocks = BLOCK_CHAIN (subblocks))
23158 gen_block_die (subblocks, context_die);
23159 }
23160
23161 /* Is this a typedef we can avoid emitting? */
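/* For instance (illustrative), the injected-class-name of "struct S { };"
   appears as an artificial member TYPE_DECL named "S" inside S itself and is
   skipped by the DECL_ARTIFICIAL check below.  */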
23162
23163 bool
23164 is_redundant_typedef (const_tree decl)
23165 {
23166 if (TYPE_DECL_IS_STUB (decl))
23167 return true;
23168
23169 if (DECL_ARTIFICIAL (decl)
23170 && DECL_CONTEXT (decl)
23171 && is_tagged_type (DECL_CONTEXT (decl))
23172 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
23173 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
23174 /* Also ignore the artificial member typedef for the class name. */
23175 return true;
23176
23177 return false;
23178 }
23179
23180 /* Return TRUE if DECL is a typedef that names a type for linkage
23181    purposes.  This kind of typedef is produced by the C++ FE for
23182 constructs like:
23183
23184 typedef struct {...} foo;
23185
23186 In that case, there is no typedef variant type produced for foo.
23187 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
23188 struct type. */
23189
23190 static bool
23191 is_naming_typedef_decl (const_tree decl)
23192 {
23193 if (decl == NULL_TREE
23194 || TREE_CODE (decl) != TYPE_DECL
23195 || DECL_NAMELESS (decl)
23196 || !is_tagged_type (TREE_TYPE (decl))
23197 || DECL_IS_BUILTIN (decl)
23198 || is_redundant_typedef (decl)
23199 /* It looks like Ada produces TYPE_DECLs that are very similar
23200 to C++ naming typedefs but that have different
23201 	 semantics.  Let's be specific to C++ for now.  */
23202 || !is_cxx ())
23203 return FALSE;
23204
23205 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
23206 && TYPE_NAME (TREE_TYPE (decl)) == decl
23207 && (TYPE_STUB_DECL (TREE_TYPE (decl))
23208 != TYPE_NAME (TREE_TYPE (decl))));
23209 }
23210
23211 /* Looks up the DIE for a context. */
23212
23213 static inline dw_die_ref
23214 lookup_context_die (tree context)
23215 {
23216 if (context)
23217 {
23218 /* Find die that represents this context. */
23219 if (TYPE_P (context))
23220 {
23221 context = TYPE_MAIN_VARIANT (context);
23222 dw_die_ref ctx = lookup_type_die (context);
23223 if (!ctx)
23224 return NULL;
23225 return strip_naming_typedef (context, ctx);
23226 }
23227 else
23228 return lookup_decl_die (context);
23229 }
23230 return comp_unit_die ();
23231 }
23232
23233 /* Returns the DIE for a context. */
23234
23235 static inline dw_die_ref
23236 get_context_die (tree context)
23237 {
23238 if (context)
23239 {
23240 /* Find die that represents this context. */
23241 if (TYPE_P (context))
23242 {
23243 context = TYPE_MAIN_VARIANT (context);
23244 return strip_naming_typedef (context, force_type_die (context));
23245 }
23246 else
23247 return force_decl_die (context);
23248 }
23249 return comp_unit_die ();
23250 }
23251
23252 /* Returns the DIE for decl. A DIE will always be returned. */
23253
23254 static dw_die_ref
23255 force_decl_die (tree decl)
23256 {
23257 dw_die_ref decl_die;
23258 unsigned saved_external_flag;
23259 tree save_fn = NULL_TREE;
23260 decl_die = lookup_decl_die (decl);
23261 if (!decl_die)
23262 {
23263 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
23264
23265 decl_die = lookup_decl_die (decl);
23266 if (decl_die)
23267 return decl_die;
23268
23269 switch (TREE_CODE (decl))
23270 {
23271 case FUNCTION_DECL:
23272 /* Clear current_function_decl, so that gen_subprogram_die thinks
23273 that this is a declaration. At this point, we just want to force
23274 	     a declaration DIE.  */
23275 save_fn = current_function_decl;
23276 current_function_decl = NULL_TREE;
23277 gen_subprogram_die (decl, context_die);
23278 current_function_decl = save_fn;
23279 break;
23280
23281 case VAR_DECL:
23282 	  /* Set the external flag to force a declaration DIE.  Restore it
23283 	     after the gen_decl_die() call.  */
23284 saved_external_flag = DECL_EXTERNAL (decl);
23285 DECL_EXTERNAL (decl) = 1;
23286 gen_decl_die (decl, NULL, NULL, context_die);
23287 DECL_EXTERNAL (decl) = saved_external_flag;
23288 break;
23289
23290 case NAMESPACE_DECL:
23291 if (dwarf_version >= 3 || !dwarf_strict)
23292 dwarf2out_decl (decl);
23293 else
23294 	    /* DWARF2 has neither DW_TAG_module nor DW_TAG_namespace.  */
23295 decl_die = comp_unit_die ();
23296 break;
23297
23298 case TRANSLATION_UNIT_DECL:
23299 decl_die = comp_unit_die ();
23300 break;
23301
23302 default:
23303 gcc_unreachable ();
23304 }
23305
23306 /* We should be able to find the DIE now. */
23307 if (!decl_die)
23308 decl_die = lookup_decl_die (decl);
23309 gcc_assert (decl_die);
23310 }
23311
23312 return decl_die;
23313 }
23314
23315 /* Returns the DIE for TYPE, which must not be a base type.  A DIE is
23316 always returned. */
23317
23318 static dw_die_ref
23319 force_type_die (tree type)
23320 {
23321 dw_die_ref type_die;
23322
23323 type_die = lookup_type_die (type);
23324 if (!type_die)
23325 {
23326 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
23327
23328 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
23329 false, context_die);
23330 gcc_assert (type_die);
23331 }
23332 return type_die;
23333 }
23334
23335 /* Force out any required namespaces to be able to output DECL,
23336 and return the new context_die for it, if it's changed. */
23337
23338 static dw_die_ref
23339 setup_namespace_context (tree thing, dw_die_ref context_die)
23340 {
23341 tree context = (DECL_P (thing)
23342 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
23343 if (context && TREE_CODE (context) == NAMESPACE_DECL)
23344 /* Force out the namespace. */
23345 context_die = force_decl_die (context);
23346
23347 return context_die;
23348 }
23349
23350 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
23351 type) within its namespace, if appropriate.
23352
23353 For compatibility with older debuggers, namespace DIEs only contain
23354 declarations; all definitions are emitted at CU scope, with
23355 DW_AT_specification pointing to the declaration (like with class
23356 members). */
23357
23358 static dw_die_ref
23359 declare_in_namespace (tree thing, dw_die_ref context_die)
23360 {
23361 dw_die_ref ns_context;
23362
23363 if (debug_info_level <= DINFO_LEVEL_TERSE)
23364 return context_die;
23365
23366 /* External declarations in the local scope only need to be emitted
23367 once, not once in the namespace and once in the scope.
23368
23369 This avoids declaring the `extern' below in the
23370 namespace DIE as well as in the innermost scope:
23371
23372 namespace S
23373 {
23374 int i=5;
23375 int foo()
23376 {
23377 int i=8;
23378 extern int i;
23379 return i;
23380 }
23381 }
23382 */
23383 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
23384 return context_die;
23385
23386 /* If this decl is from an inlined function, then don't try to emit it in its
23387 namespace, as we will get confused. It would have already been emitted
23388      when the abstract instance of the inline function was emitted anyway.  */
23389 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
23390 return context_die;
23391
23392 ns_context = setup_namespace_context (thing, context_die);
23393
23394 if (ns_context != context_die)
23395 {
23396 if (is_fortran ())
23397 return ns_context;
23398 if (DECL_P (thing))
23399 gen_decl_die (thing, NULL, NULL, ns_context);
23400 else
23401 gen_type_die (thing, ns_context);
23402 }
23403 return context_die;
23404 }
23405
23406 /* Generate a DIE for a namespace or namespace alias. */
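/* As an illustration: "namespace A = B;" is expected to become a
   DW_TAG_imported_declaration named "A" whose DW_AT_import refers to the
   DW_TAG_namespace DIE of B, whereas a plain namespace definition gets a
   DW_TAG_namespace (or DW_TAG_module for Fortran) of its own.  */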
23407
23408 static void
23409 gen_namespace_die (tree decl, dw_die_ref context_die)
23410 {
23411 dw_die_ref namespace_die;
23412
23413 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
23414 they are an alias of. */
23415 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
23416 {
23417 /* Output a real namespace or module. */
23418 context_die = setup_namespace_context (decl, comp_unit_die ());
23419 namespace_die = new_die (is_fortran ()
23420 ? DW_TAG_module : DW_TAG_namespace,
23421 context_die, decl);
23422       /* For Fortran modules defined in a different CU, don't add src coords.  */
23423 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
23424 {
23425 const char *name = dwarf2_name (decl, 0);
23426 if (name)
23427 add_name_attribute (namespace_die, name);
23428 }
23429 else
23430 add_name_and_src_coords_attributes (namespace_die, decl);
23431 if (DECL_EXTERNAL (decl))
23432 add_AT_flag (namespace_die, DW_AT_declaration, 1);
23433 equate_decl_number_to_die (decl, namespace_die);
23434 }
23435 else
23436 {
23437 /* Output a namespace alias. */
23438
23439 /* Force out the namespace we are an alias of, if necessary. */
23440 dw_die_ref origin_die
23441 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
23442
23443 if (DECL_FILE_SCOPE_P (decl)
23444 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
23445 context_die = setup_namespace_context (decl, comp_unit_die ());
23446 /* Now create the namespace alias DIE. */
23447 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
23448 add_name_and_src_coords_attributes (namespace_die, decl);
23449 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
23450 equate_decl_number_to_die (decl, namespace_die);
23451 }
23452 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
23453 if (want_pubnames ())
23454 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
23455 }
23456
23457 /* Generate Dwarf debug information for a decl described by DECL.
23458    The return value is currently only meaningful for PARM_DECLs;
23459 for all other decls it returns NULL.
23460
23461 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
23462 It can be NULL otherwise. */
23463
23464 static dw_die_ref
23465 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
23466 dw_die_ref context_die)
23467 {
23468 tree decl_or_origin = decl ? decl : origin;
23469 tree class_origin = NULL, ultimate_origin;
23470
23471 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
23472 return NULL;
23473
23474 /* Ignore pointer bounds decls. */
23475 if (DECL_P (decl_or_origin)
23476 && TREE_TYPE (decl_or_origin)
23477 && POINTER_BOUNDS_P (decl_or_origin))
23478 return NULL;
23479
23480 switch (TREE_CODE (decl_or_origin))
23481 {
23482 case ERROR_MARK:
23483 break;
23484
23485 case CONST_DECL:
23486 if (!is_fortran () && !is_ada ())
23487 {
23488 /* The individual enumerators of an enum type get output when we output
23489 the Dwarf representation of the relevant enum type itself. */
23490 break;
23491 }
23492
23493 /* Emit its type. */
23494 gen_type_die (TREE_TYPE (decl), context_die);
23495
23496 /* And its containing namespace. */
23497 context_die = declare_in_namespace (decl, context_die);
23498
23499 gen_const_die (decl, context_die);
23500 break;
23501
23502 case FUNCTION_DECL:
23503 /* Don't output any DIEs to represent mere function declarations,
23504 unless they are class members or explicit block externs. */
23505 if (DECL_INITIAL (decl_or_origin) == NULL_TREE
23506 && DECL_FILE_SCOPE_P (decl_or_origin)
23507 && (current_function_decl == NULL_TREE
23508 || DECL_ARTIFICIAL (decl_or_origin)))
23509 break;
23510
23511 #if 0
23512 /* FIXME */
23513 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
23514 on local redeclarations of global functions. That seems broken. */
23515 if (current_function_decl != decl)
23516 /* This is only a declaration. */;
23517 #endif
23518
23519 /* If we're emitting a clone, emit info for the abstract instance. */
23520 if (origin || DECL_ORIGIN (decl) != decl)
23521 dwarf2out_abstract_function (origin
23522 ? DECL_ORIGIN (origin)
23523 : DECL_ABSTRACT_ORIGIN (decl));
23524
23525 /* If we're emitting an out-of-line copy of an inline function,
23526 emit info for the abstract instance and set up to refer to it. */
23527 else if (cgraph_function_possibly_inlined_p (decl)
23528 && ! DECL_ABSTRACT_P (decl)
23529 && ! class_or_namespace_scope_p (context_die)
23530 /* dwarf2out_abstract_function won't emit a die if this is just
23531 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
23532 that case, because that works only if we have a die. */
23533 && DECL_INITIAL (decl) != NULL_TREE)
23534 {
23535 dwarf2out_abstract_function (decl);
23536 set_decl_origin_self (decl);
23537 }
23538
23539 /* Otherwise we're emitting the primary DIE for this decl. */
23540 else if (debug_info_level > DINFO_LEVEL_TERSE)
23541 {
23542 /* Before we describe the FUNCTION_DECL itself, make sure that we
23543 have its containing type. */
23544 if (!origin)
23545 origin = decl_class_context (decl);
23546 if (origin != NULL_TREE)
23547 gen_type_die (origin, context_die);
23548
23549 /* And its return type. */
23550 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
23551
23552 /* And its virtual context. */
23553 if (DECL_VINDEX (decl) != NULL_TREE)
23554 gen_type_die (DECL_CONTEXT (decl), context_die);
23555
23556 /* Make sure we have a member DIE for decl. */
23557 if (origin != NULL_TREE)
23558 gen_type_die_for_member (origin, decl, context_die);
23559
23560 /* And its containing namespace. */
23561 context_die = declare_in_namespace (decl, context_die);
23562 }
23563
23564 /* Now output a DIE to represent the function itself. */
23565 if (decl)
23566 gen_subprogram_die (decl, context_die);
23567 break;
23568
23569 case TYPE_DECL:
23570 /* If we are in terse mode, don't generate any DIEs to represent any
23571 actual typedefs. */
23572 if (debug_info_level <= DINFO_LEVEL_TERSE)
23573 break;
23574
23575 /* In the special case of a TYPE_DECL node representing the declaration
23576 of some type tag, if the given TYPE_DECL is marked as having been
23577 instantiated from some other (original) TYPE_DECL node (e.g. one which
23578 was generated within the original definition of an inline function) we
23579 used to generate a special (abbreviated) DW_TAG_structure_type,
23580 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
23581 should be actually referencing those DIEs, as variable DIEs with that
23582 type would be emitted already in the abstract origin, so it was always
23583 	 removed during unused type pruning.  Don't add anything in this
23584 case. */
23585 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
23586 break;
23587
23588 if (is_redundant_typedef (decl))
23589 gen_type_die (TREE_TYPE (decl), context_die);
23590 else
23591 /* Output a DIE to represent the typedef itself. */
23592 gen_typedef_die (decl, context_die);
23593 break;
23594
23595 case LABEL_DECL:
23596 if (debug_info_level >= DINFO_LEVEL_NORMAL)
23597 gen_label_die (decl, context_die);
23598 break;
23599
23600 case VAR_DECL:
23601 case RESULT_DECL:
23602 /* If we are in terse mode, don't generate any DIEs to represent any
23603 variable declarations or definitions. */
23604 if (debug_info_level <= DINFO_LEVEL_TERSE)
23605 break;
23606
23607 /* Output any DIEs that are needed to specify the type of this data
23608 object. */
23609 if (decl_by_reference_p (decl_or_origin))
23610 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
23611 else
23612 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
23613
23614 /* And its containing type. */
23615 class_origin = decl_class_context (decl_or_origin);
23616 if (class_origin != NULL_TREE)
23617 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
23618
23619 /* And its containing namespace. */
23620 context_die = declare_in_namespace (decl_or_origin, context_die);
23621
23622 /* Now output the DIE to represent the data object itself. This gets
23623 complicated because of the possibility that the VAR_DECL really
23624 represents an inlined instance of a formal parameter for an inline
23625 function. */
23626 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23627 if (ultimate_origin != NULL_TREE
23628 && TREE_CODE (ultimate_origin) == PARM_DECL)
23629 gen_formal_parameter_die (decl, origin,
23630 true /* Emit name attribute. */,
23631 context_die);
23632 else
23633 gen_variable_die (decl, origin, context_die);
23634 break;
23635
23636 case FIELD_DECL:
23637 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
23638 /* Ignore the nameless fields that are used to skip bits but handle C++
23639 anonymous unions and structs. */
23640 if (DECL_NAME (decl) != NULL_TREE
23641 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
23642 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
23643 {
23644 gen_type_die (member_declared_type (decl), context_die);
23645 gen_field_die (decl, ctx, context_die);
23646 }
23647 break;
23648
23649 case PARM_DECL:
23650 if (DECL_BY_REFERENCE (decl_or_origin))
23651 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
23652 else
23653 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
23654 return gen_formal_parameter_die (decl, origin,
23655 true /* Emit name attribute. */,
23656 context_die);
23657
23658 case NAMESPACE_DECL:
23659 if (dwarf_version >= 3 || !dwarf_strict)
23660 gen_namespace_die (decl, context_die);
23661 break;
23662
23663 case IMPORTED_DECL:
23664 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
23665 DECL_CONTEXT (decl), context_die);
23666 break;
23667
23668 case NAMELIST_DECL:
23669 gen_namelist_decl (DECL_NAME (decl), context_die,
23670 NAMELIST_DECL_ASSOCIATED_DECL (decl));
23671 break;
23672
23673 default:
23674 /* Probably some frontend-internal decl. Assume we don't care. */
23675 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
23676 break;
23677 }
23678
23679 return NULL;
23680 }
23681 \f
23682 /* Output initial debug information for global DECL. Called at the
23683 end of the parsing process.
23684
23685 This is the initial debug generation process. As such, the DIEs
23686 generated may be incomplete. A later debug generation pass
23687 (dwarf2out_late_global_decl) will augment the information generated
23688 in this pass (e.g., with complete location info). */
23689
23690 static void
23691 dwarf2out_early_global_decl (tree decl)
23692 {
23693 set_early_dwarf s;
23694
23695 /* gen_decl_die() will set DECL_ABSTRACT because
23696      cgraph_function_possibly_inlined_p() returns true.  This in
23697 turn will cause DW_AT_inline attributes to be set.
23698
23699 This happens because at early dwarf generation, there is no
23700 cgraph information, causing cgraph_function_possibly_inlined_p()
23701 to return true. Trick cgraph_function_possibly_inlined_p()
23702 while we generate dwarf early. */
23703 bool save = symtab->global_info_ready;
23704 symtab->global_info_ready = true;
23705
23706 /* We don't handle TYPE_DECLs. If required, they'll be reached via
23707 other DECLs and they can point to template types or other things
23708 that dwarf2out can't handle when done via dwarf2out_decl. */
23709 if (TREE_CODE (decl) != TYPE_DECL
23710 && TREE_CODE (decl) != PARM_DECL)
23711 {
23712 tree save_fndecl = current_function_decl;
23713 if (TREE_CODE (decl) == FUNCTION_DECL)
23714 {
23715 /* No cfun means the symbol has no body, so there's nothing
23716 to emit. */
23717 if (!DECL_STRUCT_FUNCTION (decl))
23718 goto early_decl_exit;
23719
23720 current_function_decl = decl;
23721 }
23722 dwarf2out_decl (decl);
23723 if (TREE_CODE (decl) == FUNCTION_DECL)
23724 current_function_decl = save_fndecl;
23725 }
23726 early_decl_exit:
23727 symtab->global_info_ready = save;
23728 }
23729
23730 /* Output debug information for global decl DECL. Called from
23731 toplev.c after compilation proper has finished. */
23732
23733 static void
23734 dwarf2out_late_global_decl (tree decl)
23735 {
23736 /* We have to generate early debug late for LTO. */
23737 if (in_lto_p)
23738 dwarf2out_early_global_decl (decl);
23739
23740   /* Fill in any location information we were unable to determine
23741 on the first pass. */
23742 if (TREE_CODE (decl) == VAR_DECL
23743 && !POINTER_BOUNDS_P (decl))
23744 {
23745 dw_die_ref die = lookup_decl_die (decl);
23746 if (die)
23747 add_location_or_const_value_attribute (die, decl, false);
23748 }
23749 }
23750
23751 /* Output debug information for type decl DECL. Called from toplev.c
23752 and from language front ends (to record built-in types). */
23753 static void
23754 dwarf2out_type_decl (tree decl, int local)
23755 {
23756 if (!local)
23757 {
23758 set_early_dwarf s;
23759 dwarf2out_decl (decl);
23760 }
23761 }
23762
23763 /* Output debug information for imported module or decl DECL.
23764 NAME is non-NULL name in the lexical block if the decl has been renamed.
23765 LEXICAL_BLOCK is the lexical block (which TREE_CODE is a BLOCK)
23766 that DECL belongs to.
23767 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
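/* Illustrative inputs (a sketch, not tied to a particular testcase): a C++
   "using namespace N;" is expected to end up as a DW_TAG_imported_module,
   while "using N::foo;" becomes a DW_TAG_imported_declaration; in both cases
   DW_AT_import points at the DIE of the imported entity.  */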
23768 static void
23769 dwarf2out_imported_module_or_decl_1 (tree decl,
23770 tree name,
23771 tree lexical_block,
23772 dw_die_ref lexical_block_die)
23773 {
23774 expanded_location xloc;
23775 dw_die_ref imported_die = NULL;
23776 dw_die_ref at_import_die;
23777
23778 if (TREE_CODE (decl) == IMPORTED_DECL)
23779 {
23780 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
23781 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
23782 gcc_assert (decl);
23783 }
23784 else
23785 xloc = expand_location (input_location);
23786
23787 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
23788 {
23789 at_import_die = force_type_die (TREE_TYPE (decl));
23790 /* For namespace N { typedef void T; } using N::T; base_type_die
23791 returns NULL, but DW_TAG_imported_declaration requires
23792 	 the DW_AT_import attribute.  Force creation of DW_TAG_typedef.  */
23793 if (!at_import_die)
23794 {
23795 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
23796 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
23797 at_import_die = lookup_type_die (TREE_TYPE (decl));
23798 gcc_assert (at_import_die);
23799 }
23800 }
23801 else
23802 {
23803 at_import_die = lookup_decl_die (decl);
23804 if (!at_import_die)
23805 {
23806 /* If we're trying to avoid duplicate debug info, we may not have
23807 emitted the member decl for this field. Emit it now. */
23808 if (TREE_CODE (decl) == FIELD_DECL)
23809 {
23810 tree type = DECL_CONTEXT (decl);
23811
23812 if (TYPE_CONTEXT (type)
23813 && TYPE_P (TYPE_CONTEXT (type))
23814 && !should_emit_struct_debug (TYPE_CONTEXT (type),
23815 DINFO_USAGE_DIR_USE))
23816 return;
23817 gen_type_die_for_member (type, decl,
23818 get_context_die (TYPE_CONTEXT (type)));
23819 }
23820 if (TREE_CODE (decl) == NAMELIST_DECL)
23821 at_import_die = gen_namelist_decl (DECL_NAME (decl),
23822 get_context_die (DECL_CONTEXT (decl)),
23823 NULL_TREE);
23824 else
23825 at_import_die = force_decl_die (decl);
23826 }
23827 }
23828
23829 if (TREE_CODE (decl) == NAMESPACE_DECL)
23830 {
23831 if (dwarf_version >= 3 || !dwarf_strict)
23832 imported_die = new_die (DW_TAG_imported_module,
23833 lexical_block_die,
23834 lexical_block);
23835 else
23836 return;
23837 }
23838 else
23839 imported_die = new_die (DW_TAG_imported_declaration,
23840 lexical_block_die,
23841 lexical_block);
23842
23843 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
23844 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
23845 if (name)
23846 add_AT_string (imported_die, DW_AT_name,
23847 IDENTIFIER_POINTER (name));
23848 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
23849 }
23850
23851 /* Output debug information for imported module or decl DECL.
23852 NAME is non-NULL name in context if the decl has been renamed.
23853 CHILD is true if decl is one of the renamed decls as part of
23854    importing a whole module.  */
23855
23856 static void
23857 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
23858 bool child)
23859 {
23860 /* dw_die_ref at_import_die; */
23861 dw_die_ref scope_die;
23862
23863 if (debug_info_level <= DINFO_LEVEL_TERSE)
23864 return;
23865
23866 gcc_assert (decl);
23867
23868 set_early_dwarf s;
23869
23870   /* To emit DW_TAG_imported_module or DW_TAG_imported_declaration, we need
23871      two DIEs: the DIE for the decl itself (for the reference) and the scope
23872      DIE.  First, get the DIE for the decl itself.  */
23873
23874   /* Get the scope DIE for the decl context.  Use comp_unit_die for a global
23875      module or decl.  If a DIE is not found for non-globals, force a new DIE.  */
23876 if (context
23877 && TYPE_P (context)
23878 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
23879 return;
23880
23881 if (!(dwarf_version >= 3 || !dwarf_strict))
23882 return;
23883
23884 scope_die = get_context_die (context);
23885
23886 if (child)
23887 {
23888 gcc_assert (scope_die->die_child);
23889 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
23890 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
23891 scope_die = scope_die->die_child;
23892 }
23893
23894 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
23895 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
23896 }
23897
23898 /* Output debug information for namelists. */
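/* A sketch for illustration: a Fortran "NAMELIST /CASEDAT/ A, B" is expected
   to yield a DW_TAG_namelist DIE for the namelist itself plus one
   DW_TAG_namelist_item child per member, each referring back to that
   member's DIE.  */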
23899
23900 static dw_die_ref
23901 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
23902 {
23903 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
23904 tree value;
23905 unsigned i;
23906
23907 if (debug_info_level <= DINFO_LEVEL_TERSE)
23908 return NULL;
23909
23910 gcc_assert (scope_die != NULL);
23911 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
23912 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
23913
23914 /* If there are no item_decls, we have a nondefining namelist, e.g.
23915 with USE association; hence, set DW_AT_declaration. */
23916 if (item_decls == NULL_TREE)
23917 {
23918 add_AT_flag (nml_die, DW_AT_declaration, 1);
23919 return nml_die;
23920 }
23921
23922 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
23923 {
23924 nml_item_ref_die = lookup_decl_die (value);
23925 if (!nml_item_ref_die)
23926 nml_item_ref_die = force_decl_die (value);
23927
23928 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
23929 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
23930 }
23931 return nml_die;
23932 }
23933
23934
23935 /* Write the debugging output for DECL.  */
23936
23937 static void
23938 dwarf2out_decl (tree decl)
23939 {
23940 dw_die_ref context_die = comp_unit_die ();
23941
23942 switch (TREE_CODE (decl))
23943 {
23944 case ERROR_MARK:
23945 return;
23946
23947 case FUNCTION_DECL:
23948 /* What we would really like to do here is to filter out all mere
23949 file-scope declarations of file-scope functions which are never
23950 	 referenced later within this translation unit (and keep all of the ones
23951 that *are* referenced later on) but we aren't clairvoyant, so we have
23952 no idea which functions will be referenced in the future (i.e. later
23953 on within the current translation unit). So here we just ignore all
23954 file-scope function declarations which are not also definitions. If
23955 and when the debugger needs to know something about these functions,
23956 it will have to hunt around and find the DWARF information associated
23957 with the definition of the function.
23958
23959 We can't just check DECL_EXTERNAL to find out which FUNCTION_DECL
23960 nodes represent definitions and which ones represent mere
23961 declarations. We have to check DECL_INITIAL instead. That's because
23962 the C front-end supports some weird semantics for "extern inline"
23963 function definitions. These can get inlined within the current
23964 translation unit (and thus, we need to generate Dwarf info for their
23965 abstract instances so that the Dwarf info for the concrete inlined
23966 instances can have something to refer to) but the compiler never
23967 	 generates any out-of-line instances of such things (despite the fact
23968 that they *are* definitions).
23969
23970 The important point is that the C front-end marks these "extern
23971 inline" functions as DECL_EXTERNAL, but we need to generate DWARF for
23972 them anyway. Note that the C++ front-end also plays some similar games
23973 for inline function definitions appearing within include files which
23974 also contain `#pragma interface' pragmas.
23975
23976 If we are called from dwarf2out_abstract_function output a DIE
23977 anyway. We can end up here this way with early inlining and LTO
23978 where the inlined function is output in a different LTRANS unit
23979 or not at all. */
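      /* For instance (illustrative, under the GNU89 inline model),

	   extern inline int f (void) { return 0; }

	 keeps DECL_EXTERNAL set even though DECL_INITIAL is non-NULL, so the
	 check below still lets us emit the abstract instance for it.  */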
23980 if (DECL_INITIAL (decl) == NULL_TREE
23981 && ! DECL_ABSTRACT_P (decl))
23982 return;
23983
23984 /* If we're a nested function, initially use a parent of NULL; if we're
23985 a plain function, this will be fixed up in decls_for_scope. If
23986 we're a method, it will be ignored, since we already have a DIE. */
23987 if (decl_function_context (decl)
23988 /* But if we're in terse mode, we don't care about scope. */
23989 && debug_info_level > DINFO_LEVEL_TERSE)
23990 context_die = NULL;
23991 break;
23992
23993 case VAR_DECL:
23994       /* For local statics, look up the proper context DIE.  */
23995 if (local_function_static (decl))
23996 context_die = lookup_decl_die (DECL_CONTEXT (decl));
23997
23998 /* If we are in terse mode, don't generate any DIEs to represent any
23999 variable declarations or definitions. */
24000 if (debug_info_level <= DINFO_LEVEL_TERSE)
24001 return;
24002 break;
24003
24004 case CONST_DECL:
24005 if (debug_info_level <= DINFO_LEVEL_TERSE)
24006 return;
24007 if (!is_fortran () && !is_ada ())
24008 return;
24009 if (TREE_STATIC (decl) && decl_function_context (decl))
24010 context_die = lookup_decl_die (DECL_CONTEXT (decl));
24011 break;
24012
24013 case NAMESPACE_DECL:
24014 case IMPORTED_DECL:
24015 if (debug_info_level <= DINFO_LEVEL_TERSE)
24016 return;
24017 if (lookup_decl_die (decl) != NULL)
24018 return;
24019 break;
24020
24021 case TYPE_DECL:
24022 /* Don't emit stubs for types unless they are needed by other DIEs. */
24023 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
24024 return;
24025
24026 /* Don't bother trying to generate any DIEs to represent any of the
24027 normal built-in types for the language we are compiling. */
24028 if (DECL_IS_BUILTIN (decl))
24029 return;
24030
24031 /* If we are in terse mode, don't generate any DIEs for types. */
24032 if (debug_info_level <= DINFO_LEVEL_TERSE)
24033 return;
24034
24035 /* If we're a function-scope tag, initially use a parent of NULL;
24036 this will be fixed up in decls_for_scope. */
24037 if (decl_function_context (decl))
24038 context_die = NULL;
24039
24040 break;
24041
24042 case NAMELIST_DECL:
24043 break;
24044
24045 default:
24046 return;
24047 }
24048
24049 gen_decl_die (decl, NULL, NULL, context_die);
24050
24051 if (flag_checking)
24052 {
24053 dw_die_ref die = lookup_decl_die (decl);
24054 if (die)
24055 check_die (die);
24056 }
24057 }
24058
24059 /* Write the debugging output for DECL. */
24060
24061 static void
24062 dwarf2out_function_decl (tree decl)
24063 {
24064 dwarf2out_decl (decl);
24065 call_arg_locations = NULL;
24066 call_arg_loc_last = NULL;
24067 call_site_count = -1;
24068 tail_call_site_count = -1;
24069 decl_loc_table->empty ();
24070 cached_dw_loc_list_table->empty ();
24071 }
24072
24073 /* Output a marker (i.e. a label) for the beginning of the generated code for
24074 a lexical block. */
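/* As an illustration (not authoritative; the exact spelling is
   target-dependent): with the default BLOCK_BEGIN_LABEL of "LBB" and a
   "." local-label prefix, entering lexical block number 3 emits a label
   such as

       .LBB3:

   which the DW_TAG_lexical_block DIE later uses as its low PC.  */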
24075
24076 static void
24077 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
24078 unsigned int blocknum)
24079 {
24080 switch_to_section (current_function_section ());
24081 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
24082 }
24083
24084 /* Output a marker (i.e. a label) for the end of the generated code for a
24085 lexical block. */
24086
24087 static void
24088 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
24089 {
24090 switch_to_section (current_function_section ());
24091 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
24092 }
24093
24094 /* Returns nonzero if it is appropriate not to emit any debugging
24095 information for BLOCK, because it doesn't contain any instructions.
24096
24097 Don't allow this for blocks with nested functions or local classes
24098 as we would end up with orphans, and in the presence of scheduling
24099 we may end up calling them anyway. */
24100
24101 static bool
24102 dwarf2out_ignore_block (const_tree block)
24103 {
24104 tree decl;
24105 unsigned int i;
24106
24107 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
24108 if (TREE_CODE (decl) == FUNCTION_DECL
24109 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
24110 return 0;
24111 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
24112 {
24113 decl = BLOCK_NONLOCALIZED_VAR (block, i);
24114 if (TREE_CODE (decl) == FUNCTION_DECL
24115 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
24116 return 0;
24117 }
24118
24119 return 1;
24120 }
24121
24122 /* Hash table routines for the file table. */
24123
24124 bool
24125 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
24126 {
24127 return filename_cmp (p1->filename, p2) == 0;
24128 }
24129
24130 hashval_t
24131 dwarf_file_hasher::hash (dwarf_file_data *p)
24132 {
24133 return htab_hash_string (p->filename);
24134 }
24135
24136 /* Look up FILE_NAME (in the list of filenames that we know about here in
24137 dwarf2out.c) and return its "index". The index of each (known) filename is
24138 just a unique number which is associated with only that one filename. We
24139 need such numbers for the sake of generating labels (in the .debug_sfnames
24140 section) and references to those file numbers (in the .debug_srcinfo
24141 and .debug_macinfo sections). If the filename given as an argument is not
24142 found in our current list, add it to the list and assign it the next
24143 available unique index number. */
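/* For example (a sketch of the intended behavior, not new semantics):
   the first call with "foo.c" allocates a fresh dwarf_file_data entry
   for that string; later calls with an equal string return the same
   entry, and maybe_emit_file below is what eventually assigns the
   entry its emitted file number.  */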
24144
24145 static struct dwarf_file_data *
24146 lookup_filename (const char *file_name)
24147 {
24148 struct dwarf_file_data * created;
24149
24150 if (!file_name)
24151 return NULL;
24152
24153 dwarf_file_data **slot
24154 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
24155 INSERT);
24156 if (*slot)
24157 return *slot;
24158
24159 created = ggc_alloc<dwarf_file_data> ();
24160 created->filename = file_name;
24161 created->emitted_number = 0;
24162 *slot = created;
24163 return created;
24164 }
24165
24166 /* If the assembler will construct the file table, then translate the compiler
24167 internal file table number into the assembler file table number, and emit
24168 a .file directive if we haven't already emitted one. The file table
24169 numbers are different because we prune debug info for unused variables and
24170 types, which may include filenames. */
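/* For instance (illustrative only, with hypothetical file names): when
   DWARF2_ASM_LINE_DEBUG_INFO is in effect, the first two distinct files
   reaching this function produce something like

       .file 1 "foo.c"
       .file 2 "bar.h"

   and subsequent .loc directives then refer to files 1 and 2.  */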
24171
24172 static int
24173 maybe_emit_file (struct dwarf_file_data * fd)
24174 {
24175 if (! fd->emitted_number)
24176 {
24177 if (last_emitted_file)
24178 fd->emitted_number = last_emitted_file->emitted_number + 1;
24179 else
24180 fd->emitted_number = 1;
24181 last_emitted_file = fd;
24182
24183 if (DWARF2_ASM_LINE_DEBUG_INFO)
24184 {
24185 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
24186 output_quoted_string (asm_out_file,
24187 remap_debug_filename (fd->filename));
24188 fputc ('\n', asm_out_file);
24189 }
24190 }
24191
24192 return fd->emitted_number;
24193 }
24194
24195 /* Schedule generation of a DW_AT_const_value attribute to DIE.
24196 That generation should happen after function debug info has been
24197 generated. The value of the attribute is the constant value of ARG. */
24198
24199 static void
24200 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
24201 {
24202 die_arg_entry entry;
24203
24204 if (!die || !arg)
24205 return;
24206
24207 gcc_assert (early_dwarf);
24208
24209 if (!tmpl_value_parm_die_table)
24210 vec_alloc (tmpl_value_parm_die_table, 32);
24211
24212 entry.die = die;
24213 entry.arg = arg;
24214 vec_safe_push (tmpl_value_parm_die_table, entry);
24215 }
24216
24217 /* Return TRUE if T is an instance of a generic type, FALSE
24218 otherwise. */
24219
24220 static bool
24221 generic_type_p (tree t)
24222 {
24223 if (t == NULL_TREE || !TYPE_P (t))
24224 return false;
24225 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
24226 }
24227
24228 /* Schedule the generation of the generic parameter dies for the
24229 instance of the generic type T. The generation itself is done later
24230 by gen_scheduled_generic_parms_dies. */
24231
24232 static void
24233 schedule_generic_params_dies_gen (tree t)
24234 {
24235 if (!generic_type_p (t))
24236 return;
24237
24238 gcc_assert (early_dwarf);
24239
24240 if (!generic_type_instances)
24241 vec_alloc (generic_type_instances, 256);
24242
24243 vec_safe_push (generic_type_instances, t);
24244 }
24245
24246 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
24247 by append_entry_to_tmpl_value_parm_die_table. This function must
24248 be called after function DIEs have been generated. */
24249
24250 static void
24251 gen_remaining_tmpl_value_param_die_attribute (void)
24252 {
24253 if (tmpl_value_parm_die_table)
24254 {
24255 unsigned i, j;
24256 die_arg_entry *e;
24257
24258 /* We do this in two phases - first get the cases we can
24259 handle during early-finish, preserving those we cannot
24260 (containing symbolic constants where we don't yet know
24261 whether we are going to output the referenced symbols).
24262 For those we try again at late-finish. */
24263 j = 0;
24264 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
24265 {
24266 if (!tree_add_const_value_attribute (e->die, e->arg))
24267 {
24268 dw_loc_descr_ref loc = NULL;
24269 if (dwarf_version >= 5 || !dwarf_strict)
24270 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
24271 if (loc)
24272 add_AT_loc (e->die, DW_AT_location, loc);
24273 else
24274 (*tmpl_value_parm_die_table)[j++] = *e;
24275 }
24276 }
24277 tmpl_value_parm_die_table->truncate (j);
24278 }
24279 }
24280
24281 /* Generate generic parameters DIEs for instances of generic types
24282 that have been previously scheduled by
24283 schedule_generic_params_dies_gen. This function must be called
24284 after all the types of the CU have been laid out. */
24285
24286 static void
24287 gen_scheduled_generic_parms_dies (void)
24288 {
24289 unsigned i;
24290 tree t;
24291
24292 if (!generic_type_instances)
24293 return;
24294
24295 /* We end up "recursing" into schedule_generic_params_dies_gen, so
24296 pretend this generation is part of "early dwarf" as well. */
24297 set_early_dwarf s;
24298
24299 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
24300 if (COMPLETE_TYPE_P (t))
24301 gen_generic_params_dies (t);
24302
24303 generic_type_instances = NULL;
24304 }
24305
24306
24307 /* Replace the DW_AT_name attribute of DECL with NAME. */
24308
24309 static void
24310 dwarf2out_set_name (tree decl, tree name)
24311 {
24312 dw_die_ref die;
24313 dw_attr_node *attr;
24314 const char *dname;
24315
24316 die = TYPE_SYMTAB_DIE (decl);
24317 if (!die)
24318 return;
24319
24320 dname = dwarf2_name (name, 0);
24321 if (!dname)
24322 return;
24323
24324 attr = get_AT (die, DW_AT_name);
24325 if (attr)
24326 {
24327 struct indirect_string_node *node;
24328
24329 node = find_AT_string (dname);
24330 /* Replace the string. */
24331 attr->dw_attr_val.v.val_str = node;
24332 }
24333
24334 else
24335 add_name_attribute (die, dname);
24336 }
24337
24338 /* True if before or during processing of the first function being emitted. */
24339 static bool in_first_function_p = true;
24340 /* True if the loc_note during a dwarf2out_var_location call might still be
24341 before the first real instruction, at an address equal to .Ltext0. */
24342 static bool maybe_at_text_label_p = true;
24343 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
24344 static unsigned int first_loclabel_num_not_at_text_label;
24345
24346 /* Called by the final INSN scan whenever we see a var location. We
24347 use it to drop labels in the right places, and throw the location in
24348 our lookup table. */
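/* A reader's note: the labels dropped here are of the form .LVL0, .LVL1,
   ... (modulo the target's local-label prefix), and they later delimit
   the address ranges of .debug_loc location list entries.  */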
24349
24350 static void
24351 dwarf2out_var_location (rtx_insn *loc_note)
24352 {
24353 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
24354 struct var_loc_node *newloc;
24355 rtx_insn *next_real, *next_note;
24356 rtx_insn *call_insn = NULL;
24357 static const char *last_label;
24358 static const char *last_postcall_label;
24359 static bool last_in_cold_section_p;
24360 static rtx_insn *expected_next_loc_note;
24361 tree decl;
24362 bool var_loc_p;
24363
24364 if (!NOTE_P (loc_note))
24365 {
24366 if (CALL_P (loc_note))
24367 {
24368 call_site_count++;
24369 if (SIBLING_CALL_P (loc_note))
24370 tail_call_site_count++;
24371 if (optimize == 0 && !flag_var_tracking)
24372 {
24373 /* When the var-tracking pass is not running, there is no note
24374 for indirect calls whose target is compile-time known. In this
24375 case, process such calls specifically so that we generate call
24376 sites for them anyway. */
24377 rtx x = PATTERN (loc_note);
24378 if (GET_CODE (x) == PARALLEL)
24379 x = XVECEXP (x, 0, 0);
24380 if (GET_CODE (x) == SET)
24381 x = SET_SRC (x);
24382 if (GET_CODE (x) == CALL)
24383 x = XEXP (x, 0);
24384 if (!MEM_P (x)
24385 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
24386 || !SYMBOL_REF_DECL (XEXP (x, 0))
24387 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
24388 != FUNCTION_DECL))
24389 {
24390 call_insn = loc_note;
24391 loc_note = NULL;
24392 var_loc_p = false;
24393
24394 next_real = next_real_insn (call_insn);
24395 next_note = NULL;
24396 cached_next_real_insn = NULL;
24397 goto create_label;
24398 }
24399 }
24400 }
24401 return;
24402 }
24403
24404 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
24405 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
24406 return;
24407
24408 /* Optimize processing a large consecutive sequence of location
24409 notes so we don't spend too much time in next_real_insn. If the
24410 next insn is another location note, remember the next_real_insn
24411 calculation for next time. */
24412 next_real = cached_next_real_insn;
24413 if (next_real)
24414 {
24415 if (expected_next_loc_note != loc_note)
24416 next_real = NULL;
24417 }
24418
24419 next_note = NEXT_INSN (loc_note);
24420 if (! next_note
24421 || next_note->deleted ()
24422 || ! NOTE_P (next_note)
24423 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
24424 && NOTE_KIND (next_note) != NOTE_INSN_CALL_ARG_LOCATION))
24425 next_note = NULL;
24426
24427 if (! next_real)
24428 next_real = next_real_insn (loc_note);
24429
24430 if (next_note)
24431 {
24432 expected_next_loc_note = next_note;
24433 cached_next_real_insn = next_real;
24434 }
24435 else
24436 cached_next_real_insn = NULL;
24437
24438 /* If there are no instructions which would be affected by this note,
24439 don't do anything. */
24440 if (var_loc_p
24441 && next_real == NULL_RTX
24442 && !NOTE_DURING_CALL_P (loc_note))
24443 return;
24444
24445 create_label:
24446
24447 if (next_real == NULL_RTX)
24448 next_real = get_last_insn ();
24449
24450 /* If there were any real insns between the note we processed last time
24451 and this note (or if this is the first note), clear
24452 last_{,postcall_}label so that they are not reused this time. */
24453 if (last_var_location_insn == NULL_RTX
24454 || last_var_location_insn != next_real
24455 || last_in_cold_section_p != in_cold_section_p)
24456 {
24457 last_label = NULL;
24458 last_postcall_label = NULL;
24459 }
24460
24461 if (var_loc_p)
24462 {
24463 decl = NOTE_VAR_LOCATION_DECL (loc_note);
24464 newloc = add_var_loc_to_decl (decl, loc_note,
24465 NOTE_DURING_CALL_P (loc_note)
24466 ? last_postcall_label : last_label);
24467 if (newloc == NULL)
24468 return;
24469 }
24470 else
24471 {
24472 decl = NULL_TREE;
24473 newloc = NULL;
24474 }
24475
24476 /* If there were no real insns between the note we processed last time
24477 and this note, use the label we emitted last time. Otherwise
24478 create a new label and emit it. */
24479 if (last_label == NULL)
24480 {
24481 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
24482 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
24483 loclabel_num++;
24484 last_label = ggc_strdup (loclabel);
24485 /* See if loclabel might be equal to .Ltext0. If yes,
24486 bump first_loclabel_num_not_at_text_label. */
24487 if (!have_multiple_function_sections
24488 && in_first_function_p
24489 && maybe_at_text_label_p)
24490 {
24491 static rtx_insn *last_start;
24492 rtx_insn *insn;
24493 for (insn = loc_note; insn; insn = previous_insn (insn))
24494 if (insn == last_start)
24495 break;
24496 else if (!NONDEBUG_INSN_P (insn))
24497 continue;
24498 else
24499 {
24500 rtx body = PATTERN (insn);
24501 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
24502 continue;
24503 /* Inline asm could occupy zero bytes. */
24504 else if (GET_CODE (body) == ASM_INPUT
24505 || asm_noperands (body) >= 0)
24506 continue;
24507 #ifdef HAVE_attr_length
24508 else if (get_attr_min_length (insn) == 0)
24509 continue;
24510 #endif
24511 else
24512 {
24513 /* Assume insn has non-zero length. */
24514 maybe_at_text_label_p = false;
24515 break;
24516 }
24517 }
24518 if (maybe_at_text_label_p)
24519 {
24520 last_start = loc_note;
24521 first_loclabel_num_not_at_text_label = loclabel_num;
24522 }
24523 }
24524 }
24525
24526 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
24527 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
24528
24529 if (!var_loc_p)
24530 {
24531 struct call_arg_loc_node *ca_loc
24532 = ggc_cleared_alloc<call_arg_loc_node> ();
24533 rtx_insn *prev
24534 = loc_note != NULL_RTX ? prev_real_insn (loc_note) : call_insn;
24535
24536 ca_loc->call_arg_loc_note = loc_note;
24537 ca_loc->next = NULL;
24538 ca_loc->label = last_label;
24539 gcc_assert (prev
24540 && (CALL_P (prev)
24541 || (NONJUMP_INSN_P (prev)
24542 && GET_CODE (PATTERN (prev)) == SEQUENCE
24543 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
24544 if (!CALL_P (prev))
24545 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
24546 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
24547
24548 /* Look for a SYMBOL_REF in the "prev" instruction. */
24549 rtx x = get_call_rtx_from (PATTERN (prev));
24550 if (x)
24551 {
24552 /* Try to get the call symbol, if any. */
24553 if (MEM_P (XEXP (x, 0)))
24554 x = XEXP (x, 0);
24555 /* First, look for a memory access to a symbol_ref. */
24556 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
24557 && SYMBOL_REF_DECL (XEXP (x, 0))
24558 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
24559 ca_loc->symbol_ref = XEXP (x, 0);
24560 /* Otherwise, look at a compile-time known user-level function
24561 declaration. */
24562 else if (MEM_P (x)
24563 && MEM_EXPR (x)
24564 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
24565 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
24566 }
24567
24568 ca_loc->block = insn_scope (prev);
24569 if (call_arg_locations)
24570 call_arg_loc_last->next = ca_loc;
24571 else
24572 call_arg_locations = ca_loc;
24573 call_arg_loc_last = ca_loc;
24574 }
24575 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
24576 newloc->label = last_label;
24577 else
24578 {
24579 if (!last_postcall_label)
24580 {
24581 sprintf (loclabel, "%s-1", last_label);
24582 last_postcall_label = ggc_strdup (loclabel);
24583 }
24584 newloc->label = last_postcall_label;
24585 }
24586
24587 last_var_location_insn = next_real;
24588 last_in_cold_section_p = in_cold_section_p;
24589 }
24590
24591 /* Called from finalize_size_functions for size functions so that their body
24592 can be encoded in the debug info to describe the layout of variable-length
24593 structures. */
24594
24595 static void
24596 dwarf2out_size_function (tree decl)
24597 {
24598 function_to_dwarf_procedure (decl);
24599 }
24600
24601 /* Note in one location list that the text section has changed. */
24602
24603 int
24604 var_location_switch_text_section_1 (var_loc_list **slot, void *)
24605 {
24606 var_loc_list *list = *slot;
24607 if (list->first)
24608 list->last_before_switch
24609 = list->last->next ? list->last->next : list->last;
24610 return 1;
24611 }
24612
24613 /* Note in all location lists that the text section has changed. */
24614
24615 static void
24616 var_location_switch_text_section (void)
24617 {
24618 if (decl_loc_table == NULL)
24619 return;
24620
24621 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
24622 }
24623
24624 /* Create a new line number table. */
24625
24626 static dw_line_info_table *
24627 new_line_info_table (void)
24628 {
24629 dw_line_info_table *table;
24630
24631 table = ggc_cleared_alloc<dw_line_info_table> ();
24632 table->file_num = 1;
24633 table->line_num = 1;
24634 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
24635
24636 return table;
24637 }
24638
24639 /* Look up the "current" table into which we emit line info, so
24640 that we don't have to do it for every source line. */
24641
24642 static void
24643 set_cur_line_info_table (section *sec)
24644 {
24645 dw_line_info_table *table;
24646
24647 if (sec == text_section)
24648 table = text_section_line_info;
24649 else if (sec == cold_text_section)
24650 {
24651 table = cold_text_section_line_info;
24652 if (!table)
24653 {
24654 cold_text_section_line_info = table = new_line_info_table ();
24655 table->end_label = cold_end_label;
24656 }
24657 }
24658 else
24659 {
24660 const char *end_label;
24661
24662 if (flag_reorder_blocks_and_partition)
24663 {
24664 if (in_cold_section_p)
24665 end_label = crtl->subsections.cold_section_end_label;
24666 else
24667 end_label = crtl->subsections.hot_section_end_label;
24668 }
24669 else
24670 {
24671 char label[MAX_ARTIFICIAL_LABEL_BYTES];
24672 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
24673 current_function_funcdef_no);
24674 end_label = ggc_strdup (label);
24675 }
24676
24677 table = new_line_info_table ();
24678 table->end_label = end_label;
24679
24680 vec_safe_push (separate_line_info, table);
24681 }
24682
24683 if (DWARF2_ASM_LINE_DEBUG_INFO)
24684 table->is_stmt = (cur_line_info_table
24685 ? cur_line_info_table->is_stmt
24686 : DWARF_LINE_DEFAULT_IS_STMT_START);
24687 cur_line_info_table = table;
24688 }
24689
24690
24691 /* We need to reset the locations at the beginning of each
24692 function. We can't do this in the end_function hook, because the
24693 declarations that use the locations won't have been output when
24694 that hook is called. Also compute have_multiple_function_sections here. */
24695
24696 static void
24697 dwarf2out_begin_function (tree fun)
24698 {
24699 section *sec = function_section (fun);
24700
24701 if (sec != text_section)
24702 have_multiple_function_sections = true;
24703
24704 if (flag_reorder_blocks_and_partition && !cold_text_section)
24705 {
24706 gcc_assert (current_function_decl == fun);
24707 cold_text_section = unlikely_text_section ();
24708 switch_to_section (cold_text_section);
24709 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
24710 switch_to_section (sec);
24711 }
24712
24713 dwarf2out_note_section_used ();
24714 call_site_count = 0;
24715 tail_call_site_count = 0;
24716
24717 set_cur_line_info_table (sec);
24718 }
24719
24720 /* Helper function of dwarf2out_end_function, called only after emitting
24721 the very first function into assembly. Check if some .debug_loc range
24722 might end with a .LVL* label that could be equal to .Ltext0.
24723 In that case we must force using absolute addresses in .debug_loc ranges,
24724 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
24725 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
24726 list terminator.
24727 Set have_multiple_function_sections to true in that case and
24728 terminate htab traversal. */
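/* Concretely (an illustrative sketch assuming 64-bit addresses and a
   .quad spelling): a range emitted as

       .quad .LVLN-.Ltext0
       .quad .LVLM-.Ltext0

   degenerates into the pair 0, 0 when .LVLN == .LVLM == .Ltext0, and
   consumers read a 0, 0 pair as the end of the location list rather
   than as an empty range.  */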
24729
24730 int
24731 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
24732 {
24733 var_loc_list *entry = *slot;
24734 struct var_loc_node *node;
24735
24736 node = entry->first;
24737 if (node && node->next && node->next->label)
24738 {
24739 unsigned int i;
24740 const char *label = node->next->label;
24741 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
24742
24743 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
24744 {
24745 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
24746 if (strcmp (label, loclabel) == 0)
24747 {
24748 have_multiple_function_sections = true;
24749 return 0;
24750 }
24751 }
24752 }
24753 return 1;
24754 }
24755
24756 /* Hook called after emitting a function into assembly.
24757 This does something only for the very first function emitted. */
24758
24759 static void
24760 dwarf2out_end_function (unsigned int)
24761 {
24762 if (in_first_function_p
24763 && !have_multiple_function_sections
24764 && first_loclabel_num_not_at_text_label
24765 && decl_loc_table)
24766 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
24767 in_first_function_p = false;
24768 maybe_at_text_label_p = false;
24769 }
24770
24771 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
24772 front-ends register a translation unit even before dwarf2out_init is
24773 called. */
24774 static tree main_translation_unit = NULL_TREE;
24775
24776 /* Hook called by front-ends after they built their main translation unit.
24777 Associate comp_unit_die to UNIT. */
24778
24779 static void
24780 dwarf2out_register_main_translation_unit (tree unit)
24781 {
24782 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
24783 && main_translation_unit == NULL_TREE);
24784 main_translation_unit = unit;
24785 /* If dwarf2out_init has not been called yet, it will perform the association
24786 itself looking at main_translation_unit. */
24787 if (decl_die_table != NULL)
24788 equate_decl_number_to_die (unit, comp_unit_die ());
24789 }
24790
24791 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
24792
24793 static void
24794 push_dw_line_info_entry (dw_line_info_table *table,
24795 enum dw_line_info_opcode opcode, unsigned int val)
24796 {
24797 dw_line_info_entry e;
24798 e.opcode = opcode;
24799 e.val = val;
24800 vec_safe_push (table->entries, e);
24801 }
24802
24803 /* Output a label to mark the beginning of a source code line entry
24804 and record information relating to this source line in
24805 'line_info_table' for later output of the .debug_line section. */
24806 /* ??? The discriminator parameter ought to be unsigned. */
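/* As an example of the resulting assembler-level output when
   DWARF2_ASM_LINE_DEBUG_INFO is in effect (illustrative only): a
   statement on line 42 of file 1 with discriminator 3 would produce

       .loc 1 42 0 discriminator 3

   where the is_stmt suffix is emitted only when the value changes and
   the discriminator suffix only when it is nonzero.  */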
24807
24808 static void
24809 dwarf2out_source_line (unsigned int line, const char *filename,
24810 int discriminator, bool is_stmt)
24811 {
24812 unsigned int file_num;
24813 dw_line_info_table *table;
24814
24815 if (debug_info_level < DINFO_LEVEL_TERSE || line == 0)
24816 return;
24817
24818 /* The discriminator column was added in DWARF 4. Simplify the code
24819 below by removing the discriminator if we're not supposed to output it. */
24820 if (dwarf_version < 4 && dwarf_strict)
24821 discriminator = 0;
24822
24823 table = cur_line_info_table;
24824 file_num = maybe_emit_file (lookup_filename (filename));
24825
24826 /* ??? TODO: Elide duplicate line number entries. Traditionally,
24827 the debugger has used the second (possibly duplicate) line number
24828 at the beginning of the function to mark the end of the prologue.
24829 We could eliminate any other duplicates within the function. For
24830 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
24831 that second line number entry. */
24832 /* Recall that this end-of-prologue indication is *not* the same thing
24833 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
24834 to which the hook corresponds, follows the last insn that was
24835 emitted by gen_prologue. What we need is to precede the first insn
24836 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
24837 insn that corresponds to something the user wrote. These may be
24838 very different locations once scheduling is enabled. */
24839
24840 if (0 && file_num == table->file_num
24841 && line == table->line_num
24842 && discriminator == table->discrim_num
24843 && is_stmt == table->is_stmt)
24844 return;
24845
24846 switch_to_section (current_function_section ());
24847
24848 /* If requested, emit something human-readable. */
24849 if (flag_debug_asm)
24850 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START, filename, line);
24851
24852 if (DWARF2_ASM_LINE_DEBUG_INFO)
24853 {
24854 /* Emit the .loc directive understood by GNU as. */
24855 /* "\t.loc %u %u 0 is_stmt %u discriminator %u",
24856 file_num, line, is_stmt, discriminator */
24857 fputs ("\t.loc ", asm_out_file);
24858 fprint_ul (asm_out_file, file_num);
24859 putc (' ', asm_out_file);
24860 fprint_ul (asm_out_file, line);
24861 putc (' ', asm_out_file);
24862 putc ('0', asm_out_file);
24863
24864 if (is_stmt != table->is_stmt)
24865 {
24866 fputs (" is_stmt ", asm_out_file);
24867 putc (is_stmt ? '1' : '0', asm_out_file);
24868 }
24869 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
24870 {
24871 gcc_assert (discriminator > 0);
24872 fputs (" discriminator ", asm_out_file);
24873 fprint_ul (asm_out_file, (unsigned long) discriminator);
24874 }
24875 putc ('\n', asm_out_file);
24876 }
24877 else
24878 {
24879 unsigned int label_num = ++line_info_label_num;
24880
24881 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
24882
24883 push_dw_line_info_entry (table, LI_set_address, label_num);
24884 if (file_num != table->file_num)
24885 push_dw_line_info_entry (table, LI_set_file, file_num);
24886 if (discriminator != table->discrim_num)
24887 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
24888 if (is_stmt != table->is_stmt)
24889 push_dw_line_info_entry (table, LI_negate_stmt, 0);
24890 push_dw_line_info_entry (table, LI_set_line, line);
24891 }
24892
24893 table->file_num = file_num;
24894 table->line_num = line;
24895 table->discrim_num = discriminator;
24896 table->is_stmt = is_stmt;
24897 table->in_use = true;
24898 }
24899
24900 /* Record the beginning of a new source file. */
24901
24902 static void
24903 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
24904 {
24905 if (flag_eliminate_dwarf2_dups)
24906 {
24907 /* Record the beginning of the file for break_out_includes. */
24908 dw_die_ref bincl_die;
24909
24910 bincl_die = new_die (DW_TAG_GNU_BINCL, comp_unit_die (), NULL);
24911 add_AT_string (bincl_die, DW_AT_name, remap_debug_filename (filename));
24912 }
24913
24914 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
24915 {
24916 macinfo_entry e;
24917 e.code = DW_MACINFO_start_file;
24918 e.lineno = lineno;
24919 e.info = ggc_strdup (filename);
24920 vec_safe_push (macinfo_table, e);
24921 }
24922 }
24923
24924 /* Record the end of a source file. */
24925
24926 static void
24927 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
24928 {
24929 if (flag_eliminate_dwarf2_dups)
24930 /* Record the end of the file for break_out_includes. */
24931 new_die (DW_TAG_GNU_EINCL, comp_unit_die (), NULL);
24932
24933 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
24934 {
24935 macinfo_entry e;
24936 e.code = DW_MACINFO_end_file;
24937 e.lineno = lineno;
24938 e.info = NULL;
24939 vec_safe_push (macinfo_table, e);
24940 }
24941 }
24942
24943 /* Called from debug_define in toplev.c. The `buffer' parameter contains
24944 the tail part of the directive line, i.e. the part past the initial
24945 whitespace, '#', whitespace, directive name and following whitespace. */
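/* For example, for the source line

       #define FOO(x) ((x) + 1)

   BUFFER is expected to be "FOO(x) ((x) + 1)" (a sketch of the expected
   form, not a guarantee about every front end).  */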
24946
24947 static void
24948 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
24949 const char *buffer ATTRIBUTE_UNUSED)
24950 {
24951 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
24952 {
24953 macinfo_entry e;
24954 /* Insert a dummy first entry to be able to optimize the whole
24955 predefined macro block using DW_MACRO_GNU_transparent_include. */
24956 if (macinfo_table->is_empty () && lineno <= 1)
24957 {
24958 e.code = 0;
24959 e.lineno = 0;
24960 e.info = NULL;
24961 vec_safe_push (macinfo_table, e);
24962 }
24963 e.code = DW_MACINFO_define;
24964 e.lineno = lineno;
24965 e.info = ggc_strdup (buffer);
24966 vec_safe_push (macinfo_table, e);
24967 }
24968 }
24969
24970 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
24971 the tail part of the directive line, i.e. the part past the initial
24972 whitespace, '#', whitespace, directive name and following whitespace. */
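/* For example, for the source line "#undef FOO", BUFFER is expected to
   be simply "FOO" (again only a sketch of the expected form).  */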
24973
24974 static void
24975 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
24976 const char *buffer ATTRIBUTE_UNUSED)
24977 {
24978 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
24979 {
24980 macinfo_entry e;
24981 /* Insert a dummy first entry to be able to optimize the whole
24982 predefined macro block using DW_MACRO_GNU_transparent_include. */
24983 if (macinfo_table->is_empty () && lineno <= 1)
24984 {
24985 e.code = 0;
24986 e.lineno = 0;
24987 e.info = NULL;
24988 vec_safe_push (macinfo_table, e);
24989 }
24990 e.code = DW_MACINFO_undef;
24991 e.lineno = lineno;
24992 e.info = ggc_strdup (buffer);
24993 vec_safe_push (macinfo_table, e);
24994 }
24995 }
24996
24997 /* Helpers to manipulate the hash table of macinfo entries. */
24998
24999 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
25000 {
25001 static inline hashval_t hash (const macinfo_entry *);
25002 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
25003 };
25004
25005 inline hashval_t
25006 macinfo_entry_hasher::hash (const macinfo_entry *entry)
25007 {
25008 return htab_hash_string (entry->info);
25009 }
25010
25011 inline bool
25012 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
25013 const macinfo_entry *entry2)
25014 {
25015 return !strcmp (entry1->info, entry2->info);
25016 }
25017
25018 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
25019
25020 /* Output a single .debug_macinfo entry. */
25021
25022 static void
25023 output_macinfo_op (macinfo_entry *ref)
25024 {
25025 int file_num;
25026 size_t len;
25027 struct indirect_string_node *node;
25028 char label[MAX_ARTIFICIAL_LABEL_BYTES];
25029 struct dwarf_file_data *fd;
25030
25031 switch (ref->code)
25032 {
25033 case DW_MACINFO_start_file:
25034 fd = lookup_filename (ref->info);
25035 file_num = maybe_emit_file (fd);
25036 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
25037 dw2_asm_output_data_uleb128 (ref->lineno,
25038 "Included from line number %lu",
25039 (unsigned long) ref->lineno);
25040 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
25041 break;
25042 case DW_MACINFO_end_file:
25043 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
25044 break;
25045 case DW_MACINFO_define:
25046 case DW_MACINFO_undef:
25047 len = strlen (ref->info) + 1;
25048 if (!dwarf_strict
25049 && len > DWARF_OFFSET_SIZE
25050 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
25051 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
25052 {
25053 ref->code = ref->code == DW_MACINFO_define
25054 ? DW_MACRO_GNU_define_indirect
25055 : DW_MACRO_GNU_undef_indirect;
25056 output_macinfo_op (ref);
25057 return;
25058 }
25059 dw2_asm_output_data (1, ref->code,
25060 ref->code == DW_MACINFO_define
25061 ? "Define macro" : "Undefine macro");
25062 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
25063 (unsigned long) ref->lineno);
25064 dw2_asm_output_nstring (ref->info, -1, "The macro");
25065 break;
25066 case DW_MACRO_GNU_define_indirect:
25067 case DW_MACRO_GNU_undef_indirect:
25068 node = find_AT_string (ref->info);
25069 gcc_assert (node
25070 && ((node->form == DW_FORM_strp)
25071 || (node->form == DW_FORM_GNU_str_index)));
25072 dw2_asm_output_data (1, ref->code,
25073 ref->code == DW_MACRO_GNU_define_indirect
25074 ? "Define macro indirect"
25075 : "Undefine macro indirect");
25076 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
25077 (unsigned long) ref->lineno);
25078 if (node->form == DW_FORM_strp)
25079 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
25080 debug_str_section, "The macro: \"%s\"",
25081 ref->info);
25082 else
25083 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
25084 ref->info);
25085 break;
25086 case DW_MACRO_GNU_transparent_include:
25087 dw2_asm_output_data (1, ref->code, "Transparent include");
25088 ASM_GENERATE_INTERNAL_LABEL (label,
25089 DEBUG_MACRO_SECTION_LABEL, ref->lineno);
25090 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
25091 break;
25092 default:
25093 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
25094 ASM_COMMENT_START, (unsigned long) ref->code);
25095 break;
25096 }
25097 }
25098
25099 /* Attempt to make a sequence of define/undef macinfo ops shareable with
25100 other compilation units' .debug_macinfo sections. IDX is the index of
25101 the first define/undef; return the number of ops that should be
25102 emitted in a comdat .debug_macinfo section and emit
25103 a DW_MACRO_GNU_transparent_include entry referencing it.
25104 If the define/undef entry should be emitted normally, return 0. */
25105
25106 static unsigned
25107 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
25108 macinfo_hash_type **macinfo_htab)
25109 {
25110 macinfo_entry *first, *second, *cur, *inc;
25111 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
25112 unsigned char checksum[16];
25113 struct md5_ctx ctx;
25114 char *grp_name, *tail;
25115 const char *base;
25116 unsigned int i, count, encoded_filename_len, linebuf_len;
25117 macinfo_entry **slot;
25118
25119 first = &(*macinfo_table)[idx];
25120 second = &(*macinfo_table)[idx + 1];
25121
25122 /* Optimize only if there are at least two consecutive define/undef ops,
25123 and either all of them are before the first DW_MACINFO_start_file
25124 with lineno {0,1} (i.e. predefined macro block), or all of them are
25125 in some included header file. */
25126 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
25127 return 0;
25128 if (vec_safe_is_empty (files))
25129 {
25130 if (first->lineno > 1 || second->lineno > 1)
25131 return 0;
25132 }
25133 else if (first->lineno == 0)
25134 return 0;
25135
25136 /* Find the last define/undef entry that can be grouped together
25137 with first and at the same time compute an MD5 checksum of their
25138 codes, line numbers and strings. */
25139 md5_init_ctx (&ctx);
25140 for (i = idx; macinfo_table->iterate (i, &cur); i++)
25141 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
25142 break;
25143 else if (vec_safe_is_empty (files) && cur->lineno > 1)
25144 break;
25145 else
25146 {
25147 unsigned char code = cur->code;
25148 md5_process_bytes (&code, 1, &ctx);
25149 checksum_uleb128 (cur->lineno, &ctx);
25150 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
25151 }
25152 md5_finish_ctx (&ctx, checksum);
25153 count = i - idx;
25154
25155 /* From the containing include filename (if any) pick up just
25156 usable characters from its basename. */
25157 if (vec_safe_is_empty (files))
25158 base = "";
25159 else
25160 base = lbasename (files->last ().info);
25161 for (encoded_filename_len = 0, i = 0; base[i]; i++)
25162 if (ISIDNUM (base[i]) || base[i] == '.')
25163 encoded_filename_len++;
25164 /* Count the '.' separator appended after the encoded filename. */
25165 if (encoded_filename_len)
25166 encoded_filename_len++;
25167
25168 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
25169 linebuf_len = strlen (linebuf);
25170
25171 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
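/* E.g. a group taken from a hypothetical header "config.h", starting at
   line 12 and hashed with MD5, might be named something like
   "wm4.config.h.12.<32 hex digits>" when DWARF_OFFSET_SIZE is 4
   (illustrative only; the real name depends on the checksummed ops).  */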
25172 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
25173 + 16 * 2 + 1);
25174 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
25175 tail = grp_name + 4;
25176 if (encoded_filename_len)
25177 {
25178 for (i = 0; base[i]; i++)
25179 if (ISIDNUM (base[i]) || base[i] == '.')
25180 *tail++ = base[i];
25181 *tail++ = '.';
25182 }
25183 memcpy (tail, linebuf, linebuf_len);
25184 tail += linebuf_len;
25185 *tail++ = '.';
25186 for (i = 0; i < 16; i++)
25187 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
25188
25189 /* Construct a macinfo_entry for DW_MACRO_GNU_transparent_include
25190 in the empty vector entry before the first define/undef. */
25191 inc = &(*macinfo_table)[idx - 1];
25192 inc->code = DW_MACRO_GNU_transparent_include;
25193 inc->lineno = 0;
25194 inc->info = ggc_strdup (grp_name);
25195 if (!*macinfo_htab)
25196 *macinfo_htab = new macinfo_hash_type (10);
25197 /* Avoid emitting duplicates. */
25198 slot = (*macinfo_htab)->find_slot (inc, INSERT);
25199 if (*slot != NULL)
25200 {
25201 inc->code = 0;
25202 inc->info = NULL;
25203 /* If such an entry has been used before, just emit
25204 a DW_MACRO_GNU_transparent_include op. */
25205 inc = *slot;
25206 output_macinfo_op (inc);
25207 /* And clear all macinfo_entries in the range to avoid emitting them
25208 in the second pass. */
25209 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
25210 {
25211 cur->code = 0;
25212 cur->info = NULL;
25213 }
25214 }
25215 else
25216 {
25217 *slot = inc;
25218 inc->lineno = (*macinfo_htab)->elements ();
25219 output_macinfo_op (inc);
25220 }
25221 return count;
25222 }
25223
25224 /* Save any strings needed by the macinfo table in the debug str
25225 table. All strings must be collected into the table by the time
25226 index_string is called. */
25227
25228 static void
25229 save_macinfo_strings (void)
25230 {
25231 unsigned len;
25232 unsigned i;
25233 macinfo_entry *ref;
25234
25235 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
25236 {
25237 switch (ref->code)
25238 {
25239 /* Match the logic in output_macinfo_op to decide on
25240 indirect strings. */
25241 case DW_MACINFO_define:
25242 case DW_MACINFO_undef:
25243 len = strlen (ref->info) + 1;
25244 if (!dwarf_strict
25245 && len > DWARF_OFFSET_SIZE
25246 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
25247 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
25248 set_indirect_string (find_AT_string (ref->info));
25249 break;
25250 case DW_MACRO_GNU_define_indirect:
25251 case DW_MACRO_GNU_undef_indirect:
25252 set_indirect_string (find_AT_string (ref->info));
25253 break;
25254 default:
25255 break;
25256 }
25257 }
25258 }
25259
25260 /* Output macinfo section(s). */
25261
25262 static void
25263 output_macinfo (void)
25264 {
25265 unsigned i;
25266 unsigned long length = vec_safe_length (macinfo_table);
25267 macinfo_entry *ref;
25268 vec<macinfo_entry, va_gc> *files = NULL;
25269 macinfo_hash_type *macinfo_htab = NULL;
25270
25271 if (! length)
25272 return;
25273
25274 /* output_macinfo* uses these interchangeably. */
25275 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_GNU_define
25276 && (int) DW_MACINFO_undef == (int) DW_MACRO_GNU_undef
25277 && (int) DW_MACINFO_start_file == (int) DW_MACRO_GNU_start_file
25278 && (int) DW_MACINFO_end_file == (int) DW_MACRO_GNU_end_file);
25279
25280 /* For .debug_macro emit the section header. */
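/* A reader's note on the flags byte emitted below: bit 0 is the
   offset-size flag (set for 64-bit DWARF) and bit 1 says that an offset
   into .debug_line follows, hence the values 2 (32-bit) and 3 (64-bit)
   used here.  */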
25281 if (!dwarf_strict)
25282 {
25283 dw2_asm_output_data (2, 4, "DWARF macro version number");
25284 if (DWARF_OFFSET_SIZE == 8)
25285 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
25286 else
25287 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
25288 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
25289 (!dwarf_split_debug_info ? debug_line_section_label
25290 : debug_skeleton_line_section_label),
25291 debug_line_section, NULL);
25292 }
25293
25294 /* The first loop emits the primary .debug_macinfo section and clears
25295 each macinfo_entry after its op has been emitted.
25296 If a longer range of define/undef ops can be optimized using
25297 DW_MACRO_GNU_transparent_include, the DW_MACRO_GNU_transparent_include
25298 op is emitted and kept in the vector entry before the first
25299 define/undef in the range, and the define/undef ops in the range are
25300 not emitted here but are kept for the second loop below. */
25301 for (i = 0; macinfo_table->iterate (i, &ref); i++)
25302 {
25303 switch (ref->code)
25304 {
25305 case DW_MACINFO_start_file:
25306 vec_safe_push (files, *ref);
25307 break;
25308 case DW_MACINFO_end_file:
25309 if (!vec_safe_is_empty (files))
25310 files->pop ();
25311 break;
25312 case DW_MACINFO_define:
25313 case DW_MACINFO_undef:
25314 if (!dwarf_strict
25315 && HAVE_COMDAT_GROUP
25316 && vec_safe_length (files) != 1
25317 && i > 0
25318 && i + 1 < length
25319 && (*macinfo_table)[i - 1].code == 0)
25320 {
25321 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
25322 if (count)
25323 {
25324 i += count - 1;
25325 continue;
25326 }
25327 }
25328 break;
25329 case 0:
25330 /* A dummy entry may be inserted at the beginning to be able
25331 to optimize the whole block of predefined macros. */
25332 if (i == 0)
25333 continue;
25334 default:
25335 break;
25336 }
25337 output_macinfo_op (ref);
25338 ref->info = NULL;
25339 ref->code = 0;
25340 }
25341
25342 if (!macinfo_htab)
25343 return;
25344
25345 delete macinfo_htab;
25346 macinfo_htab = NULL;
25347
25348 /* If any DW_MACRO_GNU_transparent_include entries were used, terminate
25349 the current chain at each of them, switch to a new comdat
25350 .debug_macinfo section, and emit the define/undef entries that
25351 belong to it there. */
25352 for (i = 0; macinfo_table->iterate (i, &ref); i++)
25353 switch (ref->code)
25354 {
25355 case 0:
25356 continue;
25357 case DW_MACRO_GNU_transparent_include:
25358 {
25359 char label[MAX_ARTIFICIAL_LABEL_BYTES];
25360 tree comdat_key = get_identifier (ref->info);
25361 /* Terminate the previous .debug_macinfo section. */
25362 dw2_asm_output_data (1, 0, "End compilation unit");
25363 targetm.asm_out.named_section (DEBUG_MACRO_SECTION,
25364 SECTION_DEBUG
25365 | SECTION_LINKONCE,
25366 comdat_key);
25367 ASM_GENERATE_INTERNAL_LABEL (label,
25368 DEBUG_MACRO_SECTION_LABEL,
25369 ref->lineno);
25370 ASM_OUTPUT_LABEL (asm_out_file, label);
25371 ref->code = 0;
25372 ref->info = NULL;
25373 dw2_asm_output_data (2, 4, "DWARF macro version number");
25374 if (DWARF_OFFSET_SIZE == 8)
25375 dw2_asm_output_data (1, 1, "Flags: 64-bit");
25376 else
25377 dw2_asm_output_data (1, 0, "Flags: 32-bit");
25378 }
25379 break;
25380 case DW_MACINFO_define:
25381 case DW_MACINFO_undef:
25382 output_macinfo_op (ref);
25383 ref->code = 0;
25384 ref->info = NULL;
25385 break;
25386 default:
25387 gcc_unreachable ();
25388 }
25389 }
25390
25391 /* Set up for Dwarf output at the start of compilation. */
25392
25393 static void
25394 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
25395 {
25396 /* This option is currently broken; see PR53118 and PR46102. */
25397 if (flag_eliminate_dwarf2_dups
25398 && strstr (lang_hooks.name, "C++"))
25399 {
25400 warning (0, "-feliminate-dwarf2-dups is broken for C++, ignoring");
25401 flag_eliminate_dwarf2_dups = 0;
25402 }
25403
25404 /* Allocate the file_table. */
25405 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
25406
25407 #ifndef DWARF2_LINENO_DEBUGGING_INFO
25408 /* Allocate the decl_die_table. */
25409 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
25410
25411 /* Allocate the decl_loc_table. */
25412 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
25413
25414 /* Allocate the cached_dw_loc_list_table. */
25415 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
25416
25417 /* Allocate the initial hunk of the decl_scope_table. */
25418 vec_alloc (decl_scope_table, 256);
25419
25420 /* Allocate the initial hunk of the abbrev_die_table. */
25421 abbrev_die_table = ggc_cleared_vec_alloc<dw_die_ref>
25422 (ABBREV_DIE_TABLE_INCREMENT);
25423 abbrev_die_table_allocated = ABBREV_DIE_TABLE_INCREMENT;
25424 /* Zero-th entry is allocated, but unused. */
25425 abbrev_die_table_in_use = 1;
25426
25427 /* Allocate the dwarf_proc_stack_usage_map. */
25428 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
25429
25430 /* Allocate the pubtypes and pubnames vectors. */
25431 vec_alloc (pubname_table, 32);
25432 vec_alloc (pubtype_table, 32);
25433
25434 vec_alloc (incomplete_types, 64);
25435
25436 vec_alloc (used_rtx_array, 32);
25437
25438 if (!dwarf_split_debug_info)
25439 {
25440 debug_info_section = get_section (DEBUG_INFO_SECTION,
25441 SECTION_DEBUG, NULL);
25442 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
25443 SECTION_DEBUG, NULL);
25444 debug_loc_section = get_section (DEBUG_LOC_SECTION,
25445 SECTION_DEBUG, NULL);
25446 }
25447 else
25448 {
25449 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
25450 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
25451 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
25452 SECTION_DEBUG | SECTION_EXCLUDE,
25453 NULL);
25454 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
25455 SECTION_DEBUG, NULL);
25456 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
25457 SECTION_DEBUG, NULL);
25458 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
25459 SECTION_DEBUG, NULL);
25460 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
25461 DEBUG_SKELETON_ABBREV_SECTION_LABEL, 0);
25462
25463 /* Somewhat confusing detail: the skeleton_[abbrev|info] sections stay in
25464 the main .o, but the skeleton_line section goes into the split-off .dwo. */
25465 debug_skeleton_line_section
25466 = get_section (DEBUG_DWO_LINE_SECTION,
25467 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
25468 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
25469 DEBUG_SKELETON_LINE_SECTION_LABEL, 0);
25470 debug_str_offsets_section = get_section (DEBUG_STR_OFFSETS_SECTION,
25471 SECTION_DEBUG | SECTION_EXCLUDE,
25472 NULL);
25473 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
25474 DEBUG_SKELETON_INFO_SECTION_LABEL, 0);
25475 debug_loc_section = get_section (DEBUG_DWO_LOC_SECTION,
25476 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
25477 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
25478 DEBUG_STR_DWO_SECTION_FLAGS, NULL);
25479 }
25480 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
25481 SECTION_DEBUG, NULL);
25482 debug_macinfo_section = get_section (dwarf_strict
25483 ? DEBUG_MACINFO_SECTION
25484 : DEBUG_MACRO_SECTION,
25485 DEBUG_MACRO_SECTION_FLAGS, NULL);
25486 debug_line_section = get_section (DEBUG_LINE_SECTION,
25487 SECTION_DEBUG, NULL);
25488 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
25489 SECTION_DEBUG, NULL);
25490 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
25491 SECTION_DEBUG, NULL);
25492 debug_str_section = get_section (DEBUG_STR_SECTION,
25493 DEBUG_STR_SECTION_FLAGS, NULL);
25494 debug_ranges_section = get_section (DEBUG_RANGES_SECTION,
25495 SECTION_DEBUG, NULL);
25496 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
25497 SECTION_DEBUG, NULL);
25498
25499 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
25500 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
25501 DEBUG_ABBREV_SECTION_LABEL, 0);
25502 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
25503 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
25504 COLD_TEXT_SECTION_LABEL, 0);
25505 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
25506
25507 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
25508 DEBUG_INFO_SECTION_LABEL, 0);
25509 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
25510 DEBUG_LINE_SECTION_LABEL, 0);
25511 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
25512 DEBUG_RANGES_SECTION_LABEL, 0);
25513 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
25514 DEBUG_ADDR_SECTION_LABEL, 0);
25515 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
25516 dwarf_strict
25517 ? DEBUG_MACINFO_SECTION_LABEL
25518 : DEBUG_MACRO_SECTION_LABEL, 0);
25519 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL, 0);
25520
25521 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
25522 vec_alloc (macinfo_table, 64);
25523
25524 switch_to_section (text_section);
25525 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
25526 #endif
25527
25528 /* Make sure the line number table for .text always exists. */
25529 text_section_line_info = new_line_info_table ();
25530 text_section_line_info->end_label = text_end_label;
25531
25532 #ifdef DWARF2_LINENO_DEBUGGING_INFO
25533 cur_line_info_table = text_section_line_info;
25534 #endif
25535
25536 /* If front-ends already registered a main translation unit but we were not
25537 ready to perform the association, do this now. */
25538 if (main_translation_unit != NULL_TREE)
25539 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
25540 }
25541
25542 /* Called before compile () starts outputting functions, variables
25543 and toplevel asms into assembly. */
25544
25545 static void
25546 dwarf2out_assembly_start (void)
25547 {
25548 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
25549 && dwarf2out_do_cfi_asm ()
25550 && (!(flag_unwind_tables || flag_exceptions)
25551 || targetm_common.except_unwind_info (&global_options) != UI_DWARF2))
25552 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
25553 }
25554
25555 /* A helper function for dwarf2out_finish called through
25556 htab_traverse. Assign a string its index. All strings must be
25557 collected into the table by the time index_string is called,
25558 because the indexing code relies on htab_traverse to traverse nodes
25559 in the same order for each run. */
25560
25561 int
25562 index_string (indirect_string_node **h, unsigned int *index)
25563 {
25564 indirect_string_node *node = *h;
25565
25566 find_string_form (node);
25567 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
25568 {
25569 gcc_assert (node->index == NO_INDEX_ASSIGNED);
25570 node->index = *index;
25571 *index += 1;
25572 }
25573 return 1;
25574 }
25575
25576 /* A helper function for output_indirect_strings called through
25577 htab_traverse. Output the offset to a string and update the
25578 current offset. */
25579
25580 int
25581 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
25582 {
25583 indirect_string_node *node = *h;
25584
25585 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
25586 {
25587 /* Assert that this node has been assigned an index. */
25588 gcc_assert (node->index != NO_INDEX_ASSIGNED
25589 && node->index != NOT_INDEXED);
25590 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
25591 "indexed string 0x%x: %s", node->index, node->str);
25592 *offset += strlen (node->str) + 1;
25593 }
25594 return 1;
25595 }
25596
25597 /* A helper function for dwarf2out_finish called through
25598 htab_traverse. Output the indexed string. */
25599
25600 int
25601 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
25602 {
25603 struct indirect_string_node *node = *h;
25604
25605 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
25606 {
25607 /* Assert that the strings are output in the same order as their
25608 indexes were assigned. */
25609 gcc_assert (*cur_idx == node->index);
25610 assemble_string (node->str, strlen (node->str) + 1);
25611 *cur_idx += 1;
25612 }
25613 return 1;
25614 }
25615
25616 /* A helper function for dwarf2out_finish called through
25617 htab_traverse. Emit one queued .debug_str string. */
25618
25619 int
25620 output_indirect_string (indirect_string_node **h, void *)
25621 {
25622 struct indirect_string_node *node = *h;
25623
25624 node->form = find_string_form (node);
25625 if (node->form == DW_FORM_strp && node->refcount > 0)
25626 {
25627 ASM_OUTPUT_LABEL (asm_out_file, node->label);
25628 assemble_string (node->str, strlen (node->str) + 1);
25629 }
25630
25631 return 1;
25632 }
25633
25634 /* Output the indexed string table. */
25635
25636 static void
25637 output_indirect_strings (void)
25638 {
25639 switch_to_section (debug_str_section);
25640 if (!dwarf_split_debug_info)
25641 debug_str_hash->traverse<void *, output_indirect_string> (NULL);
25642 else
25643 {
25644 unsigned int offset = 0;
25645 unsigned int cur_idx = 0;
25646
25647 skeleton_debug_str_hash->traverse<void *, output_indirect_string> (NULL);
25648
25649 switch_to_section (debug_str_offsets_section);
25650 debug_str_hash->traverse_noresize
25651 <unsigned int *, output_index_string_offset> (&offset);
25652 switch_to_section (debug_str_dwo_section);
25653 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
25654 (&cur_idx);
25655 }
25656 }
25657
25658 /* Callback for htab_traverse to assign an index to an entry in the
25659 table, and to write that entry to the .debug_addr section. */
25660
25661 int
25662 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
25663 {
25664 addr_table_entry *entry = *slot;
25665
25666 if (entry->refcount == 0)
25667 {
25668 gcc_assert (entry->index == NO_INDEX_ASSIGNED
25669 || entry->index == NOT_INDEXED);
25670 return 1;
25671 }
25672
25673 gcc_assert (entry->index == *cur_index);
25674 (*cur_index)++;
25675
25676 switch (entry->kind)
25677 {
25678 case ate_kind_rtx:
25679 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
25680 "0x%x", entry->index);
25681 break;
25682 case ate_kind_rtx_dtprel:
25683 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
25684 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
25685 DWARF2_ADDR_SIZE,
25686 entry->addr.rtl);
25687 fputc ('\n', asm_out_file);
25688 break;
25689 case ate_kind_label:
25690 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
25691 "0x%x", entry->index);
25692 break;
25693 default:
25694 gcc_unreachable ();
25695 }
25696 return 1;
25697 }
25698
25699 /* Produce the .debug_addr section. */
25700
25701 static void
25702 output_addr_table (void)
25703 {
25704 unsigned int index = 0;
25705 if (addr_index_table == NULL || addr_index_table->size () == 0)
25706 return;
25707
25708 switch_to_section (debug_addr_section);
25709 addr_index_table
25710 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
25711 }
25712
25713 #if ENABLE_ASSERT_CHECKING
25714 /* Verify that all marks are clear. */
25715
25716 static void
25717 verify_marks_clear (dw_die_ref die)
25718 {
25719 dw_die_ref c;
25720
25721 gcc_assert (! die->die_mark);
25722 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
25723 }
25724 #endif /* ENABLE_ASSERT_CHECKING */
25725
25726 /* Clear the marks for a die and its children.
25727 Be cool if the mark isn't set. */
25728
25729 static void
25730 prune_unmark_dies (dw_die_ref die)
25731 {
25732 dw_die_ref c;
25733
25734 if (die->die_mark)
25735 die->die_mark = 0;
25736 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
25737 }
25738
25739 /* Given LOC that is referenced by a DIE we're marking as used, find all
25740 DWARF procedures it references and mark them as used. */
25741
25742 static void
25743 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
25744 {
25745 for (; loc != NULL; loc = loc->dw_loc_next)
25746 switch (loc->dw_loc_opc)
25747 {
25748 case DW_OP_GNU_implicit_pointer:
25749 case DW_OP_GNU_convert:
25750 case DW_OP_GNU_reinterpret:
25751 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
25752 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
25753 break;
25754 case DW_OP_call2:
25755 case DW_OP_call4:
25756 case DW_OP_call_ref:
25757 case DW_OP_GNU_const_type:
25758 case DW_OP_GNU_parameter_ref:
25759 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
25760 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
25761 break;
25762 case DW_OP_GNU_regval_type:
25763 case DW_OP_GNU_deref_type:
25764 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
25765 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
25766 break;
25767 case DW_OP_GNU_entry_value:
25768 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
25769 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
25770 break;
25771 default:
25772 break;
25773 }
25774 }
25775
25776 /* Given DIE that we're marking as used, find any other dies
25777 it references as attributes and mark them as used. */
25778
25779 static void
25780 prune_unused_types_walk_attribs (dw_die_ref die)
25781 {
25782 dw_attr_node *a;
25783 unsigned ix;
25784
25785 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
25786 {
25787 switch (AT_class (a))
25788 {
25789 /* Make sure DWARF procedures referenced by location descriptions will
25790 get emitted. */
25791 case dw_val_class_loc:
25792 prune_unused_types_walk_loc_descr (AT_loc (a));
25793 break;
25794 case dw_val_class_loc_list:
25795 for (dw_loc_list_ref list = AT_loc_list (a);
25796 list != NULL;
25797 list = list->dw_loc_next)
25798 prune_unused_types_walk_loc_descr (list->expr);
25799 break;
25800
25801 case dw_val_class_die_ref:
25802 /* A reference to another DIE.
25803 Make sure that it will get emitted.
25804 If it was broken out into a comdat group, don't follow it. */
25805 if (! AT_ref (a)->comdat_type_p
25806 || a->dw_attr == DW_AT_specification)
25807 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
25808 break;
25809
25810 case dw_val_class_str:
25811 /* Set the string's refcount to 0 so that prune_unused_types_mark
25812 accounts properly for it. */
25813 a->dw_attr_val.v.val_str->refcount = 0;
25814 break;
25815
25816 default:
25817 break;
25818 }
25819 }
25820 }
25821
25822 /* Mark the children DIEs of DIE that describe its generic parameters and arguments. */
25823
25824 static void
25825 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
25826 {
25827 dw_die_ref c;
25828
25829 if (die == NULL || die->die_child == NULL)
25830 return;
25831 c = die->die_child;
25832 do
25833 {
25834 if (is_template_parameter (c))
25835 prune_unused_types_mark (c, 1);
25836 c = c->die_sib;
25837 } while (c && c != die->die_child);
25838 }
25839
25840 /* Mark DIE as being used. If DOKIDS is true, then walk down
25841 to DIE's children. */
25842
25843 static void
25844 prune_unused_types_mark (dw_die_ref die, int dokids)
25845 {
25846 dw_die_ref c;
25847
25848 if (die->die_mark == 0)
25849 {
25850 /* We haven't done this node yet. Mark it as used. */
25851 die->die_mark = 1;
25852 /* If this is the DIE of a generic type instantiation,
25853 mark the children DIEs that describe its generic parms and
25854 args. */
25855 prune_unused_types_mark_generic_parms_dies (die);
25856
25857 /* We also have to mark its parents as used.
25858 (But we don't want to mark our parent's kids due to this,
25859 unless it is a class.) */
25860 if (die->die_parent)
25861 prune_unused_types_mark (die->die_parent,
25862 class_scope_p (die->die_parent));
25863
25864 /* Mark any referenced nodes. */
25865 prune_unused_types_walk_attribs (die);
25866
25867 /* If this node is a specification,
25868 also mark the definition, if it exists. */
25869 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
25870 prune_unused_types_mark (die->die_definition, 1);
25871 }
25872
25873 if (dokids && die->die_mark != 2)
25874 {
25875 /* We need to walk the children, but haven't done so yet.
25876 Remember that we've walked the kids. */
25877 die->die_mark = 2;
25878
25879 /* If this is an array type, we need to make sure our
25880 kids get marked, even if they're types. If we're
25881 breaking out types into comdat sections, do this
25882 for all type definitions. */
25883 if (die->die_tag == DW_TAG_array_type
25884 || (use_debug_types
25885 && is_type_die (die) && ! is_declaration_die (die)))
25886 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
25887 else
25888 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
25889 }
25890 }
25891
25892 /* For local classes, check whether any static member functions were
25893 emitted and, if so, mark them. */
25894
25895 static void
25896 prune_unused_types_walk_local_classes (dw_die_ref die)
25897 {
25898 dw_die_ref c;
25899
25900 if (die->die_mark == 2)
25901 return;
25902
25903 switch (die->die_tag)
25904 {
25905 case DW_TAG_structure_type:
25906 case DW_TAG_union_type:
25907 case DW_TAG_class_type:
25908 break;
25909
25910 case DW_TAG_subprogram:
25911 if (!get_AT_flag (die, DW_AT_declaration)
25912 || die->die_definition != NULL)
25913 prune_unused_types_mark (die, 1);
25914 return;
25915
25916 default:
25917 return;
25918 }
25919
25920 /* Mark children. */
25921 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
25922 }
25923
25924 /* Walk the tree DIE and mark types that we actually use. */
25925
25926 static void
25927 prune_unused_types_walk (dw_die_ref die)
25928 {
25929 dw_die_ref c;
25930
25931 /* Don't do anything if this node is already marked and
25932 children have been marked as well. */
25933 if (die->die_mark == 2)
25934 return;
25935
25936 switch (die->die_tag)
25937 {
25938 case DW_TAG_structure_type:
25939 case DW_TAG_union_type:
25940 case DW_TAG_class_type:
25941 if (die->die_perennial_p)
25942 break;
25943
25944 for (c = die->die_parent; c; c = c->die_parent)
25945 if (c->die_tag == DW_TAG_subprogram)
25946 break;
25947
25948 /* Finding used static member functions inside of classes
25949 is needed just for local classes, because for other classes
25950 static member function DIEs with DW_AT_specification
25951 are emitted outside of the DW_TAG_*_type. If we ever change
25952 it, we'd need to call this even for non-local classes. */
25953 if (c)
25954 prune_unused_types_walk_local_classes (die);
25955
25956 /* It's a type node --- don't mark it. */
25957 return;
25958
25959 case DW_TAG_const_type:
25960 case DW_TAG_packed_type:
25961 case DW_TAG_pointer_type:
25962 case DW_TAG_reference_type:
25963 case DW_TAG_rvalue_reference_type:
25964 case DW_TAG_volatile_type:
25965 case DW_TAG_typedef:
25966 case DW_TAG_array_type:
25967 case DW_TAG_interface_type:
25968 case DW_TAG_friend:
25969 case DW_TAG_enumeration_type:
25970 case DW_TAG_subroutine_type:
25971 case DW_TAG_string_type:
25972 case DW_TAG_set_type:
25973 case DW_TAG_subrange_type:
25974 case DW_TAG_ptr_to_member_type:
25975 case DW_TAG_file_type:
25976 /* Type nodes are useful only when other DIEs reference them --- don't
25977 mark them. */
25978 /* FALLTHROUGH */
25979
25980 case DW_TAG_dwarf_procedure:
25981 /* Likewise for DWARF procedures. */
25982
25983 if (die->die_perennial_p)
25984 break;
25985
25986 return;
25987
25988 default:
25989 /* Mark everything else. */
25990 break;
25991 }
25992
25993 if (die->die_mark == 0)
25994 {
25995 die->die_mark = 1;
25996
25997 /* Now, mark any dies referenced from here. */
25998 prune_unused_types_walk_attribs (die);
25999 }
26000
26001 die->die_mark = 2;
26002
26003 /* Mark children. */
26004 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
26005 }
26006
26007 /* Increment the string counts on strings referred to from DIE's
26008 attributes. */
26009
26010 static void
26011 prune_unused_types_update_strings (dw_die_ref die)
26012 {
26013 dw_attr_node *a;
26014 unsigned ix;
26015
26016 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
26017 if (AT_class (a) == dw_val_class_str)
26018 {
26019 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
26020 s->refcount++;
26021 /* Avoid unnecessarily putting strings that are used fewer than
26022 twice into the hash table. */
26023 if (s->refcount
26024 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
26025 {
26026 indirect_string_node **slot
26027 = debug_str_hash->find_slot_with_hash (s->str,
26028 htab_hash_string (s->str),
26029 INSERT);
26030 gcc_assert (*slot == NULL);
26031 *slot = s;
26032 }
26033 }
26034 }
26035
26036 /* Remove from the tree DIE any dies that aren't marked. */
26037
26038 static void
26039 prune_unused_types_prune (dw_die_ref die)
26040 {
26041 dw_die_ref c;
26042
26043 gcc_assert (die->die_mark);
26044 prune_unused_types_update_strings (die);
26045
26046 if (! die->die_child)
26047 return;
26048
26049 c = die->die_child;
26050 do {
26051 dw_die_ref prev = c;
26052 for (c = c->die_sib; ! c->die_mark; c = c->die_sib)
26053 if (c == die->die_child)
26054 {
26055 /* No marked children between 'prev' and the end of the list. */
26056 if (prev == c)
26057 /* No marked children at all. */
26058 die->die_child = NULL;
26059 else
26060 {
26061 prev->die_sib = c->die_sib;
26062 die->die_child = prev;
26063 }
26064 return;
26065 }
26066
26067 if (c != prev->die_sib)
26068 prev->die_sib = c;
26069 prune_unused_types_prune (c);
26070 } while (c != die->die_child);
26071 }
26072
26073 /* Remove dies representing declarations that we never use. */
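/* In outline (a summary of the code below, not additional behavior):
   the pass premarks types used by global variables, walks the main CU,
   limbo and comdat DIE trees setting die_mark on everything reachable,
   prunes unmarked children from those trees while recounting .debug_str
   references, and finally clears all marks so later passes start from a
   clean slate.  */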
26074
26075 static void
26076 prune_unused_types (void)
26077 {
26078 unsigned int i;
26079 limbo_die_node *node;
26080 comdat_type_node *ctnode;
26081 pubname_entry *pub;
26082 dw_die_ref base_type;
26083
26084 #if ENABLE_ASSERT_CHECKING
26085 /* All the marks should already be clear. */
26086 verify_marks_clear (comp_unit_die ());
26087 for (node = limbo_die_list; node; node = node->next)
26088 verify_marks_clear (node->die);
26089 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
26090 verify_marks_clear (ctnode->root_die);
26091 #endif /* ENABLE_ASSERT_CHECKING */
26092
26093 /* Mark types that are used in global variables. */
26094 premark_types_used_by_global_vars ();
26095
26096 /* Set the mark on nodes that are actually used. */
26097 prune_unused_types_walk (comp_unit_die ());
26098 for (node = limbo_die_list; node; node = node->next)
26099 prune_unused_types_walk (node->die);
26100 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
26101 {
26102 prune_unused_types_walk (ctnode->root_die);
26103 prune_unused_types_mark (ctnode->type_die, 1);
26104 }
26105
26106 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
26107 are unusual in that they are pubnames that are the children of pubtypes.
26108 They should only be marked via their parent DW_TAG_enumeration_type die,
26109 not as roots in themselves. */
26110 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
26111 if (pub->die->die_tag != DW_TAG_enumerator)
26112 prune_unused_types_mark (pub->die, 1);
26113 for (i = 0; base_types.iterate (i, &base_type); i++)
26114 prune_unused_types_mark (base_type, 1);
26115
26116 if (debug_str_hash)
26117 debug_str_hash->empty ();
26118 if (skeleton_debug_str_hash)
26119 skeleton_debug_str_hash->empty ();
26120 prune_unused_types_prune (comp_unit_die ());
26121 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
26122 {
26123 node = *pnode;
26124 if (!node->die->die_mark)
26125 *pnode = node->next;
26126 else
26127 {
26128 prune_unused_types_prune (node->die);
26129 pnode = &node->next;
26130 }
26131 }
26132 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
26133 prune_unused_types_prune (ctnode->root_die);
26134
26135 /* Leave the marks clear. */
26136 prune_unmark_dies (comp_unit_die ());
26137 for (node = limbo_die_list; node; node = node->next)
26138 prune_unmark_dies (node->die);
26139 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
26140 prune_unmark_dies (ctnode->root_die);
26141 }
26142
26143 /* Set the parameter to true if there are any relative pathnames in
26144 the file table. */
26145 int
26146 file_table_relative_p (dwarf_file_data **slot, bool *p)
26147 {
26148 struct dwarf_file_data *d = *slot;
26149 if (!IS_ABSOLUTE_PATH (d->filename))
26150 {
26151 *p = true;
26152 return 0;
26153 }
26154 return 1;
26155 }
26156
26157 /* Helpers to manipulate hash table of comdat type units. */
26158
26159 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
26160 {
26161 static inline hashval_t hash (const comdat_type_node *);
26162 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
26163 };
26164
26165 inline hashval_t
26166 comdat_type_hasher::hash (const comdat_type_node *type_node)
26167 {
26168 hashval_t h;
26169 memcpy (&h, type_node->signature, sizeof (h));
26170 return h;
26171 }
26172
26173 inline bool
26174 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
26175 const comdat_type_node *type_node_2)
26176 {
26177 return (! memcmp (type_node_1->signature, type_node_2->signature,
26178 DWARF_TYPE_SIGNATURE_SIZE));
26179 }
26180
26181 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to dw_die_ref
26182 to the location where it would have been added had we known its
26183 DECL_ASSEMBLER_NAME when we added the other attributes. This will
26184 probably improve the compactness of the debug info, removing
26185 equivalent abbrevs, and hide any differences caused by deferring the
26186 computation of the assembler name, triggered e.g. by PCH. */
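/* For example (the attribute order is illustrative only): if a DIE's
   attribute vector ends up as

       DW_AT_name, DW_AT_decl_file, DW_AT_decl_line, DW_AT_prototyped,
       DW_AT_linkage_name   <- just appended

   the linkage name is popped off the end and reinserted right after the
   last DW_AT_decl_line/DW_AT_name found scanning backwards, yielding

       DW_AT_name, DW_AT_decl_file, DW_AT_decl_line, DW_AT_linkage_name,
       DW_AT_prototyped.  */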
26187
26188 static inline void
26189 move_linkage_attr (dw_die_ref die)
26190 {
26191 unsigned ix = vec_safe_length (die->die_attr);
26192 dw_attr_node linkage = (*die->die_attr)[ix - 1];
26193
26194 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
26195 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
26196
26197 while (--ix > 0)
26198 {
26199 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
26200
26201 if (prev->dw_attr == DW_AT_decl_line || prev->dw_attr == DW_AT_name)
26202 break;
26203 }
26204
26205 if (ix != vec_safe_length (die->die_attr) - 1)
26206 {
26207 die->die_attr->pop ();
26208 die->die_attr->quick_insert (ix, linkage);
26209 }
26210 }
26211
26212 /* Helper function for resolve_addr. Mark DW_TAG_base_type nodes
26213 referenced from typed stack ops and count how often they are used. */
26214
26215 static void
26216 mark_base_types (dw_loc_descr_ref loc)
26217 {
26218 dw_die_ref base_type = NULL;
26219
26220 for (; loc; loc = loc->dw_loc_next)
26221 {
26222 switch (loc->dw_loc_opc)
26223 {
26224 case DW_OP_GNU_regval_type:
26225 case DW_OP_GNU_deref_type:
26226 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
26227 break;
26228 case DW_OP_GNU_convert:
26229 case DW_OP_GNU_reinterpret:
26230 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
26231 continue;
26232 /* FALLTHRU */
26233 case DW_OP_GNU_const_type:
26234 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
26235 break;
26236 case DW_OP_GNU_entry_value:
26237 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
26238 continue;
26239 default:
26240 continue;
26241 }
26242 gcc_assert (base_type->die_parent == comp_unit_die ());
26243 if (base_type->die_mark)
26244 base_type->die_mark++;
26245 else
26246 {
26247 base_types.safe_push (base_type);
26248 base_type->die_mark = 1;
26249 }
26250 }
26251 }
26252
26253 /* Comparison function for sorting marked base types. */
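/* The intended order (restating the comparisons below): larger usage
   count (die_mark) first, with ties broken by larger DW_AT_byte_size
   and then by larger DW_AT_encoding, so the most frequently referenced
   base types end up earliest in the CU and get the smallest DIE
   offsets.  */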
26254
26255 static int
26256 base_type_cmp (const void *x, const void *y)
26257 {
26258 dw_die_ref dx = *(const dw_die_ref *) x;
26259 dw_die_ref dy = *(const dw_die_ref *) y;
26260 unsigned int byte_size1, byte_size2;
26261 unsigned int encoding1, encoding2;
26262 if (dx->die_mark > dy->die_mark)
26263 return -1;
26264 if (dx->die_mark < dy->die_mark)
26265 return 1;
26266 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
26267 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
26268 if (byte_size1 < byte_size2)
26269 return 1;
26270 if (byte_size1 > byte_size2)
26271 return -1;
26272 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
26273 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
26274 if (encoding1 < encoding2)
26275 return 1;
26276 if (encoding1 > encoding2)
26277 return -1;
26278 return 0;
26279 }
26280
26281 /* Move base types marked by mark_base_types as early as possible
26282 in the CU, sorted by decreasing usage count both to make the
26283 uleb128 references as small as possible and to make sure they
26284 will have die_offset already computed by calc_die_sizes when
26285 the sizes of typed stack loc ops are computed. */
26286
26287 static void
26288 move_marked_base_types (void)
26289 {
26290 unsigned int i;
26291 dw_die_ref base_type, die, c;
26292
26293 if (base_types.is_empty ())
26294 return;
26295
26296 /* Sort by decreasing usage count; they will be added again in that
26297 order later on. */
26298 base_types.qsort (base_type_cmp);
26299 die = comp_unit_die ();
26300 c = die->die_child;
26301 do
26302 {
26303 dw_die_ref prev = c;
26304 c = c->die_sib;
26305 while (c->die_mark)
26306 {
26307 remove_child_with_prev (c, prev);
26308 /* As base types got marked, there must be at least
26309 one node other than DW_TAG_base_type. */
26310 gcc_assert (c != c->die_sib);
26311 c = c->die_sib;
26312 }
26313 }
26314 while (c != die->die_child);
26315 gcc_assert (die->die_child);
26316 c = die->die_child;
26317 for (i = 0; base_types.iterate (i, &base_type); i++)
26318 {
26319 base_type->die_mark = 0;
26320 base_type->die_sib = c->die_sib;
26321 c->die_sib = base_type;
26322 c = base_type;
26323 }
26324 }
26325
26326 /* Helper function for resolve_addr. Attempt to resolve
26327 one CONST_STRING and return true if successful. Similarly, verify that
26328 SYMBOL_REFs refer to variables emitted in the current CU. */
26329
26330 static bool
26331 resolve_one_addr (rtx *addr)
26332 {
26333 rtx rtl = *addr;
26334
26335 if (GET_CODE (rtl) == CONST_STRING)
26336 {
26337 size_t len = strlen (XSTR (rtl, 0)) + 1;
26338 tree t = build_string (len, XSTR (rtl, 0));
26339 tree tlen = size_int (len - 1);
26340 TREE_TYPE (t)
26341 = build_array_type (char_type_node, build_index_type (tlen));
26342 rtl = lookup_constant_def (t);
26343 if (!rtl || !MEM_P (rtl))
26344 return false;
26345 rtl = XEXP (rtl, 0);
26346 if (GET_CODE (rtl) == SYMBOL_REF
26347 && SYMBOL_REF_DECL (rtl)
26348 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
26349 return false;
26350 vec_safe_push (used_rtx_array, rtl);
26351 *addr = rtl;
26352 return true;
26353 }
26354
26355 if (GET_CODE (rtl) == SYMBOL_REF
26356 && SYMBOL_REF_DECL (rtl))
26357 {
26358 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
26359 {
26360 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
26361 return false;
26362 }
26363 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
26364 return false;
26365 }
26366
26367 if (GET_CODE (rtl) == CONST)
26368 {
26369 subrtx_ptr_iterator::array_type array;
26370 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
26371 if (!resolve_one_addr (*iter))
26372 return false;
26373 }
26374
26375 return true;
26376 }
26377
26378 /* For STRING_CST, return SYMBOL_REF of its constant pool entry,
26379 if possible, and create DW_TAG_dwarf_procedure that can be referenced
26380 from DW_OP_GNU_implicit_pointer if the string hasn't been seen yet. */
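/* A sketch of the DIE created the first time a literal is seen (the
   example string and byte values are illustrative): for a STRING_CST
   "hi" of length 3 this builds roughly

       DW_TAG_dwarf_procedure
         DW_AT_location: DW_OP_implicit_value 3, { 'h', 'i', '\0' }

   giving DW_OP_GNU_implicit_pointer a DIE that describes the string
   contents.  */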
26381
26382 static rtx
26383 string_cst_pool_decl (tree t)
26384 {
26385 rtx rtl = output_constant_def (t, 1);
26386 unsigned char *array;
26387 dw_loc_descr_ref l;
26388 tree decl;
26389 size_t len;
26390 dw_die_ref ref;
26391
26392 if (!rtl || !MEM_P (rtl))
26393 return NULL_RTX;
26394 rtl = XEXP (rtl, 0);
26395 if (GET_CODE (rtl) != SYMBOL_REF
26396 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
26397 return NULL_RTX;
26398
26399 decl = SYMBOL_REF_DECL (rtl);
26400 if (!lookup_decl_die (decl))
26401 {
26402 len = TREE_STRING_LENGTH (t);
26403 vec_safe_push (used_rtx_array, rtl);
26404 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
26405 array = ggc_vec_alloc<unsigned char> (len);
26406 memcpy (array, TREE_STRING_POINTER (t), len);
26407 l = new_loc_descr (DW_OP_implicit_value, len, 0);
26408 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
26409 l->dw_loc_oprnd2.v.val_vec.length = len;
26410 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
26411 l->dw_loc_oprnd2.v.val_vec.array = array;
26412 add_AT_loc (ref, DW_AT_location, l);
26413 equate_decl_number_to_die (decl, ref);
26414 }
26415 return rtl;
26416 }
26417
26418 /* Helper function of resolve_addr_in_expr. LOC is
26419 a DW_OP_addr followed by DW_OP_stack_value, either at the start
26420 of exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
26421 resolved. Replace it (both DW_OP_addr and DW_OP_stack_value)
26422 with DW_OP_GNU_implicit_pointer if possible and return true;
26423 if unsuccessful, return false. */
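/* For example (the variable name is invented), assuming "v" has a DIE
   with DW_AT_location or DW_AT_const_value:

       DW_OP_addr v+16; DW_OP_stack_value
   becomes
       DW_OP_GNU_implicit_pointer <DIE of v> 16

   i.e. the unresolvable address is replaced by a reference to v's DIE
   plus the constant offset folded into the original address.  */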
26424
26425 static bool
26426 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
26427 {
26428 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
26429 HOST_WIDE_INT offset = 0;
26430 dw_die_ref ref = NULL;
26431 tree decl;
26432
26433 if (GET_CODE (rtl) == CONST
26434 && GET_CODE (XEXP (rtl, 0)) == PLUS
26435 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
26436 {
26437 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
26438 rtl = XEXP (XEXP (rtl, 0), 0);
26439 }
26440 if (GET_CODE (rtl) == CONST_STRING)
26441 {
26442 size_t len = strlen (XSTR (rtl, 0)) + 1;
26443 tree t = build_string (len, XSTR (rtl, 0));
26444 tree tlen = size_int (len - 1);
26445
26446 TREE_TYPE (t)
26447 = build_array_type (char_type_node, build_index_type (tlen));
26448 rtl = string_cst_pool_decl (t);
26449 if (!rtl)
26450 return false;
26451 }
26452 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
26453 {
26454 decl = SYMBOL_REF_DECL (rtl);
26455 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
26456 {
26457 ref = lookup_decl_die (decl);
26458 if (ref && (get_AT (ref, DW_AT_location)
26459 || get_AT (ref, DW_AT_const_value)))
26460 {
26461 loc->dw_loc_opc = DW_OP_GNU_implicit_pointer;
26462 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
26463 loc->dw_loc_oprnd1.val_entry = NULL;
26464 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
26465 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
26466 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
26467 loc->dw_loc_oprnd2.v.val_int = offset;
26468 return true;
26469 }
26470 }
26471 }
26472 return false;
26473 }
26474
26475 /* Helper function for resolve_addr. Handle one location
26476 expression and return false if at least one CONST_STRING or SYMBOL_REF in
26477 the expression couldn't be resolved. */
26478
26479 static bool
26480 resolve_addr_in_expr (dw_loc_descr_ref loc)
26481 {
26482 dw_loc_descr_ref keep = NULL;
26483 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
26484 switch (loc->dw_loc_opc)
26485 {
26486 case DW_OP_addr:
26487 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
26488 {
26489 if ((prev == NULL
26490 || prev->dw_loc_opc == DW_OP_piece
26491 || prev->dw_loc_opc == DW_OP_bit_piece)
26492 && loc->dw_loc_next
26493 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
26494 && !dwarf_strict
26495 && optimize_one_addr_into_implicit_ptr (loc))
26496 break;
26497 return false;
26498 }
26499 break;
26500 case DW_OP_GNU_addr_index:
26501 case DW_OP_GNU_const_index:
26502 if (loc->dw_loc_opc == DW_OP_GNU_addr_index
26503 || (loc->dw_loc_opc == DW_OP_GNU_const_index && loc->dtprel))
26504 {
26505 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
26506 if (!resolve_one_addr (&rtl))
26507 return false;
26508 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
26509 loc->dw_loc_oprnd1.val_entry =
26510 add_addr_table_entry (rtl, ate_kind_rtx);
26511 }
26512 break;
26513 case DW_OP_const4u:
26514 case DW_OP_const8u:
26515 if (loc->dtprel
26516 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
26517 return false;
26518 break;
26519 case DW_OP_plus_uconst:
26520 if (size_of_loc_descr (loc)
26521 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
26522 + 1
26523 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
26524 {
26525 dw_loc_descr_ref repl
26526 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
26527 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
26528 add_loc_descr (&repl, loc->dw_loc_next);
26529 *loc = *repl;
26530 }
26531 break;
26532 case DW_OP_implicit_value:
26533 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
26534 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
26535 return false;
26536 break;
26537 case DW_OP_GNU_implicit_pointer:
26538 case DW_OP_GNU_parameter_ref:
26539 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
26540 {
26541 dw_die_ref ref
26542 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
26543 if (ref == NULL)
26544 return false;
26545 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
26546 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
26547 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
26548 }
26549 break;
26550 case DW_OP_GNU_const_type:
26551 case DW_OP_GNU_regval_type:
26552 case DW_OP_GNU_deref_type:
26553 case DW_OP_GNU_convert:
26554 case DW_OP_GNU_reinterpret:
26555 while (loc->dw_loc_next
26556 && loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert)
26557 {
26558 dw_die_ref base1, base2;
26559 unsigned enc1, enc2, size1, size2;
26560 if (loc->dw_loc_opc == DW_OP_GNU_regval_type
26561 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
26562 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
26563 else if (loc->dw_loc_oprnd1.val_class
26564 == dw_val_class_unsigned_const)
26565 break;
26566 else
26567 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
26568 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
26569 == dw_val_class_unsigned_const)
26570 break;
26571 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
26572 gcc_assert (base1->die_tag == DW_TAG_base_type
26573 && base2->die_tag == DW_TAG_base_type);
26574 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
26575 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
26576 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
26577 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
26578 if (size1 == size2
26579 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
26580 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
26581 && loc != keep)
26582 || enc1 == enc2))
26583 {
26584 /* Optimize away next DW_OP_GNU_convert after
26585 adjusting LOC's base type die reference. */
26586 if (loc->dw_loc_opc == DW_OP_GNU_regval_type
26587 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
26588 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
26589 else
26590 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
26591 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
26592 continue;
26593 }
26594 /* Don't change integer DW_OP_GNU_convert after e.g. floating
26595 point typed stack entry. */
26596 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
26597 keep = loc->dw_loc_next;
26598 break;
26599 }
26600 break;
26601 default:
26602 break;
26603 }
26604 return true;
26605 }
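
/* As an illustration of the DW_OP_GNU_convert merging above (the base
   type names are made up): with 32-bit "int" and "unsigned int" base
   type DIEs,

       DW_OP_GNU_regval_type <r3, int>; DW_OP_GNU_convert <unsigned int>

   can be shortened to

       DW_OP_GNU_regval_type <r3, unsigned int>

   because the two types have the same byte size and both use plain
   integer encodings, so the separate conversion can be folded into the
   type used to read the register.  */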
26606
26607 /* Helper function of resolve_addr. DIE had a DW_AT_location of
26608 DW_OP_addr alone, which referred to DECL in DW_OP_addr's operand,
26609 and the DW_OP_addr couldn't be resolved. resolve_addr has already
26610 removed the DW_AT_location attribute. This function attempts to
26611 add a new DW_AT_location attribute with DW_OP_GNU_implicit_pointer,
26612 or a DW_AT_const_value attribute, if possible. */
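/* For instance (purely illustrative): if "p" was optimized away but its
   initializer was "&q + 4" and q's DIE carries a DW_AT_location, the
   code below gives p's DIE

       DW_AT_location: DW_OP_GNU_implicit_pointer <DIE of q> 4

   instead of leaving it with no location at all.  */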
26613
26614 static void
26615 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
26616 {
26617 if (TREE_CODE (decl) != VAR_DECL
26618 || lookup_decl_die (decl) != die
26619 || DECL_EXTERNAL (decl)
26620 || !TREE_STATIC (decl)
26621 || DECL_INITIAL (decl) == NULL_TREE
26622 || DECL_P (DECL_INITIAL (decl))
26623 || get_AT (die, DW_AT_const_value))
26624 return;
26625
26626 tree init = DECL_INITIAL (decl);
26627 HOST_WIDE_INT offset = 0;
26628 /* For variables that have been optimized away and thus
26629 don't have a memory location, see if we can emit
26630 DW_AT_const_value instead. */
26631 if (tree_add_const_value_attribute (die, init))
26632 return;
26633 if (dwarf_strict)
26634 return;
26635 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
26636 and ADDR_EXPR refers to a decl that has DW_AT_location or
26637 DW_AT_const_value (but isn't addressable, otherwise
26638 resolving the original DW_OP_addr wouldn't fail), see if
26639 we can add DW_OP_GNU_implicit_pointer. */
26640 STRIP_NOPS (init);
26641 if (TREE_CODE (init) == POINTER_PLUS_EXPR
26642 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
26643 {
26644 offset = tree_to_shwi (TREE_OPERAND (init, 1));
26645 init = TREE_OPERAND (init, 0);
26646 STRIP_NOPS (init);
26647 }
26648 if (TREE_CODE (init) != ADDR_EXPR)
26649 return;
26650 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
26651 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
26652 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
26653 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
26654 && TREE_OPERAND (init, 0) != decl))
26655 {
26656 dw_die_ref ref;
26657 dw_loc_descr_ref l;
26658
26659 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
26660 {
26661 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
26662 if (!rtl)
26663 return;
26664 decl = SYMBOL_REF_DECL (rtl);
26665 }
26666 else
26667 decl = TREE_OPERAND (init, 0);
26668 ref = lookup_decl_die (decl);
26669 if (ref == NULL
26670 || (!get_AT (ref, DW_AT_location)
26671 && !get_AT (ref, DW_AT_const_value)))
26672 return;
26673 l = new_loc_descr (DW_OP_GNU_implicit_pointer, 0, offset);
26674 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
26675 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
26676 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
26677 add_AT_loc (die, DW_AT_location, l);
26678 }
26679 }
26680
26681 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
26682 an address in the .rodata section if the string literal is emitted
26683 there; if it isn't found in .rodata, remove the containing location
26684 list, or replace DW_AT_const_value with DW_AT_location and an empty
26685 location expression. Similarly for SYMBOL_REFs, keep only those that
26686 refer to something that has been emitted in the current CU. */
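/* Concretely (a restatement of the cases handled below): if a range of
   a location list contains a DW_OP_addr whose SYMBOL_REF was never
   emitted in this CU, that range is dropped; if every range is dropped,
   the DW_AT_location attribute itself is removed.  A single-expression
   DW_AT_location that fails to resolve is likewise removed; for a plain
   DW_OP_addr of a known decl, optimize_location_into_implicit_ptr is
   then tried as a fallback.  */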
26687
26688 static void
26689 resolve_addr (dw_die_ref die)
26690 {
26691 dw_die_ref c;
26692 dw_attr_node *a;
26693 dw_loc_list_ref *curr, *start, loc;
26694 unsigned ix;
26695
26696 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
26697 switch (AT_class (a))
26698 {
26699 case dw_val_class_loc_list:
26700 start = curr = AT_loc_list_ptr (a);
26701 loc = *curr;
26702 gcc_assert (loc);
26703 /* The same list can be referenced more than once. See if we have
26704 already recorded the result from a previous pass. */
26705 if (loc->replaced)
26706 *curr = loc->dw_loc_next;
26707 else if (!loc->resolved_addr)
26708 {
26709 /* As things stand, we do not expect or allow one die to
26710 reference a suffix of another die's location list chain.
26711 References must be identical or completely separate.
26712 There is therefore no need to cache the result of this
26713 pass on any list other than the first; doing so
26714 would lead to unnecessary writes. */
26715 while (*curr)
26716 {
26717 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
26718 if (!resolve_addr_in_expr ((*curr)->expr))
26719 {
26720 dw_loc_list_ref next = (*curr)->dw_loc_next;
26721 dw_loc_descr_ref l = (*curr)->expr;
26722
26723 if (next && (*curr)->ll_symbol)
26724 {
26725 gcc_assert (!next->ll_symbol);
26726 next->ll_symbol = (*curr)->ll_symbol;
26727 }
26728 if (dwarf_split_debug_info)
26729 remove_loc_list_addr_table_entries (l);
26730 *curr = next;
26731 }
26732 else
26733 {
26734 mark_base_types ((*curr)->expr);
26735 curr = &(*curr)->dw_loc_next;
26736 }
26737 }
26738 if (loc == *start)
26739 loc->resolved_addr = 1;
26740 else
26741 {
26742 loc->replaced = 1;
26743 loc->dw_loc_next = *start;
26744 }
26745 }
26746 if (!*start)
26747 {
26748 remove_AT (die, a->dw_attr);
26749 ix--;
26750 }
26751 break;
26752 case dw_val_class_loc:
26753 {
26754 dw_loc_descr_ref l = AT_loc (a);
26755 /* For -gdwarf-2 don't attempt to optimize
26756 DW_AT_data_member_location containing
26757 DW_OP_plus_uconst - older consumers might
26758 rely on it being that op instead of a more complex,
26759 but shorter, location description. */
26760 if ((dwarf_version > 2
26761 || a->dw_attr != DW_AT_data_member_location
26762 || l == NULL
26763 || l->dw_loc_opc != DW_OP_plus_uconst
26764 || l->dw_loc_next != NULL)
26765 && !resolve_addr_in_expr (l))
26766 {
26767 if (dwarf_split_debug_info)
26768 remove_loc_list_addr_table_entries (l);
26769 if (l != NULL
26770 && l->dw_loc_next == NULL
26771 && l->dw_loc_opc == DW_OP_addr
26772 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
26773 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
26774 && a->dw_attr == DW_AT_location)
26775 {
26776 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
26777 remove_AT (die, a->dw_attr);
26778 ix--;
26779 optimize_location_into_implicit_ptr (die, decl);
26780 break;
26781 }
26782 remove_AT (die, a->dw_attr);
26783 ix--;
26784 }
26785 else
26786 mark_base_types (l);
26787 }
26788 break;
26789 case dw_val_class_addr:
26790 if (a->dw_attr == DW_AT_const_value
26791 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
26792 {
26793 if (AT_index (a) != NOT_INDEXED)
26794 remove_addr_table_entry (a->dw_attr_val.val_entry);
26795 remove_AT (die, a->dw_attr);
26796 ix--;
26797 }
26798 if (die->die_tag == DW_TAG_GNU_call_site
26799 && a->dw_attr == DW_AT_abstract_origin)
26800 {
26801 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
26802 dw_die_ref tdie = lookup_decl_die (tdecl);
26803 dw_die_ref cdie;
26804 if (tdie == NULL
26805 && DECL_EXTERNAL (tdecl)
26806 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
26807 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
26808 {
26809 /* Creating a full DIE for tdecl is overly expensive and
26810 at this point even wrong when in the LTO phase
26811 as it can end up generating new type DIEs we didn't
26812 output and thus optimize_external_refs will crash. */
26813 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
26814 add_AT_flag (tdie, DW_AT_external, 1);
26815 add_AT_flag (tdie, DW_AT_declaration, 1);
26816 add_linkage_attr (tdie, tdecl);
26817 add_name_and_src_coords_attributes (tdie, tdecl);
26818 equate_decl_number_to_die (tdecl, tdie);
26819 }
26820 if (tdie)
26821 {
26822 a->dw_attr_val.val_class = dw_val_class_die_ref;
26823 a->dw_attr_val.v.val_die_ref.die = tdie;
26824 a->dw_attr_val.v.val_die_ref.external = 0;
26825 }
26826 else
26827 {
26828 if (AT_index (a) != NOT_INDEXED)
26829 remove_addr_table_entry (a->dw_attr_val.val_entry);
26830 remove_AT (die, a->dw_attr);
26831 ix--;
26832 }
26833 }
26834 break;
26835 default:
26836 break;
26837 }
26838
26839 FOR_EACH_CHILD (die, c, resolve_addr (c));
26840 }
26841 \f
26842 /* Helper routines for optimize_location_lists.
26843 This pass tries to share identical location lists in the .debug_loc
26844 section. */
26845
26846 /* Iteratively hash operands of LOC opcode into HSTATE. */
26847
26848 static void
26849 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
26850 {
26851 dw_val_ref val1 = &loc->dw_loc_oprnd1;
26852 dw_val_ref val2 = &loc->dw_loc_oprnd2;
26853
26854 switch (loc->dw_loc_opc)
26855 {
26856 case DW_OP_const4u:
26857 case DW_OP_const8u:
26858 if (loc->dtprel)
26859 goto hash_addr;
26860 /* FALLTHRU */
26861 case DW_OP_const1u:
26862 case DW_OP_const1s:
26863 case DW_OP_const2u:
26864 case DW_OP_const2s:
26865 case DW_OP_const4s:
26866 case DW_OP_const8s:
26867 case DW_OP_constu:
26868 case DW_OP_consts:
26869 case DW_OP_pick:
26870 case DW_OP_plus_uconst:
26871 case DW_OP_breg0:
26872 case DW_OP_breg1:
26873 case DW_OP_breg2:
26874 case DW_OP_breg3:
26875 case DW_OP_breg4:
26876 case DW_OP_breg5:
26877 case DW_OP_breg6:
26878 case DW_OP_breg7:
26879 case DW_OP_breg8:
26880 case DW_OP_breg9:
26881 case DW_OP_breg10:
26882 case DW_OP_breg11:
26883 case DW_OP_breg12:
26884 case DW_OP_breg13:
26885 case DW_OP_breg14:
26886 case DW_OP_breg15:
26887 case DW_OP_breg16:
26888 case DW_OP_breg17:
26889 case DW_OP_breg18:
26890 case DW_OP_breg19:
26891 case DW_OP_breg20:
26892 case DW_OP_breg21:
26893 case DW_OP_breg22:
26894 case DW_OP_breg23:
26895 case DW_OP_breg24:
26896 case DW_OP_breg25:
26897 case DW_OP_breg26:
26898 case DW_OP_breg27:
26899 case DW_OP_breg28:
26900 case DW_OP_breg29:
26901 case DW_OP_breg30:
26902 case DW_OP_breg31:
26903 case DW_OP_regx:
26904 case DW_OP_fbreg:
26905 case DW_OP_piece:
26906 case DW_OP_deref_size:
26907 case DW_OP_xderef_size:
26908 hstate.add_object (val1->v.val_int);
26909 break;
26910 case DW_OP_skip:
26911 case DW_OP_bra:
26912 {
26913 int offset;
26914
26915 gcc_assert (val1->val_class == dw_val_class_loc);
26916 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
26917 hstate.add_object (offset);
26918 }
26919 break;
26920 case DW_OP_implicit_value:
26921 hstate.add_object (val1->v.val_unsigned);
26922 switch (val2->val_class)
26923 {
26924 case dw_val_class_const:
26925 hstate.add_object (val2->v.val_int);
26926 break;
26927 case dw_val_class_vec:
26928 {
26929 unsigned int elt_size = val2->v.val_vec.elt_size;
26930 unsigned int len = val2->v.val_vec.length;
26931
26932 hstate.add_int (elt_size);
26933 hstate.add_int (len);
26934 hstate.add (val2->v.val_vec.array, len * elt_size);
26935 }
26936 break;
26937 case dw_val_class_const_double:
26938 hstate.add_object (val2->v.val_double.low);
26939 hstate.add_object (val2->v.val_double.high);
26940 break;
26941 case dw_val_class_wide_int:
26942 hstate.add (val2->v.val_wide->get_val (),
26943 get_full_len (*val2->v.val_wide)
26944 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
26945 break;
26946 case dw_val_class_addr:
26947 inchash::add_rtx (val2->v.val_addr, hstate);
26948 break;
26949 default:
26950 gcc_unreachable ();
26951 }
26952 break;
26953 case DW_OP_bregx:
26954 case DW_OP_bit_piece:
26955 hstate.add_object (val1->v.val_int);
26956 hstate.add_object (val2->v.val_int);
26957 break;
26958 case DW_OP_addr:
26959 hash_addr:
26960 if (loc->dtprel)
26961 {
26962 unsigned char dtprel = 0xd1;
26963 hstate.add_object (dtprel);
26964 }
26965 inchash::add_rtx (val1->v.val_addr, hstate);
26966 break;
26967 case DW_OP_GNU_addr_index:
26968 case DW_OP_GNU_const_index:
26969 {
26970 if (loc->dtprel)
26971 {
26972 unsigned char dtprel = 0xd1;
26973 hstate.add_object (dtprel);
26974 }
26975 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
26976 }
26977 break;
26978 case DW_OP_GNU_implicit_pointer:
26979 hstate.add_int (val2->v.val_int);
26980 break;
26981 case DW_OP_GNU_entry_value:
26982 hstate.add_object (val1->v.val_loc);
26983 break;
26984 case DW_OP_GNU_regval_type:
26985 case DW_OP_GNU_deref_type:
26986 {
26987 unsigned int byte_size
26988 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
26989 unsigned int encoding
26990 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
26991 hstate.add_object (val1->v.val_int);
26992 hstate.add_object (byte_size);
26993 hstate.add_object (encoding);
26994 }
26995 break;
26996 case DW_OP_GNU_convert:
26997 case DW_OP_GNU_reinterpret:
26998 if (val1->val_class == dw_val_class_unsigned_const)
26999 {
27000 hstate.add_object (val1->v.val_unsigned);
27001 break;
27002 }
27003 /* FALLTHRU */
27004 case DW_OP_GNU_const_type:
27005 {
27006 unsigned int byte_size
27007 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
27008 unsigned int encoding
27009 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
27010 hstate.add_object (byte_size);
27011 hstate.add_object (encoding);
27012 if (loc->dw_loc_opc != DW_OP_GNU_const_type)
27013 break;
27014 hstate.add_object (val2->val_class);
27015 switch (val2->val_class)
27016 {
27017 case dw_val_class_const:
27018 hstate.add_object (val2->v.val_int);
27019 break;
27020 case dw_val_class_vec:
27021 {
27022 unsigned int elt_size = val2->v.val_vec.elt_size;
27023 unsigned int len = val2->v.val_vec.length;
27024
27025 hstate.add_object (elt_size);
27026 hstate.add_object (len);
27027 hstate.add (val2->v.val_vec.array, len * elt_size);
27028 }
27029 break;
27030 case dw_val_class_const_double:
27031 hstate.add_object (val2->v.val_double.low);
27032 hstate.add_object (val2->v.val_double.high);
27033 break;
27034 case dw_val_class_wide_int:
27035 hstate.add (val2->v.val_wide->get_val (),
27036 get_full_len (*val2->v.val_wide)
27037 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
27038 break;
27039 default:
27040 gcc_unreachable ();
27041 }
27042 }
27043 break;
27044
27045 default:
27046 /* Other codes have no operands. */
27047 break;
27048 }
27049 }
27050
27051 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
27052
27053 static inline void
27054 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
27055 {
27056 dw_loc_descr_ref l;
27057 bool sizes_computed = false;
27058 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
27059 size_of_locs (loc);
27060
27061 for (l = loc; l != NULL; l = l->dw_loc_next)
27062 {
27063 enum dwarf_location_atom opc = l->dw_loc_opc;
27064 hstate.add_object (opc);
27065 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
27066 {
27067 size_of_locs (loc);
27068 sizes_computed = true;
27069 }
27070 hash_loc_operands (l, hstate);
27071 }
27072 }
27073
27074 /* Compute hash of the whole location list LIST_HEAD. */
27075
27076 static inline void
27077 hash_loc_list (dw_loc_list_ref list_head)
27078 {
27079 dw_loc_list_ref curr = list_head;
27080 inchash::hash hstate;
27081
27082 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
27083 {
27084 hstate.add (curr->begin, strlen (curr->begin) + 1);
27085 hstate.add (curr->end, strlen (curr->end) + 1);
27086 if (curr->section)
27087 hstate.add (curr->section, strlen (curr->section) + 1);
27088 hash_locs (curr->expr, hstate);
27089 }
27090 list_head->hash = hstate.end ();
27091 }
27092
27093 /* Return true if X and Y opcodes have the same operands. */
27094
27095 static inline bool
27096 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
27097 {
27098 dw_val_ref valx1 = &x->dw_loc_oprnd1;
27099 dw_val_ref valx2 = &x->dw_loc_oprnd2;
27100 dw_val_ref valy1 = &y->dw_loc_oprnd1;
27101 dw_val_ref valy2 = &y->dw_loc_oprnd2;
27102
27103 switch (x->dw_loc_opc)
27104 {
27105 case DW_OP_const4u:
27106 case DW_OP_const8u:
27107 if (x->dtprel)
27108 goto hash_addr;
27109 /* FALLTHRU */
27110 case DW_OP_const1u:
27111 case DW_OP_const1s:
27112 case DW_OP_const2u:
27113 case DW_OP_const2s:
27114 case DW_OP_const4s:
27115 case DW_OP_const8s:
27116 case DW_OP_constu:
27117 case DW_OP_consts:
27118 case DW_OP_pick:
27119 case DW_OP_plus_uconst:
27120 case DW_OP_breg0:
27121 case DW_OP_breg1:
27122 case DW_OP_breg2:
27123 case DW_OP_breg3:
27124 case DW_OP_breg4:
27125 case DW_OP_breg5:
27126 case DW_OP_breg6:
27127 case DW_OP_breg7:
27128 case DW_OP_breg8:
27129 case DW_OP_breg9:
27130 case DW_OP_breg10:
27131 case DW_OP_breg11:
27132 case DW_OP_breg12:
27133 case DW_OP_breg13:
27134 case DW_OP_breg14:
27135 case DW_OP_breg15:
27136 case DW_OP_breg16:
27137 case DW_OP_breg17:
27138 case DW_OP_breg18:
27139 case DW_OP_breg19:
27140 case DW_OP_breg20:
27141 case DW_OP_breg21:
27142 case DW_OP_breg22:
27143 case DW_OP_breg23:
27144 case DW_OP_breg24:
27145 case DW_OP_breg25:
27146 case DW_OP_breg26:
27147 case DW_OP_breg27:
27148 case DW_OP_breg28:
27149 case DW_OP_breg29:
27150 case DW_OP_breg30:
27151 case DW_OP_breg31:
27152 case DW_OP_regx:
27153 case DW_OP_fbreg:
27154 case DW_OP_piece:
27155 case DW_OP_deref_size:
27156 case DW_OP_xderef_size:
27157 return valx1->v.val_int == valy1->v.val_int;
27158 case DW_OP_skip:
27159 case DW_OP_bra:
27160 /* If splitting debug info, the use of DW_OP_GNU_addr_index
27161 can cause irrelevant differences in dw_loc_addr. */
27162 gcc_assert (valx1->val_class == dw_val_class_loc
27163 && valy1->val_class == dw_val_class_loc
27164 && (dwarf_split_debug_info
27165 || x->dw_loc_addr == y->dw_loc_addr));
27166 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
27167 case DW_OP_implicit_value:
27168 if (valx1->v.val_unsigned != valy1->v.val_unsigned
27169 || valx2->val_class != valy2->val_class)
27170 return false;
27171 switch (valx2->val_class)
27172 {
27173 case dw_val_class_const:
27174 return valx2->v.val_int == valy2->v.val_int;
27175 case dw_val_class_vec:
27176 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
27177 && valx2->v.val_vec.length == valy2->v.val_vec.length
27178 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
27179 valx2->v.val_vec.elt_size
27180 * valx2->v.val_vec.length) == 0;
27181 case dw_val_class_const_double:
27182 return valx2->v.val_double.low == valy2->v.val_double.low
27183 && valx2->v.val_double.high == valy2->v.val_double.high;
27184 case dw_val_class_wide_int:
27185 return *valx2->v.val_wide == *valy2->v.val_wide;
27186 case dw_val_class_addr:
27187 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
27188 default:
27189 gcc_unreachable ();
27190 }
27191 case DW_OP_bregx:
27192 case DW_OP_bit_piece:
27193 return valx1->v.val_int == valy1->v.val_int
27194 && valx2->v.val_int == valy2->v.val_int;
27195 case DW_OP_addr:
27196 hash_addr:
27197 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
27198 case DW_OP_GNU_addr_index:
27199 case DW_OP_GNU_const_index:
27200 {
27201 rtx ax1 = valx1->val_entry->addr.rtl;
27202 rtx ay1 = valy1->val_entry->addr.rtl;
27203 return rtx_equal_p (ax1, ay1);
27204 }
27205 case DW_OP_GNU_implicit_pointer:
27206 return valx1->val_class == dw_val_class_die_ref
27207 && valx1->val_class == valy1->val_class
27208 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
27209 && valx2->v.val_int == valy2->v.val_int;
27210 case DW_OP_GNU_entry_value:
27211 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
27212 case DW_OP_GNU_const_type:
27213 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
27214 || valx2->val_class != valy2->val_class)
27215 return false;
27216 switch (valx2->val_class)
27217 {
27218 case dw_val_class_const:
27219 return valx2->v.val_int == valy2->v.val_int;
27220 case dw_val_class_vec:
27221 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
27222 && valx2->v.val_vec.length == valy2->v.val_vec.length
27223 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
27224 valx2->v.val_vec.elt_size
27225 * valx2->v.val_vec.length) == 0;
27226 case dw_val_class_const_double:
27227 return valx2->v.val_double.low == valy2->v.val_double.low
27228 && valx2->v.val_double.high == valy2->v.val_double.high;
27229 case dw_val_class_wide_int:
27230 return *valx2->v.val_wide == *valy2->v.val_wide;
27231 default:
27232 gcc_unreachable ();
27233 }
27234 case DW_OP_GNU_regval_type:
27235 case DW_OP_GNU_deref_type:
27236 return valx1->v.val_int == valy1->v.val_int
27237 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
27238 case DW_OP_GNU_convert:
27239 case DW_OP_GNU_reinterpret:
27240 if (valx1->val_class != valy1->val_class)
27241 return false;
27242 if (valx1->val_class == dw_val_class_unsigned_const)
27243 return valx1->v.val_unsigned == valy1->v.val_unsigned;
27244 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
27245 case DW_OP_GNU_parameter_ref:
27246 return valx1->val_class == dw_val_class_die_ref
27247 && valx1->val_class == valy1->val_class
27248 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
27249 default:
27250 /* Other codes have no operands. */
27251 return true;
27252 }
27253 }
27254
27255 /* Return true if DWARF location expressions X and Y are the same. */
27256
27257 static inline bool
27258 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
27259 {
27260 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
27261 if (x->dw_loc_opc != y->dw_loc_opc
27262 || x->dtprel != y->dtprel
27263 || !compare_loc_operands (x, y))
27264 break;
27265 return x == NULL && y == NULL;
27266 }
27267
27268 /* Hashtable helpers. */
27269
27270 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
27271 {
27272 static inline hashval_t hash (const dw_loc_list_struct *);
27273 static inline bool equal (const dw_loc_list_struct *,
27274 const dw_loc_list_struct *);
27275 };
27276
27277 /* Return precomputed hash of location list X. */
27278
27279 inline hashval_t
27280 loc_list_hasher::hash (const dw_loc_list_struct *x)
27281 {
27282 return x->hash;
27283 }
27284
27285 /* Return true if location lists A and B are the same. */
27286
27287 inline bool
27288 loc_list_hasher::equal (const dw_loc_list_struct *a,
27289 const dw_loc_list_struct *b)
27290 {
27291 if (a == b)
27292 return 1;
27293 if (a->hash != b->hash)
27294 return 0;
27295 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
27296 if (strcmp (a->begin, b->begin) != 0
27297 || strcmp (a->end, b->end) != 0
27298 || (a->section == NULL) != (b->section == NULL)
27299 || (a->section && strcmp (a->section, b->section) != 0)
27300 || !compare_locs (a->expr, b->expr))
27301 break;
27302 return a == NULL && b == NULL;
27303 }
27304
27305 typedef hash_table<loc_list_hasher> loc_list_hash_type;
27306
27307
27308 /* Recursively optimize location lists referenced from DIE
27309 children and share them whenever possible. */
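/* For example (the variable names are invented): if two locals "i" and
   "j" end up with identical location lists (same begin/end labels, same
   section, same expressions), the second DIE's DW_AT_location is simply
   redirected to the first list via the hash table, so the shared data
   is emitted into .debug_loc only once.  */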
27310
27311 static void
27312 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
27313 {
27314 dw_die_ref c;
27315 dw_attr_node *a;
27316 unsigned ix;
27317 dw_loc_list_struct **slot;
27318
27319 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
27320 if (AT_class (a) == dw_val_class_loc_list)
27321 {
27322 dw_loc_list_ref list = AT_loc_list (a);
27323 /* TODO: perform some optimizations here, before hashing
27324 it and storing into the hash table. */
27325 hash_loc_list (list);
27326 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
27327 if (*slot == NULL)
27328 *slot = list;
27329 else
27330 a->dw_attr_val.v.val_loc_list = *slot;
27331 }
27332
27333 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
27334 }
27335
27336
27337 /* Recursively assign each location list a unique index into the debug_addr
27338 section. */
27339
27340 static void
27341 index_location_lists (dw_die_ref die)
27342 {
27343 dw_die_ref c;
27344 dw_attr_node *a;
27345 unsigned ix;
27346
27347 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
27348 if (AT_class (a) == dw_val_class_loc_list)
27349 {
27350 dw_loc_list_ref list = AT_loc_list (a);
27351 dw_loc_list_ref curr;
27352 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
27353 {
27354 /* Don't index an entry that has already been indexed
27355 or won't be output. */
27356 if (curr->begin_entry != NULL
27357 || (strcmp (curr->begin, curr->end) == 0 && !curr->force))
27358 continue;
27359
27360 curr->begin_entry
27361 = add_addr_table_entry (xstrdup (curr->begin),
27362 ate_kind_label);
27363 }
27364 }
27365
27366 FOR_EACH_CHILD (die, c, index_location_lists (c));
27367 }
27368
27369 /* Optimize location lists referenced from DIE
27370 children and share them whenever possible. */
27371
27372 static void
27373 optimize_location_lists (dw_die_ref die)
27374 {
27375 loc_list_hash_type htab (500);
27376 optimize_location_lists_1 (die, &htab);
27377 }
27378 \f
27379 /* Traverse the limbo die list, and add parent/child links. The only
27380 dies without parents that should be here are concrete instances of
27381 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
27382 For concrete instances, we can get the parent die from the abstract
27383 instance. */
27384
27385 static void
27386 flush_limbo_die_list (void)
27387 {
27388 limbo_die_node *node;
27389
27390 /* get_context_die calls force_decl_die, which can put new DIEs on the
27391 limbo list in LTO mode when nested functions are put in a different
27392 partition than that of their parent function. */
27393 while ((node = limbo_die_list))
27394 {
27395 dw_die_ref die = node->die;
27396 limbo_die_list = node->next;
27397
27398 if (die->die_parent == NULL)
27399 {
27400 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
27401
27402 if (origin && origin->die_parent)
27403 add_child_die (origin->die_parent, die);
27404 else if (is_cu_die (die))
27405 ;
27406 else if (seen_error ())
27407 /* It's OK to be confused by errors in the input. */
27408 add_child_die (comp_unit_die (), die);
27409 else
27410 {
27411 /* In certain situations, the lexical block containing a
27412 nested function can be optimized away, which results
27413 in the nested function die being orphaned. Likewise
27414 with the return type of that nested function. Force
27415 this to be a child of the containing function.
27416
27417 It may happen that even the containing function got fully
27418 inlined and optimized out. In that case we are lost and
27419 assign the empty child. This should not be big issue as
27420 the function is likely unreachable too. */
27421 gcc_assert (node->created_for);
27422
27423 if (DECL_P (node->created_for))
27424 origin = get_context_die (DECL_CONTEXT (node->created_for));
27425 else if (TYPE_P (node->created_for))
27426 origin = scope_die_for (node->created_for, comp_unit_die ());
27427 else
27428 origin = comp_unit_die ();
27429
27430 add_child_die (origin, die);
27431 }
27432 }
27433 }
27434 }
27435
27436 /* Output stuff that dwarf requires at the end of every file,
27437 and generate the DWARF-2 debugging info. */
27438
27439 static void
27440 dwarf2out_finish (const char *filename)
27441 {
27442 comdat_type_node *ctnode;
27443 dw_die_ref main_comp_unit_die;
27444
27445 /* Flush out any latecomers to the limbo party. */
27446 flush_limbo_die_list ();
27447
27448 /* We shouldn't have any symbols with delayed asm names for
27449 DIEs generated after early finish. */
27450 gcc_assert (deferred_asm_name == NULL);
27451
27452 /* PCH might result in DW_AT_producer string being restored from the
27453 header compilation, so always fill it with an empty string initially
27454 and overwrite only here. */
27455 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
27456 producer_string = gen_producer_string ();
27457 producer->dw_attr_val.v.val_str->refcount--;
27458 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
27459
27460 gen_remaining_tmpl_value_param_die_attribute ();
27461
27462 /* Add the name for the main input file now. We delayed this from
27463 dwarf2out_init to avoid complications with PCH.
27464 For LTO-produced units, use a fixed artificial name to avoid
27465 leaking tempfile names into the DWARF. */
27466 if (!in_lto_p)
27467 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
27468 else
27469 add_name_attribute (comp_unit_die (), "<artificial>");
27470 if (!IS_ABSOLUTE_PATH (filename) || targetm.force_at_comp_dir)
27471 add_comp_dir_attribute (comp_unit_die ());
27472 else if (get_AT (comp_unit_die (), DW_AT_comp_dir) == NULL)
27473 {
27474 bool p = false;
27475 file_table->traverse<bool *, file_table_relative_p> (&p);
27476 if (p)
27477 add_comp_dir_attribute (comp_unit_die ());
27478 }
27479
27480 #if ENABLE_ASSERT_CHECKING
27481 {
27482 dw_die_ref die = comp_unit_die (), c;
27483 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
27484 }
27485 #endif
27486 resolve_addr (comp_unit_die ());
27487 move_marked_base_types ();
27488
27489 if (flag_eliminate_unused_debug_types)
27490 prune_unused_types ();
27491
27492 /* Generate separate COMDAT sections for type DIEs. */
27493 if (use_debug_types)
27494 {
27495 break_out_comdat_types (comp_unit_die ());
27496
27497 /* Each new type_unit DIE was added to the limbo die list when created.
27498 Since these have all been added to comdat_type_list, clear the
27499 limbo die list. */
27500 limbo_die_list = NULL;
27501
27502 /* For each new comdat type unit, copy declarations for incomplete
27503 types to make the new unit self-contained (i.e., no direct
27504 references to the main compile unit). */
27505 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
27506 copy_decls_for_unworthy_types (ctnode->root_die);
27507 copy_decls_for_unworthy_types (comp_unit_die ());
27508
27509 /* In the process of copying declarations from one unit to another,
27510 we may have left some declarations behind that are no longer
27511 referenced. Prune them. */
27512 prune_unused_types ();
27513 }
27514
27515 /* Generate separate CUs for each of the include files we've seen.
27516 They will go into limbo_die_list. */
27517 if (flag_eliminate_dwarf2_dups)
27518 break_out_includes (comp_unit_die ());
27519
27520 /* Traverse the DIE's and add sibling attributes to those DIE's that
27521 have children. */
27522 add_sibling_attributes (comp_unit_die ());
27523 limbo_die_node *node;
27524 for (node = limbo_die_list; node; node = node->next)
27525 add_sibling_attributes (node->die);
27526 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
27527 add_sibling_attributes (ctnode->root_die);
27528
27529 /* When splitting DWARF info, we put some attributes in the
27530 skeleton compile_unit DIE that remains in the .o, while
27531 most attributes go in the DWO compile_unit_die. */
27532 if (dwarf_split_debug_info)
27533 main_comp_unit_die = gen_compile_unit_die (NULL);
27534 else
27535 main_comp_unit_die = comp_unit_die ();
27536
27537 /* Output a terminator label for the .text section. */
27538 switch_to_section (text_section);
27539 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
27540 if (cold_text_section)
27541 {
27542 switch_to_section (cold_text_section);
27543 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
27544 }
27545
27546 /* We can only use the low/high_pc attributes if all of the code was
27547 in .text. */
27548 if (!have_multiple_function_sections
27549 || (dwarf_version < 3 && dwarf_strict))
27550 {
27551 /* Don't add if the CU has no associated code. */
27552 if (text_section_used)
27553 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
27554 text_end_label, true);
27555 }
27556 else
27557 {
27558 unsigned fde_idx;
27559 dw_fde_ref fde;
27560 bool range_list_added = false;
27561
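/* The code is spread over more than one section, so describe it to
   the consumer with an explicit range list built from the section
   and per-function labels.  */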
27562 if (text_section_used)
27563 add_ranges_by_labels (main_comp_unit_die, text_section_label,
27564 text_end_label, &range_list_added, true);
27565 if (cold_text_section_used)
27566 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
27567 cold_end_label, &range_list_added, true);
27568
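/* Also cover every function that was placed outside the standard
   text sections.  */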
27569 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
27570 {
27571 if (DECL_IGNORED_P (fde->decl))
27572 continue;
27573 if (!fde->in_std_section)
27574 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
27575 fde->dw_fde_end, &range_list_added,
27576 true);
27577 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
27578 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
27579 fde->dw_fde_second_end, &range_list_added,
27580 true);
27581 }
27582
27583 if (range_list_added)
27584 {
27585 /* We need to give .debug_loc and .debug_ranges an appropriate
27586 "base address". Use zero so that these addresses become
27587 absolute. Historically, we've emitted the unexpected
27588 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
27589 Emit both to give time for other tools to adapt. */
27590 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
27591 if (! dwarf_strict && dwarf_version < 4)
27592 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
27593
27594 add_ranges (NULL);
27595 }
27596 }
27597
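/* Point the compile unit DIE at the line number table.  */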
27598 if (debug_info_level >= DINFO_LEVEL_TERSE)
27599 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
27600 debug_line_section_label);
27601
27602 if (have_macinfo)
27603 add_AT_macptr (comp_unit_die (),
27604 dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros,
27605 macinfo_section_label);
27606
27607 if (dwarf_split_debug_info)
27608 {
27609 /* optimize_location_lists calculates the size of the lists,
27610 so index them first, and assign indices to the entries.
27611 Although optimize_location_lists will remove entries from
27612 the table, it only does so for duplicates, and therefore
27613 only reduces ref_counts to 1. */
27614 index_location_lists (comp_unit_die ());
27615
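/* Walk the address table and assign each entry the index that
   DW_FORM_GNU_addr_index references will use.  */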
27616 if (addr_index_table != NULL)
27617 {
27618 unsigned int index = 0;
27619 addr_index_table
27620 ->traverse_noresize<unsigned int *, index_addr_table_entry>
27621 (&index);
27622 }
27623 }
27624
27625 if (have_location_lists)
27626 optimize_location_lists (comp_unit_die ());
27627
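/* Make sure strings needed by the macro tables are entered into the
   string table before it is frozen.  */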
27628 save_macinfo_strings ();
27629
27630 if (dwarf_split_debug_info)
27631 {
27632 unsigned int index = 0;
27633
27634 /* Add attributes common to skeleton compile_units and
27635 type_units. Because these attributes include strings, this
27636 must be done before freezing the string table. Top-level
27637 skeleton die attrs are added when the skeleton type unit is
27638 created, so ensure it is created by this point. */
27639 add_top_level_skeleton_die_attrs (main_comp_unit_die);
27640 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
27641 }
27642
27643 /* Output all of the compilation units. We put the main one last so that
27644 the offsets are available to output_pubnames. */
27645 for (node = limbo_die_list; node; node = node->next)
27646 output_comp_unit (node->die, 0);
27647
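/* Hash table used to make sure each comdat type unit is output only
   once.  */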
27648 hash_table<comdat_type_hasher> comdat_type_table (100);
27649 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
27650 {
27651 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
27652
27653 /* Don't output duplicate types. */
27654 if (*slot != HTAB_EMPTY_ENTRY)
27655 continue;
27656
27657 /* Add a pointer to the line table for the main compilation unit
27658 so that the debugger can make sense of DW_AT_decl_file
27659 attributes. */
27660 if (debug_info_level >= DINFO_LEVEL_TERSE)
27661 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
27662 (!dwarf_split_debug_info
27663 ? debug_line_section_label
27664 : debug_skeleton_line_section_label));
27665
27666 output_comdat_type_unit (ctnode);
27667 *slot = ctnode;
27668 }
27669
27670 /* The AT_pubnames attribute needs to go in all skeleton DIEs, including
27671 both the main_cu and all skeleton TUs. Making this call unconditional
27672 would end up either adding a second copy of the AT_pubnames attribute, or
27673 requiring a special case in add_top_level_skeleton_die_attrs. */
27674 if (!dwarf_split_debug_info)
27675 add_AT_pubnames (comp_unit_die ());
27676
27677 if (dwarf_split_debug_info)
27678 {
27679 int mark;
27680 unsigned char checksum[16];
27681 struct md5_ctx ctx;
27682
27683 /* Compute a checksum of the comp_unit to use as the dwo_id. */
27684 md5_init_ctx (&ctx);
27685 mark = 0;
27686 die_checksum (comp_unit_die (), &ctx, &mark);
27687 unmark_all_dies (comp_unit_die ());
27688 md5_finish_ctx (&ctx, checksum);
27689
27690 /* Use the first 8 bytes of the checksum as the dwo_id,
27691 and add it to both comp-unit DIEs. */
27692 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
27693 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
27694
27695 /* Add the base offset of the ranges table to the skeleton
27696 comp-unit DIE. */
27697 if (ranges_table_in_use)
27698 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
27699 ranges_section_label);
27700
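/* Emit the address table into the .debug_addr section, which stays
   in the .o file rather than the .dwo.  */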
27701 switch_to_section (debug_addr_section);
27702 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
27703 output_addr_table ();
27704 }
27705
27706 /* Output the main compilation unit if non-empty or if .debug_macinfo
27707 or .debug_macro will be emitted. */
27708 output_comp_unit (comp_unit_die (), have_macinfo);
27709
27710 if (dwarf_split_debug_info && info_section_emitted)
27711 output_skeleton_debug_sections (main_comp_unit_die);
27712
27713 /* Output the abbreviation table. */
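/* Entry zero of the abbrev table is allocated but unused, so a count
   of one means no abbreviations were created.  */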
27714 if (abbrev_die_table_in_use != 1)
27715 {
27716 switch_to_section (debug_abbrev_section);
27717 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
27718 output_abbrev_section ();
27719 }
27720
27721 /* Output location list section if necessary. */
27722 if (have_location_lists)
27723 {
27724 /* Output the location lists info. */
27725 switch_to_section (debug_loc_section);
27726 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
27727 output_location_lists (comp_unit_die ());
27728 }
27729
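/* Emit the public names and public types tables.  */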
27730 output_pubtables ();
27731
27732 /* Output the address range information if a CU (.debug_info section)
27733 was emitted. We output an empty table even if we had no functions
27734 to put in it. This is because the consumer has no way to tell the
27735 difference between an empty table that we omitted and failure to
27736 generate a table that would have contained data. */
27737 if (info_section_emitted)
27738 {
27739 switch_to_section (debug_aranges_section);
27740 output_aranges ();
27741 }
27742
27743 /* Output ranges section if necessary. */
27744 if (ranges_table_in_use)
27745 {
27746 switch_to_section (debug_ranges_section);
27747 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
27748 output_ranges ();
27749 }
27750
27751 /* Terminate the macro section. */
27752 if (have_macinfo)
27753 {
27754 switch_to_section (debug_macinfo_section);
27755 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
27756 output_macinfo ();
27757 dw2_asm_output_data (1, 0, "End compilation unit");
27758 }
27759
27760 /* Output the source line correspondence table. We must do this
27761 even if there is no line information. Otherwise, on an empty
27762 translation unit, we will generate a present, but empty,
27763 .debug_info section. IRIX 6.5 `nm' will then complain when
27764 examining the file. This is done late so that any filenames
27765 used by the debug_info section are marked as 'used'. */
27766 switch_to_section (debug_line_section);
27767 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
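/* When the assembler builds the line table itself from .file/.loc
   directives, there is nothing more to output here.  */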
27768 if (! DWARF2_ASM_LINE_DEBUG_INFO)
27769 output_line_info (false);
27770
27771 if (dwarf_split_debug_info && info_section_emitted)
27772 {
27773 switch_to_section (debug_skeleton_line_section);
27774 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
27775 output_line_info (true);
27776 }
27777
27778 /* If we emitted any indirect strings, output the string table too. */
27779 if (debug_str_hash || skeleton_debug_str_hash)
27780 output_indirect_strings ();
27781 }
27782
27783 /* Perform any cleanups needed after the early debug generation pass
27784 has run. */
27785
27786 static void
27787 dwarf2out_early_finish (void)
27788 {
27789 /* Walk through the list of incomplete types again, trying once more to
27790 emit full debugging info for them. */
27791 retry_incomplete_types ();
27792
27793 /* The point here is to flush out the limbo list so that it is empty
27794 and we don't need to stream it for LTO. */
27795 flush_limbo_die_list ();
27796
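/* Generate debug info that was deferred until now: DIEs for
   scheduled generic parameters and value attributes for remaining
   template value parameters.  */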
27797 gen_scheduled_generic_parms_dies ();
27798 gen_remaining_tmpl_value_param_die_attribute ();
27799
27800 /* Add DW_AT_linkage_name for all deferred DIEs. */
27801 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
27802 {
27803 tree decl = node->created_for;
27804 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
27805 /* A DECL can lack a DECL_ASSEMBLER_NAME when its DIE is for a
27806 constant that ended up in deferred_asm_name before we knew it
27807 was constant and was never written to disk. */
27808 && DECL_ASSEMBLER_NAME (decl))
27809 {
27810 add_linkage_attr (node->die, decl);
27811 move_linkage_attr (node->die);
27812 }
27813 }
27814 deferred_asm_name = NULL;
27815 }
27816
27817 /* Reset all state within dwarf2out.c so that we can rerun the compiler
27818 within the same process. For use by toplev::finalize. */
27819
27820 void
27821 dwarf2out_c_finalize (void)
27822 {
27823 last_var_location_insn = NULL;
27824 cached_next_real_insn = NULL;
27825 used_rtx_array = NULL;
27826 incomplete_types = NULL;
27827 decl_scope_table = NULL;
27828 debug_info_section = NULL;
27829 debug_skeleton_info_section = NULL;
27830 debug_abbrev_section = NULL;
27831 debug_skeleton_abbrev_section = NULL;
27832 debug_aranges_section = NULL;
27833 debug_addr_section = NULL;
27834 debug_macinfo_section = NULL;
27835 debug_line_section = NULL;
27836 debug_skeleton_line_section = NULL;
27837 debug_loc_section = NULL;
27838 debug_pubnames_section = NULL;
27839 debug_pubtypes_section = NULL;
27840 debug_str_section = NULL;
27841 debug_str_dwo_section = NULL;
27842 debug_str_offsets_section = NULL;
27843 debug_ranges_section = NULL;
27844 debug_frame_section = NULL;
27845 fde_vec = NULL;
27846 debug_str_hash = NULL;
27847 skeleton_debug_str_hash = NULL;
27848 dw2_string_counter = 0;
27849 have_multiple_function_sections = false;
27850 text_section_used = false;
27851 cold_text_section_used = false;
27852 cold_text_section = NULL;
27853 current_unit_personality = NULL;
27854
27855 next_die_offset = 0;
27856 single_comp_unit_die = NULL;
27857 comdat_type_list = NULL;
27858 limbo_die_list = NULL;
27859 file_table = NULL;
27860 decl_die_table = NULL;
27861 common_block_die_table = NULL;
27862 decl_loc_table = NULL;
27863 call_arg_locations = NULL;
27864 call_arg_loc_last = NULL;
27865 call_site_count = -1;
27866 tail_call_site_count = -1;
27867 cached_dw_loc_list_table = NULL;
27868 abbrev_die_table = NULL;
27869 abbrev_die_table_allocated = 0;
27870 abbrev_die_table_in_use = 0;
27871 delete dwarf_proc_stack_usage_map;
27872 dwarf_proc_stack_usage_map = NULL;
27873 line_info_label_num = 0;
27874 cur_line_info_table = NULL;
27875 text_section_line_info = NULL;
27876 cold_text_section_line_info = NULL;
27877 separate_line_info = NULL;
27878 info_section_emitted = false;
27879 pubname_table = NULL;
27880 pubtype_table = NULL;
27881 macinfo_table = NULL;
27882 ranges_table = NULL;
27883 ranges_table_allocated = 0;
27884 ranges_table_in_use = 0;
27885 ranges_by_label = 0;
27886 ranges_by_label_allocated = 0;
27887 ranges_by_label_in_use = 0;
27888 have_location_lists = false;
27889 loclabel_num = 0;
27890 poc_label_num = 0;
27891 last_emitted_file = NULL;
27892 label_num = 0;
27893 tmpl_value_parm_die_table = NULL;
27894 generic_type_instances = NULL;
27895 frame_pointer_fb_offset = 0;
27896 frame_pointer_fb_offset_valid = false;
27897 base_types.release ();
27898 XDELETEVEC (producer_string);
27899 producer_string = NULL;
27900 }
27901
27902 #include "gt-dwarf2out.h"