DWARF: turn dw_loc_descr_node field into hash map for frame offset check
1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2016 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
  47          information common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
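   /* Illustrative sketch only (assuming x86-64, where DWARF register 6 is
      the frame pointer %rbp): for a prologue of the form

          push %rbp
          mov  %rsp, %rbp

      the corresponding CFIs would typically be

          DW_CFA_def_cfa_offset: 16       after the push, CFA = rsp + 16
          DW_CFA_offset: r6 at cfa-16     %rbp is saved at CFA - 16
          DW_CFA_def_cfa_register: r6     after the mov, CFA = rbp + 16

      i.e. the CFA rule follows the stack pointer adjustments and saved
      registers are described relative to the CFA.  */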
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "tm_p.h"
66 #include "stringpool.h"
67 #include "insn-config.h"
68 #include "ira.h"
69 #include "cgraph.h"
70 #include "diagnostic.h"
71 #include "fold-const.h"
72 #include "stor-layout.h"
73 #include "varasm.h"
74 #include "version.h"
75 #include "flags.h"
76 #include "rtlhash.h"
77 #include "reload.h"
78 #include "output.h"
79 #include "expr.h"
80 #include "dwarf2out.h"
81 #include "dwarf2asm.h"
82 #include "toplev.h"
83 #include "md5.h"
84 #include "tree-pretty-print.h"
85 #include "debug.h"
86 #include "common/common-target.h"
87 #include "langhooks.h"
88 #include "lra.h"
89 #include "dumpfile.h"
90 #include "opts.h"
91 #include "tree-dfa.h"
92 #include "gdb/gdb-index.h"
93 #include "rtl-iter.h"
94
95 static void dwarf2out_source_line (unsigned int, const char *, int, bool);
96 static rtx_insn *last_var_location_insn;
97 static rtx_insn *cached_next_real_insn;
98 static void dwarf2out_decl (tree);
99
100 #ifndef XCOFF_DEBUGGING_INFO
101 #define XCOFF_DEBUGGING_INFO 0
102 #endif
103
104 #ifndef HAVE_XCOFF_DWARF_EXTRAS
105 #define HAVE_XCOFF_DWARF_EXTRAS 0
106 #endif
107
108 #ifdef VMS_DEBUGGING_INFO
109 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
110
111 /* Define this macro to be a nonzero value if the directory specifications
112 which are output in the debug info should end with a separator. */
113 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
114 /* Define this macro to evaluate to a nonzero value if GCC should refrain
115 from generating indirect strings in DWARF2 debug information, for instance
116 if your target is stuck with an old version of GDB that is unable to
117 process them properly or uses VMS Debug. */
118 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
119 #else
120 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
121 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
122 #endif
123
124 /* ??? Poison these here until it can be done generically. They've been
125 totally replaced in this file; make sure it stays that way. */
126 #undef DWARF2_UNWIND_INFO
127 #undef DWARF2_FRAME_INFO
128 #if (GCC_VERSION >= 3000)
129 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
130 #endif
131
132 /* The size of the target's pointer type. */
133 #ifndef PTR_SIZE
134 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
135 #endif
136
137 /* Array of RTXes referenced by the debugging information, which therefore
138 must be kept around forever. */
139 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
140
141 /* A pointer to the base of a list of incomplete types which might be
142 completed at some later time. incomplete_types_list needs to be a
143 vec<tree, va_gc> *because we want to tell the garbage collector about
144 it. */
145 static GTY(()) vec<tree, va_gc> *incomplete_types;
146
147 /* A pointer to the base of a table of references to declaration
148 scopes. This table is a display which tracks the nesting
149 of declaration scopes at the current scope and containing
150 scopes. This table is used to find the proper place to
151 define type declaration DIE's. */
152 static GTY(()) vec<tree, va_gc> *decl_scope_table;
153
154 /* Pointers to various DWARF2 sections. */
155 static GTY(()) section *debug_info_section;
156 static GTY(()) section *debug_skeleton_info_section;
157 static GTY(()) section *debug_abbrev_section;
158 static GTY(()) section *debug_skeleton_abbrev_section;
159 static GTY(()) section *debug_aranges_section;
160 static GTY(()) section *debug_addr_section;
161 static GTY(()) section *debug_macinfo_section;
162 static GTY(()) section *debug_line_section;
163 static GTY(()) section *debug_skeleton_line_section;
164 static GTY(()) section *debug_loc_section;
165 static GTY(()) section *debug_pubnames_section;
166 static GTY(()) section *debug_pubtypes_section;
167 static GTY(()) section *debug_str_section;
168 static GTY(()) section *debug_str_dwo_section;
169 static GTY(()) section *debug_str_offsets_section;
170 static GTY(()) section *debug_ranges_section;
171 static GTY(()) section *debug_frame_section;
172
173 /* Maximum size (in bytes) of an artificially generated label. */
174 #define MAX_ARTIFICIAL_LABEL_BYTES 30
175
176 /* According to the (draft) DWARF 3 specification, the initial length
177 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
178 bytes are 0xffffffff, followed by the length stored in the next 8
179 bytes.
180
181 However, the SGI/MIPS ABI uses an initial length which is equal to
182 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
183
184 #ifndef DWARF_INITIAL_LENGTH_SIZE
185 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
186 #endif
187
188 /* Round SIZE up to the nearest BOUNDARY. */
189 #define DWARF_ROUND(SIZE,BOUNDARY) \
190 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
191
192 /* CIE identifier. */
193 #if HOST_BITS_PER_WIDE_INT >= 64
194 #define DWARF_CIE_ID \
195 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
196 #else
197 #define DWARF_CIE_ID DW_CIE_ID
198 #endif
199
200
201 /* A vector for a table that contains frame description
202 information for each routine. */
203 #define NOT_INDEXED (-1U)
204 #define NO_INDEX_ASSIGNED (-2U)
205
206 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
207
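/* An entry in the indirect string table: the string itself, the number of
   times it is referenced, the DW_FORM eventually chosen to emit it, and,
   for the indirect forms, the label or index used to refer to it.  */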
208 struct GTY((for_user)) indirect_string_node {
209 const char *str;
210 unsigned int refcount;
211 enum dwarf_form form;
212 char *label;
213 unsigned int index;
214 };
215
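/* Hashing traits for the string tables below: entries hash and compare on
   the string contents, so lookups can be keyed directly on a
   "const char *".  */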
216 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
217 {
218 typedef const char *compare_type;
219
220 static hashval_t hash (indirect_string_node *);
221 static bool equal (indirect_string_node *, const char *);
222 };
223
224 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
225
226 /* With split_debug_info, both the comp_dir and dwo_name go in the
227 main object file, rather than the dwo, similar to the force_direct
228 parameter elsewhere but with additional complications:
229
230 1) The string is needed in both the main object file and the dwo.
231 That is, the comp_dir and dwo_name will appear in both places.
232
233 2) Strings can use three forms: DW_FORM_string, DW_FORM_strp or
234 DW_FORM_GNU_str_index.
235
236 3) GCC chooses the form to use late, depending on the size and
237 reference count.
238
 239    Rather than forcing all the debug string handling functions and
240 callers to deal with these complications, simply use a separate,
241 special-cased string table for any attribute that should go in the
242 main object file. This limits the complexity to just the places
243 that need it. */
244
245 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
246
247 static GTY(()) int dw2_string_counter;
248
249 /* True if the compilation unit places functions in more than one section. */
250 static GTY(()) bool have_multiple_function_sections = false;
251
252 /* Whether the default text and cold text sections have been used at all. */
253
254 static GTY(()) bool text_section_used = false;
255 static GTY(()) bool cold_text_section_used = false;
256
257 /* The default cold text section. */
258 static GTY(()) section *cold_text_section;
259
260 /* The DIE for C++14 'auto' in a function return type. */
261 static GTY(()) dw_die_ref auto_die;
262
263 /* The DIE for C++14 'decltype(auto)' in a function return type. */
264 static GTY(()) dw_die_ref decltype_auto_die;
265
266 /* Forward declarations for functions defined in this file. */
267
268 static char *stripattributes (const char *);
269 static void output_call_frame_info (int);
270 static void dwarf2out_note_section_used (void);
271
272 /* Personality decl of current unit. Used only when assembler does not support
273 personality CFI. */
274 static GTY(()) rtx current_unit_personality;
275
276 /* Data and reference forms for relocatable data. */
277 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
278 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
279
280 #ifndef DEBUG_FRAME_SECTION
281 #define DEBUG_FRAME_SECTION ".debug_frame"
282 #endif
283
284 #ifndef FUNC_BEGIN_LABEL
285 #define FUNC_BEGIN_LABEL "LFB"
286 #endif
287
288 #ifndef FUNC_END_LABEL
289 #define FUNC_END_LABEL "LFE"
290 #endif
291
292 #ifndef PROLOGUE_END_LABEL
293 #define PROLOGUE_END_LABEL "LPE"
294 #endif
295
296 #ifndef EPILOGUE_BEGIN_LABEL
297 #define EPILOGUE_BEGIN_LABEL "LEB"
298 #endif
299
300 #ifndef FRAME_BEGIN_LABEL
301 #define FRAME_BEGIN_LABEL "Lframe"
302 #endif
303 #define CIE_AFTER_SIZE_LABEL "LSCIE"
304 #define CIE_END_LABEL "LECIE"
305 #define FDE_LABEL "LSFDE"
306 #define FDE_AFTER_SIZE_LABEL "LASFDE"
307 #define FDE_END_LABEL "LEFDE"
308 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
309 #define LINE_NUMBER_END_LABEL "LELT"
310 #define LN_PROLOG_AS_LABEL "LASLTP"
311 #define LN_PROLOG_END_LABEL "LELTP"
312 #define DIE_LABEL_PREFIX "DW"
313 \f
314 /* Match the base name of a file to the base name of a compilation unit. */
315
316 static int
317 matches_main_base (const char *path)
318 {
319 /* Cache the last query. */
320 static const char *last_path = NULL;
321 static int last_match = 0;
322 if (path != last_path)
323 {
324 const char *base;
325 int length = base_of_path (path, &base);
326 last_path = path;
327 last_match = (length == main_input_baselength
328 && memcmp (base, main_input_basename, length) == 0);
329 }
330 return last_match;
331 }
332
333 #ifdef DEBUG_DEBUG_STRUCT
334
335 static int
336 dump_struct_debug (tree type, enum debug_info_usage usage,
337 enum debug_struct_file criterion, int generic,
338 int matches, int result)
339 {
340 /* Find the type name. */
341 tree type_decl = TYPE_STUB_DECL (type);
342 tree t = type_decl;
343 const char *name = 0;
344 if (TREE_CODE (t) == TYPE_DECL)
345 t = DECL_NAME (t);
346 if (t)
347 name = IDENTIFIER_POINTER (t);
348
349 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
350 criterion,
351 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
352 matches ? "bas" : "hdr",
353 generic ? "gen" : "ord",
354 usage == DINFO_USAGE_DFN ? ";" :
355 usage == DINFO_USAGE_DIR_USE ? "." : "*",
356 result,
357 (void*) type_decl, name);
358 return result;
359 }
360 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
361 dump_struct_debug (type, usage, criterion, generic, matches, result)
362
363 #else
364
365 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
366 (result)
367
368 #endif
369
370 /* Get the number of HOST_WIDE_INTs needed to represent the precision
371 of the number. Some constants have a large uniform precision, so
372 we get the precision needed for the actual value of the number. */
373
374 static unsigned int
375 get_full_len (const wide_int &op)
376 {
377 int prec = wi::min_precision (op, UNSIGNED);
378 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
379 / HOST_BITS_PER_WIDE_INT);
380 }
381
382 static bool
383 should_emit_struct_debug (tree type, enum debug_info_usage usage)
384 {
385 enum debug_struct_file criterion;
386 tree type_decl;
387 bool generic = lang_hooks.types.generic_p (type);
388
389 if (generic)
390 criterion = debug_struct_generic[usage];
391 else
392 criterion = debug_struct_ordinary[usage];
393
394 if (criterion == DINFO_STRUCT_FILE_NONE)
395 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
396 if (criterion == DINFO_STRUCT_FILE_ANY)
397 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
398
399 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
400
401 if (type_decl != NULL)
402 {
403 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
404 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
405
406 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
407 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
408 }
409
410 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
411 }
412 \f
413 /* Return a pointer to a copy of the section string name S with all
414 attributes stripped off, and an asterisk prepended (for assemble_name). */
415
416 static inline char *
417 stripattributes (const char *s)
418 {
419 char *stripped = XNEWVEC (char, strlen (s) + 2);
420 char *p = stripped;
421
422 *p++ = '*';
423
424 while (*s && *s != ',')
425 *p++ = *s++;
426
427 *p = '\0';
428 return stripped;
429 }
430
431 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
432 switch to the data section instead, and write out a synthetic start label
433 for collect2 the first time around. */
434
435 static void
436 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
437 {
438 if (eh_frame_section == 0)
439 {
440 int flags;
441
442 if (EH_TABLES_CAN_BE_READ_ONLY)
443 {
444 int fde_encoding;
445 int per_encoding;
446 int lsda_encoding;
447
448 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
449 /*global=*/0);
450 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
451 /*global=*/1);
452 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
453 /*global=*/0);
454 flags = ((! flag_pic
455 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
456 && (fde_encoding & 0x70) != DW_EH_PE_aligned
457 && (per_encoding & 0x70) != DW_EH_PE_absptr
458 && (per_encoding & 0x70) != DW_EH_PE_aligned
459 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
460 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
461 ? 0 : SECTION_WRITE);
462 }
463 else
464 flags = SECTION_WRITE;
465
466 #ifdef EH_FRAME_SECTION_NAME
467 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
468 #else
469 eh_frame_section = ((flags == SECTION_WRITE)
470 ? data_section : readonly_data_section);
471 #endif /* EH_FRAME_SECTION_NAME */
472 }
473
474 switch_to_section (eh_frame_section);
475
476 #ifdef EH_FRAME_THROUGH_COLLECT2
477 /* We have no special eh_frame section. Emit special labels to guide
478 collect2. */
479 if (!back)
480 {
481 tree label = get_file_function_name ("F");
482 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
483 targetm.asm_out.globalize_label (asm_out_file,
484 IDENTIFIER_POINTER (label));
485 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
486 }
487 #endif
488 }
489
490 /* Switch [BACK] to the eh or debug frame table section, depending on
491 FOR_EH. */
492
493 static void
494 switch_to_frame_table_section (int for_eh, bool back)
495 {
496 if (for_eh)
497 switch_to_eh_frame_section (back);
498 else
499 {
500 if (!debug_frame_section)
501 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
502 SECTION_DEBUG, NULL);
503 switch_to_section (debug_frame_section);
504 }
505 }
506
507 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
508
509 enum dw_cfi_oprnd_type
510 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
511 {
512 switch (cfi)
513 {
514 case DW_CFA_nop:
515 case DW_CFA_GNU_window_save:
516 case DW_CFA_remember_state:
517 case DW_CFA_restore_state:
518 return dw_cfi_oprnd_unused;
519
520 case DW_CFA_set_loc:
521 case DW_CFA_advance_loc1:
522 case DW_CFA_advance_loc2:
523 case DW_CFA_advance_loc4:
524 case DW_CFA_MIPS_advance_loc8:
525 return dw_cfi_oprnd_addr;
526
527 case DW_CFA_offset:
528 case DW_CFA_offset_extended:
529 case DW_CFA_def_cfa:
530 case DW_CFA_offset_extended_sf:
531 case DW_CFA_def_cfa_sf:
532 case DW_CFA_restore:
533 case DW_CFA_restore_extended:
534 case DW_CFA_undefined:
535 case DW_CFA_same_value:
536 case DW_CFA_def_cfa_register:
537 case DW_CFA_register:
538 case DW_CFA_expression:
539 return dw_cfi_oprnd_reg_num;
540
541 case DW_CFA_def_cfa_offset:
542 case DW_CFA_GNU_args_size:
543 case DW_CFA_def_cfa_offset_sf:
544 return dw_cfi_oprnd_offset;
545
546 case DW_CFA_def_cfa_expression:
547 return dw_cfi_oprnd_loc;
548
549 default:
550 gcc_unreachable ();
551 }
552 }
553
554 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
555
556 enum dw_cfi_oprnd_type
557 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
558 {
559 switch (cfi)
560 {
561 case DW_CFA_def_cfa:
562 case DW_CFA_def_cfa_sf:
563 case DW_CFA_offset:
564 case DW_CFA_offset_extended_sf:
565 case DW_CFA_offset_extended:
566 return dw_cfi_oprnd_offset;
567
568 case DW_CFA_register:
569 return dw_cfi_oprnd_reg_num;
570
571 case DW_CFA_expression:
572 return dw_cfi_oprnd_loc;
573
574 default:
575 return dw_cfi_oprnd_unused;
576 }
577 }
578
579 /* Output one FDE. */
580
581 static void
582 output_fde (dw_fde_ref fde, bool for_eh, bool second,
583 char *section_start_label, int fde_encoding, char *augmentation,
584 bool any_lsda_needed, int lsda_encoding)
585 {
586 const char *begin, *end;
587 static unsigned int j;
588 char l1[20], l2[20];
589
590 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
591 /* empty */ 0);
592 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
593 for_eh + j);
594 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
595 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
596 if (!XCOFF_DEBUGGING_INFO || for_eh)
597 {
598 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
599 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
600 " indicating 64-bit DWARF extension");
601 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
602 "FDE Length");
603 }
604 ASM_OUTPUT_LABEL (asm_out_file, l1);
605
606 if (for_eh)
607 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
608 else
609 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
610 debug_frame_section, "FDE CIE offset");
611
612 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
613 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
614
615 if (for_eh)
616 {
617 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
618 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
619 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
620 "FDE initial location");
621 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
622 end, begin, "FDE address range");
623 }
624 else
625 {
626 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
627 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
628 }
629
630 if (augmentation[0])
631 {
632 if (any_lsda_needed)
633 {
634 int size = size_of_encoded_value (lsda_encoding);
635
636 if (lsda_encoding == DW_EH_PE_aligned)
637 {
638 int offset = ( 4 /* Length */
639 + 4 /* CIE offset */
640 + 2 * size_of_encoded_value (fde_encoding)
641 + 1 /* Augmentation size */ );
642 int pad = -offset & (PTR_SIZE - 1);
643
644 size += pad;
645 gcc_assert (size_of_uleb128 (size) == 1);
646 }
647
648 dw2_asm_output_data_uleb128 (size, "Augmentation size");
649
650 if (fde->uses_eh_lsda)
651 {
652 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
653 fde->funcdef_number);
654 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
655 gen_rtx_SYMBOL_REF (Pmode, l1),
656 false,
657 "Language Specific Data Area");
658 }
659 else
660 {
661 if (lsda_encoding == DW_EH_PE_aligned)
662 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
663 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
664 "Language Specific Data Area (none)");
665 }
666 }
667 else
668 dw2_asm_output_data_uleb128 (0, "Augmentation size");
669 }
670
671 /* Loop through the Call Frame Instructions associated with this FDE. */
672 fde->dw_fde_current_label = begin;
673 {
674 size_t from, until, i;
675
676 from = 0;
677 until = vec_safe_length (fde->dw_fde_cfi);
678
679 if (fde->dw_fde_second_begin == NULL)
680 ;
681 else if (!second)
682 until = fde->dw_fde_switch_cfi_index;
683 else
684 from = fde->dw_fde_switch_cfi_index;
685
686 for (i = from; i < until; i++)
687 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
688 }
689
690 /* If we are to emit a ref/link from function bodies to their frame tables,
691 do it now. This is typically performed to make sure that tables
692 associated with functions are dragged with them and not discarded in
693 garbage collecting links. We need to do this on a per function basis to
694 cope with -ffunction-sections. */
695
696 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
697 /* Switch to the function section, emit the ref to the tables, and
698 switch *back* into the table section. */
699 switch_to_section (function_section (fde->decl));
700 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
701 switch_to_frame_table_section (for_eh, true);
702 #endif
703
704 /* Pad the FDE out to an address sized boundary. */
705 ASM_OUTPUT_ALIGN (asm_out_file,
706 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
707 ASM_OUTPUT_LABEL (asm_out_file, l2);
708
709 j += 2;
710 }
711
712 /* Return true if frame description entry FDE is needed for EH. */
713
714 static bool
715 fde_needed_for_eh_p (dw_fde_ref fde)
716 {
717 if (flag_asynchronous_unwind_tables)
718 return true;
719
720 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
721 return true;
722
723 if (fde->uses_eh_lsda)
724 return true;
725
726 /* If exceptions are enabled, we have collected nothrow info. */
727 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
728 return false;
729
730 return true;
731 }
732
733 /* Output the call frame information used to record information
734 that relates to calculating the frame pointer, and records the
735 location of saved registers. */
736
737 static void
738 output_call_frame_info (int for_eh)
739 {
740 unsigned int i;
741 dw_fde_ref fde;
742 dw_cfi_ref cfi;
743 char l1[20], l2[20], section_start_label[20];
744 bool any_lsda_needed = false;
745 char augmentation[6];
746 int augmentation_size;
747 int fde_encoding = DW_EH_PE_absptr;
748 int per_encoding = DW_EH_PE_absptr;
749 int lsda_encoding = DW_EH_PE_absptr;
750 int return_reg;
751 rtx personality = NULL;
752 int dw_cie_version;
753
754 /* Don't emit a CIE if there won't be any FDEs. */
755 if (!fde_vec)
756 return;
757
758 /* Nothing to do if the assembler's doing it all. */
759 if (dwarf2out_do_cfi_asm ())
760 return;
761
762 /* If we don't have any functions we'll want to unwind out of, don't emit
763 any EH unwind information. If we make FDEs linkonce, we may have to
764 emit an empty label for an FDE that wouldn't otherwise be emitted. We
765 want to avoid having an FDE kept around when the function it refers to
766 is discarded. Example where this matters: a primary function template
767 in C++ requires EH information, an explicit specialization doesn't. */
768 if (for_eh)
769 {
770 bool any_eh_needed = false;
771
772 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
773 {
774 if (fde->uses_eh_lsda)
775 any_eh_needed = any_lsda_needed = true;
776 else if (fde_needed_for_eh_p (fde))
777 any_eh_needed = true;
778 else if (TARGET_USES_WEAK_UNWIND_INFO)
779 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
780 }
781
782 if (!any_eh_needed)
783 return;
784 }
785
786 /* We're going to be generating comments, so turn on app. */
787 if (flag_debug_asm)
788 app_enable ();
789
790 /* Switch to the proper frame section, first time. */
791 switch_to_frame_table_section (for_eh, false);
792
793 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
794 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
795
796 /* Output the CIE. */
797 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
798 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
799 if (!XCOFF_DEBUGGING_INFO || for_eh)
800 {
801 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
802 dw2_asm_output_data (4, 0xffffffff,
803 "Initial length escape value indicating 64-bit DWARF extension");
804 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
805 "Length of Common Information Entry");
806 }
807 ASM_OUTPUT_LABEL (asm_out_file, l1);
808
809 /* Now that the CIE pointer is PC-relative for EH,
810 use 0 to identify the CIE. */
811 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
812 (for_eh ? 0 : DWARF_CIE_ID),
813 "CIE Identifier Tag");
814
815 /* Use the CIE version 3 for DWARF3; allow DWARF2 to continue to
816 use CIE version 1, unless that would produce incorrect results
817 due to overflowing the return register column. */
818 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
819 dw_cie_version = 1;
820 if (return_reg >= 256 || dwarf_version > 2)
821 dw_cie_version = 3;
822 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
823
824 augmentation[0] = 0;
825 augmentation_size = 0;
826
827 personality = current_unit_personality;
828 if (for_eh)
829 {
830 char *p;
831
832 /* Augmentation:
833 z Indicates that a uleb128 is present to size the
834 augmentation section.
835 L Indicates the encoding (and thus presence) of
836 an LSDA pointer in the FDE augmentation.
837 R Indicates a non-default pointer encoding for
838 FDE code pointers.
839 P Indicates the presence of an encoding + language
840 personality routine in the CIE augmentation. */
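      /* For example, with a personality routine, LSDA references and a
	 non-absolute FDE encoding, the code below builds the augmentation
	 string "zPLR".  */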
841
842 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
843 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
844 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
845
846 p = augmentation + 1;
847 if (personality)
848 {
849 *p++ = 'P';
850 augmentation_size += 1 + size_of_encoded_value (per_encoding);
851 assemble_external_libcall (personality);
852 }
853 if (any_lsda_needed)
854 {
855 *p++ = 'L';
856 augmentation_size += 1;
857 }
858 if (fde_encoding != DW_EH_PE_absptr)
859 {
860 *p++ = 'R';
861 augmentation_size += 1;
862 }
863 if (p > augmentation + 1)
864 {
865 augmentation[0] = 'z';
866 *p = '\0';
867 }
868
869 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
870 if (personality && per_encoding == DW_EH_PE_aligned)
871 {
872 int offset = ( 4 /* Length */
873 + 4 /* CIE Id */
874 + 1 /* CIE version */
875 + strlen (augmentation) + 1 /* Augmentation */
876 + size_of_uleb128 (1) /* Code alignment */
877 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
878 + 1 /* RA column */
879 + 1 /* Augmentation size */
880 + 1 /* Personality encoding */ );
881 int pad = -offset & (PTR_SIZE - 1);
882
883 augmentation_size += pad;
884
885 /* Augmentations should be small, so there's scarce need to
886 iterate for a solution. Die if we exceed one uleb128 byte. */
887 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
888 }
889 }
890
891 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
892 if (dw_cie_version >= 4)
893 {
894 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
895 dw2_asm_output_data (1, 0, "CIE Segment Size");
896 }
897 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
898 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
899 "CIE Data Alignment Factor");
900
901 if (dw_cie_version == 1)
902 dw2_asm_output_data (1, return_reg, "CIE RA Column");
903 else
904 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
905
906 if (augmentation[0])
907 {
908 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
909 if (personality)
910 {
911 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
912 eh_data_format_name (per_encoding));
913 dw2_asm_output_encoded_addr_rtx (per_encoding,
914 personality,
915 true, NULL);
916 }
917
918 if (any_lsda_needed)
919 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
920 eh_data_format_name (lsda_encoding));
921
922 if (fde_encoding != DW_EH_PE_absptr)
923 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
924 eh_data_format_name (fde_encoding));
925 }
926
927 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
928 output_cfi (cfi, NULL, for_eh);
929
930 /* Pad the CIE out to an address sized boundary. */
931 ASM_OUTPUT_ALIGN (asm_out_file,
932 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
933 ASM_OUTPUT_LABEL (asm_out_file, l2);
934
 935   /* Loop through all of the FDEs.  */
936 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
937 {
938 unsigned int k;
939
940 /* Don't emit EH unwind info for leaf functions that don't need it. */
941 if (for_eh && !fde_needed_for_eh_p (fde))
942 continue;
943
944 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
945 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
946 augmentation, any_lsda_needed, lsda_encoding);
947 }
948
949 if (for_eh && targetm.terminate_dw2_eh_frame_info)
950 dw2_asm_output_data (4, 0, "End of Table");
951
952 /* Turn off app to make assembly quicker. */
953 if (flag_debug_asm)
954 app_disable ();
955 }
956
957 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
958
959 static void
960 dwarf2out_do_cfi_startproc (bool second)
961 {
962 int enc;
963 rtx ref;
964 rtx personality = get_personality_function (current_function_decl);
965
966 fprintf (asm_out_file, "\t.cfi_startproc\n");
967
968 if (personality)
969 {
970 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
971 ref = personality;
972
973 /* ??? The GAS support isn't entirely consistent. We have to
974 handle indirect support ourselves, but PC-relative is done
975 in the assembler. Further, the assembler can't handle any
976 of the weirder relocation types. */
977 if (enc & DW_EH_PE_indirect)
978 ref = dw2_force_const_mem (ref, true);
979
980 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
981 output_addr_const (asm_out_file, ref);
982 fputc ('\n', asm_out_file);
983 }
984
985 if (crtl->uses_eh_lsda)
986 {
987 char lab[20];
988
989 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
990 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
991 current_function_funcdef_no);
992 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
993 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
994
995 if (enc & DW_EH_PE_indirect)
996 ref = dw2_force_const_mem (ref, true);
997
998 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
999 output_addr_const (asm_out_file, ref);
1000 fputc ('\n', asm_out_file);
1001 }
1002 }
1003
1004 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1005 this allocation may be done before pass_final. */
1006
1007 dw_fde_ref
1008 dwarf2out_alloc_current_fde (void)
1009 {
1010 dw_fde_ref fde;
1011
1012 fde = ggc_cleared_alloc<dw_fde_node> ();
1013 fde->decl = current_function_decl;
1014 fde->funcdef_number = current_function_funcdef_no;
1015 fde->fde_index = vec_safe_length (fde_vec);
1016 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1017 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1018 fde->nothrow = crtl->nothrow;
1019 fde->drap_reg = INVALID_REGNUM;
1020 fde->vdrap_reg = INVALID_REGNUM;
1021
1022 /* Record the FDE associated with this function. */
1023 cfun->fde = fde;
1024 vec_safe_push (fde_vec, fde);
1025
1026 return fde;
1027 }
1028
1029 /* Output a marker (i.e. a label) for the beginning of a function, before
1030 the prologue. */
1031
1032 void
1033 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1034 const char *file ATTRIBUTE_UNUSED)
1035 {
1036 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1037 char * dup_label;
1038 dw_fde_ref fde;
1039 section *fnsec;
1040 bool do_frame;
1041
1042 current_function_func_begin_label = NULL;
1043
1044 do_frame = dwarf2out_do_frame ();
1045
1046 /* ??? current_function_func_begin_label is also used by except.c for
1047 call-site information. We must emit this label if it might be used. */
1048 if (!do_frame
1049 && (!flag_exceptions
1050 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1051 return;
1052
1053 fnsec = function_section (current_function_decl);
1054 switch_to_section (fnsec);
1055 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1056 current_function_funcdef_no);
1057 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1058 current_function_funcdef_no);
1059 dup_label = xstrdup (label);
1060 current_function_func_begin_label = dup_label;
1061
1062 /* We can elide the fde allocation if we're not emitting debug info. */
1063 if (!do_frame)
1064 return;
1065
1066 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1067 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1068 would include pass_dwarf2_frame. If we've not created the FDE yet,
1069 do so now. */
1070 fde = cfun->fde;
1071 if (fde == NULL)
1072 fde = dwarf2out_alloc_current_fde ();
1073
1074 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1075 fde->dw_fde_begin = dup_label;
1076 fde->dw_fde_current_label = dup_label;
1077 fde->in_std_section = (fnsec == text_section
1078 || (cold_text_section && fnsec == cold_text_section));
1079
1080 /* We only want to output line number information for the genuine dwarf2
1081 prologue case, not the eh frame case. */
1082 #ifdef DWARF2_DEBUGGING_INFO
1083 if (file)
1084 dwarf2out_source_line (line, file, 0, true);
1085 #endif
1086
1087 if (dwarf2out_do_cfi_asm ())
1088 dwarf2out_do_cfi_startproc (false);
1089 else
1090 {
1091 rtx personality = get_personality_function (current_function_decl);
1092 if (!current_unit_personality)
1093 current_unit_personality = personality;
1094
1095 /* We cannot keep a current personality per function as without CFI
1096 asm, at the point where we emit the CFI data, there is no current
1097 function anymore. */
1098 if (personality && current_unit_personality != personality)
1099 sorry ("multiple EH personalities are supported only with assemblers "
1100 "supporting .cfi_personality directive");
1101 }
1102 }
1103
1104 /* Output a marker (i.e. a label) for the end of the generated code
1105 for a function prologue. This gets called *after* the prologue code has
1106 been generated. */
1107
1108 void
1109 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1110 const char *file ATTRIBUTE_UNUSED)
1111 {
1112 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1113
1114   /* Output a label to mark the end of the code generated for this
1115      function's prologue.  */
1116 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1117 current_function_funcdef_no);
1118 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1119 current_function_funcdef_no);
1120 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1121 }
1122
1123 /* Output a marker (i.e. a label) for the beginning of the generated code
1124    for a function epilogue.  This gets called *before* the epilogue code has
1125 been generated. */
1126
1127 void
1128 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1129 const char *file ATTRIBUTE_UNUSED)
1130 {
1131 dw_fde_ref fde = cfun->fde;
1132 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1133
1134 if (fde->dw_fde_vms_begin_epilogue)
1135 return;
1136
1137   /* Output a label to mark the start of the code generated for this
1138      function's epilogue.  */
1139 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1140 current_function_funcdef_no);
1141 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1142 current_function_funcdef_no);
1143 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1144 }
1145
1146 /* Output a marker (i.e. a label) for the absolute end of the generated code
1147 for a function definition. This gets called *after* the epilogue code has
1148 been generated. */
1149
1150 void
1151 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1152 const char *file ATTRIBUTE_UNUSED)
1153 {
1154 dw_fde_ref fde;
1155 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1156
1157 last_var_location_insn = NULL;
1158 cached_next_real_insn = NULL;
1159
1160 if (dwarf2out_do_cfi_asm ())
1161 fprintf (asm_out_file, "\t.cfi_endproc\n");
1162
1163 /* Output a label to mark the endpoint of the code generated for this
1164 function. */
1165 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1166 current_function_funcdef_no);
1167 ASM_OUTPUT_LABEL (asm_out_file, label);
1168 fde = cfun->fde;
1169 gcc_assert (fde != NULL);
1170 if (fde->dw_fde_second_begin == NULL)
1171 fde->dw_fde_end = xstrdup (label);
1172 }
1173
1174 void
1175 dwarf2out_frame_finish (void)
1176 {
1177 /* Output call frame information. */
1178 if (targetm.debug_unwind_info () == UI_DWARF2)
1179 output_call_frame_info (0);
1180
1181 /* Output another copy for the unwinder. */
1182 if ((flag_unwind_tables || flag_exceptions)
1183 && targetm_common.except_unwind_info (&global_options) == UI_DWARF2)
1184 output_call_frame_info (1);
1185 }
1186
1187 /* Note that the current function section is being used for code. */
1188
1189 static void
1190 dwarf2out_note_section_used (void)
1191 {
1192 section *sec = current_function_section ();
1193 if (sec == text_section)
1194 text_section_used = true;
1195 else if (sec == cold_text_section)
1196 cold_text_section_used = true;
1197 }
1198
1199 static void var_location_switch_text_section (void);
1200 static void set_cur_line_info_table (section *);
1201
1202 void
1203 dwarf2out_switch_text_section (void)
1204 {
1205 section *sect;
1206 dw_fde_ref fde = cfun->fde;
1207
1208 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1209
1210 if (!in_cold_section_p)
1211 {
1212 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1213 fde->dw_fde_second_begin = crtl->subsections.hot_section_label;
1214 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1215 }
1216 else
1217 {
1218 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1219 fde->dw_fde_second_begin = crtl->subsections.cold_section_label;
1220 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1221 }
1222 have_multiple_function_sections = true;
1223
1224 /* There is no need to mark used sections when not debugging. */
1225 if (cold_text_section != NULL)
1226 dwarf2out_note_section_used ();
1227
1228 if (dwarf2out_do_cfi_asm ())
1229 fprintf (asm_out_file, "\t.cfi_endproc\n");
1230
1231 /* Now do the real section switch. */
1232 sect = current_function_section ();
1233 switch_to_section (sect);
1234
1235 fde->second_in_std_section
1236 = (sect == text_section
1237 || (cold_text_section && sect == cold_text_section));
1238
1239 if (dwarf2out_do_cfi_asm ())
1240 dwarf2out_do_cfi_startproc (true);
1241
1242 var_location_switch_text_section ();
1243
1244 if (cold_text_section != NULL)
1245 set_cur_line_info_table (sect);
1246 }
1247 \f
1248 /* And now, the subset of the debugging information support code necessary
1249 for emitting location expressions. */
1250
1251 /* Data about a single source file. */
1252 struct GTY((for_user)) dwarf_file_data {
1253 const char * filename;
1254 int emitted_number;
1255 };
1256
1257 /* Describe an entry into the .debug_addr section. */
1258
1259 enum ate_kind {
1260 ate_kind_rtx,
1261 ate_kind_rtx_dtprel,
1262 ate_kind_label
1263 };
1264
1265 struct GTY((for_user)) addr_table_entry {
1266 enum ate_kind kind;
1267 unsigned int refcount;
1268 unsigned int index;
1269 union addr_table_entry_struct_union
1270 {
1271 rtx GTY ((tag ("0"))) rtl;
1272 char * GTY ((tag ("1"))) label;
1273 }
1274 GTY ((desc ("%1.kind"))) addr;
1275 };
1276
1277 /* Location lists are ranges + location descriptions for that range,
1278 so you can track variables that are in different places over
1279 their entire life. */
1280 typedef struct GTY(()) dw_loc_list_struct {
1281 dw_loc_list_ref dw_loc_next;
1282 const char *begin; /* Label and addr_entry for start of range */
1283 addr_table_entry *begin_entry;
1284 const char *end; /* Label for end of range */
1285 char *ll_symbol; /* Label for beginning of location list.
1286 Only on head of list */
1287 const char *section; /* Section this loclist is relative to */
1288 dw_loc_descr_ref expr;
1289 hashval_t hash;
1290 /* True if all addresses in this and subsequent lists are known to be
1291 resolved. */
1292 bool resolved_addr;
1293 /* True if this list has been replaced by dw_loc_next. */
1294 bool replaced;
1295 bool emitted;
1296 /* True if the range should be emitted even if begin and end
1297 are the same. */
1298 bool force;
1299 } dw_loc_list_node;
1300
1301 static dw_loc_descr_ref int_loc_descriptor (HOST_WIDE_INT);
1302 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1303
1304 /* Convert a DWARF stack opcode into its string name. */
1305
1306 static const char *
1307 dwarf_stack_op_name (unsigned int op)
1308 {
1309 const char *name = get_DW_OP_name (op);
1310
1311 if (name != NULL)
1312 return name;
1313
1314 return "OP_<unknown>";
1315 }
1316
1317 /* Return a pointer to a newly allocated location description. Location
1318 descriptions are simple expression terms that can be strung
1319 together to form more complicated location (address) descriptions. */
1320
1321 static inline dw_loc_descr_ref
1322 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1323 unsigned HOST_WIDE_INT oprnd2)
1324 {
1325 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1326
1327 descr->dw_loc_opc = op;
1328 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1329 descr->dw_loc_oprnd1.val_entry = NULL;
1330 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1331 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1332 descr->dw_loc_oprnd2.val_entry = NULL;
1333 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1334
1335 return descr;
1336 }
1337
1338 /* Return a pointer to a newly allocated location description for
1339 REG and OFFSET. */
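/* For example, new_reg_loc_descr (6, 8) yields the single operation
   DW_OP_breg6 8, i.e. the address (reg6 + 8); register numbers above 31
   are expressed with DW_OP_bregx instead.  */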
1340
1341 static inline dw_loc_descr_ref
1342 new_reg_loc_descr (unsigned int reg, unsigned HOST_WIDE_INT offset)
1343 {
1344 if (reg <= 31)
1345 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1346 offset, 0);
1347 else
1348 return new_loc_descr (DW_OP_bregx, reg, offset);
1349 }
1350
1351 /* Add a location description term to a location description expression. */
1352
1353 static inline void
1354 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1355 {
1356 dw_loc_descr_ref *d;
1357
1358 /* Find the end of the chain. */
1359 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1360 ;
1361
1362 *d = descr;
1363 }
1364
1365 /* Compare two location operands for exact equality. */
1366
1367 static bool
1368 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1369 {
1370 if (a->val_class != b->val_class)
1371 return false;
1372 switch (a->val_class)
1373 {
1374 case dw_val_class_none:
1375 return true;
1376 case dw_val_class_addr:
1377 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1378
1379 case dw_val_class_offset:
1380 case dw_val_class_unsigned_const:
1381 case dw_val_class_const:
1382 case dw_val_class_range_list:
1383 case dw_val_class_lineptr:
1384 case dw_val_class_macptr:
1385 /* These are all HOST_WIDE_INT, signed or unsigned. */
1386 return a->v.val_unsigned == b->v.val_unsigned;
1387
1388 case dw_val_class_loc:
1389 return a->v.val_loc == b->v.val_loc;
1390 case dw_val_class_loc_list:
1391 return a->v.val_loc_list == b->v.val_loc_list;
1392 case dw_val_class_die_ref:
1393 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1394 case dw_val_class_fde_ref:
1395 return a->v.val_fde_index == b->v.val_fde_index;
1396 case dw_val_class_lbl_id:
1397 case dw_val_class_high_pc:
1398 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1399 case dw_val_class_str:
1400 return a->v.val_str == b->v.val_str;
1401 case dw_val_class_flag:
1402 return a->v.val_flag == b->v.val_flag;
1403 case dw_val_class_file:
1404 return a->v.val_file == b->v.val_file;
1405 case dw_val_class_decl_ref:
1406 return a->v.val_decl_ref == b->v.val_decl_ref;
1407
1408 case dw_val_class_const_double:
1409 return (a->v.val_double.high == b->v.val_double.high
1410 && a->v.val_double.low == b->v.val_double.low);
1411
1412 case dw_val_class_wide_int:
1413 return *a->v.val_wide == *b->v.val_wide;
1414
1415 case dw_val_class_vec:
1416 {
1417 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1418 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1419
1420 return (a_len == b_len
1421 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1422 }
1423
1424 case dw_val_class_data8:
1425 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1426
1427 case dw_val_class_vms_delta:
1428 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1429 	      && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
1430
1431 case dw_val_class_discr_value:
1432 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1433 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1434 case dw_val_class_discr_list:
1435 /* It makes no sense comparing two discriminant value lists. */
1436 return false;
1437 }
1438 gcc_unreachable ();
1439 }
1440
1441 /* Compare two location atoms for exact equality. */
1442
1443 static bool
1444 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1445 {
1446 if (a->dw_loc_opc != b->dw_loc_opc)
1447 return false;
1448
1449 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1450 address size, but since we always allocate cleared storage it
1451 should be zero for other types of locations. */
1452 if (a->dtprel != b->dtprel)
1453 return false;
1454
1455 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1456 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1457 }
1458
1459 /* Compare two complete location expressions for exact equality. */
1460
1461 bool
1462 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1463 {
1464 while (1)
1465 {
1466 if (a == b)
1467 return true;
1468 if (a == NULL || b == NULL)
1469 return false;
1470 if (!loc_descr_equal_p_1 (a, b))
1471 return false;
1472
1473 a = a->dw_loc_next;
1474 b = b->dw_loc_next;
1475 }
1476 }
1477
1478
1479 /* Add a constant OFFSET to a location expression. */
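/* For example, adding 8 to an expression ending in DW_OP_fbreg -24 simply
   rewrites it to DW_OP_fbreg -16, whereas adding 8 to an expression ending
   in DW_OP_deref appends DW_OP_plus_uconst 8.  */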
1480
1481 static void
1482 loc_descr_plus_const (dw_loc_descr_ref *list_head, HOST_WIDE_INT offset)
1483 {
1484 dw_loc_descr_ref loc;
1485 HOST_WIDE_INT *p;
1486
1487 gcc_assert (*list_head != NULL);
1488
1489 if (!offset)
1490 return;
1491
1492 /* Find the end of the chain. */
1493 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1494 ;
1495
1496 p = NULL;
1497 if (loc->dw_loc_opc == DW_OP_fbreg
1498 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1499 p = &loc->dw_loc_oprnd1.v.val_int;
1500 else if (loc->dw_loc_opc == DW_OP_bregx)
1501 p = &loc->dw_loc_oprnd2.v.val_int;
1502
1503 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1504      offset.  Don't optimize if a signed integer overflow would happen.  */
1505 if (p != NULL
1506 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1507 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1508 *p += offset;
1509
1510 else if (offset > 0)
1511 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1512
1513 else
1514 {
1515 loc->dw_loc_next = int_loc_descriptor (-offset);
1516 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1517 }
1518 }
1519
1520 /* Add a constant OFFSET to a location list. */
1521
1522 static void
1523 loc_list_plus_const (dw_loc_list_ref list_head, HOST_WIDE_INT offset)
1524 {
1525 dw_loc_list_ref d;
1526 for (d = list_head; d != NULL; d = d->dw_loc_next)
1527 loc_descr_plus_const (&d->expr, offset);
1528 }
1529
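/* Size of a reference to another DIE, as used by DW_OP_call_ref and
   DW_OP_GNU_implicit_pointer: an address in DWARF 2, a section offset in
   DWARF 3 and later.  */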
1530 #define DWARF_REF_SIZE \
1531 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1532
1533 static unsigned long int get_base_type_offset (dw_die_ref);
1534
1535 /* Return the size of a location descriptor. */
1536
1537 static unsigned long
1538 size_of_loc_descr (dw_loc_descr_ref loc)
1539 {
1540 unsigned long size = 1;
1541
1542 switch (loc->dw_loc_opc)
1543 {
1544 case DW_OP_addr:
1545 size += DWARF2_ADDR_SIZE;
1546 break;
1547 case DW_OP_GNU_addr_index:
1548 case DW_OP_GNU_const_index:
1549 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1550 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1551 break;
1552 case DW_OP_const1u:
1553 case DW_OP_const1s:
1554 size += 1;
1555 break;
1556 case DW_OP_const2u:
1557 case DW_OP_const2s:
1558 size += 2;
1559 break;
1560 case DW_OP_const4u:
1561 case DW_OP_const4s:
1562 size += 4;
1563 break;
1564 case DW_OP_const8u:
1565 case DW_OP_const8s:
1566 size += 8;
1567 break;
1568 case DW_OP_constu:
1569 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1570 break;
1571 case DW_OP_consts:
1572 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1573 break;
1574 case DW_OP_pick:
1575 size += 1;
1576 break;
1577 case DW_OP_plus_uconst:
1578 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1579 break;
1580 case DW_OP_skip:
1581 case DW_OP_bra:
1582 size += 2;
1583 break;
1584 case DW_OP_breg0:
1585 case DW_OP_breg1:
1586 case DW_OP_breg2:
1587 case DW_OP_breg3:
1588 case DW_OP_breg4:
1589 case DW_OP_breg5:
1590 case DW_OP_breg6:
1591 case DW_OP_breg7:
1592 case DW_OP_breg8:
1593 case DW_OP_breg9:
1594 case DW_OP_breg10:
1595 case DW_OP_breg11:
1596 case DW_OP_breg12:
1597 case DW_OP_breg13:
1598 case DW_OP_breg14:
1599 case DW_OP_breg15:
1600 case DW_OP_breg16:
1601 case DW_OP_breg17:
1602 case DW_OP_breg18:
1603 case DW_OP_breg19:
1604 case DW_OP_breg20:
1605 case DW_OP_breg21:
1606 case DW_OP_breg22:
1607 case DW_OP_breg23:
1608 case DW_OP_breg24:
1609 case DW_OP_breg25:
1610 case DW_OP_breg26:
1611 case DW_OP_breg27:
1612 case DW_OP_breg28:
1613 case DW_OP_breg29:
1614 case DW_OP_breg30:
1615 case DW_OP_breg31:
1616 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1617 break;
1618 case DW_OP_regx:
1619 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1620 break;
1621 case DW_OP_fbreg:
1622 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1623 break;
1624 case DW_OP_bregx:
1625 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1626 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1627 break;
1628 case DW_OP_piece:
1629 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1630 break;
1631 case DW_OP_bit_piece:
1632 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1633 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1634 break;
1635 case DW_OP_deref_size:
1636 case DW_OP_xderef_size:
1637 size += 1;
1638 break;
1639 case DW_OP_call2:
1640 size += 2;
1641 break;
1642 case DW_OP_call4:
1643 size += 4;
1644 break;
1645 case DW_OP_call_ref:
1646 size += DWARF_REF_SIZE;
1647 break;
1648 case DW_OP_implicit_value:
1649 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1650 + loc->dw_loc_oprnd1.v.val_unsigned;
1651 break;
1652 case DW_OP_GNU_implicit_pointer:
1653 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1654 break;
1655 case DW_OP_GNU_entry_value:
1656 {
1657 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1658 size += size_of_uleb128 (op_size) + op_size;
1659 break;
1660 }
1661 case DW_OP_GNU_const_type:
1662 {
1663 unsigned long o
1664 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1665 size += size_of_uleb128 (o) + 1;
1666 switch (loc->dw_loc_oprnd2.val_class)
1667 {
1668 case dw_val_class_vec:
1669 size += loc->dw_loc_oprnd2.v.val_vec.length
1670 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1671 break;
1672 case dw_val_class_const:
1673 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1674 break;
1675 case dw_val_class_const_double:
1676 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1677 break;
1678 case dw_val_class_wide_int:
1679 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1680 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1681 break;
1682 default:
1683 gcc_unreachable ();
1684 }
1685 break;
1686 }
1687 case DW_OP_GNU_regval_type:
1688 {
1689 unsigned long o
1690 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1691 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1692 + size_of_uleb128 (o);
1693 }
1694 break;
1695 case DW_OP_GNU_deref_type:
1696 {
1697 unsigned long o
1698 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1699 size += 1 + size_of_uleb128 (o);
1700 }
1701 break;
1702 case DW_OP_GNU_convert:
1703 case DW_OP_GNU_reinterpret:
1704 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1705 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1706 else
1707 {
1708 unsigned long o
1709 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1710 size += size_of_uleb128 (o);
1711 }
1712 break;
1713 case DW_OP_GNU_parameter_ref:
1714 size += 4;
1715 break;
1716 default:
1717 break;
1718 }
1719
1720 return size;
1721 }
1722
1723 /* Return the size of a series of location descriptors. */
1724
1725 unsigned long
1726 size_of_locs (dw_loc_descr_ref loc)
1727 {
1728 dw_loc_descr_ref l;
1729 unsigned long size;
1730
1731 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
1732 field, to avoid writing to a PCH file. */
1733 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1734 {
1735 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
1736 break;
1737 size += size_of_loc_descr (l);
1738 }
1739 if (! l)
1740 return size;
1741
1742 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1743 {
1744 l->dw_loc_addr = size;
1745 size += size_of_loc_descr (l);
1746 }
1747
1748 return size;
1749 }
1750
1751 /* Return the size of the value in a DW_AT_discr_value attribute. */
1752
1753 static int
1754 size_of_discr_value (dw_discr_value *discr_value)
1755 {
1756 if (discr_value->pos)
1757 return size_of_uleb128 (discr_value->v.uval);
1758 else
1759 return size_of_sleb128 (discr_value->v.sval);
1760 }
1761
1762 /* Return the size of the value in a DW_discr_list attribute. */
1763
1764 static int
1765 size_of_discr_list (dw_discr_list_ref discr_list)
1766 {
1767 int size = 0;
1768
1769 for (dw_discr_list_ref list = discr_list;
1770 list != NULL;
1771 list = list->dw_discr_next)
1772 {
1773 /* One byte for the discriminant value descriptor, and then one or two
1774 LEB128 numbers, depending on whether it's a single case label or a
1775 range label. */
1776 size += 1;
1777 size += size_of_discr_value (&list->dw_discr_lower_bound);
1778 if (list->dw_discr_range != 0)
1779 size += size_of_discr_value (&list->dw_discr_upper_bound);
1780 }
1781 return size;
1782 }
1783
1784 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
1785 static void get_ref_die_offset_label (char *, dw_die_ref);
1786 static unsigned long int get_ref_die_offset (dw_die_ref);
1787
1788 /* Output location description stack opcode's operands (if any).
1789 The for_eh_or_skip parameter controls whether register numbers are
1790 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
1791 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
1792 info). This should be suppressed for the cases that have not been converted
1793 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
1794
1795 static void
1796 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
1797 {
1798 dw_val_ref val1 = &loc->dw_loc_oprnd1;
1799 dw_val_ref val2 = &loc->dw_loc_oprnd2;
1800
1801 switch (loc->dw_loc_opc)
1802 {
1803 #ifdef DWARF2_DEBUGGING_INFO
1804 case DW_OP_const2u:
1805 case DW_OP_const2s:
1806 dw2_asm_output_data (2, val1->v.val_int, NULL);
1807 break;
1808 case DW_OP_const4u:
1809 if (loc->dtprel)
1810 {
1811 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
1812 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
1813 val1->v.val_addr);
1814 fputc ('\n', asm_out_file);
1815 break;
1816 }
1817 /* FALLTHRU */
1818 case DW_OP_const4s:
1819 dw2_asm_output_data (4, val1->v.val_int, NULL);
1820 break;
1821 case DW_OP_const8u:
1822 if (loc->dtprel)
1823 {
1824 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
1825 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
1826 val1->v.val_addr);
1827 fputc ('\n', asm_out_file);
1828 break;
1829 }
1830 /* FALLTHRU */
1831 case DW_OP_const8s:
1832 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
1833 dw2_asm_output_data (8, val1->v.val_int, NULL);
1834 break;
1835 case DW_OP_skip:
1836 case DW_OP_bra:
1837 {
1838 int offset;
1839
1840 gcc_assert (val1->val_class == dw_val_class_loc);
1841 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
1842
1843 dw2_asm_output_data (2, offset, NULL);
1844 }
1845 break;
1846 case DW_OP_implicit_value:
1847 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
1848 switch (val2->val_class)
1849 {
1850 case dw_val_class_const:
1851 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
1852 break;
1853 case dw_val_class_vec:
1854 {
1855 unsigned int elt_size = val2->v.val_vec.elt_size;
1856 unsigned int len = val2->v.val_vec.length;
1857 unsigned int i;
1858 unsigned char *p;
1859
1860 if (elt_size > sizeof (HOST_WIDE_INT))
1861 {
1862 elt_size /= 2;
1863 len *= 2;
1864 }
1865 for (i = 0, p = val2->v.val_vec.array;
1866 i < len;
1867 i++, p += elt_size)
1868 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
1869 "fp or vector constant word %u", i);
1870 }
1871 break;
1872 case dw_val_class_const_double:
1873 {
1874 unsigned HOST_WIDE_INT first, second;
1875
1876 if (WORDS_BIG_ENDIAN)
1877 {
1878 first = val2->v.val_double.high;
1879 second = val2->v.val_double.low;
1880 }
1881 else
1882 {
1883 first = val2->v.val_double.low;
1884 second = val2->v.val_double.high;
1885 }
1886 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
1887 first, NULL);
1888 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
1889 second, NULL);
1890 }
1891 break;
1892 case dw_val_class_wide_int:
1893 {
1894 int i;
1895 int len = get_full_len (*val2->v.val_wide);
1896 if (WORDS_BIG_ENDIAN)
1897 for (i = len - 1; i >= 0; --i)
1898 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
1899 val2->v.val_wide->elt (i), NULL);
1900 else
1901 for (i = 0; i < len; ++i)
1902 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
1903 val2->v.val_wide->elt (i), NULL);
1904 }
1905 break;
1906 case dw_val_class_addr:
1907 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
1908 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
1909 break;
1910 default:
1911 gcc_unreachable ();
1912 }
1913 break;
1914 #else
1915 case DW_OP_const2u:
1916 case DW_OP_const2s:
1917 case DW_OP_const4u:
1918 case DW_OP_const4s:
1919 case DW_OP_const8u:
1920 case DW_OP_const8s:
1921 case DW_OP_skip:
1922 case DW_OP_bra:
1923 case DW_OP_implicit_value:
1924 /* We currently don't make any attempt to make sure these are
1925 aligned properly like we do for the main unwind info, so
1926 don't support emitting things larger than a byte if we're
1927 only doing unwinding. */
1928 gcc_unreachable ();
1929 #endif
1930 case DW_OP_const1u:
1931 case DW_OP_const1s:
1932 dw2_asm_output_data (1, val1->v.val_int, NULL);
1933 break;
1934 case DW_OP_constu:
1935 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
1936 break;
1937 case DW_OP_consts:
1938 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
1939 break;
1940 case DW_OP_pick:
1941 dw2_asm_output_data (1, val1->v.val_int, NULL);
1942 break;
1943 case DW_OP_plus_uconst:
1944 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
1945 break;
1946 case DW_OP_breg0:
1947 case DW_OP_breg1:
1948 case DW_OP_breg2:
1949 case DW_OP_breg3:
1950 case DW_OP_breg4:
1951 case DW_OP_breg5:
1952 case DW_OP_breg6:
1953 case DW_OP_breg7:
1954 case DW_OP_breg8:
1955 case DW_OP_breg9:
1956 case DW_OP_breg10:
1957 case DW_OP_breg11:
1958 case DW_OP_breg12:
1959 case DW_OP_breg13:
1960 case DW_OP_breg14:
1961 case DW_OP_breg15:
1962 case DW_OP_breg16:
1963 case DW_OP_breg17:
1964 case DW_OP_breg18:
1965 case DW_OP_breg19:
1966 case DW_OP_breg20:
1967 case DW_OP_breg21:
1968 case DW_OP_breg22:
1969 case DW_OP_breg23:
1970 case DW_OP_breg24:
1971 case DW_OP_breg25:
1972 case DW_OP_breg26:
1973 case DW_OP_breg27:
1974 case DW_OP_breg28:
1975 case DW_OP_breg29:
1976 case DW_OP_breg30:
1977 case DW_OP_breg31:
1978 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
1979 break;
1980 case DW_OP_regx:
1981 {
1982 unsigned r = val1->v.val_unsigned;
1983 if (for_eh_or_skip >= 0)
1984 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
1985 gcc_assert (size_of_uleb128 (r)
1986 == size_of_uleb128 (val1->v.val_unsigned));
1987 dw2_asm_output_data_uleb128 (r, NULL);
1988 }
1989 break;
1990 case DW_OP_fbreg:
1991 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
1992 break;
1993 case DW_OP_bregx:
1994 {
1995 unsigned r = val1->v.val_unsigned;
1996 if (for_eh_or_skip >= 0)
1997 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
1998 gcc_assert (size_of_uleb128 (r)
1999 == size_of_uleb128 (val1->v.val_unsigned));
2000 dw2_asm_output_data_uleb128 (r, NULL);
2001 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2002 }
2003 break;
2004 case DW_OP_piece:
2005 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2006 break;
2007 case DW_OP_bit_piece:
2008 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2009 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2010 break;
2011 case DW_OP_deref_size:
2012 case DW_OP_xderef_size:
2013 dw2_asm_output_data (1, val1->v.val_int, NULL);
2014 break;
2015
2016 case DW_OP_addr:
2017 if (loc->dtprel)
2018 {
2019 if (targetm.asm_out.output_dwarf_dtprel)
2020 {
2021 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2022 DWARF2_ADDR_SIZE,
2023 val1->v.val_addr);
2024 fputc ('\n', asm_out_file);
2025 }
2026 else
2027 gcc_unreachable ();
2028 }
2029 else
2030 {
2031 #ifdef DWARF2_DEBUGGING_INFO
2032 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2033 #else
2034 gcc_unreachable ();
2035 #endif
2036 }
2037 break;
2038
2039 case DW_OP_GNU_addr_index:
2040 case DW_OP_GNU_const_index:
2041 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2042 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2043 "(index into .debug_addr)");
2044 break;
2045
2046 case DW_OP_call2:
2047 case DW_OP_call4:
2048 {
2049 unsigned long die_offset
2050 = get_ref_die_offset (val1->v.val_die_ref.die);
2051 /* Make sure the offset has been computed and that we can encode it as
2052 an operand. */
2053 gcc_assert (die_offset > 0
2054 && die_offset <= ((loc->dw_loc_opc == DW_OP_call2)
2055 ? 0xffff
2056 : 0xffffffff));
2057 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2058 die_offset, NULL);
2059 }
2060 break;
2061
2062 case DW_OP_GNU_implicit_pointer:
2063 {
2064 char label[MAX_ARTIFICIAL_LABEL_BYTES
2065 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2066 gcc_assert (val1->val_class == dw_val_class_die_ref);
2067 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2068 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2069 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2070 }
2071 break;
2072
2073 case DW_OP_GNU_entry_value:
2074 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2075 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2076 break;
2077
2078 case DW_OP_GNU_const_type:
2079 {
2080 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2081 gcc_assert (o);
2082 dw2_asm_output_data_uleb128 (o, NULL);
2083 switch (val2->val_class)
2084 {
2085 case dw_val_class_const:
2086 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2087 dw2_asm_output_data (1, l, NULL);
2088 dw2_asm_output_data (l, val2->v.val_int, NULL);
2089 break;
2090 case dw_val_class_vec:
2091 {
2092 unsigned int elt_size = val2->v.val_vec.elt_size;
2093 unsigned int len = val2->v.val_vec.length;
2094 unsigned int i;
2095 unsigned char *p;
2096
2097 l = len * elt_size;
2098 dw2_asm_output_data (1, l, NULL);
2099 if (elt_size > sizeof (HOST_WIDE_INT))
2100 {
2101 elt_size /= 2;
2102 len *= 2;
2103 }
2104 for (i = 0, p = val2->v.val_vec.array;
2105 i < len;
2106 i++, p += elt_size)
2107 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2108 "fp or vector constant word %u", i);
2109 }
2110 break;
2111 case dw_val_class_const_double:
2112 {
2113 unsigned HOST_WIDE_INT first, second;
2114 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2115
2116 dw2_asm_output_data (1, 2 * l, NULL);
2117 if (WORDS_BIG_ENDIAN)
2118 {
2119 first = val2->v.val_double.high;
2120 second = val2->v.val_double.low;
2121 }
2122 else
2123 {
2124 first = val2->v.val_double.low;
2125 second = val2->v.val_double.high;
2126 }
2127 dw2_asm_output_data (l, first, NULL);
2128 dw2_asm_output_data (l, second, NULL);
2129 }
2130 break;
2131 case dw_val_class_wide_int:
2132 {
2133 int i;
2134 int len = get_full_len (*val2->v.val_wide);
2135 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2136
2137 dw2_asm_output_data (1, len * l, NULL);
2138 if (WORDS_BIG_ENDIAN)
2139 for (i = len - 1; i >= 0; --i)
2140 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2141 else
2142 for (i = 0; i < len; ++i)
2143 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2144 }
2145 break;
2146 default:
2147 gcc_unreachable ();
2148 }
2149 }
2150 break;
2151 case DW_OP_GNU_regval_type:
2152 {
2153 unsigned r = val1->v.val_unsigned;
2154 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2155 gcc_assert (o);
2156 if (for_eh_or_skip >= 0)
2157 {
2158 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2159 gcc_assert (size_of_uleb128 (r)
2160 == size_of_uleb128 (val1->v.val_unsigned));
2161 }
2162 dw2_asm_output_data_uleb128 (r, NULL);
2163 dw2_asm_output_data_uleb128 (o, NULL);
2164 }
2165 break;
2166 case DW_OP_GNU_deref_type:
2167 {
2168 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2169 gcc_assert (o);
2170 dw2_asm_output_data (1, val1->v.val_int, NULL);
2171 dw2_asm_output_data_uleb128 (o, NULL);
2172 }
2173 break;
2174 case DW_OP_GNU_convert:
2175 case DW_OP_GNU_reinterpret:
2176 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2177 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2178 else
2179 {
2180 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2181 gcc_assert (o);
2182 dw2_asm_output_data_uleb128 (o, NULL);
2183 }
2184 break;
2185
2186 case DW_OP_GNU_parameter_ref:
2187 {
2188 unsigned long o;
2189 gcc_assert (val1->val_class == dw_val_class_die_ref);
2190 o = get_ref_die_offset (val1->v.val_die_ref.die);
2191 dw2_asm_output_data (4, o, NULL);
2192 }
2193 break;
2194
2195 default:
2196 /* Other codes have no operands. */
2197 break;
2198 }
2199 }
2200
2201 /* Output a sequence of location operations.
2202 The for_eh_or_skip parameter controls whether register numbers are
2203 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2204 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2205 info). This should be suppressed for the cases that have not been converted
2206 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2207
2208 void
2209 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2210 {
2211 for (; loc != NULL; loc = loc->dw_loc_next)
2212 {
2213 enum dwarf_location_atom opc = loc->dw_loc_opc;
2214 /* Output the opcode. */
2215 if (for_eh_or_skip >= 0
2216 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2217 {
2218 unsigned r = (opc - DW_OP_breg0);
2219 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2220 gcc_assert (r <= 31);
2221 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2222 }
2223 else if (for_eh_or_skip >= 0
2224 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2225 {
2226 unsigned r = (opc - DW_OP_reg0);
2227 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2228 gcc_assert (r <= 31);
2229 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2230 }
2231
2232 dw2_asm_output_data (1, opc,
2233 "%s", dwarf_stack_op_name (opc));
2234
2235 /* Output the operand(s) (if any). */
2236 output_loc_operands (loc, for_eh_or_skip);
2237 }
2238 }
2239
2240 /* Output location description stack opcode's operands (if any).
2241 The output is single bytes on a line, suitable for .cfi_escape. */
2242
2243 static void
2244 output_loc_operands_raw (dw_loc_descr_ref loc)
2245 {
2246 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2247 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2248
2249 switch (loc->dw_loc_opc)
2250 {
2251 case DW_OP_addr:
2252 case DW_OP_GNU_addr_index:
2253 case DW_OP_GNU_const_index:
2254 case DW_OP_implicit_value:
2255 /* We cannot output addresses in .cfi_escape, only bytes. */
2256 gcc_unreachable ();
2257
2258 case DW_OP_const1u:
2259 case DW_OP_const1s:
2260 case DW_OP_pick:
2261 case DW_OP_deref_size:
2262 case DW_OP_xderef_size:
2263 fputc (',', asm_out_file);
2264 dw2_asm_output_data_raw (1, val1->v.val_int);
2265 break;
2266
2267 case DW_OP_const2u:
2268 case DW_OP_const2s:
2269 fputc (',', asm_out_file);
2270 dw2_asm_output_data_raw (2, val1->v.val_int);
2271 break;
2272
2273 case DW_OP_const4u:
2274 case DW_OP_const4s:
2275 fputc (',', asm_out_file);
2276 dw2_asm_output_data_raw (4, val1->v.val_int);
2277 break;
2278
2279 case DW_OP_const8u:
2280 case DW_OP_const8s:
2281 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2282 fputc (',', asm_out_file);
2283 dw2_asm_output_data_raw (8, val1->v.val_int);
2284 break;
2285
2286 case DW_OP_skip:
2287 case DW_OP_bra:
2288 {
2289 int offset;
2290
2291 gcc_assert (val1->val_class == dw_val_class_loc);
2292 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2293
2294 fputc (',', asm_out_file);
2295 dw2_asm_output_data_raw (2, offset);
2296 }
2297 break;
2298
2299 case DW_OP_regx:
2300 {
2301 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2302 gcc_assert (size_of_uleb128 (r)
2303 == size_of_uleb128 (val1->v.val_unsigned));
2304 fputc (',', asm_out_file);
2305 dw2_asm_output_data_uleb128_raw (r);
2306 }
2307 break;
2308
2309 case DW_OP_constu:
2310 case DW_OP_plus_uconst:
2311 case DW_OP_piece:
2312 fputc (',', asm_out_file);
2313 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2314 break;
2315
2316 case DW_OP_bit_piece:
2317 fputc (',', asm_out_file);
2318 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2319 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2320 break;
2321
2322 case DW_OP_consts:
2323 case DW_OP_breg0:
2324 case DW_OP_breg1:
2325 case DW_OP_breg2:
2326 case DW_OP_breg3:
2327 case DW_OP_breg4:
2328 case DW_OP_breg5:
2329 case DW_OP_breg6:
2330 case DW_OP_breg7:
2331 case DW_OP_breg8:
2332 case DW_OP_breg9:
2333 case DW_OP_breg10:
2334 case DW_OP_breg11:
2335 case DW_OP_breg12:
2336 case DW_OP_breg13:
2337 case DW_OP_breg14:
2338 case DW_OP_breg15:
2339 case DW_OP_breg16:
2340 case DW_OP_breg17:
2341 case DW_OP_breg18:
2342 case DW_OP_breg19:
2343 case DW_OP_breg20:
2344 case DW_OP_breg21:
2345 case DW_OP_breg22:
2346 case DW_OP_breg23:
2347 case DW_OP_breg24:
2348 case DW_OP_breg25:
2349 case DW_OP_breg26:
2350 case DW_OP_breg27:
2351 case DW_OP_breg28:
2352 case DW_OP_breg29:
2353 case DW_OP_breg30:
2354 case DW_OP_breg31:
2355 case DW_OP_fbreg:
2356 fputc (',', asm_out_file);
2357 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2358 break;
2359
2360 case DW_OP_bregx:
2361 {
2362 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2363 gcc_assert (size_of_uleb128 (r)
2364 == size_of_uleb128 (val1->v.val_unsigned));
2365 fputc (',', asm_out_file);
2366 dw2_asm_output_data_uleb128_raw (r);
2367 fputc (',', asm_out_file);
2368 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2369 }
2370 break;
2371
2372 case DW_OP_GNU_implicit_pointer:
2373 case DW_OP_GNU_entry_value:
2374 case DW_OP_GNU_const_type:
2375 case DW_OP_GNU_regval_type:
2376 case DW_OP_GNU_deref_type:
2377 case DW_OP_GNU_convert:
2378 case DW_OP_GNU_reinterpret:
2379 case DW_OP_GNU_parameter_ref:
2380 gcc_unreachable ();
2381 break;
2382
2383 default:
2384 /* Other codes have no operands. */
2385 break;
2386 }
2387 }
2388
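/* Output a sequence of location operations as comma-separated raw bytes,
suitable for use as the trailing operands of a .cfi_escape directive.  */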
2389 void
2390 output_loc_sequence_raw (dw_loc_descr_ref loc)
2391 {
2392 while (1)
2393 {
2394 enum dwarf_location_atom opc = loc->dw_loc_opc;
2395 /* Output the opcode. */
2396 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2397 {
2398 unsigned r = (opc - DW_OP_breg0);
2399 r = DWARF2_FRAME_REG_OUT (r, 1);
2400 gcc_assert (r <= 31);
2401 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2402 }
2403 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2404 {
2405 unsigned r = (opc - DW_OP_reg0);
2406 r = DWARF2_FRAME_REG_OUT (r, 1);
2407 gcc_assert (r <= 31);
2408 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2409 }
2410 /* Output the opcode. */
2411 fprintf (asm_out_file, "%#x", opc);
2412 output_loc_operands_raw (loc);
2413
2414 if (!loc->dw_loc_next)
2415 break;
2416 loc = loc->dw_loc_next;
2417
2418 fputc (',', asm_out_file);
2419 }
2420 }
2421
2422 /* This function builds a dwarf location descriptor sequence from a
2423 dw_cfa_location, adding the given OFFSET to the result of the
2424 expression. */
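/* For illustration: with an indirect CFA in DWARF register R (R <= 31),
a base offset of B and a combined offset K (OFFSET plus cfa->offset),
the generated sequence is DW_OP_bregR B; DW_OP_deref; DW_OP_plus_uconst K,
the last operation being omitted when K is zero.  */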
2425
2426 struct dw_loc_descr_node *
2427 build_cfa_loc (dw_cfa_location *cfa, HOST_WIDE_INT offset)
2428 {
2429 struct dw_loc_descr_node *head, *tmp;
2430
2431 offset += cfa->offset;
2432
2433 if (cfa->indirect)
2434 {
2435 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2436 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2437 head->dw_loc_oprnd1.val_entry = NULL;
2438 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2439 add_loc_descr (&head, tmp);
2440 if (offset != 0)
2441 {
2442 tmp = new_loc_descr (DW_OP_plus_uconst, offset, 0);
2443 add_loc_descr (&head, tmp);
2444 }
2445 }
2446 else
2447 head = new_reg_loc_descr (cfa->reg, offset);
2448
2449 return head;
2450 }
2451
2452 /* This function builds a dwarf location descriptor sequence for
2453 the address at OFFSET from the CFA when the stack is aligned to
2454 ALIGNMENT bytes. */
2455
2456 struct dw_loc_descr_node *
2457 build_cfa_aligned_loc (dw_cfa_location *cfa,
2458 HOST_WIDE_INT offset, HOST_WIDE_INT alignment)
2459 {
2460 struct dw_loc_descr_node *head;
2461 unsigned int dwarf_fp
2462 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2463
2464 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2465 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2466 {
2467 head = new_reg_loc_descr (dwarf_fp, 0);
2468 add_loc_descr (&head, int_loc_descriptor (alignment));
2469 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2470 loc_descr_plus_const (&head, offset);
2471 }
2472 else
2473 head = new_reg_loc_descr (dwarf_fp, offset);
2474 return head;
2475 }
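/* For illustration: in the frame-pointer-based case above, the generated
sequence pushes the frame pointer value, pushes ALIGNMENT as a constant,
masks the two with DW_OP_and, and finally adds OFFSET.  */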
2476 \f
2477 /* And now, the support for symbolic debugging information. */
2478
2479 /* .debug_str support. */
2480
2481 static void dwarf2out_init (const char *);
2482 static void dwarf2out_finish (const char *);
2483 static void dwarf2out_early_finish (void);
2484 static void dwarf2out_assembly_start (void);
2485 static void dwarf2out_define (unsigned int, const char *);
2486 static void dwarf2out_undef (unsigned int, const char *);
2487 static void dwarf2out_start_source_file (unsigned, const char *);
2488 static void dwarf2out_end_source_file (unsigned);
2489 static void dwarf2out_function_decl (tree);
2490 static void dwarf2out_begin_block (unsigned, unsigned);
2491 static void dwarf2out_end_block (unsigned, unsigned);
2492 static bool dwarf2out_ignore_block (const_tree);
2493 static void dwarf2out_early_global_decl (tree);
2494 static void dwarf2out_late_global_decl (tree);
2495 static void dwarf2out_type_decl (tree, int);
2496 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool);
2497 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2498 dw_die_ref);
2499 static void dwarf2out_abstract_function (tree);
2500 static void dwarf2out_var_location (rtx_insn *);
2501 static void dwarf2out_size_function (tree);
2502 static void dwarf2out_begin_function (tree);
2503 static void dwarf2out_end_function (unsigned int);
2504 static void dwarf2out_register_main_translation_unit (tree unit);
2505 static void dwarf2out_set_name (tree, tree);
2506
2507 /* The debug hooks structure. */
2508
2509 const struct gcc_debug_hooks dwarf2_debug_hooks =
2510 {
2511 dwarf2out_init,
2512 dwarf2out_finish,
2513 dwarf2out_early_finish,
2514 dwarf2out_assembly_start,
2515 dwarf2out_define,
2516 dwarf2out_undef,
2517 dwarf2out_start_source_file,
2518 dwarf2out_end_source_file,
2519 dwarf2out_begin_block,
2520 dwarf2out_end_block,
2521 dwarf2out_ignore_block,
2522 dwarf2out_source_line,
2523 dwarf2out_begin_prologue,
2524 #if VMS_DEBUGGING_INFO
2525 dwarf2out_vms_end_prologue,
2526 dwarf2out_vms_begin_epilogue,
2527 #else
2528 debug_nothing_int_charstar,
2529 debug_nothing_int_charstar,
2530 #endif
2531 dwarf2out_end_epilogue,
2532 dwarf2out_begin_function,
2533 dwarf2out_end_function, /* end_function */
2534 dwarf2out_register_main_translation_unit,
2535 dwarf2out_function_decl, /* function_decl */
2536 dwarf2out_early_global_decl,
2537 dwarf2out_late_global_decl,
2538 dwarf2out_type_decl, /* type_decl */
2539 dwarf2out_imported_module_or_decl,
2540 debug_nothing_tree, /* deferred_inline_function */
2541 /* The DWARF 2 backend tries to reduce debugging bloat by not
2542 emitting the abstract description of inline functions until
2543 something tries to reference them. */
2544 dwarf2out_abstract_function, /* outlining_inline_function */
2545 debug_nothing_rtx_code_label, /* label */
2546 debug_nothing_int, /* handle_pch */
2547 dwarf2out_var_location,
2548 dwarf2out_size_function, /* size_function */
2549 dwarf2out_switch_text_section,
2550 dwarf2out_set_name,
2551 1, /* start_end_main_source_file */
2552 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2553 };
2554
2555 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2556 {
2557 dwarf2out_init,
2558 debug_nothing_charstar,
2559 debug_nothing_void,
2560 debug_nothing_void,
2561 debug_nothing_int_charstar,
2562 debug_nothing_int_charstar,
2563 debug_nothing_int_charstar,
2564 debug_nothing_int,
2565 debug_nothing_int_int, /* begin_block */
2566 debug_nothing_int_int, /* end_block */
2567 debug_true_const_tree, /* ignore_block */
2568 dwarf2out_source_line, /* source_line */
2569 debug_nothing_int_charstar, /* begin_prologue */
2570 debug_nothing_int_charstar, /* end_prologue */
2571 debug_nothing_int_charstar, /* begin_epilogue */
2572 debug_nothing_int_charstar, /* end_epilogue */
2573 debug_nothing_tree, /* begin_function */
2574 debug_nothing_int, /* end_function */
2575 debug_nothing_tree, /* register_main_translation_unit */
2576 debug_nothing_tree, /* function_decl */
2577 debug_nothing_tree, /* early_global_decl */
2578 debug_nothing_tree, /* late_global_decl */
2579 debug_nothing_tree_int, /* type_decl */
2580 debug_nothing_tree_tree_tree_bool, /* imported_module_or_decl */
2581 debug_nothing_tree, /* deferred_inline_function */
2582 debug_nothing_tree, /* outlining_inline_function */
2583 debug_nothing_rtx_code_label, /* label */
2584 debug_nothing_int, /* handle_pch */
2585 debug_nothing_rtx_insn, /* var_location */
2586 debug_nothing_tree, /* size_function */
2587 debug_nothing_void, /* switch_text_section */
2588 debug_nothing_tree_tree, /* set_name */
2589 0, /* start_end_main_source_file */
2590 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2591 };
2592 \f
2593 /* NOTE: In the comments in this file, many references are made to
2594 "Debugging Information Entries". This term is abbreviated as `DIE'
2595 throughout the remainder of this file. */
2596
2597 /* An internal representation of the DWARF output is built, and then
2598 walked to generate the DWARF debugging info. The walk of the internal
2599 representation is done after the entire program has been compiled.
2600 The types below are used to describe the internal representation. */
2601
2602 /* Whether to put type DIEs into their own section .debug_types instead
2603 of making them part of the .debug_info section. This is only supported
2604 for DWARF version 4 or higher, and only if the user has not disabled it
2605 through -fno-debug-types-section. It is more efficient to put type DIEs
2606 in separate comdat sections, since the linker will then be able to
2607 remove duplicates. But not all tools support .debug_types sections
2608 yet. */
2609
2610 #define use_debug_types (dwarf_version >= 4 && flag_debug_types_section)
2611
2612 /* Various DIE's use offsets relative to the beginning of the
2613 .debug_info section to refer to each other. */
2614
2615 typedef long int dw_offset;
2616
2617 struct comdat_type_node;
2618
2619 /* The entries in the line_info table more-or-less mirror the opcodes
2620 that are used in the real dwarf line table. Arrays of these entries
2621 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2622 supported. */
2623
2624 enum dw_line_info_opcode {
2625 /* Emit DW_LNE_set_address; the operand is the label index. */
2626 LI_set_address,
2627
2628 /* Emit a row to the matrix with the given line. This may be done
2629 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2630 special opcodes. */
2631 LI_set_line,
2632
2633 /* Emit a DW_LNS_set_file. */
2634 LI_set_file,
2635
2636 /* Emit a DW_LNS_set_column. */
2637 LI_set_column,
2638
2639 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2640 LI_negate_stmt,
2641
2642 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2643 LI_set_prologue_end,
2644 LI_set_epilogue_begin,
2645
2646 /* Emit a DW_LNE_set_discriminator. */
2647 LI_set_discriminator
2648 };
2649
2650 typedef struct GTY(()) dw_line_info_struct {
2651 enum dw_line_info_opcode opcode;
2652 unsigned int val;
2653 } dw_line_info_entry;
2654
2655
2656 struct GTY(()) dw_line_info_table {
2657 /* The label that marks the end of this section. */
2658 const char *end_label;
2659
2660 /* The values for the last row of the matrix, as collected in the table.
2661 These are used to minimize the changes to the next row. */
2662 unsigned int file_num;
2663 unsigned int line_num;
2664 unsigned int column_num;
2665 int discrim_num;
2666 bool is_stmt;
2667 bool in_use;
2668
2669 vec<dw_line_info_entry, va_gc> *entries;
2670 };
2671
2672
2673 /* Each DIE attribute has a field specifying the attribute kind
2674 and an attribute value. Attributes are stored in a vector
2675 attached to the DIE they modify. */
2676
2677 typedef struct GTY(()) dw_attr_struct {
2678 enum dwarf_attribute dw_attr;
2679 dw_val_node dw_attr_val;
2680 }
2681 dw_attr_node;
2682
2683
2684 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
2685 The children of each node form a circular list linked by
2686 die_sib. die_child points to the node *before* the "first" child node. */
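/* In other words, die_child is the last child, and die_child->die_sib is
the first child (when the DIE has any children at all).  */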
2687
2688 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
2689 union die_symbol_or_type_node
2690 {
2691 const char * GTY ((tag ("0"))) die_symbol;
2692 comdat_type_node *GTY ((tag ("1"))) die_type_node;
2693 }
2694 GTY ((desc ("%0.comdat_type_p"))) die_id;
2695 vec<dw_attr_node, va_gc> *die_attr;
2696 dw_die_ref die_parent;
2697 dw_die_ref die_child;
2698 dw_die_ref die_sib;
2699 dw_die_ref die_definition; /* ref from a specification to its definition */
2700 dw_offset die_offset;
2701 unsigned long die_abbrev;
2702 int die_mark;
2703 unsigned int decl_id;
2704 enum dwarf_tag die_tag;
2705 /* Die is used and must not be pruned as unused. */
2706 BOOL_BITFIELD die_perennial_p : 1;
2707 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
2708 /* Lots of spare bits. */
2709 }
2710 die_node;
2711
2712 /* Set to TRUE while dwarf2out_early_global_decl is running. */
2713 static bool early_dwarf;
2714 struct set_early_dwarf {
2715 bool saved;
2716 set_early_dwarf () : saved(early_dwarf) { early_dwarf = true; }
2717 ~set_early_dwarf () { early_dwarf = saved; }
2718 };
2719
2720 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
2721 #define FOR_EACH_CHILD(die, c, expr) do { \
2722 c = die->die_child; \
2723 if (c) do { \
2724 c = c->die_sib; \
2725 expr; \
2726 } while (c != die->die_child); \
2727 } while (0)
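/* For example (hypothetical), counting the children of DIE:
dw_die_ref c;
int n = 0;
FOR_EACH_CHILD (die, c, n++);  */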
2728
2729 /* The pubname structure */
2730
2731 typedef struct GTY(()) pubname_struct {
2732 dw_die_ref die;
2733 const char *name;
2734 }
2735 pubname_entry;
2736
2737
2738 struct GTY(()) dw_ranges {
2739 /* If this is positive, it's a block number; otherwise it's a
2740 bitwise-negated index into dw_ranges_by_label. */
2741 int num;
2742 };
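/* For example, an entry referring to element 3 of dw_ranges_by_label is
stored as ~3, i.e. -4.  */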
2743
2744 /* A structure to hold a macinfo entry. */
2745
2746 typedef struct GTY(()) macinfo_struct {
2747 unsigned char code;
2748 unsigned HOST_WIDE_INT lineno;
2749 const char *info;
2750 }
2751 macinfo_entry;
2752
2753
2754 struct GTY(()) dw_ranges_by_label {
2755 const char *begin;
2756 const char *end;
2757 };
2758
2759 /* The comdat type node structure. */
2760 struct GTY(()) comdat_type_node
2761 {
2762 dw_die_ref root_die;
2763 dw_die_ref type_die;
2764 dw_die_ref skeleton_die;
2765 char signature[DWARF_TYPE_SIGNATURE_SIZE];
2766 comdat_type_node *next;
2767 };
2768
2769 /* A list of DIEs for which we can't determine ancestry (parent_die
2770 field) just yet. Later in dwarf2out_finish we will fill in the
2771 missing bits. */
2772 typedef struct GTY(()) limbo_die_struct {
2773 dw_die_ref die;
2774 /* The tree for which this DIE was created. We use this to
2775 determine ancestry later. */
2776 tree created_for;
2777 struct limbo_die_struct *next;
2778 }
2779 limbo_die_node;
2780
2781 typedef struct skeleton_chain_struct
2782 {
2783 dw_die_ref old_die;
2784 dw_die_ref new_die;
2785 struct skeleton_chain_struct *parent;
2786 }
2787 skeleton_chain_node;
2788
2789 /* Define a macro which returns nonzero for a TYPE_DECL which was
2790 implicitly generated for a type.
2791
2792 Note that, unlike the C front-end (which generates a NULL named
2793 TYPE_DECL node for each complete tagged type, each array type,
2794 and each function type node created) the C++ front-end generates
2795 a _named_ TYPE_DECL node for each tagged type node created.
2796 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
2797 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
2798 front-end, but for each type, tagged or not. */
2799
2800 #define TYPE_DECL_IS_STUB(decl) \
2801 (DECL_NAME (decl) == NULL_TREE \
2802 || (DECL_ARTIFICIAL (decl) \
2803 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
2804 /* This is necessary for stub decls that \
2805 appear in nested inline functions. */ \
2806 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
2807 && (decl_ultimate_origin (decl) \
2808 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
2809
2810 /* Information concerning the compilation unit's programming
2811 language, and compiler version. */
2812
2813 /* Fixed size portion of the DWARF compilation unit header. */
2814 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
2815 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 3)
2816
2817 /* Fixed size portion of the DWARF comdat type unit header. */
2818 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
2819 (DWARF_COMPILE_UNIT_HEADER_SIZE + DWARF_TYPE_SIGNATURE_SIZE \
2820 + DWARF_OFFSET_SIZE)
2821
2822 /* Fixed size portion of public names info. */
2823 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
2824
2825 /* Fixed size portion of the address range info. */
2826 #define DWARF_ARANGES_HEADER_SIZE \
2827 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
2828 DWARF2_ADDR_SIZE * 2) \
2829 - DWARF_INITIAL_LENGTH_SIZE)
2830
2831 /* Size of padding portion in the address range info. It must be
2832 aligned to twice the pointer size. */
2833 #define DWARF_ARANGES_PAD_SIZE \
2834 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
2835 DWARF2_ADDR_SIZE * 2) \
2836 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
2837
2838 /* Use assembler line directives if available. */
2839 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
2840 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
2841 #define DWARF2_ASM_LINE_DEBUG_INFO 1
2842 #else
2843 #define DWARF2_ASM_LINE_DEBUG_INFO 0
2844 #endif
2845 #endif
2846
2847 /* Minimum line offset in a special line info. opcode.
2848 This value was chosen to give a reasonable range of values. */
2849 #define DWARF_LINE_BASE -10
2850
2851 /* First special line opcode - leave room for the standard opcodes. */
2852 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
2853
2854 /* Range of line offsets in a special line info. opcode. */
2855 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
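/* For reference (per the DWARF line number program encoding): a special
opcode encodes a line advance L, with DWARF_LINE_BASE <= L
< DWARF_LINE_BASE + DWARF_LINE_RANGE, together with an operation advance A,
as (L - DWARF_LINE_BASE) + (DWARF_LINE_RANGE * A) + DWARF_LINE_OPCODE_BASE,
provided the result still fits in a single byte (<= 255).  */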
2856
2857 /* Flag that indicates the initial value of the is_stmt_start flag.
2858 In the present implementation, we do not mark any lines as
2859 the beginning of a source statement, because that information
2860 is not made available by the GCC front-end. */
2861 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
2862
2863 /* Maximum number of operations per instruction bundle. */
2864 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
2865 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
2866 #endif
2867
2868 /* This location is used by calc_die_sizes() to keep track of
2869 the offset of each DIE within the .debug_info section. */
2870 static unsigned long next_die_offset;
2871
2872 /* Record the root of the DIE's built for the current compilation unit. */
2873 static GTY(()) dw_die_ref single_comp_unit_die;
2874
2875 /* A list of type DIEs that have been separated into comdat sections. */
2876 static GTY(()) comdat_type_node *comdat_type_list;
2877
2878 /* A list of DIEs with a NULL parent waiting to be relocated. */
2879 static GTY(()) limbo_die_node *limbo_die_list;
2880
2881 /* A list of DIEs for which we may have to generate
2882 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
2883 static GTY(()) limbo_die_node *deferred_asm_name;
2884
2885 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
2886 {
2887 typedef const char *compare_type;
2888
2889 static hashval_t hash (dwarf_file_data *);
2890 static bool equal (dwarf_file_data *, const char *);
2891 };
2892
2893 /* Filenames referenced by this compilation unit. */
2894 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
2895
2896 struct decl_die_hasher : ggc_ptr_hash<die_node>
2897 {
2898 typedef tree compare_type;
2899
2900 static hashval_t hash (die_node *);
2901 static bool equal (die_node *, tree);
2902 };
2903 /* A hash table of references to DIE's that describe declarations.
2904 The key is a DECL_UID() which is a unique number identifying each decl. */
2905 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
2906
2907 struct block_die_hasher : ggc_ptr_hash<die_struct>
2908 {
2909 static hashval_t hash (die_struct *);
2910 static bool equal (die_struct *, die_struct *);
2911 };
2912
2913 /* A hash table of references to DIE's that describe COMMON blocks.
2914 The key is DECL_UID() ^ die_parent. */
2915 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
2916
2917 typedef struct GTY(()) die_arg_entry_struct {
2918 dw_die_ref die;
2919 tree arg;
2920 } die_arg_entry;
2921
2922
2923 /* Node of the variable location list. */
2924 struct GTY ((chain_next ("%h.next"))) var_loc_node {
2925 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
2926 EXPR_LIST chain. For small bitsizes, bitsize is encoded
2927 in mode of the EXPR_LIST node and first EXPR_LIST operand
2928 is either NOTE_INSN_VAR_LOCATION for a piece with a known
2929 location or NULL for padding. For larger bitsizes,
2930 mode is 0 and first operand is a CONCAT with bitsize
2931 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
2932 NULL as second operand. */
2933 rtx GTY (()) loc;
2934 const char * GTY (()) label;
2935 struct var_loc_node * GTY (()) next;
2936 };
2937
2938 /* Variable location list. */
2939 struct GTY ((for_user)) var_loc_list_def {
2940 struct var_loc_node * GTY (()) first;
2941
2942 /* Pointer to the last or last-but-one element of the
2943 chained list. If the list is empty, both first and
2944 last are NULL. If the list contains just one node,
2945 or the last node is certainly not redundant, this points
2946 to the last node; otherwise it points to the last but one.
2947 Do not mark it for GC because it is marked through the chain. */
2948 struct var_loc_node * GTY ((skip ("%h"))) last;
2949
2950 /* Pointer to the last element before section switch,
2951 if NULL, either sections weren't switched or first
2952 is after section switch. */
2953 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
2954
2955 /* DECL_UID of the variable decl. */
2956 unsigned int decl_id;
2957 };
2958 typedef struct var_loc_list_def var_loc_list;
2959
2960 /* Call argument location list. */
2961 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
2962 rtx GTY (()) call_arg_loc_note;
2963 const char * GTY (()) label;
2964 tree GTY (()) block;
2965 bool tail_call_p;
2966 rtx GTY (()) symbol_ref;
2967 struct call_arg_loc_node * GTY (()) next;
2968 };
2969
2970
2971 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
2972 {
2973 typedef const_tree compare_type;
2974
2975 static hashval_t hash (var_loc_list *);
2976 static bool equal (var_loc_list *, const_tree);
2977 };
2978
2979 /* Table of decl location linked lists. */
2980 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
2981
2982 /* Head and tail of call_arg_loc chain. */
2983 static GTY (()) struct call_arg_loc_node *call_arg_locations;
2984 static struct call_arg_loc_node *call_arg_loc_last;
2985
2986 /* Number of call sites in the current function. */
2987 static int call_site_count = -1;
2988 /* Number of tail call sites in the current function. */
2989 static int tail_call_site_count = -1;
2990
2991 /* A cached location list. */
2992 struct GTY ((for_user)) cached_dw_loc_list_def {
2993 /* The DECL_UID of the decl that this entry describes. */
2994 unsigned int decl_id;
2995
2996 /* The cached location list. */
2997 dw_loc_list_ref loc_list;
2998 };
2999 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3000
3001 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3002 {
3003
3004 typedef const_tree compare_type;
3005
3006 static hashval_t hash (cached_dw_loc_list *);
3007 static bool equal (cached_dw_loc_list *, const_tree);
3008 };
3009
3010 /* Table of cached location lists. */
3011 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3012
3013 /* A pointer to the base of a list of references to DIE's that
3014 are uniquely identified by their tag, presence/absence of
3015 children DIE's, and list of attribute/value pairs. */
3016 static GTY((length ("abbrev_die_table_allocated")))
3017 dw_die_ref *abbrev_die_table;
3018
3019 /* Number of elements currently allocated for abbrev_die_table. */
3020 static GTY(()) unsigned abbrev_die_table_allocated;
3021
3022 /* Number of elements in abbrev_die_table currently in use. */
3023 static GTY(()) unsigned abbrev_die_table_in_use;
3024
3025 /* A hash map to remember the stack usage for DWARF procedures. The value
3026 stored is the difference between the stack size before the DWARF procedure
3027 invocation and after it returns. In other words, for a DWARF procedure
3028 that consumes N stack slots and that pushes M ones, this stores M - N. */
3029 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
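/* For example, a DWARF procedure whose expression pops two stack entries
and pushes a single result is recorded here as 1 - 2 == -1.  */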
3030
3031 /* Size (in elements) of increments by which we may expand the
3032 abbrev_die_table. */
3033 #define ABBREV_DIE_TABLE_INCREMENT 256
3034
3035 /* A global counter for generating labels for line number data. */
3036 static unsigned int line_info_label_num;
3037
3038 /* The current table to which we should emit line number information
3039 for the current function. This will be set up at the beginning of
3040 assembly for the function. */
3041 static dw_line_info_table *cur_line_info_table;
3042
3043 /* The two default tables of line number info. */
3044 static GTY(()) dw_line_info_table *text_section_line_info;
3045 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3046
3047 /* The set of all non-default tables of line number info. */
3048 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3049
3050 /* A flag to tell pubnames/types export if there is an info section to
3051 refer to. */
3052 static bool info_section_emitted;
3053
3054 /* A pointer to the base of a table that contains a list of publicly
3055 accessible names. */
3056 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3057
3058 /* A pointer to the base of a table that contains a list of publicly
3059 accessible types. */
3060 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3061
3062 /* A pointer to the base of a table that contains a list of macro
3063 defines/undefines (and file start/end markers). */
3064 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3065
3066 /* True if .debug_macinfo or .debug_macros section is going to be
3067 emitted. */
3068 #define have_macinfo \
3069 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3070 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3071 && !macinfo_table->is_empty ())
3072
3073 /* Array of dies for which we should generate .debug_ranges info. */
3074 static GTY ((length ("ranges_table_allocated"))) dw_ranges *ranges_table;
3075
3076 /* Number of elements currently allocated for ranges_table. */
3077 static GTY(()) unsigned ranges_table_allocated;
3078
3079 /* Number of elements in ranges_table currently in use. */
3080 static GTY(()) unsigned ranges_table_in_use;
3081
3082 /* Array of pairs of labels referenced in ranges_table. */
3083 static GTY ((length ("ranges_by_label_allocated")))
3084 dw_ranges_by_label *ranges_by_label;
3085
3086 /* Number of elements currently allocated for ranges_by_label. */
3087 static GTY(()) unsigned ranges_by_label_allocated;
3088
3089 /* Number of elements in ranges_by_label currently in use. */
3090 static GTY(()) unsigned ranges_by_label_in_use;
3091
3092 /* Size (in elements) of increments by which we may expand the
3093 ranges_table. */
3094 #define RANGES_TABLE_INCREMENT 64
3095
3096 /* Whether we have location lists that need outputting */
3097 static GTY(()) bool have_location_lists;
3098
3099 /* Unique label counter. */
3100 static GTY(()) unsigned int loclabel_num;
3101
3102 /* Unique label counter for point-of-call tables. */
3103 static GTY(()) unsigned int poc_label_num;
3104
3105 /* The last file entry emitted by maybe_emit_file(). */
3106 static GTY(()) struct dwarf_file_data * last_emitted_file;
3107
3108 /* Number of internal labels generated by gen_internal_sym(). */
3109 static GTY(()) int label_num;
3110
3111 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3112
3113 /* Instances of generic types for which we need to generate debug
3114 info that describes their generic parameters and arguments. That
3115 generation needs to happen once all types are properly laid out so
3116 we do it at the end of compilation. */
3117 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3118
3119 /* Offset from the "steady-state frame pointer" to the frame base,
3120 within the current function. */
3121 static HOST_WIDE_INT frame_pointer_fb_offset;
3122 static bool frame_pointer_fb_offset_valid;
3123
3124 static vec<dw_die_ref> base_types;
3125
3126 /* Flags to represent a set of attribute classes for attributes that represent
3127 a scalar value (bounds, pointers, ...). */
3128 enum dw_scalar_form
3129 {
3130 dw_scalar_form_constant = 0x01,
3131 dw_scalar_form_exprloc = 0x02,
3132 dw_scalar_form_reference = 0x04
3133 };
3134
3135 /* Forward declarations for functions defined in this file. */
3136
3137 static int is_pseudo_reg (const_rtx);
3138 static tree type_main_variant (tree);
3139 static int is_tagged_type (const_tree);
3140 static const char *dwarf_tag_name (unsigned);
3141 static const char *dwarf_attr_name (unsigned);
3142 static const char *dwarf_form_name (unsigned);
3143 static tree decl_ultimate_origin (const_tree);
3144 static tree decl_class_context (tree);
3145 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3146 static inline enum dw_val_class AT_class (dw_attr_node *);
3147 static inline unsigned int AT_index (dw_attr_node *);
3148 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3149 static inline unsigned AT_flag (dw_attr_node *);
3150 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3151 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3152 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3153 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3154 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3155 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3156 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3157 unsigned int, unsigned char *);
3158 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3159 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3160 static inline const char *AT_string (dw_attr_node *);
3161 static enum dwarf_form AT_string_form (dw_attr_node *);
3162 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3163 static void add_AT_specification (dw_die_ref, dw_die_ref);
3164 static inline dw_die_ref AT_ref (dw_attr_node *);
3165 static inline int AT_ref_external (dw_attr_node *);
3166 static inline void set_AT_ref_external (dw_attr_node *, int);
3167 static void add_AT_fde_ref (dw_die_ref, enum dwarf_attribute, unsigned);
3168 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3169 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3170 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3171 dw_loc_list_ref);
3172 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3173 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3174 static void remove_addr_table_entry (addr_table_entry *);
3175 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3176 static inline rtx AT_addr (dw_attr_node *);
3177 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3178 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3179 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3180 static void add_AT_offset (dw_die_ref, enum dwarf_attribute,
3181 unsigned HOST_WIDE_INT);
3182 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3183 unsigned long, bool);
3184 static inline const char *AT_lbl (dw_attr_node *);
3185 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3186 static const char *get_AT_low_pc (dw_die_ref);
3187 static const char *get_AT_hi_pc (dw_die_ref);
3188 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3189 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3190 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3191 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3192 static bool is_cxx (void);
3193 static bool is_fortran (void);
3194 static bool is_ada (void);
3195 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3196 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3197 static void add_child_die (dw_die_ref, dw_die_ref);
3198 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3199 static dw_die_ref lookup_type_die (tree);
3200 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3201 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3202 static void equate_type_number_to_die (tree, dw_die_ref);
3203 static dw_die_ref lookup_decl_die (tree);
3204 static var_loc_list *lookup_decl_loc (const_tree);
3205 static void equate_decl_number_to_die (tree, dw_die_ref);
3206 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *);
3207 static void print_spaces (FILE *);
3208 static void print_die (dw_die_ref, FILE *);
3209 static dw_die_ref push_new_compile_unit (dw_die_ref, dw_die_ref);
3210 static dw_die_ref pop_compile_unit (dw_die_ref);
3211 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3212 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3213 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3214 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3215 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3216 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3217 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3218 struct md5_ctx *, int *);
3219 struct checksum_attributes;
3220 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3221 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3222 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3223 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3224 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3225 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3226 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3227 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3228 static int same_die_p_wrap (dw_die_ref, dw_die_ref);
3229 static void compute_section_prefix (dw_die_ref);
3230 static int is_type_die (dw_die_ref);
3231 static int is_comdat_die (dw_die_ref);
3232 static int is_symbol_die (dw_die_ref);
3233 static inline bool is_template_instantiation (dw_die_ref);
3234 static void assign_symbol_names (dw_die_ref);
3235 static void break_out_includes (dw_die_ref);
3236 static int is_declaration_die (dw_die_ref);
3237 static int should_move_die_to_comdat (dw_die_ref);
3238 static dw_die_ref clone_as_declaration (dw_die_ref);
3239 static dw_die_ref clone_die (dw_die_ref);
3240 static dw_die_ref clone_tree (dw_die_ref);
3241 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3242 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3243 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3244 static dw_die_ref generate_skeleton (dw_die_ref);
3245 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3246 dw_die_ref,
3247 dw_die_ref);
3248 static void break_out_comdat_types (dw_die_ref);
3249 static void copy_decls_for_unworthy_types (dw_die_ref);
3250
3251 static void add_sibling_attributes (dw_die_ref);
3252 static void output_location_lists (dw_die_ref);
3253 static int constant_size (unsigned HOST_WIDE_INT);
3254 static unsigned long size_of_die (dw_die_ref);
3255 static void calc_die_sizes (dw_die_ref);
3256 static void calc_base_type_die_sizes (void);
3257 static void mark_dies (dw_die_ref);
3258 static void unmark_dies (dw_die_ref);
3259 static void unmark_all_dies (dw_die_ref);
3260 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3261 static unsigned long size_of_aranges (void);
3262 static enum dwarf_form value_format (dw_attr_node *);
3263 static void output_value_format (dw_attr_node *);
3264 static void output_abbrev_section (void);
3265 static void output_die_abbrevs (unsigned long, dw_die_ref);
3266 static void output_die_symbol (dw_die_ref);
3267 static void output_die (dw_die_ref);
3268 static void output_compilation_unit_header (void);
3269 static void output_comp_unit (dw_die_ref, int);
3270 static void output_comdat_type_unit (comdat_type_node *);
3271 static const char *dwarf2_name (tree, int);
3272 static void add_pubname (tree, dw_die_ref);
3273 static void add_enumerator_pubname (const char *, dw_die_ref);
3274 static void add_pubname_string (const char *, dw_die_ref);
3275 static void add_pubtype (tree, dw_die_ref);
3276 static void output_pubnames (vec<pubname_entry, va_gc> *);
3277 static void output_aranges (void);
3278 static unsigned int add_ranges_num (int);
3279 static unsigned int add_ranges (const_tree);
3280 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3281 bool *, bool);
3282 static void output_ranges (void);
3283 static dw_line_info_table *new_line_info_table (void);
3284 static void output_line_info (bool);
3285 static void output_file_names (void);
3286 static dw_die_ref base_type_die (tree, bool);
3287 static int is_base_type (tree);
3288 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3289 static int decl_quals (const_tree);
3290 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3291 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3292 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3293 static int type_is_enum (const_tree);
3294 static unsigned int dbx_reg_number (const_rtx);
3295 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3296 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3297 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3298 enum var_init_status);
3299 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3300 enum var_init_status);
3301 static dw_loc_descr_ref based_loc_descr (rtx, HOST_WIDE_INT,
3302 enum var_init_status);
3303 static int is_based_loc (const_rtx);
3304 static bool resolve_one_addr (rtx *);
3305 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3306 enum var_init_status);
3307 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3308 enum var_init_status);
3309 struct loc_descr_context;
3310 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3311 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3312 static dw_loc_list_ref loc_list_from_tree (tree, int,
3313 const struct loc_descr_context *);
3314 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3315 const struct loc_descr_context *);
3316 static HOST_WIDE_INT ceiling (HOST_WIDE_INT, unsigned int);
3317 static tree field_type (const_tree);
3318 static unsigned int simple_type_align_in_bits (const_tree);
3319 static unsigned int simple_decl_align_in_bits (const_tree);
3320 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3321 struct vlr_context;
3322 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3323 HOST_WIDE_INT *);
3324 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3325 dw_loc_list_ref);
3326 static void add_data_member_location_attribute (dw_die_ref, tree,
3327 struct vlr_context *);
3328 static bool add_const_value_attribute (dw_die_ref, rtx);
3329 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3330 static void insert_wide_int (const wide_int &, unsigned char *, int);
3331 static void insert_float (const_rtx, unsigned char *);
3332 static rtx rtl_for_decl_location (tree);
3333 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3334 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3335 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3336 static void add_name_attribute (dw_die_ref, const char *);
3337 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3338 static void add_comp_dir_attribute (dw_die_ref);
3339 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3340 const struct loc_descr_context *);
3341 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3342 const struct loc_descr_context *);
3343 static void add_subscript_info (dw_die_ref, tree, bool);
3344 static void add_byte_size_attribute (dw_die_ref, tree);
3345 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3346 struct vlr_context *);
3347 static void add_bit_size_attribute (dw_die_ref, tree);
3348 static void add_prototyped_attribute (dw_die_ref, tree);
3349 static dw_die_ref add_abstract_origin_attribute (dw_die_ref, tree);
3350 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3351 static void add_src_coords_attributes (dw_die_ref, tree);
3352 static void add_name_and_src_coords_attributes (dw_die_ref, tree);
3353 static void add_discr_value (dw_die_ref, dw_discr_value *);
3354 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3355 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3356 static void push_decl_scope (tree);
3357 static void pop_decl_scope (void);
3358 static dw_die_ref scope_die_for (tree, dw_die_ref);
3359 static inline int local_scope_p (dw_die_ref);
3360 static inline int class_scope_p (dw_die_ref);
3361 static inline int class_or_namespace_scope_p (dw_die_ref);
3362 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3363 static void add_calling_convention_attribute (dw_die_ref, tree);
3364 static const char *type_tag (const_tree);
3365 static tree member_declared_type (const_tree);
3366 #if 0
3367 static const char *decl_start_label (tree);
3368 #endif
3369 static void gen_array_type_die (tree, dw_die_ref);
3370 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3371 #if 0
3372 static void gen_entry_point_die (tree, dw_die_ref);
3373 #endif
3374 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3375 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3376 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3377 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3378 static void gen_formal_types_die (tree, dw_die_ref);
3379 static void gen_subprogram_die (tree, dw_die_ref);
3380 static void gen_variable_die (tree, tree, dw_die_ref);
3381 static void gen_const_die (tree, dw_die_ref);
3382 static void gen_label_die (tree, dw_die_ref);
3383 static void gen_lexical_block_die (tree, dw_die_ref);
3384 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3385 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3386 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3387 static dw_die_ref gen_compile_unit_die (const char *);
3388 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3389 static void gen_member_die (tree, dw_die_ref);
3390 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3391 enum debug_info_usage);
3392 static void gen_subroutine_type_die (tree, dw_die_ref);
3393 static void gen_typedef_die (tree, dw_die_ref);
3394 static void gen_type_die (tree, dw_die_ref);
3395 static void gen_block_die (tree, dw_die_ref);
3396 static void decls_for_scope (tree, dw_die_ref);
3397 static bool is_naming_typedef_decl (const_tree);
3398 static inline dw_die_ref get_context_die (tree);
3399 static void gen_namespace_die (tree, dw_die_ref);
3400 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3401 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3402 static dw_die_ref force_decl_die (tree);
3403 static dw_die_ref force_type_die (tree);
3404 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3405 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3406 static struct dwarf_file_data * lookup_filename (const char *);
3407 static void retry_incomplete_types (void);
3408 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3409 static void gen_generic_params_dies (tree);
3410 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3411 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3412 static void splice_child_die (dw_die_ref, dw_die_ref);
3413 static int file_info_cmp (const void *, const void *);
3414 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *,
3415 const char *, const char *);
3416 static void output_loc_list (dw_loc_list_ref);
3417 static char *gen_internal_sym (const char *);
3418 static bool want_pubnames (void);
3419
3420 static void prune_unmark_dies (dw_die_ref);
3421 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3422 static void prune_unused_types_mark (dw_die_ref, int);
3423 static void prune_unused_types_walk (dw_die_ref);
3424 static void prune_unused_types_walk_attribs (dw_die_ref);
3425 static void prune_unused_types_prune (dw_die_ref);
3426 static void prune_unused_types (void);
3427 static int maybe_emit_file (struct dwarf_file_data *fd);
3428 static inline const char *AT_vms_delta1 (dw_attr_node *);
3429 static inline const char *AT_vms_delta2 (dw_attr_node *);
3430 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3431 const char *, const char *);
3432 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3433 static void gen_remaining_tmpl_value_param_die_attribute (void);
3434 static bool generic_type_p (tree);
3435 static void schedule_generic_params_dies_gen (tree t);
3436 static void gen_scheduled_generic_parms_dies (void);
3437
3438 static const char *comp_dir_string (void);
3439
3440 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3441
3442 /* enum for tracking thread-local variables whose address is really an offset
3443 relative to the TLS pointer, which will need link-time relocation, but will
3444 not need relocation by the DWARF consumer. */
3445
3446 enum dtprel_bool
3447 {
3448 dtprel_false = 0,
3449 dtprel_true = 1
3450 };
3451
3452 /* Return the operator to use for an address of a variable. For dtprel_true, we
3453 use DW_OP_const*. For regular variables, which need both link-time
3454 relocation and consumer-level relocation (e.g., to account for shared objects
3455 loaded at a random address), we use DW_OP_addr*. */
3456
3457 static inline enum dwarf_location_atom
3458 dw_addr_op (enum dtprel_bool dtprel)
3459 {
3460 if (dtprel == dtprel_true)
3461 return (dwarf_split_debug_info ? DW_OP_GNU_const_index
3462 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3463 else
3464 return dwarf_split_debug_info ? DW_OP_GNU_addr_index : DW_OP_addr;
3465 }
3466
3467 /* Return a pointer to a newly allocated address location description. If
3468 dwarf_split_debug_info is true, then record the address with the appropriate
3469 relocation. */
3470 static inline dw_loc_descr_ref
3471 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3472 {
3473 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3474
3475 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3476 ref->dw_loc_oprnd1.v.val_addr = addr;
3477 ref->dtprel = dtprel;
3478 if (dwarf_split_debug_info)
3479 ref->dw_loc_oprnd1.val_entry
3480 = add_addr_table_entry (addr,
3481 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3482 else
3483 ref->dw_loc_oprnd1.val_entry = NULL;
3484
3485 return ref;
3486 }
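/* Illustrative sketch, not part of the original source: a caller that needs
   the address of an ordinary variable builds

       dw_loc_descr_ref descr = new_addr_loc_descr (sym_rtx, dtprel_false);

   which uses DW_OP_addr (or DW_OP_GNU_addr_index under -gsplit-dwarf),
   whereas a thread-local offset is requested with dtprel_true and is encoded
   via DW_OP_const4u/DW_OP_const8u (or DW_OP_GNU_const_index), i.e. a plain
   constant that only the linker relocates.  The name "sym_rtx" above is
   hypothetical.  */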
3487
3488 /* Section names used to hold DWARF debugging information. */
3489
3490 #ifndef DEBUG_INFO_SECTION
3491 #define DEBUG_INFO_SECTION ".debug_info"
3492 #endif
3493 #ifndef DEBUG_DWO_INFO_SECTION
3494 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3495 #endif
3496 #ifndef DEBUG_ABBREV_SECTION
3497 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3498 #endif
3499 #ifndef DEBUG_DWO_ABBREV_SECTION
3500 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3501 #endif
3502 #ifndef DEBUG_ARANGES_SECTION
3503 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3504 #endif
3505 #ifndef DEBUG_ADDR_SECTION
3506 #define DEBUG_ADDR_SECTION ".debug_addr"
3507 #endif
3508 #ifndef DEBUG_NORM_MACINFO_SECTION
3509 #define DEBUG_NORM_MACINFO_SECTION ".debug_macinfo"
3510 #endif
3511 #ifndef DEBUG_DWO_MACINFO_SECTION
3512 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
3513 #endif
3514 #ifndef DEBUG_MACINFO_SECTION
3515 #define DEBUG_MACINFO_SECTION \
3516 (!dwarf_split_debug_info \
3517 ? (DEBUG_NORM_MACINFO_SECTION) : (DEBUG_DWO_MACINFO_SECTION))
3518 #endif
3519 #ifndef DEBUG_NORM_MACRO_SECTION
3520 #define DEBUG_NORM_MACRO_SECTION ".debug_macro"
3521 #endif
3522 #ifndef DEBUG_DWO_MACRO_SECTION
3523 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
3524 #endif
3525 #ifndef DEBUG_MACRO_SECTION
3526 #define DEBUG_MACRO_SECTION \
3527 (!dwarf_split_debug_info \
3528 ? (DEBUG_NORM_MACRO_SECTION) : (DEBUG_DWO_MACRO_SECTION))
3529 #endif
3530 #ifndef DEBUG_LINE_SECTION
3531 #define DEBUG_LINE_SECTION ".debug_line"
3532 #endif
3533 #ifndef DEBUG_DWO_LINE_SECTION
3534 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
3535 #endif
3536 #ifndef DEBUG_LOC_SECTION
3537 #define DEBUG_LOC_SECTION ".debug_loc"
3538 #endif
3539 #ifndef DEBUG_DWO_LOC_SECTION
3540 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
3541 #endif
3542 #ifndef DEBUG_PUBNAMES_SECTION
3543 #define DEBUG_PUBNAMES_SECTION \
3544 ((debug_generate_pub_sections == 2) \
3545 ? ".debug_gnu_pubnames" : ".debug_pubnames")
3546 #endif
3547 #ifndef DEBUG_PUBTYPES_SECTION
3548 #define DEBUG_PUBTYPES_SECTION \
3549 ((debug_generate_pub_sections == 2) \
3550 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
3551 #endif
3552 #define DEBUG_NORM_STR_OFFSETS_SECTION ".debug_str_offsets"
3553 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
3554 #ifndef DEBUG_STR_OFFSETS_SECTION
3555 #define DEBUG_STR_OFFSETS_SECTION \
3556 (!dwarf_split_debug_info \
3557 ? (DEBUG_NORM_STR_OFFSETS_SECTION) : (DEBUG_DWO_STR_OFFSETS_SECTION))
3558 #endif
3559 #ifndef DEBUG_STR_DWO_SECTION
3560 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
3561 #endif
3562 #ifndef DEBUG_STR_SECTION
3563 #define DEBUG_STR_SECTION ".debug_str"
3564 #endif
3565 #ifndef DEBUG_RANGES_SECTION
3566 #define DEBUG_RANGES_SECTION ".debug_ranges"
3567 #endif
3568
3569 /* Standard ELF section names for compiled code and data. */
3570 #ifndef TEXT_SECTION_NAME
3571 #define TEXT_SECTION_NAME ".text"
3572 #endif
3573
3574 /* Section flags for .debug_macinfo/.debug_macro section. */
3575 #define DEBUG_MACRO_SECTION_FLAGS \
3576 (dwarf_split_debug_info ? SECTION_DEBUG | SECTION_EXCLUDE : SECTION_DEBUG)
3577
3578 /* Section flags for .debug_str section. */
3579 #define DEBUG_STR_SECTION_FLAGS \
3580 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
3581 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
3582 : SECTION_DEBUG)
3583
3584 /* Section flags for .debug_str.dwo section. */
3585 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
3586
3587 /* Labels we insert at the beginning of sections so that we can reference
3588 them instead of the section names themselves. */
3589
3590 #ifndef TEXT_SECTION_LABEL
3591 #define TEXT_SECTION_LABEL "Ltext"
3592 #endif
3593 #ifndef COLD_TEXT_SECTION_LABEL
3594 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
3595 #endif
3596 #ifndef DEBUG_LINE_SECTION_LABEL
3597 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
3598 #endif
3599 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
3600 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
3601 #endif
3602 #ifndef DEBUG_INFO_SECTION_LABEL
3603 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
3604 #endif
3605 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
3606 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
3607 #endif
3608 #ifndef DEBUG_ABBREV_SECTION_LABEL
3609 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
3610 #endif
3611 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
3612 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
3613 #endif
3614 #ifndef DEBUG_ADDR_SECTION_LABEL
3615 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
3616 #endif
3617 #ifndef DEBUG_LOC_SECTION_LABEL
3618 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
3619 #endif
3620 #ifndef DEBUG_RANGES_SECTION_LABEL
3621 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
3622 #endif
3623 #ifndef DEBUG_MACINFO_SECTION_LABEL
3624 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
3625 #endif
3626 #ifndef DEBUG_MACRO_SECTION_LABEL
3627 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
3628 #endif
3629 #define SKELETON_COMP_DIE_ABBREV 1
3630 #define SKELETON_TYPE_DIE_ABBREV 2
3631
3632 /* Definitions of defaults for formats and names of various special
3633 (artificial) labels which may be generated within this file (when the -g
3634 option is used and DWARF2_DEBUGGING_INFO is in effect).
3635 If necessary, these may be overridden from within the tm.h file, but
3636 typically, overriding these defaults is unnecessary. */
3637
3638 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
3639 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3640 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3641 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
3642 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3643 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3644 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3645 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3646 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3647 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3648 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3649 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3650 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3651 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
3652
3653 #ifndef TEXT_END_LABEL
3654 #define TEXT_END_LABEL "Letext"
3655 #endif
3656 #ifndef COLD_END_LABEL
3657 #define COLD_END_LABEL "Letext_cold"
3658 #endif
3659 #ifndef BLOCK_BEGIN_LABEL
3660 #define BLOCK_BEGIN_LABEL "LBB"
3661 #endif
3662 #ifndef BLOCK_END_LABEL
3663 #define BLOCK_END_LABEL "LBE"
3664 #endif
3665 #ifndef LINE_CODE_LABEL
3666 #define LINE_CODE_LABEL "LM"
3667 #endif
3668
3669 \f
3670 /* Return the root of the DIEs built for the current compilation unit. */
3671 static dw_die_ref
3672 comp_unit_die (void)
3673 {
3674 if (!single_comp_unit_die)
3675 single_comp_unit_die = gen_compile_unit_die (NULL);
3676 return single_comp_unit_die;
3677 }
3678
3679 /* We allow a language front-end to designate a function that is to be
3680 called to "demangle" any name before it is put into a DIE. */
3681
3682 static const char *(*demangle_name_func) (const char *);
3683
3684 void
3685 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
3686 {
3687 demangle_name_func = func;
3688 }
3689
3690 /* Test if rtl node points to a pseudo register. */
3691
3692 static inline int
3693 is_pseudo_reg (const_rtx rtl)
3694 {
3695 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
3696 || (GET_CODE (rtl) == SUBREG
3697 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
3698 }
3699
3700 /* Return a reference to a type, with its const and volatile qualifiers
3701 removed. */
3702
3703 static inline tree
3704 type_main_variant (tree type)
3705 {
3706 type = TYPE_MAIN_VARIANT (type);
3707
3708 /* ??? There really should be only one main variant among any group of
3709 variants of a given type (and all of the MAIN_VARIANT values for all
3710 members of the group should point to that one type) but sometimes the C
3711 front-end messes this up for array types, so we work around that bug
3712 here. */
3713 if (TREE_CODE (type) == ARRAY_TYPE)
3714 while (type != TYPE_MAIN_VARIANT (type))
3715 type = TYPE_MAIN_VARIANT (type);
3716
3717 return type;
3718 }
3719
3720 /* Return nonzero if the given type node represents a tagged type. */
3721
3722 static inline int
3723 is_tagged_type (const_tree type)
3724 {
3725 enum tree_code code = TREE_CODE (type);
3726
3727 return (code == RECORD_TYPE || code == UNION_TYPE
3728 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
3729 }
3730
3731 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
3732
3733 static void
3734 get_ref_die_offset_label (char *label, dw_die_ref ref)
3735 {
3736 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
3737 }
3738
3739 /* Return die_offset of a DIE reference to a base type. */
3740
3741 static unsigned long int
3742 get_base_type_offset (dw_die_ref ref)
3743 {
3744 if (ref->die_offset)
3745 return ref->die_offset;
3746 if (comp_unit_die ()->die_abbrev)
3747 {
3748 calc_base_type_die_sizes ();
3749 gcc_assert (ref->die_offset);
3750 }
3751 return ref->die_offset;
3752 }
3753
3754 /* Return die_offset of a DIE reference other than base type. */
3755
3756 static unsigned long int
3757 get_ref_die_offset (dw_die_ref ref)
3758 {
3759 gcc_assert (ref->die_offset);
3760 return ref->die_offset;
3761 }
3762
3763 /* Convert a DIE tag into its string name. */
3764
3765 static const char *
3766 dwarf_tag_name (unsigned int tag)
3767 {
3768 const char *name = get_DW_TAG_name (tag);
3769
3770 if (name != NULL)
3771 return name;
3772
3773 return "DW_TAG_<unknown>";
3774 }
3775
3776 /* Convert a DWARF attribute code into its string name. */
3777
3778 static const char *
3779 dwarf_attr_name (unsigned int attr)
3780 {
3781 const char *name;
3782
3783 switch (attr)
3784 {
3785 #if VMS_DEBUGGING_INFO
3786 case DW_AT_HP_prologue:
3787 return "DW_AT_HP_prologue";
3788 #else
3789 case DW_AT_MIPS_loop_unroll_factor:
3790 return "DW_AT_MIPS_loop_unroll_factor";
3791 #endif
3792
3793 #if VMS_DEBUGGING_INFO
3794 case DW_AT_HP_epilogue:
3795 return "DW_AT_HP_epilogue";
3796 #else
3797 case DW_AT_MIPS_stride:
3798 return "DW_AT_MIPS_stride";
3799 #endif
3800 }
3801
3802 name = get_DW_AT_name (attr);
3803
3804 if (name != NULL)
3805 return name;
3806
3807 return "DW_AT_<unknown>";
3808 }
3809
3810 /* Convert a DWARF value form code into its string name. */
3811
3812 static const char *
3813 dwarf_form_name (unsigned int form)
3814 {
3815 const char *name = get_DW_FORM_name (form);
3816
3817 if (name != NULL)
3818 return name;
3819
3820 return "DW_FORM_<unknown>";
3821 }
3822 \f
3823 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
3824 instance of an inlined instance of a decl which is local to an inline
3825 function, so we have to trace all of the way back through the origin chain
3826 to find out what sort of node actually served as the original seed for the
3827 given block. */
3828
3829 static tree
3830 decl_ultimate_origin (const_tree decl)
3831 {
3832 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
3833 return NULL_TREE;
3834
3835 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
3836 we're trying to output the abstract instance of this function. */
3837 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
3838 return NULL_TREE;
3839
3840 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
3841 most distant ancestor, this should never happen. */
3842 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
3843
3844 return DECL_ABSTRACT_ORIGIN (decl);
3845 }
3846
3847 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
3848 of a virtual function may refer to a base class, so we check the 'this'
3849 parameter. */
3850
3851 static tree
3852 decl_class_context (tree decl)
3853 {
3854 tree context = NULL_TREE;
3855
3856 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
3857 context = DECL_CONTEXT (decl);
3858 else
3859 context = TYPE_MAIN_VARIANT
3860 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
3861
3862 if (context && !TYPE_P (context))
3863 context = NULL_TREE;
3864
3865 return context;
3866 }
3867 \f
3868 /* Add an attribute/value pair to a DIE. */
3869
3870 static inline void
3871 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
3872 {
3873 /* Maybe this should be an assert? */
3874 if (die == NULL)
3875 return;
3876
3877 vec_safe_reserve (die->die_attr, 1);
3878 vec_safe_push (die->die_attr, *attr);
3879 }
3880
3881 static inline enum dw_val_class
3882 AT_class (dw_attr_node *a)
3883 {
3884 return a->dw_attr_val.val_class;
3885 }
3886
3887 /* Return the index for any attribute that will be referenced with a
3888 DW_FORM_GNU_addr_index or DW_FORM_GNU_str_index. String indices
3889 are stored in dw_attr_val.v.val_str for reference counting
3890 pruning. */
3891
3892 static inline unsigned int
3893 AT_index (dw_attr_node *a)
3894 {
3895 if (AT_class (a) == dw_val_class_str)
3896 return a->dw_attr_val.v.val_str->index;
3897 else if (a->dw_attr_val.val_entry != NULL)
3898 return a->dw_attr_val.val_entry->index;
3899 return NOT_INDEXED;
3900 }
3901
3902 /* Add a flag value attribute to a DIE. */
3903
3904 static inline void
3905 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
3906 {
3907 dw_attr_node attr;
3908
3909 attr.dw_attr = attr_kind;
3910 attr.dw_attr_val.val_class = dw_val_class_flag;
3911 attr.dw_attr_val.val_entry = NULL;
3912 attr.dw_attr_val.v.val_flag = flag;
3913 add_dwarf_attr (die, &attr);
3914 }
3915
3916 static inline unsigned
3917 AT_flag (dw_attr_node *a)
3918 {
3919 gcc_assert (a && AT_class (a) == dw_val_class_flag);
3920 return a->dw_attr_val.v.val_flag;
3921 }
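/* Note, not part of the original source: every add_AT_* helper below follows
   the same pattern -- fill a stack-allocated dw_attr_node with the attribute
   code, a value class and a value, then hand it to add_dwarf_attr, which
   copies it into the DIE's attribute vector.  For example, a caller might
   mark a hypothetical DIE as artificial and record its size with

       add_AT_flag (die, DW_AT_artificial, 1);
       add_AT_unsigned (die, DW_AT_byte_size, 4);

   and the matching AT_flag / AT_unsigned accessors assert the value class
   before reading the union member back.  */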
3922
3923 /* Add a signed integer attribute value to a DIE. */
3924
3925 static inline void
3926 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
3927 {
3928 dw_attr_node attr;
3929
3930 attr.dw_attr = attr_kind;
3931 attr.dw_attr_val.val_class = dw_val_class_const;
3932 attr.dw_attr_val.val_entry = NULL;
3933 attr.dw_attr_val.v.val_int = int_val;
3934 add_dwarf_attr (die, &attr);
3935 }
3936
3937 static inline HOST_WIDE_INT
3938 AT_int (dw_attr_node *a)
3939 {
3940 gcc_assert (a && AT_class (a) == dw_val_class_const);
3941 return a->dw_attr_val.v.val_int;
3942 }
3943
3944 /* Add an unsigned integer attribute value to a DIE. */
3945
3946 static inline void
3947 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
3948 unsigned HOST_WIDE_INT unsigned_val)
3949 {
3950 dw_attr_node attr;
3951
3952 attr.dw_attr = attr_kind;
3953 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
3954 attr.dw_attr_val.val_entry = NULL;
3955 attr.dw_attr_val.v.val_unsigned = unsigned_val;
3956 add_dwarf_attr (die, &attr);
3957 }
3958
3959 static inline unsigned HOST_WIDE_INT
3960 AT_unsigned (dw_attr_node *a)
3961 {
3962 gcc_assert (a && AT_class (a) == dw_val_class_unsigned_const);
3963 return a->dw_attr_val.v.val_unsigned;
3964 }
3965
3966 /* Add an unsigned wide integer attribute value to a DIE. */
3967
3968 static inline void
3969 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
3970 const wide_int& w)
3971 {
3972 dw_attr_node attr;
3973
3974 attr.dw_attr = attr_kind;
3975 attr.dw_attr_val.val_class = dw_val_class_wide_int;
3976 attr.dw_attr_val.val_entry = NULL;
3977 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
3978 *attr.dw_attr_val.v.val_wide = w;
3979 add_dwarf_attr (die, &attr);
3980 }
3981
3982 /* Add an unsigned double integer attribute value to a DIE. */
3983
3984 static inline void
3985 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
3986 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
3987 {
3988 dw_attr_node attr;
3989
3990 attr.dw_attr = attr_kind;
3991 attr.dw_attr_val.val_class = dw_val_class_const_double;
3992 attr.dw_attr_val.val_entry = NULL;
3993 attr.dw_attr_val.v.val_double.high = high;
3994 attr.dw_attr_val.v.val_double.low = low;
3995 add_dwarf_attr (die, &attr);
3996 }
3997
3998 /* Add a byte-array (e.g. floating point constant) attribute value to a DIE. */
3999
4000 static inline void
4001 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4002 unsigned int length, unsigned int elt_size, unsigned char *array)
4003 {
4004 dw_attr_node attr;
4005
4006 attr.dw_attr = attr_kind;
4007 attr.dw_attr_val.val_class = dw_val_class_vec;
4008 attr.dw_attr_val.val_entry = NULL;
4009 attr.dw_attr_val.v.val_vec.length = length;
4010 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4011 attr.dw_attr_val.v.val_vec.array = array;
4012 add_dwarf_attr (die, &attr);
4013 }
4014
4015 /* Add an 8-byte data attribute value to a DIE. */
4016
4017 static inline void
4018 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4019 unsigned char data8[8])
4020 {
4021 dw_attr_node attr;
4022
4023 attr.dw_attr = attr_kind;
4024 attr.dw_attr_val.val_class = dw_val_class_data8;
4025 attr.dw_attr_val.val_entry = NULL;
4026 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4027 add_dwarf_attr (die, &attr);
4028 }
4029
4030 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4031 dwarf_split_debug_info, address attributes in dies destined for the
4032 final executable have force_direct set to avoid using indexed
4033 references. */
4034
4035 static inline void
4036 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4037 bool force_direct)
4038 {
4039 dw_attr_node attr;
4040 char * lbl_id;
4041
4042 lbl_id = xstrdup (lbl_low);
4043 attr.dw_attr = DW_AT_low_pc;
4044 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4045 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4046 if (dwarf_split_debug_info && !force_direct)
4047 attr.dw_attr_val.val_entry
4048 = add_addr_table_entry (lbl_id, ate_kind_label);
4049 else
4050 attr.dw_attr_val.val_entry = NULL;
4051 add_dwarf_attr (die, &attr);
4052
4053 attr.dw_attr = DW_AT_high_pc;
4054 if (dwarf_version < 4)
4055 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4056 else
4057 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4058 lbl_id = xstrdup (lbl_high);
4059 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4060 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4061 && dwarf_split_debug_info && !force_direct)
4062 attr.dw_attr_val.val_entry
4063 = add_addr_table_entry (lbl_id, ate_kind_label);
4064 else
4065 attr.dw_attr_val.val_entry = NULL;
4066 add_dwarf_attr (die, &attr);
4067 }
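/* Usage sketch, an assumption rather than a quote from the original source:
   a typical caller passes the two assembler labels bounding a code range,
   for instance

       add_AT_low_high_pc (subr_die, begin_label, end_label, false);

   where the label variable names are hypothetical.  With DWARF 4 or later
   the high-pc value is classed as dw_val_class_high_pc so that it can be
   emitted relative to the low pc; with older versions both ends are plain
   label references.  */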
4068
4069 /* Hash and equality functions for debug_str_hash. */
4070
4071 hashval_t
4072 indirect_string_hasher::hash (indirect_string_node *x)
4073 {
4074 return htab_hash_string (x->str);
4075 }
4076
4077 bool
4078 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4079 {
4080 return strcmp (x1->str, x2) == 0;
4081 }
4082
4083 /* Add STR to the given string hash table. */
4084
4085 static struct indirect_string_node *
4086 find_AT_string_in_table (const char *str,
4087 hash_table<indirect_string_hasher> *table)
4088 {
4089 struct indirect_string_node *node;
4090
4091 indirect_string_node **slot
4092 = table->find_slot_with_hash (str, htab_hash_string (str), INSERT);
4093 if (*slot == NULL)
4094 {
4095 node = ggc_cleared_alloc<indirect_string_node> ();
4096 node->str = ggc_strdup (str);
4097 *slot = node;
4098 }
4099 else
4100 node = *slot;
4101
4102 node->refcount++;
4103 return node;
4104 }
4105
4106 /* Add STR to the indirect string hash table. */
4107
4108 static struct indirect_string_node *
4109 find_AT_string (const char *str)
4110 {
4111 if (! debug_str_hash)
4112 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4113
4114 return find_AT_string_in_table (str, debug_str_hash);
4115 }
4116
4117 /* Add a string attribute value to a DIE. */
4118
4119 static inline void
4120 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4121 {
4122 dw_attr_node attr;
4123 struct indirect_string_node *node;
4124
4125 node = find_AT_string (str);
4126
4127 attr.dw_attr = attr_kind;
4128 attr.dw_attr_val.val_class = dw_val_class_str;
4129 attr.dw_attr_val.val_entry = NULL;
4130 attr.dw_attr_val.v.val_str = node;
4131 add_dwarf_attr (die, &attr);
4132 }
4133
4134 static inline const char *
4135 AT_string (dw_attr_node *a)
4136 {
4137 gcc_assert (a && AT_class (a) == dw_val_class_str);
4138 return a->dw_attr_val.v.val_str->str;
4139 }
4140
4141 /* Call this function directly to bypass AT_string_form's logic to put
4142 the string inline in the die. */
4143
4144 static void
4145 set_indirect_string (struct indirect_string_node *node)
4146 {
4147 char label[32];
4148 /* Already indirect is a no op. */
4149 if (node->form == DW_FORM_strp || node->form == DW_FORM_GNU_str_index)
4150 {
4151 gcc_assert (node->label);
4152 return;
4153 }
4154 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4155 ++dw2_string_counter;
4156 node->label = xstrdup (label);
4157
4158 if (!dwarf_split_debug_info)
4159 {
4160 node->form = DW_FORM_strp;
4161 node->index = NOT_INDEXED;
4162 }
4163 else
4164 {
4165 node->form = DW_FORM_GNU_str_index;
4166 node->index = NO_INDEX_ASSIGNED;
4167 }
4168 }
4169
4170 /* Find out whether a string should be output inline in the DIE
4171 or out-of-line in the .debug_str section. */
4172
4173 static enum dwarf_form
4174 find_string_form (struct indirect_string_node *node)
4175 {
4176 unsigned int len;
4177
4178 if (node->form)
4179 return node->form;
4180
4181 len = strlen (node->str) + 1;
4182
4183 /* If the string is shorter or equal to the size of the reference, it is
4184 always better to put it inline. */
4185 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4186 return node->form = DW_FORM_string;
4187
4188 /* If we cannot expect the linker to merge strings in .debug_str
4189 section, only put it into .debug_str if it is worth even in this
4190 single module. */
4191 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4192 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4193 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4194 return node->form = DW_FORM_string;
4195
4196 set_indirect_string (node);
4197
4198 return node->form;
4199 }
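/* Worked example of the heuristic above, not part of the original source:
   with the common 4-byte DWARF_OFFSET_SIZE, the 3-character string "abc"
   occupies 4 bytes inline (including the NUL), no more than a .debug_str
   offset, so it stays DW_FORM_string.  A 20-character name referenced from
   three DIEs costs 3 * 21 = 63 bytes inline but only 3 * 4 + 21 = 33 bytes
   through .debug_str, so set_indirect_string is chosen -- unless the target
   lacks mergeable string support, in which case the
   (len - DWARF_OFFSET_SIZE) * refcount <= len test keeps short or rarely
   referenced strings inline.  */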
4200
4201 /* Find out whether the string referenced from the attribute should be
4202 output inline in the DIE or out-of-line in the .debug_str section. */
4203
4204 static enum dwarf_form
4205 AT_string_form (dw_attr_node *a)
4206 {
4207 gcc_assert (a && AT_class (a) == dw_val_class_str);
4208 return find_string_form (a->dw_attr_val.v.val_str);
4209 }
4210
4211 /* Add a DIE reference attribute value to a DIE. */
4212
4213 static inline void
4214 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4215 {
4216 dw_attr_node attr;
4217 gcc_checking_assert (targ_die != NULL);
4218
4219 /* With LTO we can end up trying to reference something we didn't create
4220 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4221 if (targ_die == NULL)
4222 return;
4223
4224 attr.dw_attr = attr_kind;
4225 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4226 attr.dw_attr_val.val_entry = NULL;
4227 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4228 attr.dw_attr_val.v.val_die_ref.external = 0;
4229 add_dwarf_attr (die, &attr);
4230 }
4231
4232 /* Change DIE reference REF to point to NEW_DIE instead. */
4233
4234 static inline void
4235 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4236 {
4237 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4238 ref->dw_attr_val.v.val_die_ref.die = new_die;
4239 ref->dw_attr_val.v.val_die_ref.external = 0;
4240 }
4241
4242 /* Add an AT_specification attribute to a DIE, and also make the back
4243 pointer from the specification to the definition. */
4244
4245 static inline void
4246 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4247 {
4248 add_AT_die_ref (die, DW_AT_specification, targ_die);
4249 gcc_assert (!targ_die->die_definition);
4250 targ_die->die_definition = die;
4251 }
4252
4253 static inline dw_die_ref
4254 AT_ref (dw_attr_node *a)
4255 {
4256 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4257 return a->dw_attr_val.v.val_die_ref.die;
4258 }
4259
4260 static inline int
4261 AT_ref_external (dw_attr_node *a)
4262 {
4263 if (a && AT_class (a) == dw_val_class_die_ref)
4264 return a->dw_attr_val.v.val_die_ref.external;
4265
4266 return 0;
4267 }
4268
4269 static inline void
4270 set_AT_ref_external (dw_attr_node *a, int i)
4271 {
4272 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4273 a->dw_attr_val.v.val_die_ref.external = i;
4274 }
4275
4276 /* Add an FDE reference attribute value to a DIE. */
4277
4278 static inline void
4279 add_AT_fde_ref (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int targ_fde)
4280 {
4281 dw_attr_node attr;
4282
4283 attr.dw_attr = attr_kind;
4284 attr.dw_attr_val.val_class = dw_val_class_fde_ref;
4285 attr.dw_attr_val.val_entry = NULL;
4286 attr.dw_attr_val.v.val_fde_index = targ_fde;
4287 add_dwarf_attr (die, &attr);
4288 }
4289
4290 /* Add a location description attribute value to a DIE. */
4291
4292 static inline void
4293 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4294 {
4295 dw_attr_node attr;
4296
4297 attr.dw_attr = attr_kind;
4298 attr.dw_attr_val.val_class = dw_val_class_loc;
4299 attr.dw_attr_val.val_entry = NULL;
4300 attr.dw_attr_val.v.val_loc = loc;
4301 add_dwarf_attr (die, &attr);
4302 }
4303
4304 static inline dw_loc_descr_ref
4305 AT_loc (dw_attr_node *a)
4306 {
4307 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4308 return a->dw_attr_val.v.val_loc;
4309 }
4310
4311 static inline void
4312 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4313 {
4314 dw_attr_node attr;
4315
4316 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4317 return;
4318
4319 attr.dw_attr = attr_kind;
4320 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4321 attr.dw_attr_val.val_entry = NULL;
4322 attr.dw_attr_val.v.val_loc_list = loc_list;
4323 add_dwarf_attr (die, &attr);
4324 have_location_lists = true;
4325 }
4326
4327 static inline dw_loc_list_ref
4328 AT_loc_list (dw_attr_node *a)
4329 {
4330 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4331 return a->dw_attr_val.v.val_loc_list;
4332 }
4333
4334 static inline dw_loc_list_ref *
4335 AT_loc_list_ptr (dw_attr_node *a)
4336 {
4337 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4338 return &a->dw_attr_val.v.val_loc_list;
4339 }
4340
4341 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4342 {
4343 static hashval_t hash (addr_table_entry *);
4344 static bool equal (addr_table_entry *, addr_table_entry *);
4345 };
4346
4347 /* Table of entries into the .debug_addr section. */
4348
4349 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4350
4351 /* Hash an address_table_entry. */
4352
4353 hashval_t
4354 addr_hasher::hash (addr_table_entry *a)
4355 {
4356 inchash::hash hstate;
4357 switch (a->kind)
4358 {
4359 case ate_kind_rtx:
4360 hstate.add_int (0);
4361 break;
4362 case ate_kind_rtx_dtprel:
4363 hstate.add_int (1);
4364 break;
4365 case ate_kind_label:
4366 return htab_hash_string (a->addr.label);
4367 default:
4368 gcc_unreachable ();
4369 }
4370 inchash::add_rtx (a->addr.rtl, hstate);
4371 return hstate.end ();
4372 }
4373
4374 /* Determine equality for two address_table_entries. */
4375
4376 bool
4377 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4378 {
4379 if (a1->kind != a2->kind)
4380 return 0;
4381 switch (a1->kind)
4382 {
4383 case ate_kind_rtx:
4384 case ate_kind_rtx_dtprel:
4385 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
4386 case ate_kind_label:
4387 return strcmp (a1->addr.label, a2->addr.label) == 0;
4388 default:
4389 gcc_unreachable ();
4390 }
4391 }
4392
4393 /* Initialize an addr_table_entry. */
4394
4395 void
4396 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
4397 {
4398 e->kind = kind;
4399 switch (kind)
4400 {
4401 case ate_kind_rtx:
4402 case ate_kind_rtx_dtprel:
4403 e->addr.rtl = (rtx) addr;
4404 break;
4405 case ate_kind_label:
4406 e->addr.label = (char *) addr;
4407 break;
4408 }
4409 e->refcount = 0;
4410 e->index = NO_INDEX_ASSIGNED;
4411 }
4412
4413 /* Add an address table entry for ADDR of kind KIND to the table, creating
4414 it if necessary. Defer setting an index until output time. */
4415
4416 static addr_table_entry *
4417 add_addr_table_entry (void *addr, enum ate_kind kind)
4418 {
4419 addr_table_entry *node;
4420 addr_table_entry finder;
4421
4422 gcc_assert (dwarf_split_debug_info);
4423 if (! addr_index_table)
4424 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
4425 init_addr_table_entry (&finder, kind, addr);
4426 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
4427
4428 if (*slot == HTAB_EMPTY_ENTRY)
4429 {
4430 node = ggc_cleared_alloc<addr_table_entry> ();
4431 init_addr_table_entry (node, kind, addr);
4432 *slot = node;
4433 }
4434 else
4435 node = *slot;
4436
4437 node->refcount++;
4438 return node;
4439 }
4440
4441 /* Remove an entry from the addr table by decrementing its refcount.
4442 Strictly, decrementing the refcount would be enough, but the
4443 assertion that the entry is actually in the table has found
4444 bugs. */
4445
4446 static void
4447 remove_addr_table_entry (addr_table_entry *entry)
4448 {
4449 gcc_assert (dwarf_split_debug_info && addr_index_table);
4450 /* After an index is assigned, the table is frozen. */
4451 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
4452 entry->refcount--;
4453 }
4454
4455 /* Given a location list, remove all addresses it refers to from the
4456 address_table. */
4457
4458 static void
4459 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
4460 {
4461 for (; descr; descr = descr->dw_loc_next)
4462 if (descr->dw_loc_oprnd1.val_entry != NULL)
4463 {
4464 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
4465 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
4466 }
4467 }
4468
4469 /* A helper function for dwarf2out_finish called through
4470 htab_traverse. Assign an addr_table_entry its index. All entries
4471 must be collected into the table when this function is called,
4472 because the indexing code relies on htab_traverse to traverse nodes
4473 in the same order for each run. */
4474
4475 int
4476 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
4477 {
4478 addr_table_entry *node = *h;
4479
4480 /* Don't index unreferenced nodes. */
4481 if (node->refcount == 0)
4482 return 1;
4483
4484 gcc_assert (node->index == NO_INDEX_ASSIGNED);
4485 node->index = *index;
4486 *index += 1;
4487
4488 return 1;
4489 }
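/* Life-cycle sketch, not part of the original source: under -gsplit-dwarf
   an address table entry is created lazily and reference counted,

       addr_table_entry *e = add_addr_table_entry (lbl, ate_kind_label);

   (the name "lbl" is hypothetical); repeated additions of the same label or
   rtx share one entry, entries whose users are pruned go through
   remove_addr_table_entry, and at the end of compilation
   index_addr_table_entry assigns consecutive indices to every entry that is
   still referenced.  */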
4490
4491 /* Add an address constant attribute value to a DIE. When using
4492 dwarf_split_debug_info, address attributes in dies destined for the
4493 final executable should be direct references--setting the parameter
4494 force_direct ensures this behavior. */
4495
4496 static inline void
4497 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
4498 bool force_direct)
4499 {
4500 dw_attr_node attr;
4501
4502 attr.dw_attr = attr_kind;
4503 attr.dw_attr_val.val_class = dw_val_class_addr;
4504 attr.dw_attr_val.v.val_addr = addr;
4505 if (dwarf_split_debug_info && !force_direct)
4506 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
4507 else
4508 attr.dw_attr_val.val_entry = NULL;
4509 add_dwarf_attr (die, &attr);
4510 }
4511
4512 /* Get the RTX from an address DIE attribute. */
4513
4514 static inline rtx
4515 AT_addr (dw_attr_node *a)
4516 {
4517 gcc_assert (a && AT_class (a) == dw_val_class_addr);
4518 return a->dw_attr_val.v.val_addr;
4519 }
4520
4521 /* Add a file attribute value to a DIE. */
4522
4523 static inline void
4524 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
4525 struct dwarf_file_data *fd)
4526 {
4527 dw_attr_node attr;
4528
4529 attr.dw_attr = attr_kind;
4530 attr.dw_attr_val.val_class = dw_val_class_file;
4531 attr.dw_attr_val.val_entry = NULL;
4532 attr.dw_attr_val.v.val_file = fd;
4533 add_dwarf_attr (die, &attr);
4534 }
4535
4536 /* Get the dwarf_file_data from a file DIE attribute. */
4537
4538 static inline struct dwarf_file_data *
4539 AT_file (dw_attr_node *a)
4540 {
4541 gcc_assert (a && AT_class (a) == dw_val_class_file);
4542 return a->dw_attr_val.v.val_file;
4543 }
4544
4545 /* Add a VMS delta attribute value to a DIE. */
4546
4547 static inline void
4548 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
4549 const char *lbl1, const char *lbl2)
4550 {
4551 dw_attr_node attr;
4552
4553 attr.dw_attr = attr_kind;
4554 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
4555 attr.dw_attr_val.val_entry = NULL;
4556 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
4557 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
4558 add_dwarf_attr (die, &attr);
4559 }
4560
4561 /* Add a label identifier attribute value to a DIE. */
4562
4563 static inline void
4564 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
4565 const char *lbl_id)
4566 {
4567 dw_attr_node attr;
4568
4569 attr.dw_attr = attr_kind;
4570 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4571 attr.dw_attr_val.val_entry = NULL;
4572 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
4573 if (dwarf_split_debug_info)
4574 attr.dw_attr_val.val_entry
4575 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
4576 ate_kind_label);
4577 add_dwarf_attr (die, &attr);
4578 }
4579
4580 /* Add a section offset attribute value to a DIE, an offset into the
4581 debug_line section. */
4582
4583 static inline void
4584 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
4585 const char *label)
4586 {
4587 dw_attr_node attr;
4588
4589 attr.dw_attr = attr_kind;
4590 attr.dw_attr_val.val_class = dw_val_class_lineptr;
4591 attr.dw_attr_val.val_entry = NULL;
4592 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
4593 add_dwarf_attr (die, &attr);
4594 }
4595
4596 /* Add a section offset attribute value to a DIE, an offset into the
4597 debug_macinfo section. */
4598
4599 static inline void
4600 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
4601 const char *label)
4602 {
4603 dw_attr_node attr;
4604
4605 attr.dw_attr = attr_kind;
4606 attr.dw_attr_val.val_class = dw_val_class_macptr;
4607 attr.dw_attr_val.val_entry = NULL;
4608 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
4609 add_dwarf_attr (die, &attr);
4610 }
4611
4612 /* Add an offset attribute value to a DIE. */
4613
4614 static inline void
4615 add_AT_offset (dw_die_ref die, enum dwarf_attribute attr_kind,
4616 unsigned HOST_WIDE_INT offset)
4617 {
4618 dw_attr_node attr;
4619
4620 attr.dw_attr = attr_kind;
4621 attr.dw_attr_val.val_class = dw_val_class_offset;
4622 attr.dw_attr_val.val_entry = NULL;
4623 attr.dw_attr_val.v.val_offset = offset;
4624 add_dwarf_attr (die, &attr);
4625 }
4626
4627 /* Add a range_list attribute value to a DIE. When using
4628 dwarf_split_debug_info, address attributes in dies destined for the
4629 final executable should be direct references--setting the parameter
4630 force_direct ensures this behavior. */
4631
4632 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
4633 #define RELOCATED_OFFSET (NULL)
4634
4635 static void
4636 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
4637 long unsigned int offset, bool force_direct)
4638 {
4639 dw_attr_node attr;
4640
4641 attr.dw_attr = attr_kind;
4642 attr.dw_attr_val.val_class = dw_val_class_range_list;
4643 /* For the range_list attribute, use val_entry to store whether the
4644 offset should follow split-debug-info or normal semantics. This
4645 value is read in output_range_list_offset. */
4646 if (dwarf_split_debug_info && !force_direct)
4647 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
4648 else
4649 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
4650 attr.dw_attr_val.v.val_offset = offset;
4651 add_dwarf_attr (die, &attr);
4652 }
4653
4654 /* Return the start label of a delta attribute. */
4655
4656 static inline const char *
4657 AT_vms_delta1 (dw_attr_node *a)
4658 {
4659 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
4660 return a->dw_attr_val.v.val_vms_delta.lbl1;
4661 }
4662
4663 /* Return the end label of a delta attribute. */
4664
4665 static inline const char *
4666 AT_vms_delta2 (dw_attr_node *a)
4667 {
4668 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
4669 return a->dw_attr_val.v.val_vms_delta.lbl2;
4670 }
4671
4672 static inline const char *
4673 AT_lbl (dw_attr_node *a)
4674 {
4675 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
4676 || AT_class (a) == dw_val_class_lineptr
4677 || AT_class (a) == dw_val_class_macptr
4678 || AT_class (a) == dw_val_class_high_pc));
4679 return a->dw_attr_val.v.val_lbl_id;
4680 }
4681
4682 /* Get the attribute of type attr_kind. */
4683
4684 static dw_attr_node *
4685 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
4686 {
4687 dw_attr_node *a;
4688 unsigned ix;
4689 dw_die_ref spec = NULL;
4690
4691 if (! die)
4692 return NULL;
4693
4694 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4695 if (a->dw_attr == attr_kind)
4696 return a;
4697 else if (a->dw_attr == DW_AT_specification
4698 || a->dw_attr == DW_AT_abstract_origin)
4699 spec = AT_ref (a);
4700
4701 if (spec)
4702 return get_AT (spec, attr_kind);
4703
4704 return NULL;
4705 }
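/* Example of the indirection above, not part of the original source: if a
   member function definition DIE only carries DW_AT_specification pointing
   at the in-class declaration, then

       get_AT_string (def_die, DW_AT_name)

   still finds the name, because get_AT recurses through
   DW_AT_specification / DW_AT_abstract_origin whenever the attribute is not
   present on the DIE itself.  The name "def_die" is hypothetical.  */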
4706
4707 /* Returns the parent of the declaration of DIE. */
4708
4709 static dw_die_ref
4710 get_die_parent (dw_die_ref die)
4711 {
4712 dw_die_ref t;
4713
4714 if (!die)
4715 return NULL;
4716
4717 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
4718 || (t = get_AT_ref (die, DW_AT_specification)))
4719 die = t;
4720
4721 return die->die_parent;
4722 }
4723
4724 /* Return the "low pc" attribute value, typically associated with a subprogram
4725 DIE. Return null if the "low pc" attribute is either not present, or if it
4726 cannot be represented as an assembler label identifier. */
4727
4728 static inline const char *
4729 get_AT_low_pc (dw_die_ref die)
4730 {
4731 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
4732
4733 return a ? AT_lbl (a) : NULL;
4734 }
4735
4736 /* Return the "high pc" attribute value, typically associated with a subprogram
4737 DIE. Return null if the "high pc" attribute is either not present, or if it
4738 cannot be represented as an assembler label identifier. */
4739
4740 static inline const char *
4741 get_AT_hi_pc (dw_die_ref die)
4742 {
4743 dw_attr_node *a = get_AT (die, DW_AT_high_pc);
4744
4745 return a ? AT_lbl (a) : NULL;
4746 }
4747
4748 /* Return the value of the string attribute designated by ATTR_KIND, or
4749 NULL if it is not present. */
4750
4751 static inline const char *
4752 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
4753 {
4754 dw_attr_node *a = get_AT (die, attr_kind);
4755
4756 return a ? AT_string (a) : NULL;
4757 }
4758
4759 /* Return the value of the flag attribute designated by ATTR_KIND, or 0
4760 if it is not present. */
4761
4762 static inline int
4763 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
4764 {
4765 dw_attr_node *a = get_AT (die, attr_kind);
4766
4767 return a ? AT_flag (a) : 0;
4768 }
4769
4770 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
4771 if it is not present. */
4772
4773 static inline unsigned
4774 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
4775 {
4776 dw_attr_node *a = get_AT (die, attr_kind);
4777
4778 return a ? AT_unsigned (a) : 0;
4779 }
4780
4781 static inline dw_die_ref
4782 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
4783 {
4784 dw_attr_node *a = get_AT (die, attr_kind);
4785
4786 return a ? AT_ref (a) : NULL;
4787 }
4788
4789 static inline struct dwarf_file_data *
4790 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
4791 {
4792 dw_attr_node *a = get_AT (die, attr_kind);
4793
4794 return a ? AT_file (a) : NULL;
4795 }
4796
4797 /* Return TRUE if the language is C++. */
4798
4799 static inline bool
4800 is_cxx (void)
4801 {
4802 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
4803
4804 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
4805 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
4806 }
4807
4808 /* Return TRUE if the language is Java. */
4809
4810 static inline bool
4811 is_java (void)
4812 {
4813 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
4814
4815 return lang == DW_LANG_Java;
4816 }
4817
4818 /* Return TRUE if the language is Fortran. */
4819
4820 static inline bool
4821 is_fortran (void)
4822 {
4823 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
4824
4825 return (lang == DW_LANG_Fortran77
4826 || lang == DW_LANG_Fortran90
4827 || lang == DW_LANG_Fortran95
4828 || lang == DW_LANG_Fortran03
4829 || lang == DW_LANG_Fortran08);
4830 }
4831
4832 /* Return TRUE if the language is Ada. */
4833
4834 static inline bool
4835 is_ada (void)
4836 {
4837 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
4838
4839 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
4840 }
4841
4842 /* Remove the specified attribute if present. Return TRUE if removal
4843 was successful. */
4844
4845 static bool
4846 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
4847 {
4848 dw_attr_node *a;
4849 unsigned ix;
4850
4851 if (! die)
4852 return false;
4853
4854 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4855 if (a->dw_attr == attr_kind)
4856 {
4857 if (AT_class (a) == dw_val_class_str)
4858 if (a->dw_attr_val.v.val_str->refcount)
4859 a->dw_attr_val.v.val_str->refcount--;
4860
4861 /* vec::ordered_remove should help reduce the number of abbrevs
4862 that are needed. */
4863 die->die_attr->ordered_remove (ix);
4864 return true;
4865 }
4866 return false;
4867 }
4868
4869 /* Remove CHILD from its parent. PREV must have the property that
4870 PREV->DIE_SIB == CHILD. Does not alter CHILD. */
4871
4872 static void
4873 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
4874 {
4875 gcc_assert (child->die_parent == prev->die_parent);
4876 gcc_assert (prev->die_sib == child);
4877 if (prev == child)
4878 {
4879 gcc_assert (child->die_parent->die_child == child);
4880 prev = NULL;
4881 }
4882 else
4883 prev->die_sib = child->die_sib;
4884 if (child->die_parent->die_child == child)
4885 child->die_parent->die_child = prev;
4886 }
4887
4888 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
4889 PREV->DIE_SIB == OLD_CHILD. Does not alter OLD_CHILD. */
4890
4891 static void
4892 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
4893 {
4894 dw_die_ref parent = old_child->die_parent;
4895
4896 gcc_assert (parent == prev->die_parent);
4897 gcc_assert (prev->die_sib == old_child);
4898
4899 new_child->die_parent = parent;
4900 if (prev == old_child)
4901 {
4902 gcc_assert (parent->die_child == old_child);
4903 new_child->die_sib = new_child;
4904 }
4905 else
4906 {
4907 prev->die_sib = new_child;
4908 new_child->die_sib = old_child->die_sib;
4909 }
4910 if (old_child->die_parent->die_child == old_child)
4911 old_child->die_parent->die_child = new_child;
4912 }
4913
4914 /* Move all children from OLD_PARENT to NEW_PARENT. */
4915
4916 static void
4917 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
4918 {
4919 dw_die_ref c;
4920 new_parent->die_child = old_parent->die_child;
4921 old_parent->die_child = NULL;
4922 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
4923 }
4924
4925 /* Remove child DIE whose die_tag is TAG. Do nothing if no child
4926 matches TAG. */
4927
4928 static void
4929 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
4930 {
4931 dw_die_ref c;
4932
4933 c = die->die_child;
4934 if (c) do {
4935 dw_die_ref prev = c;
4936 c = c->die_sib;
4937 while (c->die_tag == tag)
4938 {
4939 remove_child_with_prev (c, prev);
4940 c->die_parent = NULL;
4941 /* Might have removed every child. */
4942 if (c == c->die_sib)
4943 return;
4944 c = c->die_sib;
4945 }
4946 } while (c != die->die_child);
4947 }
4948
4949 /* Add a CHILD_DIE as the last child of DIE. */
4950
4951 static void
4952 add_child_die (dw_die_ref die, dw_die_ref child_die)
4953 {
4954 /* FIXME this should probably be an assert. */
4955 if (! die || ! child_die)
4956 return;
4957 gcc_assert (die != child_die);
4958
4959 child_die->die_parent = die;
4960 if (die->die_child)
4961 {
4962 child_die->die_sib = die->die_child->die_sib;
4963 die->die_child->die_sib = child_die;
4964 }
4965 else
4966 child_die->die_sib = child_die;
4967 die->die_child = child_die;
4968 }
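/* A note on the representation handled by the child-manipulation helpers
   above, descriptive only and not part of the original source: die_child
   points at the LAST child and the children form a circular singly-linked
   list through die_sib, so after

       add_child_die (parent, a);
       add_child_die (parent, b);

   we have parent->die_child == b, b->die_sib == a and a->die_sib == b.
   That is why remove_child_with_prev and replace_child need the
   predecessor, and why appending a new last child is O(1).  */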
4969
4970 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
4971
4972 static void
4973 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
4974 dw_die_ref after_die)
4975 {
4976 gcc_assert (die
4977 && child_die
4978 && after_die
4979 && die->die_child
4980 && die != child_die);
4981
4982 child_die->die_parent = die;
4983 child_die->die_sib = after_die->die_sib;
4984 after_die->die_sib = child_die;
4985 if (die->die_child == after_die)
4986 die->die_child = child_die;
4987 }
4988
4989 /* Unassociate CHILD from its parent, and make its parent be
4990 NEW_PARENT. */
4991
4992 static void
4993 reparent_child (dw_die_ref child, dw_die_ref new_parent)
4994 {
4995 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
4996 if (p->die_sib == child)
4997 {
4998 remove_child_with_prev (child, p);
4999 break;
5000 }
5001 add_child_die (new_parent, child);
5002 }
5003
5004 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5005 is the specification, to the end of PARENT's list of children.
5006 This is done by removing and re-adding it. */
5007
5008 static void
5009 splice_child_die (dw_die_ref parent, dw_die_ref child)
5010 {
5011 /* We want the declaration DIE from inside the class, not the
5012 specification DIE at toplevel. */
5013 if (child->die_parent != parent)
5014 {
5015 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5016
5017 if (tmp)
5018 child = tmp;
5019 }
5020
5021 gcc_assert (child->die_parent == parent
5022 || (child->die_parent
5023 == get_AT_ref (parent, DW_AT_specification)));
5024
5025 reparent_child (child, parent);
5026 }
5027
5028 /* Create and return a new die with a parent of PARENT_DIE. If
5029 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5030 associated tree T must be supplied to determine parenthood
5031 later. */
5032
5033 static inline dw_die_ref
5034 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5035 {
5036 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5037
5038 die->die_tag = tag_value;
5039
5040 if (parent_die != NULL)
5041 add_child_die (parent_die, die);
5042 else
5043 {
5044 limbo_die_node *limbo_node;
5045
5046 /* No DIEs created after early dwarf should end up in limbo,
5047 because the limbo list should not persist past LTO
5048 streaming. */
5049 if (tag_value != DW_TAG_compile_unit
5050 /* These are allowed because they're generated while
5051 breaking out COMDAT units late. */
5052 && tag_value != DW_TAG_type_unit
5053 && !early_dwarf
5054 /* Allow nested functions to live in limbo because they will
5055 only temporarily live there, as decls_for_scope will fix
5056 them up. */
5057 && (TREE_CODE (t) != FUNCTION_DECL
5058 || !decl_function_context (t))
5059 /* Same as nested functions above but for types. Types that
5060 are local to a function will be fixed in
5061 decls_for_scope. */
5062 && (!RECORD_OR_UNION_TYPE_P (t)
5063 || !TYPE_CONTEXT (t)
5064 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5065 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5066 especially in the ltrans stage, but once we implement LTO
5067 dwarf streaming, we should remove this exception. */
5068 && !in_lto_p)
5069 {
5070 fprintf (stderr, "symbol ended up in limbo too late:");
5071 debug_generic_stmt (t);
5072 gcc_unreachable ();
5073 }
5074
5075 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5076 limbo_node->die = die;
5077 limbo_node->created_for = t;
5078 limbo_node->next = limbo_die_list;
5079 limbo_die_list = limbo_node;
5080 }
5081
5082 return die;
5083 }
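/* Clarifying note, not part of the original source: a DIE created with a
   NULL parent, e.g.

       dw_die_ref die = new_die (DW_TAG_variable, NULL, decl);

   is parked on limbo_die_list together with the tree node it was created
   for.  During early dwarf that is always acceptable; afterwards only the
   cases whitelisted above (compile and type units, function-local functions
   and types, and LTO) avoid the "symbol ended up in limbo too late"
   diagnostic and gcc_unreachable.  */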
5084
5085 /* Return the DIE associated with the given type specifier. */
5086
5087 static inline dw_die_ref
5088 lookup_type_die (tree type)
5089 {
5090 return TYPE_SYMTAB_DIE (type);
5091 }
5092
5093 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5094 anonymous type named by the typedef TYPE_DIE, return the DIE of the
5095 anonymous type instead of the one of the naming typedef. */
5096
5097 static inline dw_die_ref
5098 strip_naming_typedef (tree type, dw_die_ref type_die)
5099 {
5100 if (type
5101 && TREE_CODE (type) == RECORD_TYPE
5102 && type_die
5103 && type_die->die_tag == DW_TAG_typedef
5104 && is_naming_typedef_decl (TYPE_NAME (type)))
5105 type_die = get_AT_ref (type_die, DW_AT_type);
5106 return type_die;
5107 }
5108
5109 /* Like lookup_type_die, but if type is an anonymous type named by a
5110 typedef[1], return the DIE of the anonymous type instead of the one of
5111 the naming typedef. This is because in gen_typedef_die, we did
5112 equate the anonymous struct named by the typedef with the DIE of
5113 the naming typedef. So by default, lookup_type_die on an anonymous
5114 struct yields the DIE of the naming typedef.
5115
5116 [1]: Read the comment of is_naming_typedef_decl to learn about what
5117 a naming typedef is. */
5118
5119 static inline dw_die_ref
5120 lookup_type_die_strip_naming_typedef (tree type)
5121 {
5122 dw_die_ref die = lookup_type_die (type);
5123 return strip_naming_typedef (type, die);
5124 }
5125
5126 /* Equate a DIE to a given type specifier. */
5127
5128 static inline void
5129 equate_type_number_to_die (tree type, dw_die_ref type_die)
5130 {
5131 TYPE_SYMTAB_DIE (type) = type_die;
5132 }
5133
5134 /* Returns a hash value for X (which really is a die_struct). */
5135
5136 inline hashval_t
5137 decl_die_hasher::hash (die_node *x)
5138 {
5139 return (hashval_t) x->decl_id;
5140 }
5141
5142 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5143
5144 inline bool
5145 decl_die_hasher::equal (die_node *x, tree y)
5146 {
5147 return (x->decl_id == DECL_UID (y));
5148 }
5149
5150 /* Return the DIE associated with a given declaration. */
5151
5152 static inline dw_die_ref
5153 lookup_decl_die (tree decl)
5154 {
5155 return decl_die_table->find_with_hash (decl, DECL_UID (decl));
5156 }
5157
5158 /* Returns a hash value for X (which really is a var_loc_list). */
5159
5160 inline hashval_t
5161 decl_loc_hasher::hash (var_loc_list *x)
5162 {
5163 return (hashval_t) x->decl_id;
5164 }
5165
5166 /* Return nonzero if decl_id of var_loc_list X is the same as
5167 UID of decl *Y. */
5168
5169 inline bool
5170 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
5171 {
5172 return (x->decl_id == DECL_UID (y));
5173 }
5174
5175 /* Return the var_loc list associated with a given declaration. */
5176
5177 static inline var_loc_list *
5178 lookup_decl_loc (const_tree decl)
5179 {
5180 if (!decl_loc_table)
5181 return NULL;
5182 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
5183 }
5184
5185 /* Returns a hash value for X (which really is a cached_dw_loc_list). */
5186
5187 inline hashval_t
5188 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
5189 {
5190 return (hashval_t) x->decl_id;
5191 }
5192
5193 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
5194 UID of decl *Y. */
5195
5196 inline bool
5197 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
5198 {
5199 return (x->decl_id == DECL_UID (y));
5200 }
5201
5202 /* Equate a DIE to a particular declaration. */
5203
5204 static void
5205 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
5206 {
5207 unsigned int decl_id = DECL_UID (decl);
5208
5209 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
5210 decl_die->decl_id = decl_id;
5211 }
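/* A minimal usage sketch of the decl <-> DIE mapping: once
   equate_decl_number_to_die (decl, die) has run, a later
   lookup_decl_die (decl) finds DIE again, because both sides key the
   hash table on DECL_UID (decl), which is also stored in
   die->decl_id for the hasher's equality check.  */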
5212
5213 /* Return how many bits the PIECE EXPR_LIST covers. */
5214
5215 static HOST_WIDE_INT
5216 decl_piece_bitsize (rtx piece)
5217 {
5218 int ret = (int) GET_MODE (piece);
5219 if (ret)
5220 return ret;
5221 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
5222 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
5223 return INTVAL (XEXP (XEXP (piece, 0), 0));
5224 }
5225
5226 /* Return a pointer to the location note stored in the PIECE EXPR_LIST. */
5227
5228 static rtx *
5229 decl_piece_varloc_ptr (rtx piece)
5230 {
5231 if ((int) GET_MODE (piece))
5232 return &XEXP (piece, 0);
5233 else
5234 return &XEXP (XEXP (piece, 0), 1);
5235 }
5236
5237 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
5238 NEXT is the chain of following piece nodes. */
5239
5240 static rtx_expr_list *
5241 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
5242 {
5243 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
5244 return alloc_EXPR_LIST (bitsize, loc_note, next);
5245 else
5246 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
5247 GEN_INT (bitsize),
5248 loc_note), next);
5249 }
5250
5251 /* Return rtx that should be stored into loc field for
5252 LOC_NOTE and BITPOS/BITSIZE. */
5253
5254 static rtx
5255 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
5256 HOST_WIDE_INT bitsize)
5257 {
5258 if (bitsize != -1)
5259 {
5260 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
5261 if (bitpos != 0)
5262 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
5263 }
5264 return loc_note;
5265 }
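/* Illustrative example of the piece encoding: a location note describing
   bits [64, 96) of a variable, i.e. BITPOS 64 and BITSIZE 32, becomes

       EXPR_LIST (64, NULL_RTX, EXPR_LIST (32, LOC_NOTE, NULL_RTX))

   with each bit count stashed in the EXPR_LIST mode field, provided the
   count is positive and at most MAX_MACHINE_MODE; counts that do not fit
   use the CONCAT fallback that decl_piece_bitsize and
   decl_piece_varloc_ptr decode.  */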
5266
5267 /* This function either modifies location piece list *DEST in
5268 place (if SRC and INNER are NULL), or copies location piece list
5269 *SRC to *DEST while modifying it. Location BITPOS is modified
5270 to contain LOC_NOTE; any pieces overlapping it are removed (or,
5271 when copying, not copied), and if needed some padding around it is added.
5272 When modifying in place, DEST should point to EXPR_LIST where
5273 earlier pieces cover PIECE_BITPOS bits, when copying SRC points
5274 to the start of the whole list and INNER points to the EXPR_LIST
5275 where earlier pieces cover PIECE_BITPOS bits. */
5276
5277 static void
5278 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
5279 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
5280 HOST_WIDE_INT bitsize, rtx loc_note)
5281 {
5282 HOST_WIDE_INT diff;
5283 bool copy = inner != NULL;
5284
5285 if (copy)
5286 {
5287 /* First copy all nodes preceding the current bitpos. */
5288 while (src != inner)
5289 {
5290 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
5291 decl_piece_bitsize (*src), NULL_RTX);
5292 dest = &XEXP (*dest, 1);
5293 src = &XEXP (*src, 1);
5294 }
5295 }
5296 /* Add padding if needed. */
5297 if (bitpos != piece_bitpos)
5298 {
5299 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
5300 copy ? NULL_RTX : *dest);
5301 dest = &XEXP (*dest, 1);
5302 }
5303 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
5304 {
5305 gcc_assert (!copy);
5306 /* A piece with the correct bitpos and bitsize already exists,
5307 just update the location for it and return. */
5308 *decl_piece_varloc_ptr (*dest) = loc_note;
5309 return;
5310 }
5311 /* Add the piece that changed. */
5312 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
5313 dest = &XEXP (*dest, 1);
5314 /* Skip over pieces that overlap it. */
5315 diff = bitpos - piece_bitpos + bitsize;
5316 if (!copy)
5317 src = dest;
5318 while (diff > 0 && *src)
5319 {
5320 rtx piece = *src;
5321 diff -= decl_piece_bitsize (piece);
5322 if (copy)
5323 src = &XEXP (piece, 1);
5324 else
5325 {
5326 *src = XEXP (piece, 1);
5327 free_EXPR_LIST_node (piece);
5328 }
5329 }
5330 /* Add padding if needed. */
5331 if (diff < 0 && *src)
5332 {
5333 if (!copy)
5334 dest = src;
5335 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
5336 dest = &XEXP (*dest, 1);
5337 }
5338 if (!copy)
5339 return;
5340 /* Finally copy all nodes following it. */
5341 while (*src)
5342 {
5343 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
5344 decl_piece_bitsize (*src), NULL_RTX);
5345 dest = &XEXP (*dest, 1);
5346 src = &XEXP (*src, 1);
5347 }
5348 }
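/* A small worked example of the in-place path above: if the existing list
   describes pieces [0, 32) and [32, 64) and a new note arrives for exactly
   bits [32, 64) under the same label, the caller passes DEST pointing at
   the second piece with PIECE_BITPOS == BITPOS == 32 and BITSIZE == 32, so
   the "already exists" branch simply overwrites that piece's location note
   and returns without allocating or freeing any nodes.  */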
5349
5350 /* Add a variable location node to the linked list for DECL. */
5351
5352 static struct var_loc_node *
5353 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label)
5354 {
5355 unsigned int decl_id;
5356 var_loc_list *temp;
5357 struct var_loc_node *loc = NULL;
5358 HOST_WIDE_INT bitsize = -1, bitpos = -1;
5359
5360 if (TREE_CODE (decl) == VAR_DECL
5361 && DECL_HAS_DEBUG_EXPR_P (decl))
5362 {
5363 tree realdecl = DECL_DEBUG_EXPR (decl);
5364 if (handled_component_p (realdecl)
5365 || (TREE_CODE (realdecl) == MEM_REF
5366 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
5367 {
5368 HOST_WIDE_INT maxsize;
5369 bool reverse;
5370 tree innerdecl
5371 = get_ref_base_and_extent (realdecl, &bitpos, &bitsize, &maxsize,
5372 &reverse);
5373 if (!DECL_P (innerdecl)
5374 || DECL_IGNORED_P (innerdecl)
5375 || TREE_STATIC (innerdecl)
5376 || bitsize <= 0
5377 || bitpos + bitsize > 256
5378 || bitsize != maxsize)
5379 return NULL;
5380 decl = innerdecl;
5381 }
5382 }
5383
5384 decl_id = DECL_UID (decl);
5385 var_loc_list **slot
5386 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
5387 if (*slot == NULL)
5388 {
5389 temp = ggc_cleared_alloc<var_loc_list> ();
5390 temp->decl_id = decl_id;
5391 *slot = temp;
5392 }
5393 else
5394 temp = *slot;
5395
5396 /* For PARM_DECLs try to keep around the original incoming value,
5397 even if that means we'll emit a zero-range .debug_loc entry. */
5398 if (temp->last
5399 && temp->first == temp->last
5400 && TREE_CODE (decl) == PARM_DECL
5401 && NOTE_P (temp->first->loc)
5402 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
5403 && DECL_INCOMING_RTL (decl)
5404 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
5405 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
5406 == GET_CODE (DECL_INCOMING_RTL (decl))
5407 && prev_real_insn (temp->first->loc) == NULL_RTX
5408 && (bitsize != -1
5409 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
5410 NOTE_VAR_LOCATION_LOC (loc_note))
5411 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
5412 != NOTE_VAR_LOCATION_STATUS (loc_note))))
5413 {
5414 loc = ggc_cleared_alloc<var_loc_node> ();
5415 temp->first->next = loc;
5416 temp->last = loc;
5417 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
5418 }
5419 else if (temp->last)
5420 {
5421 struct var_loc_node *last = temp->last, *unused = NULL;
5422 rtx *piece_loc = NULL, last_loc_note;
5423 HOST_WIDE_INT piece_bitpos = 0;
5424 if (last->next)
5425 {
5426 last = last->next;
5427 gcc_assert (last->next == NULL);
5428 }
5429 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
5430 {
5431 piece_loc = &last->loc;
5432 do
5433 {
5434 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
5435 if (piece_bitpos + cur_bitsize > bitpos)
5436 break;
5437 piece_bitpos += cur_bitsize;
5438 piece_loc = &XEXP (*piece_loc, 1);
5439 }
5440 while (*piece_loc);
5441 }
5442 /* TEMP->LAST here is a pointer to either the last-but-one or the
5443 last element in the chained list; LAST is a pointer to the
5444 last element. */
5445 if (label && strcmp (last->label, label) == 0)
5446 {
5447 /* For SRA-optimized variables, if there weren't any real
5448 insns since the last note, just modify the last node. */
5449 if (piece_loc != NULL)
5450 {
5451 adjust_piece_list (piece_loc, NULL, NULL,
5452 bitpos, piece_bitpos, bitsize, loc_note);
5453 return NULL;
5454 }
5455 /* If the last note doesn't cover any instructions, remove it. */
5456 if (temp->last != last)
5457 {
5458 temp->last->next = NULL;
5459 unused = last;
5460 last = temp->last;
5461 gcc_assert (strcmp (last->label, label) != 0);
5462 }
5463 else
5464 {
5465 gcc_assert (temp->first == temp->last
5466 || (temp->first->next == temp->last
5467 && TREE_CODE (decl) == PARM_DECL));
5468 memset (temp->last, '\0', sizeof (*temp->last));
5469 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
5470 return temp->last;
5471 }
5472 }
5473 if (bitsize == -1 && NOTE_P (last->loc))
5474 last_loc_note = last->loc;
5475 else if (piece_loc != NULL
5476 && *piece_loc != NULL_RTX
5477 && piece_bitpos == bitpos
5478 && decl_piece_bitsize (*piece_loc) == bitsize)
5479 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
5480 else
5481 last_loc_note = NULL_RTX;
5482 /* If the current location is the same as the end of the list,
5483 and either both or neither of the locations is uninitialized,
5484 we have nothing to do. */
5485 if (last_loc_note == NULL_RTX
5486 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
5487 NOTE_VAR_LOCATION_LOC (loc_note)))
5488 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
5489 != NOTE_VAR_LOCATION_STATUS (loc_note))
5490 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
5491 == VAR_INIT_STATUS_UNINITIALIZED)
5492 || (NOTE_VAR_LOCATION_STATUS (loc_note)
5493 == VAR_INIT_STATUS_UNINITIALIZED))))
5494 {
5495 /* Add LOC to the end of list and update LAST. If the last
5496 element of the list has been removed above, reuse its
5497 memory for the new node, otherwise allocate a new one. */
5498 if (unused)
5499 {
5500 loc = unused;
5501 memset (loc, '\0', sizeof (*loc));
5502 }
5503 else
5504 loc = ggc_cleared_alloc<var_loc_node> ();
5505 if (bitsize == -1 || piece_loc == NULL)
5506 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
5507 else
5508 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
5509 bitpos, piece_bitpos, bitsize, loc_note);
5510 last->next = loc;
5511 /* Ensure TEMP->LAST will point either to the new last but one
5512 element of the chain, or to the last element in it. */
5513 if (last != temp->last)
5514 temp->last = last;
5515 }
5516 else if (unused)
5517 ggc_free (unused);
5518 }
5519 else
5520 {
5521 loc = ggc_cleared_alloc<var_loc_node> ();
5522 temp->first = loc;
5523 temp->last = loc;
5524 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
5525 }
5526 return loc;
5527 }
5528 \f
5529 /* Keep track of the number of spaces used to indent the
5530 output of the debugging routines that print the structure of
5531 the DIE internal representation. */
5532 static int print_indent;
5533
5534 /* Indent the line the number of spaces given by print_indent. */
5535
5536 static inline void
5537 print_spaces (FILE *outfile)
5538 {
5539 fprintf (outfile, "%*s", print_indent, "");
5540 }
5541
5542 /* Print a type signature in hex. */
5543
5544 static inline void
5545 print_signature (FILE *outfile, char *sig)
5546 {
5547 int i;
5548
5549 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
5550 fprintf (outfile, "%02x", sig[i] & 0xff);
5551 }
5552
5553 static inline void
5554 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
5555 {
5556 if (discr_value->pos)
5557 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
5558 else
5559 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
5560 }
5561
5562 static void print_loc_descr (dw_loc_descr_ref, FILE *);
5563
5564 /* Print the value associated with the VAL DWARF value node to OUTFILE. If
5565 RECURSE, output location descriptor operations. */
5566
5567 static void
5568 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
5569 {
5570 switch (val->val_class)
5571 {
5572 case dw_val_class_addr:
5573 fprintf (outfile, "address");
5574 break;
5575 case dw_val_class_offset:
5576 fprintf (outfile, "offset");
5577 break;
5578 case dw_val_class_loc:
5579 fprintf (outfile, "location descriptor");
5580 if (val->v.val_loc == NULL)
5581 fprintf (outfile, " -> <null>\n");
5582 else if (recurse)
5583 {
5584 fprintf (outfile, ":\n");
5585 print_indent += 4;
5586 print_loc_descr (val->v.val_loc, outfile);
5587 print_indent -= 4;
5588 }
5589 else
5590 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
5591 break;
5592 case dw_val_class_loc_list:
5593 fprintf (outfile, "location list -> label:%s",
5594 val->v.val_loc_list->ll_symbol);
5595 break;
5596 case dw_val_class_range_list:
5597 fprintf (outfile, "range list");
5598 break;
5599 case dw_val_class_const:
5600 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
5601 break;
5602 case dw_val_class_unsigned_const:
5603 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
5604 break;
5605 case dw_val_class_const_double:
5606 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
5607 HOST_WIDE_INT_PRINT_UNSIGNED")",
5608 val->v.val_double.high,
5609 val->v.val_double.low);
5610 break;
5611 case dw_val_class_wide_int:
5612 {
5613 int i = val->v.val_wide->get_len ();
5614 fprintf (outfile, "constant (");
5615 gcc_assert (i > 0);
5616 if (val->v.val_wide->elt (i - 1) == 0)
5617 fprintf (outfile, "0x");
5618 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
5619 val->v.val_wide->elt (--i));
5620 while (--i >= 0)
5621 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
5622 val->v.val_wide->elt (i));
5623 fprintf (outfile, ")");
5624 break;
5625 }
5626 case dw_val_class_vec:
5627 fprintf (outfile, "floating-point or vector constant");
5628 break;
5629 case dw_val_class_flag:
5630 fprintf (outfile, "%u", val->v.val_flag);
5631 break;
5632 case dw_val_class_die_ref:
5633 if (val->v.val_die_ref.die != NULL)
5634 {
5635 dw_die_ref die = val->v.val_die_ref.die;
5636
5637 if (die->comdat_type_p)
5638 {
5639 fprintf (outfile, "die -> signature: ");
5640 print_signature (outfile,
5641 die->die_id.die_type_node->signature);
5642 }
5643 else if (die->die_id.die_symbol)
5644 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
5645 else
5646 fprintf (outfile, "die -> %ld", die->die_offset);
5647 fprintf (outfile, " (%p)", (void *) die);
5648 }
5649 else
5650 fprintf (outfile, "die -> <null>");
5651 break;
5652 case dw_val_class_vms_delta:
5653 fprintf (outfile, "delta: @slotcount(%s-%s)",
5654 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
5655 break;
5656 case dw_val_class_lbl_id:
5657 case dw_val_class_lineptr:
5658 case dw_val_class_macptr:
5659 case dw_val_class_high_pc:
5660 fprintf (outfile, "label: %s", val->v.val_lbl_id);
5661 break;
5662 case dw_val_class_str:
5663 if (val->v.val_str->str != NULL)
5664 fprintf (outfile, "\"%s\"", val->v.val_str->str);
5665 else
5666 fprintf (outfile, "<null>");
5667 break;
5668 case dw_val_class_file:
5669 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
5670 val->v.val_file->emitted_number);
5671 break;
5672 case dw_val_class_data8:
5673 {
5674 int i;
5675
5676 for (i = 0; i < 8; i++)
5677 fprintf (outfile, "%02x", val->v.val_data8[i]);
5678 break;
5679 }
5680 case dw_val_class_discr_value:
5681 print_discr_value (outfile, &val->v.val_discr_value);
5682 break;
5683 case dw_val_class_discr_list:
5684 for (dw_discr_list_ref node = val->v.val_discr_list;
5685 node != NULL;
5686 node = node->dw_discr_next)
5687 {
5688 if (node->dw_discr_range)
5689 {
5690 print_discr_value (outfile, &node->dw_discr_lower_bound);
5691 fprintf (outfile, " .. ");
5692 print_discr_value (outfile, &node->dw_discr_upper_bound);
5693 }
5694 else
5695 print_discr_value (outfile, &node->dw_discr_lower_bound);
5696
5697 if (node->dw_discr_next != NULL)
5698 fprintf (outfile, " | ");
5699 }
5700 default:
5701 break;
5702 }
5703 }
5704
5705 /* Likewise, for a DIE attribute. */
5706
5707 static void
5708 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
5709 {
5710 print_dw_val (&a->dw_attr_val, recurse, outfile);
5711 }
5712
5713
5714 /* Print the list of operands in the LOC location description to OUTFILE. This
5715 routine is a debugging aid only. */
5716
5717 static void
5718 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
5719 {
5720 dw_loc_descr_ref l = loc;
5721
5722 if (loc == NULL)
5723 {
5724 print_spaces (outfile);
5725 fprintf (outfile, "<null>\n");
5726 return;
5727 }
5728
5729 for (l = loc; l != NULL; l = l->dw_loc_next)
5730 {
5731 print_spaces (outfile);
5732 fprintf (outfile, "(%p) %s",
5733 (void *) l,
5734 dwarf_stack_op_name (l->dw_loc_opc));
5735 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
5736 {
5737 fprintf (outfile, " ");
5738 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
5739 }
5740 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
5741 {
5742 fprintf (outfile, ", ");
5743 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
5744 }
5745 fprintf (outfile, "\n");
5746 }
5747 }
5748
5749 /* Print the information associated with a given DIE, and its children.
5750 This routine is a debugging aid only. */
5751
5752 static void
5753 print_die (dw_die_ref die, FILE *outfile)
5754 {
5755 dw_attr_node *a;
5756 dw_die_ref c;
5757 unsigned ix;
5758
5759 print_spaces (outfile);
5760 fprintf (outfile, "DIE %4ld: %s (%p)\n",
5761 die->die_offset, dwarf_tag_name (die->die_tag),
5762 (void*) die);
5763 print_spaces (outfile);
5764 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
5765 fprintf (outfile, " offset: %ld", die->die_offset);
5766 fprintf (outfile, " mark: %d\n", die->die_mark);
5767
5768 if (die->comdat_type_p)
5769 {
5770 print_spaces (outfile);
5771 fprintf (outfile, " signature: ");
5772 print_signature (outfile, die->die_id.die_type_node->signature);
5773 fprintf (outfile, "\n");
5774 }
5775
5776 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5777 {
5778 print_spaces (outfile);
5779 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
5780
5781 print_attribute (a, true, outfile);
5782 fprintf (outfile, "\n");
5783 }
5784
5785 if (die->die_child != NULL)
5786 {
5787 print_indent += 4;
5788 FOR_EACH_CHILD (die, c, print_die (c, outfile));
5789 print_indent -= 4;
5790 }
5791 if (print_indent == 0)
5792 fprintf (outfile, "\n");
5793 }
5794
5795 /* Print the list of operations in the LOC location description. */
5796
5797 DEBUG_FUNCTION void
5798 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
5799 {
5800 print_loc_descr (loc, stderr);
5801 }
5802
5803 /* Print the information collected for a given DIE. */
5804
5805 DEBUG_FUNCTION void
5806 debug_dwarf_die (dw_die_ref die)
5807 {
5808 print_die (die, stderr);
5809 }
5810
5811 DEBUG_FUNCTION void
5812 debug (die_struct &ref)
5813 {
5814 print_die (&ref, stderr);
5815 }
5816
5817 DEBUG_FUNCTION void
5818 debug (die_struct *ptr)
5819 {
5820 if (ptr)
5821 debug (*ptr);
5822 else
5823 fprintf (stderr, "<nil>\n");
5824 }
5825
5826
5827 /* Print all DWARF information collected for the compilation unit.
5828 This routine is a debugging aid only. */
5829
5830 DEBUG_FUNCTION void
5831 debug_dwarf (void)
5832 {
5833 print_indent = 0;
5834 print_die (comp_unit_die (), stderr);
5835 }
5836
5837 /* Sanity checks on DIEs. */
5838
5839 static void
5840 check_die (dw_die_ref die)
5841 {
5842 unsigned ix;
5843 dw_attr_node *a;
5844 bool inline_found = false;
5845 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
5846 int n_decl_line = 0, n_decl_file = 0;
5847 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5848 {
5849 switch (a->dw_attr)
5850 {
5851 case DW_AT_inline:
5852 if (a->dw_attr_val.v.val_unsigned)
5853 inline_found = true;
5854 break;
5855 case DW_AT_location:
5856 ++n_location;
5857 break;
5858 case DW_AT_low_pc:
5859 ++n_low_pc;
5860 break;
5861 case DW_AT_high_pc:
5862 ++n_high_pc;
5863 break;
5864 case DW_AT_artificial:
5865 ++n_artificial;
5866 break;
5867 case DW_AT_decl_line:
5868 ++n_decl_line;
5869 break;
5870 case DW_AT_decl_file:
5871 ++n_decl_file;
5872 break;
5873 default:
5874 break;
5875 }
5876 }
5877 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
5878 || n_decl_line > 1 || n_decl_file > 1)
5879 {
5880 fprintf (stderr, "Duplicate attributes in DIE:\n");
5881 debug_dwarf_die (die);
5882 gcc_unreachable ();
5883 }
5884 if (inline_found)
5885 {
5886 /* A debugging information entry that is a member of an abstract
5887 instance tree [that has DW_AT_inline] should not contain any
5888 attributes which describe aspects of the subroutine which vary
5889 between distinct inlined expansions or distinct out-of-line
5890 expansions. */
5891 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5892 gcc_assert (a->dw_attr != DW_AT_low_pc
5893 && a->dw_attr != DW_AT_high_pc
5894 && a->dw_attr != DW_AT_location
5895 && a->dw_attr != DW_AT_frame_base
5896 && a->dw_attr != DW_AT_GNU_all_call_sites);
5897 }
5898 }
5899 \f
5900 /* Start a new compilation unit DIE for an include file. OLD_UNIT is the CU
5901 for the enclosing include file, if any. BINCL_DIE is the DW_TAG_GNU_BINCL
5902 DIE that marks the start of the DIEs for this include file. */
5903
5904 static dw_die_ref
5905 push_new_compile_unit (dw_die_ref old_unit, dw_die_ref bincl_die)
5906 {
5907 const char *filename = get_AT_string (bincl_die, DW_AT_name);
5908 dw_die_ref new_unit = gen_compile_unit_die (filename);
5909
5910 new_unit->die_sib = old_unit;
5911 return new_unit;
5912 }
5913
5914 /* Close an include-file CU and reopen the enclosing one. */
5915
5916 static dw_die_ref
5917 pop_compile_unit (dw_die_ref old_unit)
5918 {
5919 dw_die_ref new_unit = old_unit->die_sib;
5920
5921 old_unit->die_sib = NULL;
5922 return new_unit;
5923 }
5924
5925 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
5926 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
5927 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
5928
5929 /* Calculate the checksum of a location expression. */
5930
5931 static inline void
5932 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
5933 {
5934 int tem;
5935 inchash::hash hstate;
5936 hashval_t hash;
5937
5938 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
5939 CHECKSUM (tem);
5940 hash_loc_operands (loc, hstate);
5941 hash = hstate.end();
5942 CHECKSUM (hash);
5943 }
5944
5945 /* Calculate the checksum of an attribute. */
5946
5947 static void
5948 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
5949 {
5950 dw_loc_descr_ref loc;
5951 rtx r;
5952
5953 CHECKSUM (at->dw_attr);
5954
5955 /* We don't care that this was compiled with a different compiler
5956 snapshot; if the output is the same, that's what matters. */
5957 if (at->dw_attr == DW_AT_producer)
5958 return;
5959
5960 switch (AT_class (at))
5961 {
5962 case dw_val_class_const:
5963 CHECKSUM (at->dw_attr_val.v.val_int);
5964 break;
5965 case dw_val_class_unsigned_const:
5966 CHECKSUM (at->dw_attr_val.v.val_unsigned);
5967 break;
5968 case dw_val_class_const_double:
5969 CHECKSUM (at->dw_attr_val.v.val_double);
5970 break;
5971 case dw_val_class_wide_int:
5972 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
5973 get_full_len (*at->dw_attr_val.v.val_wide)
5974 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
5975 break;
5976 case dw_val_class_vec:
5977 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
5978 (at->dw_attr_val.v.val_vec.length
5979 * at->dw_attr_val.v.val_vec.elt_size));
5980 break;
5981 case dw_val_class_flag:
5982 CHECKSUM (at->dw_attr_val.v.val_flag);
5983 break;
5984 case dw_val_class_str:
5985 CHECKSUM_STRING (AT_string (at));
5986 break;
5987
5988 case dw_val_class_addr:
5989 r = AT_addr (at);
5990 gcc_assert (GET_CODE (r) == SYMBOL_REF);
5991 CHECKSUM_STRING (XSTR (r, 0));
5992 break;
5993
5994 case dw_val_class_offset:
5995 CHECKSUM (at->dw_attr_val.v.val_offset);
5996 break;
5997
5998 case dw_val_class_loc:
5999 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6000 loc_checksum (loc, ctx);
6001 break;
6002
6003 case dw_val_class_die_ref:
6004 die_checksum (AT_ref (at), ctx, mark);
6005 break;
6006
6007 case dw_val_class_fde_ref:
6008 case dw_val_class_vms_delta:
6009 case dw_val_class_lbl_id:
6010 case dw_val_class_lineptr:
6011 case dw_val_class_macptr:
6012 case dw_val_class_high_pc:
6013 break;
6014
6015 case dw_val_class_file:
6016 CHECKSUM_STRING (AT_file (at)->filename);
6017 break;
6018
6019 case dw_val_class_data8:
6020 CHECKSUM (at->dw_attr_val.v.val_data8);
6021 break;
6022
6023 default:
6024 break;
6025 }
6026 }
6027
6028 /* Calculate the checksum of a DIE. */
6029
6030 static void
6031 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6032 {
6033 dw_die_ref c;
6034 dw_attr_node *a;
6035 unsigned ix;
6036
6037 /* To avoid infinite recursion. */
6038 if (die->die_mark)
6039 {
6040 CHECKSUM (die->die_mark);
6041 return;
6042 }
6043 die->die_mark = ++(*mark);
6044
6045 CHECKSUM (die->die_tag);
6046
6047 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6048 attr_checksum (a, ctx, mark);
6049
6050 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6051 }
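/* Illustrative note on the marking scheme: for a self-referential type
   such as

       struct list { struct list *next; };

   the DW_AT_type references eventually lead back to the struct's own DIE.
   Since that DIE was marked on first entry, the revisit above checksums
   only its mark value and returns, so the recursion terminates while the
   back-reference still influences the hash.  */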
6052
6053 #undef CHECKSUM
6054 #undef CHECKSUM_BLOCK
6055 #undef CHECKSUM_STRING
6056
6057 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6058 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6059 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6060 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
6061 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
6062 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
6063 #define CHECKSUM_ATTR(FOO) \
6064 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
6065
6066 /* Calculate the checksum of a number in signed LEB128 format. */
6067
6068 static void
6069 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
6070 {
6071 unsigned char byte;
6072 bool more;
6073
6074 while (1)
6075 {
6076 byte = (value & 0x7f);
6077 value >>= 7;
6078 more = !((value == 0 && (byte & 0x40) == 0)
6079 || (value == -1 && (byte & 0x40) != 0));
6080 if (more)
6081 byte |= 0x80;
6082 CHECKSUM (byte);
6083 if (!more)
6084 break;
6085 }
6086 }
6087
6088 /* Calculate the checksum of a number in unsigned LEB128 format. */
6089
6090 static void
6091 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
6092 {
6093 while (1)
6094 {
6095 unsigned char byte = (value & 0x7f);
6096 value >>= 7;
6097 if (value != 0)
6098 /* More bytes to follow. */
6099 byte |= 0x80;
6100 CHECKSUM (byte);
6101 if (value == 0)
6102 break;
6103 }
6104 }
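/* Worked examples for the two LEB128 encodings above, matching the byte
   sequences given in the DWARF specification: checksum_uleb128 feeds the
   value 624485 into the hash as the bytes 0xe5 0x8e 0x26, and
   checksum_sleb128 feeds -624485 as 0x9b 0xf1 0x59.  */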
6105
6106 /* Checksum the context of the DIE. This adds the names of any
6107 surrounding namespaces or structures to the checksum. */
6108
6109 static void
6110 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
6111 {
6112 const char *name;
6113 dw_die_ref spec;
6114 int tag = die->die_tag;
6115
6116 if (tag != DW_TAG_namespace
6117 && tag != DW_TAG_structure_type
6118 && tag != DW_TAG_class_type)
6119 return;
6120
6121 name = get_AT_string (die, DW_AT_name);
6122
6123 spec = get_AT_ref (die, DW_AT_specification);
6124 if (spec != NULL)
6125 die = spec;
6126
6127 if (die->die_parent != NULL)
6128 checksum_die_context (die->die_parent, ctx);
6129
6130 CHECKSUM_ULEB128 ('C');
6131 CHECKSUM_ULEB128 (tag);
6132 if (name != NULL)
6133 CHECKSUM_STRING (name);
6134 }
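/* For example, when the DIE being signed is declared inside struct S which
   lives in namespace N, the recursion above contributes
   'C' DW_TAG_namespace "N" followed by 'C' DW_TAG_structure_type "S",
   outermost scope first, so N::S::T and an identically named type in a
   different scope get different signatures.  */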
6135
6136 /* Calculate the checksum of a location expression. */
6137
6138 static inline void
6139 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6140 {
6141 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
6142 were emitted as a DW_FORM_sdata instead of a location expression. */
6143 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
6144 {
6145 CHECKSUM_ULEB128 (DW_FORM_sdata);
6146 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
6147 return;
6148 }
6149
6150 /* Otherwise, just checksum the raw location expression. */
6151 while (loc != NULL)
6152 {
6153 inchash::hash hstate;
6154 hashval_t hash;
6155
6156 CHECKSUM_ULEB128 (loc->dtprel);
6157 CHECKSUM_ULEB128 (loc->dw_loc_opc);
6158 hash_loc_operands (loc, hstate);
6159 hash = hstate.end ();
6160 CHECKSUM (hash);
6161 loc = loc->dw_loc_next;
6162 }
6163 }
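/* The point of the DW_OP_plus_uconst special case above is that a data
   member location may be emitted either as a bare constant or as the
   one-operation expression DW_OP_plus_uconst <offset>; checksumming the
   lone operation as the equivalent DW_FORM_sdata constant keeps the two
   encodings from yielding different type signatures.  */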
6164
6165 /* Calculate the checksum of an attribute. */
6166
6167 static void
6168 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
6169 struct md5_ctx *ctx, int *mark)
6170 {
6171 dw_loc_descr_ref loc;
6172 rtx r;
6173
6174 if (AT_class (at) == dw_val_class_die_ref)
6175 {
6176 dw_die_ref target_die = AT_ref (at);
6177
6178 /* For pointer and reference types, we checksum only the (qualified)
6179 name of the target type (if there is a name). For friend entries,
6180 we checksum only the (qualified) name of the target type or function.
6181 This allows the checksum to remain the same whether the target type
6182 is complete or not. */
6183 if ((at->dw_attr == DW_AT_type
6184 && (tag == DW_TAG_pointer_type
6185 || tag == DW_TAG_reference_type
6186 || tag == DW_TAG_rvalue_reference_type
6187 || tag == DW_TAG_ptr_to_member_type))
6188 || (at->dw_attr == DW_AT_friend
6189 && tag == DW_TAG_friend))
6190 {
6191 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
6192
6193 if (name_attr != NULL)
6194 {
6195 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
6196
6197 if (decl == NULL)
6198 decl = target_die;
6199 CHECKSUM_ULEB128 ('N');
6200 CHECKSUM_ULEB128 (at->dw_attr);
6201 if (decl->die_parent != NULL)
6202 checksum_die_context (decl->die_parent, ctx);
6203 CHECKSUM_ULEB128 ('E');
6204 CHECKSUM_STRING (AT_string (name_attr));
6205 return;
6206 }
6207 }
6208
6209 /* For all other references to another DIE, we check to see if the
6210 target DIE has already been visited. If it has, we emit a
6211 backward reference; if not, we descend recursively. */
6212 if (target_die->die_mark > 0)
6213 {
6214 CHECKSUM_ULEB128 ('R');
6215 CHECKSUM_ULEB128 (at->dw_attr);
6216 CHECKSUM_ULEB128 (target_die->die_mark);
6217 }
6218 else
6219 {
6220 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
6221
6222 if (decl == NULL)
6223 decl = target_die;
6224 target_die->die_mark = ++(*mark);
6225 CHECKSUM_ULEB128 ('T');
6226 CHECKSUM_ULEB128 (at->dw_attr);
6227 if (decl->die_parent != NULL)
6228 checksum_die_context (decl->die_parent, ctx);
6229 die_checksum_ordered (target_die, ctx, mark);
6230 }
6231 return;
6232 }
6233
6234 CHECKSUM_ULEB128 ('A');
6235 CHECKSUM_ULEB128 (at->dw_attr);
6236
6237 switch (AT_class (at))
6238 {
6239 case dw_val_class_const:
6240 CHECKSUM_ULEB128 (DW_FORM_sdata);
6241 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
6242 break;
6243
6244 case dw_val_class_unsigned_const:
6245 CHECKSUM_ULEB128 (DW_FORM_sdata);
6246 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
6247 break;
6248
6249 case dw_val_class_const_double:
6250 CHECKSUM_ULEB128 (DW_FORM_block);
6251 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
6252 CHECKSUM (at->dw_attr_val.v.val_double);
6253 break;
6254
6255 case dw_val_class_wide_int:
6256 CHECKSUM_ULEB128 (DW_FORM_block);
6257 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
6258 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
6259 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6260 get_full_len (*at->dw_attr_val.v.val_wide)
6261 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6262 break;
6263
6264 case dw_val_class_vec:
6265 CHECKSUM_ULEB128 (DW_FORM_block);
6266 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
6267 * at->dw_attr_val.v.val_vec.elt_size);
6268 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6269 (at->dw_attr_val.v.val_vec.length
6270 * at->dw_attr_val.v.val_vec.elt_size));
6271 break;
6272
6273 case dw_val_class_flag:
6274 CHECKSUM_ULEB128 (DW_FORM_flag);
6275 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
6276 break;
6277
6278 case dw_val_class_str:
6279 CHECKSUM_ULEB128 (DW_FORM_string);
6280 CHECKSUM_STRING (AT_string (at));
6281 break;
6282
6283 case dw_val_class_addr:
6284 r = AT_addr (at);
6285 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6286 CHECKSUM_ULEB128 (DW_FORM_string);
6287 CHECKSUM_STRING (XSTR (r, 0));
6288 break;
6289
6290 case dw_val_class_offset:
6291 CHECKSUM_ULEB128 (DW_FORM_sdata);
6292 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
6293 break;
6294
6295 case dw_val_class_loc:
6296 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6297 loc_checksum_ordered (loc, ctx);
6298 break;
6299
6300 case dw_val_class_fde_ref:
6301 case dw_val_class_lbl_id:
6302 case dw_val_class_lineptr:
6303 case dw_val_class_macptr:
6304 case dw_val_class_high_pc:
6305 break;
6306
6307 case dw_val_class_file:
6308 CHECKSUM_ULEB128 (DW_FORM_string);
6309 CHECKSUM_STRING (AT_file (at)->filename);
6310 break;
6311
6312 case dw_val_class_data8:
6313 CHECKSUM (at->dw_attr_val.v.val_data8);
6314 break;
6315
6316 default:
6317 break;
6318 }
6319 }
6320
6321 struct checksum_attributes
6322 {
6323 dw_attr_node *at_name;
6324 dw_attr_node *at_type;
6325 dw_attr_node *at_friend;
6326 dw_attr_node *at_accessibility;
6327 dw_attr_node *at_address_class;
6328 dw_attr_node *at_allocated;
6329 dw_attr_node *at_artificial;
6330 dw_attr_node *at_associated;
6331 dw_attr_node *at_binary_scale;
6332 dw_attr_node *at_bit_offset;
6333 dw_attr_node *at_bit_size;
6334 dw_attr_node *at_bit_stride;
6335 dw_attr_node *at_byte_size;
6336 dw_attr_node *at_byte_stride;
6337 dw_attr_node *at_const_value;
6338 dw_attr_node *at_containing_type;
6339 dw_attr_node *at_count;
6340 dw_attr_node *at_data_location;
6341 dw_attr_node *at_data_member_location;
6342 dw_attr_node *at_decimal_scale;
6343 dw_attr_node *at_decimal_sign;
6344 dw_attr_node *at_default_value;
6345 dw_attr_node *at_digit_count;
6346 dw_attr_node *at_discr;
6347 dw_attr_node *at_discr_list;
6348 dw_attr_node *at_discr_value;
6349 dw_attr_node *at_encoding;
6350 dw_attr_node *at_endianity;
6351 dw_attr_node *at_explicit;
6352 dw_attr_node *at_is_optional;
6353 dw_attr_node *at_location;
6354 dw_attr_node *at_lower_bound;
6355 dw_attr_node *at_mutable;
6356 dw_attr_node *at_ordering;
6357 dw_attr_node *at_picture_string;
6358 dw_attr_node *at_prototyped;
6359 dw_attr_node *at_small;
6360 dw_attr_node *at_segment;
6361 dw_attr_node *at_string_length;
6362 dw_attr_node *at_threads_scaled;
6363 dw_attr_node *at_upper_bound;
6364 dw_attr_node *at_use_location;
6365 dw_attr_node *at_use_UTF8;
6366 dw_attr_node *at_variable_parameter;
6367 dw_attr_node *at_virtuality;
6368 dw_attr_node *at_visibility;
6369 dw_attr_node *at_vtable_elem_location;
6370 };
6371
6372 /* Collect the attributes that we will want to use for the checksum. */
6373
6374 static void
6375 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
6376 {
6377 dw_attr_node *a;
6378 unsigned ix;
6379
6380 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6381 {
6382 switch (a->dw_attr)
6383 {
6384 case DW_AT_name:
6385 attrs->at_name = a;
6386 break;
6387 case DW_AT_type:
6388 attrs->at_type = a;
6389 break;
6390 case DW_AT_friend:
6391 attrs->at_friend = a;
6392 break;
6393 case DW_AT_accessibility:
6394 attrs->at_accessibility = a;
6395 break;
6396 case DW_AT_address_class:
6397 attrs->at_address_class = a;
6398 break;
6399 case DW_AT_allocated:
6400 attrs->at_allocated = a;
6401 break;
6402 case DW_AT_artificial:
6403 attrs->at_artificial = a;
6404 break;
6405 case DW_AT_associated:
6406 attrs->at_associated = a;
6407 break;
6408 case DW_AT_binary_scale:
6409 attrs->at_binary_scale = a;
6410 break;
6411 case DW_AT_bit_offset:
6412 attrs->at_bit_offset = a;
6413 break;
6414 case DW_AT_bit_size:
6415 attrs->at_bit_size = a;
6416 break;
6417 case DW_AT_bit_stride:
6418 attrs->at_bit_stride = a;
6419 break;
6420 case DW_AT_byte_size:
6421 attrs->at_byte_size = a;
6422 break;
6423 case DW_AT_byte_stride:
6424 attrs->at_byte_stride = a;
6425 break;
6426 case DW_AT_const_value:
6427 attrs->at_const_value = a;
6428 break;
6429 case DW_AT_containing_type:
6430 attrs->at_containing_type = a;
6431 break;
6432 case DW_AT_count:
6433 attrs->at_count = a;
6434 break;
6435 case DW_AT_data_location:
6436 attrs->at_data_location = a;
6437 break;
6438 case DW_AT_data_member_location:
6439 attrs->at_data_member_location = a;
6440 break;
6441 case DW_AT_decimal_scale:
6442 attrs->at_decimal_scale = a;
6443 break;
6444 case DW_AT_decimal_sign:
6445 attrs->at_decimal_sign = a;
6446 break;
6447 case DW_AT_default_value:
6448 attrs->at_default_value = a;
6449 break;
6450 case DW_AT_digit_count:
6451 attrs->at_digit_count = a;
6452 break;
6453 case DW_AT_discr:
6454 attrs->at_discr = a;
6455 break;
6456 case DW_AT_discr_list:
6457 attrs->at_discr_list = a;
6458 break;
6459 case DW_AT_discr_value:
6460 attrs->at_discr_value = a;
6461 break;
6462 case DW_AT_encoding:
6463 attrs->at_encoding = a;
6464 break;
6465 case DW_AT_endianity:
6466 attrs->at_endianity = a;
6467 break;
6468 case DW_AT_explicit:
6469 attrs->at_explicit = a;
6470 break;
6471 case DW_AT_is_optional:
6472 attrs->at_is_optional = a;
6473 break;
6474 case DW_AT_location:
6475 attrs->at_location = a;
6476 break;
6477 case DW_AT_lower_bound:
6478 attrs->at_lower_bound = a;
6479 break;
6480 case DW_AT_mutable:
6481 attrs->at_mutable = a;
6482 break;
6483 case DW_AT_ordering:
6484 attrs->at_ordering = a;
6485 break;
6486 case DW_AT_picture_string:
6487 attrs->at_picture_string = a;
6488 break;
6489 case DW_AT_prototyped:
6490 attrs->at_prototyped = a;
6491 break;
6492 case DW_AT_small:
6493 attrs->at_small = a;
6494 break;
6495 case DW_AT_segment:
6496 attrs->at_segment = a;
6497 break;
6498 case DW_AT_string_length:
6499 attrs->at_string_length = a;
6500 break;
6501 case DW_AT_threads_scaled:
6502 attrs->at_threads_scaled = a;
6503 break;
6504 case DW_AT_upper_bound:
6505 attrs->at_upper_bound = a;
6506 break;
6507 case DW_AT_use_location:
6508 attrs->at_use_location = a;
6509 break;
6510 case DW_AT_use_UTF8:
6511 attrs->at_use_UTF8 = a;
6512 break;
6513 case DW_AT_variable_parameter:
6514 attrs->at_variable_parameter = a;
6515 break;
6516 case DW_AT_virtuality:
6517 attrs->at_virtuality = a;
6518 break;
6519 case DW_AT_visibility:
6520 attrs->at_visibility = a;
6521 break;
6522 case DW_AT_vtable_elem_location:
6523 attrs->at_vtable_elem_location = a;
6524 break;
6525 default:
6526 break;
6527 }
6528 }
6529 }
6530
6531 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
6532
6533 static void
6534 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6535 {
6536 dw_die_ref c;
6537 dw_die_ref decl;
6538 struct checksum_attributes attrs;
6539
6540 CHECKSUM_ULEB128 ('D');
6541 CHECKSUM_ULEB128 (die->die_tag);
6542
6543 memset (&attrs, 0, sizeof (attrs));
6544
6545 decl = get_AT_ref (die, DW_AT_specification);
6546 if (decl != NULL)
6547 collect_checksum_attributes (&attrs, decl);
6548 collect_checksum_attributes (&attrs, die);
6549
6550 CHECKSUM_ATTR (attrs.at_name);
6551 CHECKSUM_ATTR (attrs.at_accessibility);
6552 CHECKSUM_ATTR (attrs.at_address_class);
6553 CHECKSUM_ATTR (attrs.at_allocated);
6554 CHECKSUM_ATTR (attrs.at_artificial);
6555 CHECKSUM_ATTR (attrs.at_associated);
6556 CHECKSUM_ATTR (attrs.at_binary_scale);
6557 CHECKSUM_ATTR (attrs.at_bit_offset);
6558 CHECKSUM_ATTR (attrs.at_bit_size);
6559 CHECKSUM_ATTR (attrs.at_bit_stride);
6560 CHECKSUM_ATTR (attrs.at_byte_size);
6561 CHECKSUM_ATTR (attrs.at_byte_stride);
6562 CHECKSUM_ATTR (attrs.at_const_value);
6563 CHECKSUM_ATTR (attrs.at_containing_type);
6564 CHECKSUM_ATTR (attrs.at_count);
6565 CHECKSUM_ATTR (attrs.at_data_location);
6566 CHECKSUM_ATTR (attrs.at_data_member_location);
6567 CHECKSUM_ATTR (attrs.at_decimal_scale);
6568 CHECKSUM_ATTR (attrs.at_decimal_sign);
6569 CHECKSUM_ATTR (attrs.at_default_value);
6570 CHECKSUM_ATTR (attrs.at_digit_count);
6571 CHECKSUM_ATTR (attrs.at_discr);
6572 CHECKSUM_ATTR (attrs.at_discr_list);
6573 CHECKSUM_ATTR (attrs.at_discr_value);
6574 CHECKSUM_ATTR (attrs.at_encoding);
6575 CHECKSUM_ATTR (attrs.at_endianity);
6576 CHECKSUM_ATTR (attrs.at_explicit);
6577 CHECKSUM_ATTR (attrs.at_is_optional);
6578 CHECKSUM_ATTR (attrs.at_location);
6579 CHECKSUM_ATTR (attrs.at_lower_bound);
6580 CHECKSUM_ATTR (attrs.at_mutable);
6581 CHECKSUM_ATTR (attrs.at_ordering);
6582 CHECKSUM_ATTR (attrs.at_picture_string);
6583 CHECKSUM_ATTR (attrs.at_prototyped);
6584 CHECKSUM_ATTR (attrs.at_small);
6585 CHECKSUM_ATTR (attrs.at_segment);
6586 CHECKSUM_ATTR (attrs.at_string_length);
6587 CHECKSUM_ATTR (attrs.at_threads_scaled);
6588 CHECKSUM_ATTR (attrs.at_upper_bound);
6589 CHECKSUM_ATTR (attrs.at_use_location);
6590 CHECKSUM_ATTR (attrs.at_use_UTF8);
6591 CHECKSUM_ATTR (attrs.at_variable_parameter);
6592 CHECKSUM_ATTR (attrs.at_virtuality);
6593 CHECKSUM_ATTR (attrs.at_visibility);
6594 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
6595 CHECKSUM_ATTR (attrs.at_type);
6596 CHECKSUM_ATTR (attrs.at_friend);
6597
6598 /* Checksum the child DIEs. */
6599 c = die->die_child;
6600 if (c) do {
6601 dw_attr_node *name_attr;
6602
6603 c = c->die_sib;
6604 name_attr = get_AT (c, DW_AT_name);
6605 if (is_template_instantiation (c))
6606 {
6607 /* Ignore instantiations of member type and function templates. */
6608 }
6609 else if (name_attr != NULL
6610 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
6611 {
6612 /* Use a shallow checksum for named nested types and member
6613 functions. */
6614 CHECKSUM_ULEB128 ('S');
6615 CHECKSUM_ULEB128 (c->die_tag);
6616 CHECKSUM_STRING (AT_string (name_attr));
6617 }
6618 else
6619 {
6620 /* Use a deep checksum for other children. */
6621 /* Mark this DIE so it gets processed when unmarking. */
6622 if (c->die_mark == 0)
6623 c->die_mark = -1;
6624 die_checksum_ordered (c, ctx, mark);
6625 }
6626 } while (c != die->die_child);
6627
6628 CHECKSUM_ULEB128 (0);
6629 }
6630
6631 /* Add a type name and tag to a hash. */
6632 static void
6633 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
6634 {
6635 CHECKSUM_ULEB128 (tag);
6636 CHECKSUM_STRING (name);
6637 }
6638
6639 #undef CHECKSUM
6640 #undef CHECKSUM_STRING
6641 #undef CHECKSUM_ATTR
6642 #undef CHECKSUM_SLEB128
6643 #undef CHECKSUM_ULEB128
6644
6645 /* Generate the type signature for DIE. This is computed by generating an
6646 MD5 checksum over the DIE's tag, its relevant attributes, and its
6647 children. Attributes that are references to other DIEs are processed
6648 by recursion, using the MARK field to prevent infinite recursion.
6649 If the DIE is nested inside a namespace or another type, we also
6650 need to include that context in the signature. The lower 64 bits
6651 of the resulting MD5 checksum comprise the signature. */
6652
6653 static void
6654 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
6655 {
6656 int mark;
6657 const char *name;
6658 unsigned char checksum[16];
6659 struct md5_ctx ctx;
6660 dw_die_ref decl;
6661 dw_die_ref parent;
6662
6663 name = get_AT_string (die, DW_AT_name);
6664 decl = get_AT_ref (die, DW_AT_specification);
6665 parent = get_die_parent (die);
6666
6667 /* First, compute a signature for just the type name (and its surrounding
6668 context, if any). This is stored in the type unit DIE for link-time
6669 ODR (one-definition rule) checking. */
6670
6671 if (is_cxx () && name != NULL)
6672 {
6673 md5_init_ctx (&ctx);
6674
6675 /* Checksum the names of surrounding namespaces and structures. */
6676 if (parent != NULL)
6677 checksum_die_context (parent, &ctx);
6678
6679 /* Checksum the current DIE. */
6680 die_odr_checksum (die->die_tag, name, &ctx);
6681 md5_finish_ctx (&ctx, checksum);
6682
6683 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
6684 }
6685
6686 /* Next, compute the complete type signature. */
6687
6688 md5_init_ctx (&ctx);
6689 mark = 1;
6690 die->die_mark = mark;
6691
6692 /* Checksum the names of surrounding namespaces and structures. */
6693 if (parent != NULL)
6694 checksum_die_context (parent, &ctx);
6695
6696 /* Checksum the DIE and its children. */
6697 die_checksum_ordered (die, &ctx, &mark);
6698 unmark_all_dies (die);
6699 md5_finish_ctx (&ctx, checksum);
6700
6701 /* Store the signature in the type node and link the type DIE and the
6702 type node together. */
6703 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
6704 DWARF_TYPE_SIGNATURE_SIZE);
6705 die->comdat_type_p = true;
6706 die->die_id.die_type_node = type_node;
6707 type_node->type_die = die;
6708
6709 /* If the DIE is a specification, link its declaration to the type node
6710 as well. */
6711 if (decl != NULL)
6712 {
6713 decl->comdat_type_p = true;
6714 decl->die_id.die_type_node = type_node;
6715 }
6716 }
6717
6718 /* Do the location expressions look the same? */
6719 static inline int
6720 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
6721 {
6722 return loc1->dw_loc_opc == loc2->dw_loc_opc
6723 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
6724 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
6725 }
6726
6727 /* Do the values look the same? */
6728 static int
6729 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
6730 {
6731 dw_loc_descr_ref loc1, loc2;
6732 rtx r1, r2;
6733
6734 if (v1->val_class != v2->val_class)
6735 return 0;
6736
6737 switch (v1->val_class)
6738 {
6739 case dw_val_class_const:
6740 return v1->v.val_int == v2->v.val_int;
6741 case dw_val_class_unsigned_const:
6742 return v1->v.val_unsigned == v2->v.val_unsigned;
6743 case dw_val_class_const_double:
6744 return v1->v.val_double.high == v2->v.val_double.high
6745 && v1->v.val_double.low == v2->v.val_double.low;
6746 case dw_val_class_wide_int:
6747 return *v1->v.val_wide == *v2->v.val_wide;
6748 case dw_val_class_vec:
6749 if (v1->v.val_vec.length != v2->v.val_vec.length
6750 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
6751 return 0;
6752 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
6753 v1->v.val_vec.length * v1->v.val_vec.elt_size))
6754 return 0;
6755 return 1;
6756 case dw_val_class_flag:
6757 return v1->v.val_flag == v2->v.val_flag;
6758 case dw_val_class_str:
6759 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
6760
6761 case dw_val_class_addr:
6762 r1 = v1->v.val_addr;
6763 r2 = v2->v.val_addr;
6764 if (GET_CODE (r1) != GET_CODE (r2))
6765 return 0;
6766 return rtx_equal_p (r1, r2);
6767
6768 case dw_val_class_offset:
6769 return v1->v.val_offset == v2->v.val_offset;
6770
6771 case dw_val_class_loc:
6772 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
6773 loc1 && loc2;
6774 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
6775 if (!same_loc_p (loc1, loc2, mark))
6776 return 0;
6777 return !loc1 && !loc2;
6778
6779 case dw_val_class_die_ref:
6780 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
6781
6782 case dw_val_class_fde_ref:
6783 case dw_val_class_vms_delta:
6784 case dw_val_class_lbl_id:
6785 case dw_val_class_lineptr:
6786 case dw_val_class_macptr:
6787 case dw_val_class_high_pc:
6788 return 1;
6789
6790 case dw_val_class_file:
6791 return v1->v.val_file == v2->v.val_file;
6792
6793 case dw_val_class_data8:
6794 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
6795
6796 default:
6797 return 1;
6798 }
6799 }
6800
6801 /* Do the attributes look the same? */
6802
6803 static int
6804 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
6805 {
6806 if (at1->dw_attr != at2->dw_attr)
6807 return 0;
6808
6809 /* We don't care that this was compiled with a different compiler
6810 snapshot; if the output is the same, that's what matters. */
6811 if (at1->dw_attr == DW_AT_producer)
6812 return 1;
6813
6814 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
6815 }
6816
6817 /* Do the dies look the same? */
6818
6819 static int
6820 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
6821 {
6822 dw_die_ref c1, c2;
6823 dw_attr_node *a1;
6824 unsigned ix;
6825
6826 /* To avoid infinite recursion. */
6827 if (die1->die_mark)
6828 return die1->die_mark == die2->die_mark;
6829 die1->die_mark = die2->die_mark = ++(*mark);
6830
6831 if (die1->die_tag != die2->die_tag)
6832 return 0;
6833
6834 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
6835 return 0;
6836
6837 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
6838 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
6839 return 0;
6840
6841 c1 = die1->die_child;
6842 c2 = die2->die_child;
6843 if (! c1)
6844 {
6845 if (c2)
6846 return 0;
6847 }
6848 else
6849 for (;;)
6850 {
6851 if (!same_die_p (c1, c2, mark))
6852 return 0;
6853 c1 = c1->die_sib;
6854 c2 = c2->die_sib;
6855 if (c1 == die1->die_child)
6856 {
6857 if (c2 == die2->die_child)
6858 break;
6859 else
6860 return 0;
6861 }
6862 }
6863
6864 return 1;
6865 }
6866
6867 /* Do the dies look the same? Wrapper around same_die_p. */
6868
6869 static int
6870 same_die_p_wrap (dw_die_ref die1, dw_die_ref die2)
6871 {
6872 int mark = 0;
6873 int ret = same_die_p (die1, die2, &mark);
6874
6875 unmark_all_dies (die1);
6876 unmark_all_dies (die2);
6877
6878 return ret;
6879 }
6880
6881 /* The prefix to attach to symbols on DIEs in the current comdat debug
6882 info section. */
6883 static const char *comdat_symbol_id;
6884
6885 /* The index of the current symbol within the current comdat CU. */
6886 static unsigned int comdat_symbol_number;
6887
6888 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
6889 children, and set comdat_symbol_id accordingly. */
6890
6891 static void
6892 compute_section_prefix (dw_die_ref unit_die)
6893 {
6894 const char *die_name = get_AT_string (unit_die, DW_AT_name);
6895 const char *base = die_name ? lbasename (die_name) : "anonymous";
6896 char *name = XALLOCAVEC (char, strlen (base) + 64);
6897 char *p;
6898 int i, mark;
6899 unsigned char checksum[16];
6900 struct md5_ctx ctx;
6901
6902 /* Compute the checksum of the DIE, then append part of it as hex digits to
6903 the base filename of the unit. */
6904
6905 md5_init_ctx (&ctx);
6906 mark = 0;
6907 die_checksum (unit_die, &ctx, &mark);
6908 unmark_all_dies (unit_die);
6909 md5_finish_ctx (&ctx, checksum);
6910
6911 sprintf (name, "%s.", base);
6912 clean_symbol_name (name);
6913
6914 p = name + strlen (name);
6915 for (i = 0; i < 4; i++)
6916 {
6917 sprintf (p, "%.2x", checksum[i]);
6918 p += 2;
6919 }
6920
6921 comdat_symbol_id = unit_die->die_id.die_symbol = xstrdup (name);
6922 comdat_symbol_number = 0;
6923 }
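/* Sketch of the resulting names (illustrative; this assumes
   clean_symbol_name rewrites characters that are not valid in assembler
   symbols to '_'): for a unit whose DW_AT_name ends in "foo.c",
   comdat_symbol_id becomes "foo_c_" followed by eight checksum hex digits,
   e.g. "foo_c_1a2b3c4d", and assign_symbol_names below then labels each
   worthy DIE "<DIE_LABEL_PREFIX>.<comdat_symbol_id>.<hex index>".  */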
6924
6925 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
6926
6927 static int
6928 is_type_die (dw_die_ref die)
6929 {
6930 switch (die->die_tag)
6931 {
6932 case DW_TAG_array_type:
6933 case DW_TAG_class_type:
6934 case DW_TAG_interface_type:
6935 case DW_TAG_enumeration_type:
6936 case DW_TAG_pointer_type:
6937 case DW_TAG_reference_type:
6938 case DW_TAG_rvalue_reference_type:
6939 case DW_TAG_string_type:
6940 case DW_TAG_structure_type:
6941 case DW_TAG_subroutine_type:
6942 case DW_TAG_union_type:
6943 case DW_TAG_ptr_to_member_type:
6944 case DW_TAG_set_type:
6945 case DW_TAG_subrange_type:
6946 case DW_TAG_base_type:
6947 case DW_TAG_const_type:
6948 case DW_TAG_file_type:
6949 case DW_TAG_packed_type:
6950 case DW_TAG_volatile_type:
6951 case DW_TAG_typedef:
6952 return 1;
6953 default:
6954 return 0;
6955 }
6956 }
6957
6958 /* Returns 1 iff C is the sort of DIE that should go into a COMDAT CU.
6959 Basically, we want to choose the bits that are likely to be shared between
6960 compilations (types) and leave out the bits that are specific to individual
6961 compilations (functions). */
6962
6963 static int
6964 is_comdat_die (dw_die_ref c)
6965 {
6966 /* I think we want to leave base types and __vtbl_ptr_type in the main CU, as
6967 we do for stabs. The advantage is a greater likelihood of sharing between
6968 objects that don't include headers in the same order (and therefore would
6969 put the base types in a different comdat). jason 8/28/00 */
6970
6971 if (c->die_tag == DW_TAG_base_type)
6972 return 0;
6973
6974 if (c->die_tag == DW_TAG_pointer_type
6975 || c->die_tag == DW_TAG_reference_type
6976 || c->die_tag == DW_TAG_rvalue_reference_type
6977 || c->die_tag == DW_TAG_const_type
6978 || c->die_tag == DW_TAG_volatile_type)
6979 {
6980 dw_die_ref t = get_AT_ref (c, DW_AT_type);
6981
6982 return t ? is_comdat_die (t) : 0;
6983 }
6984
6985 return is_type_die (c);
6986 }
6987
6988 /* Returns 1 iff C is the sort of DIE that might be referred to from another
6989 compilation unit. */
6990
6991 static int
6992 is_symbol_die (dw_die_ref c)
6993 {
6994 return (is_type_die (c)
6995 || is_declaration_die (c)
6996 || c->die_tag == DW_TAG_namespace
6997 || c->die_tag == DW_TAG_module);
6998 }
6999
7000 /* Returns true iff C is a compile-unit DIE. */
7001
7002 static inline bool
7003 is_cu_die (dw_die_ref c)
7004 {
7005 return c && c->die_tag == DW_TAG_compile_unit;
7006 }
7007
7008 /* Returns true iff C is a unit DIE of some sort. */
7009
7010 static inline bool
7011 is_unit_die (dw_die_ref c)
7012 {
7013 return c && (c->die_tag == DW_TAG_compile_unit
7014 || c->die_tag == DW_TAG_partial_unit
7015 || c->die_tag == DW_TAG_type_unit);
7016 }
7017
7018 /* Returns true iff C is a namespace DIE. */
7019
7020 static inline bool
7021 is_namespace_die (dw_die_ref c)
7022 {
7023 return c && c->die_tag == DW_TAG_namespace;
7024 }
7025
7026 /* Returns true iff C is a class or structure DIE. */
7027
7028 static inline bool
7029 is_class_die (dw_die_ref c)
7030 {
7031 return c && (c->die_tag == DW_TAG_class_type
7032 || c->die_tag == DW_TAG_structure_type);
7033 }
7034
7035 /* Return non-zero if this DIE is a template parameter. */
7036
7037 static inline bool
7038 is_template_parameter (dw_die_ref die)
7039 {
7040 switch (die->die_tag)
7041 {
7042 case DW_TAG_template_type_param:
7043 case DW_TAG_template_value_param:
7044 case DW_TAG_GNU_template_template_param:
7045 case DW_TAG_GNU_template_parameter_pack:
7046 return true;
7047 default:
7048 return false;
7049 }
7050 }
7051
7052 /* Return non-zero if this DIE represents a template instantiation. */
7053
7054 static inline bool
7055 is_template_instantiation (dw_die_ref die)
7056 {
7057 dw_die_ref c;
7058
7059 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7060 return false;
7061 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7062 return false;
7063 }
7064
7065 static char *
7066 gen_internal_sym (const char *prefix)
7067 {
7068 char buf[256];
7069
7070 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7071 return xstrdup (buf);
7072 }
7073
7074 /* Assign symbols to all worthy DIEs under DIE. */
7075
7076 static void
7077 assign_symbol_names (dw_die_ref die)
7078 {
7079 dw_die_ref c;
7080
7081 if (is_symbol_die (die) && !die->comdat_type_p)
7082 {
7083 if (comdat_symbol_id)
7084 {
7085 char *p = XALLOCAVEC (char, strlen (comdat_symbol_id) + 64);
7086
7087 sprintf (p, "%s.%s.%x", DIE_LABEL_PREFIX,
7088 comdat_symbol_id, comdat_symbol_number++);
7089 die->die_id.die_symbol = xstrdup (p);
7090 }
7091 else
7092 die->die_id.die_symbol = gen_internal_sym ("LDIE");
7093 }
7094
7095 FOR_EACH_CHILD (die, c, assign_symbol_names (c));
7096 }
7097
7098 struct cu_hash_table_entry
7099 {
7100 dw_die_ref cu;
7101 unsigned min_comdat_num, max_comdat_num;
7102 struct cu_hash_table_entry *next;
7103 };
7104
7105 /* Helpers to manipulate hash table of CUs. */
7106
7107 struct cu_hash_table_entry_hasher : pointer_hash <cu_hash_table_entry>
7108 {
7109 typedef die_struct *compare_type;
7110 static inline hashval_t hash (const cu_hash_table_entry *);
7111 static inline bool equal (const cu_hash_table_entry *, const die_struct *);
7112 static inline void remove (cu_hash_table_entry *);
7113 };
7114
7115 inline hashval_t
7116 cu_hash_table_entry_hasher::hash (const cu_hash_table_entry *entry)
7117 {
7118 return htab_hash_string (entry->cu->die_id.die_symbol);
7119 }
7120
7121 inline bool
7122 cu_hash_table_entry_hasher::equal (const cu_hash_table_entry *entry1,
7123 const die_struct *entry2)
7124 {
7125 return !strcmp (entry1->cu->die_id.die_symbol, entry2->die_id.die_symbol);
7126 }
7127
7128 inline void
7129 cu_hash_table_entry_hasher::remove (cu_hash_table_entry *entry)
7130 {
7131 struct cu_hash_table_entry *next;
7132
7133 while (entry)
7134 {
7135 next = entry->next;
7136 free (entry);
7137 entry = next;
7138 }
7139 }
7140
7141 typedef hash_table<cu_hash_table_entry_hasher> cu_hash_type;
7142
7143 /* Check whether we have already seen this CU and set up SYM_NUM
7144 accordingly. */
7145 static int
7146 check_duplicate_cu (dw_die_ref cu, cu_hash_type *htable, unsigned int *sym_num)
7147 {
7148 struct cu_hash_table_entry dummy;
7149 struct cu_hash_table_entry **slot, *entry, *last = &dummy;
7150
7151 dummy.max_comdat_num = 0;
7152
7153 slot = htable->find_slot_with_hash (cu,
7154 htab_hash_string (cu->die_id.die_symbol),
7155 INSERT);
7156 entry = *slot;
7157
7158 for (; entry; last = entry, entry = entry->next)
7159 {
7160 if (same_die_p_wrap (cu, entry->cu))
7161 break;
7162 }
7163
7164 if (entry)
7165 {
7166 *sym_num = entry->min_comdat_num;
7167 return 1;
7168 }
7169
7170 entry = XCNEW (struct cu_hash_table_entry);
7171 entry->cu = cu;
7172 entry->min_comdat_num = *sym_num = last->max_comdat_num;
7173 entry->next = *slot;
7174 *slot = entry;
7175
7176 return 0;
7177 }
7178
7179 /* Record SYM_NUM in the hash table entry for CU in HTABLE. */
7180 static void
7181 record_comdat_symbol_number (dw_die_ref cu, cu_hash_type *htable,
7182 unsigned int sym_num)
7183 {
7184 struct cu_hash_table_entry **slot, *entry;
7185
7186 slot = htable->find_slot_with_hash (cu,
7187 htab_hash_string (cu->die_id.die_symbol),
7188 NO_INSERT);
7189 entry = *slot;
7190
7191 entry->max_comdat_num = sym_num;
7192 }
7193
7194 /* Traverse the DIE (which is always comp_unit_die), and set up
7195 additional compilation units for each of the include files we see
7196 bracketed by BINCL/EINCL. */
7197
7198 static void
7199 break_out_includes (dw_die_ref die)
7200 {
7201 dw_die_ref c;
7202 dw_die_ref unit = NULL;
7203 limbo_die_node *node, **pnode;
7204
7205 c = die->die_child;
7206 if (c) do {
7207 dw_die_ref prev = c;
7208 c = c->die_sib;
7209 while (c->die_tag == DW_TAG_GNU_BINCL || c->die_tag == DW_TAG_GNU_EINCL
7210 || (unit && is_comdat_die (c)))
7211 {
7212 dw_die_ref next = c->die_sib;
7213
7214 /* This DIE is for a secondary CU; remove it from the main one. */
7215 remove_child_with_prev (c, prev);
7216
7217 if (c->die_tag == DW_TAG_GNU_BINCL)
7218 unit = push_new_compile_unit (unit, c);
7219 else if (c->die_tag == DW_TAG_GNU_EINCL)
7220 unit = pop_compile_unit (unit);
7221 else
7222 add_child_die (unit, c);
7223 c = next;
7224 if (c == die->die_child)
7225 break;
7226 }
7227 } while (c != die->die_child);
7228
7229 #if 0
7230 /* We can only use this in debugging, since the frontend doesn't check
7231 to make sure that we leave every include file we enter. */
7232 gcc_assert (!unit);
7233 #endif
7234
7235 assign_symbol_names (die);
7236 cu_hash_type cu_hash_table (10);
7237 for (node = limbo_die_list, pnode = &limbo_die_list;
7238 node;
7239 node = node->next)
7240 {
7241 int is_dupl;
7242
7243 compute_section_prefix (node->die);
7244 is_dupl = check_duplicate_cu (node->die, &cu_hash_table,
7245 &comdat_symbol_number);
7246 assign_symbol_names (node->die);
7247 if (is_dupl)
7248 *pnode = node->next;
7249 else
7250 {
7251 pnode = &node->next;
7252 record_comdat_symbol_number (node->die, &cu_hash_table,
7253 comdat_symbol_number);
7254 }
7255 }
7256 }
7257
7258 /* Return non-zero if this DIE is a declaration. */
7259
7260 static int
7261 is_declaration_die (dw_die_ref die)
7262 {
7263 dw_attr_node *a;
7264 unsigned ix;
7265
7266 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7267 if (a->dw_attr == DW_AT_declaration)
7268 return 1;
7269
7270 return 0;
7271 }
7272
7273 /* Return non-zero if this DIE is nested inside a subprogram. */
7274
7275 static int
7276 is_nested_in_subprogram (dw_die_ref die)
7277 {
7278 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
7279
7280 if (decl == NULL)
7281 decl = die;
7282 return local_scope_p (decl);
7283 }
7284
7285 /* Return non-zero if this DIE contains a defining declaration of a
7286 subprogram. */
7287
7288 static int
7289 contains_subprogram_definition (dw_die_ref die)
7290 {
7291 dw_die_ref c;
7292
7293 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
7294 return 1;
7295 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
7296 return 0;
7297 }
7298
7299 /* Return non-zero if this is a type DIE that should be moved to a
7300 COMDAT .debug_types section. */
7301
7302 static int
7303 should_move_die_to_comdat (dw_die_ref die)
7304 {
7305 switch (die->die_tag)
7306 {
7307 case DW_TAG_class_type:
7308 case DW_TAG_structure_type:
7309 case DW_TAG_enumeration_type:
7310 case DW_TAG_union_type:
7311 /* Don't move declarations, inlined instances, types nested in a
7312 subprogram, or types that contain subprogram definitions. */
7313 if (is_declaration_die (die)
7314 || get_AT (die, DW_AT_abstract_origin)
7315 || is_nested_in_subprogram (die)
7316 || contains_subprogram_definition (die))
7317 return 0;
7318 return 1;
7319 case DW_TAG_array_type:
7320 case DW_TAG_interface_type:
7321 case DW_TAG_pointer_type:
7322 case DW_TAG_reference_type:
7323 case DW_TAG_rvalue_reference_type:
7324 case DW_TAG_string_type:
7325 case DW_TAG_subroutine_type:
7326 case DW_TAG_ptr_to_member_type:
7327 case DW_TAG_set_type:
7328 case DW_TAG_subrange_type:
7329 case DW_TAG_base_type:
7330 case DW_TAG_const_type:
7331 case DW_TAG_file_type:
7332 case DW_TAG_packed_type:
7333 case DW_TAG_volatile_type:
7334 case DW_TAG_typedef:
7335 default:
7336 return 0;
7337 }
7338 }
7339
7340 /* Make a clone of DIE. */
7341
7342 static dw_die_ref
7343 clone_die (dw_die_ref die)
7344 {
7345 dw_die_ref clone;
7346 dw_attr_node *a;
7347 unsigned ix;
7348
7349 clone = ggc_cleared_alloc<die_node> ();
7350 clone->die_tag = die->die_tag;
7351
7352 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7353 add_dwarf_attr (clone, a);
7354
7355 return clone;
7356 }
7357
7358 /* Make a clone of the tree rooted at DIE. */
7359
7360 static dw_die_ref
7361 clone_tree (dw_die_ref die)
7362 {
7363 dw_die_ref c;
7364 dw_die_ref clone = clone_die (die);
7365
7366 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
7367
7368 return clone;
7369 }
7370
7371 /* Make a clone of DIE as a declaration. */
7372
7373 static dw_die_ref
7374 clone_as_declaration (dw_die_ref die)
7375 {
7376 dw_die_ref clone;
7377 dw_die_ref decl;
7378 dw_attr_node *a;
7379 unsigned ix;
7380
7381 /* If the DIE is already a declaration, just clone it. */
7382 if (is_declaration_die (die))
7383 return clone_die (die);
7384
7385 /* If the DIE is a specification, just clone its declaration DIE. */
7386 decl = get_AT_ref (die, DW_AT_specification);
7387 if (decl != NULL)
7388 {
7389 clone = clone_die (decl);
7390 if (die->comdat_type_p)
7391 add_AT_die_ref (clone, DW_AT_signature, die);
7392 return clone;
7393 }
7394
7395 clone = ggc_cleared_alloc<die_node> ();
7396 clone->die_tag = die->die_tag;
7397
7398 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7399 {
7400 /* We don't want to copy over all attributes.
7401 For example we don't want DW_AT_byte_size because otherwise we will no
7402 longer have a declaration and GDB will treat it as a definition. */
7403
7404 switch (a->dw_attr)
7405 {
7406 case DW_AT_abstract_origin:
7407 case DW_AT_artificial:
7408 case DW_AT_containing_type:
7409 case DW_AT_external:
7410 case DW_AT_name:
7411 case DW_AT_type:
7412 case DW_AT_virtuality:
7413 case DW_AT_linkage_name:
7414 case DW_AT_MIPS_linkage_name:
7415 add_dwarf_attr (clone, a);
7416 break;
7417 case DW_AT_byte_size:
7418 default:
7419 break;
7420 }
7421 }
7422
7423 if (die->comdat_type_p)
7424 add_AT_die_ref (clone, DW_AT_signature, die);
7425
7426 add_AT_flag (clone, DW_AT_declaration, 1);
7427 return clone;
7428 }
7429
7430
7431 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
7432
7433 struct decl_table_entry
7434 {
7435 dw_die_ref orig;
7436 dw_die_ref copy;
7437 };
7438
7439 /* Helpers to manipulate hash table of copied declarations. */
7440
7441 /* Hashtable helpers. */
7442
7443 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
7444 {
7445 typedef die_struct *compare_type;
7446 static inline hashval_t hash (const decl_table_entry *);
7447 static inline bool equal (const decl_table_entry *, const die_struct *);
7448 };
7449
7450 inline hashval_t
7451 decl_table_entry_hasher::hash (const decl_table_entry *entry)
7452 {
7453 return htab_hash_pointer (entry->orig);
7454 }
7455
7456 inline bool
7457 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
7458 const die_struct *entry2)
7459 {
7460 return entry1->orig == entry2;
7461 }
7462
7463 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
7464
7465 /* Copy DIE and its ancestors, up to, but not including, the compile unit
7466 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
7467 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
7468 to check if the ancestor has already been copied into UNIT. */
7469
7470 static dw_die_ref
7471 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
7472 decl_hash_type *decl_table)
7473 {
7474 dw_die_ref parent = die->die_parent;
7475 dw_die_ref new_parent = unit;
7476 dw_die_ref copy;
7477 decl_table_entry **slot = NULL;
7478 struct decl_table_entry *entry = NULL;
7479
7480 if (decl_table)
7481 {
7482 /* Check if the entry has already been copied to UNIT. */
7483 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
7484 INSERT);
7485 if (*slot != HTAB_EMPTY_ENTRY)
7486 {
7487 entry = *slot;
7488 return entry->copy;
7489 }
7490
7491 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
7492 entry = XCNEW (struct decl_table_entry);
7493 entry->orig = die;
7494 entry->copy = NULL;
7495 *slot = entry;
7496 }
7497
7498 if (parent != NULL)
7499 {
7500 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
7501 if (spec != NULL)
7502 parent = spec;
7503 if (!is_unit_die (parent))
7504 new_parent = copy_ancestor_tree (unit, parent, decl_table);
7505 }
7506
7507 copy = clone_as_declaration (die);
7508 add_child_die (new_parent, copy);
7509
7510 if (decl_table)
7511 {
7512 /* Record the pointer to the copy. */
7513 entry->copy = copy;
7514 }
7515
7516 return copy;
7517 }
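
/* Sketch of the effect, with hypothetical names: for a DIE describing
   class C nested in namespace N, copy_ancestor_tree (UNIT, die-of-C, table)
   builds declaration clones

       UNIT
         DW_TAG_namespace N     (clone_as_declaration)
           DW_TAG_class_type C  (clone_as_declaration)

   under UNIT and returns the clone of C, so callers can attach further
   copies in the right context inside the new type unit.  */
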
7518 /* Copy the declaration context to the new type unit DIE. This includes
7519 any surrounding namespace or type declarations. If the DIE has an
7520 AT_specification attribute, it also includes attributes and children
7521 attached to the specification, and returns a pointer to the original
7522 parent of the declaration DIE. Returns NULL otherwise. */
7523
7524 static dw_die_ref
7525 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
7526 {
7527 dw_die_ref decl;
7528 dw_die_ref new_decl;
7529 dw_die_ref orig_parent = NULL;
7530
7531 decl = get_AT_ref (die, DW_AT_specification);
7532 if (decl == NULL)
7533 decl = die;
7534 else
7535 {
7536 unsigned ix;
7537 dw_die_ref c;
7538 dw_attr_node *a;
7539
7540 /* The original DIE will be changed to a declaration, and must
7541 be moved to be a child of the original declaration DIE. */
7542 orig_parent = decl->die_parent;
7543
7544 /* Copy the type node pointer from the new DIE to the original
7545 declaration DIE so we can forward references later. */
7546 decl->comdat_type_p = true;
7547 decl->die_id.die_type_node = die->die_id.die_type_node;
7548
7549 remove_AT (die, DW_AT_specification);
7550
7551 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
7552 {
7553 if (a->dw_attr != DW_AT_name
7554 && a->dw_attr != DW_AT_declaration
7555 && a->dw_attr != DW_AT_external)
7556 add_dwarf_attr (die, a);
7557 }
7558
7559 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
7560 }
7561
7562 if (decl->die_parent != NULL
7563 && !is_unit_die (decl->die_parent))
7564 {
7565 new_decl = copy_ancestor_tree (unit, decl, NULL);
7566 if (new_decl != NULL)
7567 {
7568 remove_AT (new_decl, DW_AT_signature);
7569 add_AT_specification (die, new_decl);
7570 }
7571 }
7572
7573 return orig_parent;
7574 }
7575
7576 /* Generate the skeleton ancestor tree for the given NODE, then clone
7577 the DIE and add the clone into the tree. */
7578
7579 static void
7580 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
7581 {
7582 if (node->new_die != NULL)
7583 return;
7584
7585 node->new_die = clone_as_declaration (node->old_die);
7586
7587 if (node->parent != NULL)
7588 {
7589 generate_skeleton_ancestor_tree (node->parent);
7590 add_child_die (node->parent->new_die, node->new_die);
7591 }
7592 }
7593
7594 /* Generate a skeleton tree of DIEs containing any declarations that are
7595 found in the original tree. We traverse the tree looking for declaration
7596 DIEs, and construct the skeleton from the bottom up whenever we find one. */
7597
7598 static void
7599 generate_skeleton_bottom_up (skeleton_chain_node *parent)
7600 {
7601 skeleton_chain_node node;
7602 dw_die_ref c;
7603 dw_die_ref first;
7604 dw_die_ref prev = NULL;
7605 dw_die_ref next = NULL;
7606
7607 node.parent = parent;
7608
7609 first = c = parent->old_die->die_child;
7610 if (c)
7611 next = c->die_sib;
7612 if (c) do {
7613 if (prev == NULL || prev->die_sib == c)
7614 prev = c;
7615 c = next;
7616 next = (c == first ? NULL : c->die_sib);
7617 node.old_die = c;
7618 node.new_die = NULL;
7619 if (is_declaration_die (c))
7620 {
7621 if (is_template_instantiation (c))
7622 {
7623 /* Instantiated templates do not need to be cloned into the
7624 type unit. Just move the DIE and its children back to
7625 the skeleton tree (in the main CU). */
7626 remove_child_with_prev (c, prev);
7627 add_child_die (parent->new_die, c);
7628 c = prev;
7629 }
7630 else
7631 {
7632 /* Clone the existing DIE, move the original to the skeleton
7633 tree (which is in the main CU), and put the clone, with
7634 all the original's children, where the original came from
7635 (which is about to be moved to the type unit). */
7636 dw_die_ref clone = clone_die (c);
7637 move_all_children (c, clone);
7638
7639 /* If the original has a DW_AT_object_pointer attribute,
7640 it would now point to a child DIE just moved to the
7641 cloned tree, so we need to remove that attribute from
7642 the original. */
7643 remove_AT (c, DW_AT_object_pointer);
7644
7645 replace_child (c, clone, prev);
7646 generate_skeleton_ancestor_tree (parent);
7647 add_child_die (parent->new_die, c);
7648 node.new_die = c;
7649 c = clone;
7650 }
7651 }
7652 generate_skeleton_bottom_up (&node);
7653 } while (next != NULL);
7654 }
7655
7656 /* Wrapper function for generate_skeleton_bottom_up. */
7657
7658 static dw_die_ref
7659 generate_skeleton (dw_die_ref die)
7660 {
7661 skeleton_chain_node node;
7662
7663 node.old_die = die;
7664 node.new_die = NULL;
7665 node.parent = NULL;
7666
7667 /* If this type definition is nested inside another type,
7668 and is not an instantiation of a template, always leave
7669 at least a declaration in its place. */
7670 if (die->die_parent != NULL
7671 && is_type_die (die->die_parent)
7672 && !is_template_instantiation (die))
7673 node.new_die = clone_as_declaration (die);
7674
7675 generate_skeleton_bottom_up (&node);
7676 return node.new_die;
7677 }
7678
7679 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
7680 declaration. The original DIE is moved to a new compile unit so that
7681 existing references to it follow it to the new location. If any of the
7682 original DIE's descendants is a declaration, we need to replace the
7683 original DIE with a skeleton tree and move the declarations back into the
7684 skeleton tree. */
7685
7686 static dw_die_ref
7687 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
7688 dw_die_ref prev)
7689 {
7690 dw_die_ref skeleton, orig_parent;
7691
7692 /* Copy the declaration context to the type unit DIE. If the returned
7693 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
7694 that DIE. */
7695 orig_parent = copy_declaration_context (unit, child);
7696
7697 skeleton = generate_skeleton (child);
7698 if (skeleton == NULL)
7699 remove_child_with_prev (child, prev);
7700 else
7701 {
7702 skeleton->comdat_type_p = true;
7703 skeleton->die_id.die_type_node = child->die_id.die_type_node;
7704
7705 /* If the original DIE was a specification, we need to put
7706 the skeleton under the parent DIE of the declaration.
7707 This leaves the original declaration in the tree, but
7708 it will be pruned later since there are no longer any
7709 references to it. */
7710 if (orig_parent != NULL)
7711 {
7712 remove_child_with_prev (child, prev);
7713 add_child_die (orig_parent, skeleton);
7714 }
7715 else
7716 replace_child (child, skeleton, prev);
7717 }
7718
7719 return skeleton;
7720 }
7721
7722 static void
7723 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
7724 comdat_type_node *type_node,
7725 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
7726
7727 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
7728 procedure, put it under TYPE_NODE and return the copy. Continue looking for
7729 DWARF procedure references in the DW_AT_location attribute. */
7730
7731 static dw_die_ref
7732 copy_dwarf_procedure (dw_die_ref die,
7733 comdat_type_node *type_node,
7734 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
7735 {
7736 /* We do this for COMDAT sections, which are DWARFv4 specific, so
7737 DWARF procedures are always DW_TAG_dwarf_procedure DIEs (unlike
7738 DW_TAG_variable in DWARFv3). */
7739 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
7740
7741 /* DWARF procedures are not supposed to have children... */
7742 gcc_assert (die->die_child == NULL);
7743
7744 /* ... and they are supposed to have only one attribute: DW_AT_location. */
7745 gcc_assert (vec_safe_length (die->die_attr) == 1
7746 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
7747
7748 /* Do not copy a DWARF procedure more than once. */
7749 bool existed;
7750 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
7751 if (existed)
7752 return die_copy;
7753
7754 die_copy = clone_die (die);
7755 add_child_die (type_node->root_die, die_copy);
7756 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
7757 return die_copy;
7758 }
7759
7760 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
7761 procedures in DIE's attributes. */
7762
7763 static void
7764 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
7765 comdat_type_node *type_node,
7766 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
7767 {
7768 dw_attr_node *a;
7769 unsigned i;
7770
7771 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
7772 {
7773 dw_loc_descr_ref loc;
7774
7775 if (a->dw_attr_val.val_class != dw_val_class_loc)
7776 continue;
7777
7778 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
7779 {
7780 switch (loc->dw_loc_opc)
7781 {
7782 case DW_OP_call2:
7783 case DW_OP_call4:
7784 case DW_OP_call_ref:
7785 gcc_assert (loc->dw_loc_oprnd1.val_class
7786 == dw_val_class_die_ref);
7787 loc->dw_loc_oprnd1.v.val_die_ref.die
7788 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
7789 type_node,
7790 copied_dwarf_procs);
7791
7792 default:
7793 break;
7794 }
7795 }
7796 }
7797 }
7798
7799 /* Copy DWARF procedures that are referenced by the DIE tree to TYPE_NODE and
7800 rewrite references to point to the copies.
7801 
7802 References are looked for in DIE's attributes and, recursively, in the
7803 attributes of all its children that are location descriptions.
7804 COPIED_DWARF_PROCS maps old DWARF procedures to their copies; it is used
7805 to avoid copying the same DWARF procedure twice under TYPE_NODE. */
7806
7807 static void
7808 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
7809 comdat_type_node *type_node,
7810 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
7811 {
7812 dw_die_ref c;
7813
7814 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
7815 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
7816 type_node,
7817 copied_dwarf_procs));
7818 }
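
/* Hypothetical example of what this fixes up: a type moved to a
   .debug_types unit may carry an attribute such as

       DW_AT_upper_bound: DW_OP_call4 <ref to a DW_TAG_dwarf_procedure P>

   where P still lives in the main .debug_info unit.  A DWARF expression
   cannot call into another section, so P is cloned under TYPE_NODE and the
   DW_OP_call4 operand is redirected to the clone; COPIED_DWARF_PROCS
   ensures P is cloned only once no matter how many attributes refer to
   it.  */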
7819
7820 /* Traverse the DIE and set up additional .debug_types sections for each
7821 type worthy of being placed in a COMDAT section. */
7822
7823 static void
7824 break_out_comdat_types (dw_die_ref die)
7825 {
7826 dw_die_ref c;
7827 dw_die_ref first;
7828 dw_die_ref prev = NULL;
7829 dw_die_ref next = NULL;
7830 dw_die_ref unit = NULL;
7831
7832 first = c = die->die_child;
7833 if (c)
7834 next = c->die_sib;
7835 if (c) do {
7836 if (prev == NULL || prev->die_sib == c)
7837 prev = c;
7838 c = next;
7839 next = (c == first ? NULL : c->die_sib);
7840 if (should_move_die_to_comdat (c))
7841 {
7842 dw_die_ref replacement;
7843 comdat_type_node *type_node;
7844
7845 /* Break out nested types into their own type units. */
7846 break_out_comdat_types (c);
7847
7848 /* Create a new type unit DIE as the root for the new tree, and
7849 add it to the list of comdat types. */
7850 unit = new_die (DW_TAG_type_unit, NULL, NULL);
7851 add_AT_unsigned (unit, DW_AT_language,
7852 get_AT_unsigned (comp_unit_die (), DW_AT_language));
7853 type_node = ggc_cleared_alloc<comdat_type_node> ();
7854 type_node->root_die = unit;
7855 type_node->next = comdat_type_list;
7856 comdat_type_list = type_node;
7857
7858 /* Generate the type signature. */
7859 generate_type_signature (c, type_node);
7860
7861 /* Copy the declaration context, attributes, and children of the
7862 declaration into the new type unit DIE, then remove this DIE
7863 from the main CU (or replace it with a skeleton if necessary). */
7864 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
7865 type_node->skeleton_die = replacement;
7866
7867 /* Add the DIE to the new compunit. */
7868 add_child_die (unit, c);
7869
7870 /* Types can reference DWARF procedures for type size or data location
7871 expressions. Calls in DWARF expressions cannot target procedures
7872 that are not in the same section. So we must copy DWARF procedures
7873 along with this type and then rewrite references to them. */
7874 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
7875 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
7876
7877 if (replacement != NULL)
7878 c = replacement;
7879 }
7880 else if (c->die_tag == DW_TAG_namespace
7881 || c->die_tag == DW_TAG_class_type
7882 || c->die_tag == DW_TAG_structure_type
7883 || c->die_tag == DW_TAG_union_type)
7884 {
7885 /* Look for nested types that can be broken out. */
7886 break_out_comdat_types (c);
7887 }
7888 } while (next != NULL);
7889 }
7890
7891 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
7892 Enter all the cloned children into the hash table decl_table. */
7893
7894 static dw_die_ref
7895 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
7896 {
7897 dw_die_ref c;
7898 dw_die_ref clone;
7899 struct decl_table_entry *entry;
7900 decl_table_entry **slot;
7901
7902 if (die->die_tag == DW_TAG_subprogram)
7903 clone = clone_as_declaration (die);
7904 else
7905 clone = clone_die (die);
7906
7907 slot = decl_table->find_slot_with_hash (die,
7908 htab_hash_pointer (die), INSERT);
7909
7910 /* Assert that DIE isn't in the hash table yet. If it were already there,
7911 its ancestors would necessarily be there as well, and therefore
7912 clone_tree_partial wouldn't have been called. */
7913 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
7914
7915 entry = XCNEW (struct decl_table_entry);
7916 entry->orig = die;
7917 entry->copy = clone;
7918 *slot = entry;
7919
7920 if (die->die_tag != DW_TAG_subprogram)
7921 FOR_EACH_CHILD (die, c,
7922 add_child_die (clone, clone_tree_partial (c, decl_table)));
7923
7924 return clone;
7925 }
7926
7927 /* Walk the DIE and its children, looking for references to incomplete
7928 or trivial types that are unmarked (i.e., that are not in the current
7929 type_unit). */
7930
7931 static void
7932 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
7933 {
7934 dw_die_ref c;
7935 dw_attr_node *a;
7936 unsigned ix;
7937
7938 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7939 {
7940 if (AT_class (a) == dw_val_class_die_ref)
7941 {
7942 dw_die_ref targ = AT_ref (a);
7943 decl_table_entry **slot;
7944 struct decl_table_entry *entry;
7945
7946 if (targ->die_mark != 0 || targ->comdat_type_p)
7947 continue;
7948
7949 slot = decl_table->find_slot_with_hash (targ,
7950 htab_hash_pointer (targ),
7951 INSERT);
7952
7953 if (*slot != HTAB_EMPTY_ENTRY)
7954 {
7955 /* TARG has already been copied, so we just need to
7956 modify the reference to point to the copy. */
7957 entry = *slot;
7958 a->dw_attr_val.v.val_die_ref.die = entry->copy;
7959 }
7960 else
7961 {
7962 dw_die_ref parent = unit;
7963 dw_die_ref copy = clone_die (targ);
7964
7965 /* Record in DECL_TABLE that TARG has been copied.
7966 Need to do this now, before the recursive call,
7967 because DECL_TABLE may be expanded and SLOT
7968 would no longer be a valid pointer. */
7969 entry = XCNEW (struct decl_table_entry);
7970 entry->orig = targ;
7971 entry->copy = copy;
7972 *slot = entry;
7973
7974 /* If TARG is not a declaration DIE, we need to copy its
7975 children. */
7976 if (!is_declaration_die (targ))
7977 {
7978 FOR_EACH_CHILD (
7979 targ, c,
7980 add_child_die (copy,
7981 clone_tree_partial (c, decl_table)));
7982 }
7983
7984 /* Make sure the cloned tree is marked as part of the
7985 type unit. */
7986 mark_dies (copy);
7987
7988 /* If TARG has surrounding context, copy its ancestor tree
7989 into the new type unit. */
7990 if (targ->die_parent != NULL
7991 && !is_unit_die (targ->die_parent))
7992 parent = copy_ancestor_tree (unit, targ->die_parent,
7993 decl_table);
7994
7995 add_child_die (parent, copy);
7996 a->dw_attr_val.v.val_die_ref.die = copy;
7997
7998 /* Make sure the newly-copied DIE is walked. If it was
7999 installed in a previously-added context, it won't
8000 get visited otherwise. */
8001 if (parent != unit)
8002 {
8003 /* Find the highest point of the newly-added tree,
8004 mark each node along the way, and walk from there. */
8005 parent->die_mark = 1;
8006 while (parent->die_parent
8007 && parent->die_parent->die_mark == 0)
8008 {
8009 parent = parent->die_parent;
8010 parent->die_mark = 1;
8011 }
8012 copy_decls_walk (unit, parent, decl_table);
8013 }
8014 }
8015 }
8016 }
8017
8018 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8019 }
8020
8021 /* Copy declarations for "unworthy" types into the new comdat section.
8022 Incomplete types, modified types, and certain other types aren't broken
8023 out into comdat sections of their own, so they don't have a signature,
8024 and we need to copy the declaration into the same section so that we
8025 don't have an external reference. */
8026
8027 static void
8028 copy_decls_for_unworthy_types (dw_die_ref unit)
8029 {
8030 mark_dies (unit);
8031 decl_hash_type decl_table (10);
8032 copy_decls_walk (unit, unit, &decl_table);
8033 unmark_dies (unit);
8034 }
8035
8036 /* Traverse the DIE and add a sibling attribute if it may have the
8037 effect of speeding up access to siblings. To save some space,
8038 avoid generating sibling attributes for DIEs without children. */
8039
8040 static void
8041 add_sibling_attributes (dw_die_ref die)
8042 {
8043 dw_die_ref c;
8044
8045 if (! die->die_child)
8046 return;
8047
8048 if (die->die_parent && die != die->die_parent->die_child)
8049 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8050
8051 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8052 }
8053
8054 /* Output all location lists for the DIE and its children. */
8055
8056 static void
8057 output_location_lists (dw_die_ref die)
8058 {
8059 dw_die_ref c;
8060 dw_attr_node *a;
8061 unsigned ix;
8062
8063 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8064 if (AT_class (a) == dw_val_class_loc_list)
8065 output_loc_list (AT_loc_list (a));
8066
8067 FOR_EACH_CHILD (die, c, output_location_lists (c));
8068 }
8069
8070 /* We want to limit the number of external references, because they are
8071 larger than local references: a relocation takes multiple words, and
8072 even a sig8 reference is always eight bytes, whereas a local reference
8073 can be as small as one byte (though DW_FORM_ref is usually 4 in GCC).
8074 So if we encounter multiple external references to the same type DIE, we
8075 make a local typedef stub for it and redirect all references there.
8076
8077 This is the element of the hash table for keeping track of these
8078 references. */
8079
8080 struct external_ref
8081 {
8082 dw_die_ref type;
8083 dw_die_ref stub;
8084 unsigned n_refs;
8085 };
8086
8087 /* Hashtable helpers. */
8088
8089 struct external_ref_hasher : free_ptr_hash <external_ref>
8090 {
8091 static inline hashval_t hash (const external_ref *);
8092 static inline bool equal (const external_ref *, const external_ref *);
8093 };
8094
8095 inline hashval_t
8096 external_ref_hasher::hash (const external_ref *r)
8097 {
8098 dw_die_ref die = r->type;
8099 hashval_t h = 0;
8100
8101 /* We can't use the address of the DIE for hashing, because
8102 that will make the order of the stub DIEs non-deterministic. */
8103 if (! die->comdat_type_p)
8104 /* We have a symbol; use it to compute a hash. */
8105 h = htab_hash_string (die->die_id.die_symbol);
8106 else
8107 {
8108 /* We have a type signature; use a subset of the bits as the hash.
8109 The 8-byte signature is at least as large as hashval_t. */
8110 comdat_type_node *type_node = die->die_id.die_type_node;
8111 memcpy (&h, type_node->signature, sizeof (h));
8112 }
8113 return h;
8114 }
8115
8116 inline bool
8117 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8118 {
8119 return r1->type == r2->type;
8120 }
8121
8122 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8123
8124 /* Return a pointer to the external_ref for references to DIE. */
8125
8126 static struct external_ref *
8127 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8128 {
8129 struct external_ref ref, *ref_p;
8130 external_ref **slot;
8131
8132 ref.type = die;
8133 slot = map->find_slot (&ref, INSERT);
8134 if (*slot != HTAB_EMPTY_ENTRY)
8135 return *slot;
8136
8137 ref_p = XCNEW (struct external_ref);
8138 ref_p->type = die;
8139 *slot = ref_p;
8140 return ref_p;
8141 }
8142
8143 /* Subroutine of optimize_external_refs, below.
8144
8145 If we see a type skeleton, record it as our stub. If we see external
8146 references, remember how many we've seen. */
8147
8148 static void
8149 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8150 {
8151 dw_die_ref c;
8152 dw_attr_node *a;
8153 unsigned ix;
8154 struct external_ref *ref_p;
8155
8156 if (is_type_die (die)
8157 && (c = get_AT_ref (die, DW_AT_signature)))
8158 {
8159 /* This is a local skeleton; use it for local references. */
8160 ref_p = lookup_external_ref (map, c);
8161 ref_p->stub = die;
8162 }
8163
8164 /* Scan the DIE references, and remember any that refer to DIEs from
8165 other CUs (i.e. those which are not marked). */
8166 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8167 if (AT_class (a) == dw_val_class_die_ref
8168 && (c = AT_ref (a))->die_mark == 0
8169 && is_type_die (c))
8170 {
8171 ref_p = lookup_external_ref (map, c);
8172 ref_p->n_refs++;
8173 }
8174
8175 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8176 }
8177
8178 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8179 points to an external_ref, DATA is the CU we're processing. If we don't
8180 already have a local stub, and we have multiple refs, build a stub. */
8181
8182 int
8183 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8184 {
8185 struct external_ref *ref_p = *slot;
8186
8187 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
8188 {
8189 /* We have multiple references to this type, so build a small stub.
8190 Both of these forms are a bit dodgy from the perspective of the
8191 DWARF standard, since technically they should have names. */
8192 dw_die_ref cu = data;
8193 dw_die_ref type = ref_p->type;
8194 dw_die_ref stub = NULL;
8195
8196 if (type->comdat_type_p)
8197 {
8198 /* If we refer to this type via sig8, use AT_signature. */
8199 stub = new_die (type->die_tag, cu, NULL_TREE);
8200 add_AT_die_ref (stub, DW_AT_signature, type);
8201 }
8202 else
8203 {
8204 /* Otherwise, use a typedef with no name. */
8205 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
8206 add_AT_die_ref (stub, DW_AT_type, type);
8207 }
8208
8209 stub->die_mark++;
8210 ref_p->stub = stub;
8211 }
8212 return 1;
8213 }
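
/* Rough illustration of the payoff: a CU with three references to one
   external type would otherwise emit three DW_FORM_ref_addr (relocated)
   or DW_FORM_ref_sig8 (8-byte) values; after this pass it carries a
   single nameless stub DIE holding the one external reference, and the
   three original attributes become local DW_FORM_ref references to the
   stub, rewritten later in build_abbrev_table.  */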
8214
8215 /* DIE is a unit; look through all the DIE references to see if there are
8216 any external references to types, and if so, create local stubs for
8217 them which will be applied in build_abbrev_table. This is useful because
8218 references to local DIEs are smaller. */
8219
8220 static external_ref_hash_type *
8221 optimize_external_refs (dw_die_ref die)
8222 {
8223 external_ref_hash_type *map = new external_ref_hash_type (10);
8224 optimize_external_refs_1 (die, map);
8225 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
8226 return map;
8227 }
8228
8229 /* The format of each DIE (and its attribute value pairs) is encoded in an
8230 abbreviation table. This routine builds the abbreviation table and assigns
8231 a unique abbreviation id for each abbreviation entry. The children of each
8232 die are visited recursively. */
8233
8234 static void
8235 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
8236 {
8237 unsigned long abbrev_id;
8238 unsigned int n_alloc;
8239 dw_die_ref c;
8240 dw_attr_node *a;
8241 unsigned ix;
8242
8243 /* Scan the DIE references, and replace any that refer to
8244 DIEs from other CUs (i.e. those which are not marked) with
8245 the local stubs we built in optimize_external_refs. */
8246 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8247 if (AT_class (a) == dw_val_class_die_ref
8248 && (c = AT_ref (a))->die_mark == 0)
8249 {
8250 struct external_ref *ref_p;
8251 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
8252
8253 ref_p = lookup_external_ref (extern_map, c);
8254 if (ref_p->stub && ref_p->stub != die)
8255 change_AT_die_ref (a, ref_p->stub);
8256 else
8257 /* We aren't changing this reference, so mark it external. */
8258 set_AT_ref_external (a, 1);
8259 }
8260
8261 for (abbrev_id = 1; abbrev_id < abbrev_die_table_in_use; ++abbrev_id)
8262 {
8263 dw_die_ref abbrev = abbrev_die_table[abbrev_id];
8264 dw_attr_node *die_a, *abbrev_a;
8265 unsigned ix;
8266 bool ok = true;
8267
8268 if (abbrev->die_tag != die->die_tag)
8269 continue;
8270 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
8271 continue;
8272
8273 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
8274 continue;
8275
8276 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
8277 {
8278 abbrev_a = &(*abbrev->die_attr)[ix];
8279 if ((abbrev_a->dw_attr != die_a->dw_attr)
8280 || (value_format (abbrev_a) != value_format (die_a)))
8281 {
8282 ok = false;
8283 break;
8284 }
8285 }
8286 if (ok)
8287 break;
8288 }
8289
8290 if (abbrev_id >= abbrev_die_table_in_use)
8291 {
8292 if (abbrev_die_table_in_use >= abbrev_die_table_allocated)
8293 {
8294 n_alloc = abbrev_die_table_allocated + ABBREV_DIE_TABLE_INCREMENT;
8295 abbrev_die_table = GGC_RESIZEVEC (dw_die_ref, abbrev_die_table,
8296 n_alloc);
8297
8298 memset (&abbrev_die_table[abbrev_die_table_allocated], 0,
8299 (n_alloc - abbrev_die_table_allocated) * sizeof (dw_die_ref));
8300 abbrev_die_table_allocated = n_alloc;
8301 }
8302
8303 ++abbrev_die_table_in_use;
8304 abbrev_die_table[abbrev_id] = die;
8305 }
8306
8307 die->die_abbrev = abbrev_id;
8308 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
8309 }
8310 \f
8311 /* Return the power-of-two number of bytes necessary to represent VALUE. */
8312
8313 static int
8314 constant_size (unsigned HOST_WIDE_INT value)
8315 {
8316 int log;
8317
8318 if (value == 0)
8319 log = 0;
8320 else
8321 log = floor_log2 (value);
8322
8323 log = log / 8;
8324 log = 1 << (floor_log2 (log) + 1);
8325
8326 return log;
8327 }
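
/* A few hand-checked examples of the rounding above, for illustration:

     constant_size (0)          == 1
     constant_size (0xff)       == 1
     constant_size (0x100)      == 2
     constant_size (0x123456)   == 4
     constant_size (0x12345678) == 4

   i.e. the smallest of 1, 2, 4 or 8 bytes that can hold VALUE, which is
   what the DW_FORM_data1/2/4/8 selection in value_format relies on.  */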
8328
8329 /* Return the size of a DIE as it is represented in the
8330 .debug_info section. */
8331
8332 static unsigned long
8333 size_of_die (dw_die_ref die)
8334 {
8335 unsigned long size = 0;
8336 dw_attr_node *a;
8337 unsigned ix;
8338 enum dwarf_form form;
8339
8340 size += size_of_uleb128 (die->die_abbrev);
8341 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8342 {
8343 switch (AT_class (a))
8344 {
8345 case dw_val_class_addr:
8346 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
8347 {
8348 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
8349 size += size_of_uleb128 (AT_index (a));
8350 }
8351 else
8352 size += DWARF2_ADDR_SIZE;
8353 break;
8354 case dw_val_class_offset:
8355 size += DWARF_OFFSET_SIZE;
8356 break;
8357 case dw_val_class_loc:
8358 {
8359 unsigned long lsize = size_of_locs (AT_loc (a));
8360
8361 /* Block length. */
8362 if (dwarf_version >= 4)
8363 size += size_of_uleb128 (lsize);
8364 else
8365 size += constant_size (lsize);
8366 size += lsize;
8367 }
8368 break;
8369 case dw_val_class_loc_list:
8370 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
8371 {
8372 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
8373 size += size_of_uleb128 (AT_index (a));
8374 }
8375 else
8376 size += DWARF_OFFSET_SIZE;
8377 break;
8378 case dw_val_class_range_list:
8379 size += DWARF_OFFSET_SIZE;
8380 break;
8381 case dw_val_class_const:
8382 size += size_of_sleb128 (AT_int (a));
8383 break;
8384 case dw_val_class_unsigned_const:
8385 {
8386 int csize = constant_size (AT_unsigned (a));
8387 if (dwarf_version == 3
8388 && a->dw_attr == DW_AT_data_member_location
8389 && csize >= 4)
8390 size += size_of_uleb128 (AT_unsigned (a));
8391 else
8392 size += csize;
8393 }
8394 break;
8395 case dw_val_class_const_double:
8396 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
8397 if (HOST_BITS_PER_WIDE_INT >= 64)
8398 size++; /* block */
8399 break;
8400 case dw_val_class_wide_int:
8401 size += (get_full_len (*a->dw_attr_val.v.val_wide)
8402 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
8403 if (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT
8404 > 64)
8405 size++; /* block */
8406 break;
8407 case dw_val_class_vec:
8408 size += constant_size (a->dw_attr_val.v.val_vec.length
8409 * a->dw_attr_val.v.val_vec.elt_size)
8410 + a->dw_attr_val.v.val_vec.length
8411 * a->dw_attr_val.v.val_vec.elt_size; /* block */
8412 break;
8413 case dw_val_class_flag:
8414 if (dwarf_version >= 4)
8415 /* Currently all add_AT_flag calls pass in 1 as last argument,
8416 so DW_FORM_flag_present can be used. If that ever changes,
8417 we'll need to use DW_FORM_flag and have some optimization
8418 in build_abbrev_table that will change those to
8419 DW_FORM_flag_present if it is set to 1 in all DIEs using
8420 the same abbrev entry. */
8421 gcc_assert (a->dw_attr_val.v.val_flag == 1);
8422 else
8423 size += 1;
8424 break;
8425 case dw_val_class_die_ref:
8426 if (AT_ref_external (a))
8427 {
8428 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
8429 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
8430 is sized by target address length, whereas in DWARF3
8431 it's always sized as an offset. */
8432 if (use_debug_types)
8433 size += DWARF_TYPE_SIGNATURE_SIZE;
8434 else if (dwarf_version == 2)
8435 size += DWARF2_ADDR_SIZE;
8436 else
8437 size += DWARF_OFFSET_SIZE;
8438 }
8439 else
8440 size += DWARF_OFFSET_SIZE;
8441 break;
8442 case dw_val_class_fde_ref:
8443 size += DWARF_OFFSET_SIZE;
8444 break;
8445 case dw_val_class_lbl_id:
8446 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
8447 {
8448 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
8449 size += size_of_uleb128 (AT_index (a));
8450 }
8451 else
8452 size += DWARF2_ADDR_SIZE;
8453 break;
8454 case dw_val_class_lineptr:
8455 case dw_val_class_macptr:
8456 size += DWARF_OFFSET_SIZE;
8457 break;
8458 case dw_val_class_str:
8459 form = AT_string_form (a);
8460 if (form == DW_FORM_strp)
8461 size += DWARF_OFFSET_SIZE;
8462 else if (form == DW_FORM_GNU_str_index)
8463 size += size_of_uleb128 (AT_index (a));
8464 else
8465 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
8466 break;
8467 case dw_val_class_file:
8468 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
8469 break;
8470 case dw_val_class_data8:
8471 size += 8;
8472 break;
8473 case dw_val_class_vms_delta:
8474 size += DWARF_OFFSET_SIZE;
8475 break;
8476 case dw_val_class_high_pc:
8477 size += DWARF2_ADDR_SIZE;
8478 break;
8479 case dw_val_class_discr_value:
8480 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
8481 break;
8482 case dw_val_class_discr_list:
8483 {
8484 unsigned block_size = size_of_discr_list (AT_discr_list (a));
8485
8486 /* This is a block, so we have the block length and then its
8487 data. */
8488 size += constant_size (block_size) + block_size;
8489 }
8490 break;
8491 default:
8492 gcc_unreachable ();
8493 }
8494 }
8495
8496 return size;
8497 }
8498
8499 /* Size the debugging information associated with a given DIE. Visits the
8500 DIE's children recursively. Updates the global variable next_die_offset
8501 each time through. Uses the current value of next_die_offset to update the
8502 die_offset field in each DIE. */
8503
8504 static void
8505 calc_die_sizes (dw_die_ref die)
8506 {
8507 dw_die_ref c;
8508
8509 gcc_assert (die->die_offset == 0
8510 || (unsigned long int) die->die_offset == next_die_offset);
8511 die->die_offset = next_die_offset;
8512 next_die_offset += size_of_die (die);
8513
8514 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
8515
8516 if (die->die_child != NULL)
8517 /* Count the null byte used to terminate sibling lists. */
8518 next_die_offset += 1;
8519 }
8520
8521 /* Size just the base type children at the start of the CU.
8522 This is needed because build_abbrev_table needs to size location
8523 expressions, and sizing of type-based stack ops needs to know die_offset
8524 values for the base types. */
8525
8526 static void
8527 calc_base_type_die_sizes (void)
8528 {
8529 unsigned long die_offset = DWARF_COMPILE_UNIT_HEADER_SIZE;
8530 unsigned int i;
8531 dw_die_ref base_type;
8532 #if ENABLE_ASSERT_CHECKING
8533 dw_die_ref prev = comp_unit_die ()->die_child;
8534 #endif
8535
8536 die_offset += size_of_die (comp_unit_die ());
8537 for (i = 0; base_types.iterate (i, &base_type); i++)
8538 {
8539 #if ENABLE_ASSERT_CHECKING
8540 gcc_assert (base_type->die_offset == 0
8541 && prev->die_sib == base_type
8542 && base_type->die_child == NULL
8543 && base_type->die_abbrev);
8544 prev = base_type;
8545 #endif
8546 base_type->die_offset = die_offset;
8547 die_offset += size_of_die (base_type);
8548 }
8549 }
8550
8551 /* Set the marks for a die and its children. We do this so
8552 that we know whether or not a reference needs to use FORM_ref_addr; only
8553 DIEs in the same CU will be marked. We used to clear out the offset
8554 and use that as the flag, but ran into ordering problems. */
8555
8556 static void
8557 mark_dies (dw_die_ref die)
8558 {
8559 dw_die_ref c;
8560
8561 gcc_assert (!die->die_mark);
8562
8563 die->die_mark = 1;
8564 FOR_EACH_CHILD (die, c, mark_dies (c));
8565 }
8566
8567 /* Clear the marks for a die and its children. */
8568
8569 static void
8570 unmark_dies (dw_die_ref die)
8571 {
8572 dw_die_ref c;
8573
8574 if (! use_debug_types)
8575 gcc_assert (die->die_mark);
8576
8577 die->die_mark = 0;
8578 FOR_EACH_CHILD (die, c, unmark_dies (c));
8579 }
8580
8581 /* Clear the marks for a die, its children and referred dies. */
8582
8583 static void
8584 unmark_all_dies (dw_die_ref die)
8585 {
8586 dw_die_ref c;
8587 dw_attr_node *a;
8588 unsigned ix;
8589
8590 if (!die->die_mark)
8591 return;
8592 die->die_mark = 0;
8593
8594 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
8595
8596 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8597 if (AT_class (a) == dw_val_class_die_ref)
8598 unmark_all_dies (AT_ref (a));
8599 }
8600
8601 /* Calculate whether the entry should appear in the final output file. It
8602 may be from a pruned type. */
8603
8604 static bool
8605 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
8606 {
8607 /* By limiting gnu pubnames to definitions only, gold can generate a
8608 gdb index without entries for declarations, which don't include
8609 enough information to be useful. */
8610 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
8611 return false;
8612
8613 if (table == pubname_table)
8614 {
8615 /* Enumerator names are part of the pubname table, but the
8616 parent DW_TAG_enumeration_type die may have been pruned.
8617 Don't output them if that is the case. */
8618 if (p->die->die_tag == DW_TAG_enumerator &&
8619 (p->die->die_parent == NULL
8620 || !p->die->die_parent->die_perennial_p))
8621 return false;
8622
8623 /* Everything else in the pubname table is included. */
8624 return true;
8625 }
8626
8627 /* The pubtypes table shouldn't include types that have been
8628 pruned. */
8629 return (p->die->die_offset != 0
8630 || !flag_eliminate_unused_debug_types);
8631 }
8632
8633 /* Return the size of the .debug_pubnames or .debug_pubtypes table
8634 generated for the compilation unit. */
8635
8636 static unsigned long
8637 size_of_pubnames (vec<pubname_entry, va_gc> *names)
8638 {
8639 unsigned long size;
8640 unsigned i;
8641 pubname_entry *p;
8642 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
8643
8644 size = DWARF_PUBNAMES_HEADER_SIZE;
8645 FOR_EACH_VEC_ELT (*names, i, p)
8646 if (include_pubname_in_output (names, p))
8647 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
8648
8649 size += DWARF_OFFSET_SIZE;
8650 return size;
8651 }
8652
8653 /* Return the size of the information in the .debug_aranges section. */
8654
8655 static unsigned long
8656 size_of_aranges (void)
8657 {
8658 unsigned long size;
8659
8660 size = DWARF_ARANGES_HEADER_SIZE;
8661
8662 /* Count the address/length pair for this compilation unit. */
8663 if (text_section_used)
8664 size += 2 * DWARF2_ADDR_SIZE;
8665 if (cold_text_section_used)
8666 size += 2 * DWARF2_ADDR_SIZE;
8667 if (have_multiple_function_sections)
8668 {
8669 unsigned fde_idx;
8670 dw_fde_ref fde;
8671
8672 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
8673 {
8674 if (DECL_IGNORED_P (fde->decl))
8675 continue;
8676 if (!fde->in_std_section)
8677 size += 2 * DWARF2_ADDR_SIZE;
8678 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
8679 size += 2 * DWARF2_ADDR_SIZE;
8680 }
8681 }
8682
8683 /* Count the two zero words used to terminate the address range table. */
8684 size += 2 * DWARF2_ADDR_SIZE;
8685 return size;
8686 }
8687 \f
8688 /* Select the encoding of an attribute value. */
8689
8690 static enum dwarf_form
8691 value_format (dw_attr_node *a)
8692 {
8693 switch (AT_class (a))
8694 {
8695 case dw_val_class_addr:
8696 /* Only very few attributes allow DW_FORM_addr. */
8697 switch (a->dw_attr)
8698 {
8699 case DW_AT_low_pc:
8700 case DW_AT_high_pc:
8701 case DW_AT_entry_pc:
8702 case DW_AT_trampoline:
8703 return (AT_index (a) == NOT_INDEXED
8704 ? DW_FORM_addr : DW_FORM_GNU_addr_index);
8705 default:
8706 break;
8707 }
8708 switch (DWARF2_ADDR_SIZE)
8709 {
8710 case 1:
8711 return DW_FORM_data1;
8712 case 2:
8713 return DW_FORM_data2;
8714 case 4:
8715 return DW_FORM_data4;
8716 case 8:
8717 return DW_FORM_data8;
8718 default:
8719 gcc_unreachable ();
8720 }
8721 case dw_val_class_range_list:
8722 case dw_val_class_loc_list:
8723 if (dwarf_version >= 4)
8724 return DW_FORM_sec_offset;
8725 /* FALLTHRU */
8726 case dw_val_class_vms_delta:
8727 case dw_val_class_offset:
8728 switch (DWARF_OFFSET_SIZE)
8729 {
8730 case 4:
8731 return DW_FORM_data4;
8732 case 8:
8733 return DW_FORM_data8;
8734 default:
8735 gcc_unreachable ();
8736 }
8737 case dw_val_class_loc:
8738 if (dwarf_version >= 4)
8739 return DW_FORM_exprloc;
8740 switch (constant_size (size_of_locs (AT_loc (a))))
8741 {
8742 case 1:
8743 return DW_FORM_block1;
8744 case 2:
8745 return DW_FORM_block2;
8746 case 4:
8747 return DW_FORM_block4;
8748 default:
8749 gcc_unreachable ();
8750 }
8751 case dw_val_class_const:
8752 return DW_FORM_sdata;
8753 case dw_val_class_unsigned_const:
8754 switch (constant_size (AT_unsigned (a)))
8755 {
8756 case 1:
8757 return DW_FORM_data1;
8758 case 2:
8759 return DW_FORM_data2;
8760 case 4:
8761 /* In DWARF3 DW_AT_data_member_location with
8762 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
8763 constant, so we need to use DW_FORM_udata if we need
8764 a large constant. */
8765 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
8766 return DW_FORM_udata;
8767 return DW_FORM_data4;
8768 case 8:
8769 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
8770 return DW_FORM_udata;
8771 return DW_FORM_data8;
8772 default:
8773 gcc_unreachable ();
8774 }
8775 case dw_val_class_const_double:
8776 switch (HOST_BITS_PER_WIDE_INT)
8777 {
8778 case 8:
8779 return DW_FORM_data2;
8780 case 16:
8781 return DW_FORM_data4;
8782 case 32:
8783 return DW_FORM_data8;
8784 case 64:
8785 default:
8786 return DW_FORM_block1;
8787 }
8788 case dw_val_class_wide_int:
8789 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
8790 {
8791 case 8:
8792 return DW_FORM_data1;
8793 case 16:
8794 return DW_FORM_data2;
8795 case 32:
8796 return DW_FORM_data4;
8797 case 64:
8798 return DW_FORM_data8;
8799 default:
8800 return DW_FORM_block1;
8801 }
8802 case dw_val_class_vec:
8803 switch (constant_size (a->dw_attr_val.v.val_vec.length
8804 * a->dw_attr_val.v.val_vec.elt_size))
8805 {
8806 case 1:
8807 return DW_FORM_block1;
8808 case 2:
8809 return DW_FORM_block2;
8810 case 4:
8811 return DW_FORM_block4;
8812 default:
8813 gcc_unreachable ();
8814 }
8815 case dw_val_class_flag:
8816 if (dwarf_version >= 4)
8817 {
8818 /* Currently all add_AT_flag calls pass in 1 as last argument,
8819 so DW_FORM_flag_present can be used. If that ever changes,
8820 we'll need to use DW_FORM_flag and have some optimization
8821 in build_abbrev_table that will change those to
8822 DW_FORM_flag_present if it is set to 1 in all DIEs using
8823 the same abbrev entry. */
8824 gcc_assert (a->dw_attr_val.v.val_flag == 1);
8825 return DW_FORM_flag_present;
8826 }
8827 return DW_FORM_flag;
8828 case dw_val_class_die_ref:
8829 if (AT_ref_external (a))
8830 return use_debug_types ? DW_FORM_ref_sig8 : DW_FORM_ref_addr;
8831 else
8832 return DW_FORM_ref;
8833 case dw_val_class_fde_ref:
8834 return DW_FORM_data;
8835 case dw_val_class_lbl_id:
8836 return (AT_index (a) == NOT_INDEXED
8837 ? DW_FORM_addr : DW_FORM_GNU_addr_index);
8838 case dw_val_class_lineptr:
8839 case dw_val_class_macptr:
8840 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
8841 case dw_val_class_str:
8842 return AT_string_form (a);
8843 case dw_val_class_file:
8844 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
8845 {
8846 case 1:
8847 return DW_FORM_data1;
8848 case 2:
8849 return DW_FORM_data2;
8850 case 4:
8851 return DW_FORM_data4;
8852 default:
8853 gcc_unreachable ();
8854 }
8855
8856 case dw_val_class_data8:
8857 return DW_FORM_data8;
8858
8859 case dw_val_class_high_pc:
8860 switch (DWARF2_ADDR_SIZE)
8861 {
8862 case 1:
8863 return DW_FORM_data1;
8864 case 2:
8865 return DW_FORM_data2;
8866 case 4:
8867 return DW_FORM_data4;
8868 case 8:
8869 return DW_FORM_data8;
8870 default:
8871 gcc_unreachable ();
8872 }
8873
8874 case dw_val_class_discr_value:
8875 return (a->dw_attr_val.v.val_discr_value.pos
8876 ? DW_FORM_udata
8877 : DW_FORM_sdata);
8878 case dw_val_class_discr_list:
8879 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
8880 {
8881 case 1:
8882 return DW_FORM_block1;
8883 case 2:
8884 return DW_FORM_block2;
8885 case 4:
8886 return DW_FORM_block4;
8887 default:
8888 gcc_unreachable ();
8889 }
8890
8891 default:
8892 gcc_unreachable ();
8893 }
8894 }
8895
8896 /* Output the encoding of an attribute value. */
8897
8898 static void
8899 output_value_format (dw_attr_node *a)
8900 {
8901 enum dwarf_form form = value_format (a);
8902
8903 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
8904 }
8905
8906 /* Given a die and id, produce the appropriate abbreviations. */
8907
8908 static void
8909 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
8910 {
8911 unsigned ix;
8912 dw_attr_node *a_attr;
8913
8914 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
8915 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
8916 dwarf_tag_name (abbrev->die_tag));
8917
8918 if (abbrev->die_child != NULL)
8919 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
8920 else
8921 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
8922
8923 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
8924 {
8925 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
8926 dwarf_attr_name (a_attr->dw_attr));
8927 output_value_format (a_attr);
8928 }
8929
8930 dw2_asm_output_data (1, 0, NULL);
8931 dw2_asm_output_data (1, 0, NULL);
8932 }
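
/* For illustration, one abbreviation entry for a childless
   DW_TAG_pointer_type DIE with a single DW_AT_type attribute would come
   out roughly as (the abbrev code 0x3 is made up; forms vary per DIE):

       .uleb128 0x3    (abbrev code)
       .uleb128 0xf    (TAG: DW_TAG_pointer_type)
       .byte 0         (DW_children_no)
       .uleb128 0x49   (DW_AT_type)
       .uleb128 0x13   (DW_FORM_ref4)
       .byte 0
       .byte 0         terminator for this abbreviation  */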
8933
8934
8935 /* Output the .debug_abbrev section which defines the DIE abbreviation
8936 table. */
8937
8938 static void
8939 output_abbrev_section (void)
8940 {
8941 unsigned long abbrev_id;
8942
8943 for (abbrev_id = 1; abbrev_id < abbrev_die_table_in_use; ++abbrev_id)
8944 output_die_abbrevs (abbrev_id, abbrev_die_table[abbrev_id]);
8945
8946 /* Terminate the table. */
8947 dw2_asm_output_data (1, 0, NULL);
8948 }
8949
8950 /* Output a symbol we can use to refer to this DIE from another CU. */
8951
8952 static inline void
8953 output_die_symbol (dw_die_ref die)
8954 {
8955 const char *sym = die->die_id.die_symbol;
8956
8957 gcc_assert (!die->comdat_type_p);
8958
8959 if (sym == 0)
8960 return;
8961
8962 if (strncmp (sym, DIE_LABEL_PREFIX, sizeof (DIE_LABEL_PREFIX) - 1) == 0)
8963 /* We make these global, not weak; if the target doesn't support
8964 .linkonce, it doesn't support combining the sections, so debugging
8965 will break. */
8966 targetm.asm_out.globalize_label (asm_out_file, sym);
8967
8968 ASM_OUTPUT_LABEL (asm_out_file, sym);
8969 }
8970
8971 /* Return a new location list, given the begin and end range, and the
8972 expression. */
8973
8974 static inline dw_loc_list_ref
8975 new_loc_list (dw_loc_descr_ref expr, const char *begin, const char *end,
8976 const char *section)
8977 {
8978 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
8979
8980 retlist->begin = begin;
8981 retlist->begin_entry = NULL;
8982 retlist->end = end;
8983 retlist->expr = expr;
8984 retlist->section = section;
8985
8986 return retlist;
8987 }
8988
8989 /* Generate a new internal symbol for this location list node, if it
8990 hasn't got one yet. */
8991
8992 static inline void
8993 gen_llsym (dw_loc_list_ref list)
8994 {
8995 gcc_assert (!list->ll_symbol);
8996 list->ll_symbol = gen_internal_sym ("LLST");
8997 }
8998
8999 /* Output the location list given to us. */
9000
9001 static void
9002 output_loc_list (dw_loc_list_ref list_head)
9003 {
9004 dw_loc_list_ref curr = list_head;
9005
9006 if (list_head->emitted)
9007 return;
9008 list_head->emitted = true;
9009
9010 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
9011
9012 /* Walk the location list, and output each range + expression. */
9013 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
9014 {
9015 unsigned long size;
9016 /* Don't output an entry that starts and ends at the same address. */
9017 if (strcmp (curr->begin, curr->end) == 0 && !curr->force)
9018 continue;
9019 size = size_of_locs (curr->expr);
9020 /* If the expression is too large, drop it on the floor. We could
9021 perhaps put it into DW_TAG_dwarf_procedure and refer to that
9022 in the expression, but >= 64KB expressions for a single value
9023 in a single range are unlikely to be very useful. */
9024 if (size > 0xffff)
9025 continue;
9026 if (dwarf_split_debug_info)
9027 {
9028 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
9029 "Location list start/length entry (%s)",
9030 list_head->ll_symbol);
9031 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
9032 "Location list range start index (%s)",
9033 curr->begin);
9034 /* The length field is 4 bytes. If we ever need to support
9035 an 8-byte length, we can add a new DW_LLE code or fall back
9036 to DW_LLE_GNU_start_end_entry. */
9037 dw2_asm_output_delta (4, curr->end, curr->begin,
9038 "Location list range length (%s)",
9039 list_head->ll_symbol);
9040 }
9041 else if (!have_multiple_function_sections)
9042 {
9043 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
9044 "Location list begin address (%s)",
9045 list_head->ll_symbol);
9046 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
9047 "Location list end address (%s)",
9048 list_head->ll_symbol);
9049 }
9050 else
9051 {
9052 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
9053 "Location list begin address (%s)",
9054 list_head->ll_symbol);
9055 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
9056 "Location list end address (%s)",
9057 list_head->ll_symbol);
9058 }
9059
9060 /* Output the block length for this list of location operations. */
9061 gcc_assert (size <= 0xffff);
9062 dw2_asm_output_data (2, size, "%s", "Location expression size");
9063
9064 output_loc_sequence (curr->expr, -1);
9065 }
9066
9067 if (dwarf_split_debug_info)
9068 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
9069 "Location list terminator (%s)",
9070 list_head->ll_symbol);
9071 else
9072 {
9073 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
9074 "Location list terminator begin (%s)",
9075 list_head->ll_symbol);
9076 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
9077 "Location list terminator end (%s)",
9078 list_head->ll_symbol);
9079 }
9080 }
9081
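/* Editorial sketch, not part of the original source: for the common
   non-split, single-text-section case, each entry output_loc_list emits
   for a hypothetical range [.LVL1, .LVL2) whose expression is the single
   byte DW_OP_reg0 (0x50) looks roughly like:

       <.LVL1 - section base>   DWARF2_ADDR_SIZE bytes  (begin address)
       <.LVL2 - section base>   DWARF2_ADDR_SIZE bytes  (end address)
       0x0001                   2-byte expression size
       0x50                     the location expression itself

   and the whole list is closed by a pair of zero address-sized words.  */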
9082 /* Output a range_list offset into the debug_ranges section. Emit a
9083 relocated reference if val_entry is NULL, otherwise, emit an
9084 indirect reference. */
9085
9086 static void
9087 output_range_list_offset (dw_attr_node *a)
9088 {
9089 const char *name = dwarf_attr_name (a->dw_attr);
9090
9091 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
9092 {
9093 char *p = strchr (ranges_section_label, '\0');
9094 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX, a->dw_attr_val.v.val_offset);
9095 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
9096 debug_ranges_section, "%s", name);
9097 *p = '\0';
9098 }
9099 else
9100 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
9101 "%s (offset from %s)", name, ranges_section_label);
9102 }
9103
9104 /* Output the offset into the debug_loc section. */
9105
9106 static void
9107 output_loc_list_offset (dw_attr_node *a)
9108 {
9109 char *sym = AT_loc_list (a)->ll_symbol;
9110
9111 gcc_assert (sym);
9112 if (dwarf_split_debug_info)
9113 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
9114 "%s", dwarf_attr_name (a->dw_attr));
9115 else
9116 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
9117 "%s", dwarf_attr_name (a->dw_attr));
9118 }
9119
9120 /* Output an attribute's index or value appropriately. */
9121
9122 static void
9123 output_attr_index_or_value (dw_attr_node *a)
9124 {
9125 const char *name = dwarf_attr_name (a->dw_attr);
9126
9127 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9128 {
9129 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
9130 return;
9131 }
9132 switch (AT_class (a))
9133 {
9134 case dw_val_class_addr:
9135 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
9136 break;
9137 case dw_val_class_high_pc:
9138 case dw_val_class_lbl_id:
9139 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
9140 break;
9141 case dw_val_class_loc_list:
9142 output_loc_list_offset (a);
9143 break;
9144 default:
9145 gcc_unreachable ();
9146 }
9147 }
9148
9149 /* Output a type signature. */
9150
9151 static inline void
9152 output_signature (const char *sig, const char *name)
9153 {
9154 int i;
9155
9156 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
9157 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
9158 }
9159
9160 /* Output a discriminant value. */
9161
9162 static inline void
9163 output_discr_value (dw_discr_value *discr_value, const char *name)
9164 {
9165 if (discr_value->pos)
9166 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
9167 else
9168 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
9169 }
9170
9171 /* Output the DIE and its attributes. Called recursively to generate
9172 the definitions of each child DIE. */
9173
9174 static void
9175 output_die (dw_die_ref die)
9176 {
9177 dw_attr_node *a;
9178 dw_die_ref c;
9179 unsigned long size;
9180 unsigned ix;
9181
9182 /* If someone in another CU might refer to us, set up a symbol for
9183 them to point to. */
9184 if (! die->comdat_type_p && die->die_id.die_symbol)
9185 output_die_symbol (die);
9186
9187 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
9188 (unsigned long)die->die_offset,
9189 dwarf_tag_name (die->die_tag));
9190
9191 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9192 {
9193 const char *name = dwarf_attr_name (a->dw_attr);
9194
9195 switch (AT_class (a))
9196 {
9197 case dw_val_class_addr:
9198 output_attr_index_or_value (a);
9199 break;
9200
9201 case dw_val_class_offset:
9202 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
9203 "%s", name);
9204 break;
9205
9206 case dw_val_class_range_list:
9207 output_range_list_offset (a);
9208 break;
9209
9210 case dw_val_class_loc:
9211 size = size_of_locs (AT_loc (a));
9212
9213 /* Output the block length for this list of location operations. */
9214 if (dwarf_version >= 4)
9215 dw2_asm_output_data_uleb128 (size, "%s", name);
9216 else
9217 dw2_asm_output_data (constant_size (size), size, "%s", name);
9218
9219 output_loc_sequence (AT_loc (a), -1);
9220 break;
9221
9222 case dw_val_class_const:
9223 /* ??? It would be slightly more efficient to use a scheme like the one
9224 used for unsigned constants below, but gdb 4.x does not sign
9225 extend. Gdb 5.x does sign extend. */
9226 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
9227 break;
9228
9229 case dw_val_class_unsigned_const:
9230 {
9231 int csize = constant_size (AT_unsigned (a));
9232 if (dwarf_version == 3
9233 && a->dw_attr == DW_AT_data_member_location
9234 && csize >= 4)
9235 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
9236 else
9237 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
9238 }
9239 break;
9240
9241 case dw_val_class_const_double:
9242 {
9243 unsigned HOST_WIDE_INT first, second;
9244
9245 if (HOST_BITS_PER_WIDE_INT >= 64)
9246 dw2_asm_output_data (1,
9247 HOST_BITS_PER_DOUBLE_INT
9248 / HOST_BITS_PER_CHAR,
9249 NULL);
9250
9251 if (WORDS_BIG_ENDIAN)
9252 {
9253 first = a->dw_attr_val.v.val_double.high;
9254 second = a->dw_attr_val.v.val_double.low;
9255 }
9256 else
9257 {
9258 first = a->dw_attr_val.v.val_double.low;
9259 second = a->dw_attr_val.v.val_double.high;
9260 }
9261
9262 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
9263 first, "%s", name);
9264 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
9265 second, NULL);
9266 }
9267 break;
9268
9269 case dw_val_class_wide_int:
9270 {
9271 int i;
9272 int len = get_full_len (*a->dw_attr_val.v.val_wide);
9273 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
9274 if (len * HOST_BITS_PER_WIDE_INT > 64)
9275 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide) * l,
9276 NULL);
9277
9278 if (WORDS_BIG_ENDIAN)
9279 for (i = len - 1; i >= 0; --i)
9280 {
9281 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
9282 "%s", name);
9283 name = "";
9284 }
9285 else
9286 for (i = 0; i < len; ++i)
9287 {
9288 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
9289 "%s", name);
9290 name = "";
9291 }
9292 }
9293 break;
9294
9295 case dw_val_class_vec:
9296 {
9297 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
9298 unsigned int len = a->dw_attr_val.v.val_vec.length;
9299 unsigned int i;
9300 unsigned char *p;
9301
9302 dw2_asm_output_data (constant_size (len * elt_size),
9303 len * elt_size, "%s", name);
9304 if (elt_size > sizeof (HOST_WIDE_INT))
9305 {
9306 elt_size /= 2;
9307 len *= 2;
9308 }
9309 for (i = 0, p = a->dw_attr_val.v.val_vec.array;
9310 i < len;
9311 i++, p += elt_size)
9312 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
9313 "fp or vector constant word %u", i);
9314 break;
9315 }
9316
9317 case dw_val_class_flag:
9318 if (dwarf_version >= 4)
9319 {
9320 /* Currently all add_AT_flag calls pass in 1 as last argument,
9321 so DW_FORM_flag_present can be used. If that ever changes,
9322 we'll need to use DW_FORM_flag and have some optimization
9323 in build_abbrev_table that will change those to
9324 DW_FORM_flag_present if it is set to 1 in all DIEs using
9325 the same abbrev entry. */
9326 gcc_assert (AT_flag (a) == 1);
9327 if (flag_debug_asm)
9328 fprintf (asm_out_file, "\t\t\t%s %s\n",
9329 ASM_COMMENT_START, name);
9330 break;
9331 }
9332 dw2_asm_output_data (1, AT_flag (a), "%s", name);
9333 break;
9334
9335 case dw_val_class_loc_list:
9336 output_attr_index_or_value (a);
9337 break;
9338
9339 case dw_val_class_die_ref:
9340 if (AT_ref_external (a))
9341 {
9342 if (AT_ref (a)->comdat_type_p)
9343 {
9344 comdat_type_node *type_node =
9345 AT_ref (a)->die_id.die_type_node;
9346
9347 gcc_assert (type_node);
9348 output_signature (type_node->signature, name);
9349 }
9350 else
9351 {
9352 const char *sym = AT_ref (a)->die_id.die_symbol;
9353 int size;
9354
9355 gcc_assert (sym);
9356 /* In DWARF2, DW_FORM_ref_addr is sized by target address
9357 length, whereas in DWARF3 it's always sized as an
9358 offset. */
9359 if (dwarf_version == 2)
9360 size = DWARF2_ADDR_SIZE;
9361 else
9362 size = DWARF_OFFSET_SIZE;
9363 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
9364 name);
9365 }
9366 }
9367 else
9368 {
9369 gcc_assert (AT_ref (a)->die_offset);
9370 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
9371 "%s", name);
9372 }
9373 break;
9374
9375 case dw_val_class_fde_ref:
9376 {
9377 char l1[20];
9378
9379 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
9380 a->dw_attr_val.v.val_fde_index * 2);
9381 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
9382 "%s", name);
9383 }
9384 break;
9385
9386 case dw_val_class_vms_delta:
9387 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
9388 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
9389 AT_vms_delta2 (a), AT_vms_delta1 (a),
9390 "%s", name);
9391 #else
9392 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
9393 AT_vms_delta2 (a), AT_vms_delta1 (a),
9394 "%s", name);
9395 #endif
9396 break;
9397
9398 case dw_val_class_lbl_id:
9399 output_attr_index_or_value (a);
9400 break;
9401
9402 case dw_val_class_lineptr:
9403 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
9404 debug_line_section, "%s", name);
9405 break;
9406
9407 case dw_val_class_macptr:
9408 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
9409 debug_macinfo_section, "%s", name);
9410 break;
9411
9412 case dw_val_class_str:
9413 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
9414 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
9415 a->dw_attr_val.v.val_str->label,
9416 debug_str_section,
9417 "%s: \"%s\"", name, AT_string (a));
9418 else if (a->dw_attr_val.v.val_str->form == DW_FORM_GNU_str_index)
9419 dw2_asm_output_data_uleb128 (AT_index (a),
9420 "%s: \"%s\"", name, AT_string (a));
9421 else
9422 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
9423 break;
9424
9425 case dw_val_class_file:
9426 {
9427 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
9428
9429 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
9430 a->dw_attr_val.v.val_file->filename);
9431 break;
9432 }
9433
9434 case dw_val_class_data8:
9435 {
9436 int i;
9437
9438 for (i = 0; i < 8; i++)
9439 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
9440 i == 0 ? "%s" : NULL, name);
9441 break;
9442 }
9443
9444 case dw_val_class_high_pc:
9445 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
9446 get_AT_low_pc (die), "DW_AT_high_pc");
9447 break;
9448
9449 case dw_val_class_discr_value:
9450 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
9451 break;
9452
9453 case dw_val_class_discr_list:
9454 {
9455 dw_discr_list_ref list = AT_discr_list (a);
9456 const int size = size_of_discr_list (list);
9457
9458 /* This is a block, so output its length first. */
9459 dw2_asm_output_data (constant_size (size), size,
9460 "%s: block size", name);
9461
9462 for (; list != NULL; list = list->dw_discr_next)
9463 {
9464 /* One byte for the discriminant value descriptor, and then as
9465 many LEB128 numbers as required. */
9466 if (list->dw_discr_range)
9467 dw2_asm_output_data (1, DW_DSC_range,
9468 "%s: DW_DSC_range", name);
9469 else
9470 dw2_asm_output_data (1, DW_DSC_label,
9471 "%s: DW_DSC_label", name);
9472
9473 output_discr_value (&list->dw_discr_lower_bound, name);
9474 if (list->dw_discr_range)
9475 output_discr_value (&list->dw_discr_upper_bound, name);
9476 }
9477 break;
9478 }
9479
9480 default:
9481 gcc_unreachable ();
9482 }
9483 }
9484
9485 FOR_EACH_CHILD (die, c, output_die (c));
9486
9487 /* Add null byte to terminate sibling list. */
9488 if (die->die_child != NULL)
9489 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
9490 (unsigned long) die->die_offset);
9491 }
9492
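/* Editorial sketch, not part of the original source: the encoding that
   output_die produces is the DIE's abbreviation code as a uleb128,
   followed by the attribute values in the order and forms declared by
   that abbreviation in .debug_abbrev, then the encodings of any children
   and, if there are children, a single terminating zero byte.  */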
9493 /* Output the compilation unit that appears at the beginning of the
9494 .debug_info section, and precedes the DIE descriptions. */
9495
9496 static void
9497 output_compilation_unit_header (void)
9498 {
9499 /* We don't support actual DWARFv5 units yet; we just use some
9500 DWARFv5 draft DIE tags in DWARFv4 format. */
9501 int ver = dwarf_version < 5 ? dwarf_version : 4;
9502
9503 if (!XCOFF_DEBUGGING_INFO)
9504 {
9505 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
9506 dw2_asm_output_data (4, 0xffffffff,
9507 "Initial length escape value indicating 64-bit DWARF extension");
9508 dw2_asm_output_data (DWARF_OFFSET_SIZE,
9509 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
9510 "Length of Compilation Unit Info");
9511 }
9512
9513 dw2_asm_output_data (2, ver, "DWARF version number");
9514 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
9515 debug_abbrev_section,
9516 "Offset Into Abbrev. Section");
9517 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
9518 }
9519
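/* Editorial sketch, not part of the original source: for 32-bit DWARF,
   version 4 and 8-byte addresses, the header emitted above corresponds
   roughly to the following gas-style output (the length and label are
   purely illustrative):

       .long   0x4b2            # Length of Compilation Unit Info
       .value  0x4              # DWARF version number
       .long   .Ldebug_abbrev0  # Offset Into Abbrev. Section
       .byte   0x8              # Pointer Size (in bytes)  */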
9520 /* Output the compilation unit DIE and its children. */
9521
9522 static void
9523 output_comp_unit (dw_die_ref die, int output_if_empty)
9524 {
9525 const char *secname, *oldsym;
9526 char *tmp;
9527
9528 /* Unless we are outputting the main CU, we may throw away empty ones. */
9529 if (!output_if_empty && die->die_child == NULL)
9530 return;
9531
9532 /* Even if there are no children of this DIE, we must output the information
9533 about the compilation unit. Otherwise, on an empty translation unit, we
9534 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
9535 will then complain when examining the file. First mark all the DIEs in
9536 this CU so we know which get local refs. */
9537 mark_dies (die);
9538
9539 external_ref_hash_type *extern_map = optimize_external_refs (die);
9540
9541 build_abbrev_table (die, extern_map);
9542
9543 delete extern_map;
9544
9545 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
9546 next_die_offset = DWARF_COMPILE_UNIT_HEADER_SIZE;
9547 calc_die_sizes (die);
9548
9549 oldsym = die->die_id.die_symbol;
9550 if (oldsym)
9551 {
9552 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
9553
9554 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
9555 secname = tmp;
9556 die->die_id.die_symbol = NULL;
9557 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
9558 }
9559 else
9560 {
9561 switch_to_section (debug_info_section);
9562 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
9563 info_section_emitted = true;
9564 }
9565
9566 /* Output debugging information. */
9567 output_compilation_unit_header ();
9568 output_die (die);
9569
9570 /* Leave the marks on the main CU, so we can check them in
9571 output_pubnames. */
9572 if (oldsym)
9573 {
9574 unmark_dies (die);
9575 die->die_id.die_symbol = oldsym;
9576 }
9577 }
9578
9579 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
9580 and .debug_pubtypes. This is configured per-target, but can be
9581 overridden by the -gpubnames or -gno-pubnames options. */
9582
9583 static inline bool
9584 want_pubnames (void)
9585 {
9586 if (debug_info_level <= DINFO_LEVEL_TERSE)
9587 return false;
9588 if (debug_generate_pub_sections != -1)
9589 return debug_generate_pub_sections;
9590 return targetm.want_debug_pub_sections;
9591 }
9592
9593 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
9594
9595 static void
9596 add_AT_pubnames (dw_die_ref die)
9597 {
9598 if (want_pubnames ())
9599 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
9600 }
9601
9602 /* Add a string attribute value to a skeleton DIE. */
9603
9604 static inline void
9605 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
9606 const char *str)
9607 {
9608 dw_attr_node attr;
9609 struct indirect_string_node *node;
9610
9611 if (! skeleton_debug_str_hash)
9612 skeleton_debug_str_hash
9613 = hash_table<indirect_string_hasher>::create_ggc (10);
9614
9615 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
9616 find_string_form (node);
9617 if (node->form == DW_FORM_GNU_str_index)
9618 node->form = DW_FORM_strp;
9619
9620 attr.dw_attr = attr_kind;
9621 attr.dw_attr_val.val_class = dw_val_class_str;
9622 attr.dw_attr_val.val_entry = NULL;
9623 attr.dw_attr_val.v.val_str = node;
9624 add_dwarf_attr (die, &attr);
9625 }
9626
9627 /* Helper function to generate top-level dies for skeleton debug_info and
9628 debug_types. */
9629
9630 static void
9631 add_top_level_skeleton_die_attrs (dw_die_ref die)
9632 {
9633 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
9634 const char *comp_dir = comp_dir_string ();
9635
9636 add_skeleton_AT_string (die, DW_AT_GNU_dwo_name, dwo_file_name);
9637 if (comp_dir != NULL)
9638 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
9639 add_AT_pubnames (die);
9640 add_AT_lineptr (die, DW_AT_GNU_addr_base, debug_addr_section_label);
9641 }
9642
9643 /* Output skeleton debug sections that point to the dwo file. */
9644
9645 static void
9646 output_skeleton_debug_sections (dw_die_ref comp_unit)
9647 {
9648 /* We don't support actual DWARFv5 units yet; we just use some
9649 DWARFv5 draft DIE tags in DWARFv4 format. */
9650 int ver = dwarf_version < 5 ? dwarf_version : 4;
9651
9652 /* These attributes will be found in the full debug_info section. */
9653 remove_AT (comp_unit, DW_AT_producer);
9654 remove_AT (comp_unit, DW_AT_language);
9655
9656 switch_to_section (debug_skeleton_info_section);
9657 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
9658
9659 /* Produce the skeleton compilation-unit header. This one differs enough from
9660 a normal CU header that it's better not to call
9661 output_compilation_unit_header. */
9662 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
9663 dw2_asm_output_data (4, 0xffffffff,
9664 "Initial length escape value indicating 64-bit DWARF extension");
9665
9666 dw2_asm_output_data (DWARF_OFFSET_SIZE,
9667 DWARF_COMPILE_UNIT_HEADER_SIZE
9668 - DWARF_INITIAL_LENGTH_SIZE
9669 + size_of_die (comp_unit),
9670 "Length of Compilation Unit Info");
9671 dw2_asm_output_data (2, ver, "DWARF version number");
9672 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
9673 debug_abbrev_section,
9674 "Offset Into Abbrev. Section");
9675 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
9676
9677 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
9678 output_die (comp_unit);
9679
9680 /* Build the skeleton debug_abbrev section. */
9681 switch_to_section (debug_skeleton_abbrev_section);
9682 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
9683
9684 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
9685
9686 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
9687 }
9688
9689 /* Output a comdat type unit DIE and its children. */
9690
9691 static void
9692 output_comdat_type_unit (comdat_type_node *node)
9693 {
9694 const char *secname;
9695 char *tmp;
9696 int i;
9697 #if defined (OBJECT_FORMAT_ELF)
9698 tree comdat_key;
9699 #endif
9700
9701 /* First mark all the DIEs in this CU so we know which get local refs. */
9702 mark_dies (node->root_die);
9703
9704 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
9705
9706 build_abbrev_table (node->root_die, extern_map);
9707
9708 delete extern_map;
9709 extern_map = NULL;
9710
9711 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
9712 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
9713 calc_die_sizes (node->root_die);
9714
9715 #if defined (OBJECT_FORMAT_ELF)
9716 if (!dwarf_split_debug_info)
9717 secname = ".debug_types";
9718 else
9719 secname = ".debug_types.dwo";
9720
9721 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
9722 sprintf (tmp, "wt.");
9723 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
9724 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
9725 comdat_key = get_identifier (tmp);
9726 targetm.asm_out.named_section (secname,
9727 SECTION_DEBUG | SECTION_LINKONCE,
9728 comdat_key);
9729 #else
9730 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
9731 sprintf (tmp, ".gnu.linkonce.wt.");
9732 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
9733 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
9734 secname = tmp;
9735 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
9736 #endif
9737
9738 /* Output debugging information. */
9739 output_compilation_unit_header ();
9740 output_signature (node->signature, "Type Signature");
9741 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
9742 "Offset to Type DIE");
9743 output_die (node->root_die);
9744
9745 unmark_dies (node->root_die);
9746 }
9747
9748 /* Return the DWARF2/3 pubname associated with a decl. */
9749
9750 static const char *
9751 dwarf2_name (tree decl, int scope)
9752 {
9753 if (DECL_NAMELESS (decl))
9754 return NULL;
9755 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
9756 }
9757
9758 /* Add a new entry to .debug_pubnames if appropriate. */
9759
9760 static void
9761 add_pubname_string (const char *str, dw_die_ref die)
9762 {
9763 pubname_entry e;
9764
9765 e.die = die;
9766 e.name = xstrdup (str);
9767 vec_safe_push (pubname_table, e);
9768 }
9769
9770 static void
9771 add_pubname (tree decl, dw_die_ref die)
9772 {
9773 if (!want_pubnames ())
9774 return;
9775
9776 /* Don't add items to the table when we expect that the consumer will have
9777 just read the enclosing die. For example, if the consumer is looking at a
9778 class_member, it will either be inside the class already, or will have just
9779 looked up the class to find the member. Either way, searching the class is
9780 faster than searching the index. */
9781 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
9782 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
9783 {
9784 const char *name = dwarf2_name (decl, 1);
9785
9786 if (name)
9787 add_pubname_string (name, die);
9788 }
9789 }
9790
9791 /* Add an enumerator to the pubnames section. */
9792
9793 static void
9794 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
9795 {
9796 pubname_entry e;
9797
9798 gcc_assert (scope_name);
9799 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
9800 e.die = die;
9801 vec_safe_push (pubname_table, e);
9802 }
9803
9804 /* Add a new entry to .debug_pubtypes if appropriate. */
9805
9806 static void
9807 add_pubtype (tree decl, dw_die_ref die)
9808 {
9809 pubname_entry e;
9810
9811 if (!want_pubnames ())
9812 return;
9813
9814 if ((TREE_PUBLIC (decl)
9815 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
9816 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
9817 {
9818 tree scope = NULL;
9819 const char *scope_name = "";
9820 const char *sep = is_cxx () ? "::" : ".";
9821 const char *name;
9822
9823 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
9824 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
9825 {
9826 scope_name = lang_hooks.dwarf_name (scope, 1);
9827 if (scope_name != NULL && scope_name[0] != '\0')
9828 scope_name = concat (scope_name, sep, NULL);
9829 else
9830 scope_name = "";
9831 }
9832
9833 if (TYPE_P (decl))
9834 name = type_tag (decl);
9835 else
9836 name = lang_hooks.dwarf_name (decl, 1);
9837
9838 /* If we don't have a name for the type, there's no point in adding
9839 it to the table. */
9840 if (name != NULL && name[0] != '\0')
9841 {
9842 e.die = die;
9843 e.name = concat (scope_name, name, NULL);
9844 vec_safe_push (pubtype_table, e);
9845 }
9846
9847 /* Although it might be more consistent to add the pubinfo for the
9848 enumerators as their dies are created, they should only be added if the
9849 enum type meets the criteria above. So rather than re-check the parent
9850 enum type whenever an enumerator die is created, just output them all
9851 here. This isn't protected by the name conditional because anonymous
9852 enums don't have names. */
9853 if (die->die_tag == DW_TAG_enumeration_type)
9854 {
9855 dw_die_ref c;
9856
9857 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
9858 }
9859 }
9860 }
9861
9862 /* Output a single entry in the pubnames table. */
9863
9864 static void
9865 output_pubname (dw_offset die_offset, pubname_entry *entry)
9866 {
9867 dw_die_ref die = entry->die;
9868 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
9869
9870 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
9871
9872 if (debug_generate_pub_sections == 2)
9873 {
9874 /* This logic follows gdb's method for determining the value of the flag
9875 byte. */
9876 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
9877 switch (die->die_tag)
9878 {
9879 case DW_TAG_typedef:
9880 case DW_TAG_base_type:
9881 case DW_TAG_subrange_type:
9882 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
9883 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
9884 break;
9885 case DW_TAG_enumerator:
9886 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
9887 GDB_INDEX_SYMBOL_KIND_VARIABLE);
9888 if (!is_cxx () && !is_java ())
9889 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
9890 break;
9891 case DW_TAG_subprogram:
9892 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
9893 GDB_INDEX_SYMBOL_KIND_FUNCTION);
9894 if (!is_ada ())
9895 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
9896 break;
9897 case DW_TAG_constant:
9898 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
9899 GDB_INDEX_SYMBOL_KIND_VARIABLE);
9900 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
9901 break;
9902 case DW_TAG_variable:
9903 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
9904 GDB_INDEX_SYMBOL_KIND_VARIABLE);
9905 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
9906 break;
9907 case DW_TAG_namespace:
9908 case DW_TAG_imported_declaration:
9909 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
9910 break;
9911 case DW_TAG_class_type:
9912 case DW_TAG_interface_type:
9913 case DW_TAG_structure_type:
9914 case DW_TAG_union_type:
9915 case DW_TAG_enumeration_type:
9916 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
9917 if (!is_cxx () && !is_java ())
9918 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
9919 break;
9920 default:
9921 /* An unusual tag. Leave the flag-byte empty. */
9922 break;
9923 }
9924 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
9925 "GDB-index flags");
9926 }
9927
9928 dw2_asm_output_nstring (entry->name, -1, "external name");
9929 }
9930
9931
9932 /* Output the public names table used to speed up access to externally
9933 visible names; or the public types table used to find type definitions. */
9934
9935 static void
9936 output_pubnames (vec<pubname_entry, va_gc> *names)
9937 {
9938 unsigned i;
9939 unsigned long pubnames_length = size_of_pubnames (names);
9940 pubname_entry *pub;
9941
9942 if (!XCOFF_DEBUGGING_INFO)
9943 {
9944 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
9945 dw2_asm_output_data (4, 0xffffffff,
9946 "Initial length escape value indicating 64-bit DWARF extension");
9947 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
9948 "Pub Info Length");
9949 }
9950
9951 /* Version number for pubnames/pubtypes is independent of dwarf version. */
9952 dw2_asm_output_data (2, 2, "DWARF Version");
9953
9954 if (dwarf_split_debug_info)
9955 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
9956 debug_skeleton_info_section,
9957 "Offset of Compilation Unit Info");
9958 else
9959 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
9960 debug_info_section,
9961 "Offset of Compilation Unit Info");
9962 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
9963 "Compilation Unit Length");
9964
9965 FOR_EACH_VEC_ELT (*names, i, pub)
9966 {
9967 if (include_pubname_in_output (names, pub))
9968 {
9969 dw_offset die_offset = pub->die->die_offset;
9970
9971 /* We shouldn't see pubnames for DIEs outside of the main CU. */
9972 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
9973 gcc_assert (pub->die->die_mark);
9974
9975 /* If we're putting types in their own .debug_types sections,
9976 the .debug_pubtypes table will still point to the compile
9977 unit (not the type unit), so we want to use the offset of
9978 the skeleton DIE (if there is one). */
9979 if (pub->die->comdat_type_p && names == pubtype_table)
9980 {
9981 comdat_type_node *type_node = pub->die->die_id.die_type_node;
9982
9983 if (type_node != NULL)
9984 die_offset = (type_node->skeleton_die != NULL
9985 ? type_node->skeleton_die->die_offset
9986 : comp_unit_die ()->die_offset);
9987 }
9988
9989 output_pubname (die_offset, pub);
9990 }
9991 }
9992
9993 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
9994 }
9995
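/* Editorial sketch, not part of the original source: the table written
   above is a header (length, version 2, offset and length of the CU it
   indexes) followed by one record per name, each record being a
   DWARF_OFFSET_SIZE DIE offset, an optional GDB-index flag byte when
   debug_generate_pub_sections == 2, and the NUL-terminated name itself;
   a zero DIE offset terminates the table.  */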
9996 /* Output public names and types tables if necessary. */
9997
9998 static void
9999 output_pubtables (void)
10000 {
10001 if (!want_pubnames () || !info_section_emitted)
10002 return;
10003
10004 switch_to_section (debug_pubnames_section);
10005 output_pubnames (pubname_table);
10006 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
10007 It shouldn't hurt to emit it always, since pure DWARF2 consumers
10008 simply won't look for the section. */
10009 switch_to_section (debug_pubtypes_section);
10010 output_pubnames (pubtype_table);
10011 }
10012
10013
10014 /* Output the information that goes into the .debug_aranges table.
10015 Namely, define the beginning and ending address range of the
10016 text section generated for this compilation unit. */
10017
10018 static void
10019 output_aranges (void)
10020 {
10021 unsigned i;
10022 unsigned long aranges_length = size_of_aranges ();
10023
10024 if (!XCOFF_DEBUGGING_INFO)
10025 {
10026 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10027 dw2_asm_output_data (4, 0xffffffff,
10028 "Initial length escape value indicating 64-bit DWARF extension");
10029 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
10030 "Length of Address Ranges Info");
10031 }
10032
10033 /* Version number for aranges is still 2, even up to DWARF5. */
10034 dw2_asm_output_data (2, 2, "DWARF Version");
10035 if (dwarf_split_debug_info)
10036 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
10037 debug_skeleton_info_section,
10038 "Offset of Compilation Unit Info");
10039 else
10040 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
10041 debug_info_section,
10042 "Offset of Compilation Unit Info");
10043 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
10044 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
10045
10046 /* We need to align to twice the pointer size here. */
10047 if (DWARF_ARANGES_PAD_SIZE)
10048 {
10049 /* Pad using 2-byte words so that the padding is correct for any
10050 pointer size. */
10051 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
10052 2 * DWARF2_ADDR_SIZE);
10053 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
10054 dw2_asm_output_data (2, 0, NULL);
10055 }
10056
10057 /* We must not output these entries if the sections were not used;
10058 otherwise the length will be 0 and
10059 the address may end up as 0 if the section is discarded by ld
10060 --gc-sections, leaving an invalid (0, 0) entry that can be
10061 confused with the terminator. */
10062 if (text_section_used)
10063 {
10064 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
10065 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
10066 text_section_label, "Length");
10067 }
10068 if (cold_text_section_used)
10069 {
10070 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
10071 "Address");
10072 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
10073 cold_text_section_label, "Length");
10074 }
10075
10076 if (have_multiple_function_sections)
10077 {
10078 unsigned fde_idx;
10079 dw_fde_ref fde;
10080
10081 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
10082 {
10083 if (DECL_IGNORED_P (fde->decl))
10084 continue;
10085 if (!fde->in_std_section)
10086 {
10087 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
10088 "Address");
10089 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
10090 fde->dw_fde_begin, "Length");
10091 }
10092 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
10093 {
10094 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
10095 "Address");
10096 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
10097 fde->dw_fde_second_begin, "Length");
10098 }
10099 }
10100 }
10101
10102 /* Output the terminator words. */
10103 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
10104 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
10105 }
10106
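/* Editorial sketch, not part of the original source: after the header
   and its padding to a 2 * DWARF2_ADDR_SIZE boundary, the body written
   above is just a sequence of (start address, length) pairs -- one for
   .text, one for the cold section and one per out-of-line function
   section -- closed by a pair of zero words.  */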
10107 /* Add a new entry to .debug_ranges. Return the offset at which it
10108 was placed. */
10109
10110 static unsigned int
10111 add_ranges_num (int num)
10112 {
10113 unsigned int in_use = ranges_table_in_use;
10114
10115 if (in_use == ranges_table_allocated)
10116 {
10117 ranges_table_allocated += RANGES_TABLE_INCREMENT;
10118 ranges_table = GGC_RESIZEVEC (dw_ranges, ranges_table,
10119 ranges_table_allocated);
10120 memset (ranges_table + ranges_table_in_use, 0,
10121 RANGES_TABLE_INCREMENT * sizeof (dw_ranges));
10122 }
10123
10124 ranges_table[in_use].num = num;
10125 ranges_table_in_use = in_use + 1;
10126
10127 return in_use * 2 * DWARF2_ADDR_SIZE;
10128 }
10129
10130 /* Add a new entry to .debug_ranges corresponding to a block, or a
10131 range terminator if BLOCK is NULL. */
10132
10133 static unsigned int
10134 add_ranges (const_tree block)
10135 {
10136 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0);
10137 }
10138
10139 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
10140 When using dwarf_split_debug_info, address attributes in dies destined
10141 for the final executable should be direct references--setting the
10142 parameter force_direct ensures this behavior. */
10143
10144 static void
10145 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
10146 bool *added, bool force_direct)
10147 {
10148 unsigned int in_use = ranges_by_label_in_use;
10149 unsigned int offset;
10150
10151 if (in_use == ranges_by_label_allocated)
10152 {
10153 ranges_by_label_allocated += RANGES_TABLE_INCREMENT;
10154 ranges_by_label = GGC_RESIZEVEC (dw_ranges_by_label, ranges_by_label,
10155 ranges_by_label_allocated);
10156 memset (ranges_by_label + ranges_by_label_in_use, 0,
10157 RANGES_TABLE_INCREMENT * sizeof (dw_ranges_by_label));
10158 }
10159
10160 ranges_by_label[in_use].begin = begin;
10161 ranges_by_label[in_use].end = end;
10162 ranges_by_label_in_use = in_use + 1;
10163
10164 offset = add_ranges_num (-(int)in_use - 1);
10165 if (!*added)
10166 {
10167 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
10168 *added = true;
10169 }
10170 }
10171
10172 static void
10173 output_ranges (void)
10174 {
10175 unsigned i;
10176 static const char *const start_fmt = "Offset %#x";
10177 const char *fmt = start_fmt;
10178
10179 for (i = 0; i < ranges_table_in_use; i++)
10180 {
10181 int block_num = ranges_table[i].num;
10182
10183 if (block_num > 0)
10184 {
10185 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
10186 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
10187
10188 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
10189 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
10190
10191 /* If all code is in the text section, then the compilation
10192 unit base address defaults to DW_AT_low_pc, which is the
10193 base of the text section. */
10194 if (!have_multiple_function_sections)
10195 {
10196 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
10197 text_section_label,
10198 fmt, i * 2 * DWARF2_ADDR_SIZE);
10199 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
10200 text_section_label, NULL);
10201 }
10202
10203 /* Otherwise, the compilation unit base address is zero,
10204 which allows us to use absolute addresses, and not worry
10205 about whether the target supports cross-section
10206 arithmetic. */
10207 else
10208 {
10209 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
10210 fmt, i * 2 * DWARF2_ADDR_SIZE);
10211 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
10212 }
10213
10214 fmt = NULL;
10215 }
10216
10217 /* Negative block_num stands for an index into ranges_by_label. */
10218 else if (block_num < 0)
10219 {
10220 int lab_idx = - block_num - 1;
10221
10222 if (!have_multiple_function_sections)
10223 {
10224 gcc_unreachable ();
10225 #if 0
10226 /* If we ever use add_ranges_by_labels () for a single
10227 function section, all we have to do is to take out
10228 the #if 0 above. */
10229 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
10230 ranges_by_label[lab_idx].begin,
10231 text_section_label,
10232 fmt, i * 2 * DWARF2_ADDR_SIZE);
10233 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
10234 ranges_by_label[lab_idx].end,
10235 text_section_label, NULL);
10236 #endif
10237 }
10238 else
10239 {
10240 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
10241 ranges_by_label[lab_idx].begin,
10242 fmt, i * 2 * DWARF2_ADDR_SIZE);
10243 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
10244 ranges_by_label[lab_idx].end,
10245 NULL);
10246 }
10247 }
10248 else
10249 {
10250 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
10251 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
10252 fmt = start_fmt;
10253 }
10254 }
10255 }
10256
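/* Editorial sketch, not part of the original source: with all code in a
   single text section, a block delimited by the hypothetical labels
   .LBB4/.LBE4 contributes the pair (.LBB4 - text base, .LBE4 - text base)
   to .debug_ranges, since the CU base address is DW_AT_low_pc; with
   multiple function sections the pair is emitted as absolute addresses
   against a zero CU base.  A (0, 0) pair ends each range list.  */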
10257 /* Data structure containing information about input files. */
10258 struct file_info
10259 {
10260 const char *path; /* Complete file name. */
10261 const char *fname; /* File name part. */
10262 int length; /* Length of entire string. */
10263 struct dwarf_file_data * file_idx; /* Index in input file table. */
10264 int dir_idx; /* Index in directory table. */
10265 };
10266
10267 /* Data structure containing information about directories with source
10268 files. */
10269 struct dir_info
10270 {
10271 const char *path; /* Path including directory name. */
10272 int length; /* Path length. */
10273 int prefix; /* Index of directory entry which is a prefix. */
10274 int count; /* Number of files in this directory. */
10275 int dir_idx; /* Index of directory used as base. */
10276 };
10277
10278 /* Callback function for file_info comparison. We sort by looking at
10279 the directories in the path. */
10280
10281 static int
10282 file_info_cmp (const void *p1, const void *p2)
10283 {
10284 const struct file_info *const s1 = (const struct file_info *) p1;
10285 const struct file_info *const s2 = (const struct file_info *) p2;
10286 const unsigned char *cp1;
10287 const unsigned char *cp2;
10288
10289 /* Take care of file names without directories. We need to make sure that
10290 we return consistent values to qsort, since some implementations get
10291 confused if we return the same value when identical operands are passed
10292 in opposite orders. So if neither has a directory, return 0 and otherwise
10293 return 1 or -1 depending on which one has the directory. */
10294 if ((s1->path == s1->fname || s2->path == s2->fname))
10295 return (s2->path == s2->fname) - (s1->path == s1->fname);
10296
10297 cp1 = (const unsigned char *) s1->path;
10298 cp2 = (const unsigned char *) s2->path;
10299
10300 while (1)
10301 {
10302 ++cp1;
10303 ++cp2;
10304 /* Reached the end of the first path? If so, handle like above. */
10305 if ((cp1 == (const unsigned char *) s1->fname)
10306 || (cp2 == (const unsigned char *) s2->fname))
10307 return ((cp2 == (const unsigned char *) s2->fname)
10308 - (cp1 == (const unsigned char *) s1->fname));
10309
10310 /* Character of current path component the same? */
10311 else if (*cp1 != *cp2)
10312 return *cp1 - *cp2;
10313 }
10314 }
10315
10316 struct file_name_acquire_data
10317 {
10318 struct file_info *files;
10319 int used_files;
10320 int max_files;
10321 };
10322
10323 /* Traversal function for the hash table. */
10324
10325 int
10326 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
10327 {
10328 struct dwarf_file_data *d = *slot;
10329 struct file_info *fi;
10330 const char *f;
10331
10332 gcc_assert (fnad->max_files >= d->emitted_number);
10333
10334 if (! d->emitted_number)
10335 return 1;
10336
10337 gcc_assert (fnad->max_files != fnad->used_files);
10338
10339 fi = fnad->files + fnad->used_files++;
10340
10341 /* Skip all leading "./". */
10342 f = d->filename;
10343 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
10344 f += 2;
10345
10346 /* Create a new array entry. */
10347 fi->path = f;
10348 fi->length = strlen (f);
10349 fi->file_idx = d;
10350
10351 /* Search for the file name part. */
10352 f = strrchr (f, DIR_SEPARATOR);
10353 #if defined (DIR_SEPARATOR_2)
10354 {
10355 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
10356
10357 if (g != NULL)
10358 {
10359 if (f == NULL || f < g)
10360 f = g;
10361 }
10362 }
10363 #endif
10364
10365 fi->fname = f == NULL ? fi->path : f + 1;
10366 return 1;
10367 }
10368
10369 /* Output the directory table and the file name table. We try to minimize
10370 the total amount of memory needed. A heuristic is used to avoid large
10371 slowdowns with many input files. */
10372
10373 static void
10374 output_file_names (void)
10375 {
10376 struct file_name_acquire_data fnad;
10377 int numfiles;
10378 struct file_info *files;
10379 struct dir_info *dirs;
10380 int *saved;
10381 int *savehere;
10382 int *backmap;
10383 int ndirs;
10384 int idx_offset;
10385 int i;
10386
10387 if (!last_emitted_file)
10388 {
10389 dw2_asm_output_data (1, 0, "End directory table");
10390 dw2_asm_output_data (1, 0, "End file name table");
10391 return;
10392 }
10393
10394 numfiles = last_emitted_file->emitted_number;
10395
10396 /* Allocate the various arrays we need. */
10397 files = XALLOCAVEC (struct file_info, numfiles);
10398 dirs = XALLOCAVEC (struct dir_info, numfiles);
10399
10400 fnad.files = files;
10401 fnad.used_files = 0;
10402 fnad.max_files = numfiles;
10403 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
10404 gcc_assert (fnad.used_files == fnad.max_files);
10405
10406 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
10407
10408 /* Find all the different directories used. */
10409 dirs[0].path = files[0].path;
10410 dirs[0].length = files[0].fname - files[0].path;
10411 dirs[0].prefix = -1;
10412 dirs[0].count = 1;
10413 dirs[0].dir_idx = 0;
10414 files[0].dir_idx = 0;
10415 ndirs = 1;
10416
10417 for (i = 1; i < numfiles; i++)
10418 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
10419 && memcmp (dirs[ndirs - 1].path, files[i].path,
10420 dirs[ndirs - 1].length) == 0)
10421 {
10422 /* Same directory as last entry. */
10423 files[i].dir_idx = ndirs - 1;
10424 ++dirs[ndirs - 1].count;
10425 }
10426 else
10427 {
10428 int j;
10429
10430 /* This is a new directory. */
10431 dirs[ndirs].path = files[i].path;
10432 dirs[ndirs].length = files[i].fname - files[i].path;
10433 dirs[ndirs].count = 1;
10434 dirs[ndirs].dir_idx = ndirs;
10435 files[i].dir_idx = ndirs;
10436
10437 /* Search for a prefix. */
10438 dirs[ndirs].prefix = -1;
10439 for (j = 0; j < ndirs; j++)
10440 if (dirs[j].length < dirs[ndirs].length
10441 && dirs[j].length > 1
10442 && (dirs[ndirs].prefix == -1
10443 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
10444 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
10445 dirs[ndirs].prefix = j;
10446
10447 ++ndirs;
10448 }
10449
10450 /* Now to the actual work. We have to find a subset of the directories
10451 which allows expressing each file name using references to the directory
10452 table with the fewest characters. We do not do an exhaustive search where
10453 we would have to check every combination of every possible prefix.
10454 Instead we use a heuristic which provides nearly optimal results in most
10455 cases and is never far off. */
10456 saved = XALLOCAVEC (int, ndirs);
10457 savehere = XALLOCAVEC (int, ndirs);
10458
10459 memset (saved, '\0', ndirs * sizeof (saved[0]));
10460 for (i = 0; i < ndirs; i++)
10461 {
10462 int j;
10463 int total;
10464
10465 /* We can always save some space for the current directory. But this
10466 does not mean it will be enough to justify adding the directory. */
10467 savehere[i] = dirs[i].length;
10468 total = (savehere[i] - saved[i]) * dirs[i].count;
10469
10470 for (j = i + 1; j < ndirs; j++)
10471 {
10472 savehere[j] = 0;
10473 if (saved[j] < dirs[i].length)
10474 {
10475 /* Determine whether the dirs[i] path is a prefix of the
10476 dirs[j] path. */
10477 int k;
10478
10479 k = dirs[j].prefix;
10480 while (k != -1 && k != (int) i)
10481 k = dirs[k].prefix;
10482
10483 if (k == (int) i)
10484 {
10485 /* Yes it is. We can possibly save some memory by
10486 writing the filenames in dirs[j] relative to
10487 dirs[i]. */
10488 savehere[j] = dirs[i].length;
10489 total += (savehere[j] - saved[j]) * dirs[j].count;
10490 }
10491 }
10492 }
10493
10494 /* Check whether we can save enough to justify adding the dirs[i]
10495 directory. */
10496 if (total > dirs[i].length + 1)
10497 {
10498 /* It's worthwhile adding. */
10499 for (j = i; j < ndirs; j++)
10500 if (savehere[j] > 0)
10501 {
10502 /* Remember how much we saved for this directory so far. */
10503 saved[j] = savehere[j];
10504
10505 /* Remember the prefix directory. */
10506 dirs[j].dir_idx = i;
10507 }
10508 }
10509 }
10510
10511 /* Emit the directory name table. */
10512 idx_offset = dirs[0].length > 0 ? 1 : 0;
10513 for (i = 1 - idx_offset; i < ndirs; i++)
10514 dw2_asm_output_nstring (dirs[i].path,
10515 dirs[i].length
10516 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
10517 "Directory Entry: %#x", i + idx_offset);
10518
10519 dw2_asm_output_data (1, 0, "End directory table");
10520
10521 /* We have to emit them in the order of emitted_number since that's
10522 used in the debug info generation. To do this efficiently we
10523 generate a back-mapping of the indices first. */
10524 backmap = XALLOCAVEC (int, numfiles);
10525 for (i = 0; i < numfiles; i++)
10526 backmap[files[i].file_idx->emitted_number - 1] = i;
10527
10528 /* Now write all the file names. */
10529 for (i = 0; i < numfiles; i++)
10530 {
10531 int file_idx = backmap[i];
10532 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
10533
10534 #ifdef VMS_DEBUGGING_INFO
10535 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
10536
10537 /* Setting these fields can lead to debugger miscomparisons,
10538 but VMS Debug requires them to be set correctly. */
10539
10540 int ver;
10541 long long cdt;
10542 long siz;
10543 int maxfilelen = strlen (files[file_idx].path)
10544 + dirs[dir_idx].length
10545 + MAX_VMS_VERSION_LEN + 1;
10546 char *filebuf = XALLOCAVEC (char, maxfilelen);
10547
10548 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
10549 snprintf (filebuf, maxfilelen, "%s;%d",
10550 files[file_idx].path + dirs[dir_idx].length, ver);
10551
10552 dw2_asm_output_nstring
10553 (filebuf, -1, "File Entry: %#x", (unsigned) i + 1);
10554
10555 /* Include directory index. */
10556 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
10557
10558 /* Modification time. */
10559 dw2_asm_output_data_uleb128
10560 ((vms_file_stats_name (files[file_idx].path, &cdt, 0, 0, 0) == 0)
10561 ? cdt : 0,
10562 NULL);
10563
10564 /* File length in bytes. */
10565 dw2_asm_output_data_uleb128
10566 ((vms_file_stats_name (files[file_idx].path, 0, &siz, 0, 0) == 0)
10567 ? siz : 0,
10568 NULL);
10569 #else
10570 dw2_asm_output_nstring (files[file_idx].path + dirs[dir_idx].length, -1,
10571 "File Entry: %#x", (unsigned) i + 1);
10572
10573 /* Include directory index. */
10574 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
10575
10576 /* Modification time. */
10577 dw2_asm_output_data_uleb128 (0, NULL);
10578
10579 /* File length in bytes. */
10580 dw2_asm_output_data_uleb128 (0, NULL);
10581 #endif /* VMS_DEBUGGING_INFO */
10582 }
10583
10584 dw2_asm_output_data (1, 0, "End file name table");
10585 }
10586
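/* Editorial sketch, not part of the original source: given two
   hypothetical inputs "src/a.c" and "src/b.c", the heuristic above emits
   "src/" once in the directory table, and each file entry then carries
   only the base name plus that directory's index, followed (outside of
   VMS) by a zero modification time and a zero length.  */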
10587
10588 /* Output one line number table into the .debug_line section. */
10589
10590 static void
10591 output_one_line_info_table (dw_line_info_table *table)
10592 {
10593 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
10594 unsigned int current_line = 1;
10595 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
10596 dw_line_info_entry *ent;
10597 size_t i;
10598
10599 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
10600 {
10601 switch (ent->opcode)
10602 {
10603 case LI_set_address:
10604 /* ??? Unfortunately, we have little choice here currently, and
10605 must always use the most general form. GCC does not know the
10606 address delta itself, so we can't use DW_LNS_advance_pc. Many
10607 ports do have length attributes which will give an upper bound
10608 on the address range. We could perhaps use length attributes
10609 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
10610 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
10611
10612 /* This can handle any delta. This takes
10613 4+DWARF2_ADDR_SIZE bytes. */
10614 dw2_asm_output_data (1, 0, "set address %s", line_label);
10615 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
10616 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
10617 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
10618 break;
10619
10620 case LI_set_line:
10621 if (ent->val == current_line)
10622 {
10623 /* We still need to start a new row, so output a copy insn. */
10624 dw2_asm_output_data (1, DW_LNS_copy,
10625 "copy line %u", current_line);
10626 }
10627 else
10628 {
10629 int line_offset = ent->val - current_line;
10630 int line_delta = line_offset - DWARF_LINE_BASE;
10631
10632 current_line = ent->val;
10633 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
10634 {
10635 /* This can handle deltas from -10 to 234, using the current
10636 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
10637 This takes 1 byte. */
10638 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
10639 "line %u", current_line);
10640 }
10641 else
10642 {
10643 /* This can handle any delta. This takes at least 4 bytes,
10644 depending on the value being encoded. */
10645 dw2_asm_output_data (1, DW_LNS_advance_line,
10646 "advance to line %u", current_line);
10647 dw2_asm_output_data_sleb128 (line_offset, NULL);
10648 dw2_asm_output_data (1, DW_LNS_copy, NULL);
10649 }
10650 }
10651 break;
10652
10653 case LI_set_file:
10654 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
10655 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
10656 break;
10657
10658 case LI_set_column:
10659 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
10660 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
10661 break;
10662
10663 case LI_negate_stmt:
10664 current_is_stmt = !current_is_stmt;
10665 dw2_asm_output_data (1, DW_LNS_negate_stmt,
10666 "is_stmt %d", current_is_stmt);
10667 break;
10668
10669 case LI_set_prologue_end:
10670 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
10671 "set prologue end");
10672 break;
10673
10674 case LI_set_epilogue_begin:
10675 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
10676 "set epilogue begin");
10677 break;
10678
10679 case LI_set_discriminator:
10680 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
10681 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
10682 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
10683 dw2_asm_output_data_uleb128 (ent->val, NULL);
10684 break;
10685 }
10686 }
10687
10688 /* Emit debug info for the address of the end of the table. */
10689 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
10690 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
10691 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
10692 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
10693
10694 dw2_asm_output_data (1, 0, "end sequence");
10695 dw2_asm_output_data_uleb128 (1, NULL);
10696 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
10697 }
10698
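/* Editorial sketch, not part of the original source: in the table written
   above, a line delta small enough for the DWARF_LINE_BASE/DWARF_LINE_RANGE
   window costs a single special-opcode byte, a larger delta uses
   DW_LNS_advance_line with an sleb128 followed by DW_LNS_copy, and every
   address change uses the extended DW_LNE_set_address opcode; the table is
   closed by a final set_address/DW_LNE_end_sequence pair.  */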
10699 /* Output the source line number correspondence information. This
10700 information goes into the .debug_line section. */
10701
10702 static void
10703 output_line_info (bool prologue_only)
10704 {
10705 char l1[20], l2[20], p1[20], p2[20];
10706 /* We don't support DWARFv5 line tables yet. */
10707 int ver = dwarf_version < 5 ? dwarf_version : 4;
10708 bool saw_one = false;
10709 int opc;
10710
10711 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, 0);
10712 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, 0);
10713 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, 0);
10714 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, 0);
10715
10716 if (!XCOFF_DEBUGGING_INFO)
10717 {
10718 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10719 dw2_asm_output_data (4, 0xffffffff,
10720 "Initial length escape value indicating 64-bit DWARF extension");
10721 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
10722 "Length of Source Line Info");
10723 }
10724
10725 ASM_OUTPUT_LABEL (asm_out_file, l1);
10726
10727 dw2_asm_output_data (2, ver, "DWARF Version");
10728 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
10729 ASM_OUTPUT_LABEL (asm_out_file, p1);
10730
10731 /* Define the architecture-dependent minimum instruction length (in bytes).
10732 In this implementation of DWARF, this field is used for information
10733 purposes only. Since GCC generates assembly language, we have no
10734 a priori knowledge of how many instruction bytes are generated for each
10735 source line, and therefore can use only the DW_LNE_set_address and
10736 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
10737 this as '1', which is "correct enough" for all architectures,
10738 and don't let the target override. */
10739 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
10740
10741 if (ver >= 4)
10742 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
10743 "Maximum Operations Per Instruction");
10744 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
10745 "Default is_stmt_start flag");
10746 dw2_asm_output_data (1, DWARF_LINE_BASE,
10747 "Line Base Value (Special Opcodes)");
10748 dw2_asm_output_data (1, DWARF_LINE_RANGE,
10749 "Line Range Value (Special Opcodes)");
10750 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
10751 "Special Opcode Base");
10752
10753 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
10754 {
10755 int n_op_args;
10756 switch (opc)
10757 {
10758 case DW_LNS_advance_pc:
10759 case DW_LNS_advance_line:
10760 case DW_LNS_set_file:
10761 case DW_LNS_set_column:
10762 case DW_LNS_fixed_advance_pc:
10763 case DW_LNS_set_isa:
10764 n_op_args = 1;
10765 break;
10766 default:
10767 n_op_args = 0;
10768 break;
10769 }
10770
10771 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
10772 opc, n_op_args);
10773 }
10774
10775 /* Write out the information about the files we use. */
10776 output_file_names ();
10777 ASM_OUTPUT_LABEL (asm_out_file, p2);
10778 if (prologue_only)
10779 {
10780 /* Output the marker for the end of the line number info. */
10781 ASM_OUTPUT_LABEL (asm_out_file, l2);
10782 return;
10783 }
10784
10785 if (separate_line_info)
10786 {
10787 dw_line_info_table *table;
10788 size_t i;
10789
10790 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
10791 if (table->in_use)
10792 {
10793 output_one_line_info_table (table);
10794 saw_one = true;
10795 }
10796 }
10797 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
10798 {
10799 output_one_line_info_table (cold_text_section_line_info);
10800 saw_one = true;
10801 }
10802
10803 /* ??? Some Darwin linkers crash on a .debug_line section with no
10804 sequences. Further, merely a DW_LNE_end_sequence entry is not
10805 sufficient -- the address column must also be initialized.
10806 Make sure to output at least one set_address/end_sequence pair,
10807 choosing .text since that section is always present. */
10808 if (text_section_line_info->in_use || !saw_one)
10809 output_one_line_info_table (text_section_line_info);
10810
10811 /* Output the marker for the end of the line number info. */
10812 ASM_OUTPUT_LABEL (asm_out_file, l2);
10813 }
10814 \f
10815 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
10816
10817 static inline bool
10818 need_endianity_attribute_p (bool reverse)
10819 {
10820 return reverse && (dwarf_version >= 3 || !dwarf_strict);
10821 }
10822
10823 /* Given a pointer to a tree node for some base type, return a pointer to
10824 a DIE that describes the given type. REVERSE is true if the type is
10825 to be interpreted in the reverse storage order wrt the target order.
10826
10827 This routine must only be called for GCC type nodes that correspond to
10828 Dwarf base (fundamental) types. */
10829
10830 static dw_die_ref
10831 base_type_die (tree type, bool reverse)
10832 {
10833 dw_die_ref base_type_result;
10834 enum dwarf_type encoding;
10835 bool fpt_used = false;
10836 struct fixed_point_type_info fpt_info;
10837 tree type_bias = NULL_TREE;
10838
10839 if (TREE_CODE (type) == ERROR_MARK || TREE_CODE (type) == VOID_TYPE)
10840 return 0;
10841
10842 /* If this is a subtype that should not be emitted as a subrange type,
10843 use the base type. See subrange_type_for_debug_p. */
10844 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
10845 type = TREE_TYPE (type);
10846
10847 switch (TREE_CODE (type))
10848 {
10849 case INTEGER_TYPE:
10850 if ((dwarf_version >= 4 || !dwarf_strict)
10851 && TYPE_NAME (type)
10852 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
10853 && DECL_IS_BUILTIN (TYPE_NAME (type))
10854 && DECL_NAME (TYPE_NAME (type)))
10855 {
10856 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
10857 if (strcmp (name, "char16_t") == 0
10858 || strcmp (name, "char32_t") == 0)
10859 {
10860 encoding = DW_ATE_UTF;
10861 break;
10862 }
10863 }
10864 if ((dwarf_version >= 3 || !dwarf_strict)
10865 && lang_hooks.types.get_fixed_point_type_info)
10866 {
10867 memset (&fpt_info, 0, sizeof (fpt_info));
10868 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
10869 {
10870 fpt_used = true;
10871 encoding = ((TYPE_UNSIGNED (type))
10872 ? DW_ATE_unsigned_fixed
10873 : DW_ATE_signed_fixed);
10874 break;
10875 }
10876 }
10877 if (TYPE_STRING_FLAG (type))
10878 {
10879 if (TYPE_UNSIGNED (type))
10880 encoding = DW_ATE_unsigned_char;
10881 else
10882 encoding = DW_ATE_signed_char;
10883 }
10884 else if (TYPE_UNSIGNED (type))
10885 encoding = DW_ATE_unsigned;
10886 else
10887 encoding = DW_ATE_signed;
10888
10889 if (!dwarf_strict
10890 && lang_hooks.types.get_type_bias)
10891 type_bias = lang_hooks.types.get_type_bias (type);
10892 break;
10893
10894 case REAL_TYPE:
10895 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
10896 {
10897 if (dwarf_version >= 3 || !dwarf_strict)
10898 encoding = DW_ATE_decimal_float;
10899 else
10900 encoding = DW_ATE_lo_user;
10901 }
10902 else
10903 encoding = DW_ATE_float;
10904 break;
10905
10906 case FIXED_POINT_TYPE:
10907 if (!(dwarf_version >= 3 || !dwarf_strict))
10908 encoding = DW_ATE_lo_user;
10909 else if (TYPE_UNSIGNED (type))
10910 encoding = DW_ATE_unsigned_fixed;
10911 else
10912 encoding = DW_ATE_signed_fixed;
10913 break;
10914
10915 /* Dwarf2 doesn't know anything about complex ints, so use
10916 a user-defined type for them. */
10917 case COMPLEX_TYPE:
10918 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
10919 encoding = DW_ATE_complex_float;
10920 else
10921 encoding = DW_ATE_lo_user;
10922 break;
10923
10924 case BOOLEAN_TYPE:
10925 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
10926 encoding = DW_ATE_boolean;
10927 break;
10928
10929 default:
10930 /* No other TREE_CODEs are Dwarf fundamental types. */
10931 gcc_unreachable ();
10932 }
10933
10934 base_type_result = new_die (DW_TAG_base_type, comp_unit_die (), type);
10935
10936 add_AT_unsigned (base_type_result, DW_AT_byte_size,
10937 int_size_in_bytes (type));
10938 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
10939
10940 if (need_endianity_attribute_p (reverse))
10941 add_AT_unsigned (base_type_result, DW_AT_endianity,
10942 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
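/* That is, a scalar stored in reverse storage order is tagged with the
   endianity opposite to the target's native byte order.  */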
10943
10944 if (fpt_used)
10945 {
10946 switch (fpt_info.scale_factor_kind)
10947 {
10948 case fixed_point_scale_factor_binary:
10949 add_AT_int (base_type_result, DW_AT_binary_scale,
10950 fpt_info.scale_factor.binary);
10951 break;
10952
10953 case fixed_point_scale_factor_decimal:
10954 add_AT_int (base_type_result, DW_AT_decimal_scale,
10955 fpt_info.scale_factor.decimal);
10956 break;
10957
10958 case fixed_point_scale_factor_arbitrary:
10959 /* Arbitrary scale factors cannot be described in standard DWARF,
10960 yet. */
10961 if (!dwarf_strict)
10962 {
10963 /* Describe the scale factor as a rational constant. */
10964 const dw_die_ref scale_factor
10965 = new_die (DW_TAG_constant, comp_unit_die (), type);
10966
10967 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
10968 fpt_info.scale_factor.arbitrary.numerator);
10969 add_AT_int (scale_factor, DW_AT_GNU_denominator,
10970 fpt_info.scale_factor.arbitrary.denominator);
10971
10972 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
10973 }
10974 break;
10975
10976 default:
10977 gcc_unreachable ();
10978 }
10979 }
10980
10981 if (type_bias)
10982 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
10983 dw_scalar_form_constant
10984 | dw_scalar_form_exprloc
10985 | dw_scalar_form_reference,
10986 NULL);
10987
10988 add_pubtype (type, base_type_result);
10989
10990 return base_type_result;
10991 }
10992
10993 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
10994 named 'auto' in its type: return true for it, false otherwise. */
10995
10996 static inline bool
10997 is_cxx_auto (tree type)
10998 {
10999 if (is_cxx ())
11000 {
11001 tree name = TYPE_IDENTIFIER (type);
11002 if (name == get_identifier ("auto")
11003 || name == get_identifier ("decltype(auto)"))
11004 return true;
11005 }
11006 return false;
11007 }
11008
11009 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
11010 given input type is a Dwarf "fundamental" type. Otherwise return zero. */
11011
11012 static inline int
11013 is_base_type (tree type)
11014 {
11015 switch (TREE_CODE (type))
11016 {
11017 case ERROR_MARK:
11018 case VOID_TYPE:
11019 case INTEGER_TYPE:
11020 case REAL_TYPE:
11021 case FIXED_POINT_TYPE:
11022 case COMPLEX_TYPE:
11023 case BOOLEAN_TYPE:
11024 case POINTER_BOUNDS_TYPE:
11025 return 1;
11026
11027 case ARRAY_TYPE:
11028 case RECORD_TYPE:
11029 case UNION_TYPE:
11030 case QUAL_UNION_TYPE:
11031 case ENUMERAL_TYPE:
11032 case FUNCTION_TYPE:
11033 case METHOD_TYPE:
11034 case POINTER_TYPE:
11035 case REFERENCE_TYPE:
11036 case NULLPTR_TYPE:
11037 case OFFSET_TYPE:
11038 case LANG_TYPE:
11039 case VECTOR_TYPE:
11040 return 0;
11041
11042 default:
11043 if (is_cxx_auto (type))
11044 return 0;
11045 gcc_unreachable ();
11046 }
11047
11048 return 0;
11049 }
11050
11051 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
11052 node, return the size in bits for the type if it is a constant, the
11053 alignment for the type if its size is not constant, 0 if the type has
11054 no size, or BITS_PER_WORD if the type actually turns out to be an
11055 ERROR_MARK node. */
11056
11057 static inline unsigned HOST_WIDE_INT
11058 simple_type_size_in_bits (const_tree type)
11059 {
11060 if (TREE_CODE (type) == ERROR_MARK)
11061 return BITS_PER_WORD;
11062 else if (TYPE_SIZE (type) == NULL_TREE)
11063 return 0;
11064 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
11065 return tree_to_uhwi (TYPE_SIZE (type));
11066 else
11067 return TYPE_ALIGN (type);
11068 }
11069
11070 /* Similarly, but return an offset_int instead of UHWI. */
11071
11072 static inline offset_int
11073 offset_int_type_size_in_bits (const_tree type)
11074 {
11075 if (TREE_CODE (type) == ERROR_MARK)
11076 return BITS_PER_WORD;
11077 else if (TYPE_SIZE (type) == NULL_TREE)
11078 return 0;
11079 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
11080 return wi::to_offset (TYPE_SIZE (type));
11081 else
11082 return TYPE_ALIGN (type);
11083 }
11084
11085 /* Given a pointer to a tree node for a subrange type, return a pointer
11086 to a DIE that describes the given type. */
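/* For example, an Ada declaration such as "type Small is range 1 .. 10"
   would typically come through here and produce a DW_TAG_subrange_type
   with DW_AT_lower_bound 1 and DW_AT_upper_bound 10 (illustrative values;
   the bounds are whatever LOW and HIGH describe).  */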
11087
11088 static dw_die_ref
11089 subrange_type_die (tree type, tree low, tree high, tree bias,
11090 dw_die_ref context_die)
11091 {
11092 dw_die_ref subrange_die;
11093 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
11094
11095 if (context_die == NULL)
11096 context_die = comp_unit_die ();
11097
11098 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
11099
11100 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
11101 {
11102 /* The size of the subrange type and its base type do not match,
11103 so we need to generate a size attribute for the subrange type. */
11104 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
11105 }
11106
11107 if (low)
11108 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
11109 if (high)
11110 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
11111 if (bias && !dwarf_strict)
11112 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
11113 dw_scalar_form_constant
11114 | dw_scalar_form_exprloc
11115 | dw_scalar_form_reference,
11116 NULL);
11117
11118 return subrange_die;
11119 }
11120
11121 /* Returns the (const and/or volatile) cv_qualifiers associated with
11122 the decl node. This will normally be augmented with the
11123 cv_qualifiers of the underlying type in add_type_attribute. */
11124
11125 static int
11126 decl_quals (const_tree decl)
11127 {
11128 return ((TREE_READONLY (decl)
11129 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
11130 | (TREE_THIS_VOLATILE (decl)
11131 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
11132 }
11133
11134 /* Determine the TYPE whose qualifiers match the largest strict subset
11135 of the given TYPE_QUALS, and return its qualifiers. Ignore all
11136 qualifiers outside QUAL_MASK. */
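/* For instance, when asked about a "const volatile T" (TYPE_QUALS equal to
   TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE), if only a "const T" variant of the
   type exists, this returns TYPE_QUAL_CONST, so the caller only needs to
   layer a DW_TAG_volatile_type on top of the existing const DIE.  */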
11137
11138 static int
11139 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
11140 {
11141 tree t;
11142 int best_rank = 0, best_qual = 0, max_rank;
11143
11144 type_quals &= qual_mask;
11145 max_rank = popcount_hwi (type_quals) - 1;
11146
11147 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
11148 t = TYPE_NEXT_VARIANT (t))
11149 {
11150 int q = TYPE_QUALS (t) & qual_mask;
11151
11152 if ((q & type_quals) == q && q != type_quals
11153 && check_base_type (t, type))
11154 {
11155 int rank = popcount_hwi (q);
11156
11157 if (rank > best_rank)
11158 {
11159 best_rank = rank;
11160 best_qual = q;
11161 }
11162 }
11163 }
11164
11165 return best_qual;
11166 }
11167
11168 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
11169 static const dwarf_qual_info_t dwarf_qual_info[] =
11170 {
11171 { TYPE_QUAL_CONST, DW_TAG_const_type },
11172 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
11173 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
11174 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
11175 };
11176 static const unsigned int dwarf_qual_info_size
11177 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
11178
11179 /* If DIE is a qualified DIE of some base DIE with the same parent,
11180 return the base DIE, otherwise return NULL. Set MASK to the
11181 qualifiers added compared to the returned DIE. */
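/* A sketch of the typical case: for a DW_TAG_const_type DIE whose only
   attribute is a DW_AT_type pointing at a DW_TAG_volatile_type sibling,
   this walks down to the underlying unqualified DIE and accumulates
   TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE in *MASK; the walk is bounded
   by DEPTH.  */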
11182
11183 static dw_die_ref
11184 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
11185 {
11186 unsigned int i;
11187 for (i = 0; i < dwarf_qual_info_size; i++)
11188 if (die->die_tag == dwarf_qual_info[i].t)
11189 break;
11190 if (i == dwarf_qual_info_size)
11191 return NULL;
11192 if (vec_safe_length (die->die_attr) != 1)
11193 return NULL;
11194 dw_die_ref type = get_AT_ref (die, DW_AT_type);
11195 if (type == NULL || type->die_parent != die->die_parent)
11196 return NULL;
11197 *mask |= dwarf_qual_info[i].q;
11198 if (depth)
11199 {
11200 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
11201 if (ret)
11202 return ret;
11203 }
11204 return type;
11205 }
11206
11207 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
11208 entry that chains the modifiers specified by CV_QUALS in front of the
11209 given type. REVERSE is true if the type is to be interpreted in the
11210 reverse storage order wrt the target order. */
11211
11212 static dw_die_ref
11213 modified_type_die (tree type, int cv_quals, bool reverse,
11214 dw_die_ref context_die)
11215 {
11216 enum tree_code code = TREE_CODE (type);
11217 dw_die_ref mod_type_die;
11218 dw_die_ref sub_die = NULL;
11219 tree item_type = NULL;
11220 tree qualified_type;
11221 tree name, low, high;
11222 dw_die_ref mod_scope;
11223 /* Only these cv-qualifiers are currently handled. */
11224 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
11225 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC);
11226
11227 if (code == ERROR_MARK)
11228 return NULL;
11229
11230 if (lang_hooks.types.get_debug_type)
11231 {
11232 tree debug_type = lang_hooks.types.get_debug_type (type);
11233
11234 if (debug_type != NULL_TREE && debug_type != type)
11235 return modified_type_die (debug_type, cv_quals, reverse, context_die);
11236 }
11237
11238 cv_quals &= cv_qual_mask;
11239
11240 /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
11241 tag modifier (and not an attribute) that old consumers won't be
11242 able to handle. */
11243 if (dwarf_version < 3)
11244 cv_quals &= ~TYPE_QUAL_RESTRICT;
11245
11246 /* Likewise for DW_TAG_atomic_type for DWARFv5. */
11247 if (dwarf_version < 5)
11248 cv_quals &= ~TYPE_QUAL_ATOMIC;
11249
11250 /* See if we already have the appropriately qualified variant of
11251 this type. */
11252 qualified_type = get_qualified_type (type, cv_quals);
11253
11254 if (qualified_type == sizetype
11255 && TYPE_NAME (qualified_type)
11256 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
11257 {
11258 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
11259
11260 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
11261 && TYPE_PRECISION (t)
11262 == TYPE_PRECISION (qualified_type)
11263 && TYPE_UNSIGNED (t)
11264 == TYPE_UNSIGNED (qualified_type));
11265 qualified_type = t;
11266 }
11267
11268 /* If we do, then we can just use its DIE, if it exists. */
11269 if (qualified_type)
11270 {
11271 mod_type_die = lookup_type_die (qualified_type);
11272
11273 /* DW_AT_endianity doesn't come from a qualifier on the type. */
11274 if (mod_type_die
11275 && (!need_endianity_attribute_p (reverse)
11276 || !is_base_type (type)
11277 || get_AT_unsigned (mod_type_die, DW_AT_endianity)))
11278 return mod_type_die;
11279 }
11280
11281 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
11282
11283 /* Handle C typedef types. */
11284 if (name && TREE_CODE (name) == TYPE_DECL && DECL_ORIGINAL_TYPE (name)
11285 && !DECL_ARTIFICIAL (name))
11286 {
11287 tree dtype = TREE_TYPE (name);
11288
11289 if (qualified_type == dtype)
11290 {
11291 /* For a named type, use the typedef. */
11292 gen_type_die (qualified_type, context_die);
11293 return lookup_type_die (qualified_type);
11294 }
11295 else
11296 {
11297 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
11298 dquals &= cv_qual_mask;
11299 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
11300 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
11301 /* cv-unqualified version of named type. Just use
11302 the unnamed type to which it refers. */
11303 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
11304 reverse, context_die);
11305 /* Else cv-qualified version of named type; fall through. */
11306 }
11307 }
11308
11309 mod_scope = scope_die_for (type, context_die);
11310
11311 if (cv_quals)
11312 {
11313 int sub_quals = 0, first_quals = 0;
11314 unsigned i;
11315 dw_die_ref first = NULL, last = NULL;
11316
11317 /* Determine a lesser qualified type that most closely matches
11318 this one. Then generate DW_TAG_* entries for the remaining
11319 qualifiers. */
11320 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
11321 cv_qual_mask);
11322 if (sub_quals && use_debug_types)
11323 {
11324 bool needed = false;
11325 /* If emitting type units, make sure the order of qualifiers
11326 is canonical. Thus, start from unqualified type if
11327 an earlier qualifier is missing in sub_quals, but some later
11328 one is present there. */
11329 for (i = 0; i < dwarf_qual_info_size; i++)
11330 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
11331 needed = true;
11332 else if (needed && (dwarf_qual_info[i].q & cv_quals))
11333 {
11334 sub_quals = 0;
11335 break;
11336 }
11337 }
11338 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
11339 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
11340 {
11341 /* As not all intermediate qualified DIEs have corresponding
11342 tree types, ensure that qualified DIEs in the same scope
11343 as their DW_AT_type are emitted after their DW_AT_type,
11344 only with other qualified DIEs for the same type possibly
11345 in between them. Determine the range of such qualified
11346 DIEs now (first being the base type, last being the corresponding
11347 last qualified DIE for it). */
11348 unsigned int count = 0;
11349 first = qualified_die_p (mod_type_die, &first_quals,
11350 dwarf_qual_info_size);
11351 if (first == NULL)
11352 first = mod_type_die;
11353 gcc_assert ((first_quals & ~sub_quals) == 0);
11354 for (count = 0, last = first;
11355 count < (1U << dwarf_qual_info_size);
11356 count++, last = last->die_sib)
11357 {
11358 int quals = 0;
11359 if (last == mod_scope->die_child)
11360 break;
11361 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
11362 != first)
11363 break;
11364 }
11365 }
11366
11367 for (i = 0; i < dwarf_qual_info_size; i++)
11368 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
11369 {
11370 dw_die_ref d;
11371 if (first && first != last)
11372 {
11373 for (d = first->die_sib; ; d = d->die_sib)
11374 {
11375 int quals = 0;
11376 qualified_die_p (d, &quals, dwarf_qual_info_size);
11377 if (quals == (first_quals | dwarf_qual_info[i].q))
11378 break;
11379 if (d == last)
11380 {
11381 d = NULL;
11382 break;
11383 }
11384 }
11385 if (d)
11386 {
11387 mod_type_die = d;
11388 continue;
11389 }
11390 }
11391 if (first)
11392 {
11393 d = ggc_cleared_alloc<die_node> ();
11394 d->die_tag = dwarf_qual_info[i].t;
11395 add_child_die_after (mod_scope, d, last);
11396 last = d;
11397 }
11398 else
11399 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
11400 if (mod_type_die)
11401 add_AT_die_ref (d, DW_AT_type, mod_type_die);
11402 mod_type_die = d;
11403 first_quals |= dwarf_qual_info[i].q;
11404 }
11405 }
11406 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
11407 {
11408 dwarf_tag tag = DW_TAG_pointer_type;
11409 if (code == REFERENCE_TYPE)
11410 {
11411 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
11412 tag = DW_TAG_rvalue_reference_type;
11413 else
11414 tag = DW_TAG_reference_type;
11415 }
11416 mod_type_die = new_die (tag, mod_scope, type);
11417
11418 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
11419 simple_type_size_in_bits (type) / BITS_PER_UNIT);
11420 item_type = TREE_TYPE (type);
11421
11422 addr_space_t as = TYPE_ADDR_SPACE (item_type);
11423 if (!ADDR_SPACE_GENERIC_P (as))
11424 {
11425 int action = targetm.addr_space.debug (as);
11426 if (action >= 0)
11427 {
11428 /* Positive values indicate an address_class. */
11429 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
11430 }
11431 else
11432 {
11433 /* Negative values indicate an (inverted) segment base reg. */
11434 dw_loc_descr_ref d
11435 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
11436 add_AT_loc (mod_type_die, DW_AT_segment, d);
11437 }
11438 }
11439 }
11440 else if (code == INTEGER_TYPE
11441 && TREE_TYPE (type) != NULL_TREE
11442 && subrange_type_for_debug_p (type, &low, &high))
11443 {
11444 tree bias = NULL_TREE;
11445 if (lang_hooks.types.get_type_bias)
11446 bias = lang_hooks.types.get_type_bias (type);
11447 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
11448 item_type = TREE_TYPE (type);
11449 }
11450 else if (is_base_type (type))
11451 mod_type_die = base_type_die (type, reverse);
11452 else
11453 {
11454 gen_type_die (type, context_die);
11455
11456 /* We have to get the type_main_variant here (and pass that to the
11457 `lookup_type_die' routine) because the ..._TYPE node we have
11458 might simply be a *copy* of some original type node (where the
11459 copy was created to help us keep track of typedef names) and
11460 that copy might have a different TYPE_UID from the original
11461 ..._TYPE node. */
11462 if (TREE_CODE (type) != VECTOR_TYPE)
11463 return lookup_type_die (type_main_variant (type));
11464 else
11465 /* Vectors have the debugging information in the type,
11466 not the main variant. */
11467 return lookup_type_die (type);
11468 }
11469
11470 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
11471 don't output a DW_TAG_typedef, since there isn't one in the
11472 user's program; just attach a DW_AT_name to the type.
11473 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
11474 if the base type already has the same name. */
11475 if (name
11476 && ((TREE_CODE (name) != TYPE_DECL
11477 && (qualified_type == TYPE_MAIN_VARIANT (type)
11478 || (cv_quals == TYPE_UNQUALIFIED)))
11479 || (TREE_CODE (name) == TYPE_DECL
11480 && TREE_TYPE (name) == qualified_type
11481 && DECL_NAME (name))))
11482 {
11483 if (TREE_CODE (name) == TYPE_DECL)
11484 /* Could just call add_name_and_src_coords_attributes here,
11485 but since this is a builtin type it doesn't have any
11486 useful source coordinates anyway. */
11487 name = DECL_NAME (name);
11488 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
11489 }
11490 /* This probably indicates a bug. */
11491 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
11492 {
11493 name = TYPE_IDENTIFIER (type);
11494 add_name_attribute (mod_type_die,
11495 name ? IDENTIFIER_POINTER (name) : "__unknown__");
11496 }
11497
11498 if (qualified_type)
11499 equate_type_number_to_die (qualified_type, mod_type_die);
11500
11501 if (item_type)
11502 /* We must do this after the equate_type_number_to_die call, in case
11503 this is a recursive type. This ensures that the modified_type_die
11504 recursion will terminate even if the type is recursive. Recursive
11505 types are possible in Ada. */
11506 sub_die = modified_type_die (item_type,
11507 TYPE_QUALS_NO_ADDR_SPACE (item_type),
11508 reverse,
11509 context_die);
11510
11511 if (sub_die != NULL)
11512 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
11513
11514 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
11515 if (TYPE_ARTIFICIAL (type))
11516 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
11517
11518 return mod_type_die;
11519 }
11520
11521 /* Generate DIEs for the generic parameters of T.
11522 T must be either a generic type or a generic function.
11523 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
11524
11525 static void
11526 gen_generic_params_dies (tree t)
11527 {
11528 tree parms, args;
11529 int parms_num, i;
11530 dw_die_ref die = NULL;
11531 int non_default;
11532
11533 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
11534 return;
11535
11536 if (TYPE_P (t))
11537 die = lookup_type_die (t);
11538 else if (DECL_P (t))
11539 die = lookup_decl_die (t);
11540
11541 gcc_assert (die);
11542
11543 parms = lang_hooks.get_innermost_generic_parms (t);
11544 if (!parms)
11545 /* T has no generic parameter. It means T is neither a generic type
11546 nor a generic function. End of story. */
11547 return;
11548
11549 parms_num = TREE_VEC_LENGTH (parms);
11550 args = lang_hooks.get_innermost_generic_args (t);
11551 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
11552 non_default = int_cst_value (TREE_CHAIN (args));
11553 else
11554 non_default = TREE_VEC_LENGTH (args);
11555 for (i = 0; i < parms_num; i++)
11556 {
11557 tree parm, arg, arg_pack_elems;
11558 dw_die_ref parm_die;
11559
11560 parm = TREE_VEC_ELT (parms, i);
11561 arg = TREE_VEC_ELT (args, i);
11562 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
11563 gcc_assert (parm && TREE_VALUE (parm) && arg);
11564
11565 if (parm && TREE_VALUE (parm) && arg)
11566 {
11567 /* If PARM represents a template parameter pack,
11568 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
11569 by DW_TAG_template_*_parameter DIEs for the argument
11570 pack elements of ARG. Note that ARG would then be
11571 an argument pack. */
11572 if (arg_pack_elems)
11573 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
11574 arg_pack_elems,
11575 die);
11576 else
11577 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
11578 true /* emit name */, die);
11579 if (i >= non_default)
11580 add_AT_flag (parm_die, DW_AT_default_value, 1);
11581 }
11582 }
11583 }
11584
11585 /* Create and return a DIE for PARM which should be
11586 the representation of a generic type parameter.
11587 For instance, in the C++ front end, PARM would be a template parameter.
11588 ARG is the argument to PARM.
11589 EMIT_NAME_P: if true, the DIE will have a DW_AT_name attribute set to the
11590 name of the PARM.
11591 PARENT_DIE is the parent DIE which the new created DIE should be added to,
11592 as a child node. */
11593
11594 static dw_die_ref
11595 generic_parameter_die (tree parm, tree arg,
11596 bool emit_name_p,
11597 dw_die_ref parent_die)
11598 {
11599 dw_die_ref tmpl_die = NULL;
11600 const char *name = NULL;
11601
11602 if (!parm || !DECL_NAME (parm) || !arg)
11603 return NULL;
11604
11605 /* We support non-type generic parameters and arguments,
11606 type generic parameters and arguments, as well as
11607 generic generic parameters (a.k.a. template template parameters in C++)
11608 and arguments. */
11609 if (TREE_CODE (parm) == PARM_DECL)
11610 /* PARM is a nontype generic parameter */
11611 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
11612 else if (TREE_CODE (parm) == TYPE_DECL)
11613 /* PARM is a type generic parameter. */
11614 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
11615 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
11616 /* PARM is a generic generic parameter.
11617 Its DIE is a GNU extension. It shall have a
11618 DW_AT_name attribute to represent the name of the template template
11619 parameter, and a DW_AT_GNU_template_name attribute to represent the
11620 name of the template template argument. */
11621 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
11622 parent_die, parm);
11623 else
11624 gcc_unreachable ();
11625
11626 if (tmpl_die)
11627 {
11628 tree tmpl_type;
11629
11630 /* If PARM is a generic parameter pack, it means we are
11631 emitting debug info for a template argument pack element.
11632 In other terms, ARG is a template argument pack element.
11633 In that case, we don't emit any DW_AT_name attribute for
11634 the die. */
11635 if (emit_name_p)
11636 {
11637 name = IDENTIFIER_POINTER (DECL_NAME (parm));
11638 gcc_assert (name);
11639 add_AT_string (tmpl_die, DW_AT_name, name);
11640 }
11641
11642 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
11643 {
11644 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
11645 TMPL_DIE should have a child DW_AT_type attribute that is set
11646 to the type of the argument to PARM, which is ARG.
11647 If PARM is a type generic parameter, TMPL_DIE should have a
11648 child DW_AT_type that is set to ARG. */
11649 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
11650 add_type_attribute (tmpl_die, tmpl_type,
11651 (TREE_THIS_VOLATILE (tmpl_type)
11652 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
11653 false, parent_die);
11654 }
11655 else
11656 {
11657 /* So TMPL_DIE is a DIE representing a generic generic template
11658 parameter, a.k.a. a template template parameter in C++, and ARG
11659 is a template. */
11660
11661 /* The DW_AT_GNU_template_name attribute of the DIE must be set
11662 to the name of the argument. */
11663 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
11664 if (name)
11665 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
11666 }
11667
11668 if (TREE_CODE (parm) == PARM_DECL)
11669 /* So PARM is a non-type generic parameter.
11670 DWARF3 5.6.8 says we must set a DW_AT_const_value child
11671 attribute of TMPL_DIE whose value represents the value
11672 of ARG.
11673 We must be careful here:
11674 the value of ARG might reference some function decls.
11675 We might currently be emitting debug info for a generic
11676 type, and since types are emitted before function decls, we don't
11677 know whether the function decls referenced by ARG will actually be
11678 emitted after cgraph computations.
11679 So we must defer the generation of the DW_AT_const_value to
11680 after cgraph is ready. */
11681 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
11682 }
11683
11684 return tmpl_die;
11685 }
11686
11687 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
11688 PARM_PACK, which must be a template parameter pack. The returned DIE
11689 will be a child DIE of PARENT_DIE. */
11690
11691 static dw_die_ref
11692 template_parameter_pack_die (tree parm_pack,
11693 tree parm_pack_args,
11694 dw_die_ref parent_die)
11695 {
11696 dw_die_ref die;
11697 int j;
11698
11699 gcc_assert (parent_die && parm_pack);
11700
11701 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
11702 add_name_and_src_coords_attributes (die, parm_pack);
11703 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
11704 generic_parameter_die (parm_pack,
11705 TREE_VEC_ELT (parm_pack_args, j),
11706 false /* Don't emit DW_AT_name */,
11707 die);
11708 return die;
11709 }
11710
11711 /* Given a pointer to an arbitrary ..._TYPE tree node, return true if it is
11712 an enumerated type. */
11713
11714 static inline int
11715 type_is_enum (const_tree type)
11716 {
11717 return TREE_CODE (type) == ENUMERAL_TYPE;
11718 }
11719
11720 /* Return the DBX register number described by a given RTL node. */
11721
11722 static unsigned int
11723 dbx_reg_number (const_rtx rtl)
11724 {
11725 unsigned regno = REGNO (rtl);
11726
11727 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
11728
11729 #ifdef LEAF_REG_REMAP
11730 if (crtl->uses_only_leaf_regs)
11731 {
11732 int leaf_reg = LEAF_REG_REMAP (regno);
11733 if (leaf_reg != -1)
11734 regno = (unsigned) leaf_reg;
11735 }
11736 #endif
11737
11738 regno = DBX_REGISTER_NUMBER (regno);
11739 gcc_assert (regno != INVALID_REGNUM);
11740 return regno;
11741 }
11742
11743 /* Optionally add a DW_OP_piece term to a location description expression.
11744 DW_OP_piece is only added if the location description expression doesn't
11745 already end with DW_OP_piece. */
11746
11747 static void
11748 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
11749 {
11750 dw_loc_descr_ref loc;
11751
11752 if (*list_head != NULL)
11753 {
11754 /* Find the end of the chain. */
11755 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
11756 ;
11757
11758 if (loc->dw_loc_opc != DW_OP_piece)
11759 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
11760 }
11761 }
11762
11763 /* Return a location descriptor that designates a machine register or
11764 zero if there is none. */
11765
11766 static dw_loc_descr_ref
11767 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
11768 {
11769 rtx regs;
11770
11771 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
11772 return 0;
11773
11774 /* We only use "frame base" when we're sure we're talking about the
11775 post-prologue local stack frame. We do this by *not* running
11776 register elimination until this point, and recognizing the special
11777 argument pointer and soft frame pointer rtx's.
11778 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
11779 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
11780 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
11781 {
11782 dw_loc_descr_ref result = NULL;
11783
11784 if (dwarf_version >= 4 || !dwarf_strict)
11785 {
11786 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
11787 initialized);
11788 if (result)
11789 add_loc_descr (&result,
11790 new_loc_descr (DW_OP_stack_value, 0, 0));
11791 }
11792 return result;
11793 }
11794
11795 regs = targetm.dwarf_register_span (rtl);
11796
11797 if (REG_NREGS (rtl) > 1 || regs)
11798 return multiple_reg_loc_descriptor (rtl, regs, initialized);
11799 else
11800 {
11801 unsigned int dbx_regnum = dbx_reg_number (rtl);
11802 if (dbx_regnum == IGNORED_DWARF_REGNUM)
11803 return 0;
11804 return one_reg_loc_descriptor (dbx_regnum, initialized);
11805 }
11806 }
11807
11808 /* Return a location descriptor that designates a machine register for
11809 a given hard register number. */
11810
11811 static dw_loc_descr_ref
11812 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
11813 {
11814 dw_loc_descr_ref reg_loc_descr;
11815
11816 if (regno <= 31)
11817 reg_loc_descr
11818 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
11819 else
11820 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
11821
11822 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
11823 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
11824
11825 return reg_loc_descr;
11826 }
11827
11828 /* Given an RTL of a register, return a location descriptor that
11829 designates a value that spans more than one register. */
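/* A minimal sketch of the result: a 16-byte value living in two
   consecutive 8-byte hard registers comes out as the composite
     DW_OP_regX DW_OP_piece 8  DW_OP_regY DW_OP_piece 8
   where X and Y stand for the DBX numbers of the registers involved.  */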
11830
11831 static dw_loc_descr_ref
11832 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
11833 enum var_init_status initialized)
11834 {
11835 int size, i;
11836 dw_loc_descr_ref loc_result = NULL;
11837
11838 /* Simple, contiguous registers. */
11839 if (regs == NULL_RTX)
11840 {
11841 unsigned reg = REGNO (rtl);
11842 int nregs;
11843
11844 #ifdef LEAF_REG_REMAP
11845 if (crtl->uses_only_leaf_regs)
11846 {
11847 int leaf_reg = LEAF_REG_REMAP (reg);
11848 if (leaf_reg != -1)
11849 reg = (unsigned) leaf_reg;
11850 }
11851 #endif
11852
11853 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
11854 nregs = REG_NREGS (rtl);
11855
11856 size = GET_MODE_SIZE (GET_MODE (rtl)) / nregs;
11857
11858 loc_result = NULL;
11859 while (nregs--)
11860 {
11861 dw_loc_descr_ref t;
11862
11863 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
11864 VAR_INIT_STATUS_INITIALIZED);
11865 add_loc_descr (&loc_result, t);
11866 add_loc_descr_op_piece (&loc_result, size);
11867 ++reg;
11868 }
11869 return loc_result;
11870 }
11871
11872 /* Now onto stupid register sets in non-contiguous locations. */
11873
11874 gcc_assert (GET_CODE (regs) == PARALLEL);
11875
11876 size = GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0)));
11877 loc_result = NULL;
11878
11879 for (i = 0; i < XVECLEN (regs, 0); ++i)
11880 {
11881 dw_loc_descr_ref t;
11882
11883 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
11884 VAR_INIT_STATUS_INITIALIZED);
11885 add_loc_descr (&loc_result, t);
11886 add_loc_descr_op_piece (&loc_result, size);
11887 }
11888
11889 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
11890 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
11891 return loc_result;
11892 }
11893
11894 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
11895
11896 /* Return a location descriptor that designates a constant i,
11897 as a compound operation from constant (i >> shift), constant shift
11898 and DW_OP_shl. */
11899
11900 static dw_loc_descr_ref
11901 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
11902 {
11903 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
11904 add_loc_descr (&ret, int_loc_descriptor (shift));
11905 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
11906 return ret;
11907 }
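/* For example, with a 64-bit HOST_WIDE_INT the constant 0x1200000000
   (i.e. 0x12 << 32) is emitted as DW_OP_lit18 DW_OP_const1u 32 DW_OP_shl,
   4 bytes in total, where a single DW_OP_constu would need 7 bytes.  */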
11908
11909 /* Return a location descriptor that designates a constant. */
11910
11911 static dw_loc_descr_ref
11912 int_loc_descriptor (HOST_WIDE_INT i)
11913 {
11914 enum dwarf_location_atom op;
11915
11916 /* Pick the smallest representation of a constant, rather than just
11917 defaulting to the LEB encoding. */
11918 if (i >= 0)
11919 {
11920 int clz = clz_hwi (i);
11921 int ctz = ctz_hwi (i);
11922 if (i <= 31)
11923 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
11924 else if (i <= 0xff)
11925 op = DW_OP_const1u;
11926 else if (i <= 0xffff)
11927 op = DW_OP_const2u;
11928 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
11929 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
11930 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
11931 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
11932 while DW_OP_const4u is 5 bytes. */
11933 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
11934 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
11935 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
11936 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
11937 while DW_OP_const4u is 5 bytes. */
11938 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
11939 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
11940 op = DW_OP_const4u;
11941 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
11942 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
11943 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes,
11944 while DW_OP_constu of constant >= 0x100000000 takes at least
11945 6 bytes. */
11946 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
11947 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
11948 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
11949 >= HOST_BITS_PER_WIDE_INT)
11950 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
11951 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes,
11952 while DW_OP_constu takes in this case at least 6 bytes. */
11953 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
11954 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
11955 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
11956 && size_of_uleb128 (i) > 6)
11957 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
11958 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
11959 else
11960 op = DW_OP_constu;
11961 }
11962 else
11963 {
11964 if (i >= -0x80)
11965 op = DW_OP_const1s;
11966 else if (i >= -0x8000)
11967 op = DW_OP_const2s;
11968 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
11969 {
11970 if (size_of_int_loc_descriptor (i) < 5)
11971 {
11972 dw_loc_descr_ref ret = int_loc_descriptor (-i);
11973 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
11974 return ret;
11975 }
11976 op = DW_OP_const4s;
11977 }
11978 else
11979 {
11980 if (size_of_int_loc_descriptor (i)
11981 < (unsigned long) 1 + size_of_sleb128 (i))
11982 {
11983 dw_loc_descr_ref ret = int_loc_descriptor (-i);
11984 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
11985 return ret;
11986 }
11987 op = DW_OP_consts;
11988 }
11989 }
11990
11991 return new_loc_descr (op, i, 0);
11992 }
11993
11994 /* Likewise, for unsigned constants. */
11995
11996 static dw_loc_descr_ref
11997 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
11998 {
11999 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
12000 const unsigned HOST_WIDE_INT max_uint
12001 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
12002
12003 /* If possible, use the clever signed constants handling. */
12004 if (i <= max_int)
12005 return int_loc_descriptor ((HOST_WIDE_INT) i);
12006
12007 /* Here, we are left with positive numbers that cannot be represented as
12008 HOST_WIDE_INT, i.e.:
12009 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
12010
12011 Using a DW_OP_const4u/const8u/constu operation to encode them consumes a
12012 lot of bytes, whereas it may be better to output a negative integer:
12013 thanks to integer wrapping, we know that:
12014 x = x - 2 ** (8 * DWARF2_ADDR_SIZE)
12015 = x - 2 * (max (HOST_WIDE_INT) + 1)
12016 So numbers close to max (unsigned HOST_WIDE_INT) could be represented as
12017 small negative integers. Let's try that in cases where it will clearly improve
12018 the encoding: there is no gain turning DW_OP_const4u into
12019 DW_OP_const4s. */
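/* For instance, with DWARF2_ADDR_SIZE == 8, i == 0xffffffffffff8000 wraps
   to -0x8000 and is emitted as DW_OP_const2s -0x8000 (3 bytes) rather
   than DW_OP_const8u (9 bytes).  */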
12020 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
12021 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
12022 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
12023 {
12024 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
12025
12026 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
12027 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
12028 const HOST_WIDE_INT second_shift
12029 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
12030
12031 /* So we finally have:
12032 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
12033 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
12034 return int_loc_descriptor (second_shift);
12035 }
12036
12037 /* Last chance: fallback to a simple constant operation. */
12038 return new_loc_descr
12039 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
12040 ? DW_OP_const4u
12041 : DW_OP_const8u,
12042 i, 0);
12043 }
12044
12045 /* Generate and return a location description that computes the unsigned
12046 comparison of the two stack top entries (a OP b where b is the top-most
12047 entry and a is the second one). The KIND of comparison can be LT_EXPR,
12048 LE_EXPR, GT_EXPR or GE_EXPR. */
12049
12050 static dw_loc_descr_ref
12051 uint_comparison_loc_list (enum tree_code kind)
12052 {
12053 enum dwarf_location_atom op, flip_op;
12054 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
12055
12056 switch (kind)
12057 {
12058 case LT_EXPR:
12059 op = DW_OP_lt;
12060 break;
12061 case LE_EXPR:
12062 op = DW_OP_le;
12063 break;
12064 case GT_EXPR:
12065 op = DW_OP_gt;
12066 break;
12067 case GE_EXPR:
12068 op = DW_OP_ge;
12069 break;
12070 default:
12071 gcc_unreachable ();
12072 }
12073
12074 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
12075 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
12076
12077 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
12078 possible to perform unsigned comparisons: we just have to distinguish
12079 two cases:
12080
12081 1. when a and b have the same sign (as signed integers); then we should
12082 return: a OP(signed) b;
12083
12084 2. when a is a negative signed integer while b is a positive one, then a
12085 is a greater unsigned integer than b; likewise when a and b's roles
12086 are flipped.
12087
12088 So first, compare the sign of the two operands. */
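/* A sketch of the sequence built below, for KIND == LT_EXPR:
     DW_OP_over DW_OP_over DW_OP_xor   ; xor of the two operands
     DW_OP_lit0 DW_OP_lt DW_OP_bra L1  ; negative xor => signs differ
     DW_OP_lt DW_OP_skip L2            ; same sign: signed compare is fine
   L1: DW_OP_gt                        ; signs differ: flip the comparison
   L2: DW_OP_nop                       ; join point  */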
12089 ret = new_loc_descr (DW_OP_over, 0, 0);
12090 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
12091 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
12092 /* If they have different signs (i.e. they have different sign bits), then
12093 the stack top value now has the sign bit set and thus it's smaller than
12094 zero. */
12095 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
12096 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
12097 add_loc_descr (&ret, bra_node);
12098
12099 /* We are in case 1. At this point, we know both operands have the same
12100 sign, so it's safe to use the built-in signed comparison. */
12101 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
12102 add_loc_descr (&ret, jmp_node);
12103
12104 /* We are in case 2. Here, we know both operands do not have the same sign,
12105 so we have to flip the signed comparison. */
12106 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
12107 tmp = new_loc_descr (flip_op, 0, 0);
12108 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
12109 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
12110 add_loc_descr (&ret, tmp);
12111
12112 /* This dummy operation is necessary to make the two branches join. */
12113 tmp = new_loc_descr (DW_OP_nop, 0, 0);
12114 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
12115 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
12116 add_loc_descr (&ret, tmp);
12117
12118 return ret;
12119 }
12120
12121 /* Likewise, but takes the location description lists (might be destructive on
12122 them). Return NULL if either is NULL or if concatenation fails. */
12123
12124 static dw_loc_list_ref
12125 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
12126 enum tree_code kind)
12127 {
12128 if (left == NULL || right == NULL)
12129 return NULL;
12130
12131 add_loc_list (&left, right);
12132 if (left == NULL)
12133 return NULL;
12134
12135 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
12136 return left;
12137 }
12138
12139 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
12140 without actually allocating it. */
12141
12142 static unsigned long
12143 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
12144 {
12145 return size_of_int_loc_descriptor (i >> shift)
12146 + size_of_int_loc_descriptor (shift)
12147 + 1;
12148 }
12149
12150 /* Return size_of_locs (int_loc_descriptor (i)) without
12151 actually allocating it. */
12152
12153 static unsigned long
12154 size_of_int_loc_descriptor (HOST_WIDE_INT i)
12155 {
12156 unsigned long s;
12157
12158 if (i >= 0)
12159 {
12160 int clz, ctz;
12161 if (i <= 31)
12162 return 1;
12163 else if (i <= 0xff)
12164 return 2;
12165 else if (i <= 0xffff)
12166 return 3;
12167 clz = clz_hwi (i);
12168 ctz = ctz_hwi (i);
12169 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
12170 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
12171 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
12172 - clz - 5);
12173 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
12174 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
12175 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
12176 - clz - 8);
12177 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
12178 return 5;
12179 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
12180 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
12181 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
12182 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
12183 - clz - 8);
12184 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
12185 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
12186 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
12187 - clz - 16);
12188 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
12189 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
12190 && s > 6)
12191 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
12192 - clz - 32);
12193 else
12194 return 1 + s;
12195 }
12196 else
12197 {
12198 if (i >= -0x80)
12199 return 2;
12200 else if (i >= -0x8000)
12201 return 3;
12202 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
12203 {
12204 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
12205 {
12206 s = size_of_int_loc_descriptor (-i) + 1;
12207 if (s < 5)
12208 return s;
12209 }
12210 return 5;
12211 }
12212 else
12213 {
12214 unsigned long r = 1 + size_of_sleb128 (i);
12215 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
12216 {
12217 s = size_of_int_loc_descriptor (-i) + 1;
12218 if (s < r)
12219 return s;
12220 }
12221 return r;
12222 }
12223 }
12224 }
12225
12226 /* Return a loc description representing the "address" of an integer value.
12227 This can appear only as a toplevel expression. */
12228
12229 static dw_loc_descr_ref
12230 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
12231 {
12232 int litsize;
12233 dw_loc_descr_ref loc_result = NULL;
12234
12235 if (!(dwarf_version >= 4 || !dwarf_strict))
12236 return NULL;
12237
12238 litsize = size_of_int_loc_descriptor (i);
12239 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
12240 is more compact. For DW_OP_stack_value we need:
12241 litsize + 1 (DW_OP_stack_value)
12242 and for DW_OP_implicit_value:
12243 1 (DW_OP_implicit_value) + 1 (length) + size. */
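/* E.g. for i == 5 with a 4-byte value: DW_OP_lit5 DW_OP_stack_value is
   2 bytes, while DW_OP_implicit_value 4 <5 0 0 0> would roughly take
   6 bytes, so the stack-value form wins.  */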
12244 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
12245 {
12246 loc_result = int_loc_descriptor (i);
12247 add_loc_descr (&loc_result,
12248 new_loc_descr (DW_OP_stack_value, 0, 0));
12249 return loc_result;
12250 }
12251
12252 loc_result = new_loc_descr (DW_OP_implicit_value,
12253 size, 0);
12254 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
12255 loc_result->dw_loc_oprnd2.v.val_int = i;
12256 return loc_result;
12257 }
12258
12259 /* Return a location descriptor that designates a base+offset location. */
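/* A typical result (illustrative): a local variable 16 bytes below the
   frame base is described as DW_OP_fbreg -16, relative to the enclosing
   subprogram's DW_AT_frame_base; when no frame base applies, the fallback
   is DW_OP_bregN <offset> against the hard register itself.  */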
12260
12261 static dw_loc_descr_ref
12262 based_loc_descr (rtx reg, HOST_WIDE_INT offset,
12263 enum var_init_status initialized)
12264 {
12265 unsigned int regno;
12266 dw_loc_descr_ref result;
12267 dw_fde_ref fde = cfun->fde;
12268
12269 /* We only use "frame base" when we're sure we're talking about the
12270 post-prologue local stack frame. We do this by *not* running
12271 register elimination until this point, and recognizing the special
12272 argument pointer and soft frame pointer rtx's. */
12273 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
12274 {
12275 rtx elim = (ira_use_lra_p
12276 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
12277 : eliminate_regs (reg, VOIDmode, NULL_RTX));
12278
12279 if (elim != reg)
12280 {
12281 if (GET_CODE (elim) == PLUS)
12282 {
12283 offset += INTVAL (XEXP (elim, 1));
12284 elim = XEXP (elim, 0);
12285 }
12286 gcc_assert ((SUPPORTS_STACK_ALIGNMENT
12287 && (elim == hard_frame_pointer_rtx
12288 || elim == stack_pointer_rtx))
12289 || elim == (frame_pointer_needed
12290 ? hard_frame_pointer_rtx
12291 : stack_pointer_rtx));
12292
12293 /* If drap register is used to align stack, use frame
12294 pointer + offset to access stack variables. If stack
12295 is aligned without drap, use stack pointer + offset to
12296 access stack variables. */
12297 if (crtl->stack_realign_tried
12298 && reg == frame_pointer_rtx)
12299 {
12300 int base_reg
12301 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
12302 ? HARD_FRAME_POINTER_REGNUM
12303 : REGNO (elim));
12304 return new_reg_loc_descr (base_reg, offset);
12305 }
12306
12307 gcc_assert (frame_pointer_fb_offset_valid);
12308 offset += frame_pointer_fb_offset;
12309 return new_loc_descr (DW_OP_fbreg, offset, 0);
12310 }
12311 }
12312
12313 regno = REGNO (reg);
12314 #ifdef LEAF_REG_REMAP
12315 if (crtl->uses_only_leaf_regs)
12316 {
12317 int leaf_reg = LEAF_REG_REMAP (regno);
12318 if (leaf_reg != -1)
12319 regno = (unsigned) leaf_reg;
12320 }
12321 #endif
12322 regno = DWARF_FRAME_REGNUM (regno);
12323
12324 if (!optimize && fde
12325 && (fde->drap_reg == regno || fde->vdrap_reg == regno))
12326 {
12327 /* Use cfa+offset to represent the location of arguments passed
12328 on the stack when drap is used to align stack.
12329 Only do this when not optimizing; for optimized code var-tracking
12330 is supposed to track where the arguments live, and the register
12331 used as vdrap or drap in some spot might be used for something
12332 else in other parts of the routine. */
12333 return new_loc_descr (DW_OP_fbreg, offset, 0);
12334 }
12335
12336 if (regno <= 31)
12337 result = new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + regno),
12338 offset, 0);
12339 else
12340 result = new_loc_descr (DW_OP_bregx, regno, offset);
12341
12342 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
12343 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
12344
12345 return result;
12346 }
12347
12348 /* Return true if this RTL expression describes a base+offset calculation. */
12349
12350 static inline int
12351 is_based_loc (const_rtx rtl)
12352 {
12353 return (GET_CODE (rtl) == PLUS
12354 && ((REG_P (XEXP (rtl, 0))
12355 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
12356 && CONST_INT_P (XEXP (rtl, 1)))));
12357 }
12358
12359 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
12360 failed. */
12361
12362 static dw_loc_descr_ref
12363 tls_mem_loc_descriptor (rtx mem)
12364 {
12365 tree base;
12366 dw_loc_descr_ref loc_result;
12367
12368 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
12369 return NULL;
12370
12371 base = get_base_address (MEM_EXPR (mem));
12372 if (base == NULL
12373 || TREE_CODE (base) != VAR_DECL
12374 || !DECL_THREAD_LOCAL_P (base))
12375 return NULL;
12376
12377 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
12378 if (loc_result == NULL)
12379 return NULL;
12380
12381 if (MEM_OFFSET (mem))
12382 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
12383
12384 return loc_result;
12385 }
12386
12387 /* Output debug info about the reason why we failed to expand an expression
12388 as a dwarf expression. */
12389
12390 static void
12391 expansion_failed (tree expr, rtx rtl, char const *reason)
12392 {
12393 if (dump_file && (dump_flags & TDF_DETAILS))
12394 {
12395 fprintf (dump_file, "Failed to expand as dwarf: ");
12396 if (expr)
12397 print_generic_expr (dump_file, expr, dump_flags);
12398 if (rtl)
12399 {
12400 fprintf (dump_file, "\n");
12401 print_rtl (dump_file, rtl);
12402 }
12403 fprintf (dump_file, "\nReason: %s\n", reason);
12404 }
12405 }
12406
12407 /* Helper function for const_ok_for_output. */
12408
12409 static bool
12410 const_ok_for_output_1 (rtx rtl)
12411 {
12412 if (GET_CODE (rtl) == UNSPEC)
12413 {
12414 /* If delegitimize_address couldn't do anything with the UNSPEC, assume
12415 we can't express it in the debug info. */
12416 /* Don't complain about TLS UNSPECs, those are just too hard to
12417 delegitimize. Note this could be a non-decl SYMBOL_REF such as
12418 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
12419 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
12420 if (flag_checking
12421 && (XVECLEN (rtl, 0) == 0
12422 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
12423 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
12424 inform (current_function_decl
12425 ? DECL_SOURCE_LOCATION (current_function_decl)
12426 : UNKNOWN_LOCATION,
12427 #if NUM_UNSPEC_VALUES > 0
12428 "non-delegitimized UNSPEC %s (%d) found in variable location",
12429 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
12430 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
12431 XINT (rtl, 1));
12432 #else
12433 "non-delegitimized UNSPEC %d found in variable location",
12434 XINT (rtl, 1));
12435 #endif
12436 expansion_failed (NULL_TREE, rtl,
12437 "UNSPEC hasn't been delegitimized.\n");
12438 return false;
12439 }
12440
12441 if (targetm.const_not_ok_for_debug_p (rtl))
12442 {
12443 expansion_failed (NULL_TREE, rtl,
12444 "Expression rejected for debug by the backend.\n");
12445 return false;
12446 }
12447
12448 /* FIXME: Refer to PR60655. It is possible for simplification
12449 of rtl expressions in var tracking to produce such expressions.
12450 We should really identify / validate expressions
12451 enclosed in CONST that can be handled by assemblers on various
12452 targets and only handle legitimate cases here. */
12453 if (GET_CODE (rtl) != SYMBOL_REF)
12454 {
12455 if (GET_CODE (rtl) == NOT)
12456 return false;
12457 return true;
12458 }
12459
12460 if (CONSTANT_POOL_ADDRESS_P (rtl))
12461 {
12462 bool marked;
12463 get_pool_constant_mark (rtl, &marked);
12464 /* If all references to this pool constant were optimized away,
12465 it was not output and thus we can't represent it. */
12466 if (!marked)
12467 {
12468 expansion_failed (NULL_TREE, rtl,
12469 "Constant was removed from constant pool.\n");
12470 return false;
12471 }
12472 }
12473
12474 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
12475 return false;
12476
12477 /* Avoid references to external symbols in debug info: on several targets
12478 the linker might even refuse to link when linking a shared library,
12479 and in many other cases the relocations for .debug_info/.debug_loc are
12480 dropped, so the address becomes zero anyway. Hidden symbols, guaranteed
12481 to be defined within the same shared library or executable, are fine. */
12482 if (SYMBOL_REF_EXTERNAL_P (rtl))
12483 {
12484 tree decl = SYMBOL_REF_DECL (rtl);
12485
12486 if (decl == NULL || !targetm.binds_local_p (decl))
12487 {
12488 expansion_failed (NULL_TREE, rtl,
12489 "Symbol not defined in current TU.\n");
12490 return false;
12491 }
12492 }
12493
12494 return true;
12495 }
12496
12497 /* Return true if constant RTL can be emitted in DW_OP_addr or
12498 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
12499 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
12500
12501 static bool
12502 const_ok_for_output (rtx rtl)
12503 {
12504 if (GET_CODE (rtl) == SYMBOL_REF)
12505 return const_ok_for_output_1 (rtl);
12506
12507 if (GET_CODE (rtl) == CONST)
12508 {
12509 subrtx_var_iterator::array_type array;
12510 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
12511 if (!const_ok_for_output_1 (*iter))
12512 return false;
12513 return true;
12514 }
12515
12516 return true;
12517 }
12518
12519 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
12520 if possible, NULL otherwise. */
12521
12522 static dw_die_ref
12523 base_type_for_mode (machine_mode mode, bool unsignedp)
12524 {
12525 dw_die_ref type_die;
12526 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
12527
12528 if (type == NULL)
12529 return NULL;
12530 switch (TREE_CODE (type))
12531 {
12532 case INTEGER_TYPE:
12533 case REAL_TYPE:
12534 break;
12535 default:
12536 return NULL;
12537 }
12538 type_die = lookup_type_die (type);
12539 if (!type_die)
12540 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
12541 comp_unit_die ());
12542 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
12543 return NULL;
12544 return type_die;
12545 }
12546
12547 /* For OP, a descriptor assumed to be in unsigned MODE, convert it to an
12548 unsigned type matching MODE, or, if MODE is narrower than or as wide as
12549 DWARF2_ADDR_SIZE, to an untyped value. Return NULL if the conversion is
12550 not possible. */
12551
12552 static dw_loc_descr_ref
12553 convert_descriptor_to_mode (machine_mode mode, dw_loc_descr_ref op)
12554 {
12555 machine_mode outer_mode = mode;
12556 dw_die_ref type_die;
12557 dw_loc_descr_ref cvt;
12558
12559 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
12560 {
12561 add_loc_descr (&op, new_loc_descr (DW_OP_GNU_convert, 0, 0));
12562 return op;
12563 }
12564 type_die = base_type_for_mode (outer_mode, 1);
12565 if (type_die == NULL)
12566 return NULL;
12567 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
12568 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
12569 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
12570 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
12571 add_loc_descr (&op, cvt);
12572 return op;
12573 }
12574
12575 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
12576
12577 static dw_loc_descr_ref
12578 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
12579 dw_loc_descr_ref op1)
12580 {
12581 dw_loc_descr_ref ret = op0;
12582 add_loc_descr (&ret, op1);
12583 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
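/* The DWARF comparison operators push 1 when the relation holds and 0 when it
does not; if the target's STORE_FLAG_VALUE differs, scale the result so the
expression matches what the RTL comparison would yield. */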
12584 if (STORE_FLAG_VALUE != 1)
12585 {
12586 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
12587 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
12588 }
12589 return ret;
12590 }
12591
12592 /* Return location descriptor for signed comparison OP RTL. */
12593
12594 static dw_loc_descr_ref
12595 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
12596 machine_mode mem_mode)
12597 {
12598 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
12599 dw_loc_descr_ref op0, op1;
12600 int shift;
12601
12602 if (op_mode == VOIDmode)
12603 op_mode = GET_MODE (XEXP (rtl, 1));
12604 if (op_mode == VOIDmode)
12605 return NULL;
12606
12607 if (dwarf_strict
12608 && (GET_MODE_CLASS (op_mode) != MODE_INT
12609 || GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE))
12610 return NULL;
12611
12612 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
12613 VAR_INIT_STATUS_INITIALIZED);
12614 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
12615 VAR_INIT_STATUS_INITIALIZED);
12616
12617 if (op0 == NULL || op1 == NULL)
12618 return NULL;
12619
12620 if (GET_MODE_CLASS (op_mode) != MODE_INT
12621 || GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
12622 return compare_loc_descriptor (op, op0, op1);
12623
12624 if (GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
12625 {
12626 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
12627 dw_loc_descr_ref cvt;
12628
12629 if (type_die == NULL)
12630 return NULL;
12631 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
12632 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
12633 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
12634 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
12635 add_loc_descr (&op0, cvt);
12636 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
12637 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
12638 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
12639 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
12640 add_loc_descr (&op1, cvt);
12641 return compare_loc_descriptor (op, op0, op1);
12642 }
12643
12644 shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
12645 /* For eq/ne, if the operands are known to be zero-extended,
12646 there is no need to do the fancy shifting up. */
12647 if (op == DW_OP_eq || op == DW_OP_ne)
12648 {
12649 dw_loc_descr_ref last0, last1;
12650 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
12651 ;
12652 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
12653 ;
12654 /* deref_size zero extends, and for constants we can check
12655 whether they are zero extended or not. */
12656 if (((last0->dw_loc_opc == DW_OP_deref_size
12657 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
12658 || (CONST_INT_P (XEXP (rtl, 0))
12659 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
12660 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
12661 && ((last1->dw_loc_opc == DW_OP_deref_size
12662 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
12663 || (CONST_INT_P (XEXP (rtl, 1))
12664 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
12665 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
12666 return compare_loc_descriptor (op, op0, op1);
12667
12668 /* EQ/NE comparison against constant in narrower type than
12669 DWARF2_ADDR_SIZE can be performed either as
12670 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
12671 DW_OP_{eq,ne}
12672 or
12673 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
12674 DW_OP_{eq,ne}. Pick whatever is shorter. */
12675 if (CONST_INT_P (XEXP (rtl, 1))
12676 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
12677 && (size_of_int_loc_descriptor (shift) + 1
12678 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1)) << shift)
12679 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
12680 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
12681 & GET_MODE_MASK (op_mode))))
12682 {
12683 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
12684 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
12685 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
12686 & GET_MODE_MASK (op_mode));
12687 return compare_loc_descriptor (op, op0, op1);
12688 }
12689 }
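/* General case for narrow signed comparisons: shift both operands left so the
sign bit of OP_MODE becomes the sign bit of the address-sized stack slot; the
signed DWARF comparison of the shifted values then gives the desired result. */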
12690 add_loc_descr (&op0, int_loc_descriptor (shift));
12691 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
12692 if (CONST_INT_P (XEXP (rtl, 1)))
12693 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) << shift);
12694 else
12695 {
12696 add_loc_descr (&op1, int_loc_descriptor (shift));
12697 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
12698 }
12699 return compare_loc_descriptor (op, op0, op1);
12700 }
12701
12702 /* Return location descriptor for unsigned comparison OP RTL. */
12703
12704 static dw_loc_descr_ref
12705 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
12706 machine_mode mem_mode)
12707 {
12708 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
12709 dw_loc_descr_ref op0, op1;
12710
12711 if (op_mode == VOIDmode)
12712 op_mode = GET_MODE (XEXP (rtl, 1));
12713 if (op_mode == VOIDmode)
12714 return NULL;
12715 if (GET_MODE_CLASS (op_mode) != MODE_INT)
12716 return NULL;
12717
12718 if (dwarf_strict && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
12719 return NULL;
12720
12721 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
12722 VAR_INIT_STATUS_INITIALIZED);
12723 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
12724 VAR_INIT_STATUS_INITIALIZED);
12725
12726 if (op0 == NULL || op1 == NULL)
12727 return NULL;
12728
12729 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
12730 {
12731 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
12732 dw_loc_descr_ref last0, last1;
12733 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
12734 ;
12735 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
12736 ;
12737 if (CONST_INT_P (XEXP (rtl, 0)))
12738 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
12739 /* deref_size zero extends, so no need to mask it again. */
12740 else if (last0->dw_loc_opc != DW_OP_deref_size
12741 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
12742 {
12743 add_loc_descr (&op0, int_loc_descriptor (mask));
12744 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
12745 }
12746 if (CONST_INT_P (XEXP (rtl, 1)))
12747 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
12748 /* deref_size zero extends, so no need to mask it again. */
12749 else if (last1->dw_loc_opc != DW_OP_deref_size
12750 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
12751 {
12752 add_loc_descr (&op1, int_loc_descriptor (mask));
12753 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
12754 }
12755 }
12756 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
12757 {
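/* Address-sized operands: flip the sign bit of both values by adding
2^(DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1), so that the signed DWARF comparison
orders them the same way an unsigned comparison would. */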
12758 HOST_WIDE_INT bias = 1;
12759 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
12760 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
12761 if (CONST_INT_P (XEXP (rtl, 1)))
12762 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
12763 + INTVAL (XEXP (rtl, 1)));
12764 else
12765 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
12766 bias, 0));
12767 }
12768 return compare_loc_descriptor (op, op0, op1);
12769 }
12770
12771 /* Return location descriptor for {U,S}{MIN,MAX}. */
12772
12773 static dw_loc_descr_ref
12774 minmax_loc_descriptor (rtx rtl, machine_mode mode,
12775 machine_mode mem_mode)
12776 {
12777 enum dwarf_location_atom op;
12778 dw_loc_descr_ref op0, op1, ret;
12779 dw_loc_descr_ref bra_node, drop_node;
12780
12781 if (dwarf_strict
12782 && (GET_MODE_CLASS (mode) != MODE_INT
12783 || GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE))
12784 return NULL;
12785
12786 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
12787 VAR_INIT_STATUS_INITIALIZED);
12788 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
12789 VAR_INIT_STATUS_INITIALIZED);
12790
12791 if (op0 == NULL || op1 == NULL)
12792 return NULL;
12793
12794 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
12795 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
12796 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
12797 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
12798 {
12799 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
12800 {
12801 HOST_WIDE_INT mask = GET_MODE_MASK (mode);
12802 add_loc_descr (&op0, int_loc_descriptor (mask));
12803 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
12804 add_loc_descr (&op1, int_loc_descriptor (mask));
12805 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
12806 }
12807 else if (GET_MODE_SIZE (mode) == DWARF2_ADDR_SIZE)
12808 {
12809 HOST_WIDE_INT bias = 1;
12810 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
12811 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
12812 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
12813 }
12814 }
12815 else if (GET_MODE_CLASS (mode) == MODE_INT
12816 && GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
12817 {
12818 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (mode)) * BITS_PER_UNIT;
12819 add_loc_descr (&op0, int_loc_descriptor (shift));
12820 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
12821 add_loc_descr (&op1, int_loc_descriptor (shift));
12822 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
12823 }
12824 else if (GET_MODE_CLASS (mode) == MODE_INT
12825 && GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE)
12826 {
12827 dw_die_ref type_die = base_type_for_mode (mode, 0);
12828 dw_loc_descr_ref cvt;
12829 if (type_die == NULL)
12830 return NULL;
12831 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
12832 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
12833 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
12834 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
12835 add_loc_descr (&op0, cvt);
12836 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
12837 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
12838 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
12839 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
12840 add_loc_descr (&op1, cvt);
12841 }
12842
12843 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
12844 op = DW_OP_lt;
12845 else
12846 op = DW_OP_gt;
12847 ret = op0;
12848 add_loc_descr (&ret, op1);
12849 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
12850 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
12851 add_loc_descr (&ret, bra_node);
12852 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
12853 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
12854 add_loc_descr (&ret, drop_node);
12855 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
12856 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
12857 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
12858 && GET_MODE_CLASS (mode) == MODE_INT
12859 && GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE)
12860 ret = convert_descriptor_to_mode (mode, ret);
12861 return ret;
12862 }
12863
12864 /* Helper function for mem_loc_descriptor. Perform the binary operation OP
12865 on the operands of RTL, but first convert both operands to TYPE_DIE and
12866 afterwards convert the result back to an unsigned or untyped value matching MODE. */
12867
12868 static dw_loc_descr_ref
12869 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
12870 machine_mode mode, machine_mode mem_mode)
12871 {
12872 dw_loc_descr_ref cvt, op0, op1;
12873
12874 if (type_die == NULL)
12875 return NULL;
12876 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
12877 VAR_INIT_STATUS_INITIALIZED);
12878 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
12879 VAR_INIT_STATUS_INITIALIZED);
12880 if (op0 == NULL || op1 == NULL)
12881 return NULL;
12882 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
12883 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
12884 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
12885 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
12886 add_loc_descr (&op0, cvt);
12887 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
12888 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
12889 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
12890 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
12891 add_loc_descr (&op1, cvt);
12892 add_loc_descr (&op0, op1);
12893 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
12894 return convert_descriptor_to_mode (mode, op0);
12895 }
12896
12897 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
12898 const0 is DW_OP_lit0 or corresponding typed constant,
12899 const1 is DW_OP_lit1 or corresponding typed constant
12900 and constMSB is constant with just the MSB bit set
12901 for the mode):
12902 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
12903 L1: const0 DW_OP_swap
12904 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
12905 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
12906 L3: DW_OP_drop
12907 L4: DW_OP_nop
12908
12909 CTZ is similar:
12910 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
12911 L1: const0 DW_OP_swap
12912 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
12913 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
12914 L3: DW_OP_drop
12915 L4: DW_OP_nop
12916
12917 FFS is similar:
12918 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
12919 L1: const1 DW_OP_swap
12920 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
12921 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
12922 L3: DW_OP_drop
12923 L4: DW_OP_nop */
12924
12925 static dw_loc_descr_ref
12926 clz_loc_descriptor (rtx rtl, machine_mode mode,
12927 machine_mode mem_mode)
12928 {
12929 dw_loc_descr_ref op0, ret, tmp;
12930 HOST_WIDE_INT valv;
12931 dw_loc_descr_ref l1jump, l1label;
12932 dw_loc_descr_ref l2jump, l2label;
12933 dw_loc_descr_ref l3jump, l3label;
12934 dw_loc_descr_ref l4jump, l4label;
12935 rtx msb;
12936
12937 if (GET_MODE_CLASS (mode) != MODE_INT
12938 || GET_MODE (XEXP (rtl, 0)) != mode)
12939 return NULL;
12940
12941 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
12942 VAR_INIT_STATUS_INITIALIZED);
12943 if (op0 == NULL)
12944 return NULL;
12945 ret = op0;
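/* VALV is what the expression should yield for a zero input: the target's
defined CLZ/CTZ value at zero if there is one, otherwise the mode's bitsize
(and 0 for FFS, since ffs(0) == 0). */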
12946 if (GET_CODE (rtl) == CLZ)
12947 {
12948 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
12949 valv = GET_MODE_BITSIZE (mode);
12950 }
12951 else if (GET_CODE (rtl) == FFS)
12952 valv = 0;
12953 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
12954 valv = GET_MODE_BITSIZE (mode);
12955 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
12956 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
12957 add_loc_descr (&ret, l1jump);
12958 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
12959 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
12960 VAR_INIT_STATUS_INITIALIZED);
12961 if (tmp == NULL)
12962 return NULL;
12963 add_loc_descr (&ret, tmp);
12964 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
12965 add_loc_descr (&ret, l4jump);
12966 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
12967 ? const1_rtx : const0_rtx,
12968 mode, mem_mode,
12969 VAR_INIT_STATUS_INITIALIZED);
12970 if (l1label == NULL)
12971 return NULL;
12972 add_loc_descr (&ret, l1label);
12973 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
12974 l2label = new_loc_descr (DW_OP_dup, 0, 0);
12975 add_loc_descr (&ret, l2label);
12976 if (GET_CODE (rtl) != CLZ)
12977 msb = const1_rtx;
12978 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
12979 msb = GEN_INT ((unsigned HOST_WIDE_INT) 1
12980 << (GET_MODE_BITSIZE (mode) - 1));
12981 else
12982 msb = immed_wide_int_const
12983 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
12984 GET_MODE_PRECISION (mode)), mode);
12985 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
12986 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
12987 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
12988 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
12989 else
12990 tmp = mem_loc_descriptor (msb, mode, mem_mode,
12991 VAR_INIT_STATUS_INITIALIZED);
12992 if (tmp == NULL)
12993 return NULL;
12994 add_loc_descr (&ret, tmp);
12995 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
12996 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
12997 add_loc_descr (&ret, l3jump);
12998 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
12999 VAR_INIT_STATUS_INITIALIZED);
13000 if (tmp == NULL)
13001 return NULL;
13002 add_loc_descr (&ret, tmp);
13003 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
13004 ? DW_OP_shl : DW_OP_shr, 0, 0));
13005 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
13006 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
13007 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
13008 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
13009 add_loc_descr (&ret, l2jump);
13010 l3label = new_loc_descr (DW_OP_drop, 0, 0);
13011 add_loc_descr (&ret, l3label);
13012 l4label = new_loc_descr (DW_OP_nop, 0, 0);
13013 add_loc_descr (&ret, l4label);
13014 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
13015 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
13016 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
13017 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
13018 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
13019 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
13020 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
13021 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
13022 return ret;
13023 }
13024
13025 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
13026 const1 is DW_OP_lit1 or corresponding typed constant):
13027 const0 DW_OP_swap
13028 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
13029 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
13030 L2: DW_OP_drop
13031
13032 PARITY is similar:
13033 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
13034 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
13035 L2: DW_OP_drop */
13036
13037 static dw_loc_descr_ref
13038 popcount_loc_descriptor (rtx rtl, machine_mode mode,
13039 machine_mode mem_mode)
13040 {
13041 dw_loc_descr_ref op0, ret, tmp;
13042 dw_loc_descr_ref l1jump, l1label;
13043 dw_loc_descr_ref l2jump, l2label;
13044
13045 if (GET_MODE_CLASS (mode) != MODE_INT
13046 || GET_MODE (XEXP (rtl, 0)) != mode)
13047 return NULL;
13048
13049 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
13050 VAR_INIT_STATUS_INITIALIZED);
13051 if (op0 == NULL)
13052 return NULL;
13053 ret = op0;
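/* Emit the loop sketched above: a zero accumulator is kept below the value,
and each pass folds the value's low bit into it, using DW_OP_plus for POPCOUNT
and DW_OP_xor for PARITY, before shifting the value right by one. */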
13054 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
13055 VAR_INIT_STATUS_INITIALIZED);
13056 if (tmp == NULL)
13057 return NULL;
13058 add_loc_descr (&ret, tmp);
13059 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
13060 l1label = new_loc_descr (DW_OP_dup, 0, 0);
13061 add_loc_descr (&ret, l1label);
13062 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
13063 add_loc_descr (&ret, l2jump);
13064 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
13065 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
13066 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
13067 VAR_INIT_STATUS_INITIALIZED);
13068 if (tmp == NULL)
13069 return NULL;
13070 add_loc_descr (&ret, tmp);
13071 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
13072 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
13073 ? DW_OP_plus : DW_OP_xor, 0, 0));
13074 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
13075 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
13076 VAR_INIT_STATUS_INITIALIZED);
13077 add_loc_descr (&ret, tmp);
13078 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13079 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
13080 add_loc_descr (&ret, l1jump);
13081 l2label = new_loc_descr (DW_OP_drop, 0, 0);
13082 add_loc_descr (&ret, l2label);
13083 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
13084 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
13085 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
13086 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
13087 return ret;
13088 }
13089
13090 /* BSWAP (constS is initial shift count, either 56 or 24):
13091 constS const0
13092 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
13093 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
13094 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
13095 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
13096 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
13097
13098 static dw_loc_descr_ref
13099 bswap_loc_descriptor (rtx rtl, machine_mode mode,
13100 machine_mode mem_mode)
13101 {
13102 dw_loc_descr_ref op0, ret, tmp;
13103 dw_loc_descr_ref l1jump, l1label;
13104 dw_loc_descr_ref l2jump, l2label;
13105
13106 if (GET_MODE_CLASS (mode) != MODE_INT
13107 || BITS_PER_UNIT != 8
13108 || (GET_MODE_BITSIZE (mode) != 32
13109 && GET_MODE_BITSIZE (mode) != 64))
13110 return NULL;
13111
13112 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
13113 VAR_INIT_STATUS_INITIALIZED);
13114 if (op0 == NULL)
13115 return NULL;
13116
13117 ret = op0;
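/* The loop keeps three values on the stack: the input value, the current
shift count (initially bitsize - 8) and the partial result. Each pass moves
one byte of the value into the mirrored byte position of the result and
decrements the count by 8, finishing once it reaches zero. */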
13118 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
13119 mode, mem_mode,
13120 VAR_INIT_STATUS_INITIALIZED);
13121 if (tmp == NULL)
13122 return NULL;
13123 add_loc_descr (&ret, tmp);
13124 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
13125 VAR_INIT_STATUS_INITIALIZED);
13126 if (tmp == NULL)
13127 return NULL;
13128 add_loc_descr (&ret, tmp);
13129 l1label = new_loc_descr (DW_OP_pick, 2, 0);
13130 add_loc_descr (&ret, l1label);
13131 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
13132 mode, mem_mode,
13133 VAR_INIT_STATUS_INITIALIZED);
13134 add_loc_descr (&ret, tmp);
13135 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
13136 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
13137 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13138 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
13139 VAR_INIT_STATUS_INITIALIZED);
13140 if (tmp == NULL)
13141 return NULL;
13142 add_loc_descr (&ret, tmp);
13143 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
13144 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
13145 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13146 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
13147 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
13148 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
13149 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
13150 VAR_INIT_STATUS_INITIALIZED);
13151 add_loc_descr (&ret, tmp);
13152 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
13153 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
13154 add_loc_descr (&ret, l2jump);
13155 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
13156 VAR_INIT_STATUS_INITIALIZED);
13157 add_loc_descr (&ret, tmp);
13158 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
13159 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
13160 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
13161 add_loc_descr (&ret, l1jump);
13162 l2label = new_loc_descr (DW_OP_drop, 0, 0);
13163 add_loc_descr (&ret, l2label);
13164 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
13165 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
13166 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
13167 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
13168 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
13169 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
13170 return ret;
13171 }
13172
13173 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
13174 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
13175 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
13176 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
13177
13178 ROTATERT is similar:
13179 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
13180 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
13181 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
13182
13183 static dw_loc_descr_ref
13184 rotate_loc_descriptor (rtx rtl, machine_mode mode,
13185 machine_mode mem_mode)
13186 {
13187 rtx rtlop1 = XEXP (rtl, 1);
13188 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
13189 int i;
13190
13191 if (GET_MODE_CLASS (mode) != MODE_INT)
13192 return NULL;
13193
13194 if (GET_MODE (rtlop1) != VOIDmode
13195 && GET_MODE_BITSIZE (GET_MODE (rtlop1)) < GET_MODE_BITSIZE (mode))
13196 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
13197 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
13198 VAR_INIT_STATUS_INITIALIZED);
13199 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
13200 VAR_INIT_STATUS_INITIALIZED);
13201 if (op0 == NULL || op1 == NULL)
13202 return NULL;
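/* For modes narrower than the DWARF address size, the intermediate values
must be masked back to the mode's width (the constMASK DW_OP_and steps in the
scheme above) so that bits shifted beyond the mode do not leak into the result. */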
13203 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
13204 for (i = 0; i < 2; i++)
13205 {
13206 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
13207 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
13208 mode, mem_mode,
13209 VAR_INIT_STATUS_INITIALIZED);
13210 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
13211 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
13212 ? DW_OP_const4u
13213 : HOST_BITS_PER_WIDE_INT == 64
13214 ? DW_OP_const8u : DW_OP_constu,
13215 GET_MODE_MASK (mode), 0);
13216 else
13217 mask[i] = NULL;
13218 if (mask[i] == NULL)
13219 return NULL;
13220 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
13221 }
13222 ret = op0;
13223 add_loc_descr (&ret, op1);
13224 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
13225 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
13226 if (GET_CODE (rtl) == ROTATERT)
13227 {
13228 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13229 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
13230 GET_MODE_BITSIZE (mode), 0));
13231 }
13232 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13233 if (mask[0] != NULL)
13234 add_loc_descr (&ret, mask[0]);
13235 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
13236 if (mask[1] != NULL)
13237 {
13238 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
13239 add_loc_descr (&ret, mask[1]);
13240 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
13241 }
13242 if (GET_CODE (rtl) == ROTATE)
13243 {
13244 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13245 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
13246 GET_MODE_BITSIZE (mode), 0));
13247 }
13248 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13249 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
13250 return ret;
13251 }
13252
13253 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
13254 for DEBUG_PARAMETER_REF RTL. */
13255
13256 static dw_loc_descr_ref
13257 parameter_ref_descriptor (rtx rtl)
13258 {
13259 dw_loc_descr_ref ret;
13260 dw_die_ref ref;
13261
13262 if (dwarf_strict)
13263 return NULL;
13264 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
13265 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
13266 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
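/* If the parameter already has a DIE, refer to it directly; otherwise record
the PARM_DECL itself so the reference can be resolved later, once its DIE exists. */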
13267 if (ref)
13268 {
13269 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
13270 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
13271 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
13272 }
13273 else
13274 {
13275 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
13276 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
13277 }
13278 return ret;
13279 }
13280
13281 /* The following routine converts the RTL for a variable or parameter
13282 (resident in memory) into an equivalent Dwarf representation of a
13283 mechanism for getting the address of that same variable onto the top of a
13284 hypothetical "address evaluation" stack.
13285
13286 When creating memory location descriptors, we are effectively transforming
13287 the RTL for a memory-resident object into its Dwarf postfix expression
13288 equivalent. This routine recursively descends an RTL tree, turning
13289 it into Dwarf postfix code as it goes.
13290
13291 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
13292
13293 MEM_MODE is the mode of the memory reference, needed to handle some
13294 autoincrement addressing modes.
13295
13296 Return 0 if we can't represent the location. */
13297
13298 dw_loc_descr_ref
13299 mem_loc_descriptor (rtx rtl, machine_mode mode,
13300 machine_mode mem_mode,
13301 enum var_init_status initialized)
13302 {
13303 dw_loc_descr_ref mem_loc_result = NULL;
13304 enum dwarf_location_atom op;
13305 dw_loc_descr_ref op0, op1;
13306 rtx inner = NULL_RTX;
13307
13308 if (mode == VOIDmode)
13309 mode = GET_MODE (rtl);
13310
13311 /* Note that for a dynamically sized array, the location we will generate a
13312 description of here will be the lowest numbered location which is
13313 actually within the array. That's *not* necessarily the same as the
13314 zeroth element of the array. */
13315
13316 rtl = targetm.delegitimize_address (rtl);
13317
13318 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
13319 return NULL;
13320
13321 switch (GET_CODE (rtl))
13322 {
13323 case POST_INC:
13324 case POST_DEC:
13325 case POST_MODIFY:
13326 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
13327
13328 case SUBREG:
13329 /* The case of a subreg may arise when we have a local (register)
13330 variable or a formal (register) parameter which doesn't quite fill
13331 up an entire register. For now, just assume that it is
13332 legitimate to make the Dwarf info refer to the whole register which
13333 contains the given subreg. */
13334 if (!subreg_lowpart_p (rtl))
13335 break;
13336 inner = SUBREG_REG (rtl);
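/* FALLTHRU */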
13337 case TRUNCATE:
13338 if (inner == NULL_RTX)
13339 inner = XEXP (rtl, 0);
13340 if (GET_MODE_CLASS (mode) == MODE_INT
13341 && GET_MODE_CLASS (GET_MODE (inner)) == MODE_INT
13342 && (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE
13343 #ifdef POINTERS_EXTEND_UNSIGNED
13344 || (mode == Pmode && mem_mode != VOIDmode)
13345 #endif
13346 )
13347 && GET_MODE_SIZE (GET_MODE (inner)) <= DWARF2_ADDR_SIZE)
13348 {
13349 mem_loc_result = mem_loc_descriptor (inner,
13350 GET_MODE (inner),
13351 mem_mode, initialized);
13352 break;
13353 }
13354 if (dwarf_strict)
13355 break;
13356 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (inner)))
13357 break;
13358 if (GET_MODE_SIZE (mode) != GET_MODE_SIZE (GET_MODE (inner))
13359 && (GET_MODE_CLASS (mode) != MODE_INT
13360 || GET_MODE_CLASS (GET_MODE (inner)) != MODE_INT))
13361 break;
13362 else
13363 {
13364 dw_die_ref type_die;
13365 dw_loc_descr_ref cvt;
13366
13367 mem_loc_result = mem_loc_descriptor (inner,
13368 GET_MODE (inner),
13369 mem_mode, initialized);
13370 if (mem_loc_result == NULL)
13371 break;
13372 type_die = base_type_for_mode (mode,
13373 GET_MODE_CLASS (mode) == MODE_INT);
13374 if (type_die == NULL)
13375 {
13376 mem_loc_result = NULL;
13377 break;
13378 }
13379 if (GET_MODE_SIZE (mode)
13380 != GET_MODE_SIZE (GET_MODE (inner)))
13381 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
13382 else
13383 cvt = new_loc_descr (DW_OP_GNU_reinterpret, 0, 0);
13384 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
13385 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
13386 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
13387 add_loc_descr (&mem_loc_result, cvt);
13388 if (GET_MODE_CLASS (mode) == MODE_INT
13389 && GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
13390 {
13391 /* Convert it to untyped afterwards. */
13392 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
13393 add_loc_descr (&mem_loc_result, cvt);
13394 }
13395 }
13396 break;
13397
13398 case REG:
13399 if (GET_MODE_CLASS (mode) != MODE_INT
13400 || (GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE
13401 && rtl != arg_pointer_rtx
13402 && rtl != frame_pointer_rtx
13403 #ifdef POINTERS_EXTEND_UNSIGNED
13404 && (mode != Pmode || mem_mode == VOIDmode)
13405 #endif
13406 ))
13407 {
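/* A register holding a non-integer value, or an integer wider than the DWARF
address size, cannot be pushed as an untyped stack value; emit
DW_OP_GNU_regval_type with a base type DIE instead (only possible when not
restricted to strict DWARF). */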
13408 dw_die_ref type_die;
13409 unsigned int dbx_regnum;
13410
13411 if (dwarf_strict)
13412 break;
13413 if (REGNO (rtl) > FIRST_PSEUDO_REGISTER)
13414 break;
13415 type_die = base_type_for_mode (mode,
13416 GET_MODE_CLASS (mode) == MODE_INT);
13417 if (type_die == NULL)
13418 break;
13419
13420 dbx_regnum = dbx_reg_number (rtl);
13421 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13422 break;
13423 mem_loc_result = new_loc_descr (DW_OP_GNU_regval_type,
13424 dbx_regnum, 0);
13425 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
13426 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
13427 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
13428 break;
13429 }
13430 /* Whenever a register number forms a part of the description of the
13431 method for calculating the (dynamic) address of a memory resident
13432 object, DWARF rules require the register number be referred to as
13433 a "base register". This distinction is not based in any way upon
13434 what category of register the hardware believes the given register
13435 belongs to. This is strictly DWARF terminology we're dealing with
13436 here. Note that in cases where the location of a memory-resident
13437 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
13438 OP_CONST (0)) the actual DWARF location descriptor that we generate
13439 may just be OP_BASEREG (basereg). This may look deceptively like
13440 the object in question was allocated to a register (rather than in
13441 memory) so DWARF consumers need to be aware of the subtle
13442 distinction between OP_REG and OP_BASEREG. */
13443 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
13444 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
13445 else if (stack_realign_drap
13446 && crtl->drap_reg
13447 && crtl->args.internal_arg_pointer == rtl
13448 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
13449 {
13450 /* If RTL is internal_arg_pointer, which has been optimized
13451 out, use DRAP instead. */
13452 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
13453 VAR_INIT_STATUS_INITIALIZED);
13454 }
13455 break;
13456
13457 case SIGN_EXTEND:
13458 case ZERO_EXTEND:
13459 if (GET_MODE_CLASS (mode) != MODE_INT)
13460 break;
13461 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
13462 mem_mode, VAR_INIT_STATUS_INITIALIZED);
13463 if (op0 == 0)
13464 break;
13465 else if (GET_CODE (rtl) == ZERO_EXTEND
13466 && GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE
13467 && GET_MODE_BITSIZE (GET_MODE (XEXP (rtl, 0)))
13468 < HOST_BITS_PER_WIDE_INT
13469 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
13470 to expand zero extend as two shifts instead of
13471 masking. */
13472 && GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0))) <= 4)
13473 {
13474 machine_mode imode = GET_MODE (XEXP (rtl, 0));
13475 mem_loc_result = op0;
13476 add_loc_descr (&mem_loc_result,
13477 int_loc_descriptor (GET_MODE_MASK (imode)));
13478 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
13479 }
13480 else if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
13481 {
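/* Extend by shifting the value up so that its top bit lands in the top bit of
the address-sized slot, then shifting back down: DW_OP_shra sign-extends,
DW_OP_shr zero-extends. */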
13482 int shift = DWARF2_ADDR_SIZE
13483 - GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0)));
13484 shift *= BITS_PER_UNIT;
13485 if (GET_CODE (rtl) == SIGN_EXTEND)
13486 op = DW_OP_shra;
13487 else
13488 op = DW_OP_shr;
13489 mem_loc_result = op0;
13490 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
13491 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
13492 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
13493 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
13494 }
13495 else if (!dwarf_strict)
13496 {
13497 dw_die_ref type_die1, type_die2;
13498 dw_loc_descr_ref cvt;
13499
13500 type_die1 = base_type_for_mode (GET_MODE (XEXP (rtl, 0)),
13501 GET_CODE (rtl) == ZERO_EXTEND);
13502 if (type_die1 == NULL)
13503 break;
13504 type_die2 = base_type_for_mode (mode, 1);
13505 if (type_die2 == NULL)
13506 break;
13507 mem_loc_result = op0;
13508 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
13509 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
13510 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
13511 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
13512 add_loc_descr (&mem_loc_result, cvt);
13513 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
13514 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
13515 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
13516 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
13517 add_loc_descr (&mem_loc_result, cvt);
13518 }
13519 break;
13520
13521 case MEM:
13522 {
13523 rtx new_rtl = avoid_constant_pool_reference (rtl);
13524 if (new_rtl != rtl)
13525 {
13526 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
13527 initialized);
13528 if (mem_loc_result != NULL)
13529 return mem_loc_result;
13530 }
13531 }
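/* Describe the address of the MEM and then dereference it; MODE is passed
down as the memory mode so that any autoincrement address inside knows the
size of the access. */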
13532 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
13533 get_address_mode (rtl), mode,
13534 VAR_INIT_STATUS_INITIALIZED);
13535 if (mem_loc_result == NULL)
13536 mem_loc_result = tls_mem_loc_descriptor (rtl);
13537 if (mem_loc_result != NULL)
13538 {
13539 if (GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE
13540 || GET_MODE_CLASS (mode) != MODE_INT)
13541 {
13542 dw_die_ref type_die;
13543 dw_loc_descr_ref deref;
13544
13545 if (dwarf_strict)
13546 return NULL;
13547 type_die
13548 = base_type_for_mode (mode, GET_MODE_CLASS (mode) == MODE_INT);
13549 if (type_die == NULL)
13550 return NULL;
13551 deref = new_loc_descr (DW_OP_GNU_deref_type,
13552 GET_MODE_SIZE (mode), 0);
13553 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
13554 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
13555 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
13556 add_loc_descr (&mem_loc_result, deref);
13557 }
13558 else if (GET_MODE_SIZE (mode) == DWARF2_ADDR_SIZE)
13559 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
13560 else
13561 add_loc_descr (&mem_loc_result,
13562 new_loc_descr (DW_OP_deref_size,
13563 GET_MODE_SIZE (mode), 0));
13564 }
13565 break;
13566
13567 case LO_SUM:
13568 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
13569
13570 case LABEL_REF:
13571 /* Some ports can transform a symbol ref into a label ref, because
13572 the symbol ref is too far away and has to be dumped into a constant
13573 pool. */
13574 case CONST:
13575 case SYMBOL_REF:
13576 if ((GET_MODE_CLASS (mode) != MODE_INT
13577 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
13578 || (GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE
13579 #ifdef POINTERS_EXTEND_UNSIGNED
13580 && (mode != Pmode || mem_mode == VOIDmode)
13581 #endif
13582 ))
13583 break;
13584 if (GET_CODE (rtl) == SYMBOL_REF
13585 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
13586 {
13587 dw_loc_descr_ref temp;
13588
13589 /* If this is not defined, we have no way to emit the data. */
13590 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
13591 break;
13592
13593 temp = new_addr_loc_descr (rtl, dtprel_true);
13594
13595 mem_loc_result = new_loc_descr (DW_OP_GNU_push_tls_address, 0, 0);
13596 add_loc_descr (&mem_loc_result, temp);
13597
13598 break;
13599 }
13600
13601 if (!const_ok_for_output (rtl))
13602 {
13603 if (GET_CODE (rtl) == CONST)
13604 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
13605 initialized);
13606 break;
13607 }
13608
13609 symref:
13610 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
13611 vec_safe_push (used_rtx_array, rtl);
13612 break;
13613
13614 case CONCAT:
13615 case CONCATN:
13616 case VAR_LOCATION:
13617 case DEBUG_IMPLICIT_PTR:
13618 expansion_failed (NULL_TREE, rtl,
13619 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
13620 return 0;
13621
13622 case ENTRY_VALUE:
13623 if (dwarf_strict)
13624 return NULL;
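/* DW_OP_GNU_entry_value wraps an expression that is evaluated as of the entry
to the current function; only a simple register, or a memory reference based
on one, is accepted here. */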
13625 if (REG_P (ENTRY_VALUE_EXP (rtl)))
13626 {
13627 if (GET_MODE_CLASS (mode) != MODE_INT
13628 || GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE)
13629 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
13630 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
13631 else
13632 {
13633 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
13634 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13635 return NULL;
13636 op0 = one_reg_loc_descriptor (dbx_regnum,
13637 VAR_INIT_STATUS_INITIALIZED);
13638 }
13639 }
13640 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
13641 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
13642 {
13643 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
13644 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
13645 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
13646 return NULL;
13647 }
13648 else
13649 gcc_unreachable ();
13650 if (op0 == NULL)
13651 return NULL;
13652 mem_loc_result = new_loc_descr (DW_OP_GNU_entry_value, 0, 0);
13653 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
13654 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
13655 break;
13656
13657 case DEBUG_PARAMETER_REF:
13658 mem_loc_result = parameter_ref_descriptor (rtl);
13659 break;
13660
13661 case PRE_MODIFY:
13662 /* Extract the PLUS expression nested inside and fall into
13663 PLUS code below. */
13664 rtl = XEXP (rtl, 1);
13665 goto plus;
13666
13667 case PRE_INC:
13668 case PRE_DEC:
13669 /* Turn these into a PLUS expression and fall into the PLUS code
13670 below. */
13671 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
13672 gen_int_mode (GET_CODE (rtl) == PRE_INC
13673 ? GET_MODE_UNIT_SIZE (mem_mode)
13674 : -GET_MODE_UNIT_SIZE (mem_mode),
13675 mode));
13676
13677 /* ... fall through ... */
13678
13679 case PLUS:
13680 plus:
13681 if (is_based_loc (rtl)
13682 && (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE
13683 || XEXP (rtl, 0) == arg_pointer_rtx
13684 || XEXP (rtl, 0) == frame_pointer_rtx)
13685 && GET_MODE_CLASS (mode) == MODE_INT)
13686 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
13687 INTVAL (XEXP (rtl, 1)),
13688 VAR_INIT_STATUS_INITIALIZED);
13689 else
13690 {
13691 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
13692 VAR_INIT_STATUS_INITIALIZED);
13693 if (mem_loc_result == 0)
13694 break;
13695
13696 if (CONST_INT_P (XEXP (rtl, 1))
13697 && GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
13698 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
13699 else
13700 {
13701 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
13702 VAR_INIT_STATUS_INITIALIZED);
13703 if (op1 == 0)
13704 return NULL;
13705 add_loc_descr (&mem_loc_result, op1);
13706 add_loc_descr (&mem_loc_result,
13707 new_loc_descr (DW_OP_plus, 0, 0));
13708 }
13709 }
13710 break;
13711
13712 /* If a pseudo-reg is optimized away, it is possible for it to
13713 be replaced with a MEM containing a multiply or shift. */
13714 case MINUS:
13715 op = DW_OP_minus;
13716 goto do_binop;
13717
13718 case MULT:
13719 op = DW_OP_mul;
13720 goto do_binop;
13721
13722 case DIV:
13723 if (!dwarf_strict
13724 && GET_MODE_CLASS (mode) == MODE_INT
13725 && GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE)
13726 {
13727 mem_loc_result = typed_binop (DW_OP_div, rtl,
13728 base_type_for_mode (mode, 0),
13729 mode, mem_mode);
13730 break;
13731 }
13732 op = DW_OP_div;
13733 goto do_binop;
13734
13735 case UMOD:
13736 op = DW_OP_mod;
13737 goto do_binop;
13738
13739 case ASHIFT:
13740 op = DW_OP_shl;
13741 goto do_shift;
13742
13743 case ASHIFTRT:
13744 op = DW_OP_shra;
13745 goto do_shift;
13746
13747 case LSHIFTRT:
13748 op = DW_OP_shr;
13749 goto do_shift;
13750
13751 do_shift:
13752 if (GET_MODE_CLASS (mode) != MODE_INT)
13753 break;
13754 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
13755 VAR_INIT_STATUS_INITIALIZED);
13756 {
13757 rtx rtlop1 = XEXP (rtl, 1);
13758 if (GET_MODE (rtlop1) != VOIDmode
13759 && GET_MODE_BITSIZE (GET_MODE (rtlop1))
13760 < GET_MODE_BITSIZE (mode))
13761 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
13762 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
13763 VAR_INIT_STATUS_INITIALIZED);
13764 }
13765
13766 if (op0 == 0 || op1 == 0)
13767 break;
13768
13769 mem_loc_result = op0;
13770 add_loc_descr (&mem_loc_result, op1);
13771 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
13772 break;
13773
13774 case AND:
13775 op = DW_OP_and;
13776 goto do_binop;
13777
13778 case IOR:
13779 op = DW_OP_or;
13780 goto do_binop;
13781
13782 case XOR:
13783 op = DW_OP_xor;
13784 goto do_binop;
13785
13786 do_binop:
13787 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
13788 VAR_INIT_STATUS_INITIALIZED);
13789 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
13790 VAR_INIT_STATUS_INITIALIZED);
13791
13792 if (op0 == 0 || op1 == 0)
13793 break;
13794
13795 mem_loc_result = op0;
13796 add_loc_descr (&mem_loc_result, op1);
13797 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
13798 break;
13799
13800 case MOD:
13801 if (GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE && !dwarf_strict)
13802 {
13803 mem_loc_result = typed_binop (DW_OP_mod, rtl,
13804 base_type_for_mode (mode, 0),
13805 mode, mem_mode);
13806 break;
13807 }
13808
13809 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
13810 VAR_INIT_STATUS_INITIALIZED);
13811 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
13812 VAR_INIT_STATUS_INITIALIZED);
13813
13814 if (op0 == 0 || op1 == 0)
13815 break;
13816
13817 mem_loc_result = op0;
13818 add_loc_descr (&mem_loc_result, op1);
13819 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
13820 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
13821 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
13822 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
13823 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
13824 break;
13825
13826 case UDIV:
13827 if (!dwarf_strict && GET_MODE_CLASS (mode) == MODE_INT)
13828 {
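/* FIXME: GET_MODE_SIZE was presumably intended in the test below; as written,
a mode class (MODE_INT) is compared against the address size, so on practical
targets the condition is false and the typed division is always used. */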
13829 if (GET_MODE_CLASS (mode) > DWARF2_ADDR_SIZE)
13830 {
13831 op = DW_OP_div;
13832 goto do_binop;
13833 }
13834 mem_loc_result = typed_binop (DW_OP_div, rtl,
13835 base_type_for_mode (mode, 1),
13836 mode, mem_mode);
13837 }
13838 break;
13839
13840 case NOT:
13841 op = DW_OP_not;
13842 goto do_unop;
13843
13844 case ABS:
13845 op = DW_OP_abs;
13846 goto do_unop;
13847
13848 case NEG:
13849 op = DW_OP_neg;
13850 goto do_unop;
13851
13852 do_unop:
13853 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
13854 VAR_INIT_STATUS_INITIALIZED);
13855
13856 if (op0 == 0)
13857 break;
13858
13859 mem_loc_result = op0;
13860 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
13861 break;
13862
13863 case CONST_INT:
13864 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE
13865 #ifdef POINTERS_EXTEND_UNSIGNED
13866 || (mode == Pmode
13867 && mem_mode != VOIDmode
13868 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
13869 #endif
13870 )
13871 {
13872 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
13873 break;
13874 }
13875 if (!dwarf_strict
13876 && (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT
13877 || GET_MODE_BITSIZE (mode) == HOST_BITS_PER_DOUBLE_INT))
13878 {
13879 dw_die_ref type_die = base_type_for_mode (mode, 1);
13880 machine_mode amode;
13881 if (type_die == NULL)
13882 return NULL;
13883 amode = mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT,
13884 MODE_INT, 0);
13885 if (INTVAL (rtl) >= 0
13886 && amode != BLKmode
13887 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
13888 /* const DW_OP_GNU_convert <XXX> vs.
13889 DW_OP_GNU_const_type <XXX, 1, const>. */
13890 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
13891 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (mode))
13892 {
13893 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
13894 op0 = new_loc_descr (DW_OP_GNU_convert, 0, 0);
13895 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
13896 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
13897 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
13898 add_loc_descr (&mem_loc_result, op0);
13899 return mem_loc_result;
13900 }
13901 mem_loc_result = new_loc_descr (DW_OP_GNU_const_type, 0,
13902 INTVAL (rtl));
13903 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
13904 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
13905 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
13906 if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
13907 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
13908 else
13909 {
13910 mem_loc_result->dw_loc_oprnd2.val_class
13911 = dw_val_class_const_double;
13912 mem_loc_result->dw_loc_oprnd2.v.val_double
13913 = double_int::from_shwi (INTVAL (rtl));
13914 }
13915 }
13916 break;
13917
13918 case CONST_DOUBLE:
13919 if (!dwarf_strict)
13920 {
13921 dw_die_ref type_die;
13922
13923 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
13924 CONST_DOUBLE rtx could represent either a large integer
13925 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
13926 the value is always a floating point constant.
13927
13928 When it is an integer, a CONST_DOUBLE is used whenever
13929 the constant requires 2 HWIs to be adequately represented.
13930 We output CONST_DOUBLEs as blocks. */
13931 if (mode == VOIDmode
13932 || (GET_MODE (rtl) == VOIDmode
13933 && GET_MODE_BITSIZE (mode) != HOST_BITS_PER_DOUBLE_INT))
13934 break;
13935 type_die = base_type_for_mode (mode,
13936 GET_MODE_CLASS (mode) == MODE_INT);
13937 if (type_die == NULL)
13938 return NULL;
13939 mem_loc_result = new_loc_descr (DW_OP_GNU_const_type, 0, 0);
13940 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
13941 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
13942 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
13943 #if TARGET_SUPPORTS_WIDE_INT == 0
13944 if (!SCALAR_FLOAT_MODE_P (mode))
13945 {
13946 mem_loc_result->dw_loc_oprnd2.val_class
13947 = dw_val_class_const_double;
13948 mem_loc_result->dw_loc_oprnd2.v.val_double
13949 = rtx_to_double_int (rtl);
13950 }
13951 else
13952 #endif
13953 {
13954 unsigned int length = GET_MODE_SIZE (mode);
13955 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
13956
13957 insert_float (rtl, array);
13958 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
13959 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
13960 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
13961 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
13962 }
13963 }
13964 break;
13965
13966 case CONST_WIDE_INT:
13967 if (!dwarf_strict)
13968 {
13969 dw_die_ref type_die;
13970
13971 type_die = base_type_for_mode (mode,
13972 GET_MODE_CLASS (mode) == MODE_INT);
13973 if (type_die == NULL)
13974 return NULL;
13975 mem_loc_result = new_loc_descr (DW_OP_GNU_const_type, 0, 0);
13976 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
13977 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
13978 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
13979 mem_loc_result->dw_loc_oprnd2.val_class
13980 = dw_val_class_wide_int;
13981 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
13982 *mem_loc_result->dw_loc_oprnd2.v.val_wide = std::make_pair (rtl, mode);
13983 }
13984 break;
13985
13986 case EQ:
13987 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
13988 break;
13989
13990 case GE:
13991 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
13992 break;
13993
13994 case GT:
13995 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
13996 break;
13997
13998 case LE:
13999 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
14000 break;
14001
14002 case LT:
14003 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
14004 break;
14005
14006 case NE:
14007 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
14008 break;
14009
14010 case GEU:
14011 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
14012 break;
14013
14014 case GTU:
14015 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
14016 break;
14017
14018 case LEU:
14019 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
14020 break;
14021
14022 case LTU:
14023 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
14024 break;
14025
14026 case UMIN:
14027 case UMAX:
14028 if (GET_MODE_CLASS (mode) != MODE_INT)
14029 break;
14030 /* FALLTHRU */
14031 case SMIN:
14032 case SMAX:
14033 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
14034 break;
14035
14036 case ZERO_EXTRACT:
14037 case SIGN_EXTRACT:
14038 if (CONST_INT_P (XEXP (rtl, 1))
14039 && CONST_INT_P (XEXP (rtl, 2))
14040 && ((unsigned) INTVAL (XEXP (rtl, 1))
14041 + (unsigned) INTVAL (XEXP (rtl, 2))
14042 <= GET_MODE_BITSIZE (mode))
14043 && GET_MODE_CLASS (mode) == MODE_INT
14044 && GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE
14045 && GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0))) <= DWARF2_ADDR_SIZE)
14046 {
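/* Extract the bit field by shifting it up to the top of an address-sized word
and then back down, using DW_OP_shra for SIGN_EXTRACT and DW_OP_shr for
ZERO_EXTRACT. */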
14047 int shift, size;
14048 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
14049 mem_mode, VAR_INIT_STATUS_INITIALIZED);
14050 if (op0 == 0)
14051 break;
14052 if (GET_CODE (rtl) == SIGN_EXTRACT)
14053 op = DW_OP_shra;
14054 else
14055 op = DW_OP_shr;
14056 mem_loc_result = op0;
14057 size = INTVAL (XEXP (rtl, 1));
14058 shift = INTVAL (XEXP (rtl, 2));
14059 if (BITS_BIG_ENDIAN)
14060 shift = GET_MODE_BITSIZE (GET_MODE (XEXP (rtl, 0)))
14061 - shift - size;
14062 if (shift + size != (int) DWARF2_ADDR_SIZE * BITS_PER_UNIT)
14063 {
14064 add_loc_descr (&mem_loc_result,
14065 int_loc_descriptor (DWARF2_ADDR_SIZE * BITS_PER_UNIT
14066 - shift - size));
14067 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
14068 }
14069 if (size != (int) DWARF2_ADDR_SIZE * BITS_PER_UNIT)
14070 {
14071 add_loc_descr (&mem_loc_result,
14072 int_loc_descriptor (DWARF2_ADDR_SIZE * BITS_PER_UNIT - size));
14073 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
14074 }
14075 }
14076 break;
14077
14078 case IF_THEN_ELSE:
14079 {
14080 dw_loc_descr_ref op2, bra_node, drop_node;
14081 op0 = mem_loc_descriptor (XEXP (rtl, 0),
14082 GET_MODE (XEXP (rtl, 0)) == VOIDmode
14083 ? word_mode : GET_MODE (XEXP (rtl, 0)),
14084 mem_mode, VAR_INIT_STATUS_INITIALIZED);
14085 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14086 VAR_INIT_STATUS_INITIALIZED);
14087 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
14088 VAR_INIT_STATUS_INITIALIZED);
14089 if (op0 == NULL || op1 == NULL || op2 == NULL)
14090 break;
14091
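/* Push the two arms and then the condition; if the condition is nonzero,
branch over the DW_OP_swap so the DW_OP_drop discards the 'else' value,
otherwise swap first so the 'then' value is dropped instead. */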
14092 mem_loc_result = op1;
14093 add_loc_descr (&mem_loc_result, op2);
14094 add_loc_descr (&mem_loc_result, op0);
14095 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14096 add_loc_descr (&mem_loc_result, bra_node);
14097 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
14098 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
14099 add_loc_descr (&mem_loc_result, drop_node);
14100 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14101 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
14102 }
14103 break;
14104
14105 case FLOAT_EXTEND:
14106 case FLOAT_TRUNCATE:
14107 case FLOAT:
14108 case UNSIGNED_FLOAT:
14109 case FIX:
14110 case UNSIGNED_FIX:
14111 if (!dwarf_strict)
14112 {
14113 dw_die_ref type_die;
14114 dw_loc_descr_ref cvt;
14115
14116 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
14117 mem_mode, VAR_INIT_STATUS_INITIALIZED);
14118 if (op0 == NULL)
14119 break;
14120 if (GET_MODE_CLASS (GET_MODE (XEXP (rtl, 0))) == MODE_INT
14121 && (GET_CODE (rtl) == FLOAT
14122 || GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0)))
14123 <= DWARF2_ADDR_SIZE))
14124 {
14125 type_die = base_type_for_mode (GET_MODE (XEXP (rtl, 0)),
14126 GET_CODE (rtl) == UNSIGNED_FLOAT);
14127 if (type_die == NULL)
14128 break;
14129 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
14130 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14131 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14132 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14133 add_loc_descr (&op0, cvt);
14134 }
14135 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
14136 if (type_die == NULL)
14137 break;
14138 cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
14139 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14140 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14141 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14142 add_loc_descr (&op0, cvt);
14143 if (GET_MODE_CLASS (mode) == MODE_INT
14144 && (GET_CODE (rtl) == FIX
14145 || GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE))
14146 {
14147 op0 = convert_descriptor_to_mode (mode, op0);
14148 if (op0 == NULL)
14149 break;
14150 }
14151 mem_loc_result = op0;
14152 }
14153 break;
14154
14155 case CLZ:
14156 case CTZ:
14157 case FFS:
14158 mem_loc_result = clz_loc_descriptor (rtl, mode, mem_mode);
14159 break;
14160
14161 case POPCOUNT:
14162 case PARITY:
14163 mem_loc_result = popcount_loc_descriptor (rtl, mode, mem_mode);
14164 break;
14165
14166 case BSWAP:
14167 mem_loc_result = bswap_loc_descriptor (rtl, mode, mem_mode);
14168 break;
14169
14170 case ROTATE:
14171 case ROTATERT:
14172 mem_loc_result = rotate_loc_descriptor (rtl, mode, mem_mode);
14173 break;
14174
14175 case COMPARE:
14176 /* In theory, we could implement the above. */
14177 /* DWARF cannot represent the unsigned compare operations
14178 natively. */
14179 case SS_MULT:
14180 case US_MULT:
14181 case SS_DIV:
14182 case US_DIV:
14183 case SS_PLUS:
14184 case US_PLUS:
14185 case SS_MINUS:
14186 case US_MINUS:
14187 case SS_NEG:
14188 case US_NEG:
14189 case SS_ABS:
14190 case SS_ASHIFT:
14191 case US_ASHIFT:
14192 case SS_TRUNCATE:
14193 case US_TRUNCATE:
14194 case UNORDERED:
14195 case ORDERED:
14196 case UNEQ:
14197 case UNGE:
14198 case UNGT:
14199 case UNLE:
14200 case UNLT:
14201 case LTGT:
14202 case FRACT_CONVERT:
14203 case UNSIGNED_FRACT_CONVERT:
14204 case SAT_FRACT:
14205 case UNSIGNED_SAT_FRACT:
14206 case SQRT:
14207 case ASM_OPERANDS:
14208 case VEC_MERGE:
14209 case VEC_SELECT:
14210 case VEC_CONCAT:
14211 case VEC_DUPLICATE:
14212 case UNSPEC:
14213 case HIGH:
14214 case FMA:
14215 case STRICT_LOW_PART:
14216 case CONST_VECTOR:
14217 case CONST_FIXED:
14218 case CLRSB:
14219 case CLOBBER:
14220 /* If delegitimize_address couldn't do anything with the UNSPEC, we
14221 can't express it in the debug info. This can happen e.g. with some
14222 TLS UNSPECs. */
14223 break;
14224
14225 case CONST_STRING:
14226 resolve_one_addr (&rtl);
14227 goto symref;
14228
14229 default:
14230 if (flag_checking)
14231 {
14232 print_rtl (stderr, rtl);
14233 gcc_unreachable ();
14234 }
14235 break;
14236 }
14237
14238 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
14239 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14240
14241 return mem_loc_result;
14242 }
14243
14244 /* Return a descriptor that describes the concatenation of two locations.
14245 This is typically a complex variable. */
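/* As an illustrative sketch (the register numbers are made up): for a complex
   double whose real part lives in one register and whose imaginary part lives
   in another, the resulting expression would look like

     DW_OP_regx <r1>  DW_OP_piece 8  DW_OP_regx <r2>  DW_OP_piece 8

   where each DW_OP_piece covers GET_MODE_SIZE (DFmode), i.e. 8 bytes on a
   typical target.  */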
14246
14247 static dw_loc_descr_ref
14248 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
14249 {
14250 dw_loc_descr_ref cc_loc_result = NULL;
14251 dw_loc_descr_ref x0_ref
14252 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
14253 dw_loc_descr_ref x1_ref
14254 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
14255
14256 if (x0_ref == 0 || x1_ref == 0)
14257 return 0;
14258
14259 cc_loc_result = x0_ref;
14260 add_loc_descr_op_piece (&cc_loc_result, GET_MODE_SIZE (GET_MODE (x0)));
14261
14262 add_loc_descr (&cc_loc_result, x1_ref);
14263 add_loc_descr_op_piece (&cc_loc_result, GET_MODE_SIZE (GET_MODE (x1)));
14264
14265 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14266 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14267
14268 return cc_loc_result;
14269 }
14270
14271 /* Return a descriptor that describes the concatenation of N
14272 locations. */
14273
14274 static dw_loc_descr_ref
14275 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
14276 {
14277 unsigned int i;
14278 dw_loc_descr_ref cc_loc_result = NULL;
14279 unsigned int n = XVECLEN (concatn, 0);
14280
14281 for (i = 0; i < n; ++i)
14282 {
14283 dw_loc_descr_ref ref;
14284 rtx x = XVECEXP (concatn, 0, i);
14285
14286 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
14287 if (ref == NULL)
14288 return NULL;
14289
14290 add_loc_descr (&cc_loc_result, ref);
14291 add_loc_descr_op_piece (&cc_loc_result, GET_MODE_SIZE (GET_MODE (x)));
14292 }
14293
14294 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
14295 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14296
14297 return cc_loc_result;
14298 }
14299
14300 /* Helper function for loc_descriptor. Return DW_OP_GNU_implicit_pointer
14301 for DEBUG_IMPLICIT_PTR RTL. */
14302
14303 static dw_loc_descr_ref
14304 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
14305 {
14306 dw_loc_descr_ref ret;
14307 dw_die_ref ref;
14308
14309 if (dwarf_strict)
14310 return NULL;
14311 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
14312 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
14313 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
14314 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
14315 ret = new_loc_descr (DW_OP_GNU_implicit_pointer, 0, offset);
14316 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
14317 if (ref)
14318 {
14319 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14320 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
14321 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
14322 }
14323 else
14324 {
14325 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
14326 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
14327 }
14328 return ret;
14329 }
14330
14331 /* Output a proper Dwarf location descriptor for a variable or parameter
14332 which is either allocated in a register or in a memory location. For a
14333 register, we just generate an OP_REG and the register number. For a
14334 memory location we provide a Dwarf postfix expression describing how to
14335 generate the (dynamic) address of the object onto the address stack.
14336
14337 MODE is mode of the decl if this loc_descriptor is going to be used in
14338 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
14339 allowed, VOIDmode otherwise.
14340
14341 If we don't know how to describe it, return 0. */
14342
14343 static dw_loc_descr_ref
14344 loc_descriptor (rtx rtl, machine_mode mode,
14345 enum var_init_status initialized)
14346 {
14347 dw_loc_descr_ref loc_result = NULL;
14348
14349 switch (GET_CODE (rtl))
14350 {
14351 case SUBREG:
14352 /* The case of a subreg may arise when we have a local (register)
14353 variable or a formal (register) parameter which doesn't quite fill
14354 up an entire register. For now, just assume that it is
14355 legitimate to make the Dwarf info refer to the whole register which
14356 contains the given subreg. */
14357 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
14358 loc_result = loc_descriptor (SUBREG_REG (rtl),
14359 GET_MODE (SUBREG_REG (rtl)), initialized);
14360 else
14361 goto do_default;
14362 break;
14363
14364 case REG:
14365 loc_result = reg_loc_descriptor (rtl, initialized);
14366 break;
14367
14368 case MEM:
14369 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
14370 GET_MODE (rtl), initialized);
14371 if (loc_result == NULL)
14372 loc_result = tls_mem_loc_descriptor (rtl);
14373 if (loc_result == NULL)
14374 {
14375 rtx new_rtl = avoid_constant_pool_reference (rtl);
14376 if (new_rtl != rtl)
14377 loc_result = loc_descriptor (new_rtl, mode, initialized);
14378 }
14379 break;
14380
14381 case CONCAT:
14382 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
14383 initialized);
14384 break;
14385
14386 case CONCATN:
14387 loc_result = concatn_loc_descriptor (rtl, initialized);
14388 break;
14389
14390 case VAR_LOCATION:
14391 /* Single part. */
14392 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
14393 {
14394 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
14395 if (GET_CODE (loc) == EXPR_LIST)
14396 loc = XEXP (loc, 0);
14397 loc_result = loc_descriptor (loc, mode, initialized);
14398 break;
14399 }
14400
14401 rtl = XEXP (rtl, 1);
14402 /* FALLTHRU */
14403
14404 case PARALLEL:
14405 {
14406 rtvec par_elems = XVEC (rtl, 0);
14407 int num_elem = GET_NUM_ELEM (par_elems);
14408 machine_mode mode;
14409 int i;
14410
14411 /* Create the first one, so we have something to add to. */
14412 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
14413 VOIDmode, initialized);
14414 if (loc_result == NULL)
14415 return NULL;
14416 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
14417 add_loc_descr_op_piece (&loc_result, GET_MODE_SIZE (mode));
14418 for (i = 1; i < num_elem; i++)
14419 {
14420 dw_loc_descr_ref temp;
14421
14422 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
14423 VOIDmode, initialized);
14424 if (temp == NULL)
14425 return NULL;
14426 add_loc_descr (&loc_result, temp);
14427 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
14428 add_loc_descr_op_piece (&loc_result, GET_MODE_SIZE (mode));
14429 }
14430 }
14431 break;
14432
14433 case CONST_INT:
14434 if (mode != VOIDmode && mode != BLKmode)
14435 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (mode),
14436 INTVAL (rtl));
14437 break;
14438
14439 case CONST_DOUBLE:
14440 if (mode == VOIDmode)
14441 mode = GET_MODE (rtl);
14442
14443 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
14444 {
14445 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
14446
14447 /* Note that a CONST_DOUBLE rtx could represent either an integer
14448 or a floating-point constant. A CONST_DOUBLE is used whenever
14449 the constant requires more than one word in order to be
14450 adequately represented. We output CONST_DOUBLEs as blocks. */
14451 loc_result = new_loc_descr (DW_OP_implicit_value,
14452 GET_MODE_SIZE (mode), 0);
14453 #if TARGET_SUPPORTS_WIDE_INT == 0
14454 if (!SCALAR_FLOAT_MODE_P (mode))
14455 {
14456 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
14457 loc_result->dw_loc_oprnd2.v.val_double
14458 = rtx_to_double_int (rtl);
14459 }
14460 else
14461 #endif
14462 {
14463 unsigned int length = GET_MODE_SIZE (mode);
14464 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
14465
14466 insert_float (rtl, array);
14467 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
14468 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
14469 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
14470 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
14471 }
14472 }
14473 break;
14474
14475 case CONST_WIDE_INT:
14476 if (mode == VOIDmode)
14477 mode = GET_MODE (rtl);
14478
14479 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
14480 {
14481 loc_result = new_loc_descr (DW_OP_implicit_value,
14482 GET_MODE_SIZE (mode), 0);
14483 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
14484 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
14485 *loc_result->dw_loc_oprnd2.v.val_wide = std::make_pair (rtl, mode);
14486 }
14487 break;
14488
14489 case CONST_VECTOR:
14490 if (mode == VOIDmode)
14491 mode = GET_MODE (rtl);
14492
14493 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
14494 {
14495 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
14496 unsigned int length = CONST_VECTOR_NUNITS (rtl);
14497 unsigned char *array
14498 = ggc_vec_alloc<unsigned char> (length * elt_size);
14499 unsigned int i;
14500 unsigned char *p;
14501 machine_mode imode = GET_MODE_INNER (mode);
14502
14503 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
14504 switch (GET_MODE_CLASS (mode))
14505 {
14506 case MODE_VECTOR_INT:
14507 for (i = 0, p = array; i < length; i++, p += elt_size)
14508 {
14509 rtx elt = CONST_VECTOR_ELT (rtl, i);
14510 insert_wide_int (std::make_pair (elt, imode), p, elt_size);
14511 }
14512 break;
14513
14514 case MODE_VECTOR_FLOAT:
14515 for (i = 0, p = array; i < length; i++, p += elt_size)
14516 {
14517 rtx elt = CONST_VECTOR_ELT (rtl, i);
14518 insert_float (elt, p);
14519 }
14520 break;
14521
14522 default:
14523 gcc_unreachable ();
14524 }
14525
14526 loc_result = new_loc_descr (DW_OP_implicit_value,
14527 length * elt_size, 0);
14528 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
14529 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
14530 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
14531 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
14532 }
14533 break;
14534
14535 case CONST:
14536 if (mode == VOIDmode
14537 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
14538 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
14539 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
14540 {
14541 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
14542 break;
14543 }
14544 /* FALLTHROUGH */
14545 case SYMBOL_REF:
14546 if (!const_ok_for_output (rtl))
14547 break;
14548 case LABEL_REF:
14549 if (mode != VOIDmode && GET_MODE_SIZE (mode) == DWARF2_ADDR_SIZE
14550 && (dwarf_version >= 4 || !dwarf_strict))
14551 {
14552 loc_result = new_addr_loc_descr (rtl, dtprel_false);
14553 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
14554 vec_safe_push (used_rtx_array, rtl);
14555 }
14556 break;
14557
14558 case DEBUG_IMPLICIT_PTR:
14559 loc_result = implicit_ptr_descriptor (rtl, 0);
14560 break;
14561
14562 case PLUS:
14563 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
14564 && CONST_INT_P (XEXP (rtl, 1)))
14565 {
14566 loc_result
14567 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
14568 break;
14569 }
14570 /* FALLTHRU */
14571 do_default:
14572 default:
14573 if ((GET_MODE_CLASS (mode) == MODE_INT && GET_MODE (rtl) == mode
14574 && GET_MODE_SIZE (GET_MODE (rtl)) <= DWARF2_ADDR_SIZE
14575 && dwarf_version >= 4)
14576 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
14577 {
14578 /* Value expression. */
14579 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
14580 if (loc_result)
14581 add_loc_descr (&loc_result,
14582 new_loc_descr (DW_OP_stack_value, 0, 0));
14583 }
14584 break;
14585 }
14586
14587 return loc_result;
14588 }
14589
14590 /* We need to figure out what section we should use as the base for the
14591 address ranges where a given location is valid.
14592 1. If this particular DECL has a section associated with it, use that.
14593 2. If this function has a section associated with it, use that.
14594 3. Otherwise, use the text section.
14595 XXX: If you split a variable across multiple sections, we won't notice. */
14596
14597 static const char *
14598 secname_for_decl (const_tree decl)
14599 {
14600 const char *secname;
14601
14602 if (VAR_OR_FUNCTION_DECL_P (decl)
14603 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
14604 && DECL_SECTION_NAME (decl))
14605 secname = DECL_SECTION_NAME (decl);
14606 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
14607 secname = DECL_SECTION_NAME (current_function_decl);
14608 else if (cfun && in_cold_section_p)
14609 secname = crtl->subsections.cold_section_label;
14610 else
14611 secname = text_section_label;
14612
14613 return secname;
14614 }
14615
14616 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
14617
14618 static bool
14619 decl_by_reference_p (tree decl)
14620 {
14621 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
14622 || TREE_CODE (decl) == VAR_DECL)
14623 && DECL_BY_REFERENCE (decl));
14624 }
14625
14626 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
14627 for VARLOC. */
14628
14629 static dw_loc_descr_ref
14630 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
14631 enum var_init_status initialized)
14632 {
14633 int have_address = 0;
14634 dw_loc_descr_ref descr;
14635 machine_mode mode;
14636
14637 if (want_address != 2)
14638 {
14639 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
14640 /* Single part. */
14641 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
14642 {
14643 varloc = PAT_VAR_LOCATION_LOC (varloc);
14644 if (GET_CODE (varloc) == EXPR_LIST)
14645 varloc = XEXP (varloc, 0);
14646 mode = GET_MODE (varloc);
14647 if (MEM_P (varloc))
14648 {
14649 rtx addr = XEXP (varloc, 0);
14650 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
14651 mode, initialized);
14652 if (descr)
14653 have_address = 1;
14654 else
14655 {
14656 rtx x = avoid_constant_pool_reference (varloc);
14657 if (x != varloc)
14658 descr = mem_loc_descriptor (x, mode, VOIDmode,
14659 initialized);
14660 }
14661 }
14662 else
14663 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
14664 }
14665 else
14666 return 0;
14667 }
14668 else
14669 {
14670 if (GET_CODE (varloc) == VAR_LOCATION)
14671 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
14672 else
14673 mode = DECL_MODE (loc);
14674 descr = loc_descriptor (varloc, mode, initialized);
14675 have_address = 1;
14676 }
14677
14678 if (!descr)
14679 return 0;
14680
14681 if (want_address == 2 && !have_address
14682 && (dwarf_version >= 4 || !dwarf_strict))
14683 {
14684 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
14685 {
14686 expansion_failed (loc, NULL_RTX,
14687 "DWARF address size mismatch");
14688 return 0;
14689 }
14690 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
14691 have_address = 1;
14692 }
14693 /* Show if we can't fill the request for an address. */
14694 if (want_address && !have_address)
14695 {
14696 expansion_failed (loc, NULL_RTX,
14697 "Want address and only have value");
14698 return 0;
14699 }
14700
14701 /* If we've got an address and don't want one, dereference. */
14702 if (!want_address && have_address)
14703 {
14704 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
14705 enum dwarf_location_atom op;
14706
14707 if (size > DWARF2_ADDR_SIZE || size == -1)
14708 {
14709 expansion_failed (loc, NULL_RTX,
14710 "DWARF address size mismatch");
14711 return 0;
14712 }
14713 else if (size == DWARF2_ADDR_SIZE)
14714 op = DW_OP_deref;
14715 else
14716 op = DW_OP_deref_size;
14717
14718 add_loc_descr (&descr, new_loc_descr (op, size, 0));
14719 }
14720
14721 return descr;
14722 }
14723
14724 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
14725 if it is not possible. */
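/* For instance (illustrative, assuming BITS_PER_UNIT == 8): a 32-bit piece
   starting on a byte boundary becomes DW_OP_piece 4, whereas a 3-bit piece at
   bit offset 1 becomes DW_OP_bit_piece 3, 1, which is only available for
   DWARF 3 and later (or when not in strict mode).  */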
14726
14727 static dw_loc_descr_ref
14728 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
14729 {
14730 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
14731 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
14732 else if (dwarf_version >= 3 || !dwarf_strict)
14733 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
14734 else
14735 return NULL;
14736 }
14737
14738 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
14739 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
14740
14741 static dw_loc_descr_ref
14742 dw_sra_loc_expr (tree decl, rtx loc)
14743 {
14744 rtx p;
14745 unsigned HOST_WIDE_INT padsize = 0;
14746 dw_loc_descr_ref descr, *descr_tail;
14747 unsigned HOST_WIDE_INT decl_size;
14748 rtx varloc;
14749 enum var_init_status initialized;
14750
14751 if (DECL_SIZE (decl) == NULL
14752 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
14753 return NULL;
14754
14755 decl_size = tree_to_uhwi (DECL_SIZE (decl));
14756 descr = NULL;
14757 descr_tail = &descr;
14758
14759 for (p = loc; p; p = XEXP (p, 1))
14760 {
14761 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
14762 rtx loc_note = *decl_piece_varloc_ptr (p);
14763 dw_loc_descr_ref cur_descr;
14764 dw_loc_descr_ref *tail, last = NULL;
14765 unsigned HOST_WIDE_INT opsize = 0;
14766
14767 if (loc_note == NULL_RTX
14768 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
14769 {
14770 padsize += bitsize;
14771 continue;
14772 }
14773 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
14774 varloc = NOTE_VAR_LOCATION (loc_note);
14775 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
14776 if (cur_descr == NULL)
14777 {
14778 padsize += bitsize;
14779 continue;
14780 }
14781
14782 /* Check that cur_descr either doesn't use
14783 DW_OP_*piece operations, or their sum is equal
14784 to bitsize. Otherwise we can't embed it. */
14785 for (tail = &cur_descr; *tail != NULL;
14786 tail = &(*tail)->dw_loc_next)
14787 if ((*tail)->dw_loc_opc == DW_OP_piece)
14788 {
14789 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
14790 * BITS_PER_UNIT;
14791 last = *tail;
14792 }
14793 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
14794 {
14795 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
14796 last = *tail;
14797 }
14798
14799 if (last != NULL && opsize != bitsize)
14800 {
14801 padsize += bitsize;
14802 /* Discard the current piece of the descriptor and release any
14803 addr_table entries it uses. */
14804 remove_loc_list_addr_table_entries (cur_descr);
14805 continue;
14806 }
14807
14808 /* If there is a hole, add DW_OP_*piece after empty DWARF
14809 expression, which means that those bits are optimized out. */
14810 if (padsize)
14811 {
14812 if (padsize > decl_size)
14813 {
14814 remove_loc_list_addr_table_entries (cur_descr);
14815 goto discard_descr;
14816 }
14817 decl_size -= padsize;
14818 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
14819 if (*descr_tail == NULL)
14820 {
14821 remove_loc_list_addr_table_entries (cur_descr);
14822 goto discard_descr;
14823 }
14824 descr_tail = &(*descr_tail)->dw_loc_next;
14825 padsize = 0;
14826 }
14827 *descr_tail = cur_descr;
14828 descr_tail = tail;
14829 if (bitsize > decl_size)
14830 goto discard_descr;
14831 decl_size -= bitsize;
14832 if (last == NULL)
14833 {
14834 HOST_WIDE_INT offset = 0;
14835 if (GET_CODE (varloc) == VAR_LOCATION
14836 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
14837 {
14838 varloc = PAT_VAR_LOCATION_LOC (varloc);
14839 if (GET_CODE (varloc) == EXPR_LIST)
14840 varloc = XEXP (varloc, 0);
14841 }
14842 do
14843 {
14844 if (GET_CODE (varloc) == CONST
14845 || GET_CODE (varloc) == SIGN_EXTEND
14846 || GET_CODE (varloc) == ZERO_EXTEND)
14847 varloc = XEXP (varloc, 0);
14848 else if (GET_CODE (varloc) == SUBREG)
14849 varloc = SUBREG_REG (varloc);
14850 else
14851 break;
14852 }
14853 while (1);
14854 /* DW_OP_bit_piece offset should be zero for register
14855 or implicit location descriptions and empty location
14856 descriptions, but for memory addresses it needs big endian
14857 adjustment. */
14858 if (MEM_P (varloc))
14859 {
14860 unsigned HOST_WIDE_INT memsize
14861 = MEM_SIZE (varloc) * BITS_PER_UNIT;
14862 if (memsize != bitsize)
14863 {
14864 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
14865 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
14866 goto discard_descr;
14867 if (memsize < bitsize)
14868 goto discard_descr;
14869 if (BITS_BIG_ENDIAN)
14870 offset = memsize - bitsize;
14871 }
14872 }
14873
14874 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
14875 if (*descr_tail == NULL)
14876 goto discard_descr;
14877 descr_tail = &(*descr_tail)->dw_loc_next;
14878 }
14879 }
14880
14881 /* If there were any non-empty expressions, add padding till the end of
14882 the decl. */
14883 if (descr != NULL && decl_size != 0)
14884 {
14885 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
14886 if (*descr_tail == NULL)
14887 goto discard_descr;
14888 }
14889 return descr;
14890
14891 discard_descr:
14892 /* Discard the descriptor and release any addr_table entries it uses. */
14893 remove_loc_list_addr_table_entries (descr);
14894 return NULL;
14895 }
14896
14897 /* Return the dwarf representation of the location list LOC_LIST of
14898 DECL. WANT_ADDRESS has the same meaning as in loc_list_from_tree
14899 function. */
14900
14901 static dw_loc_list_ref
14902 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
14903 {
14904 const char *endname, *secname;
14905 rtx varloc;
14906 enum var_init_status initialized;
14907 struct var_loc_node *node;
14908 dw_loc_descr_ref descr;
14909 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
14910 dw_loc_list_ref list = NULL;
14911 dw_loc_list_ref *listp = &list;
14912
14913 /* Now that we know what section we are using for a base,
14914 actually construct the list of locations.
14915 The first location information is what is passed to the
14916 function that creates the location list, and the remaining
14917 locations just get added on to that list.
14918 Note that we only know the start address for a location
14919 (i.e. location changes), so to build the range, we use
14920 the range [current location start, next location start].
14921 This means we have to special case the last node, and generate
14922 a range of [last location start, end of function label]. */
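/* As an illustration (hypothetical labels): a variable known to live in a
   register from label .LVL1 and in memory from label .LVL2 until the end of
   the function yields two entries, [.LVL1, .LVL2) and
   [.LVL2, <function end label>), each paired with the corresponding location
   expression.  */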
14923
14924 secname = secname_for_decl (decl);
14925
14926 for (node = loc_list->first; node; node = node->next)
14927 if (GET_CODE (node->loc) == EXPR_LIST
14928 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
14929 {
14930 if (GET_CODE (node->loc) == EXPR_LIST)
14931 {
14932 /* This requires DW_OP_{,bit_}piece, which is not usable
14933 inside DWARF expressions. */
14934 if (want_address != 2)
14935 continue;
14936 descr = dw_sra_loc_expr (decl, node->loc);
14937 if (descr == NULL)
14938 continue;
14939 }
14940 else
14941 {
14942 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
14943 varloc = NOTE_VAR_LOCATION (node->loc);
14944 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
14945 }
14946 if (descr)
14947 {
14948 bool range_across_switch = false;
14949 /* If section switch happens in between node->label
14950 and node->next->label (or end of function) and
14951 we can't emit it as a single entry list,
14952 emit two ranges, first one ending at the end
14953 of first partition and second one starting at the
14954 beginning of second partition. */
14955 if (node == loc_list->last_before_switch
14956 && (node != loc_list->first || loc_list->first->next)
14957 && current_function_decl)
14958 {
14959 endname = cfun->fde->dw_fde_end;
14960 range_across_switch = true;
14961 }
14962 /* The variable has a location between NODE->LABEL and
14963 NODE->NEXT->LABEL. */
14964 else if (node->next)
14965 endname = node->next->label;
14966 /* If the variable has a location at the last label
14967 it keeps its location until the end of function. */
14968 else if (!current_function_decl)
14969 endname = text_end_label;
14970 else
14971 {
14972 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
14973 current_function_funcdef_no);
14974 endname = ggc_strdup (label_id);
14975 }
14976
14977 *listp = new_loc_list (descr, node->label, endname, secname);
14978 if (TREE_CODE (decl) == PARM_DECL
14979 && node == loc_list->first
14980 && NOTE_P (node->loc)
14981 && strcmp (node->label, endname) == 0)
14982 (*listp)->force = true;
14983 listp = &(*listp)->dw_loc_next;
14984
14985 if (range_across_switch)
14986 {
14987 if (GET_CODE (node->loc) == EXPR_LIST)
14988 descr = dw_sra_loc_expr (decl, node->loc);
14989 else
14990 {
14991 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
14992 varloc = NOTE_VAR_LOCATION (node->loc);
14993 descr = dw_loc_list_1 (decl, varloc, want_address,
14994 initialized);
14995 }
14996 gcc_assert (descr);
14997 /* The variable has a location between NODE->LABEL and
14998 NODE->NEXT->LABEL. */
14999 if (node->next)
15000 endname = node->next->label;
15001 else
15002 endname = cfun->fde->dw_fde_second_end;
15003 *listp = new_loc_list (descr,
15004 cfun->fde->dw_fde_second_begin,
15005 endname, secname);
15006 listp = &(*listp)->dw_loc_next;
15007 }
15008 }
15009 }
15010
15011 /* Try to avoid the overhead of a location list by emitting a location
15012 expression instead, but only if we didn't have more than one
15013 location entry in the first place. If some entries were not
15014 representable, we don't want to pretend that a single entry that
15015 was representable applies to the entire scope in which the
15016 variable is available. */
15017 if (list && loc_list->first->next)
15018 gen_llsym (list);
15019
15020 return list;
15021 }
15022
15023 /* Return true if the loc_list has only a single element and thus can be
15024 represented as a location description. */
15025
15026 static bool
15027 single_element_loc_list_p (dw_loc_list_ref list)
15028 {
15029 gcc_assert (!list->dw_loc_next || list->ll_symbol);
15030 return !list->ll_symbol;
15031 }
15032
15033 /* To each location in list LIST add loc descr REF. */
15034
15035 static void
15036 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
15037 {
15038 dw_loc_descr_ref copy;
15039 add_loc_descr (&list->expr, ref);
15040 list = list->dw_loc_next;
15041 while (list)
15042 {
15043 copy = ggc_alloc<dw_loc_descr_node> ();
15044 memcpy (copy, ref, sizeof (dw_loc_descr_node));
15045 add_loc_descr (&list->expr, copy);
15046 while (copy->dw_loc_next)
15047 {
15048 dw_loc_descr_ref new_copy = ggc_alloc<dw_loc_descr_node> ();
15049 memcpy (new_copy, copy->dw_loc_next, sizeof (dw_loc_descr_node));
15050 copy->dw_loc_next = new_copy;
15051 copy = new_copy;
15052 }
15053 list = list->dw_loc_next;
15054 }
15055 }
15056
15057 /* Given two lists RET and LIST
15058 produce location list that is result of adding expression in LIST
15059 to expression in RET on each position in program.
15060 Might be destructive on both RET and LIST.
15061
15062 TODO: We handle only simple cases of RET or LIST having at most one
15063 element. The general case would involve sorting the lists in program order
15064 and merging them, which will need some additional work.
15065 Adding that will improve quality of debug info especially for SRA-ed
15066 structures. */
15067
15068 static void
15069 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
15070 {
15071 if (!list)
15072 return;
15073 if (!*ret)
15074 {
15075 *ret = list;
15076 return;
15077 }
15078 if (!list->dw_loc_next)
15079 {
15080 add_loc_descr_to_each (*ret, list->expr);
15081 return;
15082 }
15083 if (!(*ret)->dw_loc_next)
15084 {
15085 add_loc_descr_to_each (list, (*ret)->expr);
15086 *ret = list;
15087 return;
15088 }
15089 expansion_failed (NULL_TREE, NULL_RTX,
15090 "Don't know how to merge two non-trivial"
15091 " location lists.\n");
15092 *ret = NULL;
15093 return;
15094 }
15095
15096 /* LOC is a constant expression. Try our luck: look it up in the constant
15097 pool and return the loc_descr of its address. */
15098
15099 static dw_loc_descr_ref
15100 cst_pool_loc_descr (tree loc)
15101 {
15102 /* Get an RTL for this, if something has been emitted. */
15103 rtx rtl = lookup_constant_def (loc);
15104
15105 if (!rtl || !MEM_P (rtl))
15106 {
15107 gcc_assert (!rtl);
15108 return 0;
15109 }
15110 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
15111
15112 /* TODO: We might get more coverage if we were actually delaying expansion
15113 of all expressions until the end of compilation, when constant pools are
15114 fully populated. */
15115 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
15116 {
15117 expansion_failed (loc, NULL_RTX,
15118 "CST value in constant pool but not marked.");
15119 return 0;
15120 }
15121 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
15122 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
15123 }
15124
15125 /* Return dw_loc_list representing address of addr_expr LOC
15126 by looking for inner INDIRECT_REF expression and turning
15127 it into simple arithmetic.
15128
15129 See loc_list_from_tree for the meaning of CONTEXT. */
15130
15131 static dw_loc_list_ref
15132 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
15133 const loc_descr_context *context)
15134 {
15135 tree obj, offset;
15136 HOST_WIDE_INT bitsize, bitpos, bytepos;
15137 machine_mode mode;
15138 int unsignedp, reversep, volatilep = 0;
15139 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
15140
15141 obj = get_inner_reference (TREE_OPERAND (loc, 0),
15142 &bitsize, &bitpos, &offset, &mode,
15143 &unsignedp, &reversep, &volatilep, false);
15144 STRIP_NOPS (obj);
15145 if (bitpos % BITS_PER_UNIT)
15146 {
15147 expansion_failed (loc, NULL_RTX, "bitfield access");
15148 return 0;
15149 }
15150 if (!INDIRECT_REF_P (obj))
15151 {
15152 expansion_failed (obj,
15153 NULL_RTX, "no indirect ref in inner reference");
15154 return 0;
15155 }
15156 if (!offset && !bitpos)
15157 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
15158 context);
15159 else if (toplev
15160 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
15161 && (dwarf_version >= 4 || !dwarf_strict))
15162 {
15163 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
15164 if (!list_ret)
15165 return 0;
15166 if (offset)
15167 {
15168 /* Variable offset. */
15169 list_ret1 = loc_list_from_tree (offset, 0, context);
15170 if (list_ret1 == 0)
15171 return 0;
15172 add_loc_list (&list_ret, list_ret1);
15173 if (!list_ret)
15174 return 0;
15175 add_loc_descr_to_each (list_ret,
15176 new_loc_descr (DW_OP_plus, 0, 0));
15177 }
15178 bytepos = bitpos / BITS_PER_UNIT;
15179 if (bytepos > 0)
15180 add_loc_descr_to_each (list_ret,
15181 new_loc_descr (DW_OP_plus_uconst,
15182 bytepos, 0));
15183 else if (bytepos < 0)
15184 loc_list_plus_const (list_ret, bytepos);
15185 add_loc_descr_to_each (list_ret,
15186 new_loc_descr (DW_OP_stack_value, 0, 0));
15187 }
15188 return list_ret;
15189 }
15190
15191 /* Set LOC to the next operation that is not a DW_OP_nop operation. If all
15192 operations from LOC are nops, move to the last one. Insert into NOPS all
15193 operations that are skipped. */
15194
15195 static void
15196 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
15197 hash_set<dw_loc_descr_ref> &nops)
15198 {
15199 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
15200 {
15201 nops.add (loc);
15202 loc = loc->dw_loc_next;
15203 }
15204 }
15205
15206 /* Helper for loc_descr_without_nops: free the location description operation
15207 P. */
15208
15209 bool
15210 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
15211 {
15212 ggc_free (loc);
15213 return true;
15214 }
15215
15216 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
15217 finishes LOC. */
15218
15219 static void
15220 loc_descr_without_nops (dw_loc_descr_ref &loc)
15221 {
15222 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
15223 return;
15224
15225 /* Set of all DW_OP_nop operations we remove. */
15226 hash_set<dw_loc_descr_ref> nops;
15227
15228 /* First, strip all prefix NOP operations in order to keep the head of the
15229 operations list. */
15230 loc_descr_to_next_no_nop (loc, nops);
15231
15232 for (dw_loc_descr_ref cur = loc; cur != NULL;)
15233 {
15234 /* For control flow operations: strip "prefix" nops in destination
15235 labels. */
15236 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
15237 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
15238 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
15239 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
15240
15241 /* Do the same for the operations that follow, then move to the next
15242 iteration. */
15243 if (cur->dw_loc_next != NULL)
15244 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
15245 cur = cur->dw_loc_next;
15246 }
15247
15248 nops.traverse<void *, free_loc_descr> (NULL);
15249 }
15250
15251
15252 struct dwarf_procedure_info;
15253
15254 /* Helper structure for location descriptions generation. */
15255 struct loc_descr_context
15256 {
15257 /* The type that is implicitly referenced by DW_OP_push_object_address, or
15258 NULL_TREE if DW_OP_push_object_address is invalid for this location
15259 description. This is used when processing PLACEHOLDER_EXPR nodes. */
15260 tree context_type;
15261 /* The ..._DECL node that should be translated as a
15262 DW_OP_push_object_address operation. */
15263 tree base_decl;
15264 /* Information about the DWARF procedure we are currently generating. NULL if
15265 we are not generating a DWARF procedure. */
15266 struct dwarf_procedure_info *dpi;
15267 };
15268
15269 /* DWARF procedures generation
15270
15271 DWARF expressions (a.k.a. location descriptions) are used to encode values
15272 that vary, such as sizes or offsets. Such computations can have redundant parts
15273 that can be factorized in order to reduce the size of the output debug
15274 information. This is the whole point of DWARF procedures.
15275
15276 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
15277 already factorized into functions ("size functions") in order to handle very
15278 big and complex types. Such functions are quite simple: they have integral
15279 arguments, they return an integral result and their body contains only a
15280 return statement with arithmetic expressions. This is the only kind of
15281 function we are interested in translating into DWARF procedures, here.
15282
15283 DWARF expressions and DWARF procedures are executed using a stack, so we have
15284 to define some calling convention for them to interact. Let's say that:
15285
15286 - Before calling a DWARF procedure, DWARF expressions must push on the stack
15287 all arguments in reverse order (right-to-left) so that when the DWARF
15288 procedure execution starts, the first argument is the top of the stack.
15289
15290 - Then, when returning, the DWARF procedure must have consumed all arguments
15291 on the stack, must have pushed the result and touched nothing else.
15292
15293 - Each integral argument and the result have integral types and can be held
15294 in a single stack slot.
15295
15296 - We call "frame offset" the number of stack slots that are "under DWARF
15297 procedure control": it includes the arguments slots, the temporaries and
15298 the result slot. Thus, it is equal to the number of arguments when the
15299 procedure execution starts and must be equal to one (the result) when it
15300 returns. */
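/* As a worked example (an illustrative sketch, not actual compiler output):
   a size function returning ARG + 7 for its single argument could be
   translated into the DWARF procedure body

     DW_OP_pick 0   DW_OP_plus_uconst 7   DW_OP_swap   DW_OP_drop

   The frame offset is 1 on entry (the argument), 2 after the DW_OP_pick,
   still 2 after the addition, and back to 1 (the result alone) after the
   DW_OP_swap/DW_OP_drop epilogue, as the convention above requires.  A
   caller pushes the argument and then invokes the procedure with a
   DW_OP_call4 operation referencing its DIE.  */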
15301
15302 /* Helper structure used when generating operations for a DWARF procedure. */
15303 struct dwarf_procedure_info
15304 {
15305 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
15306 currently translated. */
15307 tree fndecl;
15308 /* The number of arguments FNDECL takes. */
15309 unsigned args_count;
15310 };
15311
15312 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
15313 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
15314 equate it to this DIE. */
15315
15316 static dw_die_ref
15317 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
15318 dw_die_ref parent_die)
15319 {
15320 const bool dwarf_proc_supported = dwarf_version >= 4;
15321 dw_die_ref dwarf_proc_die;
15322
15323 if ((dwarf_version < 3 && dwarf_strict)
15324 || location == NULL)
15325 return NULL;
15326
15327 dwarf_proc_die = new_die (dwarf_proc_supported
15328 ? DW_TAG_dwarf_procedure
15329 : DW_TAG_variable,
15330 parent_die,
15331 fndecl);
15332 if (fndecl)
15333 equate_decl_number_to_die (fndecl, dwarf_proc_die);
15334 if (!dwarf_proc_supported)
15335 add_AT_flag (dwarf_proc_die, DW_AT_artificial, 1);
15336 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
15337 return dwarf_proc_die;
15338 }
15339
15340 /* Return whether TYPE is a supported type as a DWARF procedure argument
15341 type or return type (we handle only scalar types and pointer types that
15342 aren't wider than the DWARF expression evaluation stack). */
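/* For instance (illustrative): on a target where DWARF2_ADDR_SIZE is 8, a
   32-bit integer type or a pointer type qualifies, whereas a 128-bit integer
   type or any floating-point type does not.  */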
15343
15344 static bool
15345 is_handled_procedure_type (tree type)
15346 {
15347 return ((INTEGRAL_TYPE_P (type)
15348 || TREE_CODE (type) == OFFSET_TYPE
15349 || TREE_CODE (type) == POINTER_TYPE)
15350 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
15351 }
15352
15353 /* Helper for resolve_args_picking: do the same but stop when coming across
15354 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
15355 offset *before* evaluating the corresponding operation. */
15356
15357 static bool
15358 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
15359 struct dwarf_procedure_info *dpi,
15360 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
15361 {
15362 /* The "frame_offset" identifier is already used to name a macro... */
15363 unsigned frame_offset_ = initial_frame_offset;
15364 dw_loc_descr_ref l;
15365
15366 for (l = loc; l != NULL;)
15367 {
15368 bool existed;
15369 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
15370
15371 /* If we already met this node, there is nothing to compute anymore. */
15372 if (existed)
15373 {
15374 /* Make sure that the stack size is consistent wherever the execution
15375 flow comes from. */
15376 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
15377 break;
15378 }
15379 l_frame_offset = frame_offset_;
15380
15381 /* If needed, relocate the picking offset with respect to the frame
15382 offset. */
15383 if (l->dw_loc_opc == DW_OP_pick && l->frame_offset_rel)
15384 {
15385 /* frame_offset_ is the size of the current stack frame, including
15386 incoming arguments. Besides, the arguments are pushed
15387 right-to-left. Thus, in order to access the Nth argument from
15388 this operation node, the picking has to skip temporaries *plus*
15389 one stack slot per argument (0 for the first one, 1 for the second
15390 one, etc.).
15391
15392 The targeted argument number (N) is already set as the operand,
15393 and the number of temporaries can be computed with:
15394 frame_offset_ - dpi->args_count */
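/* Numeric illustration (made-up values): with dpi->args_count == 2 and
   frame_offset_ == 3 (two arguments plus one temporary), a reference to
   the second argument, emitted as DW_OP_pick 1, is relocated to
   DW_OP_pick 1 + (3 - 2), i.e. DW_OP_pick 2.  */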
15395 l->dw_loc_oprnd1.v.val_unsigned += frame_offset_ - dpi->args_count;
15396
15397 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
15398 if (l->dw_loc_oprnd1.v.val_unsigned > 255)
15399 return false;
15400 }
15401
15402 /* Update frame_offset according to the effect the current operation has
15403 on the stack. */
15404 switch (l->dw_loc_opc)
15405 {
15406 case DW_OP_deref:
15407 case DW_OP_swap:
15408 case DW_OP_rot:
15409 case DW_OP_abs:
case DW_OP_neg:
15410 case DW_OP_not:
15411 case DW_OP_plus_uconst:
15412 case DW_OP_skip:
15413 case DW_OP_reg0:
15414 case DW_OP_reg1:
15415 case DW_OP_reg2:
15416 case DW_OP_reg3:
15417 case DW_OP_reg4:
15418 case DW_OP_reg5:
15419 case DW_OP_reg6:
15420 case DW_OP_reg7:
15421 case DW_OP_reg8:
15422 case DW_OP_reg9:
15423 case DW_OP_reg10:
15424 case DW_OP_reg11:
15425 case DW_OP_reg12:
15426 case DW_OP_reg13:
15427 case DW_OP_reg14:
15428 case DW_OP_reg15:
15429 case DW_OP_reg16:
15430 case DW_OP_reg17:
15431 case DW_OP_reg18:
15432 case DW_OP_reg19:
15433 case DW_OP_reg20:
15434 case DW_OP_reg21:
15435 case DW_OP_reg22:
15436 case DW_OP_reg23:
15437 case DW_OP_reg24:
15438 case DW_OP_reg25:
15439 case DW_OP_reg26:
15440 case DW_OP_reg27:
15441 case DW_OP_reg28:
15442 case DW_OP_reg29:
15443 case DW_OP_reg30:
15444 case DW_OP_reg31:
15445 case DW_OP_bregx:
15446 case DW_OP_piece:
15447 case DW_OP_deref_size:
15448 case DW_OP_nop:
15449 case DW_OP_form_tls_address:
15450 case DW_OP_bit_piece:
15451 case DW_OP_implicit_value:
15452 case DW_OP_stack_value:
15453 break;
15454
15455 case DW_OP_addr:
15456 case DW_OP_const1u:
15457 case DW_OP_const1s:
15458 case DW_OP_const2u:
15459 case DW_OP_const2s:
15460 case DW_OP_const4u:
15461 case DW_OP_const4s:
15462 case DW_OP_const8u:
15463 case DW_OP_const8s:
15464 case DW_OP_constu:
15465 case DW_OP_consts:
15466 case DW_OP_dup:
15467 case DW_OP_over:
15468 case DW_OP_pick:
15469 case DW_OP_lit0:
15470 case DW_OP_lit1:
15471 case DW_OP_lit2:
15472 case DW_OP_lit3:
15473 case DW_OP_lit4:
15474 case DW_OP_lit5:
15475 case DW_OP_lit6:
15476 case DW_OP_lit7:
15477 case DW_OP_lit8:
15478 case DW_OP_lit9:
15479 case DW_OP_lit10:
15480 case DW_OP_lit11:
15481 case DW_OP_lit12:
15482 case DW_OP_lit13:
15483 case DW_OP_lit14:
15484 case DW_OP_lit15:
15485 case DW_OP_lit16:
15486 case DW_OP_lit17:
15487 case DW_OP_lit18:
15488 case DW_OP_lit19:
15489 case DW_OP_lit20:
15490 case DW_OP_lit21:
15491 case DW_OP_lit22:
15492 case DW_OP_lit23:
15493 case DW_OP_lit24:
15494 case DW_OP_lit25:
15495 case DW_OP_lit26:
15496 case DW_OP_lit27:
15497 case DW_OP_lit28:
15498 case DW_OP_lit29:
15499 case DW_OP_lit30:
15500 case DW_OP_lit31:
15501 case DW_OP_breg0:
15502 case DW_OP_breg1:
15503 case DW_OP_breg2:
15504 case DW_OP_breg3:
15505 case DW_OP_breg4:
15506 case DW_OP_breg5:
15507 case DW_OP_breg6:
15508 case DW_OP_breg7:
15509 case DW_OP_breg8:
15510 case DW_OP_breg9:
15511 case DW_OP_breg10:
15512 case DW_OP_breg11:
15513 case DW_OP_breg12:
15514 case DW_OP_breg13:
15515 case DW_OP_breg14:
15516 case DW_OP_breg15:
15517 case DW_OP_breg16:
15518 case DW_OP_breg17:
15519 case DW_OP_breg18:
15520 case DW_OP_breg19:
15521 case DW_OP_breg20:
15522 case DW_OP_breg21:
15523 case DW_OP_breg22:
15524 case DW_OP_breg23:
15525 case DW_OP_breg24:
15526 case DW_OP_breg25:
15527 case DW_OP_breg26:
15528 case DW_OP_breg27:
15529 case DW_OP_breg28:
15530 case DW_OP_breg29:
15531 case DW_OP_breg30:
15532 case DW_OP_breg31:
15533 case DW_OP_fbreg:
15534 case DW_OP_push_object_address:
15535 case DW_OP_call_frame_cfa:
15536 ++frame_offset_;
15537 break;
15538
15539 case DW_OP_drop:
15540 case DW_OP_xderef:
15541 case DW_OP_and:
15542 case DW_OP_div:
15543 case DW_OP_minus:
15544 case DW_OP_mod:
15545 case DW_OP_mul:
15547 case DW_OP_or:
15548 case DW_OP_plus:
15549 case DW_OP_shl:
15550 case DW_OP_shr:
15551 case DW_OP_shra:
15552 case DW_OP_xor:
15553 case DW_OP_bra:
15554 case DW_OP_eq:
15555 case DW_OP_ge:
15556 case DW_OP_gt:
15557 case DW_OP_le:
15558 case DW_OP_lt:
15559 case DW_OP_ne:
15560 case DW_OP_regx:
15561 case DW_OP_xderef_size:
15562 --frame_offset_;
15563 break;
15564
15565 case DW_OP_call2:
15566 case DW_OP_call4:
15567 case DW_OP_call_ref:
15568 {
15569 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
15570 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
15571
15572 if (stack_usage == NULL)
15573 return false;
15574 frame_offset_ += *stack_usage;
15575 break;
15576 }
15577
15578 case DW_OP_GNU_push_tls_address:
15579 case DW_OP_GNU_uninit:
15580 case DW_OP_GNU_encoded_addr:
15581 case DW_OP_GNU_implicit_pointer:
15582 case DW_OP_GNU_entry_value:
15583 case DW_OP_GNU_const_type:
15584 case DW_OP_GNU_regval_type:
15585 case DW_OP_GNU_deref_type:
15586 case DW_OP_GNU_convert:
15587 case DW_OP_GNU_reinterpret:
15588 case DW_OP_GNU_parameter_ref:
15589 /* loc_list_from_tree will probably not output these operations for
15590 size functions, so assume they will not appear here. */
15591 /* Fall through... */
15592
15593 default:
15594 gcc_unreachable ();
15595 }
15596
15597 /* Now, follow the control flow (except subroutine calls). */
15598 switch (l->dw_loc_opc)
15599 {
15600 case DW_OP_bra:
15601 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
15602 frame_offsets))
15603 return false;
15604 /* Fall through... */
15605
15606 case DW_OP_skip:
15607 l = l->dw_loc_oprnd1.v.val_loc;
15608 break;
15609
15610 case DW_OP_stack_value:
15611 return true;
15612
15613 default:
15614 l = l->dw_loc_next;
15615 break;
15616 }
15617 }
15618
15619 return true;
15620 }
15621
15622 /* Make a DFS over operations reachable through LOC (i.e. follow branch
15623 operations) in order to resolve the operand of DW_OP_pick operations that
15624 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
15625 offset *before* LOC is executed. Return whether all relocations were
15626 successful. */
15627
15628 static bool
15629 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
15630 struct dwarf_procedure_info *dpi)
15631 {
15632 /* Associate to all visited operations the frame offset *before* evaluating
15633 this operation. */
15634 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
15635
15636 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
15637 frame_offsets);
15638 }
15639
15640 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
15641 Return NULL if it is not possible. */
15642
15643 static dw_die_ref
15644 function_to_dwarf_procedure (tree fndecl)
15645 {
15646 struct loc_descr_context ctx;
15647 struct dwarf_procedure_info dpi;
15648 dw_die_ref dwarf_proc_die;
15649 tree tree_body = DECL_SAVED_TREE (fndecl);
15650 dw_loc_descr_ref loc_body, epilogue;
15651
15652 tree cursor;
15653 unsigned i;
15654
15655 /* Do not generate multiple DWARF procedures for the same function
15656 declaration. */
15657 dwarf_proc_die = lookup_decl_die (fndecl);
15658 if (dwarf_proc_die != NULL)
15659 return dwarf_proc_die;
15660
15661 /* DWARF procedures are available starting with the DWARFv3 standard, but
15662 it's the DWARFv4 standard that introduces the DW_TAG_dwarf_procedure
15663 DIE. */
15664 if (dwarf_version < 3 && dwarf_strict)
15665 return NULL;
15666
15667 /* We handle only functions for which we still have a body, that return a
15668 supported type and that takes arguments with supported types. Note that
15669 there is no point translating functions that return nothing. */
15670 if (tree_body == NULL_TREE
15671 || DECL_RESULT (fndecl) == NULL_TREE
15672 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
15673 return NULL;
15674
15675 for (cursor = DECL_ARGUMENTS (fndecl);
15676 cursor != NULL_TREE;
15677 cursor = TREE_CHAIN (cursor))
15678 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
15679 return NULL;
15680
15681 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
15682 if (TREE_CODE (tree_body) != RETURN_EXPR)
15683 return NULL;
15684 tree_body = TREE_OPERAND (tree_body, 0);
15685 if (TREE_CODE (tree_body) != MODIFY_EXPR
15686 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
15687 return NULL;
15688 tree_body = TREE_OPERAND (tree_body, 1);
15689
15690 /* Try to translate the body expression itself. Note that this will probably
15691 cause an infinite recursion if its call graph has a cycle. This is very
15692 unlikely for size functions, however, so don't bother with such things at
15693 the moment. */
15694 ctx.context_type = NULL_TREE;
15695 ctx.base_decl = NULL_TREE;
15696 ctx.dpi = &dpi;
15697 dpi.fndecl = fndecl;
15698 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
15699 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
15700 if (!loc_body)
15701 return NULL;
15702
15703 /* After evaluating all operands in "loc_body", we should still have on the
15704 stack all arguments plus the desired function result (top of the stack).
15705 Generate code in order to keep only the result in our stack frame. */
15706 epilogue = NULL;
15707 for (i = 0; i < dpi.args_count; ++i)
15708 {
15709 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
15710 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
15711 op_couple->dw_loc_next->dw_loc_next = epilogue;
15712 epilogue = op_couple;
15713 }
15714 add_loc_descr (&loc_body, epilogue);
15715 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
15716 return NULL;
15717
15718 /* Trailing nops from loc_descriptor_from_tree (if any) cannot be removed
15719 because they are considered useful. Now there is an epilogue, they are
15720 not anymore, so give it another try. */
15721 loc_descr_without_nops (loc_body);
15722
15723 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
15724 a DW_TAG_dwarf_procedure, so we may have a conflict, here. It's unlikely,
15725 though, given that size functions do not come from source, so they should
15726 not have a dedicated DW_TAG_subprogram DIE. */
15727 dwarf_proc_die
15728 = new_dwarf_proc_die (loc_body, fndecl,
15729 get_context_die (DECL_CONTEXT (fndecl)));
15730
15731 /* The called DWARF procedure consumes one stack slot per argument and
15732 returns one stack slot. */
15733 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
15734
15735 return dwarf_proc_die;
15736 }
15737
15738
15739 /* Generate a Dwarf location list representing LOC.
15740 If WANT_ADDRESS is 0, an expression computing the value of LOC is returned.
15741 If WANT_ADDRESS is 1, an expression computing the address of LOC is returned.
15742 If WANT_ADDRESS is 2, an expression computing an address usable in a
15743 location description is returned (i.e. DW_OP_reg can be used
15744 to refer to register values).
15745
15746 CONTEXT provides information to customize the location descriptions
15747 generation. Its context_type field specifies what type is implicitly
15748 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
15749 will not be generated.
15750
15751 Its DPI field determines whether we are generating a DWARF expression for a
15752 DWARF procedure, so PARM_DECL references are processed specifically.
15753
15754 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
15755 and dpi fields were null. */
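/* For instance (illustrative only): for a pointer-sized local variable stored
   at frame offset -12, WANT_ADDRESS == 1 would typically yield
   DW_OP_fbreg -12, WANT_ADDRESS == 0 would yield DW_OP_fbreg -12, DW_OP_deref,
   and with WANT_ADDRESS == 2 a variable living in a register may instead be
   described directly with a DW_OP_reg<n> operation.  */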
15756
15757 static dw_loc_list_ref
15758 loc_list_from_tree_1 (tree loc, int want_address,
15759 const struct loc_descr_context *context)
15760 {
15761 dw_loc_descr_ref ret = NULL, ret1 = NULL;
15762 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
15763 int have_address = 0;
15764 enum dwarf_location_atom op;
15765
15766 /* ??? Most of the time we do not take proper care of sign/zero
15767 extending the values. Hopefully this won't be a real
15768 problem... */
15769
15770 if (context != NULL
15771 && context->base_decl == loc
15772 && want_address == 0)
15773 {
15774 if (dwarf_version >= 3 || !dwarf_strict)
15775 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
15776 NULL, NULL, NULL);
15777 else
15778 return NULL;
15779 }
15780
15781 switch (TREE_CODE (loc))
15782 {
15783 case ERROR_MARK:
15784 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
15785 return 0;
15786
15787 case PLACEHOLDER_EXPR:
15788 /* This case involves extracting fields from an object to determine the
15789 position of other fields. It is supposed to appear only as the first
15790 operand of COMPONENT_REF nodes and to reference precisely the type
15791 that the context allows. */
15792 if (context != NULL
15793 && TREE_TYPE (loc) == context->context_type
15794 && want_address >= 1)
15795 {
15796 if (dwarf_version >= 3 || !dwarf_strict)
15797 {
15798 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
15799 have_address = 1;
15800 break;
15801 }
15802 else
15803 return NULL;
15804 }
15805 else
15806 expansion_failed (loc, NULL_RTX,
15807 "PLACEHOLDER_EXPR for an unexpected type");
15808 break;
15809
15810 case CALL_EXPR:
15811 {
15812 const int nargs = call_expr_nargs (loc);
15813 tree callee = get_callee_fndecl (loc);
15814 int i;
15815 dw_die_ref dwarf_proc;
15816
15817 if (callee == NULL_TREE)
15818 goto call_expansion_failed;
15819
15820 /* We handle only functions that return an integer. */
15821 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
15822 goto call_expansion_failed;
15823
15824 dwarf_proc = function_to_dwarf_procedure (callee);
15825 if (dwarf_proc == NULL)
15826 goto call_expansion_failed;
15827
15828 /* Evaluate arguments right-to-left so that the first argument will
15829 be the top-most one on the stack. */
15830 for (i = nargs - 1; i >= 0; --i)
15831 {
15832 dw_loc_descr_ref loc_descr
15833 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
15834 context);
15835
15836 if (loc_descr == NULL)
15837 goto call_expansion_failed;
15838
15839 add_loc_descr (&ret, loc_descr);
15840 }
15841
15842 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
15843 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15844 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
15845 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
15846 add_loc_descr (&ret, ret1);
15847 break;
15848
15849 call_expansion_failed:
15850 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
15851 /* There are no opcodes for these operations. */
15852 return 0;
15853 }
15854
15855 case PREINCREMENT_EXPR:
15856 case PREDECREMENT_EXPR:
15857 case POSTINCREMENT_EXPR:
15858 case POSTDECREMENT_EXPR:
15859 expansion_failed (loc, NULL_RTX, "PRE/POST INCREMENT/DECREMENT");
15860 /* There are no opcodes for these operations. */
15861 return 0;
15862
15863 case ADDR_EXPR:
15864 /* If we already want an address, see if there is INDIRECT_REF inside
15865 e.g. for &this->field. */
15866 if (want_address)
15867 {
15868 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
15869 (loc, want_address == 2, context);
15870 if (list_ret)
15871 have_address = 1;
15872 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
15873 && (ret = cst_pool_loc_descr (loc)))
15874 have_address = 1;
15875 }
15876 /* Otherwise, process the argument and look for the address. */
15877 if (!list_ret && !ret)
15878 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
15879 else
15880 {
15881 if (want_address)
15882 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
15883 return NULL;
15884 }
15885 break;
15886
15887 case VAR_DECL:
15888 if (DECL_THREAD_LOCAL_P (loc))
15889 {
15890 rtx rtl;
15891 enum dwarf_location_atom tls_op;
15892 enum dtprel_bool dtprel = dtprel_false;
15893
15894 if (targetm.have_tls)
15895 {
15896 /* If this is not defined, we have no way to emit the
15897 data. */
15898 if (!targetm.asm_out.output_dwarf_dtprel)
15899 return 0;
15900
15901 /* The way DW_OP_GNU_push_tls_address is specified, we
15902 can only look up addresses of objects in the current
15903 module. We used DW_OP_addr as first op, but that's
15904 wrong, because DW_OP_addr is relocated by the debug
15905 info consumer, while DW_OP_GNU_push_tls_address
15906 operand shouldn't be. */
15907 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
15908 return 0;
15909 dtprel = dtprel_true;
15910 tls_op = DW_OP_GNU_push_tls_address;
15911 }
15912 else
15913 {
15914 if (!targetm.emutls.debug_form_tls_address
15915 || !(dwarf_version >= 3 || !dwarf_strict))
15916 return 0;
15917 /* We stuffed the control variable into the DECL_VALUE_EXPR
15918 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
15919 no longer appear in gimple code. We used the control
15920 variable specifically so that we could pick it up here. */
15921 loc = DECL_VALUE_EXPR (loc);
15922 tls_op = DW_OP_form_tls_address;
15923 }
15924
15925 rtl = rtl_for_decl_location (loc);
15926 if (rtl == NULL_RTX)
15927 return 0;
15928
15929 if (!MEM_P (rtl))
15930 return 0;
15931 rtl = XEXP (rtl, 0);
15932 if (! CONSTANT_P (rtl))
15933 return 0;
15934
15935 ret = new_addr_loc_descr (rtl, dtprel);
15936 ret1 = new_loc_descr (tls_op, 0, 0);
15937 add_loc_descr (&ret, ret1);
15938
15939 have_address = 1;
15940 break;
15941 }
15942 /* FALLTHRU */
15943
15944 case PARM_DECL:
15945 if (context != NULL && context->dpi != NULL
15946 && DECL_CONTEXT (loc) == context->dpi->fndecl)
15947 {
15948 /* We are generating code for a DWARF procedure and we want to access
15949 one of its arguments: find the appropriate argument offset and let
15950 the resolve_args_picking pass compute the offset that complies
15951 with the stack frame size. */
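/* Illustration (hypothetical case): for the second parameter of the
   procedure, I below ends up as 1 and we emit DW_OP_pick 1; the
   frame_offset_rel flag lets the later resolve_args_picking pass adjust
   that operand for whatever has been pushed on the stack meanwhile.  */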
15952 unsigned i = 0;
15953 tree cursor;
15954
15955 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
15956 cursor != NULL_TREE && cursor != loc;
15957 cursor = TREE_CHAIN (cursor), ++i)
15958 ;
15959 /* If we are translating a DWARF procedure, all referenced parameters
15960 must belong to the current function. */
15961 gcc_assert (cursor != NULL_TREE);
15962
15963 ret = new_loc_descr (DW_OP_pick, i, 0);
15964 ret->frame_offset_rel = 1;
15965 break;
15966 }
15967 /* FALLTHRU */
15968
15969 case RESULT_DECL:
15970 if (DECL_HAS_VALUE_EXPR_P (loc))
15971 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
15972 want_address, context);
15973 /* FALLTHRU */
15974
15975 case FUNCTION_DECL:
15976 {
15977 rtx rtl;
15978 var_loc_list *loc_list = lookup_decl_loc (loc);
15979
15980 if (loc_list && loc_list->first)
15981 {
15982 list_ret = dw_loc_list (loc_list, loc, want_address);
15983 have_address = want_address != 0;
15984 break;
15985 }
15986 rtl = rtl_for_decl_location (loc);
15987 if (rtl == NULL_RTX)
15988 {
15989 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
15990 return 0;
15991 }
15992 else if (CONST_INT_P (rtl))
15993 {
15994 HOST_WIDE_INT val = INTVAL (rtl);
15995 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
15996 val &= GET_MODE_MASK (DECL_MODE (loc));
15997 ret = int_loc_descriptor (val);
15998 }
15999 else if (GET_CODE (rtl) == CONST_STRING)
16000 {
16001 expansion_failed (loc, NULL_RTX, "CONST_STRING");
16002 return 0;
16003 }
16004 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
16005 ret = new_addr_loc_descr (rtl, dtprel_false);
16006 else
16007 {
16008 machine_mode mode, mem_mode;
16009
16010 /* Certain constructs can only be represented at top-level. */
16011 if (want_address == 2)
16012 {
16013 ret = loc_descriptor (rtl, VOIDmode,
16014 VAR_INIT_STATUS_INITIALIZED);
16015 have_address = 1;
16016 }
16017 else
16018 {
16019 mode = GET_MODE (rtl);
16020 mem_mode = VOIDmode;
16021 if (MEM_P (rtl))
16022 {
16023 mem_mode = mode;
16024 mode = get_address_mode (rtl);
16025 rtl = XEXP (rtl, 0);
16026 have_address = 1;
16027 }
16028 ret = mem_loc_descriptor (rtl, mode, mem_mode,
16029 VAR_INIT_STATUS_INITIALIZED);
16030 }
16031 if (!ret)
16032 expansion_failed (loc, rtl,
16033 "failed to produce loc descriptor for rtl");
16034 }
16035 }
16036 break;
16037
16038 case MEM_REF:
16039 if (!integer_zerop (TREE_OPERAND (loc, 1)))
16040 {
16041 have_address = 1;
16042 goto do_plus;
16043 }
16044 /* Fallthru. */
16045 case INDIRECT_REF:
16046 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
16047 have_address = 1;
16048 break;
16049
16050 case TARGET_MEM_REF:
16051 case SSA_NAME:
16052 case DEBUG_EXPR_DECL:
16053 return NULL;
16054
16055 case COMPOUND_EXPR:
16056 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
16057 context);
16058
16059 CASE_CONVERT:
16060 case VIEW_CONVERT_EXPR:
16061 case SAVE_EXPR:
16062 case MODIFY_EXPR:
16063 case NON_LVALUE_EXPR:
16064 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
16065 context);
16066
16067 case COMPONENT_REF:
16068 case BIT_FIELD_REF:
16069 case ARRAY_REF:
16070 case ARRAY_RANGE_REF:
16071 case REALPART_EXPR:
16072 case IMAGPART_EXPR:
16073 {
16074 tree obj, offset;
16075 HOST_WIDE_INT bitsize, bitpos, bytepos;
16076 machine_mode mode;
16077 int unsignedp, reversep, volatilep = 0;
16078
16079 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
16080 &unsignedp, &reversep, &volatilep, false);
16081
16082 gcc_assert (obj != loc);
16083
16084 list_ret = loc_list_from_tree_1 (obj,
16085 want_address == 2
16086 && !bitpos && !offset ? 2 : 1,
16087 context);
16088 /* TODO: We can extract the value of the small expression via shifting even
16089 for a nonzero bitpos. */
16090 if (list_ret == 0)
16091 return 0;
16092 if (bitpos % BITS_PER_UNIT != 0 || bitsize % BITS_PER_UNIT != 0)
16093 {
16094 expansion_failed (loc, NULL_RTX,
16095 "bitfield access");
16096 return 0;
16097 }
16098
16099 if (offset != NULL_TREE)
16100 {
16101 /* Variable offset. */
16102 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
16103 if (list_ret1 == 0)
16104 return 0;
16105 add_loc_list (&list_ret, list_ret1);
16106 if (!list_ret)
16107 return 0;
16108 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
16109 }
16110
16111 bytepos = bitpos / BITS_PER_UNIT;
16112 if (bytepos > 0)
16113 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst, bytepos, 0));
16114 else if (bytepos < 0)
16115 loc_list_plus_const (list_ret, bytepos);
16116
16117 have_address = 1;
16118 break;
16119 }
16120
16121 case INTEGER_CST:
16122 if ((want_address || !tree_fits_shwi_p (loc))
16123 && (ret = cst_pool_loc_descr (loc)))
16124 have_address = 1;
16125 else if (want_address == 2
16126 && tree_fits_shwi_p (loc)
16127 && (ret = address_of_int_loc_descriptor
16128 (int_size_in_bytes (TREE_TYPE (loc)),
16129 tree_to_shwi (loc))))
16130 have_address = 1;
16131 else if (tree_fits_shwi_p (loc))
16132 ret = int_loc_descriptor (tree_to_shwi (loc));
16133 else if (tree_fits_uhwi_p (loc))
16134 ret = uint_loc_descriptor (tree_to_uhwi (loc));
16135 else
16136 {
16137 expansion_failed (loc, NULL_RTX,
16138 "Integer operand is not host integer");
16139 return 0;
16140 }
16141 break;
16142
16143 case CONSTRUCTOR:
16144 case REAL_CST:
16145 case STRING_CST:
16146 case COMPLEX_CST:
16147 if ((ret = cst_pool_loc_descr (loc)))
16148 have_address = 1;
16149 else
16150 /* We can construct small constants here using int_loc_descriptor. */
16151 expansion_failed (loc, NULL_RTX,
16152 "constructor or constant not in constant pool");
16153 break;
16154
16155 case TRUTH_AND_EXPR:
16156 case TRUTH_ANDIF_EXPR:
16157 case BIT_AND_EXPR:
16158 op = DW_OP_and;
16159 goto do_binop;
16160
16161 case TRUTH_XOR_EXPR:
16162 case BIT_XOR_EXPR:
16163 op = DW_OP_xor;
16164 goto do_binop;
16165
16166 case TRUTH_OR_EXPR:
16167 case TRUTH_ORIF_EXPR:
16168 case BIT_IOR_EXPR:
16169 op = DW_OP_or;
16170 goto do_binop;
16171
16172 case FLOOR_DIV_EXPR:
16173 case CEIL_DIV_EXPR:
16174 case ROUND_DIV_EXPR:
16175 case TRUNC_DIV_EXPR:
16176 case EXACT_DIV_EXPR:
16177 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
16178 return 0;
16179 op = DW_OP_div;
16180 goto do_binop;
16181
16182 case MINUS_EXPR:
16183 op = DW_OP_minus;
16184 goto do_binop;
16185
16186 case FLOOR_MOD_EXPR:
16187 case CEIL_MOD_EXPR:
16188 case ROUND_MOD_EXPR:
16189 case TRUNC_MOD_EXPR:
16190 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
16191 {
16192 op = DW_OP_mod;
16193 goto do_binop;
16194 }
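/* For signed operands we do not use DW_OP_mod, since consumers generally
   treat it as an unsigned modulus; instead, the sequence built below
   computes op0 - (op0 / op1) * op1 using the signed DW_OP_div:
   ... op0 op1 DW_OP_over DW_OP_over DW_OP_div DW_OP_mul DW_OP_minus.  */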
16195 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
16196 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
16197 if (list_ret == 0 || list_ret1 == 0)
16198 return 0;
16199
16200 add_loc_list (&list_ret, list_ret1);
16201 if (list_ret == 0)
16202 return 0;
16203 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
16204 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
16205 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
16206 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
16207 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
16208 break;
16209
16210 case MULT_EXPR:
16211 op = DW_OP_mul;
16212 goto do_binop;
16213
16214 case LSHIFT_EXPR:
16215 op = DW_OP_shl;
16216 goto do_binop;
16217
16218 case RSHIFT_EXPR:
16219 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
16220 goto do_binop;
16221
16222 case POINTER_PLUS_EXPR:
16223 case PLUS_EXPR:
16224 do_plus:
16225 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
16226 {
16227 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
16228 smarter to encode their opposite. The DW_OP_plus_uconst operation
16229 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
16230 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
16231 bytes, Y being the size of the operation that pushes the opposite
16232 of the addend. So let's choose the smallest representation. */
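/* Worked example (illustrative figures, assuming a 64-bit HOST_WIDE_INT and
   DWARF2_ADDR_SIZE == 8): for an addend of -1, DW_OP_plus_uconst needs a
   10-byte ULEB128 to encode 0xffffffffffffffff, 11 bytes in total, whereas
   the equivalent DW_OP_lit1; DW_OP_minus pair takes only 2 bytes.  */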
16233 const tree tree_addend = TREE_OPERAND (loc, 1);
16234 offset_int wi_addend;
16235 HOST_WIDE_INT shwi_addend;
16236 dw_loc_descr_ref loc_naddend;
16237
16238 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
16239 if (list_ret == 0)
16240 return 0;
16241
16242 /* Try to get the literal to push. It is the opposite of the addend,
16243 so as we rely on wrapping during DWARF evaluation, first decode
16244 the literal as a "DWARF-sized" signed number. */
16245 wi_addend = wi::to_offset (tree_addend);
16246 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
16247 shwi_addend = wi_addend.to_shwi ();
16248 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
16249 ? int_loc_descriptor (-shwi_addend)
16250 : NULL;
16251
16252 if (loc_naddend != NULL
16253 && ((unsigned) size_of_uleb128 (shwi_addend)
16254 > size_of_loc_descr (loc_naddend)))
16255 {
16256 add_loc_descr_to_each (list_ret, loc_naddend);
16257 add_loc_descr_to_each (list_ret,
16258 new_loc_descr (DW_OP_minus, 0, 0));
16259 }
16260 else
16261 {
16262 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
16263 {
16264 loc_naddend = loc_cur;
16265 loc_cur = loc_cur->dw_loc_next;
16266 ggc_free (loc_naddend);
16267 }
16268 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
16269 }
16270 break;
16271 }
16272
16273 op = DW_OP_plus;
16274 goto do_binop;
16275
16276 case LE_EXPR:
16277 op = DW_OP_le;
16278 goto do_comp_binop;
16279
16280 case GE_EXPR:
16281 op = DW_OP_ge;
16282 goto do_comp_binop;
16283
16284 case LT_EXPR:
16285 op = DW_OP_lt;
16286 goto do_comp_binop;
16287
16288 case GT_EXPR:
16289 op = DW_OP_gt;
16290 goto do_comp_binop;
16291
16292 do_comp_binop:
16293 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
16294 {
16295 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
16296 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
16297 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
16298 TREE_CODE (loc));
16299 break;
16300 }
16301 else
16302 goto do_binop;
16303
16304 case EQ_EXPR:
16305 op = DW_OP_eq;
16306 goto do_binop;
16307
16308 case NE_EXPR:
16309 op = DW_OP_ne;
16310 goto do_binop;
16311
16312 do_binop:
16313 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
16314 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
16315 if (list_ret == 0 || list_ret1 == 0)
16316 return 0;
16317
16318 add_loc_list (&list_ret, list_ret1);
16319 if (list_ret == 0)
16320 return 0;
16321 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
16322 break;
16323
16324 case TRUTH_NOT_EXPR:
16325 case BIT_NOT_EXPR:
16326 op = DW_OP_not;
16327 goto do_unop;
16328
16329 case ABS_EXPR:
16330 op = DW_OP_abs;
16331 goto do_unop;
16332
16333 case NEGATE_EXPR:
16334 op = DW_OP_neg;
16335 goto do_unop;
16336
16337 do_unop:
16338 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
16339 if (list_ret == 0)
16340 return 0;
16341
16342 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
16343 break;
16344
16345 case MIN_EXPR:
16346 case MAX_EXPR:
16347 {
16348 const enum tree_code code =
16349 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
16350
16351 loc = build3 (COND_EXPR, TREE_TYPE (loc),
16352 build2 (code, integer_type_node,
16353 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
16354 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
16355 }
16356
16357 /* ... fall through ... */
16358
16359 case COND_EXPR:
16360 {
16361 dw_loc_descr_ref lhs
16362 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
16363 dw_loc_list_ref rhs
16364 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
16365 dw_loc_descr_ref bra_node, jump_node, tmp;
16366
16367 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
16368 if (list_ret == 0 || lhs == 0 || rhs == 0)
16369 return 0;
16370
16371 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16372 add_loc_descr_to_each (list_ret, bra_node);
16373
16374 add_loc_list (&list_ret, rhs);
16375 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
16376 add_loc_descr_to_each (list_ret, jump_node);
16377
16378 add_loc_descr_to_each (list_ret, lhs);
16379 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16380 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
16381
16382 /* ??? Need a node to point the skip at. Use a nop. */
16383 tmp = new_loc_descr (DW_OP_nop, 0, 0);
16384 add_loc_descr_to_each (list_ret, tmp);
16385 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16386 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
16387 }
16388 break;
16389
16390 case FIX_TRUNC_EXPR:
16391 return 0;
16392
16393 default:
16394 /* Leave front-end specific codes as simply unknown. This comes
16395 up, for instance, with the C STMT_EXPR. */
16396 if ((unsigned int) TREE_CODE (loc)
16397 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
16398 {
16399 expansion_failed (loc, NULL_RTX,
16400 "language specific tree node");
16401 return 0;
16402 }
16403
16404 /* Otherwise this is a generic code; we should just list all of
16405 these explicitly. We forgot one. */
16406 if (flag_checking)
16407 gcc_unreachable ();
16408
16409 /* In a release build, we want to degrade gracefully: better to
16410 generate incomplete debugging information than to crash. */
16411 return NULL;
16412 }
16413
16414 if (!ret && !list_ret)
16415 return 0;
16416
16417 if (want_address == 2 && !have_address
16418 && (dwarf_version >= 4 || !dwarf_strict))
16419 {
16420 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16421 {
16422 expansion_failed (loc, NULL_RTX,
16423 "DWARF address size mismatch");
16424 return 0;
16425 }
16426 if (ret)
16427 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
16428 else
16429 add_loc_descr_to_each (list_ret,
16430 new_loc_descr (DW_OP_stack_value, 0, 0));
16431 have_address = 1;
16432 }
16433 /* Show if we can't fill the request for an address. */
16434 if (want_address && !have_address)
16435 {
16436 expansion_failed (loc, NULL_RTX,
16437 "Want address and only have value");
16438 return 0;
16439 }
16440
16441 gcc_assert (!ret || !list_ret);
16442
16443 /* If we've got an address and don't want one, dereference. */
16444 if (!want_address && have_address)
16445 {
16446 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16447
16448 if (size > DWARF2_ADDR_SIZE || size == -1)
16449 {
16450 expansion_failed (loc, NULL_RTX,
16451 "DWARF address size mismatch");
16452 return 0;
16453 }
16454 else if (size == DWARF2_ADDR_SIZE)
16455 op = DW_OP_deref;
16456 else
16457 op = DW_OP_deref_size;
16458
16459 if (ret)
16460 add_loc_descr (&ret, new_loc_descr (op, size, 0));
16461 else
16462 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
16463 }
16464 if (ret)
16465 list_ret = new_loc_list (ret, NULL, NULL, NULL);
16466
16467 return list_ret;
16468 }
16469
16470 /* Likewise, but strip useless DW_OP_nop operations in the resulting
16471 expressions. */
16472
16473 static dw_loc_list_ref
16474 loc_list_from_tree (tree loc, int want_address,
16475 const struct loc_descr_context *context)
16476 {
16477 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
16478
16479 for (dw_loc_list_ref loc_cur = result;
16480 loc_cur != NULL;
16481 loc_cur = loc_cur->dw_loc_next)
16482 loc_descr_without_nops (loc_cur->expr);
16483 return result;
16484 }
16485
16486 /* Same as above but return only single location expression. */
16487 static dw_loc_descr_ref
16488 loc_descriptor_from_tree (tree loc, int want_address,
16489 const struct loc_descr_context *context)
16490 {
16491 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
16492 if (!ret)
16493 return NULL;
16494 if (ret->dw_loc_next)
16495 {
16496 expansion_failed (loc, NULL_RTX,
16497 "Location list where only loc descriptor needed");
16498 return NULL;
16499 }
16500 return ret->expr;
16501 }
16502
16503 /* Given a value, round it up to the lowest multiple of `boundary'
16504 which is not less than the value itself. */
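/* For instance, ceiling (7, 4) and ceiling (8, 4) both yield 8.  */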
16505
16506 static inline HOST_WIDE_INT
16507 ceiling (HOST_WIDE_INT value, unsigned int boundary)
16508 {
16509 return (((value + boundary - 1) / boundary) * boundary);
16510 }
16511
16512 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
16513 pointer to the declared type for the relevant field variable, or return
16514 `integer_type_node' if the given node turns out to be an
16515 ERROR_MARK node. */
16516
16517 static inline tree
16518 field_type (const_tree decl)
16519 {
16520 tree type;
16521
16522 if (TREE_CODE (decl) == ERROR_MARK)
16523 return integer_type_node;
16524
16525 type = DECL_BIT_FIELD_TYPE (decl);
16526 if (type == NULL_TREE)
16527 type = TREE_TYPE (decl);
16528
16529 return type;
16530 }
16531
16532 /* Given a pointer to a tree node, return the alignment in bits for
16533 it, or else return BITS_PER_WORD if the node actually turns out to
16534 be an ERROR_MARK node. */
16535
16536 static inline unsigned
16537 simple_type_align_in_bits (const_tree type)
16538 {
16539 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
16540 }
16541
16542 static inline unsigned
16543 simple_decl_align_in_bits (const_tree decl)
16544 {
16545 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
16546 }
16547
16548 /* Return the result of rounding T up to ALIGN. */
16549
16550 static inline offset_int
16551 round_up_to_align (const offset_int &t, unsigned int align)
16552 {
16553 return wi::udiv_trunc (t + align - 1, align) * align;
16554 }
16555
16556 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
16557 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
16558 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
16559 if we fail to return the size in one of these two forms. */
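/* A DWARF expression (rather than a constant) is typically needed for types
   whose size is only known at run time, e.g. an Ada record whose size
   depends on a discriminant, or a variable-length array type.  */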
16560
16561 static dw_loc_descr_ref
16562 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
16563 {
16564 tree tree_size;
16565 struct loc_descr_context ctx;
16566
16567 /* Prefer to return a constant integer, if possible. */
16568 *cst_size = int_size_in_bytes (type);
16569 if (*cst_size != -1)
16570 return NULL;
16571
16572 ctx.context_type = const_cast<tree> (type);
16573 ctx.base_decl = NULL_TREE;
16574 ctx.dpi = NULL;
16575
16576 type = TYPE_MAIN_VARIANT (type);
16577 tree_size = TYPE_SIZE_UNIT (type);
16578 return ((tree_size != NULL_TREE)
16579 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
16580 : NULL);
16581 }
16582
16583 /* Helper structure for RECORD_TYPE processing. */
16584 struct vlr_context
16585 {
16586 /* Root RECORD_TYPE. It is needed to generate data member location
16587 descriptions in variable-length records (VLR), but also to cope with
16588 variants, which are composed of nested structures multiplexed with
16589 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
16590 function processing a FIELD_DECL, it is required to be non-null. */
16591 tree struct_type;
16592 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
16593 QUAL_UNION_TYPE), this holds an expression that computes the offset for
16594 this variant part as part of the root record (in storage units). For
16595 regular records, it must be NULL_TREE. */
16596 tree variant_part_offset;
16597 };
16598
16599 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
16600 addressed byte of the "containing object" for the given FIELD_DECL. If
16601 possible, return a native constant through CST_OFFSET (in which case NULL is
16602 returned); otherwise return a DWARF expression that computes the offset.
16603
16604 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
16605 that offset is, either because the argument turns out to be a pointer to an
16606 ERROR_MARK node, or because the offset expression is too complex for us.
16607
16608 CTX is required: see the comment for VLR_CONTEXT. */
16609
16610 static dw_loc_descr_ref
16611 field_byte_offset (const_tree decl, struct vlr_context *ctx,
16612 HOST_WIDE_INT *cst_offset)
16613 {
16614 offset_int object_offset_in_bits;
16615 offset_int object_offset_in_bytes;
16616 offset_int bitpos_int;
16617 bool is_byte_offset_cst, is_bit_offset_cst;
16618 tree tree_result;
16619 dw_loc_list_ref loc_result;
16620
16621 *cst_offset = 0;
16622
16623 if (TREE_CODE (decl) == ERROR_MARK)
16624 return NULL;
16625 else
16626 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
16627
16628 is_bit_offset_cst = TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) == INTEGER_CST;
16629 is_byte_offset_cst = TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST;
16630
16631 /* We cannot handle variable bit offsets at the moment, so abort if it's the
16632 case. */
16633 if (!is_bit_offset_cst)
16634 return NULL;
16635
16636 #ifdef PCC_BITFIELD_TYPE_MATTERS
16637 /* We used to handle only constant offsets in all cases. Now, we properly
16638 handle dynamic byte offsets only when PCC bitfield type doesn't
16639 matter. */
16640 if (PCC_BITFIELD_TYPE_MATTERS && is_byte_offset_cst && is_bit_offset_cst)
16641 {
16642 tree type;
16643 tree field_size_tree;
16644 offset_int deepest_bitpos;
16645 offset_int field_size_in_bits;
16646 unsigned int type_align_in_bits;
16647 unsigned int decl_align_in_bits;
16648 offset_int type_size_in_bits;
16649
16650 bitpos_int = wi::to_offset (bit_position (decl));
16651 type = field_type (decl);
16652 type_size_in_bits = offset_int_type_size_in_bits (type);
16653 type_align_in_bits = simple_type_align_in_bits (type);
16654
16655 field_size_tree = DECL_SIZE (decl);
16656
16657 /* The size could be unspecified if there was an error, or for
16658 a flexible array member. */
16659 if (!field_size_tree)
16660 field_size_tree = bitsize_zero_node;
16661
16662 /* If the size of the field is not constant, use the type size. */
16663 if (TREE_CODE (field_size_tree) == INTEGER_CST)
16664 field_size_in_bits = wi::to_offset (field_size_tree);
16665 else
16666 field_size_in_bits = type_size_in_bits;
16667
16668 decl_align_in_bits = simple_decl_align_in_bits (decl);
16669
16670 /* The GCC front-end doesn't make any attempt to keep track of the
16671 starting bit offset (relative to the start of the containing
16672 structure type) of the hypothetical "containing object" for a
16673 bit-field. Thus, when computing the byte offset value for the
16674 start of the "containing object" of a bit-field, we must deduce
16675 this information on our own. This can be rather tricky to do in
16676 some cases. For example, handling the following structure type
16677 definition when compiling for an i386/i486 target (which only
16678 aligns long long's to 32-bit boundaries) can be very tricky:
16679
16680 struct S { int field1; long long field2:31; };
16681
16682 Fortunately, there is a simple rule-of-thumb which can be used
16683 in such cases. When compiling for an i386/i486, GCC will
16684 allocate 8 bytes for the structure shown above. It decides to
16685 do this based upon one simple rule for bit-field allocation.
16686 GCC allocates each "containing object" for each bit-field at
16687 the first (i.e. lowest addressed) legitimate alignment boundary
16688 (based upon the required minimum alignment for the declared
16689 type of the field) which it can possibly use, subject to the
16690 condition that there is still enough available space remaining
16691 in the containing object (when allocated at the selected point)
16692 to fully accommodate all of the bits of the bit-field itself.
16693
16694 This simple rule makes it obvious why GCC allocates 8 bytes for
16695 each object of the structure type shown above. When looking
16696 for a place to allocate the "containing object" for `field2',
16697 the compiler simply tries to allocate a 64-bit "containing
16698 object" at each successive 32-bit boundary (starting at zero)
16699 until it finds a place to allocate that 64-bit field such that
16700 at least 31 contiguous (and previously unallocated) bits remain
16701 within that selected 64-bit field. (As it turns out, for the
16702 example above, the compiler finds it is OK to allocate the
16703 "containing object" 64-bit field at bit-offset zero within the
16704 structure type.)
16705
16706 Here we attempt to work backwards from the limited set of facts
16707 we're given, and we try to deduce from those facts, where GCC
16708 must have believed that the containing object started (within
16709 the structure type). The value we deduce is then used (by the
16710 callers of this routine) to generate DW_AT_location and
16711 DW_AT_bit_offset attributes for fields (both bit-fields and, in
16712 the case of DW_AT_location, regular fields as well). */
16713
16714 /* Figure out the bit-distance from the start of the structure to
16715 the "deepest" bit of the bit-field. */
16716 deepest_bitpos = bitpos_int + field_size_in_bits;
16717
16718 /* This is the tricky part. Use some fancy footwork to deduce
16719 where the lowest addressed bit of the containing object must
16720 be. */
16721 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
16722
16723 /* Round up to type_align by default. This works best for
16724 bitfields. */
16725 object_offset_in_bits
16726 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
16727
16728 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
16729 {
16730 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
16731
16732 /* Round up to decl_align instead. */
16733 object_offset_in_bits
16734 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
16735 }
/* The containing-object offset computed above is necessarily a compile-time
   constant at this point, so return it directly through CST_OFFSET.  */
object_offset_in_bytes = wi::udiv_trunc (object_offset_in_bits, BITS_PER_UNIT);
*cst_offset = object_offset_in_bytes.to_shwi ();
return NULL;
16736 }
16737 #endif /* PCC_BITFIELD_TYPE_MATTERS */
16738
16739 tree_result = byte_position (decl);
16740 if (ctx->variant_part_offset != NULL_TREE)
16741 tree_result = fold (build2 (PLUS_EXPR, TREE_TYPE (tree_result),
16742 ctx->variant_part_offset, tree_result));
16743
16744 /* If the byte offset is a constant, it's simpler to handle a native
16745 constant rather than a DWARF expression. */
16746 if (TREE_CODE (tree_result) == INTEGER_CST)
16747 {
16748 *cst_offset = wi::to_offset (tree_result).to_shwi ();
16749 return NULL;
16750 }
16751 struct loc_descr_context loc_ctx = {
16752 ctx->struct_type, /* context_type */
16753 NULL_TREE, /* base_decl */
16754 NULL /* dpi */
16755 };
16756 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
16757
16758 /* We want a DWARF expression: abort if we only have a location list with
16759 multiple elements. */
16760 if (!loc_result || !single_element_loc_list_p (loc_result))
16761 return NULL;
16762 else
16763 return loc_result->expr;
16764 }
16765 \f
16766 /* The following routines define various Dwarf attributes and any data
16767 associated with them. */
16768
16769 /* Add a location description attribute value to a DIE.
16770
16771 This emits location attributes suitable for whole variables and
16772 whole parameters. Note that the location attributes for struct fields are
16773 generated by the routine `data_member_location_attribute' below. */
16774
16775 static inline void
16776 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
16777 dw_loc_list_ref descr)
16778 {
16779 if (descr == 0)
16780 return;
16781 if (single_element_loc_list_p (descr))
16782 add_AT_loc (die, attr_kind, descr->expr);
16783 else
16784 add_AT_loc_list (die, attr_kind, descr);
16785 }
16786
16787 /* Add DW_AT_accessibility attribute to DIE if needed. */
16788
16789 static void
16790 add_accessibility_attribute (dw_die_ref die, tree decl)
16791 {
16792 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
16793 children, otherwise the default is DW_ACCESS_public. In DWARF2
16794 the default has always been DW_ACCESS_public. */
16795 if (TREE_PROTECTED (decl))
16796 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
16797 else if (TREE_PRIVATE (decl))
16798 {
16799 if (dwarf_version == 2
16800 || die->die_parent == NULL
16801 || die->die_parent->die_tag != DW_TAG_class_type)
16802 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
16803 }
16804 else if (dwarf_version > 2
16805 && die->die_parent
16806 && die->die_parent->die_tag == DW_TAG_class_type)
16807 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
16808 }
16809
16810 /* Attach the specialized form of location attribute used for data members of
16811 struct and union types. In the special case of a FIELD_DECL node which
16812 represents a bit-field, the "offset" part of this special location
16813 descriptor must indicate the distance in bytes from the lowest-addressed
16814 byte of the containing struct or union type to the lowest-addressed byte of
16815 the "containing object" for the bit-field. (See the `field_byte_offset'
16816 function above).
16817
16818 For any given bit-field, the "containing object" is a hypothetical object
16819 (of some integral or enum type) within which the given bit-field lives. The
16820 type of this hypothetical "containing object" is always the same as the
16821 declared type of the individual bit-field itself (for GCC anyway... the
16822 DWARF spec doesn't actually mandate this). Note that it is the size (in
16823 bytes) of the hypothetical "containing object" which will be given in the
16824 DW_AT_byte_size attribute for this bit-field. (See the
16825 `byte_size_attribute' function below.) It is also used when calculating the
16826 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
16827 function below.)
16828
16829 CTX is required: see the comment for VLR_CONTEXT. */
16830
16831 static void
16832 add_data_member_location_attribute (dw_die_ref die,
16833 tree decl,
16834 struct vlr_context *ctx)
16835 {
16836 HOST_WIDE_INT offset;
16837 dw_loc_descr_ref loc_descr = 0;
16838
16839 if (TREE_CODE (decl) == TREE_BINFO)
16840 {
16841 /* We're working on the TAG_inheritance for a base class. */
16842 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
16843 {
16844 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
16845 aren't at a fixed offset from all (sub)objects of the same
16846 type. We need to extract the appropriate offset from our
16847 vtable. The following dwarf expression means
16848
16849 BaseAddr = ObAddr + *((*ObAddr) - Offset)
16850
16851 This is specific to the V3 ABI, of course. */
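/* Concretely, the sequence built below is:
   DW_OP_dup; DW_OP_deref; <push the constant -offset>; DW_OP_minus;
   DW_OP_deref; DW_OP_plus.  */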
16852
16853 dw_loc_descr_ref tmp;
16854
16855 /* Make a copy of the object address. */
16856 tmp = new_loc_descr (DW_OP_dup, 0, 0);
16857 add_loc_descr (&loc_descr, tmp);
16858
16859 /* Extract the vtable address. */
16860 tmp = new_loc_descr (DW_OP_deref, 0, 0);
16861 add_loc_descr (&loc_descr, tmp);
16862
16863 /* Calculate the address of the offset. */
16864 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
16865 gcc_assert (offset < 0);
16866
16867 tmp = int_loc_descriptor (-offset);
16868 add_loc_descr (&loc_descr, tmp);
16869 tmp = new_loc_descr (DW_OP_minus, 0, 0);
16870 add_loc_descr (&loc_descr, tmp);
16871
16872 /* Extract the offset. */
16873 tmp = new_loc_descr (DW_OP_deref, 0, 0);
16874 add_loc_descr (&loc_descr, tmp);
16875
16876 /* Add it to the object address. */
16877 tmp = new_loc_descr (DW_OP_plus, 0, 0);
16878 add_loc_descr (&loc_descr, tmp);
16879 }
16880 else
16881 offset = tree_to_shwi (BINFO_OFFSET (decl));
16882 }
16883 else
16884 {
16885 loc_descr = field_byte_offset (decl, ctx, &offset);
16886
16887 /* If loc_descr is available then we know the field offset is dynamic.
16888 However, GDB does not handle dynamic field offsets very well at the
16889 moment. */
16890 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
16891 {
16892 loc_descr = NULL;
16893 offset = 0;
16894 }
16895
16896 /* Data member location evaluation starts with the base address on the
16897 stack. Compute the field offset and add it to this base address. */
16898 else if (loc_descr != NULL)
16899 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
16900 }
16901
16902 if (! loc_descr)
16903 {
16904 if (dwarf_version > 2)
16905 {
16906 /* Don't need to output a location expression, just the constant. */
16907 if (offset < 0)
16908 add_AT_int (die, DW_AT_data_member_location, offset);
16909 else
16910 add_AT_unsigned (die, DW_AT_data_member_location, offset);
16911 return;
16912 }
16913 else
16914 {
16915 enum dwarf_location_atom op;
16916
16917 /* The DWARF2 standard says that we should assume that the structure
16918 address is already on the stack, so we can specify a structure
16919 field address by using DW_OP_plus_uconst. */
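/* For instance, a member at byte offset 8 is then described by the
   expression DW_OP_plus_uconst 8 instead of a bare constant.  */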
16920 op = DW_OP_plus_uconst;
16921 loc_descr = new_loc_descr (op, offset, 0);
16922 }
16923 }
16924
16925 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
16926 }
16927
16928 /* Writes integer values to dw_vec_const array. */
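/* The least significant byte is stored first; e.g. insert_int (0x1234, 2,
   dest) stores 0x34 and then 0x12.  */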
16929
16930 static void
16931 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
16932 {
16933 while (size != 0)
16934 {
16935 *dest++ = val & 0xff;
16936 val >>= 8;
16937 --size;
16938 }
16939 }
16940
16941 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
16942
16943 static HOST_WIDE_INT
16944 extract_int (const unsigned char *src, unsigned int size)
16945 {
16946 HOST_WIDE_INT val = 0;
16947
16948 src += size;
16949 while (size != 0)
16950 {
16951 val <<= 8;
16952 val |= *--src & 0xff;
16953 --size;
16954 }
16955 return val;
16956 }
16957
16958 /* Writes wide_int values to dw_vec_const array. */
16959
16960 static void
16961 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
16962 {
16963 int i;
16964
16965 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
16966 {
16967 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
16968 return;
16969 }
16970
16971 /* We'd have to extend this code to support odd sizes. */
16972 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
16973
16974 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
16975
16976 if (WORDS_BIG_ENDIAN)
16977 for (i = n - 1; i >= 0; i--)
16978 {
16979 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
16980 dest += sizeof (HOST_WIDE_INT);
16981 }
16982 else
16983 for (i = 0; i < n; i++)
16984 {
16985 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
16986 dest += sizeof (HOST_WIDE_INT);
16987 }
16988 }
16989
16990 /* Writes floating point values to dw_vec_const array. */
16991
16992 static void
16993 insert_float (const_rtx rtl, unsigned char *array)
16994 {
16995 long val[4];
16996 int i;
16997
16998 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), GET_MODE (rtl));
16999
17000 /* real_to_target puts 32-bit pieces in each long. Pack them. */
17001 for (i = 0; i < GET_MODE_SIZE (GET_MODE (rtl)) / 4; i++)
17002 {
17003 insert_int (val[i], 4, array);
17004 array += 4;
17005 }
17006 }
17007
17008 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
17009 does not have a "location" either in memory or in a register. These
17010 things can arise in GNU C when a constant is passed as an actual parameter
17011 to an inlined function. They can also arise in C++ where declared
17012 constants do not necessarily get memory "homes". */
17013
17014 static bool
17015 add_const_value_attribute (dw_die_ref die, rtx rtl)
17016 {
17017 switch (GET_CODE (rtl))
17018 {
17019 case CONST_INT:
17020 {
17021 HOST_WIDE_INT val = INTVAL (rtl);
17022
17023 if (val < 0)
17024 add_AT_int (die, DW_AT_const_value, val);
17025 else
17026 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
17027 }
17028 return true;
17029
17030 case CONST_WIDE_INT:
17031 {
17032 wide_int w1 = std::make_pair (rtl, MAX_MODE_INT);
17033 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
17034 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
17035 wide_int w = wi::zext (w1, prec);
17036 add_AT_wide (die, DW_AT_const_value, w);
17037 }
17038 return true;
17039
17040 case CONST_DOUBLE:
17041 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
17042 floating-point constant. A CONST_DOUBLE is used whenever the
17043 constant requires more than one word in order to be adequately
17044 represented. */
17045 {
17046 machine_mode mode = GET_MODE (rtl);
17047
17048 if (TARGET_SUPPORTS_WIDE_INT == 0 && !SCALAR_FLOAT_MODE_P (mode))
17049 add_AT_double (die, DW_AT_const_value,
17050 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
17051 else
17052 {
17053 unsigned int length = GET_MODE_SIZE (mode);
17054 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
17055
17056 insert_float (rtl, array);
17057 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
17058 }
17059 }
17060 return true;
17061
17062 case CONST_VECTOR:
17063 {
17064 machine_mode mode = GET_MODE (rtl);
17065 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
17066 unsigned int length = CONST_VECTOR_NUNITS (rtl);
17067 unsigned char *array
17068 = ggc_vec_alloc<unsigned char> (length * elt_size);
17069 unsigned int i;
17070 unsigned char *p;
17071 machine_mode imode = GET_MODE_INNER (mode);
17072
17073 switch (GET_MODE_CLASS (mode))
17074 {
17075 case MODE_VECTOR_INT:
17076 for (i = 0, p = array; i < length; i++, p += elt_size)
17077 {
17078 rtx elt = CONST_VECTOR_ELT (rtl, i);
17079 insert_wide_int (std::make_pair (elt, imode), p, elt_size);
17080 }
17081 break;
17082
17083 case MODE_VECTOR_FLOAT:
17084 for (i = 0, p = array; i < length; i++, p += elt_size)
17085 {
17086 rtx elt = CONST_VECTOR_ELT (rtl, i);
17087 insert_float (elt, p);
17088 }
17089 break;
17090
17091 default:
17092 gcc_unreachable ();
17093 }
17094
17095 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
17096 }
17097 return true;
17098
17099 case CONST_STRING:
17100 if (dwarf_version >= 4 || !dwarf_strict)
17101 {
17102 dw_loc_descr_ref loc_result;
17103 resolve_one_addr (&rtl);
17104 rtl_addr:
17105 loc_result = new_addr_loc_descr (rtl, dtprel_false);
17106 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
17107 add_AT_loc (die, DW_AT_location, loc_result);
17108 vec_safe_push (used_rtx_array, rtl);
17109 return true;
17110 }
17111 return false;
17112
17113 case CONST:
17114 if (CONSTANT_P (XEXP (rtl, 0)))
17115 return add_const_value_attribute (die, XEXP (rtl, 0));
17116 /* FALLTHROUGH */
17117 case SYMBOL_REF:
17118 if (!const_ok_for_output (rtl))
17119 return false;
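/* FALLTHROUGH */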
17120 case LABEL_REF:
17121 if (dwarf_version >= 4 || !dwarf_strict)
17122 goto rtl_addr;
17123 return false;
17124
17125 case PLUS:
17126 /* In cases where an inlined instance of an inline function is passed
17127 the address of an `auto' variable (which is local to the caller) we
17128 can get a situation where the DECL_RTL of the artificial local
17129 variable (for the inlining) which acts as a stand-in for the
17130 corresponding formal parameter (of the inline function) will look
17131 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
17132 exactly a compile-time constant expression, but it isn't the address
17133 of the (artificial) local variable either. Rather, it represents the
17134 *value* which the artificial local variable always has during its
17135 lifetime. We currently have no way to represent such quasi-constant
17136 values in Dwarf, so for now we just punt and generate nothing. */
17137 return false;
17138
17139 case HIGH:
17140 case CONST_FIXED:
17141 return false;
17142
17143 case MEM:
17144 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
17145 && MEM_READONLY_P (rtl)
17146 && GET_MODE (rtl) == BLKmode)
17147 {
17148 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
17149 return true;
17150 }
17151 return false;
17152
17153 default:
17154 /* No other kinds of rtx should be possible here. */
17155 gcc_unreachable ();
17156 }
17157 return false;
17158 }
17159
17160 /* Determine whether the evaluation of EXPR references any variables
17161 or functions which aren't otherwise used (and therefore may not be
17162 output). */
17163 static tree
17164 reference_to_unused (tree * tp, int * walk_subtrees,
17165 void * data ATTRIBUTE_UNUSED)
17166 {
17167 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
17168 *walk_subtrees = 0;
17169
17170 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
17171 && ! TREE_ASM_WRITTEN (*tp))
17172 return *tp;
17173 /* ??? The C++ FE emits debug information for using decls, so
17174 putting gcc_unreachable here falls over. See PR31899. For now
17175 be conservative. */
17176 else if (!symtab->global_info_ready
17177 && (TREE_CODE (*tp) == VAR_DECL || TREE_CODE (*tp) == FUNCTION_DECL))
17178 return *tp;
17179 else if (TREE_CODE (*tp) == VAR_DECL)
17180 {
17181 varpool_node *node = varpool_node::get (*tp);
17182 if (!node || !node->definition)
17183 return *tp;
17184 }
17185 else if (TREE_CODE (*tp) == FUNCTION_DECL
17186 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
17187 {
17188 /* The call graph machinery must have finished analyzing,
17189 optimizing and gimplifying the CU by now.
17190 So if *TP has no call graph node associated
17191 to it, it means *TP will not be emitted. */
17192 if (!cgraph_node::get (*tp))
17193 return *tp;
17194 }
17195 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
17196 return *tp;
17197
17198 return NULL_TREE;
17199 }
17200
17201 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
17202 for use in a later add_const_value_attribute call. */
17203
17204 static rtx
17205 rtl_for_decl_init (tree init, tree type)
17206 {
17207 rtx rtl = NULL_RTX;
17208
17209 STRIP_NOPS (init);
17210
17211 /* If a variable is initialized with a string constant without embedded
17212 zeros, build CONST_STRING. */
17213 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
17214 {
17215 tree enttype = TREE_TYPE (type);
17216 tree domain = TYPE_DOMAIN (type);
17217 machine_mode mode = TYPE_MODE (enttype);
17218
17219 if (GET_MODE_CLASS (mode) == MODE_INT && GET_MODE_SIZE (mode) == 1
17220 && domain
17221 && integer_zerop (TYPE_MIN_VALUE (domain))
17222 && compare_tree_int (TYPE_MAX_VALUE (domain),
17223 TREE_STRING_LENGTH (init) - 1) == 0
17224 && ((size_t) TREE_STRING_LENGTH (init)
17225 == strlen (TREE_STRING_POINTER (init)) + 1))
17226 {
17227 rtl = gen_rtx_CONST_STRING (VOIDmode,
17228 ggc_strdup (TREE_STRING_POINTER (init)));
17229 rtl = gen_rtx_MEM (BLKmode, rtl);
17230 MEM_READONLY_P (rtl) = 1;
17231 }
17232 }
17233 /* Other aggregates, and complex values, could be represented using
17234 CONCAT: FIXME! */
17235 else if (AGGREGATE_TYPE_P (type)
17236 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
17237 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
17238 || TREE_CODE (type) == COMPLEX_TYPE)
17239 ;
17240 /* Vectors only work if their mode is supported by the target.
17241 FIXME: generic vectors ought to work too. */
17242 else if (TREE_CODE (type) == VECTOR_TYPE
17243 && !VECTOR_MODE_P (TYPE_MODE (type)))
17244 ;
17245 /* If the initializer is something that we know will expand into an
17246 immediate RTL constant, expand it now. We must be careful not to
17247 reference variables which won't be output. */
17248 else if (initializer_constant_valid_p (init, type)
17249 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
17250 {
17251 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
17252 possible. */
17253 if (TREE_CODE (type) == VECTOR_TYPE)
17254 switch (TREE_CODE (init))
17255 {
17256 case VECTOR_CST:
17257 break;
17258 case CONSTRUCTOR:
17259 if (TREE_CONSTANT (init))
17260 {
17261 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
17262 bool constant_p = true;
17263 tree value;
17264 unsigned HOST_WIDE_INT ix;
17265
17266 /* Even when ctor is constant, it might contain non-*_CST
17267 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
17268 belong into VECTOR_CST nodes. */
17269 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
17270 if (!CONSTANT_CLASS_P (value))
17271 {
17272 constant_p = false;
17273 break;
17274 }
17275
17276 if (constant_p)
17277 {
17278 init = build_vector_from_ctor (type, elts);
17279 break;
17280 }
17281 }
17282 /* FALLTHRU */
17283
17284 default:
17285 return NULL;
17286 }
17287
17288 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
17289
17290 /* If expand_expr returns a MEM, it wasn't immediate. */
17291 gcc_assert (!rtl || !MEM_P (rtl));
17292 }
17293
17294 return rtl;
17295 }
17296
17297 /* Generate RTL for the variable DECL to represent its location. */
17298
17299 static rtx
17300 rtl_for_decl_location (tree decl)
17301 {
17302 rtx rtl;
17303
17304 /* Here we have to decide where we are going to say the parameter "lives"
17305 (as far as the debugger is concerned). We only have a couple of
17306 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
17307
17308 DECL_RTL normally indicates where the parameter lives during most of the
17309 activation of the function. If optimization is enabled however, this
17310 could be either NULL or else a pseudo-reg. Both of those cases indicate
17311 that the parameter doesn't really live anywhere (as far as the code
17312 generation parts of GCC are concerned) during most of the function's
17313 activation. That will happen (for example) if the parameter is never
17314 referenced within the function.
17315
17316 We could just generate a location descriptor here for all non-NULL
17317 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
17318 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
17319 where DECL_RTL is NULL or is a pseudo-reg.
17320
17321 Note however that we can only get away with using DECL_INCOMING_RTL as
17322 a backup substitute for DECL_RTL in certain limited cases. In cases
17323 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
17324 we can be sure that the parameter was passed using the same type as it is
17325 declared to have within the function, and that its DECL_INCOMING_RTL
17326 points us to a place where a value of that type is passed.
17327
17328 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
17329 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
17330 because in these cases DECL_INCOMING_RTL points us to a value of some
17331 type which is *different* from the type of the parameter itself. Thus,
17332 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
17333 such cases, the debugger would end up (for example) trying to fetch a
17334 `float' from a place which actually contains the first part of a
17335 `double'. That would lead to really incorrect and confusing
17336 output at debug-time.
17337
17338 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
17339 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
17340 are a couple of exceptions however. On little-endian machines we can
17341 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
17342 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
17343 an integral type that is smaller than TREE_TYPE (decl). These cases arise
17344 when (on a little-endian machine) a non-prototyped function has a
17345 parameter declared to be of type `short' or `char'. In such cases,
17346 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
17347 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
17348 passed `int' value. If the debugger then uses that address to fetch
17349 a `short' or a `char' (on a little-endian machine) the result will be
17350 the correct data, so we allow for such exceptional cases below.
17351
17352 Note that our goal here is to describe the place where the given formal
17353 parameter lives during most of the function's activation (i.e. between the
17354 end of the prologue and the start of the epilogue). We'll do that as best
17355 as we can. Note however that if the given formal parameter is modified
17356 sometime during the execution of the function, then a stack backtrace (at
17357 debug-time) will show the function as having been called with the *new*
17358 value rather than the value which was originally passed in. This happens
17359 rarely enough that it is not a major problem, but it *is* a problem, and
17360 I'd like to fix it.
17361
17362 A future version of dwarf2out.c may generate two additional attributes for
17363 any given DW_TAG_formal_parameter DIE which will describe the "passed
17364 type" and the "passed location" for the given formal parameter in addition
17365 to the attributes we now generate to indicate the "declared type" and the
17366 "active location" for each parameter. This additional set of attributes
17367 could be used by debuggers for stack backtraces. Separately, note that
17368 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
17369 This happens (for example) for inlined-instances of inline function formal
17370 parameters which are never referenced. This really shouldn't be
17371 happening. All PARM_DECL nodes should get valid non-NULL
17372 DECL_INCOMING_RTL values. FIXME. */
17373
17374 /* Use DECL_RTL as the "location" unless we find something better. */
17375 rtl = DECL_RTL_IF_SET (decl);
17376
17377 /* When generating abstract instances, ignore everything except
17378 constants, symbols living in memory, and symbols living in
17379 fixed registers. */
17380 if (! reload_completed)
17381 {
17382 if (rtl
17383 && (CONSTANT_P (rtl)
17384 || (MEM_P (rtl)
17385 && CONSTANT_P (XEXP (rtl, 0)))
17386 || (REG_P (rtl)
17387 && TREE_CODE (decl) == VAR_DECL
17388 && TREE_STATIC (decl))))
17389 {
17390 rtl = targetm.delegitimize_address (rtl);
17391 return rtl;
17392 }
17393 rtl = NULL_RTX;
17394 }
17395 else if (TREE_CODE (decl) == PARM_DECL)
17396 {
17397 if (rtl == NULL_RTX
17398 || is_pseudo_reg (rtl)
17399 || (MEM_P (rtl)
17400 && is_pseudo_reg (XEXP (rtl, 0))
17401 && DECL_INCOMING_RTL (decl)
17402 && MEM_P (DECL_INCOMING_RTL (decl))
17403 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
17404 {
17405 tree declared_type = TREE_TYPE (decl);
17406 tree passed_type = DECL_ARG_TYPE (decl);
17407 machine_mode dmode = TYPE_MODE (declared_type);
17408 machine_mode pmode = TYPE_MODE (passed_type);
17409
17410 /* This decl represents a formal parameter which was optimized out.
17411 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
17412 all cases where (rtl == NULL_RTX) just below. */
17413 if (dmode == pmode)
17414 rtl = DECL_INCOMING_RTL (decl);
17415 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
17416 && SCALAR_INT_MODE_P (dmode)
17417 && GET_MODE_SIZE (dmode) <= GET_MODE_SIZE (pmode)
17418 && DECL_INCOMING_RTL (decl))
17419 {
17420 rtx inc = DECL_INCOMING_RTL (decl);
17421 if (REG_P (inc))
17422 rtl = inc;
17423 else if (MEM_P (inc))
17424 {
17425 if (BYTES_BIG_ENDIAN)
17426 rtl = adjust_address_nv (inc, dmode,
17427 GET_MODE_SIZE (pmode)
17428 - GET_MODE_SIZE (dmode));
17429 else
17430 rtl = inc;
17431 }
17432 }
17433 }
17434
17435 /* If the parm was passed in registers, but lives on the stack, then
17436 make a big endian correction if the mode of the type of the
17437 parameter is not the same as the mode of the rtl. */
17438 /* ??? This is the same series of checks that are made in dbxout.c before
17439 we reach the big endian correction code there. It isn't clear if all
17440 of these checks are necessary here, but keeping them all is the safe
17441 thing to do. */
17442 else if (MEM_P (rtl)
17443 && XEXP (rtl, 0) != const0_rtx
17444 && ! CONSTANT_P (XEXP (rtl, 0))
17445 /* Not passed in memory. */
17446 && !MEM_P (DECL_INCOMING_RTL (decl))
17447 /* Not passed by invisible reference. */
17448 && (!REG_P (XEXP (rtl, 0))
17449 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
17450 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
17451 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
17452 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
17453 #endif
17454 )
17455 /* Big endian correction check. */
17456 && BYTES_BIG_ENDIAN
17457 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
17458 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl)))
17459 < UNITS_PER_WORD))
17460 {
17461 machine_mode addr_mode = get_address_mode (rtl);
17462 int offset = (UNITS_PER_WORD
17463 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
17464
17465 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
17466 plus_constant (addr_mode, XEXP (rtl, 0), offset));
17467 }
17468 }
17469 else if (TREE_CODE (decl) == VAR_DECL
17470 && rtl
17471 && MEM_P (rtl)
17472 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl))
17473 && BYTES_BIG_ENDIAN)
17474 {
17475 machine_mode addr_mode = get_address_mode (rtl);
17476 int rsize = GET_MODE_SIZE (GET_MODE (rtl));
17477 int dsize = GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl)));
17478
17479 /* If a variable is declared "register" yet is smaller than
17480 a register, then if we store the variable to memory, it
17481 looks like we're storing a register-sized value, when in
17482 fact we are not. We need to adjust the offset of the
17483 storage location to reflect the actual value's bytes,
17484 else gdb will not be able to display it. */
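/* For instance, a 1-byte variable stored in a 4-byte (register-sized) slot
   on a big-endian target has its value in the last byte of the slot, so the
   address below is advanced by 3.  */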
17485 if (rsize > dsize)
17486 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
17487 plus_constant (addr_mode, XEXP (rtl, 0),
17488 rsize - dsize));
17489 }
17490
17491 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
17492 and will have been substituted directly into all expressions that use it.
17493 C does not have such a concept, but C++ and other languages do. */
17494 if (!rtl && TREE_CODE (decl) == VAR_DECL && DECL_INITIAL (decl))
17495 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
17496
17497 if (rtl)
17498 rtl = targetm.delegitimize_address (rtl);
17499
17500 /* If we don't look past the constant pool, we risk emitting a
17501 reference to a constant pool entry that isn't referenced from
17502 code, and thus is not emitted. */
17503 if (rtl)
17504 rtl = avoid_constant_pool_reference (rtl);
17505
17506 /* Try harder to get a rtl. If this symbol ends up not being emitted
17507 in the current CU, resolve_addr will remove the expression referencing
17508 it. */
17509 if (rtl == NULL_RTX
17510 && TREE_CODE (decl) == VAR_DECL
17511 && !DECL_EXTERNAL (decl)
17512 && TREE_STATIC (decl)
17513 && DECL_NAME (decl)
17514 && !DECL_HARD_REGISTER (decl)
17515 && DECL_MODE (decl) != VOIDmode)
17516 {
17517 rtl = make_decl_rtl_for_debug (decl);
17518 if (!MEM_P (rtl)
17519 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
17520 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
17521 rtl = NULL_RTX;
17522 }
17523
17524 return rtl;
17525 }
17526
17527 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
17528 returned. If so, the decl for the COMMON block is returned, and the
17529 value is the offset into the common block for the symbol. */
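/* Illustrative sketch, not taken from a real test case: for Fortran
   source along the lines of

     INTEGER I, J
     COMMON /BLK/ I, J

   the decl for J has a DECL_VALUE_EXPR that is a COMPONENT_REF into the
   artificial decl representing /BLK/; assuming 4-byte INTEGERs, the
   function would return the decl for the common block and set *value
   to 4, the byte offset of J within it.  */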
17530
17531 static tree
17532 fortran_common (tree decl, HOST_WIDE_INT *value)
17533 {
17534 tree val_expr, cvar;
17535 machine_mode mode;
17536 HOST_WIDE_INT bitsize, bitpos;
17537 tree offset;
17538 int unsignedp, reversep, volatilep = 0;
17539
17540 /* If the decl isn't a VAR_DECL, or if it isn't static, or if
17541 it does not have a value (the offset into the common area), or if it
17542 is thread local (as opposed to global), then it isn't common and shouldn't
17543 be handled as such. */
17544 if (TREE_CODE (decl) != VAR_DECL
17545 || !TREE_STATIC (decl)
17546 || !DECL_HAS_VALUE_EXPR_P (decl)
17547 || !is_fortran ())
17548 return NULL_TREE;
17549
17550 val_expr = DECL_VALUE_EXPR (decl);
17551 if (TREE_CODE (val_expr) != COMPONENT_REF)
17552 return NULL_TREE;
17553
17554 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
17555 &unsignedp, &reversep, &volatilep, true);
17556
17557 if (cvar == NULL_TREE
17558 || TREE_CODE (cvar) != VAR_DECL
17559 || DECL_ARTIFICIAL (cvar)
17560 || !TREE_PUBLIC (cvar))
17561 return NULL_TREE;
17562
17563 *value = 0;
17564 if (offset != NULL)
17565 {
17566 if (!tree_fits_shwi_p (offset))
17567 return NULL_TREE;
17568 *value = tree_to_shwi (offset);
17569 }
17570 if (bitpos != 0)
17571 *value += bitpos / BITS_PER_UNIT;
17572
17573 return cvar;
17574 }
17575
17576 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
17577 data attribute for a variable or a parameter. We generate the
17578 DW_AT_const_value attribute only in those cases where the given variable
17579 or parameter does not have a true "location" either in memory or in a
17580 register. This can happen (for example) when a constant is passed as an
17581 actual argument in a call to an inline function. (It's possible that
17582 these things can crop up in other ways also.) Note that one type of
17583 constant value which can be passed into an inlined function is a constant
17584 pointer. This can happen for example if an actual argument in an inlined
17585 function call evaluates to a compile-time constant address.
17586
17587 CACHE_P is true if it is worth caching the location list for DECL,
17588 so that future calls can reuse it rather than regenerate it from scratch.
17589 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
17590 since we will need to refer to them each time the function is inlined. */
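/* Minimal illustrative example (assumed, not from the testsuite):

     static inline int sq (int x) { return x * x; }
     int nine (void) { return sq (3); }

   After inlining, the copy of X inside NINE may have no storage at all;
   it then gets DW_AT_const_value 3 from this function rather than a
   DW_AT_location.  */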
17591
17592 static bool
17593 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
17594 {
17595 rtx rtl;
17596 dw_loc_list_ref list;
17597 var_loc_list *loc_list;
17598 cached_dw_loc_list *cache;
17599
17600 if (early_dwarf)
17601 return false;
17602
17603 if (TREE_CODE (decl) == ERROR_MARK)
17604 return false;
17605
17606 if (get_AT (die, DW_AT_location)
17607 || get_AT (die, DW_AT_const_value))
17608 return true;
17609
17610 gcc_assert (TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL
17611 || TREE_CODE (decl) == RESULT_DECL);
17612
17613 /* Try to get some constant RTL for this decl, and use that as the value of
17614 the location. */
17615
17616 rtl = rtl_for_decl_location (decl);
17617 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
17618 && add_const_value_attribute (die, rtl))
17619 return true;
17620
17621 /* See if we have a single-element location list that is equivalent to
17622 a constant value. In that case it is better to use add_const_value_attribute
17623 rather than expanding the equivalent constant value. */
17624 loc_list = lookup_decl_loc (decl);
17625 if (loc_list
17626 && loc_list->first
17627 && loc_list->first->next == NULL
17628 && NOTE_P (loc_list->first->loc)
17629 && NOTE_VAR_LOCATION (loc_list->first->loc)
17630 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
17631 {
17632 struct var_loc_node *node;
17633
17634 node = loc_list->first;
17635 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
17636 if (GET_CODE (rtl) == EXPR_LIST)
17637 rtl = XEXP (rtl, 0);
17638 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
17639 && add_const_value_attribute (die, rtl))
17640 return true;
17641 }
17642 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
17643 list several times. See if we've already cached the contents. */
17644 list = NULL;
17645 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
17646 cache_p = false;
17647 if (cache_p)
17648 {
17649 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
17650 if (cache)
17651 list = cache->loc_list;
17652 }
17653 if (list == NULL)
17654 {
17655 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
17656 NULL);
17657 /* It is usually worth caching this result if the decl is from
17658 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
17659 if (cache_p && list && list->dw_loc_next)
17660 {
17661 cached_dw_loc_list **slot
17662 = cached_dw_loc_list_table->find_slot_with_hash (decl,
17663 DECL_UID (decl),
17664 INSERT);
17665 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
17666 cache->decl_id = DECL_UID (decl);
17667 cache->loc_list = list;
17668 *slot = cache;
17669 }
17670 }
17671 if (list)
17672 {
17673 add_AT_location_description (die, DW_AT_location, list);
17674 return true;
17675 }
17676 /* None of that worked, so it must not really have a location;
17677 try adding a constant value attribute from the DECL_INITIAL. */
17678 return tree_add_const_value_attribute_for_decl (die, decl);
17679 }
17680
17681 /* Helper function for tree_add_const_value_attribute. Natively encode
17682 initializer INIT into an array. Return true if successful. */
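/* Worked example (illustrative only): for

     static const unsigned char v[4] = { 1, 2, 3, 4 };

   a call with SIZE == 4 fills ARRAY with the bytes 01 02 03 04.  A
   RANGE_EXPR index such as "[0 ... 3] = 7" would instead encode the
   single element once and replicate it across the covered slots.  */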
17683
17684 static bool
17685 native_encode_initializer (tree init, unsigned char *array, int size)
17686 {
17687 tree type;
17688
17689 if (init == NULL_TREE)
17690 return false;
17691
17692 STRIP_NOPS (init);
17693 switch (TREE_CODE (init))
17694 {
17695 case STRING_CST:
17696 type = TREE_TYPE (init);
17697 if (TREE_CODE (type) == ARRAY_TYPE)
17698 {
17699 tree enttype = TREE_TYPE (type);
17700 machine_mode mode = TYPE_MODE (enttype);
17701
17702 if (GET_MODE_CLASS (mode) != MODE_INT || GET_MODE_SIZE (mode) != 1)
17703 return false;
17704 if (int_size_in_bytes (type) != size)
17705 return false;
17706 if (size > TREE_STRING_LENGTH (init))
17707 {
17708 memcpy (array, TREE_STRING_POINTER (init),
17709 TREE_STRING_LENGTH (init));
17710 memset (array + TREE_STRING_LENGTH (init),
17711 '\0', size - TREE_STRING_LENGTH (init));
17712 }
17713 else
17714 memcpy (array, TREE_STRING_POINTER (init), size);
17715 return true;
17716 }
17717 return false;
17718 case CONSTRUCTOR:
17719 type = TREE_TYPE (init);
17720 if (int_size_in_bytes (type) != size)
17721 return false;
17722 if (TREE_CODE (type) == ARRAY_TYPE)
17723 {
17724 HOST_WIDE_INT min_index;
17725 unsigned HOST_WIDE_INT cnt;
17726 int curpos = 0, fieldsize;
17727 constructor_elt *ce;
17728
17729 if (TYPE_DOMAIN (type) == NULL_TREE
17730 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
17731 return false;
17732
17733 fieldsize = int_size_in_bytes (TREE_TYPE (type));
17734 if (fieldsize <= 0)
17735 return false;
17736
17737 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
17738 memset (array, '\0', size);
17739 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
17740 {
17741 tree val = ce->value;
17742 tree index = ce->index;
17743 int pos = curpos;
17744 if (index && TREE_CODE (index) == RANGE_EXPR)
17745 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
17746 * fieldsize;
17747 else if (index)
17748 pos = (tree_to_shwi (index) - min_index) * fieldsize;
17749
17750 if (val)
17751 {
17752 STRIP_NOPS (val);
17753 if (!native_encode_initializer (val, array + pos, fieldsize))
17754 return false;
17755 }
17756 curpos = pos + fieldsize;
17757 if (index && TREE_CODE (index) == RANGE_EXPR)
17758 {
17759 int count = tree_to_shwi (TREE_OPERAND (index, 1))
17760 - tree_to_shwi (TREE_OPERAND (index, 0));
17761 while (count-- > 0)
17762 {
17763 if (val)
17764 memcpy (array + curpos, array + pos, fieldsize);
17765 curpos += fieldsize;
17766 }
17767 }
17768 gcc_assert (curpos <= size);
17769 }
17770 return true;
17771 }
17772 else if (TREE_CODE (type) == RECORD_TYPE
17773 || TREE_CODE (type) == UNION_TYPE)
17774 {
17775 tree field = NULL_TREE;
17776 unsigned HOST_WIDE_INT cnt;
17777 constructor_elt *ce;
17778
17779 if (int_size_in_bytes (type) != size)
17780 return false;
17781
17782 if (TREE_CODE (type) == RECORD_TYPE)
17783 field = TYPE_FIELDS (type);
17784
17785 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
17786 {
17787 tree val = ce->value;
17788 int pos, fieldsize;
17789
17790 if (ce->index != 0)
17791 field = ce->index;
17792
17793 if (val)
17794 STRIP_NOPS (val);
17795
17796 if (field == NULL_TREE || DECL_BIT_FIELD (field))
17797 return false;
17798
17799 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
17800 && TYPE_DOMAIN (TREE_TYPE (field))
17801 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
17802 return false;
17803 else if (DECL_SIZE_UNIT (field) == NULL_TREE
17804 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
17805 return false;
17806 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
17807 pos = int_byte_position (field);
17808 gcc_assert (pos + fieldsize <= size);
17809 if (val
17810 && !native_encode_initializer (val, array + pos, fieldsize))
17811 return false;
17812 }
17813 return true;
17814 }
17815 return false;
17816 case VIEW_CONVERT_EXPR:
17817 case NON_LVALUE_EXPR:
17818 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
17819 default:
17820 return native_encode_expr (init, array, size) == size;
17821 }
17822 }
17823
17824 /* Attach a DW_AT_const_value attribute to DIE. The value of the
17825 attribute is the const value T. */
17826
17827 static bool
17828 tree_add_const_value_attribute (dw_die_ref die, tree t)
17829 {
17830 tree init;
17831 tree type = TREE_TYPE (t);
17832 rtx rtl;
17833
17834 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
17835 return false;
17836
17837 init = t;
17838 gcc_assert (!DECL_P (init));
17839
17840 rtl = rtl_for_decl_init (init, type);
17841 if (rtl)
17842 return add_const_value_attribute (die, rtl);
17843 /* If the host and target are sane, try harder. */
17844 else if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
17845 && initializer_constant_valid_p (init, type))
17846 {
17847 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
17848 if (size > 0 && (int) size == size)
17849 {
17850 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
17851
17852 if (native_encode_initializer (init, array, size))
17853 {
17854 add_AT_vec (die, DW_AT_const_value, size, 1, array);
17855 return true;
17856 }
17857 ggc_free (array);
17858 }
17859 }
17860 return false;
17861 }
17862
17863 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
17864 attribute is the const value of T, where T is an integral constant
17865 variable with static storage duration
17866 (so it can't be a PARM_DECL or a RESULT_DECL). */
17867
17868 static bool
17869 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
17870 {
17871
17872 if (!decl
17873 || (TREE_CODE (decl) != VAR_DECL
17874 && TREE_CODE (decl) != CONST_DECL)
17875 || (TREE_CODE (decl) == VAR_DECL
17876 && !TREE_STATIC (decl)))
17877 return false;
17878
17879 if (TREE_READONLY (decl)
17880 && ! TREE_THIS_VOLATILE (decl)
17881 && DECL_INITIAL (decl))
17882 /* OK */;
17883 else
17884 return false;
17885
17886 /* Don't add DW_AT_const_value if abstract origin already has one. */
17887 if (get_AT (var_die, DW_AT_const_value))
17888 return false;
17889
17890 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
17891 }
17892
17893 /* Convert the CFI instructions for the current function into a
17894 location list. This is used for DW_AT_frame_base when we are targeting
17895 a dwarf2 consumer that does not support the dwarf3
17896 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
17897 expressions. */
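/* Sketch of the intended result (illustrative and target-dependent):
   for a function whose CFA is SP + 16 during the prologue and FP + 16
   afterwards, and with OFFSET == -16, the list would look roughly like

     <func_begin, prologue_end>   DW_OP_breg<SP> 0
     <prologue_end, func_end>     DW_OP_breg<FP> 0

   i.e. one entry per range over which the CFA rule is constant, with
   OFFSET folded into each expression by build_cfa_loc.  */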
17898
17899 static dw_loc_list_ref
17900 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
17901 {
17902 int ix;
17903 dw_fde_ref fde;
17904 dw_loc_list_ref list, *list_tail;
17905 dw_cfi_ref cfi;
17906 dw_cfa_location last_cfa, next_cfa;
17907 const char *start_label, *last_label, *section;
17908 dw_cfa_location remember;
17909
17910 fde = cfun->fde;
17911 gcc_assert (fde != NULL);
17912
17913 section = secname_for_decl (current_function_decl);
17914 list_tail = &list;
17915 list = NULL;
17916
17917 memset (&next_cfa, 0, sizeof (next_cfa));
17918 next_cfa.reg = INVALID_REGNUM;
17919 remember = next_cfa;
17920
17921 start_label = fde->dw_fde_begin;
17922
17923 /* ??? Bald assumption that the CIE opcode list does not contain
17924 advance opcodes. */
17925 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
17926 lookup_cfa_1 (cfi, &next_cfa, &remember);
17927
17928 last_cfa = next_cfa;
17929 last_label = start_label;
17930
17931 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
17932 {
17933 /* If the first partition contained no CFI adjustments, the
17934 CIE opcodes apply to the whole first partition. */
17935 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
17936 fde->dw_fde_begin, fde->dw_fde_end, section);
17937 list_tail = &(*list_tail)->dw_loc_next;
17938 start_label = last_label = fde->dw_fde_second_begin;
17939 }
17940
17941 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
17942 {
17943 switch (cfi->dw_cfi_opc)
17944 {
17945 case DW_CFA_set_loc:
17946 case DW_CFA_advance_loc1:
17947 case DW_CFA_advance_loc2:
17948 case DW_CFA_advance_loc4:
17949 if (!cfa_equal_p (&last_cfa, &next_cfa))
17950 {
17951 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
17952 start_label, last_label, section);
17953
17954 list_tail = &(*list_tail)->dw_loc_next;
17955 last_cfa = next_cfa;
17956 start_label = last_label;
17957 }
17958 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
17959 break;
17960
17961 case DW_CFA_advance_loc:
17962 /* The encoding is complex enough that we should never emit this. */
17963 gcc_unreachable ();
17964
17965 default:
17966 lookup_cfa_1 (cfi, &next_cfa, &remember);
17967 break;
17968 }
17969 if (ix + 1 == fde->dw_fde_switch_cfi_index)
17970 {
17971 if (!cfa_equal_p (&last_cfa, &next_cfa))
17972 {
17973 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
17974 start_label, last_label, section);
17975
17976 list_tail = &(*list_tail)->dw_loc_next;
17977 last_cfa = next_cfa;
17978 start_label = last_label;
17979 }
17980 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
17981 start_label, fde->dw_fde_end, section);
17982 list_tail = &(*list_tail)->dw_loc_next;
17983 start_label = last_label = fde->dw_fde_second_begin;
17984 }
17985 }
17986
17987 if (!cfa_equal_p (&last_cfa, &next_cfa))
17988 {
17989 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
17990 start_label, last_label, section);
17991 list_tail = &(*list_tail)->dw_loc_next;
17992 start_label = last_label;
17993 }
17994
17995 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
17996 start_label,
17997 fde->dw_fde_second_begin
17998 ? fde->dw_fde_second_end : fde->dw_fde_end,
17999 section);
18000
18001 if (list && list->dw_loc_next)
18002 gen_llsym (list);
18003
18004 return list;
18005 }
18006
18007 /* Compute a displacement from the "steady-state frame pointer" to the
18008 frame base (often the same as the CFA), and store it in
18009 frame_pointer_fb_offset. OFFSET is added to the displacement
18010 before the latter is negated. */
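/* Illustrative arithmetic with assumed values: if
   ARG_POINTER_CFA_OFFSET yields 16 and eliminating the argument
   pointer gives (plus (reg sp) 48), then offset becomes 16 + 48 = 64
   and frame_pointer_fb_offset is set to -64, so that DW_OP_fbreg
   operands end up relative to the eliminated register.  */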
18011
18012 static void
18013 compute_frame_pointer_to_fb_displacement (HOST_WIDE_INT offset)
18014 {
18015 rtx reg, elim;
18016
18017 #ifdef FRAME_POINTER_CFA_OFFSET
18018 reg = frame_pointer_rtx;
18019 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
18020 #else
18021 reg = arg_pointer_rtx;
18022 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
18023 #endif
18024
18025 elim = (ira_use_lra_p
18026 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
18027 : eliminate_regs (reg, VOIDmode, NULL_RTX));
18028 if (GET_CODE (elim) == PLUS)
18029 {
18030 offset += INTVAL (XEXP (elim, 1));
18031 elim = XEXP (elim, 0);
18032 }
18033
18034 frame_pointer_fb_offset = -offset;
18035
18036 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
18037 in which to eliminate. This is because its stack pointer isn't
18038 directly accessible as a register within the ISA. To work around
18039 this, assume that while we cannot provide a proper value for
18040 frame_pointer_fb_offset, we won't need one either. */
18041 frame_pointer_fb_offset_valid
18042 = ((SUPPORTS_STACK_ALIGNMENT
18043 && (elim == hard_frame_pointer_rtx
18044 || elim == stack_pointer_rtx))
18045 || elim == (frame_pointer_needed
18046 ? hard_frame_pointer_rtx
18047 : stack_pointer_rtx));
18048 }
18049
18050 /* Generate a DW_AT_name attribute given some string value to be included as
18051 the value of the attribute. */
18052
18053 static void
18054 add_name_attribute (dw_die_ref die, const char *name_string)
18055 {
18056 if (name_string != NULL && *name_string != 0)
18057 {
18058 if (demangle_name_func)
18059 name_string = (*demangle_name_func) (name_string);
18060
18061 add_AT_string (die, DW_AT_name, name_string);
18062 }
18063 }
18064
18065 /* Retrieve the descriptive type of TYPE, if any; make sure it has a
18066 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
18067 of TYPE accordingly.
18068
18069 ??? This is a temporary measure until after we're able to generate
18070 regular DWARF for the complex Ada type system. */
18071
18072 static void
18073 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
18074 dw_die_ref context_die)
18075 {
18076 tree dtype;
18077 dw_die_ref dtype_die;
18078
18079 if (!lang_hooks.types.descriptive_type)
18080 return;
18081
18082 dtype = lang_hooks.types.descriptive_type (type);
18083 if (!dtype)
18084 return;
18085
18086 dtype_die = lookup_type_die (dtype);
18087 if (!dtype_die)
18088 {
18089 gen_type_die (dtype, context_die);
18090 dtype_die = lookup_type_die (dtype);
18091 gcc_assert (dtype_die);
18092 }
18093
18094 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
18095 }
18096
18097 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
18098
18099 static const char *
18100 comp_dir_string (void)
18101 {
18102 const char *wd;
18103 char *wd1;
18104 static const char *cached_wd = NULL;
18105
18106 if (cached_wd != NULL)
18107 return cached_wd;
18108
18109 wd = get_src_pwd ();
18110 if (wd == NULL)
18111 return NULL;
18112
18113 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
18114 {
18115 int wdlen;
18116
18117 wdlen = strlen (wd);
18118 wd1 = ggc_vec_alloc<char> (wdlen + 2);
18119 strcpy (wd1, wd);
18120 wd1 [wdlen] = DIR_SEPARATOR;
18121 wd1 [wdlen + 1] = 0;
18122 wd = wd1;
18123 }
18124
18125 cached_wd = remap_debug_filename (wd);
18126 return cached_wd;
18127 }
18128
18129 /* Generate a DW_AT_comp_dir attribute for DIE. */
18130
18131 static void
18132 add_comp_dir_attribute (dw_die_ref die)
18133 {
18134 const char * wd = comp_dir_string ();
18135 if (wd != NULL)
18136 add_AT_string (die, DW_AT_comp_dir, wd);
18137 }
18138
18139 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
18140 pointer computation, ...), output a representation for that bound according
18141 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
18142 loc_list_from_tree for the meaning of CONTEXT. */
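/* Illustrative summary (assumed examples): a literal bound such as 9
   can be emitted directly as a constant form; a bound stored in a
   field of an enclosing record (as Ada discriminants are) can become a
   reference to that field's DIE; and a bound computed at run time
   falls back to a DWARF expression, or to an artificial variable DIE
   carrying a location list.  */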
18143
18144 static void
18145 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
18146 int forms, const struct loc_descr_context *context)
18147 {
18148 dw_die_ref context_die, decl_die;
18149 dw_loc_list_ref list;
18150
18151 bool strip_conversions = true;
18152
18153 while (strip_conversions)
18154 switch (TREE_CODE (value))
18155 {
18156 case ERROR_MARK:
18157 case SAVE_EXPR:
18158 return;
18159
18160 CASE_CONVERT:
18161 case VIEW_CONVERT_EXPR:
18162 value = TREE_OPERAND (value, 0);
18163 break;
18164
18165 default:
18166 strip_conversions = false;
18167 break;
18168 }
18169
18170 /* If possible and permitted, output the attribute as a constant. */
18171 if ((forms & dw_scalar_form_constant) != 0
18172 && TREE_CODE (value) == INTEGER_CST)
18173 {
18174 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
18175
18176 /* If HOST_WIDE_INT is big enough then represent the bound as
18177 a constant value. We need to choose a form based on
18178 whether the type is signed or unsigned. We cannot just
18179 call add_AT_unsigned if the value itself is positive
18180 (add_AT_unsigned might add the unsigned value encoded as
18181 DW_FORM_data[1248]). Some DWARF consumers will look up the
18182 bounds type and then sign extend any unsigned values found
18183 for signed types. This is needed only for
18184 DW_AT_{lower,upper}_bound, since for most other attributes,
18185 consumers will treat DW_FORM_data[1248] as unsigned values,
18186 regardless of the underlying type. */
18187 if (prec <= HOST_BITS_PER_WIDE_INT
18188 || tree_fits_uhwi_p (value))
18189 {
18190 if (TYPE_UNSIGNED (TREE_TYPE (value)))
18191 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
18192 else
18193 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
18194 }
18195 else
18196 /* Otherwise represent the bound as an unsigned value with
18197 the precision of its type. The precision and signedness
18198 of the type will be necessary to re-interpret it
18199 unambiguously. */
18200 add_AT_wide (die, attr, value);
18201 return;
18202 }
18203
18204 /* Otherwise, if it is both possible and permitted, output a reference to
18205 another DIE. */
18206 if ((forms & dw_scalar_form_reference) != 0)
18207 {
18208 tree decl = NULL_TREE;
18209
18210 /* Some type attributes reference an outer type. For instance, the upper
18211 bound of an array may reference an embedding record (this happens in
18212 Ada). */
18213 if (TREE_CODE (value) == COMPONENT_REF
18214 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
18215 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
18216 decl = TREE_OPERAND (value, 1);
18217
18218 else if (TREE_CODE (value) == VAR_DECL
18219 || TREE_CODE (value) == PARM_DECL
18220 || TREE_CODE (value) == RESULT_DECL)
18221 decl = value;
18222
18223 if (decl != NULL_TREE)
18224 {
18225 dw_die_ref decl_die = lookup_decl_die (decl);
18226
18227 /* ??? Can this happen, or should the variable have been bound
18228 first? Probably it can, since I imagine that we try to create
18229 the types of parameters in the order in which they exist in
18230 the list, and won't have created a forward reference to a
18231 later parameter. */
18232 if (decl_die != NULL)
18233 {
18234 add_AT_die_ref (die, attr, decl_die);
18235 return;
18236 }
18237 }
18238 }
18239
18240 /* Last chance: try to create a stack operation procedure to evaluate the
18241 value. Do nothing if even that is not possible or permitted. */
18242 if ((forms & dw_scalar_form_exprloc) == 0)
18243 return;
18244
18245 list = loc_list_from_tree (value, 2, context);
18246 if (list == NULL || single_element_loc_list_p (list))
18247 {
18248 /* If this attribute is neither a reference nor a constant, it is
18249 a DWARF expression rather than a location description. For that,
18250 loc_list_from_tree (value, 0, &context) is needed. */
18251 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
18252 if (list2 && single_element_loc_list_p (list2))
18253 {
18254 add_AT_loc (die, attr, list2->expr);
18255 return;
18256 }
18257 }
18258
18259 /* If that failed to give a single element location list, fall back to
18260 outputting this as a reference... still if permitted. */
18261 if (list == NULL || (forms & dw_scalar_form_reference) == 0)
18262 return;
18263
18264 if (current_function_decl == 0)
18265 context_die = comp_unit_die ();
18266 else
18267 context_die = lookup_decl_die (current_function_decl);
18268
18269 decl_die = new_die (DW_TAG_variable, context_die, value);
18270 add_AT_flag (decl_die, DW_AT_artificial, 1);
18271 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
18272 context_die);
18273 add_AT_location_description (decl_die, DW_AT_location, list);
18274 add_AT_die_ref (die, attr, decl_die);
18275 }
18276
18277 /* Return the default for DW_AT_lower_bound, or -1 if there is no
18278 default. */
18279
18280 static int
18281 lower_bound_default (void)
18282 {
18283 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
18284 {
18285 case DW_LANG_C:
18286 case DW_LANG_C89:
18287 case DW_LANG_C99:
18288 case DW_LANG_C11:
18289 case DW_LANG_C_plus_plus:
18290 case DW_LANG_C_plus_plus_11:
18291 case DW_LANG_C_plus_plus_14:
18292 case DW_LANG_ObjC:
18293 case DW_LANG_ObjC_plus_plus:
18294 case DW_LANG_Java:
18295 return 0;
18296 case DW_LANG_Fortran77:
18297 case DW_LANG_Fortran90:
18298 case DW_LANG_Fortran95:
18299 case DW_LANG_Fortran03:
18300 case DW_LANG_Fortran08:
18301 return 1;
18302 case DW_LANG_UPC:
18303 case DW_LANG_D:
18304 case DW_LANG_Python:
18305 return dwarf_version >= 4 ? 0 : -1;
18306 case DW_LANG_Ada95:
18307 case DW_LANG_Ada83:
18308 case DW_LANG_Cobol74:
18309 case DW_LANG_Cobol85:
18310 case DW_LANG_Pascal83:
18311 case DW_LANG_Modula2:
18312 case DW_LANG_PLI:
18313 return dwarf_version >= 4 ? 1 : -1;
18314 default:
18315 return -1;
18316 }
18317 }
18318
18319 /* Given a tree node describing an array bound (either lower or upper) output
18320 a representation for that bound. */
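/* Illustrative example (assumed): for the C declaration

     int a[5];

   the domain is [0, 4]; because lower_bound_default () returns 0 for
   the C family, only DW_AT_upper_bound 4 is emitted, whereas a Fortran
   "DIMENSION A(5)" keeps its default lower bound of 1 and likewise
   omits DW_AT_lower_bound.  */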
18321
18322 static void
18323 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
18324 tree bound, const struct loc_descr_context *context)
18325 {
18326 int dflt;
18327
18328 while (1)
18329 switch (TREE_CODE (bound))
18330 {
18331 /* Strip all conversions. */
18332 CASE_CONVERT:
18333 case VIEW_CONVERT_EXPR:
18334 bound = TREE_OPERAND (bound, 0);
18335 break;
18336
18337 /* All fixed-bounds are represented by INTEGER_CST nodes. Lower bounds
18338 are even omitted when they are the default. */
18339 case INTEGER_CST:
18340 /* If the value for this bound is the default one, we can even omit the
18341 attribute. */
18342 if (bound_attr == DW_AT_lower_bound
18343 && tree_fits_shwi_p (bound)
18344 && (dflt = lower_bound_default ()) != -1
18345 && tree_to_shwi (bound) == dflt)
18346 return;
18347
18348 /* FALLTHRU */
18349
18350 default:
18351 /* Because of the complex interactions there can be with other GNAT
18352 encodings, GDB isn't ready yet to handle a proper DWARF description
18353 for self-referential subrange bounds: let GNAT encodings do the
18354 magic in such a case. */
18355 if (gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
18356 && contains_placeholder_p (bound))
18357 return;
18358
18359 add_scalar_info (subrange_die, bound_attr, bound,
18360 dw_scalar_form_constant
18361 | dw_scalar_form_exprloc
18362 | dw_scalar_form_reference,
18363 context);
18364 return;
18365 }
18366 }
18367
18368 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
18369 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
18370 Note that the block of subscript information for an array type also
18371 includes information about the element type of the given array type.
18372
18373 This function reuses previously set type and bound information if
18374 available. */
18375
18376 static void
18377 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
18378 {
18379 unsigned dimension_number;
18380 tree lower, upper;
18381 dw_die_ref child = type_die->die_child;
18382
18383 for (dimension_number = 0;
18384 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
18385 type = TREE_TYPE (type), dimension_number++)
18386 {
18387 tree domain = TYPE_DOMAIN (type);
18388
18389 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
18390 break;
18391
18392 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
18393 and (in GNU C only) variable bounds. Handle all three forms
18394 here. */
18395
18396 /* Find and reuse a previously generated DW_TAG_subrange_type if
18397 available.
18398
18399 For multi-dimensional arrays, as we iterate through the
18400 various dimensions in the enclosing for loop above, we also
18401 iterate through the DIE children and pick at each
18402 DW_TAG_subrange_type previously generated (if available).
18403 Each child DW_TAG_subrange_type DIE describes the range of
18404 the current dimension. At this point we should have as many
18405 DW_TAG_subrange_type's as we have dimensions in the
18406 array. */
18407 dw_die_ref subrange_die = NULL;
18408 if (child)
18409 while (1)
18410 {
18411 child = child->die_sib;
18412 if (child->die_tag == DW_TAG_subrange_type)
18413 subrange_die = child;
18414 if (child == type_die->die_child)
18415 {
18416 /* If we wrapped around, stop looking next time. */
18417 child = NULL;
18418 break;
18419 }
18420 if (child->die_tag == DW_TAG_subrange_type)
18421 break;
18422 }
18423 if (!subrange_die)
18424 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
18425
18426 if (domain)
18427 {
18428 /* We have an array type with specified bounds. */
18429 lower = TYPE_MIN_VALUE (domain);
18430 upper = TYPE_MAX_VALUE (domain);
18431
18432 /* Define the index type. */
18433 if (TREE_TYPE (domain)
18434 && !get_AT (subrange_die, DW_AT_type))
18435 {
18436 /* ??? This is probably an Ada unnamed subrange type. Ignore the
18437 TREE_TYPE field. We can't emit debug info for this
18438 because it is an unnamed integral type. */
18439 if (TREE_CODE (domain) == INTEGER_TYPE
18440 && TYPE_NAME (domain) == NULL_TREE
18441 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
18442 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
18443 ;
18444 else
18445 add_type_attribute (subrange_die, TREE_TYPE (domain),
18446 TYPE_UNQUALIFIED, false, type_die);
18447 }
18448
18449 /* ??? If upper is NULL, the array has unspecified length,
18450 but it does have a lower bound. This happens with Fortran
18451 dimension arr(N:*).
18452 Since the debugger is definitely going to need to know N
18453 to produce useful results, go ahead and output the lower
18454 bound solo, and hope the debugger can cope. */
18455
18456 if (!get_AT (subrange_die, DW_AT_lower_bound))
18457 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
18458 if (upper && !get_AT (subrange_die, DW_AT_upper_bound))
18459 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
18460 }
18461
18462 /* Otherwise we have an array type with an unspecified length. The
18463 DWARF-2 spec does not say how to handle this; let's just leave out the
18464 bounds. */
18465 }
18466 }
18467
18468 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
18469
18470 static void
18471 add_byte_size_attribute (dw_die_ref die, tree tree_node)
18472 {
18473 dw_die_ref decl_die;
18474 HOST_WIDE_INT size;
18475 dw_loc_descr_ref size_expr = NULL;
18476
18477 switch (TREE_CODE (tree_node))
18478 {
18479 case ERROR_MARK:
18480 size = 0;
18481 break;
18482 case ENUMERAL_TYPE:
18483 case RECORD_TYPE:
18484 case UNION_TYPE:
18485 case QUAL_UNION_TYPE:
18486 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
18487 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
18488 {
18489 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
18490 return;
18491 }
18492 size_expr = type_byte_size (tree_node, &size);
18493 break;
18494 case FIELD_DECL:
18495 /* For a data member of a struct or union, the DW_AT_byte_size is
18496 generally given as the number of bytes normally allocated for an
18497 object of the *declared* type of the member itself. This is true
18498 even for bit-fields. */
18499 size = int_size_in_bytes (field_type (tree_node));
18500 break;
18501 default:
18502 gcc_unreachable ();
18503 }
18504
18505 /* Support for dynamically-sized objects was introduced by DWARFv3.
18506 At the moment, GDB does not handle variable byte sizes very well,
18507 though. */
18508 if ((dwarf_version >= 3 || !dwarf_strict)
18509 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
18510 && size_expr != NULL)
18511 add_AT_loc (die, DW_AT_byte_size, size_expr);
18512
18513 /* Note that `size' might be -1 when we get to this point. If it is, that
18514 indicates that the byte size of the entity in question is variable and
18515 that we could not generate a DWARF expression that computes it. */
18516 if (size >= 0)
18517 add_AT_unsigned (die, DW_AT_byte_size, size);
18518 }
18519
18520 /* For a FIELD_DECL node which represents a bit-field, output an attribute
18521 which specifies the distance in bits from the highest order bit of the
18522 "containing object" for the bit-field to the highest order bit of the
18523 bit-field itself.
18524
18525 For any given bit-field, the "containing object" is a hypothetical object
18526 (of some integral or enum type) within which the given bit-field lives. The
18527 type of this hypothetical "containing object" is always the same as the
18528 declared type of the individual bit-field itself. The determination of the
18529 exact location of the "containing object" for a bit-field is rather
18530 complicated. It's handled by the `field_byte_offset' function (above).
18531
18532 CTX is required: see the comment for VLR_CONTEXT.
18533
18534 Note that it is the size (in bytes) of the hypothetical "containing object"
18535 which will be given in the DW_AT_byte_size attribute for this bit-field.
18536 (See `byte_size_attribute' above). */
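/* Worked example (little-endian, purely illustrative): for

     struct s { unsigned a : 3; unsigned b : 5; };

   with a 4-byte containing object at byte offset 0, field B has
   int_bit_position 3 and DECL_SIZE 5, so
   highest_order_field_bit_offset = 3 + 5 = 8,
   highest_order_object_bit_offset = 0 + 32 = 32, and the emitted
   DW_AT_bit_offset is 32 - 8 = 24.  */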
18537
18538 static inline void
18539 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
18540 {
18541 HOST_WIDE_INT object_offset_in_bytes;
18542 tree original_type = DECL_BIT_FIELD_TYPE (decl);
18543 HOST_WIDE_INT bitpos_int;
18544 HOST_WIDE_INT highest_order_object_bit_offset;
18545 HOST_WIDE_INT highest_order_field_bit_offset;
18546 HOST_WIDE_INT bit_offset;
18547
18548 field_byte_offset (decl, ctx, &object_offset_in_bytes);
18549
18550 /* Must be a field and a bit field. */
18551 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
18552
18553 /* We can't yet handle bit-fields whose offsets are variable, so if we
18554 encounter such things, just return without generating any attribute
18555 whatsoever. Likewise if the size is variable or too large. */
18556 if (! tree_fits_shwi_p (bit_position (decl))
18557 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
18558 return;
18559
18560 bitpos_int = int_bit_position (decl);
18561
18562 /* Note that the bit offset is always the distance (in bits) from the
18563 highest-order bit of the "containing object" to the highest-order bit of
18564 the bit-field itself. Since the "high-order end" of any object or field
18565 is different on big-endian and little-endian machines, the computation
18566 below must take account of these differences. */
18567 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
18568 highest_order_field_bit_offset = bitpos_int;
18569
18570 if (! BYTES_BIG_ENDIAN)
18571 {
18572 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
18573 highest_order_object_bit_offset +=
18574 simple_type_size_in_bits (original_type);
18575 }
18576
18577 bit_offset
18578 = (! BYTES_BIG_ENDIAN
18579 ? highest_order_object_bit_offset - highest_order_field_bit_offset
18580 : highest_order_field_bit_offset - highest_order_object_bit_offset);
18581
18582 if (bit_offset < 0)
18583 add_AT_int (die, DW_AT_bit_offset, bit_offset);
18584 else
18585 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
18586 }
18587
18588 /* For a FIELD_DECL node which represents a bit field, output an attribute
18589 which specifies the length in bits of the given field. */
18590
18591 static inline void
18592 add_bit_size_attribute (dw_die_ref die, tree decl)
18593 {
18594 /* Must be a field and a bit field. */
18595 gcc_assert (TREE_CODE (decl) == FIELD_DECL
18596 && DECL_BIT_FIELD_TYPE (decl));
18597
18598 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
18599 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
18600 }
18601
18602 /* If the compiled language is ANSI C, then add a 'prototyped'
18603 attribute if argument types are given for the parameters of a function. */
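/* Example (illustrative): in C, "int f (int);" is a prototype and gets
   DW_AT_prototyped, while an old-style "int f ();" declaration does
   not, because prototype_p () is false for it.  */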
18604
18605 static inline void
18606 add_prototyped_attribute (dw_die_ref die, tree func_type)
18607 {
18608 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
18609 {
18610 case DW_LANG_C:
18611 case DW_LANG_C89:
18612 case DW_LANG_C99:
18613 case DW_LANG_C11:
18614 case DW_LANG_ObjC:
18615 if (prototype_p (func_type))
18616 add_AT_flag (die, DW_AT_prototyped, 1);
18617 break;
18618 default:
18619 break;
18620 }
18621 }
18622
18623 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
18624 by looking in either the type declaration or object declaration
18625 equate table. */
18626
18627 static inline dw_die_ref
18628 add_abstract_origin_attribute (dw_die_ref die, tree origin)
18629 {
18630 dw_die_ref origin_die = NULL;
18631
18632 if (TREE_CODE (origin) != FUNCTION_DECL)
18633 {
18634 /* We may have gotten separated from the block for the inlined
18635 function, if we're in an exception handler or some such; make
18636 sure that the abstract function has been written out.
18637
18638 Doing this for nested functions is wrong, however; functions are
18639 distinct units, and our context might not even be inline. */
18640 tree fn = origin;
18641
18642 if (TYPE_P (fn))
18643 fn = TYPE_STUB_DECL (fn);
18644
18645 fn = decl_function_context (fn);
18646 if (fn)
18647 dwarf2out_abstract_function (fn);
18648 }
18649
18650 if (DECL_P (origin))
18651 origin_die = lookup_decl_die (origin);
18652 else if (TYPE_P (origin))
18653 origin_die = lookup_type_die (origin);
18654
18655 /* XXX: Functions that are never lowered don't always have correct block
18656 trees (in the case of Java, they simply have no block tree; likewise in some other
18657 languages). For these functions, there is nothing we can really do to
18658 output correct debug info for inlined functions in all cases. Rather
18659 than die, we'll just produce deficient debug info now, in that we will
18660 have variables without a proper abstract origin. In the future, when all
18661 functions are lowered, we should re-add a gcc_assert (origin_die)
18662 here. */
18663
18664 if (origin_die)
18665 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
18666 return origin_die;
18667 }
18668
18669 /* We do not currently support the pure_virtual attribute. */
18670
18671 static inline void
18672 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
18673 {
18674 if (DECL_VINDEX (func_decl))
18675 {
18676 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
18677
18678 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
18679 add_AT_loc (die, DW_AT_vtable_elem_location,
18680 new_loc_descr (DW_OP_constu,
18681 tree_to_shwi (DECL_VINDEX (func_decl)),
18682 0));
18683
18684 /* GNU extension: Record what type this method came from originally. */
18685 if (debug_info_level > DINFO_LEVEL_TERSE
18686 && DECL_CONTEXT (func_decl))
18687 add_AT_die_ref (die, DW_AT_containing_type,
18688 lookup_type_die (DECL_CONTEXT (func_decl)));
18689 }
18690 }
18691 \f
18692 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
18693 given decl. This used to be a vendor extension until DWARF 4
18694 standardized it. */
18695
18696 static void
18697 add_linkage_attr (dw_die_ref die, tree decl)
18698 {
18699 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
18700
18701 /* Mimic what assemble_name_raw does with a leading '*'. */
18702 if (name[0] == '*')
18703 name = &name[1];
18704
18705 if (dwarf_version >= 4)
18706 add_AT_string (die, DW_AT_linkage_name, name);
18707 else
18708 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
18709 }
18710
18711 /* Add source coordinate attributes for the given decl. */
18712
18713 static void
18714 add_src_coords_attributes (dw_die_ref die, tree decl)
18715 {
18716 expanded_location s;
18717
18718 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
18719 return;
18720 s = expand_location (DECL_SOURCE_LOCATION (decl));
18721 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
18722 add_AT_unsigned (die, DW_AT_decl_line, s.line);
18723 }
18724
18725 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
18726
18727 static void
18728 add_linkage_name_raw (dw_die_ref die, tree decl)
18729 {
18730 /* Defer until we have an assembler name set. */
18731 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
18732 {
18733 limbo_die_node *asm_name;
18734
18735 asm_name = ggc_cleared_alloc<limbo_die_node> ();
18736 asm_name->die = die;
18737 asm_name->created_for = decl;
18738 asm_name->next = deferred_asm_name;
18739 deferred_asm_name = asm_name;
18740 }
18741 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
18742 add_linkage_attr (die, decl);
18743 }
18744
18745 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
18746
18747 static void
18748 add_linkage_name (dw_die_ref die, tree decl)
18749 {
18750 if (debug_info_level > DINFO_LEVEL_NONE
18751 && (TREE_CODE (decl) == FUNCTION_DECL || TREE_CODE (decl) == VAR_DECL)
18752 && TREE_PUBLIC (decl)
18753 && !(TREE_CODE (decl) == VAR_DECL && DECL_REGISTER (decl))
18754 && die->die_tag != DW_TAG_member)
18755 add_linkage_name_raw (die, decl);
18756 }
18757
18758 /* Add a DW_AT_name attribute and source coordinate attribute for the
18759 given decl, but only if it actually has a name. */
18760
18761 static void
18762 add_name_and_src_coords_attributes (dw_die_ref die, tree decl)
18763 {
18764 tree decl_name;
18765
18766 decl_name = DECL_NAME (decl);
18767 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
18768 {
18769 const char *name = dwarf2_name (decl, 0);
18770 if (name)
18771 add_name_attribute (die, name);
18772 if (! DECL_ARTIFICIAL (decl))
18773 add_src_coords_attributes (die, decl);
18774
18775 add_linkage_name (die, decl);
18776 }
18777
18778 #ifdef VMS_DEBUGGING_INFO
18779 /* Get the function's name, as described by its RTL. This may be different
18780 from the DECL_NAME name used in the source file. */
18781 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
18782 {
18783 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
18784 XEXP (DECL_RTL (decl), 0), false);
18785 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
18786 }
18787 #endif /* VMS_DEBUGGING_INFO */
18788 }
18789
18790 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
18791
18792 static void
18793 add_discr_value (dw_die_ref die, dw_discr_value *value)
18794 {
18795 dw_attr_node attr;
18796
18797 attr.dw_attr = DW_AT_discr_value;
18798 attr.dw_attr_val.val_class = dw_val_class_discr_value;
18799 attr.dw_attr_val.val_entry = NULL;
18800 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
18801 if (value->pos)
18802 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
18803 else
18804 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
18805 add_dwarf_attr (die, &attr);
18806 }
18807
18808 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
18809
18810 static void
18811 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
18812 {
18813 dw_attr_node attr;
18814
18815 attr.dw_attr = DW_AT_discr_list;
18816 attr.dw_attr_val.val_class = dw_val_class_discr_list;
18817 attr.dw_attr_val.val_entry = NULL;
18818 attr.dw_attr_val.v.val_discr_list = discr_list;
18819 add_dwarf_attr (die, &attr);
18820 }
18821
18822 static inline dw_discr_list_ref
18823 AT_discr_list (dw_attr_node *attr)
18824 {
18825 return attr->dw_attr_val.v.val_discr_list;
18826 }
18827
18828 #ifdef VMS_DEBUGGING_INFO
18829 /* Output the debug main pointer die for VMS */
18830
18831 void
18832 dwarf2out_vms_debug_main_pointer (void)
18833 {
18834 char label[MAX_ARTIFICIAL_LABEL_BYTES];
18835 dw_die_ref die;
18836
18837 /* Allocate the VMS debug main subprogram die. */
18838 die = ggc_cleared_alloc<die_node> ();
18839 die->die_tag = DW_TAG_subprogram;
18840 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
18841 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
18842 current_function_funcdef_no);
18843 add_AT_lbl_id (die, DW_AT_entry_pc, label);
18844
18845 /* Make it the first child of comp_unit_die (). */
18846 die->die_parent = comp_unit_die ();
18847 if (comp_unit_die ()->die_child)
18848 {
18849 die->die_sib = comp_unit_die ()->die_child->die_sib;
18850 comp_unit_die ()->die_child->die_sib = die;
18851 }
18852 else
18853 {
18854 die->die_sib = die;
18855 comp_unit_die ()->die_child = die;
18856 }
18857 }
18858 #endif /* VMS_DEBUGGING_INFO */
18859
18860 /* Push a new declaration scope. */
18861
18862 static void
18863 push_decl_scope (tree scope)
18864 {
18865 vec_safe_push (decl_scope_table, scope);
18866 }
18867
18868 /* Pop a declaration scope. */
18869
18870 static inline void
18871 pop_decl_scope (void)
18872 {
18873 decl_scope_table->pop ();
18874 }
18875
18876 /* walk_tree helper function for uses_local_type, below. */
18877
18878 static tree
18879 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
18880 {
18881 if (!TYPE_P (*tp))
18882 *walk_subtrees = 0;
18883 else
18884 {
18885 tree name = TYPE_NAME (*tp);
18886 if (name && DECL_P (name) && decl_function_context (name))
18887 return *tp;
18888 }
18889 return NULL_TREE;
18890 }
18891
18892 /* If TYPE involves a function-local type (including a local typedef to a
18893 non-local type), returns that type; otherwise returns NULL_TREE. */
18894
18895 static tree
18896 uses_local_type (tree type)
18897 {
18898 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
18899 return used;
18900 }
18901
18902 /* Return the DIE for the scope that immediately contains this type.
18903 Non-named types that do not involve a function-local type get global
18904 scope. Named types nested in namespaces or other types get their
18905 containing scope. All other types (i.e. function-local named types) get
18906 the current active scope. */
18907
18908 static dw_die_ref
18909 scope_die_for (tree t, dw_die_ref context_die)
18910 {
18911 dw_die_ref scope_die = NULL;
18912 tree containing_scope;
18913
18914 /* Non-types always go in the current scope. */
18915 gcc_assert (TYPE_P (t));
18916
18917 /* Use the scope of the typedef, rather than the scope of the type
18918 it refers to. */
18919 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
18920 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
18921 else
18922 containing_scope = TYPE_CONTEXT (t);
18923
18924 /* Use the containing namespace if there is one. */
18925 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
18926 {
18927 if (context_die == lookup_decl_die (containing_scope))
18928 /* OK */;
18929 else if (debug_info_level > DINFO_LEVEL_TERSE)
18930 context_die = get_context_die (containing_scope);
18931 else
18932 containing_scope = NULL_TREE;
18933 }
18934
18935 /* Ignore function type "scopes" from the C frontend. They mean that
18936 a tagged type is local to a parmlist of a function declarator, but
18937 that isn't useful to DWARF. */
18938 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
18939 containing_scope = NULL_TREE;
18940
18941 if (SCOPE_FILE_SCOPE_P (containing_scope))
18942 {
18943 /* If T uses a local type keep it local as well, to avoid references
18944 to function-local DIEs from outside the function. */
18945 if (current_function_decl && uses_local_type (t))
18946 scope_die = context_die;
18947 else
18948 scope_die = comp_unit_die ();
18949 }
18950 else if (TYPE_P (containing_scope))
18951 {
18952 /* For types, we can just look up the appropriate DIE. */
18953 if (debug_info_level > DINFO_LEVEL_TERSE)
18954 scope_die = get_context_die (containing_scope);
18955 else
18956 {
18957 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
18958 if (scope_die == NULL)
18959 scope_die = comp_unit_die ();
18960 }
18961 }
18962 else
18963 scope_die = context_die;
18964
18965 return scope_die;
18966 }
18967
18968 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
18969
18970 static inline int
18971 local_scope_p (dw_die_ref context_die)
18972 {
18973 for (; context_die; context_die = context_die->die_parent)
18974 if (context_die->die_tag == DW_TAG_inlined_subroutine
18975 || context_die->die_tag == DW_TAG_subprogram)
18976 return 1;
18977
18978 return 0;
18979 }
18980
18981 /* Returns nonzero if CONTEXT_DIE is a class. */
18982
18983 static inline int
18984 class_scope_p (dw_die_ref context_die)
18985 {
18986 return (context_die
18987 && (context_die->die_tag == DW_TAG_structure_type
18988 || context_die->die_tag == DW_TAG_class_type
18989 || context_die->die_tag == DW_TAG_interface_type
18990 || context_die->die_tag == DW_TAG_union_type));
18991 }
18992
18993 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
18994 whether or not to treat a DIE in this context as a declaration. */
18995
18996 static inline int
18997 class_or_namespace_scope_p (dw_die_ref context_die)
18998 {
18999 return (class_scope_p (context_die)
19000 || (context_die && context_die->die_tag == DW_TAG_namespace));
19001 }
19002
19003 /* Many forms of DIEs require a "type description" attribute. This
19004 routine locates the proper "type descriptor" die for the type given
19005 by 'type' plus any additional qualifiers given by 'cv_quals', and
19006 adds a DW_AT_type attribute below the given die. */
19007
19008 static void
19009 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
19010 bool reverse, dw_die_ref context_die)
19011 {
19012 enum tree_code code = TREE_CODE (type);
19013 dw_die_ref type_die = NULL;
19014
19015 /* ??? If this type is an unnamed subrange type of an integral, floating-point
19016 or fixed-point type, use the inner type. This is because we have no
19017 support for unnamed types in base_type_die. This can happen if this is
19018 an Ada subrange type. The correct solution is to emit a subrange type DIE. */
19019 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
19020 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
19021 type = TREE_TYPE (type), code = TREE_CODE (type);
19022
19023 if (code == ERROR_MARK
19024 /* Handle a special case. For functions whose return type is void, we
19025 generate *no* type attribute. (Note that no object may have type
19026 `void', so this only applies to function return types). */
19027 || code == VOID_TYPE)
19028 return;
19029
19030 type_die = modified_type_die (type,
19031 cv_quals | TYPE_QUALS_NO_ADDR_SPACE (type),
19032 reverse,
19033 context_die);
19034
19035 if (type_die != NULL)
19036 add_AT_die_ref (object_die, DW_AT_type, type_die);
19037 }
19038
19039 /* Given an object die, add the calling convention attribute for the
19040 function call type. */
19041 static void
19042 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
19043 {
19044 enum dwarf_calling_convention value = DW_CC_normal;
19045
19046 value = ((enum dwarf_calling_convention)
19047 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
19048
19049 if (is_fortran ()
19050 && !strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)), "MAIN__"))
19051 {
19052 /* DWARF 2 doesn't provide a way to identify a program's source-level
19053 entry point. DW_AT_calling_convention attributes are only meant
19054 to describe functions' calling conventions. However, lacking a
19055 better way to signal the Fortran main program, we used this for
19056 a long time, following existing custom. Now, DWARF 4 has
19057 DW_AT_main_subprogram, which we add below, but some tools still
19058 rely on the old way, which we thus keep. */
19059 value = DW_CC_program;
19060
19061 if (dwarf_version >= 4 || !dwarf_strict)
19062 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
19063 }
19064
19065 /* Only add the attribute if the backend requests it, and the value
19066 is not DW_CC_normal. */
19067 if (value && (value != DW_CC_normal))
19068 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
19069 }
19070
19071 /* Given a tree pointer to a struct, class, union, or enum type node, return
19072 a pointer to the (string) tag name for the given type, or zero if the type
19073 was declared without a tag. */
19074
19075 static const char *
19076 type_tag (const_tree type)
19077 {
19078 const char *name = 0;
19079
19080 if (TYPE_NAME (type) != 0)
19081 {
19082 tree t = 0;
19083
19084 /* Find the IDENTIFIER_NODE for the type name. */
19085 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
19086 && !TYPE_NAMELESS (type))
19087 t = TYPE_NAME (type);
19088
19089 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
19090 a TYPE_DECL node, regardless of whether or not a `typedef' was
19091 involved. */
19092 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
19093 && ! DECL_IGNORED_P (TYPE_NAME (type)))
19094 {
19095 /* We want to be extra verbose. Don't call dwarf_name if
19096 DECL_NAME isn't set. The default hook for decl_printable_name
19097 doesn't like that, and in this context it's correct to return
19098 0, instead of "<anonymous>" or the like. */
19099 if (DECL_NAME (TYPE_NAME (type))
19100 && !DECL_NAMELESS (TYPE_NAME (type)))
19101 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
19102 }
19103
19104 /* Now get the name as a string, or invent one. */
19105 if (!name && t != 0)
19106 name = IDENTIFIER_POINTER (t);
19107 }
19108
19109 return (name == 0 || *name == '\0') ? 0 : name;
19110 }
19111
19112 /* Return the type associated with a data member, make a special check
19113 for bit field types. */
19114
19115 static inline tree
19116 member_declared_type (const_tree member)
19117 {
19118 return (DECL_BIT_FIELD_TYPE (member)
19119 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
19120 }
19121
19122 /* Get the decl's label, as described by its RTL. This may be different
19123 from the DECL_NAME name used in the source file. */
19124
19125 #if 0
19126 static const char *
19127 decl_start_label (tree decl)
19128 {
19129 rtx x;
19130 const char *fnname;
19131
19132 x = DECL_RTL (decl);
19133 gcc_assert (MEM_P (x));
19134
19135 x = XEXP (x, 0);
19136 gcc_assert (GET_CODE (x) == SYMBOL_REF);
19137
19138 fnname = XSTR (x, 0);
19139 return fnname;
19140 }
19141 #endif
19142 \f
19143 /* For variable-length arrays that have been previously generated, but
19144 may be incomplete due to missing subscript info, fill the subscript
19145 info. Return TRUE if this is one of those cases. */
19146 static bool
19147 fill_variable_array_bounds (tree type)
19148 {
19149 if (TREE_ASM_WRITTEN (type)
19150 && TREE_CODE (type) == ARRAY_TYPE
19151 && variably_modified_type_p (type, NULL))
19152 {
19153 dw_die_ref array_die = lookup_type_die (type);
19154 if (!array_die)
19155 return false;
19156 add_subscript_info (array_die, type, !is_ada ());
19157 return true;
19158 }
19159 return false;
19160 }
19161
19162 /* These routines generate the internal representation of the DIE's for
19163 the compilation unit. Debugging information is collected by walking
19164 the declaration trees passed in from dwarf2out_decl(). */
19165
19166 static void
19167 gen_array_type_die (tree type, dw_die_ref context_die)
19168 {
19169 dw_die_ref array_die;
19170
19171 /* GNU compilers represent multidimensional array types as sequences of one
19172 dimensional array types whose element types are themselves array types.
19173 We sometimes squish that down to a single array_type DIE with multiple
19174 subscripts in the Dwarf debugging info. The draft Dwarf specification
19175 says that we are allowed to do this kind of compression in C, because
19176 there is no difference between an array of arrays and a multidimensional
19177 array. We don't do this for Ada, to remain as close as possible to the
19178 actual representation, which is especially important given the language's
19179 flexibility with respect to arrays of variable size. */
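/* Illustrative example (assumed): the C declaration

     int m[2][3];

   is represented internally as an array of arrays but is emitted as a
   single DW_TAG_array_type with two DW_TAG_subrange_type children
   (bounds [0,1] and [0,2]); for Ada the nested array types are kept
   separate.  */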
19180
19181 bool collapse_nested_arrays = !is_ada ();
19182
19183 if (fill_variable_array_bounds (type))
19184 return;
19185
19186 dw_die_ref scope_die = scope_die_for (type, context_die);
19187 tree element_type;
19188
19189 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
19190 DW_TAG_string_type doesn't have DW_AT_type attribute). */
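  /* Illustrative example (assumed, not from a testcase): for a Fortran
     declaration such as

         character(len=10) :: s

     this path emits a DW_TAG_string_type DIE with DW_AT_byte_size 10
     rather than an array-of-character DIE.  */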
19191 if (TYPE_STRING_FLAG (type)
19192 && TREE_CODE (type) == ARRAY_TYPE
19193 && is_fortran ()
19194 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
19195 {
19196 HOST_WIDE_INT size;
19197
19198 array_die = new_die (DW_TAG_string_type, scope_die, type);
19199 add_name_attribute (array_die, type_tag (type));
19200 equate_type_number_to_die (type, array_die);
19201 size = int_size_in_bytes (type);
19202 if (size >= 0)
19203 add_AT_unsigned (array_die, DW_AT_byte_size, size);
19204 else if (TYPE_DOMAIN (type) != NULL_TREE
19205 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE
19206 && DECL_P (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
19207 {
19208 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
19209 dw_loc_list_ref loc = loc_list_from_tree (szdecl, 2, NULL);
19210
19211 size = int_size_in_bytes (TREE_TYPE (szdecl));
19212 if (loc && size > 0)
19213 {
19214 add_AT_location_description (array_die, DW_AT_string_length, loc);
19215 if (size != DWARF2_ADDR_SIZE)
19216 add_AT_unsigned (array_die, DW_AT_byte_size, size);
19217 }
19218 }
19219 return;
19220 }
19221
19222 array_die = new_die (DW_TAG_array_type, scope_die, type);
19223 add_name_attribute (array_die, type_tag (type));
19224 equate_type_number_to_die (type, array_die);
19225
19226 if (TREE_CODE (type) == VECTOR_TYPE)
19227 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
19228
19229 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
19230 if (is_fortran ()
19231 && TREE_CODE (type) == ARRAY_TYPE
19232 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
19233 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
19234 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
19235
19236 #if 0
19237 /* We default the array ordering. SDB will probably do
19238 the right things even if DW_AT_ordering is not present. It's not even
19239 an issue until we start to get into multidimensional arrays anyway. If
19240 SDB is ever caught doing the Wrong Thing for multi-dimensional arrays,
19241 then we'll have to put the DW_AT_ordering attribute back in. (But if
19242 and when we find out that we need to put these in, we will only do so
19243       for multidimensional arrays.)  */
19244 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
19245 #endif
19246
19247 if (TREE_CODE (type) == VECTOR_TYPE)
19248 {
19249 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
19250 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
19251 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
19252 add_bound_info (subrange_die, DW_AT_upper_bound,
19253 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
19254 }
19255 else
19256 add_subscript_info (array_die, type, collapse_nested_arrays);
19257
19258 /* Add representation of the type of the elements of this array type and
19259 emit the corresponding DIE if we haven't done it already. */
19260 element_type = TREE_TYPE (type);
19261 if (collapse_nested_arrays)
19262 while (TREE_CODE (element_type) == ARRAY_TYPE)
19263 {
19264 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
19265 break;
19266 element_type = TREE_TYPE (element_type);
19267 }
19268
19269 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
19270 TREE_CODE (type) == ARRAY_TYPE
19271 && TYPE_REVERSE_STORAGE_ORDER (type),
19272 context_die);
19273
19274 add_gnat_descriptive_type_attribute (array_die, type, context_die);
19275 if (TYPE_ARTIFICIAL (type))
19276 add_AT_flag (array_die, DW_AT_artificial, 1);
19277
19278 if (get_AT (array_die, DW_AT_name))
19279 add_pubtype (type, array_die);
19280 }
19281
19282 /* This routine generates the DIE for an array with a hidden descriptor;
19283    the details are filled into *INFO by a langhook.  */
19284
19285 static void
19286 gen_descr_array_type_die (tree type, struct array_descr_info *info,
19287 dw_die_ref context_die)
19288 {
19289 const dw_die_ref scope_die = scope_die_for (type, context_die);
19290 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
19291 const struct loc_descr_context context = { type, info->base_decl, NULL };
19292 int dim;
19293
19294 add_name_attribute (array_die, type_tag (type));
19295 equate_type_number_to_die (type, array_die);
19296
19297 if (info->ndimensions > 1)
19298 switch (info->ordering)
19299 {
19300 case array_descr_ordering_row_major:
19301 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
19302 break;
19303 case array_descr_ordering_column_major:
19304 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
19305 break;
19306 default:
19307 break;
19308 }
19309
19310 if (dwarf_version >= 3 || !dwarf_strict)
19311 {
19312 if (info->data_location)
19313 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
19314 dw_scalar_form_exprloc, &context);
19315 if (info->associated)
19316 add_scalar_info (array_die, DW_AT_associated, info->associated,
19317 dw_scalar_form_constant
19318 | dw_scalar_form_exprloc
19319 | dw_scalar_form_reference, &context);
19320 if (info->allocated)
19321 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
19322 dw_scalar_form_constant
19323 | dw_scalar_form_exprloc
19324 | dw_scalar_form_reference, &context);
19325 if (info->stride)
19326 {
19327 const enum dwarf_attribute attr
19328 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
19329 const int forms
19330 = (info->stride_in_bits)
19331 ? dw_scalar_form_constant
19332 : (dw_scalar_form_constant
19333 | dw_scalar_form_exprloc
19334 | dw_scalar_form_reference);
19335
19336 add_scalar_info (array_die, attr, info->stride, forms, &context);
19337 }
19338 }
19339
19340 add_gnat_descriptive_type_attribute (array_die, type, context_die);
19341
19342 for (dim = 0; dim < info->ndimensions; dim++)
19343 {
19344 dw_die_ref subrange_die
19345 = new_die (DW_TAG_subrange_type, array_die, NULL);
19346
19347 if (info->dimen[dim].bounds_type)
19348 add_type_attribute (subrange_die,
19349 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
19350 false, context_die);
19351 if (info->dimen[dim].lower_bound)
19352 add_bound_info (subrange_die, DW_AT_lower_bound,
19353 info->dimen[dim].lower_bound, &context);
19354 if (info->dimen[dim].upper_bound)
19355 add_bound_info (subrange_die, DW_AT_upper_bound,
19356 info->dimen[dim].upper_bound, &context);
19357 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
19358 add_scalar_info (subrange_die, DW_AT_byte_stride,
19359 info->dimen[dim].stride,
19360 dw_scalar_form_constant
19361 | dw_scalar_form_exprloc
19362 | dw_scalar_form_reference,
19363 &context);
19364 }
19365
19366 gen_type_die (info->element_type, context_die);
19367 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
19368 TREE_CODE (type) == ARRAY_TYPE
19369 && TYPE_REVERSE_STORAGE_ORDER (type),
19370 context_die);
19371
19372 if (get_AT (array_die, DW_AT_name))
19373 add_pubtype (type, array_die);
19374 }
19375
19376 #if 0
19377 static void
19378 gen_entry_point_die (tree decl, dw_die_ref context_die)
19379 {
19380 tree origin = decl_ultimate_origin (decl);
19381 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
19382
19383 if (origin != NULL)
19384 add_abstract_origin_attribute (decl_die, origin);
19385 else
19386 {
19387 add_name_and_src_coords_attributes (decl_die, decl);
19388 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
19389 TYPE_UNQUALIFIED, false, context_die);
19390 }
19391
19392 if (DECL_ABSTRACT_P (decl))
19393 equate_decl_number_to_die (decl, decl_die);
19394 else
19395 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
19396 }
19397 #endif
19398
19399 /* Walk through the list of incomplete types again, trying once more to
19400 emit full debugging info for them. */
19401
19402 static void
19403 retry_incomplete_types (void)
19404 {
19405 int i;
19406
19407 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
19408 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
19409 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
19410 }
19411
19412 /* Determine what tag to use for a record type. */
19413
19414 static enum dwarf_tag
19415 record_type_tag (tree type)
19416 {
19417 if (! lang_hooks.types.classify_record)
19418 return DW_TAG_structure_type;
19419
19420 switch (lang_hooks.types.classify_record (type))
19421 {
19422 case RECORD_IS_STRUCT:
19423 return DW_TAG_structure_type;
19424
19425 case RECORD_IS_CLASS:
19426 return DW_TAG_class_type;
19427
19428 case RECORD_IS_INTERFACE:
19429 if (dwarf_version >= 3 || !dwarf_strict)
19430 return DW_TAG_interface_type;
19431 return DW_TAG_structure_type;
19432
19433 default:
19434 gcc_unreachable ();
19435 }
19436 }
19437
19438 /* Generate a DIE to represent an enumeration type. Note that these DIEs
19439 include all of the information about the enumeration values also. Each
19440 enumerated type name/value is listed as a child of the enumerated type
19441 DIE. */
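/* For example (illustrative only), a C enumeration such as

       enum color { RED = 0, BLUE = 7 };

   yields roughly

       DW_TAG_enumeration_type (DW_AT_name "color", DW_AT_byte_size ...)
         DW_TAG_enumerator     (DW_AT_name "RED",  DW_AT_const_value 0)
         DW_TAG_enumerator     (DW_AT_name "BLUE", DW_AT_const_value 7)  */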
19442
19443 static dw_die_ref
19444 gen_enumeration_type_die (tree type, dw_die_ref context_die)
19445 {
19446 dw_die_ref type_die = lookup_type_die (type);
19447
19448 if (type_die == NULL)
19449 {
19450 type_die = new_die (DW_TAG_enumeration_type,
19451 scope_die_for (type, context_die), type);
19452 equate_type_number_to_die (type, type_die);
19453 add_name_attribute (type_die, type_tag (type));
19454 if (dwarf_version >= 4 || !dwarf_strict)
19455 {
19456 if (ENUM_IS_SCOPED (type))
19457 add_AT_flag (type_die, DW_AT_enum_class, 1);
19458 if (ENUM_IS_OPAQUE (type))
19459 add_AT_flag (type_die, DW_AT_declaration, 1);
19460 }
19461 }
19462 else if (! TYPE_SIZE (type))
19463 return type_die;
19464 else
19465 remove_AT (type_die, DW_AT_declaration);
19466
19467 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
19468 given enum type is incomplete, do not generate the DW_AT_byte_size
19469 attribute or the DW_AT_element_list attribute. */
19470 if (TYPE_SIZE (type))
19471 {
19472 tree link;
19473
19474 TREE_ASM_WRITTEN (type) = 1;
19475 add_byte_size_attribute (type_die, type);
19476 if (dwarf_version >= 3 || !dwarf_strict)
19477 {
19478 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
19479 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
19480 context_die);
19481 }
19482 if (TYPE_STUB_DECL (type) != NULL_TREE)
19483 {
19484 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
19485 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
19486 }
19487
19488 /* If the first reference to this type was as the return type of an
19489 inline function, then it may not have a parent. Fix this now. */
19490 if (type_die->die_parent == NULL)
19491 add_child_die (scope_die_for (type, context_die), type_die);
19492
19493 for (link = TYPE_VALUES (type);
19494 link != NULL; link = TREE_CHAIN (link))
19495 {
19496 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
19497 tree value = TREE_VALUE (link);
19498
19499 add_name_attribute (enum_die,
19500 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
19501
19502 if (TREE_CODE (value) == CONST_DECL)
19503 value = DECL_INITIAL (value);
19504
19505 if (simple_type_size_in_bits (TREE_TYPE (value))
19506 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
19507 {
19508 	      /* For constant forms created by add_AT_unsigned, DWARF
19509 		 consumers (GDB, elfutils, etc.) always zero-extend
19510 the value. Only when the actual value is negative
19511 do we need to use add_AT_int to generate a constant
19512 form that can represent negative values. */
19513 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
19514 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
19515 add_AT_unsigned (enum_die, DW_AT_const_value,
19516 (unsigned HOST_WIDE_INT) val);
19517 else
19518 add_AT_int (enum_die, DW_AT_const_value, val);
19519 }
19520 else
19521 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
19522 that here. TODO: This should be re-worked to use correct
19523 signed/unsigned double tags for all cases. */
19524 add_AT_wide (enum_die, DW_AT_const_value, value);
19525 }
19526
19527 add_gnat_descriptive_type_attribute (type_die, type, context_die);
19528 if (TYPE_ARTIFICIAL (type))
19529 add_AT_flag (type_die, DW_AT_artificial, 1);
19530 }
19531 else
19532 add_AT_flag (type_die, DW_AT_declaration, 1);
19533
19534 add_pubtype (type, type_die);
19535
19536 return type_die;
19537 }
19538
19539 /* Generate a DIE to represent either a real live formal parameter decl or to
19540 represent just the type of some formal parameter position in some function
19541 type.
19542
19543 Note that this routine is a bit unusual because its argument may be a
19544 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
19545 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
19546 node. If it's the former then this function is being called to output a
19547 DIE to represent a formal parameter object (or some inlining thereof). If
19548 it's the latter, then this function is only being called to output a
19549 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
19550 argument type of some subprogram type.
19551 If EMIT_NAME_P is true, name and source coordinate attributes
19552 are emitted. */
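/* Illustration (assumed examples): for a definition like
   `void g (int a) { ... }' the PARM_DECL path below produces a named
   DW_TAG_formal_parameter DIE for A, while for a type such as
   `void (*p) (int);' only the ..._TYPE path is used and the resulting
   placeholder DIE carries a type but no name.  */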
19553
19554 static dw_die_ref
19555 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
19556 dw_die_ref context_die)
19557 {
19558 tree node_or_origin = node ? node : origin;
19559 tree ultimate_origin;
19560 dw_die_ref parm_die = NULL;
19561
19562 if (TREE_CODE_CLASS (TREE_CODE (node_or_origin)) == tcc_declaration)
19563 {
19564 parm_die = lookup_decl_die (node);
19565
19566 /* If the contexts differ, we may not be talking about the same
19567 thing. */
19568 if (parm_die && parm_die->die_parent != context_die)
19569 {
19570 if (!DECL_ABSTRACT_P (node))
19571 {
19572 /* This can happen when creating an inlined instance, in
19573 which case we need to create a new DIE that will get
19574 annotated with DW_AT_abstract_origin. */
19575 parm_die = NULL;
19576 }
19577 else
19578 {
19579 /* FIXME: Reuse DIE even with a differing context.
19580
19581 This can happen when calling
19582 dwarf2out_abstract_function to build debug info for
19583 the abstract instance of a function for which we have
19584 already generated a DIE in
19585 dwarf2out_early_global_decl.
19586
19587 Once we remove dwarf2out_abstract_function, we should
19588 have a call to gcc_unreachable here. */
19589 }
19590 }
19591
19592 if (parm_die && parm_die->die_parent == NULL)
19593 {
19594 /* Check that parm_die already has the right attributes that
19595 we would have added below. If any attributes are
19596 missing, fall through to add them. */
19597 if (! DECL_ABSTRACT_P (node_or_origin)
19598 && !get_AT (parm_die, DW_AT_location)
19599 && !get_AT (parm_die, DW_AT_const_value))
19600 /* We are missing location info, and are about to add it. */
19601 ;
19602 else
19603 {
19604 add_child_die (context_die, parm_die);
19605 return parm_die;
19606 }
19607 }
19608 }
19609
19610   /* If we have a previously generated DIE, use it, unless this is a
19611      concrete instance (origin != NULL), in which case we need a new
19612 DIE with a corresponding DW_AT_abstract_origin. */
19613 bool reusing_die;
19614 if (parm_die && origin == NULL)
19615 reusing_die = true;
19616 else
19617 {
19618 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
19619 reusing_die = false;
19620 }
19621
19622 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
19623 {
19624 case tcc_declaration:
19625 ultimate_origin = decl_ultimate_origin (node_or_origin);
19626 if (node || ultimate_origin)
19627 origin = ultimate_origin;
19628
19629 if (reusing_die)
19630 goto add_location;
19631
19632 if (origin != NULL)
19633 add_abstract_origin_attribute (parm_die, origin);
19634 else if (emit_name_p)
19635 add_name_and_src_coords_attributes (parm_die, node);
19636 if (origin == NULL
19637 || (! DECL_ABSTRACT_P (node_or_origin)
19638 && variably_modified_type_p (TREE_TYPE (node_or_origin),
19639 decl_function_context
19640 (node_or_origin))))
19641 {
19642 tree type = TREE_TYPE (node_or_origin);
19643 if (decl_by_reference_p (node_or_origin))
19644 add_type_attribute (parm_die, TREE_TYPE (type),
19645 TYPE_UNQUALIFIED,
19646 false, context_die);
19647 else
19648 add_type_attribute (parm_die, type,
19649 decl_quals (node_or_origin),
19650 false, context_die);
19651 }
19652 if (origin == NULL && DECL_ARTIFICIAL (node))
19653 add_AT_flag (parm_die, DW_AT_artificial, 1);
19654 add_location:
19655 if (node && node != origin)
19656 equate_decl_number_to_die (node, parm_die);
19657 if (! DECL_ABSTRACT_P (node_or_origin))
19658 add_location_or_const_value_attribute (parm_die, node_or_origin,
19659 node == NULL);
19660
19661 break;
19662
19663 case tcc_type:
19664 /* We were called with some kind of a ..._TYPE node. */
19665 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
19666 context_die);
19667 break;
19668
19669 default:
19670 gcc_unreachable ();
19671 }
19672
19673 return parm_die;
19674 }
19675
19676 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
19677    child DW_TAG_formal_parameter DIEs representing the arguments of the
19678 parameter pack.
19679
19680 PARM_PACK must be a function parameter pack.
19681 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
19682 must point to the subsequent arguments of the function PACK_ARG belongs to.
19683 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
19684    If NEXT_ARG is non-NULL, *NEXT_ARG is set to the function argument
19685 following the last one for which a DIE was generated. */
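/* Rough example (assumed): for a C++ instantiation of

       template <typename... T> void f (T... args);

   called as f (1, 2), the subprogram DIE gets one
   DW_TAG_GNU_formal_parameter_pack child whose own children are the two
   DW_TAG_formal_parameter DIEs for the expanded pack arguments.  */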
19686
19687 static dw_die_ref
19688 gen_formal_parameter_pack_die (tree parm_pack,
19689 tree pack_arg,
19690 dw_die_ref subr_die,
19691 tree *next_arg)
19692 {
19693 tree arg;
19694 dw_die_ref parm_pack_die;
19695
19696 gcc_assert (parm_pack
19697 && lang_hooks.function_parameter_pack_p (parm_pack)
19698 && subr_die);
19699
19700 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
19701 add_src_coords_attributes (parm_pack_die, parm_pack);
19702
19703 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
19704 {
19705 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
19706 parm_pack))
19707 break;
19708 gen_formal_parameter_die (arg, NULL,
19709 false /* Don't emit name attribute. */,
19710 parm_pack_die);
19711 }
19712 if (next_arg)
19713 *next_arg = arg;
19714 return parm_pack_die;
19715 }
19716
19717 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
19718 at the end of an (ANSI prototyped) formal parameters list. */
19719
19720 static void
19721 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
19722 {
19723 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
19724 }
19725
19726 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
19727 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
19728 parameters as specified in some function type specification (except for
19729 those which appear as part of a function *definition*). */
19730
19731 static void
19732 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
19733 {
19734 tree link;
19735 tree formal_type = NULL;
19736 tree first_parm_type;
19737 tree arg;
19738
19739 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
19740 {
19741 arg = DECL_ARGUMENTS (function_or_method_type);
19742 function_or_method_type = TREE_TYPE (function_or_method_type);
19743 }
19744 else
19745 arg = NULL_TREE;
19746
19747 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
19748
19749 /* Make our first pass over the list of formal parameter types and output a
19750 DW_TAG_formal_parameter DIE for each one. */
19751 for (link = first_parm_type; link; )
19752 {
19753 dw_die_ref parm_die;
19754
19755 formal_type = TREE_VALUE (link);
19756 if (formal_type == void_type_node)
19757 break;
19758
19759 /* Output a (nameless) DIE to represent the formal parameter itself. */
19760 if (!POINTER_BOUNDS_TYPE_P (formal_type))
19761 {
19762 parm_die = gen_formal_parameter_die (formal_type, NULL,
19763 true /* Emit name attribute. */,
19764 context_die);
19765 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
19766 && link == first_parm_type)
19767 {
19768 add_AT_flag (parm_die, DW_AT_artificial, 1);
19769 if (dwarf_version >= 3 || !dwarf_strict)
19770 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
19771 }
19772 else if (arg && DECL_ARTIFICIAL (arg))
19773 add_AT_flag (parm_die, DW_AT_artificial, 1);
19774 }
19775
19776 link = TREE_CHAIN (link);
19777 if (arg)
19778 arg = DECL_CHAIN (arg);
19779 }
19780
19781 /* If this function type has an ellipsis, add a
19782 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
19783 if (formal_type != void_type_node)
19784 gen_unspecified_parameters_die (function_or_method_type, context_die);
19785
19786 /* Make our second (and final) pass over the list of formal parameter types
19787 and output DIEs to represent those types (as necessary). */
19788 for (link = TYPE_ARG_TYPES (function_or_method_type);
19789 link && TREE_VALUE (link);
19790 link = TREE_CHAIN (link))
19791 gen_type_die (TREE_VALUE (link), context_die);
19792 }
19793
19794 /* We want to generate the DIE for TYPE so that we can generate the
19795 die for MEMBER, which has been defined; we will need to refer back
19796 to the member declaration nested within TYPE. If we're trying to
19797 generate minimal debug info for TYPE, processing TYPE won't do the
19798 trick; we need to attach the member declaration by hand. */
19799
19800 static void
19801 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
19802 {
19803 gen_type_die (type, context_die);
19804
19805 /* If we're trying to avoid duplicate debug info, we may not have
19806 emitted the member decl for this function. Emit it now. */
19807 if (TYPE_STUB_DECL (type)
19808 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
19809 && ! lookup_decl_die (member))
19810 {
19811 dw_die_ref type_die;
19812 gcc_assert (!decl_ultimate_origin (member));
19813
19814 push_decl_scope (type);
19815 type_die = lookup_type_die_strip_naming_typedef (type);
19816 if (TREE_CODE (member) == FUNCTION_DECL)
19817 gen_subprogram_die (member, type_die);
19818 else if (TREE_CODE (member) == FIELD_DECL)
19819 {
19820 /* Ignore the nameless fields that are used to skip bits but handle
19821 C++ anonymous unions and structs. */
19822 if (DECL_NAME (member) != NULL_TREE
19823 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
19824 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
19825 {
19826 struct vlr_context vlr_ctx = {
19827 DECL_CONTEXT (member), /* struct_type */
19828 NULL_TREE /* variant_part_offset */
19829 };
19830 gen_type_die (member_declared_type (member), type_die);
19831 gen_field_die (member, &vlr_ctx, type_die);
19832 }
19833 }
19834 else
19835 gen_variable_die (member, NULL_TREE, type_die);
19836
19837 pop_decl_scope ();
19838 }
19839 }
19840 \f
19841 /* Forward declare these functions, because they are mutually recursive
19842 with their set_block_* pairing functions. */
19843 static void set_decl_origin_self (tree);
19844 static void set_decl_abstract_flags (tree, vec<tree> &);
19845
19846 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
19847 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
19848 that it points to the node itself, thus indicating that the node is its
19849 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
19850 the given node is NULL, recursively descend the decl/block tree which
19851 it is the root of, and for each other ..._DECL or BLOCK node contained
19852 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
19853 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
19854 values to point to themselves. */
19855
19856 static void
19857 set_block_origin_self (tree stmt)
19858 {
19859 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
19860 {
19861 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
19862
19863 {
19864 tree local_decl;
19865
19866 for (local_decl = BLOCK_VARS (stmt);
19867 local_decl != NULL_TREE;
19868 local_decl = DECL_CHAIN (local_decl))
19869 /* Do not recurse on nested functions since the inlining status
19870 of parent and child can be different as per the DWARF spec. */
19871 if (TREE_CODE (local_decl) != FUNCTION_DECL
19872 && !DECL_EXTERNAL (local_decl))
19873 set_decl_origin_self (local_decl);
19874 }
19875
19876 {
19877 tree subblock;
19878
19879 for (subblock = BLOCK_SUBBLOCKS (stmt);
19880 subblock != NULL_TREE;
19881 subblock = BLOCK_CHAIN (subblock))
19882 set_block_origin_self (subblock); /* Recurse. */
19883 }
19884 }
19885 }
19886
19887 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
19888 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
19889      node so that it points to the node itself, thus indicating that the
19890      node represents its own (abstract) origin.  Additionally, if the
19891      DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
19892      the decl/block tree of which the given node is the root, and for
19893 each other ..._DECL or BLOCK node contained therein whose
19894 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
19895 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
19896 point to themselves. */
19897
19898 static void
19899 set_decl_origin_self (tree decl)
19900 {
19901 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
19902 {
19903 DECL_ABSTRACT_ORIGIN (decl) = decl;
19904 if (TREE_CODE (decl) == FUNCTION_DECL)
19905 {
19906 tree arg;
19907
19908 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
19909 DECL_ABSTRACT_ORIGIN (arg) = arg;
19910 if (DECL_INITIAL (decl) != NULL_TREE
19911 && DECL_INITIAL (decl) != error_mark_node)
19912 set_block_origin_self (DECL_INITIAL (decl));
19913 }
19914 }
19915 }
19916 \f
19917 /* Given a pointer to some BLOCK node, set the BLOCK_ABSTRACT flag to 1
19918    and if it wasn't 1 before, push it onto the ABSTRACT_VEC vector.
19919 For all local decls and all local sub-blocks (recursively) do it
19920 too. */
19921
19922 static void
19923 set_block_abstract_flags (tree stmt, vec<tree> &abstract_vec)
19924 {
19925 tree local_decl;
19926 tree subblock;
19927 unsigned int i;
19928
19929 if (!BLOCK_ABSTRACT (stmt))
19930 {
19931 abstract_vec.safe_push (stmt);
19932 BLOCK_ABSTRACT (stmt) = 1;
19933 }
19934
19935 for (local_decl = BLOCK_VARS (stmt);
19936 local_decl != NULL_TREE;
19937 local_decl = DECL_CHAIN (local_decl))
19938 if (! DECL_EXTERNAL (local_decl))
19939 set_decl_abstract_flags (local_decl, abstract_vec);
19940
19941 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
19942 {
19943 local_decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
19944 if ((TREE_CODE (local_decl) == VAR_DECL && !TREE_STATIC (local_decl))
19945 || TREE_CODE (local_decl) == PARM_DECL)
19946 set_decl_abstract_flags (local_decl, abstract_vec);
19947 }
19948
19949 for (subblock = BLOCK_SUBBLOCKS (stmt);
19950 subblock != NULL_TREE;
19951 subblock = BLOCK_CHAIN (subblock))
19952 set_block_abstract_flags (subblock, abstract_vec);
19953 }
19954
19955 /* Given a pointer to some ..._DECL node, set DECL_ABSTRACT_P flag on it
19956    to 1 and, if it wasn't 1 before, push it onto the ABSTRACT_VEC vector.
19957 In the case where the decl is a FUNCTION_DECL also set the abstract
19958 flags for all of the parameters, local vars, local
19959 blocks and sub-blocks (recursively). */
19960
19961 static void
19962 set_decl_abstract_flags (tree decl, vec<tree> &abstract_vec)
19963 {
19964 if (!DECL_ABSTRACT_P (decl))
19965 {
19966 abstract_vec.safe_push (decl);
19967 DECL_ABSTRACT_P (decl) = 1;
19968 }
19969
19970 if (TREE_CODE (decl) == FUNCTION_DECL)
19971 {
19972 tree arg;
19973
19974 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
19975 if (!DECL_ABSTRACT_P (arg))
19976 {
19977 abstract_vec.safe_push (arg);
19978 DECL_ABSTRACT_P (arg) = 1;
19979 }
19980 if (DECL_INITIAL (decl) != NULL_TREE
19981 && DECL_INITIAL (decl) != error_mark_node)
19982 set_block_abstract_flags (DECL_INITIAL (decl), abstract_vec);
19983 }
19984 }
19985
19986 /* Generate the DWARF2 info for the "abstract" instance of a function which we
19987 may later generate inlined and/or out-of-line instances of.
19988
19989 FIXME: In the early-dwarf world, this function, and most of the
19990 DECL_ABSTRACT code should be obsoleted. The early DIE _is_
19991 the abstract instance. All we would need to do is annotate
19992 the early DIE with the appropriate DW_AT_inline in late
19993 dwarf (perhaps in gen_inlined_subroutine_die).
19994
19995 However, we can't do this yet, because LTO streaming of DIEs
19996 has not been implemented yet. */
19997
19998 static void
19999 dwarf2out_abstract_function (tree decl)
20000 {
20001 dw_die_ref old_die;
20002 tree save_fn;
20003 tree context;
20004 hash_table<decl_loc_hasher> *old_decl_loc_table;
20005 hash_table<dw_loc_list_hasher> *old_cached_dw_loc_list_table;
20006 int old_call_site_count, old_tail_call_site_count;
20007 struct call_arg_loc_node *old_call_arg_locations;
20008
20009 /* Make sure we have the actual abstract inline, not a clone. */
20010 decl = DECL_ORIGIN (decl);
20011
20012 old_die = lookup_decl_die (decl);
20013 if (old_die && get_AT (old_die, DW_AT_inline))
20014 /* We've already generated the abstract instance. */
20015 return;
20016
20017   /* We can be called recursively when seeing a block that defines an inlined
20018      subroutine DIE.  Be sure not to clobber the outer location table, nor to
20019      use it, or we would get locations in abstract instances.  */
20020 old_decl_loc_table = decl_loc_table;
20021 decl_loc_table = NULL;
20022 old_cached_dw_loc_list_table = cached_dw_loc_list_table;
20023 cached_dw_loc_list_table = NULL;
20024 old_call_arg_locations = call_arg_locations;
20025 call_arg_locations = NULL;
20026 old_call_site_count = call_site_count;
20027 call_site_count = -1;
20028 old_tail_call_site_count = tail_call_site_count;
20029 tail_call_site_count = -1;
20030
20031 /* Be sure we've emitted the in-class declaration DIE (if any) first, so
20032 we don't get confused by DECL_ABSTRACT_P. */
20033 if (debug_info_level > DINFO_LEVEL_TERSE)
20034 {
20035 context = decl_class_context (decl);
20036 if (context)
20037 gen_type_die_for_member
20038 (context, decl, decl_function_context (decl) ? NULL : comp_unit_die ());
20039 }
20040
20041 /* Pretend we've just finished compiling this function. */
20042 save_fn = current_function_decl;
20043 current_function_decl = decl;
20044
20045 auto_vec<tree, 64> abstract_vec;
20046 set_decl_abstract_flags (decl, abstract_vec);
20047 dwarf2out_decl (decl);
20048 unsigned int i;
20049 tree t;
20050 FOR_EACH_VEC_ELT (abstract_vec, i, t)
20051 if (TREE_CODE (t) == BLOCK)
20052 BLOCK_ABSTRACT (t) = 0;
20053 else
20054 DECL_ABSTRACT_P (t) = 0;
20055
20056 current_function_decl = save_fn;
20057 decl_loc_table = old_decl_loc_table;
20058 cached_dw_loc_list_table = old_cached_dw_loc_list_table;
20059 call_arg_locations = old_call_arg_locations;
20060 call_site_count = old_call_site_count;
20061 tail_call_site_count = old_tail_call_site_count;
20062 }
20063
20064 /* Helper function of premark_used_types() which gets called through
20065 htab_traverse.
20066
20067 Marks the DIE of a given type in *SLOT as perennial, so it never gets
20068 marked as unused by prune_unused_types. */
20069
20070 bool
20071 premark_used_types_helper (tree const &type, void *)
20072 {
20073 dw_die_ref die;
20074
20075 die = lookup_type_die (type);
20076 if (die != NULL)
20077 die->die_perennial_p = 1;
20078 return true;
20079 }
20080
20081 /* Helper function of premark_types_used_by_global_vars which gets called
20082 through htab_traverse.
20083
20084 Marks the DIE of a given type in *SLOT as perennial, so it never gets
20085 marked as unused by prune_unused_types. The DIE of the type is marked
20086 only if the global variable using the type will actually be emitted. */
20087
20088 int
20089 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
20090 void *)
20091 {
20092 struct types_used_by_vars_entry *entry;
20093 dw_die_ref die;
20094
20095 entry = (struct types_used_by_vars_entry *) *slot;
20096 gcc_assert (entry->type != NULL
20097 && entry->var_decl != NULL);
20098 die = lookup_type_die (entry->type);
20099 if (die)
20100 {
20101 /* Ask cgraph if the global variable really is to be emitted.
20102 If yes, then we'll keep the DIE of ENTRY->TYPE. */
20103 varpool_node *node = varpool_node::get (entry->var_decl);
20104 if (node && node->definition)
20105 {
20106 die->die_perennial_p = 1;
20107 /* Keep the parent DIEs as well. */
20108 while ((die = die->die_parent) && die->die_perennial_p == 0)
20109 die->die_perennial_p = 1;
20110 }
20111 }
20112 return 1;
20113 }
20114
20115 /* Mark all members of used_types_hash as perennial. */
20116
20117 static void
20118 premark_used_types (struct function *fun)
20119 {
20120 if (fun && fun->used_types_hash)
20121 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
20122 }
20123
20124 /* Mark all members of types_used_by_vars_entry as perennial. */
20125
20126 static void
20127 premark_types_used_by_global_vars (void)
20128 {
20129 if (types_used_by_vars_hash)
20130 types_used_by_vars_hash
20131 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
20132 }
20133
20134 /* Generate a DW_TAG_GNU_call_site DIE in function DECL under SUBR_DIE
20135 for CA_LOC call arg loc node. */
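/* Sketch of the result (illustrative; the exact attributes depend on what
   is known about the call): a call `foo (x);' typically yields, under the
   innermost enclosing scope DIE,

       DW_TAG_GNU_call_site (DW_AT_low_pc = label emitted after the call,
                             DW_AT_abstract_origin -> DIE of foo)

   with DW_AT_GNU_tail_call added for tail calls.  */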
20136
20137 static dw_die_ref
20138 gen_call_site_die (tree decl, dw_die_ref subr_die,
20139 struct call_arg_loc_node *ca_loc)
20140 {
20141 dw_die_ref stmt_die = NULL, die;
20142 tree block = ca_loc->block;
20143
20144 while (block
20145 && block != DECL_INITIAL (decl)
20146 && TREE_CODE (block) == BLOCK)
20147 {
20148 stmt_die = BLOCK_DIE (block);
20149 if (stmt_die)
20150 break;
20151 block = BLOCK_SUPERCONTEXT (block);
20152 }
20153 if (stmt_die == NULL)
20154 stmt_die = subr_die;
20155 die = new_die (DW_TAG_GNU_call_site, stmt_die, NULL_TREE);
20156 add_AT_lbl_id (die, DW_AT_low_pc, ca_loc->label);
20157 if (ca_loc->tail_call_p)
20158 add_AT_flag (die, DW_AT_GNU_tail_call, 1);
20159 if (ca_loc->symbol_ref)
20160 {
20161 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
20162 if (tdie)
20163 add_AT_die_ref (die, DW_AT_abstract_origin, tdie);
20164 else
20165 add_AT_addr (die, DW_AT_abstract_origin, ca_loc->symbol_ref, false);
20166 }
20167 return die;
20168 }
20169
20170 /* Generate a DIE to represent a declared function (either file-scope or
20171 block-local). */
20172
20173 static void
20174 gen_subprogram_die (tree decl, dw_die_ref context_die)
20175 {
20176 tree origin = decl_ultimate_origin (decl);
20177 dw_die_ref subr_die;
20178 dw_die_ref old_die = lookup_decl_die (decl);
20179
20180 /* This function gets called multiple times for different stages of
20181 the debug process. For example, for func() in this code:
20182
20183 namespace S
20184 {
20185 void func() { ... }
20186 }
20187
20188 ...we get called 4 times. Twice in early debug and twice in
20189 late debug:
20190
20191 Early debug
20192 -----------
20193
20194 1. Once while generating func() within the namespace. This is
20195 the declaration. The declaration bit below is set, as the
20196 context is the namespace.
20197
20198 A new DIE will be generated with DW_AT_declaration set.
20199
20200 2. Once for func() itself. This is the specification. The
20201 declaration bit below is clear as the context is the CU.
20202
20203 We will use the cached DIE from (1) to create a new DIE with
20204 DW_AT_specification pointing to the declaration in (1).
20205
20206 Late debug via rest_of_handle_final()
20207 -------------------------------------
20208
20209 3. Once generating func() within the namespace. This is also the
20210 declaration, as in (1), but this time we will early exit below
20211 as we have a cached DIE and a declaration needs no additional
20212 annotations (no locations), as the source declaration line
20213 info is enough.
20214
20215 4. Once for func() itself. As in (2), this is the specification,
20216 but this time we will re-use the cached DIE, and just annotate
20217 it with the location information that should now be available.
20218
20219 For something without namespaces, but with abstract instances, we
20220      are also called multiple times:
20221
20222 class Base
20223 {
20224 public:
20225 Base (); // constructor declaration (1)
20226 };
20227
20228 Base::Base () { } // constructor specification (2)
20229
20230 Early debug
20231 -----------
20232
20233 1. Once for the Base() constructor by virtue of it being a
20234 member of the Base class. This is done via
20235 rest_of_type_compilation.
20236
20237 This is a declaration, so a new DIE will be created with
20238 DW_AT_declaration.
20239
20240 2. Once for the Base() constructor definition, but this time
20241 while generating the abstract instance of the base
20242 constructor (__base_ctor) which is being generated via early
20243 debug of reachable functions.
20244
20245 Even though we have a cached version of the declaration (1),
20246 we will create a DW_AT_specification of the declaration DIE
20247 in (1).
20248
20249 3. Once for the __base_ctor itself, but this time, we generate
20250 an DW_AT_abstract_origin version of the DW_AT_specification in
20251         a DW_AT_abstract_origin version of the DW_AT_specification in
20252
20253 Late debug via rest_of_handle_final
20254 -----------------------------------
20255
20256 4. One final time for the __base_ctor (which will have a cached
20257         DIE with DW_AT_abstract_origin created in (3)).  This time,
20258 we will just annotate the location information now
20259 available.
20260 */
20261 int declaration = (current_function_decl != decl
20262 || class_or_namespace_scope_p (context_die));
20263
20264 premark_used_types (DECL_STRUCT_FUNCTION (decl));
20265
20266 /* Now that the C++ front end lazily declares artificial member fns, we
20267 might need to retrofit the declaration into its class. */
20268 if (!declaration && !origin && !old_die
20269 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
20270 && !class_or_namespace_scope_p (context_die)
20271 && debug_info_level > DINFO_LEVEL_TERSE)
20272 old_die = force_decl_die (decl);
20273
20274 /* An inlined instance, tag a new DIE with DW_AT_abstract_origin. */
20275 if (origin != NULL)
20276 {
20277 gcc_assert (!declaration || local_scope_p (context_die));
20278
20279 /* Fixup die_parent for the abstract instance of a nested
20280 inline function. */
20281 if (old_die && old_die->die_parent == NULL)
20282 add_child_die (context_die, old_die);
20283
20284 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
20285 {
20286 /* If we have a DW_AT_abstract_origin we have a working
20287 cached version. */
20288 subr_die = old_die;
20289 }
20290 else
20291 {
20292 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
20293 add_abstract_origin_attribute (subr_die, origin);
20294 /* This is where the actual code for a cloned function is.
20295 Let's emit linkage name attribute for it. This helps
20296 debuggers to e.g, set breakpoints into
20297 constructors/destructors when the user asks "break
20298 K::K". */
20299 add_linkage_name (subr_die, decl);
20300 }
20301 }
20302 /* A cached copy, possibly from early dwarf generation. Reuse as
20303 much as possible. */
20304 else if (old_die)
20305 {
20306 /* A declaration that has been previously dumped needs no
20307 additional information. */
20308 if (declaration)
20309 return;
20310
20311 if (!get_AT_flag (old_die, DW_AT_declaration)
20312 /* We can have a normal definition following an inline one in the
20313 case of redefinition of GNU C extern inlines.
20314 It seems reasonable to use AT_specification in this case. */
20315 && !get_AT (old_die, DW_AT_inline))
20316 {
20317 /* Detect and ignore this case, where we are trying to output
20318 something we have already output. */
20319 if (get_AT (old_die, DW_AT_low_pc)
20320 || get_AT (old_die, DW_AT_ranges))
20321 return;
20322
20323 /* If we have no location information, this must be a
20324 partially generated DIE from early dwarf generation.
20325 Fall through and generate it. */
20326 }
20327
20328 /* If the definition comes from the same place as the declaration,
20329 maybe use the old DIE. We always want the DIE for this function
20330 that has the *_pc attributes to be under comp_unit_die so the
20331 debugger can find it. We also need to do this for abstract
20332 instances of inlines, since the spec requires the out-of-line copy
20333 to have the same parent. For local class methods, this doesn't
20334 apply; we just use the old DIE. */
20335 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
20336 struct dwarf_file_data * file_index = lookup_filename (s.file);
20337 if ((is_cu_die (old_die->die_parent)
20338 /* This condition fixes the inconsistency/ICE with the
20339 following Fortran test (or some derivative thereof) while
20340 building libgfortran:
20341
20342 module some_m
20343 contains
20344 logical function funky (FLAG)
20345 funky = .true.
20346 end function
20347 end module
20348 */
20349 || (old_die->die_parent
20350 && old_die->die_parent->die_tag == DW_TAG_module)
20351 || context_die == NULL)
20352 && (DECL_ARTIFICIAL (decl)
20353 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
20354 && (get_AT_unsigned (old_die, DW_AT_decl_line)
20355 == (unsigned) s.line))))
20356 {
20357 subr_die = old_die;
20358
20359 /* Clear out the declaration attribute, but leave the
20360 parameters so they can be augmented with location
20361 information later. Unless this was a declaration, in
20362 which case, wipe out the nameless parameters and recreate
20363 them further down. */
20364 if (remove_AT (subr_die, DW_AT_declaration))
20365 {
20366
20367 remove_AT (subr_die, DW_AT_object_pointer);
20368 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
20369 }
20370 }
20371 /* Make a specification pointing to the previously built
20372 declaration. */
20373 else
20374 {
20375 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
20376 add_AT_specification (subr_die, old_die);
20377 add_pubname (decl, subr_die);
20378 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
20379 add_AT_file (subr_die, DW_AT_decl_file, file_index);
20380 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
20381 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
20382
20383 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
20384 emit the real type on the definition die. */
20385 if (is_cxx() && debug_info_level > DINFO_LEVEL_TERSE)
20386 {
20387 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
20388 if (die == auto_die || die == decltype_auto_die)
20389 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
20390 TYPE_UNQUALIFIED, false, context_die);
20391 }
20392 }
20393 }
20394 /* Create a fresh DIE for anything else. */
20395 else
20396 {
20397 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
20398
20399 if (TREE_PUBLIC (decl))
20400 add_AT_flag (subr_die, DW_AT_external, 1);
20401
20402 add_name_and_src_coords_attributes (subr_die, decl);
20403 add_pubname (decl, subr_die);
20404 if (debug_info_level > DINFO_LEVEL_TERSE)
20405 {
20406 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
20407 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
20408 TYPE_UNQUALIFIED, false, context_die);
20409 }
20410
20411 add_pure_or_virtual_attribute (subr_die, decl);
20412 if (DECL_ARTIFICIAL (decl))
20413 add_AT_flag (subr_die, DW_AT_artificial, 1);
20414
20415 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
20416 add_AT_flag (subr_die, DW_AT_noreturn, 1);
20417
20418 add_accessibility_attribute (subr_die, decl);
20419 }
20420
20421 /* Unless we have an existing non-declaration DIE, equate the new
20422 DIE. */
20423 if (!old_die || is_declaration_die (old_die))
20424 equate_decl_number_to_die (decl, subr_die);
20425
20426 if (declaration)
20427 {
20428 if (!old_die || !get_AT (old_die, DW_AT_inline))
20429 {
20430 add_AT_flag (subr_die, DW_AT_declaration, 1);
20431
20432 /* If this is an explicit function declaration then generate
20433 a DW_AT_explicit attribute. */
20434 if (lang_hooks.decls.function_decl_explicit_p (decl)
20435 && (dwarf_version >= 3 || !dwarf_strict))
20436 add_AT_flag (subr_die, DW_AT_explicit, 1);
20437
20438 /* If this is a C++11 deleted special function member then generate
20439 a DW_AT_GNU_deleted attribute. */
20440 if (lang_hooks.decls.function_decl_deleted_p (decl)
20441 && (! dwarf_strict))
20442 add_AT_flag (subr_die, DW_AT_GNU_deleted, 1);
20443 }
20444 }
20445 /* Tag abstract instances with DW_AT_inline. */
20446 else if (DECL_ABSTRACT_P (decl))
20447 {
20448 if (DECL_DECLARED_INLINE_P (decl))
20449 {
20450 if (cgraph_function_possibly_inlined_p (decl))
20451 add_AT_unsigned (subr_die, DW_AT_inline, DW_INL_declared_inlined);
20452 else
20453 add_AT_unsigned (subr_die, DW_AT_inline, DW_INL_declared_not_inlined);
20454 }
20455 else
20456 {
20457 if (cgraph_function_possibly_inlined_p (decl))
20458 add_AT_unsigned (subr_die, DW_AT_inline, DW_INL_inlined);
20459 else
20460 add_AT_unsigned (subr_die, DW_AT_inline, DW_INL_not_inlined);
20461 }
20462
20463 if (DECL_DECLARED_INLINE_P (decl)
20464 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
20465 add_AT_flag (subr_die, DW_AT_artificial, 1);
20466 }
20467 /* For non DECL_EXTERNALs, if range information is available, fill
20468 the DIE with it. */
20469 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
20470 {
20471 HOST_WIDE_INT cfa_fb_offset;
20472
20473 struct function *fun = DECL_STRUCT_FUNCTION (decl);
20474
20475 if (!flag_reorder_blocks_and_partition)
20476 {
20477 dw_fde_ref fde = fun->fde;
20478 if (fde->dw_fde_begin)
20479 {
20480 /* We have already generated the labels. */
20481 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
20482 fde->dw_fde_end, false);
20483 }
20484 else
20485 {
20486 /* Create start/end labels and add the range. */
20487 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
20488 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
20489 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
20490 current_function_funcdef_no);
20491 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
20492 current_function_funcdef_no);
20493 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
20494 false);
20495 }
20496
20497 #if VMS_DEBUGGING_INFO
20498 /* HP OpenVMS Industry Standard 64: DWARF Extensions
20499 Section 2.3 Prologue and Epilogue Attributes:
20500 When a breakpoint is set on entry to a function, it is generally
20501 desirable for execution to be suspended, not on the very first
20502 instruction of the function, but rather at a point after the
20503 function's frame has been set up, after any language defined local
20504 declaration processing has been completed, and before execution of
20505 the first statement of the function begins. Debuggers generally
20506 cannot properly determine where this point is. Similarly for a
20507 breakpoint set on exit from a function. The prologue and epilogue
20508 attributes allow a compiler to communicate the location(s) to use. */
20509
20510 {
20511 if (fde->dw_fde_vms_end_prologue)
20512 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
20513 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
20514
20515 if (fde->dw_fde_vms_begin_epilogue)
20516 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
20517 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
20518 }
20519 #endif
20520
20521 }
20522 else
20523 {
20524 /* Generate pubnames entries for the split function code ranges. */
20525 dw_fde_ref fde = fun->fde;
20526
20527 if (fde->dw_fde_second_begin)
20528 {
20529 if (dwarf_version >= 3 || !dwarf_strict)
20530 {
20531 /* We should use ranges for non-contiguous code section
20532 addresses. Use the actual code range for the initial
20533 section, since the HOT/COLD labels might precede an
20534 alignment offset. */
20535 bool range_list_added = false;
20536 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
20537 fde->dw_fde_end, &range_list_added,
20538 false);
20539 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
20540 fde->dw_fde_second_end,
20541 &range_list_added, false);
20542 if (range_list_added)
20543 add_ranges (NULL);
20544 }
20545 else
20546 {
20547              /* There is no real support in DWARF 2 for this, so we make
20548                 a work-around.  First, emit the pub name for the segment
20549                 containing the function label.  Then make and emit a
20550                 simplified subprogram DIE for the second segment with the
20551                 name prefixed by __second_sect_of_.  We use the same
20552                 linkage name for the second DIE so that gdb will find both
20553                 sections when given "b foo".  */
20554 const char *name = NULL;
20555 tree decl_name = DECL_NAME (decl);
20556 dw_die_ref seg_die;
20557
20558 /* Do the 'primary' section. */
20559 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
20560 fde->dw_fde_end, false);
20561
20562 /* Build a minimal DIE for the secondary section. */
20563 seg_die = new_die (DW_TAG_subprogram,
20564 subr_die->die_parent, decl);
20565
20566 if (TREE_PUBLIC (decl))
20567 add_AT_flag (seg_die, DW_AT_external, 1);
20568
20569 if (decl_name != NULL
20570 && IDENTIFIER_POINTER (decl_name) != NULL)
20571 {
20572 name = dwarf2_name (decl, 1);
20573 if (! DECL_ARTIFICIAL (decl))
20574 add_src_coords_attributes (seg_die, decl);
20575
20576 add_linkage_name (seg_die, decl);
20577 }
20578 gcc_assert (name != NULL);
20579 add_pure_or_virtual_attribute (seg_die, decl);
20580 if (DECL_ARTIFICIAL (decl))
20581 add_AT_flag (seg_die, DW_AT_artificial, 1);
20582
20583 name = concat ("__second_sect_of_", name, NULL);
20584 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
20585 fde->dw_fde_second_end, false);
20586 add_name_attribute (seg_die, name);
20587 if (want_pubnames ())
20588 add_pubname_string (name, seg_die);
20589 }
20590 }
20591 else
20592 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
20593 false);
20594 }
20595
20596 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
20597
20598 /* We define the "frame base" as the function's CFA. This is more
20599 convenient for several reasons: (1) It's stable across the prologue
20600 and epilogue, which makes it better than just a frame pointer,
20601 (2) With dwarf3, there exists a one-byte encoding that allows us
20602 to reference the .debug_frame data by proxy, but failing that,
20603 (3) We can at least reuse the code inspection and interpretation
20604 code that determines the CFA position at various points in the
20605 function. */
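      /* Consumers typically display the DWARF 3 form chosen below as
         something like (illustrative readelf-style output):

             DW_AT_frame_base : 1 byte block: 9c   (DW_OP_call_frame_cfa)

         i.e. the single-byte DW_OP_call_frame_cfa opcode (0x9c).  */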
20606 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
20607 {
20608 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
20609 add_AT_loc (subr_die, DW_AT_frame_base, op);
20610 }
20611 else
20612 {
20613 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
20614 if (list->dw_loc_next)
20615 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
20616 else
20617 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
20618 }
20619
20620 /* Compute a displacement from the "steady-state frame pointer" to
20621 the CFA. The former is what all stack slots and argument slots
20622 will reference in the rtl; the latter is what we've told the
20623 debugger about. We'll need to adjust all frame_base references
20624 by this displacement. */
20625 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
20626
20627 if (fun->static_chain_decl)
20628 {
20629 /* DWARF requires here a location expression that computes the
20630 address of the enclosing subprogram's frame base. The machinery
20631 in tree-nested.c is supposed to store this specific address in the
20632 last field of the FRAME record. */
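	  /* A sketch of the effect (assumed GNU C nested-function example):
	     for

	         void outer (void) { int x; void inner (void) { x++; } ... }

	     the DIE for INNER gets a DW_AT_static_link whose expression
	     yields OUTER's frame base, read from the last field of the
	     FRAME object built by tree-nested.c, so debuggers can resolve
	     X up the static chain.  */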
20633 const tree frame_type
20634 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
20635 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
20636
20637 tree fb_expr
20638 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
20639 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
20640 fb_expr, fb_decl, NULL_TREE);
20641
20642 add_AT_location_description (subr_die, DW_AT_static_link,
20643 loc_list_from_tree (fb_expr, 0, NULL));
20644 }
20645 }
20646
20647   /* Generate child DIEs for template parameters.  */
20648 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
20649 gen_generic_params_dies (decl);
20650
20651 /* Now output descriptions of the arguments for this function. This gets
20652 (unnecessarily?) complex because of the fact that the DECL_ARGUMENT list
20653 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
20654 `...' at the end of the formal parameter list. In order to find out if
20655 there was a trailing ellipsis or not, we must instead look at the type
20656 associated with the FUNCTION_DECL. This will be a node of type
20657 FUNCTION_TYPE. If the chain of type nodes hanging off of this
20658 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
20659 an ellipsis at the end. */
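  /* Illustrative example (assumed): for `int f (int, ...)' the
     TYPE_ARG_TYPES chain is { int } with no terminating void_type_node,
     so a DW_TAG_unspecified_parameters DIE is emitted; for `int f (int)'
     the chain is { int, void } and no such DIE is needed.  */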
20660
20661 /* In the case where we are describing a mere function declaration, all we
20662 need to do here (and all we *can* do here) is to describe the *types* of
20663 its formal parameters. */
20664 if (debug_info_level <= DINFO_LEVEL_TERSE)
20665 ;
20666 else if (declaration)
20667 gen_formal_types_die (decl, subr_die);
20668 else
20669 {
20670 /* Generate DIEs to represent all known formal parameters. */
20671 tree parm = DECL_ARGUMENTS (decl);
20672 tree generic_decl = early_dwarf
20673 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
20674 tree generic_decl_parm = generic_decl
20675 ? DECL_ARGUMENTS (generic_decl)
20676 : NULL;
20677
20678 /* Now we want to walk the list of parameters of the function and
20679 emit their relevant DIEs.
20680
20681 We consider the case of DECL being an instance of a generic function
20682 as well as it being a normal function.
20683
20684 If DECL is an instance of a generic function we walk the
20685 parameters of the generic function declaration _and_ the parameters of
20686 DECL itself. This is useful because we want to emit specific DIEs for
20687 function parameter packs and those are declared as part of the
20688 generic function declaration. In that particular case,
20689 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
20690 That DIE has children DIEs representing the set of arguments
20691 of the pack. Note that the set of pack arguments can be empty.
20692 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
20693 children DIE.
20694
20695 Otherwise, we just consider the parameters of DECL. */
20696 while (generic_decl_parm || parm)
20697 {
20698 if (generic_decl_parm
20699 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
20700 gen_formal_parameter_pack_die (generic_decl_parm,
20701 parm, subr_die,
20702 &parm);
20703 else if (parm && !POINTER_BOUNDS_P (parm))
20704 {
20705 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
20706
20707 if (parm == DECL_ARGUMENTS (decl)
20708 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
20709 && parm_die
20710 && (dwarf_version >= 3 || !dwarf_strict))
20711 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
20712
20713 parm = DECL_CHAIN (parm);
20714 }
20715 else if (parm)
20716 parm = DECL_CHAIN (parm);
20717
20718 if (generic_decl_parm)
20719 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
20720 }
20721
20722 /* Decide whether we need an unspecified_parameters DIE at the end.
20723            There are 2 cases to do this for: 1) the ANSI `...' declaration -
20724            this is detectable when the end of the arg list is not a
20725            void_type_node; 2) an unprototyped function declaration (not a
20726 definition). This just means that we have no info about the
20727 parameters at all. */
20728 if (prototype_p (TREE_TYPE (decl)))
20729 {
20730 	  /* This is the prototyped case, so check for a trailing ellipsis.  */
20731 if (stdarg_p (TREE_TYPE (decl)))
20732 gen_unspecified_parameters_die (decl, subr_die);
20733 }
20734 else if (DECL_INITIAL (decl) == NULL_TREE)
20735 gen_unspecified_parameters_die (decl, subr_die);
20736 }
20737
20738 if (subr_die != old_die)
20739 /* Add the calling convention attribute if requested. */
20740 add_calling_convention_attribute (subr_die, decl);
20741
20742 /* Output Dwarf info for all of the stuff within the body of the function
20743 (if it has one - it may be just a declaration).
20744
20745 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
20746 a function. This BLOCK actually represents the outermost binding contour
20747 for the function, i.e. the contour in which the function's formal
20748 parameters and labels get declared. Curiously, it appears that the front
20749 end doesn't actually put the PARM_DECL nodes for the current function onto
20750 the BLOCK_VARS list for this outer scope; instead they are strung off of
20751 the DECL_ARGUMENTS list for the function.
20752
20753 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
20754 the LABEL_DECL nodes for the function however, and we output DWARF info
20755 for those in decls_for_scope. Just within the `outer_scope' there will be
20756 a BLOCK node representing the function's outermost pair of curly braces,
20757 and any blocks used for the base and member initializers of a C++
20758 constructor function. */
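/* For instance (illustrative), given `void f (int x) { int y; lab:; }',
   the LABEL_DECL for `lab' is found through BLOCK_VARS of OUTER_SCOPE, the
   PARM_DECL for `x' through DECL_ARGUMENTS of the function, and the
   VAR_DECL for `y' in the nested BLOCK for the outermost braces.  */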
20759 tree outer_scope = DECL_INITIAL (decl);
20760 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
20761 {
20762 int call_site_note_count = 0;
20763 int tail_call_site_note_count = 0;
20764
20765 /* Emit a DW_TAG_variable DIE for a named return value. */
20766 if (DECL_NAME (DECL_RESULT (decl)))
20767 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
20768
20769 /* The first time through decls_for_scope we will generate the
20770 DIEs for the locals. The second time, we fill in the
20771 location info. */
20772 decls_for_scope (outer_scope, subr_die);
20773
20774 if (call_arg_locations && !dwarf_strict)
20775 {
20776 struct call_arg_loc_node *ca_loc;
20777 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
20778 {
20779 dw_die_ref die = NULL;
20780 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
20781 rtx arg, next_arg;
20782
20783 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
20784 ? NOTE_VAR_LOCATION (ca_loc->call_arg_loc_note)
20785 : NULL_RTX);
20786 arg; arg = next_arg)
20787 {
20788 dw_loc_descr_ref reg, val;
20789 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
20790 dw_die_ref cdie, tdie = NULL;
20791
20792 next_arg = XEXP (arg, 1);
20793 if (REG_P (XEXP (XEXP (arg, 0), 0))
20794 && next_arg
20795 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
20796 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
20797 && REGNO (XEXP (XEXP (arg, 0), 0))
20798 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
20799 next_arg = XEXP (next_arg, 1);
20800 if (mode == VOIDmode)
20801 {
20802 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
20803 if (mode == VOIDmode)
20804 mode = GET_MODE (XEXP (arg, 0));
20805 }
20806 if (mode == VOIDmode || mode == BLKmode)
20807 continue;
20808 /* Get dynamic information about call target only if we
20809 have no static information: we cannot generate both
20810 DW_AT_abstract_origin and DW_AT_GNU_call_site_target
20811 attributes. */
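/* Within the NOTE_VAR_LOCATION list, an entry whose location is pc_rtx
   records where the target of an indirect call can be found at the call
   site, and a (clobber pc) entry records the same for a location the call
   may clobber; these are turned into DW_AT_GNU_call_site_target and
   DW_AT_GNU_call_site_target_clobbered further below.  */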
20812 if (ca_loc->symbol_ref == NULL_RTX)
20813 {
20814 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
20815 {
20816 tloc = XEXP (XEXP (arg, 0), 1);
20817 continue;
20818 }
20819 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
20820 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
20821 {
20822 tlocc = XEXP (XEXP (arg, 0), 1);
20823 continue;
20824 }
20825 }
20826 reg = NULL;
20827 if (REG_P (XEXP (XEXP (arg, 0), 0)))
20828 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
20829 VAR_INIT_STATUS_INITIALIZED);
20830 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
20831 {
20832 rtx mem = XEXP (XEXP (arg, 0), 0);
20833 reg = mem_loc_descriptor (XEXP (mem, 0),
20834 get_address_mode (mem),
20835 GET_MODE (mem),
20836 VAR_INIT_STATUS_INITIALIZED);
20837 }
20838 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
20839 == DEBUG_PARAMETER_REF)
20840 {
20841 tree tdecl
20842 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
20843 tdie = lookup_decl_die (tdecl);
20844 if (tdie == NULL)
20845 continue;
20846 }
20847 else
20848 continue;
20849 if (reg == NULL
20850 && GET_CODE (XEXP (XEXP (arg, 0), 0))
20851 != DEBUG_PARAMETER_REF)
20852 continue;
20853 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
20854 VOIDmode,
20855 VAR_INIT_STATUS_INITIALIZED);
20856 if (val == NULL)
20857 continue;
20858 if (die == NULL)
20859 die = gen_call_site_die (decl, subr_die, ca_loc);
20860 cdie = new_die (DW_TAG_GNU_call_site_parameter, die,
20861 NULL_TREE);
20862 if (reg != NULL)
20863 add_AT_loc (cdie, DW_AT_location, reg);
20864 else if (tdie != NULL)
20865 add_AT_die_ref (cdie, DW_AT_abstract_origin, tdie);
20866 add_AT_loc (cdie, DW_AT_GNU_call_site_value, val);
20867 if (next_arg != XEXP (arg, 1))
20868 {
20869 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
20870 if (mode == VOIDmode)
20871 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
20872 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
20873 0), 1),
20874 mode, VOIDmode,
20875 VAR_INIT_STATUS_INITIALIZED);
20876 if (val != NULL)
20877 add_AT_loc (cdie, DW_AT_GNU_call_site_data_value, val);
20878 }
20879 }
20880 if (die == NULL
20881 && (ca_loc->symbol_ref || tloc))
20882 die = gen_call_site_die (decl, subr_die, ca_loc);
20883 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
20884 {
20885 dw_loc_descr_ref tval = NULL;
20886
20887 if (tloc != NULL_RTX)
20888 tval = mem_loc_descriptor (tloc,
20889 GET_MODE (tloc) == VOIDmode
20890 ? Pmode : GET_MODE (tloc),
20891 VOIDmode,
20892 VAR_INIT_STATUS_INITIALIZED);
20893 if (tval)
20894 add_AT_loc (die, DW_AT_GNU_call_site_target, tval);
20895 else if (tlocc != NULL_RTX)
20896 {
20897 tval = mem_loc_descriptor (tlocc,
20898 GET_MODE (tlocc) == VOIDmode
20899 ? Pmode : GET_MODE (tlocc),
20900 VOIDmode,
20901 VAR_INIT_STATUS_INITIALIZED);
20902 if (tval)
20903 add_AT_loc (die, DW_AT_GNU_call_site_target_clobbered,
20904 tval);
20905 }
20906 }
20907 if (die != NULL)
20908 {
20909 call_site_note_count++;
20910 if (ca_loc->tail_call_p)
20911 tail_call_site_note_count++;
20912 }
20913 }
20914 }
20915 call_arg_locations = NULL;
20916 call_arg_loc_last = NULL;
20917 if (tail_call_site_count >= 0
20918 && tail_call_site_count == tail_call_site_note_count
20919 && !dwarf_strict)
20920 {
20921 if (call_site_count >= 0
20922 && call_site_count == call_site_note_count)
20923 add_AT_flag (subr_die, DW_AT_GNU_all_call_sites, 1);
20924 else
20925 add_AT_flag (subr_die, DW_AT_GNU_all_tail_call_sites, 1);
20926 }
20927 call_site_count = -1;
20928 tail_call_site_count = -1;
20929 }
20930 }
20931
20932 /* Returns a hash value for X (which really is a die_struct). */
20933
20934 hashval_t
20935 block_die_hasher::hash (die_struct *d)
20936 {
20937 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
20938 }
20939
20940 /* Return nonzero if decl_id and die_parent of die_struct X is the same
20941 as decl_id and die_parent of die_struct Y. */
20942
20943 bool
20944 block_die_hasher::equal (die_struct *x, die_struct *y)
20945 {
20946 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
20947 }
20948
20949 /* Return TRUE if DECL, which may have been previously generated as
20950 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
20951 true if decl (or its origin) is either an extern declaration or a
20952 class/namespace scoped declaration.
20953
20954 The declare_in_namespace support causes us to get two DIEs for one
20955 variable, both of which are declarations. We want to avoid
20956 considering one to be a specification, so we must test for
20957 DECLARATION and DW_AT_declaration. */
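/* For example (C++): given `struct S { static int i; }; int S::i;', the
   in-class declaration yields a DIE carrying DW_AT_declaration, and the
   namespace-scope definition yields a second DIE that points back at it
   through DW_AT_specification.  */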
20958 static inline bool
20959 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
20960 {
20961 return (old_die && TREE_STATIC (decl) && !declaration
20962 && get_AT_flag (old_die, DW_AT_declaration) == 1);
20963 }
20964
20965 /* Return true if DECL is a local static. */
20966
20967 static inline bool
20968 local_function_static (tree decl)
20969 {
20970 gcc_assert (TREE_CODE (decl) == VAR_DECL);
20971 return TREE_STATIC (decl)
20972 && DECL_CONTEXT (decl)
20973 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
20974 }
20975
20976 /* Generate a DIE to represent a declared data object.
20977 Either DECL or ORIGIN must be non-null. */
20978
20979 static void
20980 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
20981 {
20982 HOST_WIDE_INT off = 0;
20983 tree com_decl;
20984 tree decl_or_origin = decl ? decl : origin;
20985 tree ultimate_origin;
20986 dw_die_ref var_die;
20987 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
20988 dw_die_ref origin_die = NULL;
20989 bool declaration = (DECL_EXTERNAL (decl_or_origin)
20990 || class_or_namespace_scope_p (context_die));
20991 bool specialization_p = false;
20992
20993 ultimate_origin = decl_ultimate_origin (decl_or_origin);
20994 if (decl || ultimate_origin)
20995 origin = ultimate_origin;
20996 com_decl = fortran_common (decl_or_origin, &off);
20997
20998 /* Symbol in common gets emitted as a child of the common block, in the form
20999 of a data member. */
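/* E.g. for Fortran `COMMON /blk/ i, r', `blk' yields a DW_TAG_common_block
   DIE and `i' and `r' yield DW_TAG_variable children of it, each located
   at an offset from the common block's address.  */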
21000 if (com_decl)
21001 {
21002 dw_die_ref com_die;
21003 dw_loc_list_ref loc;
21004 die_node com_die_arg;
21005
21006 var_die = lookup_decl_die (decl_or_origin);
21007 if (var_die)
21008 {
21009 if (get_AT (var_die, DW_AT_location) == NULL)
21010 {
21011 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
21012 if (loc)
21013 {
21014 if (off)
21015 {
21016 /* Optimize the common case. */
21017 if (single_element_loc_list_p (loc)
21018 && loc->expr->dw_loc_opc == DW_OP_addr
21019 && loc->expr->dw_loc_next == NULL
21020 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
21021 == SYMBOL_REF)
21022 {
21023 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
21024 loc->expr->dw_loc_oprnd1.v.val_addr
21025 = plus_constant (GET_MODE (x), x, off);
21026 }
21027 else
21028 loc_list_plus_const (loc, off);
21029 }
21030 add_AT_location_description (var_die, DW_AT_location, loc);
21031 remove_AT (var_die, DW_AT_declaration);
21032 }
21033 }
21034 return;
21035 }
21036
21037 if (common_block_die_table == NULL)
21038 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
21039
21040 com_die_arg.decl_id = DECL_UID (com_decl);
21041 com_die_arg.die_parent = context_die;
21042 com_die = common_block_die_table->find (&com_die_arg);
21043 loc = loc_list_from_tree (com_decl, 2, NULL);
21044 if (com_die == NULL)
21045 {
21046 const char *cnam
21047 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
21048 die_node **slot;
21049
21050 com_die = new_die (DW_TAG_common_block, context_die, decl);
21051 add_name_and_src_coords_attributes (com_die, com_decl);
21052 if (loc)
21053 {
21054 add_AT_location_description (com_die, DW_AT_location, loc);
21055 /* Avoid sharing the same loc descriptor between
21056 DW_TAG_common_block and DW_TAG_variable. */
21057 loc = loc_list_from_tree (com_decl, 2, NULL);
21058 }
21059 else if (DECL_EXTERNAL (decl_or_origin))
21060 add_AT_flag (com_die, DW_AT_declaration, 1);
21061 if (want_pubnames ())
21062 add_pubname_string (cnam, com_die); /* ??? needed? */
21063 com_die->decl_id = DECL_UID (com_decl);
21064 slot = common_block_die_table->find_slot (com_die, INSERT);
21065 *slot = com_die;
21066 }
21067 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
21068 {
21069 add_AT_location_description (com_die, DW_AT_location, loc);
21070 loc = loc_list_from_tree (com_decl, 2, NULL);
21071 remove_AT (com_die, DW_AT_declaration);
21072 }
21073 var_die = new_die (DW_TAG_variable, com_die, decl);
21074 add_name_and_src_coords_attributes (var_die, decl_or_origin);
21075 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
21076 decl_quals (decl_or_origin), false,
21077 context_die);
21078 add_AT_flag (var_die, DW_AT_external, 1);
21079 if (loc)
21080 {
21081 if (off)
21082 {
21083 /* Optimize the common case. */
21084 if (single_element_loc_list_p (loc)
21085 && loc->expr->dw_loc_opc == DW_OP_addr
21086 && loc->expr->dw_loc_next == NULL
21087 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
21088 {
21089 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
21090 loc->expr->dw_loc_oprnd1.v.val_addr
21091 = plus_constant (GET_MODE (x), x, off);
21092 }
21093 else
21094 loc_list_plus_const (loc, off);
21095 }
21096 add_AT_location_description (var_die, DW_AT_location, loc);
21097 }
21098 else if (DECL_EXTERNAL (decl_or_origin))
21099 add_AT_flag (var_die, DW_AT_declaration, 1);
21100 if (decl)
21101 equate_decl_number_to_die (decl, var_die);
21102 return;
21103 }
21104
21105 if (old_die)
21106 {
21107 if (declaration)
21108 {
21109 /* A declaration that has been previously dumped needs no
21110 further annotations, since it doesn't need location on
21111 the second pass. */
21112 return;
21113 }
21114 else if (decl_will_get_specification_p (old_die, decl, declaration)
21115 && !get_AT (old_die, DW_AT_specification))
21116 {
21117 /* Fall-thru so we can make a new variable die along with a
21118 DW_AT_specification. */
21119 }
21120 else if (origin && old_die->die_parent != context_die)
21121 {
21122 /* If we will be creating an inlined instance, we need a
21123 new DIE that will get annotated with
21124 DW_AT_abstract_origin. Clear things so we can get a
21125 new DIE. */
21126 gcc_assert (!DECL_ABSTRACT_P (decl));
21127 old_die = NULL;
21128 }
21129 else
21130 {
21131 /* If a DIE was dumped early, it still needs location info.
21132 Skip to where we fill the location bits. */
21133 var_die = old_die;
21134 goto gen_variable_die_location;
21135 }
21136 }
21137
21138 /* For static data members, the declaration in the class is supposed
21139 to have DW_TAG_member tag; the specification should still be
21140 DW_TAG_variable referencing the DW_TAG_member DIE. */
21141 if (declaration && class_scope_p (context_die))
21142 var_die = new_die (DW_TAG_member, context_die, decl);
21143 else
21144 var_die = new_die (DW_TAG_variable, context_die, decl);
21145
21146 if (origin != NULL)
21147 origin_die = add_abstract_origin_attribute (var_die, origin);
21148
21149 /* Loop unrolling can create multiple blocks that refer to the same
21150 static variable, so we must test for the DW_AT_declaration flag.
21151
21152 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
21153 copy decls and set the DECL_ABSTRACT_P flag on them instead of
21154 sharing them.
21155
21156 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
21157 else if (decl_will_get_specification_p (old_die, decl, declaration))
21158 {
21159 /* This is a definition of a C++ class level static. */
21160 add_AT_specification (var_die, old_die);
21161 specialization_p = true;
21162 if (DECL_NAME (decl))
21163 {
21164 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
21165 struct dwarf_file_data * file_index = lookup_filename (s.file);
21166
21167 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
21168 add_AT_file (var_die, DW_AT_decl_file, file_index);
21169
21170 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
21171 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
21172
21173 if (old_die->die_tag == DW_TAG_member)
21174 add_linkage_name (var_die, decl);
21175 }
21176 }
21177 else
21178 add_name_and_src_coords_attributes (var_die, decl);
21179
21180 if ((origin == NULL && !specialization_p)
21181 || (origin != NULL
21182 && !DECL_ABSTRACT_P (decl_or_origin)
21183 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
21184 decl_function_context
21185 (decl_or_origin))))
21186 {
21187 tree type = TREE_TYPE (decl_or_origin);
21188
21189 if (decl_by_reference_p (decl_or_origin))
21190 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
21191 context_die);
21192 else
21193 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
21194 context_die);
21195 }
21196
21197 if (origin == NULL && !specialization_p)
21198 {
21199 if (TREE_PUBLIC (decl))
21200 add_AT_flag (var_die, DW_AT_external, 1);
21201
21202 if (DECL_ARTIFICIAL (decl))
21203 add_AT_flag (var_die, DW_AT_artificial, 1);
21204
21205 add_accessibility_attribute (var_die, decl);
21206 }
21207
21208 if (declaration)
21209 add_AT_flag (var_die, DW_AT_declaration, 1);
21210
21211 if (decl && (DECL_ABSTRACT_P (decl)
21212 || !old_die || is_declaration_die (old_die)))
21213 equate_decl_number_to_die (decl, var_die);
21214
21215 gen_variable_die_location:
21216 if (! declaration
21217 && (! DECL_ABSTRACT_P (decl_or_origin)
21218 /* Local static vars are shared between all clones/inlines,
21219 so emit DW_AT_location on the abstract DIE if DECL_RTL is
21220 already set. */
21221 || (TREE_CODE (decl_or_origin) == VAR_DECL
21222 && TREE_STATIC (decl_or_origin)
21223 && DECL_RTL_SET_P (decl_or_origin)))
21224 /* When abstract origin already has DW_AT_location attribute, no need
21225 to add it again. */
21226 && (origin_die == NULL || get_AT (origin_die, DW_AT_location) == NULL))
21227 {
21228 if (early_dwarf)
21229 add_pubname (decl_or_origin, var_die);
21230 else
21231 add_location_or_const_value_attribute (var_die, decl_or_origin,
21232 decl == NULL);
21233 }
21234 else
21235 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
21236 }
21237
21238 /* Generate a DIE to represent a named constant. */
21239
21240 static void
21241 gen_const_die (tree decl, dw_die_ref context_die)
21242 {
21243 dw_die_ref const_die;
21244 tree type = TREE_TYPE (decl);
21245
21246 const_die = lookup_decl_die (decl);
21247 if (const_die)
21248 return;
21249
21250 const_die = new_die (DW_TAG_constant, context_die, decl);
21251 equate_decl_number_to_die (decl, const_die);
21252 add_name_and_src_coords_attributes (const_die, decl);
21253 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
21254 if (TREE_PUBLIC (decl))
21255 add_AT_flag (const_die, DW_AT_external, 1);
21256 if (DECL_ARTIFICIAL (decl))
21257 add_AT_flag (const_die, DW_AT_artificial, 1);
21258 tree_add_const_value_attribute_for_decl (const_die, decl);
21259 }
21260
21261 /* Generate a DIE to represent a label identifier. */
21262
21263 static void
21264 gen_label_die (tree decl, dw_die_ref context_die)
21265 {
21266 tree origin = decl_ultimate_origin (decl);
21267 dw_die_ref lbl_die = lookup_decl_die (decl);
21268 rtx insn;
21269 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21270
21271 if (!lbl_die)
21272 {
21273 lbl_die = new_die (DW_TAG_label, context_die, decl);
21274 equate_decl_number_to_die (decl, lbl_die);
21275
21276 if (origin != NULL)
21277 add_abstract_origin_attribute (lbl_die, origin);
21278 else
21279 add_name_and_src_coords_attributes (lbl_die, decl);
21280 }
21281
21282 if (DECL_ABSTRACT_P (decl))
21283 equate_decl_number_to_die (decl, lbl_die);
21284 else
21285 {
21286 insn = DECL_RTL_IF_SET (decl);
21287
21288 /* Deleted labels are programmer specified labels which have been
21289 eliminated because of various optimizations. We still emit them
21290 here so that it is possible to put breakpoints on them. */
21291 if (insn
21292 && (LABEL_P (insn)
21293 || ((NOTE_P (insn)
21294 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
21295 {
21296 /* When optimization is enabled (via -O) some parts of the compiler
21297 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
21298 represent source-level labels which were explicitly declared by
21299 the user. This really shouldn't be happening though, so catch
21300 it if it ever does happen. */
21301 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
21302
21303 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
21304 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
21305 }
21306 else if (insn
21307 && NOTE_P (insn)
21308 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
21309 && CODE_LABEL_NUMBER (insn) != -1)
21310 {
21311 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
21312 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
21313 }
21314 }
21315 }
21316
21317 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
21318 attributes to the DIE for a block STMT, to describe where the inlined
21319 function was called from. This is similar to add_src_coords_attributes. */
21320
21321 static inline void
21322 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
21323 {
21324 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
21325
21326 if (dwarf_version >= 3 || !dwarf_strict)
21327 {
21328 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
21329 add_AT_unsigned (die, DW_AT_call_line, s.line);
21330 }
21331 }
21332
21333
21334 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
21335 Add low_pc and high_pc attributes to the DIE for a block STMT. */
21336
21337 static inline void
21338 add_high_low_attributes (tree stmt, dw_die_ref die)
21339 {
21340 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21341
21342 if (BLOCK_FRAGMENT_CHAIN (stmt)
21343 && (dwarf_version >= 3 || !dwarf_strict))
21344 {
21345 tree chain, superblock = NULL_TREE;
21346 dw_die_ref pdie;
21347 dw_attr_node *attr = NULL;
21348
21349 if (inlined_function_outer_scope_p (stmt))
21350 {
21351 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
21352 BLOCK_NUMBER (stmt));
21353 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21354 }
21355
21356 /* Optimize duplicate .debug_ranges lists or even tails of
21357 lists. If this BLOCK has the same ranges as its supercontext,
21358 look up the DW_AT_ranges attribute in the supercontext (and
21359 recursively so), verify that ranges_table contains the
21360 right values and reuse it instead of adding a new .debug_ranges entry. */
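/* Note that a DW_AT_ranges value is a byte offset into the .debug_ranges
   section; each range entry is an address pair of 2 * DWARF2_ADDR_SIZE
   bytes, which is why the offset is divided by 2 * DWARF2_ADDR_SIZE before
   indexing ranges_table below.  */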
21361 for (chain = stmt, pdie = die;
21362 BLOCK_SAME_RANGE (chain);
21363 chain = BLOCK_SUPERCONTEXT (chain))
21364 {
21365 dw_attr_node *new_attr;
21366
21367 pdie = pdie->die_parent;
21368 if (pdie == NULL)
21369 break;
21370 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
21371 break;
21372 new_attr = get_AT (pdie, DW_AT_ranges);
21373 if (new_attr == NULL
21374 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
21375 break;
21376 attr = new_attr;
21377 superblock = BLOCK_SUPERCONTEXT (chain);
21378 }
21379 if (attr != NULL
21380 && (ranges_table[attr->dw_attr_val.v.val_offset
21381 / 2 / DWARF2_ADDR_SIZE].num
21382 == BLOCK_NUMBER (superblock))
21383 && BLOCK_FRAGMENT_CHAIN (superblock))
21384 {
21385 unsigned long off = attr->dw_attr_val.v.val_offset
21386 / 2 / DWARF2_ADDR_SIZE;
21387 unsigned long supercnt = 0, thiscnt = 0;
21388 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
21389 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
21390 {
21391 ++supercnt;
21392 gcc_checking_assert (ranges_table[off + supercnt].num
21393 == BLOCK_NUMBER (chain));
21394 }
21395 gcc_checking_assert (ranges_table[off + supercnt + 1].num == 0);
21396 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
21397 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
21398 ++thiscnt;
21399 gcc_assert (supercnt >= thiscnt);
21400 add_AT_range_list (die, DW_AT_ranges,
21401 ((off + supercnt - thiscnt)
21402 * 2 * DWARF2_ADDR_SIZE),
21403 false);
21404 return;
21405 }
21406
21407 add_AT_range_list (die, DW_AT_ranges, add_ranges (stmt), false);
21408
21409 chain = BLOCK_FRAGMENT_CHAIN (stmt);
21410 do
21411 {
21412 add_ranges (chain);
21413 chain = BLOCK_FRAGMENT_CHAIN (chain);
21414 }
21415 while (chain);
21416 add_ranges (NULL);
21417 }
21418 else
21419 {
21420 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
21421 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
21422 BLOCK_NUMBER (stmt));
21423 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
21424 BLOCK_NUMBER (stmt));
21425 add_AT_low_high_pc (die, label, label_high, false);
21426 }
21427 }
21428
21429 /* Generate a DIE for a lexical block. */
21430
21431 static void
21432 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
21433 {
21434 dw_die_ref old_die = BLOCK_DIE (stmt);
21435 dw_die_ref stmt_die = NULL;
21436 if (!old_die)
21437 {
21438 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
21439 BLOCK_DIE (stmt) = stmt_die;
21440 }
21441
21442 if (BLOCK_ABSTRACT (stmt))
21443 {
21444 if (old_die)
21445 {
21446 /* This must have been generated early and it won't even
21447 need location information, since it belongs to a function
21448 marked DW_AT_inline. */
21449 if (flag_checking)
21450 for (dw_die_ref c = context_die; c; c = c->die_parent)
21451 if (c->die_tag == DW_TAG_inlined_subroutine
21452 || c->die_tag == DW_TAG_subprogram)
21453 {
21454 gcc_assert (get_AT (c, DW_AT_inline));
21455 break;
21456 }
21457 return;
21458 }
21459 }
21460 else if (BLOCK_ABSTRACT_ORIGIN (stmt))
21461 {
21462 /* If this is an inlined instance, create a new lexical die for
21463 anything below to attach DW_AT_abstract_origin to. */
21464 if (old_die)
21465 {
21466 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
21467 BLOCK_DIE (stmt) = stmt_die;
21468 old_die = NULL;
21469 }
21470 }
21471
21472 if (old_die)
21473 stmt_die = old_die;
21474
21475 /* A non-abstract block whose blocks have already been reordered
21476 should have the instruction range for this block. If so, set the
21477 high/low attributes. */
21478 if (!early_dwarf && !BLOCK_ABSTRACT (stmt) && TREE_ASM_WRITTEN (stmt))
21479 {
21480 gcc_assert (stmt_die);
21481 add_high_low_attributes (stmt, stmt_die);
21482 }
21483
21484 decls_for_scope (stmt, stmt_die);
21485 }
21486
21487 /* Generate a DIE for an inlined subprogram. */
21488
21489 static void
21490 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
21491 {
21492 tree decl;
21493
21494 /* The instance of the function that is effectively being inlined shall not
21495 be abstract. */
21496 gcc_assert (! BLOCK_ABSTRACT (stmt));
21497
21498 decl = block_ultimate_origin (stmt);
21499
21500 /* Make sure any inlined functions are known to be inlineable. */
21501 gcc_checking_assert (DECL_ABSTRACT_P (decl)
21502 || cgraph_function_possibly_inlined_p (decl));
21503
21504 /* Emit info for the abstract instance first, if we haven't yet. We
21505 must emit this even if the block is abstract, otherwise when we
21506 emit the block below (or elsewhere), we may end up trying to emit
21507 a die whose origin die hasn't been emitted, and crashing. */
21508 dwarf2out_abstract_function (decl);
21509
21510 if (! BLOCK_ABSTRACT (stmt))
21511 {
21512 dw_die_ref subr_die
21513 = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
21514
21515 if (call_arg_locations)
21516 BLOCK_DIE (stmt) = subr_die;
21517 add_abstract_origin_attribute (subr_die, decl);
21518 if (TREE_ASM_WRITTEN (stmt))
21519 add_high_low_attributes (stmt, subr_die);
21520 add_call_src_coords_attributes (stmt, subr_die);
21521
21522 decls_for_scope (stmt, subr_die);
21523 }
21524 }
21525
21526 /* Generate a DIE for a field in a record or structure. CTX is required: see
21527 the comment for VLR_CONTEXT. */
21528
21529 static void
21530 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
21531 {
21532 dw_die_ref decl_die;
21533
21534 if (TREE_TYPE (decl) == error_mark_node)
21535 return;
21536
21537 decl_die = new_die (DW_TAG_member, context_die, decl);
21538 add_name_and_src_coords_attributes (decl_die, decl);
21539 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
21540 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
21541 context_die);
21542
21543 if (DECL_BIT_FIELD_TYPE (decl))
21544 {
21545 add_byte_size_attribute (decl_die, decl);
21546 add_bit_size_attribute (decl_die, decl);
21547 add_bit_offset_attribute (decl_die, decl, ctx);
21548 }
21549
21550 /* If we have a variant part offset, then we are supposed to process a member
21551 of a QUAL_UNION_TYPE, which is how we represent variant parts in
21552 trees. */
21553 gcc_assert (ctx->variant_part_offset == NULL_TREE
21554 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
21555 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
21556 add_data_member_location_attribute (decl_die, decl, ctx);
21557
21558 if (DECL_ARTIFICIAL (decl))
21559 add_AT_flag (decl_die, DW_AT_artificial, 1);
21560
21561 add_accessibility_attribute (decl_die, decl);
21562
21563 /* Equate decl number to die, so that we can look up this decl later on. */
21564 equate_decl_number_to_die (decl, decl_die);
21565 }
21566
21567 #if 0
21568 /* Don't generate either pointer_type DIEs or reference_type DIEs here.
21569 Use modified_type_die instead.
21570 We keep this code here just in case these types of DIEs may be needed to
21571 represent certain things in other languages (e.g. Pascal) someday. */
21572
21573 static void
21574 gen_pointer_type_die (tree type, dw_die_ref context_die)
21575 {
21576 dw_die_ref ptr_die
21577 = new_die (DW_TAG_pointer_type, scope_die_for (type, context_die), type);
21578
21579 equate_type_number_to_die (type, ptr_die);
21580 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
21581 context_die);
21582 add_AT_unsigned (ptr_die, DW_AT_byte_size, PTR_SIZE);
21583 }
21584
21585 /* Don't generate either pointer_type DIEs or reference_type DIEs here.
21586 Use modified_type_die instead.
21587 We keep this code here just in case these types of DIEs may be needed to
21588 represent certain things in other languages (e.g. Pascal) someday. */
21589
21590 static void
21591 gen_reference_type_die (tree type, dw_die_ref context_die)
21592 {
21593 dw_die_ref ref_die, scope_die = scope_die_for (type, context_die);
21594
21595 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
21596 ref_die = new_die (DW_TAG_rvalue_reference_type, scope_die, type);
21597 else
21598 ref_die = new_die (DW_TAG_reference_type, scope_die, type);
21599
21600 equate_type_number_to_die (type, ref_die);
21601 add_type_attribute (ref_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
21602 context_die);
21603 add_AT_unsigned (ref_die, DW_AT_byte_size, PTR_SIZE);
21604 }
21605 #endif
21606
21607 /* Generate a DIE for a pointer to a member type. */
21608
21609 static void
21610 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
21611 {
21612 dw_die_ref ptr_die
21613 = new_die (DW_TAG_ptr_to_member_type,
21614 scope_die_for (type, context_die), type);
21615
21616 equate_type_number_to_die (type, ptr_die);
21617 add_AT_die_ref (ptr_die, DW_AT_containing_type,
21618 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
21619 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
21620 context_die);
21621 }
21622
21623 static char *producer_string;
21624
21625 /* Return a heap allocated producer string including command line options
21626 if -grecord-gcc-switches. */
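/* An illustrative result: "GNU C11 6.1.0 -march=x86-64 -g -O2" (language
   and version first, then the recorded options, if any).  */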
21627
21628 static char *
21629 gen_producer_string (void)
21630 {
21631 size_t j;
21632 auto_vec<const char *> switches;
21633 const char *language_string = lang_hooks.name;
21634 char *producer, *tail;
21635 const char *p;
21636 size_t len = dwarf_record_gcc_switches ? 0 : 3;
21637 size_t plen = strlen (language_string) + 1 + strlen (version_string);
21638
21639 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
21640 switch (save_decoded_options[j].opt_index)
21641 {
21642 case OPT_o:
21643 case OPT_d:
21644 case OPT_dumpbase:
21645 case OPT_dumpdir:
21646 case OPT_auxbase:
21647 case OPT_auxbase_strip:
21648 case OPT_quiet:
21649 case OPT_version:
21650 case OPT_v:
21651 case OPT_w:
21652 case OPT_L:
21653 case OPT_D:
21654 case OPT_I:
21655 case OPT_U:
21656 case OPT_SPECIAL_unknown:
21657 case OPT_SPECIAL_ignore:
21658 case OPT_SPECIAL_program_name:
21659 case OPT_SPECIAL_input_file:
21660 case OPT_grecord_gcc_switches:
21661 case OPT_gno_record_gcc_switches:
21662 case OPT__output_pch_:
21663 case OPT_fdiagnostics_show_location_:
21664 case OPT_fdiagnostics_show_option:
21665 case OPT_fdiagnostics_show_caret:
21666 case OPT_fdiagnostics_color_:
21667 case OPT_fverbose_asm:
21668 case OPT____:
21669 case OPT__sysroot_:
21670 case OPT_nostdinc:
21671 case OPT_nostdinc__:
21672 case OPT_fpreprocessed:
21673 case OPT_fltrans_output_list_:
21674 case OPT_fresolution_:
21675 case OPT_fdebug_prefix_map_:
21676 /* Ignore these. */
21677 continue;
21678 default:
21679 if (cl_options[save_decoded_options[j].opt_index].flags
21680 & CL_NO_DWARF_RECORD)
21681 continue;
21682 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
21683 == '-');
21684 switch (save_decoded_options[j].canonical_option[0][1])
21685 {
21686 case 'M':
21687 case 'i':
21688 case 'W':
21689 continue;
21690 case 'f':
21691 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
21692 "dump", 4) == 0)
21693 continue;
21694 break;
21695 default:
21696 break;
21697 }
21698 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
21699 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
21700 break;
21701 }
21702
21703 producer = XNEWVEC (char, plen + 1 + len + 1);
21704 tail = producer;
21705 sprintf (tail, "%s %s", language_string, version_string);
21706 tail += plen;
21707
21708 FOR_EACH_VEC_ELT (switches, j, p)
21709 {
21710 len = strlen (p);
21711 *tail = ' ';
21712 memcpy (tail + 1, p, len);
21713 tail += len + 1;
21714 }
21715
21716 *tail = '\0';
21717 return producer;
21718 }
21719
21720 /* Given a C and/or C++ language/version string return the "highest".
21721 C++ is assumed to be "higher" than C in this case. Used for merging
21722 LTO translation unit languages. */
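/* E.g. highest_c_language ("GNU C11", "GNU C++98") returns "GNU C++98".  */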
21723 static const char *
21724 highest_c_language (const char *lang1, const char *lang2)
21725 {
21726 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
21727 return "GNU C++14";
21728 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
21729 return "GNU C++11";
21730 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
21731 return "GNU C++98";
21732
21733 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
21734 return "GNU C11";
21735 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
21736 return "GNU C99";
21737 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
21738 return "GNU C89";
21739
21740 gcc_unreachable ();
21741 }
21742
21743
21744 /* Generate the DIE for the compilation unit. */
21745
21746 static dw_die_ref
21747 gen_compile_unit_die (const char *filename)
21748 {
21749 dw_die_ref die;
21750 const char *language_string = lang_hooks.name;
21751 int language;
21752
21753 die = new_die (DW_TAG_compile_unit, NULL, NULL);
21754
21755 if (filename)
21756 {
21757 add_name_attribute (die, filename);
21758 /* Don't add cwd for <built-in>. */
21759 if (!IS_ABSOLUTE_PATH (filename) && filename[0] != '<')
21760 add_comp_dir_attribute (die);
21761 }
21762
21763 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
21764
21765 /* If our producer is LTO try to figure out a common language to use
21766 from the global list of translation units. */
21767 if (strcmp (language_string, "GNU GIMPLE") == 0)
21768 {
21769 unsigned i;
21770 tree t;
21771 const char *common_lang = NULL;
21772
21773 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
21774 {
21775 if (!TRANSLATION_UNIT_LANGUAGE (t))
21776 continue;
21777 if (!common_lang)
21778 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
21779 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
21780 ;
21781 else if (strncmp (common_lang, "GNU C", 5) == 0
21782 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
21783 /* Mixing C and C++ is ok, use C++ in that case. */
21784 common_lang = highest_c_language (common_lang,
21785 TRANSLATION_UNIT_LANGUAGE (t));
21786 else
21787 {
21788 /* Fall back to C. */
21789 common_lang = NULL;
21790 break;
21791 }
21792 }
21793
21794 if (common_lang)
21795 language_string = common_lang;
21796 }
21797
21798 language = DW_LANG_C;
21799 if (strncmp (language_string, "GNU C", 5) == 0
21800 && ISDIGIT (language_string[5]))
21801 {
21802 language = DW_LANG_C89;
21803 if (dwarf_version >= 3 || !dwarf_strict)
21804 {
21805 if (strcmp (language_string, "GNU C89") != 0)
21806 language = DW_LANG_C99;
21807
21808 if (dwarf_version >= 5 /* || !dwarf_strict */)
21809 if (strcmp (language_string, "GNU C11") == 0)
21810 language = DW_LANG_C11;
21811 }
21812 }
21813 else if (strncmp (language_string, "GNU C++", 7) == 0)
21814 {
21815 language = DW_LANG_C_plus_plus;
21816 if (dwarf_version >= 5 /* || !dwarf_strict */)
21817 {
21818 if (strcmp (language_string, "GNU C++11") == 0)
21819 language = DW_LANG_C_plus_plus_11;
21820 else if (strcmp (language_string, "GNU C++14") == 0)
21821 language = DW_LANG_C_plus_plus_14;
21822 }
21823 }
21824 else if (strcmp (language_string, "GNU F77") == 0)
21825 language = DW_LANG_Fortran77;
21826 else if (strcmp (language_string, "GNU Pascal") == 0)
21827 language = DW_LANG_Pascal83;
21828 else if (dwarf_version >= 3 || !dwarf_strict)
21829 {
21830 if (strcmp (language_string, "GNU Ada") == 0)
21831 language = DW_LANG_Ada95;
21832 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
21833 {
21834 language = DW_LANG_Fortran95;
21835 if (dwarf_version >= 5 /* || !dwarf_strict */)
21836 {
21837 if (strcmp (language_string, "GNU Fortran2003") == 0)
21838 language = DW_LANG_Fortran03;
21839 else if (strcmp (language_string, "GNU Fortran2008") == 0)
21840 language = DW_LANG_Fortran08;
21841 }
21842 }
21843 else if (strcmp (language_string, "GNU Java") == 0)
21844 language = DW_LANG_Java;
21845 else if (strcmp (language_string, "GNU Objective-C") == 0)
21846 language = DW_LANG_ObjC;
21847 else if (strcmp (language_string, "GNU Objective-C++") == 0)
21848 language = DW_LANG_ObjC_plus_plus;
21849 else if (dwarf_version >= 5 || !dwarf_strict)
21850 {
21851 if (strcmp (language_string, "GNU Go") == 0)
21852 language = DW_LANG_Go;
21853 }
21854 }
21855 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
21856 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
21857 language = DW_LANG_Fortran90;
21858
21859 add_AT_unsigned (die, DW_AT_language, language);
21860
21861 switch (language)
21862 {
21863 case DW_LANG_Fortran77:
21864 case DW_LANG_Fortran90:
21865 case DW_LANG_Fortran95:
21866 case DW_LANG_Fortran03:
21867 case DW_LANG_Fortran08:
21868 /* Fortran has case insensitive identifiers and the front-end
21869 lowercases everything. */
21870 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
21871 break;
21872 default:
21873 /* The default DW_ID_case_sensitive doesn't need to be specified. */
21874 break;
21875 }
21876 return die;
21877 }
21878
21879 /* Generate the DIE for a base class. */
21880
21881 static void
21882 gen_inheritance_die (tree binfo, tree access, tree type,
21883 dw_die_ref context_die)
21884 {
21885 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
21886 struct vlr_context ctx = { type, NULL };
21887
21888 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
21889 context_die);
21890 add_data_member_location_attribute (die, binfo, &ctx);
21891
21892 if (BINFO_VIRTUAL_P (binfo))
21893 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
21894
21895 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
21896 children, otherwise the default is DW_ACCESS_public. In DWARF2
21897 the default has always been DW_ACCESS_private. */
21898 if (access == access_public_node)
21899 {
21900 if (dwarf_version == 2
21901 || context_die->die_tag == DW_TAG_class_type)
21902 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
21903 }
21904 else if (access == access_protected_node)
21905 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
21906 else if (dwarf_version > 2
21907 && context_die->die_tag != DW_TAG_class_type)
21908 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
21909 }
21910
21911 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
21912 structure. */
21913 static bool
21914 is_variant_part (tree decl)
21915 {
21916 return (TREE_CODE (decl) == FIELD_DECL
21917 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
21918 }
21919
21920 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
21921 return the FIELD_DECL. Return NULL_TREE otherwise. */
21922
21923 static tree
21924 analyze_discr_in_predicate (tree operand, tree struct_type)
21925 {
21926 bool continue_stripping = true;
21927 while (continue_stripping)
21928 switch (TREE_CODE (operand))
21929 {
21930 CASE_CONVERT:
21931 operand = TREE_OPERAND (operand, 0);
21932 break;
21933 default:
21934 continue_stripping = false;
21935 break;
21936 }
21937
21938 /* Match field access to members of struct_type only. */
21939 if (TREE_CODE (operand) == COMPONENT_REF
21940 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
21941 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
21942 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
21943 return TREE_OPERAND (operand, 1);
21944 else
21945 return NULL_TREE;
21946 }
21947
21948 /* Check that SRC is a constant integer that can be represented as a native
21949 integer constant (either signed or unsigned). If so, store it into DEST and
21950 return true. Return false otherwise. */
21951
21952 static bool
21953 get_discr_value (tree src, dw_discr_value *dest)
21954 {
21955 bool is_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
21956
21957 if (TREE_CODE (src) != INTEGER_CST
21958 || !(is_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
21959 return false;
21960
21961 dest->pos = is_unsigned;
21962 if (is_unsigned)
21963 dest->v.uval = tree_to_uhwi (src);
21964 else
21965 dest->v.sval = tree_to_shwi (src);
21966
21967 return true;
21968 }
21969
21970 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
21971 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
21972 store NULL_TREE in DISCR_DECL. Otherwise:
21973
21974 - store the discriminant field in STRUCT_TYPE that controls the variant
21975 part to *DISCR_DECL
21976
21977 - put in *DISCR_LISTS_P an array where for each variant, the item
21978 represents the corresponding matching list of discriminant values.
21979
21980 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
21981 the above array.
21982
21983 Note that when the array is allocated (i.e. when the analysis is
21984 successful), it is up to the caller to free the array. */
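/* An illustrative Ada input (not from the sources):

      type Rec (Disc : Natural) is record
         case Disc is
            when 1 .. 4 => A : Integer;
            when 5 | 7  => B : Boolean;
            when others => null;
         end case;
      end record;

   Here *DISCR_DECL is set to the field for Disc, *DISCR_LISTS_LENGTH to
   the number of variants, and *DISCR_LISTS_P gets one discriminant-value
   list per variant: a range (1 .. 4), two single values (5 and 7), and an
   empty list for the default `others' variant.  */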
21985
21986 static void
21987 analyze_variants_discr (tree variant_part_decl,
21988 tree struct_type,
21989 tree *discr_decl,
21990 dw_discr_list_ref **discr_lists_p,
21991 unsigned *discr_lists_length)
21992 {
21993 tree variant_part_type = TREE_TYPE (variant_part_decl);
21994 tree variant;
21995 dw_discr_list_ref *discr_lists;
21996 unsigned i;
21997
21998 /* Compute how many variants there are in this variant part. */
21999 *discr_lists_length = 0;
22000 for (variant = TYPE_FIELDS (variant_part_type);
22001 variant != NULL_TREE;
22002 variant = DECL_CHAIN (variant))
22003 ++*discr_lists_length;
22004
22005 *discr_decl = NULL_TREE;
22006 *discr_lists_p
22007 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
22008 sizeof (**discr_lists_p));
22009 discr_lists = *discr_lists_p;
22010
22011 /* And then analyze all variants to extract discriminant information for all
22012 of them. This analysis is conservative: as soon as we detect something we
22013 do not support, abort everything and pretend we found nothing. */
22014 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
22015 variant != NULL_TREE;
22016 variant = DECL_CHAIN (variant), ++i)
22017 {
22018 tree match_expr = DECL_QUALIFIER (variant);
22019
22020 /* Now, try to analyze the predicate and deduce a discriminant for
22021 it. */
22022 if (match_expr == boolean_true_node)
22023 /* Typically happens for the default variant: it matches all cases that
22024 previous variants rejected. Don't output any matching value for
22025 this one. */
22026 continue;
22027
22028 /* The following loop tries to iterate over each discriminant
22029 possibility: single values or ranges. */
22030 while (match_expr != NULL_TREE)
22031 {
22032 tree next_round_match_expr;
22033 tree candidate_discr = NULL_TREE;
22034 dw_discr_list_ref new_node = NULL;
22035
22036 /* Possibilities are matched one after the other by nested
22037 TRUTH_ORIF_EXPR expressions. Process the current possibility and
22038 continue with the rest at next iteration. */
22039 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
22040 {
22041 next_round_match_expr = TREE_OPERAND (match_expr, 0);
22042 match_expr = TREE_OPERAND (match_expr, 1);
22043 }
22044 else
22045 next_round_match_expr = NULL_TREE;
22046
22047 if (match_expr == boolean_false_node)
22048 /* This sub-expression matches nothing: just wait for the next
22049 one. */
22050 ;
22051
22052 else if (TREE_CODE (match_expr) == EQ_EXPR)
22053 {
22054 /* We are matching: <discr_field> == <integer_cst>
22055 This sub-expression matches a single value. */
22056 tree integer_cst = TREE_OPERAND (match_expr, 1);
22057
22058 candidate_discr
22059 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
22060 struct_type);
22061
22062 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
22063 if (!get_discr_value (integer_cst,
22064 &new_node->dw_discr_lower_bound))
22065 goto abort;
22066 new_node->dw_discr_range = false;
22067 }
22068
22069 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
22070 {
22071 /* We are matching:
22072 <discr_field> > <integer_cst>
22073 && <discr_field> < <integer_cst>.
22074 This sub-expression matches the range of values between the
22075 two matched integer constants. Note that comparisons can be
22076 inclusive or exclusive. */
22077 tree candidate_discr_1, candidate_discr_2;
22078 tree lower_cst, upper_cst;
22079 bool lower_cst_included, upper_cst_included;
22080 tree lower_op = TREE_OPERAND (match_expr, 0);
22081 tree upper_op = TREE_OPERAND (match_expr, 1);
22082
22083 /* When the comparison is exclusive, the integer constant is not
22084 the discriminant range bound we are looking for: we will have
22085 to increment or decrement it. */
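/* E.g. a predicate like DISCR > 3 && DISCR <= 7 describes the inclusive
   range 4 .. 7, so the lower constant is bumped by one below.  */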
22086 if (TREE_CODE (lower_op) == GE_EXPR)
22087 lower_cst_included = true;
22088 else if (TREE_CODE (lower_op) == GT_EXPR)
22089 lower_cst_included = false;
22090 else
22091 goto abort;
22092
22093 if (TREE_CODE (upper_op) == LE_EXPR)
22094 upper_cst_included = true;
22095 else if (TREE_CODE (upper_op) == LT_EXPR)
22096 upper_cst_included = false;
22097 else
22098 goto abort;
22099
22100 /* Extract the discriminant from the first operand and check it
22101 is consistent with the same analysis in the second
22102 operand. */
22103 candidate_discr_1
22104 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
22105 struct_type);
22106 candidate_discr_2
22107 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
22108 struct_type);
22109 if (candidate_discr_1 == candidate_discr_2)
22110 candidate_discr = candidate_discr_1;
22111 else
22112 goto abort;
22113
22114 /* Extract bounds from both. */
22115 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
22116 lower_cst = TREE_OPERAND (lower_op, 1);
22117 upper_cst = TREE_OPERAND (upper_op, 1);
22118
22119 if (!lower_cst_included)
22120 lower_cst
22121 = fold (build2 (PLUS_EXPR, TREE_TYPE (lower_cst),
22122 lower_cst,
22123 build_int_cst (TREE_TYPE (lower_cst), 1)));
22124 if (!upper_cst_included)
22125 upper_cst
22126 = fold (build2 (MINUS_EXPR, TREE_TYPE (upper_cst),
22127 upper_cst,
22128 build_int_cst (TREE_TYPE (upper_cst), 1)));
22129
22130 if (!get_discr_value (lower_cst,
22131 &new_node->dw_discr_lower_bound)
22132 || !get_discr_value (upper_cst,
22133 &new_node->dw_discr_upper_bound))
22134 goto abort;
22135
22136 new_node->dw_discr_range = true;
22137 }
22138
22139 else
22140 /* Unsupported sub-expression: we cannot determine the set of
22141 matching discriminant values. Abort everything. */
22142 goto abort;
22143
22144 /* If the discriminant info is not consistent with what we saw so
22145 far, consider the analysis failed and abort everything. */
22146 if (candidate_discr == NULL_TREE
22147 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
22148 goto abort;
22149 else
22150 *discr_decl = candidate_discr;
22151
22152 if (new_node != NULL)
22153 {
22154 new_node->dw_discr_next = discr_lists[i];
22155 discr_lists[i] = new_node;
22156 }
22157 match_expr = next_round_match_expr;
22158 }
22159 }
22160
22161 /* If we reach this point, we could match everything we were interested
22162 in. */
22163 return;
22164
22165 abort:
22166 /* Clean all data structure and return no result. */
22167 free (*discr_lists_p);
22168 *discr_lists_p = NULL;
22169 *discr_decl = NULL_TREE;
22170 }
22171
22172 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
22173 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
22174 under CONTEXT_DIE.
22175
22176 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
22177 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
22178 this type, which are record types, represent the available variants and each
22179 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
22180 values are inferred from these attributes.
22181
22182 In trees, the offsets for the fields inside these sub-records are relative
22183 to the variant part itself, whereas the corresponding DIEs should have
22184 offset attributes that are relative to the embedding record base address.
22185 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
22186 must be an expression that computes the offset of the variant part to
22187 describe in DWARF. */
22188
22189 static void
22190 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
22191 dw_die_ref context_die)
22192 {
22193 const tree variant_part_type = TREE_TYPE (variant_part_decl);
22194 tree variant_part_offset = vlr_ctx->variant_part_offset;
22195 struct loc_descr_context ctx = {
22196 vlr_ctx->struct_type, /* context_type */
22197 NULL_TREE, /* base_decl */
22198 NULL /* dpi */
22199 };
22200
22201 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
22202 NULL_TREE if there is no such field. */
22203 tree discr_decl = NULL_TREE;
22204 dw_discr_list_ref *discr_lists;
22205 unsigned discr_lists_length = 0;
22206 unsigned i;
22207
22208 dw_die_ref dwarf_proc_die = NULL;
22209 dw_die_ref variant_part_die
22210 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
22211
22212 equate_decl_number_to_die (variant_part_decl, variant_part_die);
22213
22214 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
22215 &discr_decl, &discr_lists, &discr_lists_length);
22216
22217 if (discr_decl != NULL_TREE)
22218 {
22219 dw_die_ref discr_die = lookup_decl_die (discr_decl);
22220
22221 if (discr_die)
22222 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
22223 else
22224 /* We have no DIE for the discriminant, so just discard all
22225 discriminant information in the output. */
22226 discr_decl = NULL_TREE;
22227 }
22228
22229 /* If the offset for this variant part is more complex than a constant,
22230 create a DWARF procedure for it so that we will not have to generate DWARF
22231 expressions for it for each member. */
22232 if (TREE_CODE (variant_part_offset) != INTEGER_CST
22233 && (dwarf_version >= 3 || !dwarf_strict))
22234 {
22235 const tree dwarf_proc_fndecl
22236 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
22237 build_function_type (TREE_TYPE (variant_part_offset),
22238 NULL_TREE));
22239 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
22240 const dw_loc_descr_ref dwarf_proc_body
22241 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
22242
22243 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
22244 dwarf_proc_fndecl, context_die);
22245 if (dwarf_proc_die != NULL)
22246 variant_part_offset = dwarf_proc_call;
22247 }
22248
22249 /* Output DIEs for all variants. */
22250 i = 0;
22251 for (tree variant = TYPE_FIELDS (variant_part_type);
22252 variant != NULL_TREE;
22253 variant = DECL_CHAIN (variant), ++i)
22254 {
22255 tree variant_type = TREE_TYPE (variant);
22256 dw_die_ref variant_die;
22257
22258 /* All variants (i.e. members of a variant part) are supposed to be
22259 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
22260 under these records. */
22261 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
22262
22263 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
22264 equate_decl_number_to_die (variant, variant_die);
22265
22266 /* Output discriminant values this variant matches, if any. */
22267 if (discr_decl == NULL || discr_lists[i] == NULL)
22268 /* If we have no discriminant information, or if this variant has no
22269 matching values (it is probably the default variant), then as the
22270 standard says, don't output any discriminant value/list attribute. */
22271 ;
22272 else if (discr_lists[i]->dw_discr_next == NULL
22273 && !discr_lists[i]->dw_discr_range)
22274 /* If there is only one accepted value, don't bother outputting a
22275 list. */
22276 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
22277 else
22278 add_discr_list (variant_die, discr_lists[i]);
22279
22280 for (tree member = TYPE_FIELDS (variant_type);
22281 member != NULL_TREE;
22282 member = DECL_CHAIN (member))
22283 {
22284 struct vlr_context vlr_sub_ctx = {
22285 vlr_ctx->struct_type, /* struct_type */
22286 NULL /* variant_part_offset */
22287 };
22288 if (is_variant_part (member))
22289 {
22290 /* All offsets for fields inside variant parts are relative to
22291 the top-level embedding RECORD_TYPE's base address. On the
22292 other hand, offsets in GCC's types are relative to the
22293 nested-most variant part. So we have to sum offsets each time
22294 we recurse. */
22295
22296 vlr_sub_ctx.variant_part_offset
22297 = fold (build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
22298 variant_part_offset, byte_position (member)));
22299 gen_variant_part (member, &vlr_sub_ctx, variant_die);
22300 }
22301 else
22302 {
22303 vlr_sub_ctx.variant_part_offset = variant_part_offset;
22304 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
22305 }
22306 }
22307 }
22308
22309 free (discr_lists);
22310 }
22311
22312 /* Generate a DIE for a class member. */
22313
22314 static void
22315 gen_member_die (tree type, dw_die_ref context_die)
22316 {
22317 tree member;
22318 tree binfo = TYPE_BINFO (type);
22319 dw_die_ref child;
22320
22321 /* If this is not an incomplete type, output descriptions of each of its
22322 members. Note that as we output the DIEs necessary to represent the
22323 members of this record or union type, we will also be trying to output
22324 DIEs to represent the *types* of those members. However the `type'
22325 function (above) will specifically avoid generating type DIEs for member
22326 types *within* the list of member DIEs for this (containing) type except
22327 for those types (of members) which are explicitly marked as also being
22328 members of this (containing) type themselves. The g++ front- end can
22329 force any given type to be treated as a member of some other (containing)
22330 type by setting the TYPE_CONTEXT of the given (member) type to point to
22331 the TREE node representing the appropriate (containing) type. */
22332
22333 /* First output info about the base classes. */
22334 if (binfo)
22335 {
22336 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
22337 int i;
22338 tree base;
22339
22340 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
22341 gen_inheritance_die (base,
22342 (accesses ? (*accesses)[i] : access_public_node),
22343 type,
22344 context_die);
22345 }
22346
22347 /* Now output info about the data members and type members. */
22348 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
22349 {
22350 struct vlr_context vlr_ctx = { type, NULL_TREE };
22351
22352 /* If we thought we were generating minimal debug info for TYPE
22353 and then changed our minds, some of the member declarations
22354 may have already been defined. Don't define them again, but
22355 do put them in the right order. */
22356
22357 child = lookup_decl_die (member);
22358 if (child)
22359 splice_child_die (context_die, child);
22360
22361 /* Do not generate standard DWARF for variant parts if we are generating
22362 the corresponding GNAT encodings: DIEs generated for both would
22363 conflict in our mappings. */
22364 else if (is_variant_part (member)
22365 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
22366 {
22367 vlr_ctx.variant_part_offset = byte_position (member);
22368 gen_variant_part (member, &vlr_ctx, context_die);
22369 }
22370 else
22371 {
22372 vlr_ctx.variant_part_offset = NULL_TREE;
22373 gen_decl_die (member, NULL, &vlr_ctx, context_die);
22374 }
22375 }
22376
22377 /* We do not keep type methods in type variants. */
22378 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
22379 /* Now output info about the function members (if any). */
22380 if (TYPE_METHODS (type) != error_mark_node)
22381 for (member = TYPE_METHODS (type); member; member = DECL_CHAIN (member))
22382 {
22383 /* Don't include clones in the member list. */
22384 if (DECL_ABSTRACT_ORIGIN (member))
22385 continue;
22386 /* Nor constructors for anonymous classes. */
22387 if (DECL_ARTIFICIAL (member)
22388 && dwarf2_name (member, 0) == NULL)
22389 continue;
22390
22391 child = lookup_decl_die (member);
22392 if (child)
22393 splice_child_die (context_die, child);
22394 else
22395 gen_decl_die (member, NULL, NULL, context_die);
22396 }
22397 }
22398
22399 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
22400 is set, we pretend that the type was never defined, so we only get the
22401 member DIEs needed by later specification DIEs. */
22402
22403 static void
22404 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
22405 enum debug_info_usage usage)
22406 {
22407 if (TREE_ASM_WRITTEN (type))
22408 {
22409       /* Fill in the bounds of variable-length fields in late DWARF if
22410 	 they are still incomplete. */
22411 if (!early_dwarf && variably_modified_type_p (type, NULL))
22412 for (tree member = TYPE_FIELDS (type);
22413 member;
22414 member = DECL_CHAIN (member))
22415 fill_variable_array_bounds (TREE_TYPE (member));
22416 return;
22417 }
22418
22419 dw_die_ref type_die = lookup_type_die (type);
22420 dw_die_ref scope_die = 0;
22421 int nested = 0;
22422 int complete = (TYPE_SIZE (type)
22423 && (! TYPE_STUB_DECL (type)
22424 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
22425 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
22426 complete = complete && should_emit_struct_debug (type, usage);
22427
22428 if (type_die && ! complete)
22429 return;
22430
22431 if (TYPE_CONTEXT (type) != NULL_TREE
22432 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
22433 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
22434 nested = 1;
22435
22436 scope_die = scope_die_for (type, context_die);
22437
22438   /* Generate child DIEs for template parameters. */
22439 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
22440 schedule_generic_params_dies_gen (type);
22441
22442 if (! type_die || (nested && is_cu_die (scope_die)))
22443 /* First occurrence of type or toplevel definition of nested class. */
22444 {
22445 dw_die_ref old_die = type_die;
22446
22447 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
22448 ? record_type_tag (type) : DW_TAG_union_type,
22449 scope_die, type);
22450 equate_type_number_to_die (type, type_die);
22451 if (old_die)
22452 add_AT_specification (type_die, old_die);
22453 else
22454 add_name_attribute (type_die, type_tag (type));
22455 }
22456 else
22457 remove_AT (type_die, DW_AT_declaration);
22458
22459 /* If this type has been completed, then give it a byte_size attribute and
22460 then give a list of members. */
22461 if (complete && !ns_decl)
22462 {
22463 /* Prevent infinite recursion in cases where the type of some member of
22464 this type is expressed in terms of this type itself. */
22465 TREE_ASM_WRITTEN (type) = 1;
22466 add_byte_size_attribute (type_die, type);
22467 if (TYPE_STUB_DECL (type) != NULL_TREE)
22468 {
22469 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
22470 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
22471 }
22472
22473 /* If the first reference to this type was as the return type of an
22474 inline function, then it may not have a parent. Fix this now. */
22475 if (type_die->die_parent == NULL)
22476 add_child_die (scope_die, type_die);
22477
22478 push_decl_scope (type);
22479 gen_member_die (type, type_die);
22480 pop_decl_scope ();
22481
22482 add_gnat_descriptive_type_attribute (type_die, type, context_die);
22483 if (TYPE_ARTIFICIAL (type))
22484 add_AT_flag (type_die, DW_AT_artificial, 1);
22485
22486 /* GNU extension: Record what type our vtable lives in. */
22487 if (TYPE_VFIELD (type))
22488 {
22489 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
22490
22491 gen_type_die (vtype, context_die);
22492 add_AT_die_ref (type_die, DW_AT_containing_type,
22493 lookup_type_die (vtype));
22494 }
22495 }
22496 else
22497 {
22498 add_AT_flag (type_die, DW_AT_declaration, 1);
22499
22500 /* We don't need to do this for function-local types. */
22501 if (TYPE_STUB_DECL (type)
22502 && ! decl_function_context (TYPE_STUB_DECL (type)))
22503 vec_safe_push (incomplete_types, type);
22504 }
22505
22506 if (get_AT (type_die, DW_AT_name))
22507 add_pubtype (type, type_die);
22508 }
22509
22510 /* Generate a DIE for a subroutine _type_. */
22511
22512 static void
22513 gen_subroutine_type_die (tree type, dw_die_ref context_die)
22514 {
22515 tree return_type = TREE_TYPE (type);
22516 dw_die_ref subr_die
22517 = new_die (DW_TAG_subroutine_type,
22518 scope_die_for (type, context_die), type);
22519
22520 equate_type_number_to_die (type, subr_die);
22521 add_prototyped_attribute (subr_die, type);
22522 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
22523 context_die);
22524 gen_formal_types_die (type, subr_die);
22525
22526 if (get_AT (subr_die, DW_AT_name))
22527 add_pubtype (type, subr_die);
22528 }
22529
22530 /* Generate a DIE for a type definition. */
22531
22532 static void
22533 gen_typedef_die (tree decl, dw_die_ref context_die)
22534 {
22535 dw_die_ref type_die;
22536 tree origin;
22537
22538 if (TREE_ASM_WRITTEN (decl))
22539 {
22540 if (DECL_ORIGINAL_TYPE (decl))
22541 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
22542 return;
22543 }
22544
22545 TREE_ASM_WRITTEN (decl) = 1;
22546 type_die = new_die (DW_TAG_typedef, context_die, decl);
22547 origin = decl_ultimate_origin (decl);
22548 if (origin != NULL)
22549 add_abstract_origin_attribute (type_die, origin);
22550 else
22551 {
22552 tree type;
22553
22554 add_name_and_src_coords_attributes (type_die, decl);
22555 if (DECL_ORIGINAL_TYPE (decl))
22556 {
22557 type = DECL_ORIGINAL_TYPE (decl);
22558
22559 if (type == error_mark_node)
22560 return;
22561
22562 gcc_assert (type != TREE_TYPE (decl));
22563 equate_type_number_to_die (TREE_TYPE (decl), type_die);
22564 }
22565 else
22566 {
22567 type = TREE_TYPE (decl);
22568
22569 if (type == error_mark_node)
22570 return;
22571
22572 if (is_naming_typedef_decl (TYPE_NAME (type)))
22573 {
22574 /* Here, we are in the case of decl being a typedef naming
22575 	     an anonymous type, e.g.:
22576 typedef struct {...} foo;
22577 In that case TREE_TYPE (decl) is not a typedef variant
22578 type and TYPE_NAME of the anonymous type is set to the
22579 TYPE_DECL of the typedef. This construct is emitted by
22580 the C++ FE.
22581
22582 TYPE is the anonymous struct named by the typedef
22583 DECL. As we need the DW_AT_type attribute of the
22584 DW_TAG_typedef to point to the DIE of TYPE, let's
22585 generate that DIE right away. add_type_attribute
22586 called below will then pick (via lookup_type_die) that
22587 anonymous struct DIE. */
22588 if (!TREE_ASM_WRITTEN (type))
22589 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
22590
22591 	  /* This is a GNU extension. We are adding a
22592 DW_AT_linkage_name attribute to the DIE of the
22593 anonymous struct TYPE. The value of that attribute
22594 is the name of the typedef decl naming the anonymous
22595 struct. This greatly eases the work of consumers of
22596 this debug info. */
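	      /* Illustrative example: for "typedef struct {...} foo;" the
		 anonymous struct's DIE (DW_TAG_structure_type or
		 DW_TAG_class_type) ends up carrying the typedef's name as
		 its linkage name, in addition to the DW_TAG_typedef DIE
		 named "foo" whose DW_AT_type refers to it. */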
22597 add_linkage_name_raw (lookup_type_die (type), decl);
22598 }
22599 }
22600
22601 add_type_attribute (type_die, type, decl_quals (decl), false,
22602 context_die);
22603
22604 if (is_naming_typedef_decl (decl))
22605 	/* We want all subsequent calls to lookup_type_die with
22606 	   TYPE as argument to yield the DW_TAG_typedef we have just
22607 	   created. */
22608 equate_type_number_to_die (type, type_die);
22609
22610 add_accessibility_attribute (type_die, decl);
22611 }
22612
22613 if (DECL_ABSTRACT_P (decl))
22614 equate_decl_number_to_die (decl, type_die);
22615
22616 if (get_AT (type_die, DW_AT_name))
22617 add_pubtype (decl, type_die);
22618 }
22619
22620 /* Generate a DIE for a struct, class, enum or union type. */
22621
22622 static void
22623 gen_tagged_type_die (tree type,
22624 dw_die_ref context_die,
22625 enum debug_info_usage usage)
22626 {
22627 int need_pop;
22628
22629 if (type == NULL_TREE
22630 || !is_tagged_type (type))
22631 return;
22632
22633 if (TREE_ASM_WRITTEN (type))
22634 need_pop = 0;
22635 /* If this is a nested type whose containing class hasn't been written
22636 out yet, writing it out will cover this one, too. This does not apply
22637 to instantiations of member class templates; they need to be added to
22638 the containing class as they are generated. FIXME: This hurts the
22639 idea of combining type decls from multiple TUs, since we can't predict
22640 what set of template instantiations we'll get. */
22641 else if (TYPE_CONTEXT (type)
22642 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
22643 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
22644 {
22645 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
22646
22647 if (TREE_ASM_WRITTEN (type))
22648 return;
22649
22650 /* If that failed, attach ourselves to the stub. */
22651 push_decl_scope (TYPE_CONTEXT (type));
22652 context_die = lookup_type_die (TYPE_CONTEXT (type));
22653 need_pop = 1;
22654 }
22655 else if (TYPE_CONTEXT (type) != NULL_TREE
22656 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
22657 {
22658 /* If this type is local to a function that hasn't been written
22659 out yet, use a NULL context for now; it will be fixed up in
22660 decls_for_scope. */
22661 context_die = lookup_decl_die (TYPE_CONTEXT (type));
22662 /* A declaration DIE doesn't count; nested types need to go in the
22663 specification. */
22664 if (context_die && is_declaration_die (context_die))
22665 context_die = NULL;
22666 need_pop = 0;
22667 }
22668 else
22669 {
22670 context_die = declare_in_namespace (type, context_die);
22671 need_pop = 0;
22672 }
22673
22674 if (TREE_CODE (type) == ENUMERAL_TYPE)
22675 {
22676 /* This might have been written out by the call to
22677 declare_in_namespace. */
22678 if (!TREE_ASM_WRITTEN (type))
22679 gen_enumeration_type_die (type, context_die);
22680 }
22681 else
22682 gen_struct_or_union_type_die (type, context_die, usage);
22683
22684 if (need_pop)
22685 pop_decl_scope ();
22686
22687 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
22688 it up if it is ever completed. gen_*_type_die will set it for us
22689 when appropriate. */
22690 }
22691
22692 /* Generate a type description DIE. */
22693
22694 static void
22695 gen_type_die_with_usage (tree type, dw_die_ref context_die,
22696 enum debug_info_usage usage)
22697 {
22698 struct array_descr_info info;
22699
22700 if (type == NULL_TREE || type == error_mark_node)
22701 return;
22702
22703 if (flag_checking && type)
22704 verify_type (type);
22705
22706 if (TYPE_NAME (type) != NULL_TREE
22707 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
22708 && is_redundant_typedef (TYPE_NAME (type))
22709 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
22710 /* The DECL of this type is a typedef we don't want to emit debug
22711 info for but we want debug info for its underlying typedef.
22712      This can happen for, e.g., the injected-class-name of a C++
22713 type. */
22714 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
22715
22716 /* If TYPE is a typedef type variant, let's generate debug info
22717 for the parent typedef which TYPE is a type of. */
22718 if (typedef_variant_p (type))
22719 {
22720 if (TREE_ASM_WRITTEN (type))
22721 return;
22722
22723 /* Prevent broken recursion; we can't hand off to the same type. */
22724 gcc_assert (DECL_ORIGINAL_TYPE (TYPE_NAME (type)) != type);
22725
22726 /* Give typedefs the right scope. */
22727 context_die = scope_die_for (type, context_die);
22728
22729 TREE_ASM_WRITTEN (type) = 1;
22730
22731 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
22732 return;
22733 }
22734
22735 /* If type is an anonymous tagged type named by a typedef, let's
22736 generate debug info for the typedef. */
22737 if (is_naming_typedef_decl (TYPE_NAME (type)))
22738 {
22739 /* Use the DIE of the containing namespace as the parent DIE of
22740 the type description DIE we want to generate. */
22741 if (DECL_CONTEXT (TYPE_NAME (type))
22742 && TREE_CODE (DECL_CONTEXT (TYPE_NAME (type))) == NAMESPACE_DECL)
22743 context_die = get_context_die (DECL_CONTEXT (TYPE_NAME (type)));
22744
22745 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
22746 return;
22747 }
22748
22749 /* We are going to output a DIE to represent the unqualified version
22750 of this type (i.e. without any const or volatile qualifiers) so
22751 get the main variant (i.e. the unqualified version) of this type
22752 now. (Vectors and arrays are special because the debugging info is in the
22753 cloned type itself). */
22754 if (TREE_CODE (type) != VECTOR_TYPE
22755 && TREE_CODE (type) != ARRAY_TYPE)
22756 type = type_main_variant (type);
22757
22758 /* If this is an array type with hidden descriptor, handle it first. */
22759 if (!TREE_ASM_WRITTEN (type)
22760 && lang_hooks.types.get_array_descr_info)
22761 {
22762 memset (&info, 0, sizeof (info));
22763 if (lang_hooks.types.get_array_descr_info (type, &info))
22764 {
22765 /* Fortran sometimes emits array types with no dimension. */
22766 gcc_assert (info.ndimensions >= 0
22767 && (info.ndimensions
22768 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
22769 gen_descr_array_type_die (type, &info, context_die);
22770 TREE_ASM_WRITTEN (type) = 1;
22771 return;
22772 }
22773 }
22774
22775 if (TREE_ASM_WRITTEN (type))
22776 {
22777 /* Variable-length types may be incomplete even if
22778 TREE_ASM_WRITTEN. For such types, fall through to
22779 gen_array_type_die() and possibly fill in
22780 DW_AT_{upper,lower}_bound attributes. */
22781 if ((TREE_CODE (type) != ARRAY_TYPE
22782 && TREE_CODE (type) != RECORD_TYPE
22783 && TREE_CODE (type) != UNION_TYPE
22784 && TREE_CODE (type) != QUAL_UNION_TYPE)
22785 || !variably_modified_type_p (type, NULL))
22786 return;
22787 }
22788
22789 switch (TREE_CODE (type))
22790 {
22791 case ERROR_MARK:
22792 break;
22793
22794 case POINTER_TYPE:
22795 case REFERENCE_TYPE:
22796 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
22797 ensures that the gen_type_die recursion will terminate even if the
22798 type is recursive. Recursive types are possible in Ada. */
22799 /* ??? We could perhaps do this for all types before the switch
22800 statement. */
22801 TREE_ASM_WRITTEN (type) = 1;
22802
22803 /* For these types, all that is required is that we output a DIE (or a
22804 set of DIEs) to represent the "basis" type. */
22805 gen_type_die_with_usage (TREE_TYPE (type), context_die,
22806 DINFO_USAGE_IND_USE);
22807 break;
22808
22809 case OFFSET_TYPE:
22810 /* This code is used for C++ pointer-to-data-member types.
22811 Output a description of the relevant class type. */
22812 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
22813 DINFO_USAGE_IND_USE);
22814
22815 /* Output a description of the type of the object pointed to. */
22816 gen_type_die_with_usage (TREE_TYPE (type), context_die,
22817 DINFO_USAGE_IND_USE);
22818
22819 /* Now output a DIE to represent this pointer-to-data-member type
22820 itself. */
22821 gen_ptr_to_mbr_type_die (type, context_die);
22822 break;
22823
22824 case FUNCTION_TYPE:
22825 /* Force out return type (in case it wasn't forced out already). */
22826 gen_type_die_with_usage (TREE_TYPE (type), context_die,
22827 DINFO_USAGE_DIR_USE);
22828 gen_subroutine_type_die (type, context_die);
22829 break;
22830
22831 case METHOD_TYPE:
22832 /* Force out return type (in case it wasn't forced out already). */
22833 gen_type_die_with_usage (TREE_TYPE (type), context_die,
22834 DINFO_USAGE_DIR_USE);
22835 gen_subroutine_type_die (type, context_die);
22836 break;
22837
22838 case ARRAY_TYPE:
22839 case VECTOR_TYPE:
22840 gen_array_type_die (type, context_die);
22841 break;
22842
22843 case ENUMERAL_TYPE:
22844 case RECORD_TYPE:
22845 case UNION_TYPE:
22846 case QUAL_UNION_TYPE:
22847 gen_tagged_type_die (type, context_die, usage);
22848 return;
22849
22850 case VOID_TYPE:
22851 case INTEGER_TYPE:
22852 case REAL_TYPE:
22853 case FIXED_POINT_TYPE:
22854 case COMPLEX_TYPE:
22855 case BOOLEAN_TYPE:
22856 case POINTER_BOUNDS_TYPE:
22857 /* No DIEs needed for fundamental types. */
22858 break;
22859
22860 case NULLPTR_TYPE:
22861 case LANG_TYPE:
22862 /* Just use DW_TAG_unspecified_type. */
22863 {
22864 dw_die_ref type_die = lookup_type_die (type);
22865 if (type_die == NULL)
22866 {
22867 tree name = TYPE_IDENTIFIER (type);
22868 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
22869 type);
22870 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
22871 equate_type_number_to_die (type, type_die);
22872 }
22873 }
22874 break;
22875
22876 default:
22877 if (is_cxx_auto (type))
22878 {
22879 tree name = TYPE_IDENTIFIER (type);
22880 dw_die_ref *die = (name == get_identifier ("auto")
22881 ? &auto_die : &decltype_auto_die);
22882 if (!*die)
22883 {
22884 *die = new_die (DW_TAG_unspecified_type,
22885 comp_unit_die (), NULL_TREE);
22886 add_name_attribute (*die, IDENTIFIER_POINTER (name));
22887 }
22888 equate_type_number_to_die (type, *die);
22889 break;
22890 }
22891 gcc_unreachable ();
22892 }
22893
22894 TREE_ASM_WRITTEN (type) = 1;
22895 }
22896
22897 static void
22898 gen_type_die (tree type, dw_die_ref context_die)
22899 {
22900 if (type != error_mark_node)
22901 {
22902 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
22903 if (flag_checking)
22904 {
22905 dw_die_ref die = lookup_type_die (type);
22906 if (die)
22907 check_die (die);
22908 }
22909 }
22910 }
22911
22912 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
22913 things which are local to the given block. */
22914
22915 static void
22916 gen_block_die (tree stmt, dw_die_ref context_die)
22917 {
22918 int must_output_die = 0;
22919 bool inlined_func;
22920
22921 /* Ignore blocks that are NULL. */
22922 if (stmt == NULL_TREE)
22923 return;
22924
22925 inlined_func = inlined_function_outer_scope_p (stmt);
22926
22927 /* If the block is one fragment of a non-contiguous block, do not
22928 process the variables, since they will have been done by the
22929 origin block. Do process subblocks. */
22930 if (BLOCK_FRAGMENT_ORIGIN (stmt))
22931 {
22932 tree sub;
22933
22934 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
22935 gen_block_die (sub, context_die);
22936
22937 return;
22938 }
22939
22940 /* Determine if we need to output any Dwarf DIEs at all to represent this
22941 block. */
22942 if (inlined_func)
22943 /* The outer scopes for inlinings *must* always be represented. We
22944 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
22945 must_output_die = 1;
22946 else
22947 {
22948 /* Determine if this block directly contains any "significant"
22949 local declarations which we will need to output DIEs for. */
22950 if (debug_info_level > DINFO_LEVEL_TERSE)
22951 /* We are not in terse mode so *any* local declaration counts
22952 as being a "significant" one. */
22953 must_output_die = ((BLOCK_VARS (stmt) != NULL
22954 || BLOCK_NUM_NONLOCALIZED_VARS (stmt))
22955 && (TREE_USED (stmt)
22956 || TREE_ASM_WRITTEN (stmt)
22957 || BLOCK_ABSTRACT (stmt)));
22958 else if ((TREE_USED (stmt)
22959 || TREE_ASM_WRITTEN (stmt)
22960 || BLOCK_ABSTRACT (stmt))
22961 && !dwarf2out_ignore_block (stmt))
22962 must_output_die = 1;
22963 }
22964
22965 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
22966 DIE for any block which contains no significant local declarations at
22967 all. Rather, in such cases we just call `decls_for_scope' so that any
22968 needed Dwarf info for any sub-blocks will get properly generated. Note
22969 that in terse mode, our definition of what constitutes a "significant"
22970 local declaration gets restricted to include only inlined function
22971 instances and local (nested) function definitions. */
22972 if (must_output_die)
22973 {
22974 if (inlined_func)
22975 {
22976 /* If STMT block is abstract, that means we have been called
22977 indirectly from dwarf2out_abstract_function.
22978 	     That function rightfully marks the descendant blocks (of
22979 the abstract function it is dealing with) as being abstract,
22980 precisely to prevent us from emitting any
22981 	     DW_TAG_inlined_subroutine DIE as a descendant
22982 of an abstract function instance. So in that case, we should
22983 not call gen_inlined_subroutine_die.
22984
22985 Later though, when cgraph asks dwarf2out to emit info
22986 for the concrete instance of the function decl into which
22987 	     the concrete instance of STMT got inlined, the latter will lead
22988 to the generation of a DW_TAG_inlined_subroutine DIE. */
22989 if (! BLOCK_ABSTRACT (stmt))
22990 gen_inlined_subroutine_die (stmt, context_die);
22991 }
22992 else
22993 gen_lexical_block_die (stmt, context_die);
22994 }
22995 else
22996 decls_for_scope (stmt, context_die);
22997 }
22998
22999 /* Process variable DECL (or variable with origin ORIGIN) within
23000 block STMT and add it to CONTEXT_DIE. */
23001 static void
23002 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
23003 {
23004 dw_die_ref die;
23005 tree decl_or_origin = decl ? decl : origin;
23006
23007 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
23008 die = lookup_decl_die (decl_or_origin);
23009 else if (TREE_CODE (decl_or_origin) == TYPE_DECL
23010 && TYPE_DECL_IS_STUB (decl_or_origin))
23011 die = lookup_type_die (TREE_TYPE (decl_or_origin));
23012 else
23013 die = NULL;
23014
23015 if (die != NULL && die->die_parent == NULL)
23016 add_child_die (context_die, die);
23017 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
23018 {
23019 if (early_dwarf)
23020 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
23021 stmt, context_die);
23022 }
23023 else
23024 gen_decl_die (decl, origin, NULL, context_die);
23025 }
23026
23027 /* Generate all of the decls declared within a given scope and (recursively)
23028 all of its sub-blocks. */
23029
23030 static void
23031 decls_for_scope (tree stmt, dw_die_ref context_die)
23032 {
23033 tree decl;
23034 unsigned int i;
23035 tree subblocks;
23036
23037 /* Ignore NULL blocks. */
23038 if (stmt == NULL_TREE)
23039 return;
23040
23041 /* Output the DIEs to represent all of the data objects and typedefs
23042 declared directly within this block but not within any nested
23043 sub-blocks. Also, nested function and tag DIEs have been
23044 generated with a parent of NULL; fix that up now. We don't
23045 have to do this if we're at -g1. */
23046 if (debug_info_level > DINFO_LEVEL_TERSE)
23047 {
23048 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
23049 process_scope_var (stmt, decl, NULL_TREE, context_die);
23050 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
23051 process_scope_var (stmt, NULL, BLOCK_NONLOCALIZED_VAR (stmt, i),
23052 context_die);
23053 }
23054
23055 /* Even if we're at -g1, we need to process the subblocks in order to get
23056 inlined call information. */
23057
23058 /* Output the DIEs to represent all sub-blocks (and the items declared
23059 therein) of this block. */
23060 for (subblocks = BLOCK_SUBBLOCKS (stmt);
23061 subblocks != NULL;
23062 subblocks = BLOCK_CHAIN (subblocks))
23063 gen_block_die (subblocks, context_die);
23064 }
23065
23066 /* Is this a typedef we can avoid emitting? */
23067
23068 bool
23069 is_redundant_typedef (const_tree decl)
23070 {
23071 if (TYPE_DECL_IS_STUB (decl))
23072 return true;
23073
23074 if (DECL_ARTIFICIAL (decl)
23075 && DECL_CONTEXT (decl)
23076 && is_tagged_type (DECL_CONTEXT (decl))
23077 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
23078 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
23079 /* Also ignore the artificial member typedef for the class name. */
23080 return true;
23081
23082 return false;
23083 }
23084
23085 /* Return TRUE if DECL is a typedef that names a type for linkage
23086    purposes. This kind of typedef is produced by the C++ FE for
23087 constructs like:
23088
23089 typedef struct {...} foo;
23090
23091 In that case, there is no typedef variant type produced for foo.
23092 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
23093 struct type. */
23094
23095 static bool
23096 is_naming_typedef_decl (const_tree decl)
23097 {
23098 if (decl == NULL_TREE
23099 || TREE_CODE (decl) != TYPE_DECL
23100 || DECL_NAMELESS (decl)
23101 || !is_tagged_type (TREE_TYPE (decl))
23102 || DECL_IS_BUILTIN (decl)
23103 || is_redundant_typedef (decl)
23104 /* It looks like Ada produces TYPE_DECLs that are very similar
23105 to C++ naming typedefs but that have different
23106        semantics. Let's be specific to C++ for now. */
23107 || !is_cxx ())
23108 return FALSE;
23109
23110 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
23111 && TYPE_NAME (TREE_TYPE (decl)) == decl
23112 && (TYPE_STUB_DECL (TREE_TYPE (decl))
23113 != TYPE_NAME (TREE_TYPE (decl))));
23114 }
23115
23116 /* Looks up the DIE for a context. */
23117
23118 static inline dw_die_ref
23119 lookup_context_die (tree context)
23120 {
23121 if (context)
23122 {
23123 /* Find die that represents this context. */
23124 if (TYPE_P (context))
23125 {
23126 context = TYPE_MAIN_VARIANT (context);
23127 dw_die_ref ctx = lookup_type_die (context);
23128 if (!ctx)
23129 return NULL;
23130 return strip_naming_typedef (context, ctx);
23131 }
23132 else
23133 return lookup_decl_die (context);
23134 }
23135 return comp_unit_die ();
23136 }
23137
23138 /* Returns the DIE for a context. */
23139
23140 static inline dw_die_ref
23141 get_context_die (tree context)
23142 {
23143 if (context)
23144 {
23145 /* Find die that represents this context. */
23146 if (TYPE_P (context))
23147 {
23148 context = TYPE_MAIN_VARIANT (context);
23149 return strip_naming_typedef (context, force_type_die (context));
23150 }
23151 else
23152 return force_decl_die (context);
23153 }
23154 return comp_unit_die ();
23155 }
23156
23157 /* Returns the DIE for DECL. A DIE will always be returned. */
23158
23159 static dw_die_ref
23160 force_decl_die (tree decl)
23161 {
23162 dw_die_ref decl_die;
23163 unsigned saved_external_flag;
23164 tree save_fn = NULL_TREE;
23165 decl_die = lookup_decl_die (decl);
23166 if (!decl_die)
23167 {
23168 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
23169
23170 decl_die = lookup_decl_die (decl);
23171 if (decl_die)
23172 return decl_die;
23173
23174 switch (TREE_CODE (decl))
23175 {
23176 case FUNCTION_DECL:
23177 /* Clear current_function_decl, so that gen_subprogram_die thinks
23178 that this is a declaration. At this point, we just want to force
23179 	     a declaration DIE. */
23180 save_fn = current_function_decl;
23181 current_function_decl = NULL_TREE;
23182 gen_subprogram_die (decl, context_die);
23183 current_function_decl = save_fn;
23184 break;
23185
23186 case VAR_DECL:
23187 	  /* Set external flag to force a declaration DIE. Restore it after
23188 gen_decl_die() call. */
23189 saved_external_flag = DECL_EXTERNAL (decl);
23190 DECL_EXTERNAL (decl) = 1;
23191 gen_decl_die (decl, NULL, NULL, context_die);
23192 DECL_EXTERNAL (decl) = saved_external_flag;
23193 break;
23194
23195 case NAMESPACE_DECL:
23196 if (dwarf_version >= 3 || !dwarf_strict)
23197 dwarf2out_decl (decl);
23198 else
23199 	    /* DWARF2 has neither DW_TAG_module nor DW_TAG_namespace. */
23200 decl_die = comp_unit_die ();
23201 break;
23202
23203 case TRANSLATION_UNIT_DECL:
23204 decl_die = comp_unit_die ();
23205 break;
23206
23207 default:
23208 gcc_unreachable ();
23209 }
23210
23211 /* We should be able to find the DIE now. */
23212 if (!decl_die)
23213 decl_die = lookup_decl_die (decl);
23214 gcc_assert (decl_die);
23215 }
23216
23217 return decl_die;
23218 }
23219
23220 /* Returns the DIE for TYPE, which must not be a base type. A DIE is
23221 always returned. */
23222
23223 static dw_die_ref
23224 force_type_die (tree type)
23225 {
23226 dw_die_ref type_die;
23227
23228 type_die = lookup_type_die (type);
23229 if (!type_die)
23230 {
23231 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
23232
23233 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
23234 false, context_die);
23235 gcc_assert (type_die);
23236 }
23237 return type_die;
23238 }
23239
23240 /* Force out any required namespaces to be able to output DECL,
23241 and return the new context_die for it, if it's changed. */
23242
23243 static dw_die_ref
23244 setup_namespace_context (tree thing, dw_die_ref context_die)
23245 {
23246 tree context = (DECL_P (thing)
23247 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
23248 if (context && TREE_CODE (context) == NAMESPACE_DECL)
23249 /* Force out the namespace. */
23250 context_die = force_decl_die (context);
23251
23252 return context_die;
23253 }
23254
23255 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
23256 type) within its namespace, if appropriate.
23257
23258 For compatibility with older debuggers, namespace DIEs only contain
23259 declarations; all definitions are emitted at CU scope, with
23260 DW_AT_specification pointing to the declaration (like with class
23261 members). */
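/* For instance (a sketch of the resulting shape): for
   "namespace N { int i; }" the DW_TAG_namespace DIE for N contains only a
   declaration DIE for "i", while the defining DIE for "i" is emitted at CU
   scope with a DW_AT_specification that points back at that declaration. */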
23262
23263 static dw_die_ref
23264 declare_in_namespace (tree thing, dw_die_ref context_die)
23265 {
23266 dw_die_ref ns_context;
23267
23268 if (debug_info_level <= DINFO_LEVEL_TERSE)
23269 return context_die;
23270
23271 /* External declarations in the local scope only need to be emitted
23272 once, not once in the namespace and once in the scope.
23273
23274 This avoids declaring the `extern' below in the
23275 namespace DIE as well as in the innermost scope:
23276
23277 namespace S
23278 {
23279 int i=5;
23280 int foo()
23281 {
23282 int i=8;
23283 extern int i;
23284 return i;
23285 }
23286 }
23287 */
23288 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
23289 return context_die;
23290
23291 /* If this decl is from an inlined function, then don't try to emit it in its
23292 namespace, as we will get confused. It would have already been emitted
23293      when the abstract instance of the inline function was emitted anyway. */
23294 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
23295 return context_die;
23296
23297 ns_context = setup_namespace_context (thing, context_die);
23298
23299 if (ns_context != context_die)
23300 {
23301 if (is_fortran ())
23302 return ns_context;
23303 if (DECL_P (thing))
23304 gen_decl_die (thing, NULL, NULL, ns_context);
23305 else
23306 gen_type_die (thing, ns_context);
23307 }
23308 return context_die;
23309 }
23310
23311 /* Generate a DIE for a namespace or namespace alias. */
23312
23313 static void
23314 gen_namespace_die (tree decl, dw_die_ref context_die)
23315 {
23316 dw_die_ref namespace_die;
23317
23318 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
23319 they are an alias of. */
23320 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
23321 {
23322 /* Output a real namespace or module. */
23323 context_die = setup_namespace_context (decl, comp_unit_die ());
23324 namespace_die = new_die (is_fortran ()
23325 ? DW_TAG_module : DW_TAG_namespace,
23326 context_die, decl);
23327       /* For Fortran modules defined in a different CU, don't add src coords. */
23328 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
23329 {
23330 const char *name = dwarf2_name (decl, 0);
23331 if (name)
23332 add_name_attribute (namespace_die, name);
23333 }
23334 else
23335 add_name_and_src_coords_attributes (namespace_die, decl);
23336 if (DECL_EXTERNAL (decl))
23337 add_AT_flag (namespace_die, DW_AT_declaration, 1);
23338 equate_decl_number_to_die (decl, namespace_die);
23339 }
23340 else
23341 {
23342 /* Output a namespace alias. */
23343
23344 /* Force out the namespace we are an alias of, if necessary. */
23345 dw_die_ref origin_die
23346 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
23347
23348 if (DECL_FILE_SCOPE_P (decl)
23349 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
23350 context_die = setup_namespace_context (decl, comp_unit_die ());
23351 /* Now create the namespace alias DIE. */
23352 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
23353 add_name_and_src_coords_attributes (namespace_die, decl);
23354 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
23355 equate_decl_number_to_die (decl, namespace_die);
23356 }
23357 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
23358 if (want_pubnames ())
23359 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
23360 }
23361
23362 /* Generate Dwarf debug information for a decl described by DECL.
23363    The return value is currently only meaningful for PARM_DECLs;
23364 for all other decls it returns NULL.
23365
23366 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
23367 It can be NULL otherwise. */
23368
23369 static dw_die_ref
23370 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
23371 dw_die_ref context_die)
23372 {
23373 tree decl_or_origin = decl ? decl : origin;
23374 tree class_origin = NULL, ultimate_origin;
23375
23376 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
23377 return NULL;
23378
23379 /* Ignore pointer bounds decls. */
23380 if (DECL_P (decl_or_origin)
23381 && TREE_TYPE (decl_or_origin)
23382 && POINTER_BOUNDS_P (decl_or_origin))
23383 return NULL;
23384
23385 switch (TREE_CODE (decl_or_origin))
23386 {
23387 case ERROR_MARK:
23388 break;
23389
23390 case CONST_DECL:
23391 if (!is_fortran () && !is_ada ())
23392 {
23393 /* The individual enumerators of an enum type get output when we output
23394 the Dwarf representation of the relevant enum type itself. */
23395 break;
23396 }
23397
23398 /* Emit its type. */
23399 gen_type_die (TREE_TYPE (decl), context_die);
23400
23401 /* And its containing namespace. */
23402 context_die = declare_in_namespace (decl, context_die);
23403
23404 gen_const_die (decl, context_die);
23405 break;
23406
23407 case FUNCTION_DECL:
23408 /* Don't output any DIEs to represent mere function declarations,
23409 unless they are class members or explicit block externs. */
23410 if (DECL_INITIAL (decl_or_origin) == NULL_TREE
23411 && DECL_FILE_SCOPE_P (decl_or_origin)
23412 && (current_function_decl == NULL_TREE
23413 || DECL_ARTIFICIAL (decl_or_origin)))
23414 break;
23415
23416 #if 0
23417 /* FIXME */
23418 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
23419 on local redeclarations of global functions. That seems broken. */
23420 if (current_function_decl != decl)
23421 /* This is only a declaration. */;
23422 #endif
23423
23424 /* If we're emitting a clone, emit info for the abstract instance. */
23425 if (origin || DECL_ORIGIN (decl) != decl)
23426 dwarf2out_abstract_function (origin
23427 ? DECL_ORIGIN (origin)
23428 : DECL_ABSTRACT_ORIGIN (decl));
23429
23430 /* If we're emitting an out-of-line copy of an inline function,
23431 emit info for the abstract instance and set up to refer to it. */
23432 else if (cgraph_function_possibly_inlined_p (decl)
23433 && ! DECL_ABSTRACT_P (decl)
23434 && ! class_or_namespace_scope_p (context_die)
23435 /* dwarf2out_abstract_function won't emit a die if this is just
23436 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
23437 that case, because that works only if we have a die. */
23438 && DECL_INITIAL (decl) != NULL_TREE)
23439 {
23440 dwarf2out_abstract_function (decl);
23441 set_decl_origin_self (decl);
23442 }
23443
23444 /* Otherwise we're emitting the primary DIE for this decl. */
23445 else if (debug_info_level > DINFO_LEVEL_TERSE)
23446 {
23447 /* Before we describe the FUNCTION_DECL itself, make sure that we
23448 have its containing type. */
23449 if (!origin)
23450 origin = decl_class_context (decl);
23451 if (origin != NULL_TREE)
23452 gen_type_die (origin, context_die);
23453
23454 /* And its return type. */
23455 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
23456
23457 /* And its virtual context. */
23458 if (DECL_VINDEX (decl) != NULL_TREE)
23459 gen_type_die (DECL_CONTEXT (decl), context_die);
23460
23461 /* Make sure we have a member DIE for decl. */
23462 if (origin != NULL_TREE)
23463 gen_type_die_for_member (origin, decl, context_die);
23464
23465 /* And its containing namespace. */
23466 context_die = declare_in_namespace (decl, context_die);
23467 }
23468
23469 /* Now output a DIE to represent the function itself. */
23470 if (decl)
23471 gen_subprogram_die (decl, context_die);
23472 break;
23473
23474 case TYPE_DECL:
23475 /* If we are in terse mode, don't generate any DIEs to represent any
23476 actual typedefs. */
23477 if (debug_info_level <= DINFO_LEVEL_TERSE)
23478 break;
23479
23480 /* In the special case of a TYPE_DECL node representing the declaration
23481 of some type tag, if the given TYPE_DECL is marked as having been
23482 instantiated from some other (original) TYPE_DECL node (e.g. one which
23483 was generated within the original definition of an inline function) we
23484 used to generate a special (abbreviated) DW_TAG_structure_type,
23485 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
23486 should be actually referencing those DIEs, as variable DIEs with that
23487 type would be emitted already in the abstract origin, so it was always
23488 removed during unused type prunning. Don't add anything in this
23489 case. */
23490 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
23491 break;
23492
23493 if (is_redundant_typedef (decl))
23494 gen_type_die (TREE_TYPE (decl), context_die);
23495 else
23496 /* Output a DIE to represent the typedef itself. */
23497 gen_typedef_die (decl, context_die);
23498 break;
23499
23500 case LABEL_DECL:
23501 if (debug_info_level >= DINFO_LEVEL_NORMAL)
23502 gen_label_die (decl, context_die);
23503 break;
23504
23505 case VAR_DECL:
23506 case RESULT_DECL:
23507 /* If we are in terse mode, don't generate any DIEs to represent any
23508 variable declarations or definitions. */
23509 if (debug_info_level <= DINFO_LEVEL_TERSE)
23510 break;
23511
23512 /* Output any DIEs that are needed to specify the type of this data
23513 object. */
23514 if (decl_by_reference_p (decl_or_origin))
23515 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
23516 else
23517 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
23518
23519 /* And its containing type. */
23520 class_origin = decl_class_context (decl_or_origin);
23521 if (class_origin != NULL_TREE)
23522 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
23523
23524 /* And its containing namespace. */
23525 context_die = declare_in_namespace (decl_or_origin, context_die);
23526
23527 /* Now output the DIE to represent the data object itself. This gets
23528 complicated because of the possibility that the VAR_DECL really
23529 represents an inlined instance of a formal parameter for an inline
23530 function. */
23531 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23532 if (ultimate_origin != NULL_TREE
23533 && TREE_CODE (ultimate_origin) == PARM_DECL)
23534 gen_formal_parameter_die (decl, origin,
23535 true /* Emit name attribute. */,
23536 context_die);
23537 else
23538 gen_variable_die (decl, origin, context_die);
23539 break;
23540
23541 case FIELD_DECL:
23542 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
23543 /* Ignore the nameless fields that are used to skip bits but handle C++
23544 anonymous unions and structs. */
23545 if (DECL_NAME (decl) != NULL_TREE
23546 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
23547 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
23548 {
23549 gen_type_die (member_declared_type (decl), context_die);
23550 gen_field_die (decl, ctx, context_die);
23551 }
23552 break;
23553
23554 case PARM_DECL:
23555 if (DECL_BY_REFERENCE (decl_or_origin))
23556 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
23557 else
23558 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
23559 return gen_formal_parameter_die (decl, origin,
23560 true /* Emit name attribute. */,
23561 context_die);
23562
23563 case NAMESPACE_DECL:
23564 if (dwarf_version >= 3 || !dwarf_strict)
23565 gen_namespace_die (decl, context_die);
23566 break;
23567
23568 case IMPORTED_DECL:
23569 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
23570 DECL_CONTEXT (decl), context_die);
23571 break;
23572
23573 case NAMELIST_DECL:
23574 gen_namelist_decl (DECL_NAME (decl), context_die,
23575 NAMELIST_DECL_ASSOCIATED_DECL (decl));
23576 break;
23577
23578 default:
23579 /* Probably some frontend-internal decl. Assume we don't care. */
23580 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
23581 break;
23582 }
23583
23584 return NULL;
23585 }
23586 \f
23587 /* Output initial debug information for global DECL. Called at the
23588 end of the parsing process.
23589
23590 This is the initial debug generation process. As such, the DIEs
23591 generated may be incomplete. A later debug generation pass
23592 (dwarf2out_late_global_decl) will augment the information generated
23593 in this pass (e.g., with complete location info). */
23594
23595 static void
23596 dwarf2out_early_global_decl (tree decl)
23597 {
23598 set_early_dwarf s;
23599
23600 /* gen_decl_die() will set DECL_ABSTRACT because
23601      cgraph_function_possibly_inlined_p() returns true. This in
23602 turn will cause DW_AT_inline attributes to be set.
23603
23604 This happens because at early dwarf generation, there is no
23605 cgraph information, causing cgraph_function_possibly_inlined_p()
23606 to return true. Trick cgraph_function_possibly_inlined_p()
23607 while we generate dwarf early. */
23608 bool save = symtab->global_info_ready;
23609 symtab->global_info_ready = true;
23610
23611 /* We don't handle TYPE_DECLs. If required, they'll be reached via
23612 other DECLs and they can point to template types or other things
23613 that dwarf2out can't handle when done via dwarf2out_decl. */
23614 if (TREE_CODE (decl) != TYPE_DECL
23615 && TREE_CODE (decl) != PARM_DECL)
23616 {
23617 tree save_fndecl = current_function_decl;
23618 if (TREE_CODE (decl) == FUNCTION_DECL)
23619 {
23620 /* No cfun means the symbol has no body, so there's nothing
23621 to emit. */
23622 if (!DECL_STRUCT_FUNCTION (decl))
23623 goto early_decl_exit;
23624
23625 current_function_decl = decl;
23626 }
23627 dwarf2out_decl (decl);
23628 if (TREE_CODE (decl) == FUNCTION_DECL)
23629 current_function_decl = save_fndecl;
23630 }
23631 early_decl_exit:
23632 symtab->global_info_ready = save;
23633 }
23634
23635 /* Output debug information for global decl DECL. Called from
23636 toplev.c after compilation proper has finished. */
23637
23638 static void
23639 dwarf2out_late_global_decl (tree decl)
23640 {
23641 /* We have to generate early debug late for LTO. */
23642 if (in_lto_p)
23643 dwarf2out_early_global_decl (decl);
23644
23645   /* Fill in any location information we were unable to determine
23646 on the first pass. */
23647 if (TREE_CODE (decl) == VAR_DECL
23648 && !POINTER_BOUNDS_P (decl))
23649 {
23650 dw_die_ref die = lookup_decl_die (decl);
23651 if (die)
23652 add_location_or_const_value_attribute (die, decl, false);
23653 }
23654 }
23655
23656 /* Output debug information for type decl DECL. Called from toplev.c
23657 and from language front ends (to record built-in types). */
23658 static void
23659 dwarf2out_type_decl (tree decl, int local)
23660 {
23661 if (!local)
23662 {
23663 set_early_dwarf s;
23664 dwarf2out_decl (decl);
23665 }
23666 }
23667
23668 /* Output debug information for imported module or decl DECL.
23669    NAME is the non-NULL name in the lexical block if the decl has been renamed.
23670    LEXICAL_BLOCK is the lexical block (whose TREE_CODE is BLOCK)
23671 that DECL belongs to.
23672 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
23673 static void
23674 dwarf2out_imported_module_or_decl_1 (tree decl,
23675 tree name,
23676 tree lexical_block,
23677 dw_die_ref lexical_block_die)
23678 {
23679 expanded_location xloc;
23680 dw_die_ref imported_die = NULL;
23681 dw_die_ref at_import_die;
23682
23683 if (TREE_CODE (decl) == IMPORTED_DECL)
23684 {
23685 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
23686 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
23687 gcc_assert (decl);
23688 }
23689 else
23690 xloc = expand_location (input_location);
23691
23692 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
23693 {
23694 at_import_die = force_type_die (TREE_TYPE (decl));
23695 /* For namespace N { typedef void T; } using N::T; base_type_die
23696 returns NULL, but DW_TAG_imported_declaration requires
23697 	 the DW_AT_import attribute. Force creation of DW_TAG_typedef. */
23698 if (!at_import_die)
23699 {
23700 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
23701 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
23702 at_import_die = lookup_type_die (TREE_TYPE (decl));
23703 gcc_assert (at_import_die);
23704 }
23705 }
23706 else
23707 {
23708 at_import_die = lookup_decl_die (decl);
23709 if (!at_import_die)
23710 {
23711 /* If we're trying to avoid duplicate debug info, we may not have
23712 emitted the member decl for this field. Emit it now. */
23713 if (TREE_CODE (decl) == FIELD_DECL)
23714 {
23715 tree type = DECL_CONTEXT (decl);
23716
23717 if (TYPE_CONTEXT (type)
23718 && TYPE_P (TYPE_CONTEXT (type))
23719 && !should_emit_struct_debug (TYPE_CONTEXT (type),
23720 DINFO_USAGE_DIR_USE))
23721 return;
23722 gen_type_die_for_member (type, decl,
23723 get_context_die (TYPE_CONTEXT (type)));
23724 }
23725 if (TREE_CODE (decl) == NAMELIST_DECL)
23726 at_import_die = gen_namelist_decl (DECL_NAME (decl),
23727 get_context_die (DECL_CONTEXT (decl)),
23728 NULL_TREE);
23729 else
23730 at_import_die = force_decl_die (decl);
23731 }
23732 }
23733
23734 if (TREE_CODE (decl) == NAMESPACE_DECL)
23735 {
23736 if (dwarf_version >= 3 || !dwarf_strict)
23737 imported_die = new_die (DW_TAG_imported_module,
23738 lexical_block_die,
23739 lexical_block);
23740 else
23741 return;
23742 }
23743 else
23744 imported_die = new_die (DW_TAG_imported_declaration,
23745 lexical_block_die,
23746 lexical_block);
23747
23748 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
23749 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
23750 if (name)
23751 add_AT_string (imported_die, DW_AT_name,
23752 IDENTIFIER_POINTER (name));
23753 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
23754 }
23755
23756 /* Output debug information for imported module or decl DECL.
23757    NAME is the non-NULL name in context if the decl has been renamed.
23758    CHILD is true if decl is one of the renamed decls as part of
23759    importing a whole module. */
23760
23761 static void
23762 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
23763 bool child)
23764 {
23765 /* dw_die_ref at_import_die; */
23766 dw_die_ref scope_die;
23767
23768 if (debug_info_level <= DINFO_LEVEL_TERSE)
23769 return;
23770
23771 gcc_assert (decl);
23772
23773 set_early_dwarf s;
23774
23775   /* To emit DW_TAG_imported_module or DW_TAG_imported_declaration, we need
23776      two DIEs: the DIE for the decl being referenced and a scope DIE. First,
23777      get the DIE for the decl itself. */
23778
23779   /* Get the scope DIE for the decl's context. Use comp_unit_die for a global
23780      module or decl. If no DIE is found for non-globals, force a new one. */
23781 if (context
23782 && TYPE_P (context)
23783 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
23784 return;
23785
23786 if (!(dwarf_version >= 3 || !dwarf_strict))
23787 return;
23788
23789 scope_die = get_context_die (context);
23790
23791 if (child)
23792 {
23793 gcc_assert (scope_die->die_child);
23794 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
23795 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
23796 scope_die = scope_die->die_child;
23797 }
23798
23799 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
23800 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
23801 }
23802
23803 /* Output debug information for namelists. */
23804
23805 static dw_die_ref
23806 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
23807 {
23808 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
23809 tree value;
23810 unsigned i;
23811
23812 if (debug_info_level <= DINFO_LEVEL_TERSE)
23813 return NULL;
23814
23815 gcc_assert (scope_die != NULL);
23816 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
23817 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
23818
23819 /* If there are no item_decls, we have a nondefining namelist, e.g.
23820 with USE association; hence, set DW_AT_declaration. */
23821 if (item_decls == NULL_TREE)
23822 {
23823 add_AT_flag (nml_die, DW_AT_declaration, 1);
23824 return nml_die;
23825 }
23826
23827 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
23828 {
23829 nml_item_ref_die = lookup_decl_die (value);
23830 if (!nml_item_ref_die)
23831 nml_item_ref_die = force_decl_die (value);
23832
23833 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
23834 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
23835 }
23836 return nml_die;
23837 }
23838
23839
23840 /* Write the debugging output for DECL. */
23841
23842 static void
23843 dwarf2out_decl (tree decl)
23844 {
23845 dw_die_ref context_die = comp_unit_die ();
23846
23847 switch (TREE_CODE (decl))
23848 {
23849 case ERROR_MARK:
23850 return;
23851
23852 case FUNCTION_DECL:
23853 /* What we would really like to do here is to filter out all mere
23854 file-scope declarations of file-scope functions which are never
23855 	 referenced later within this translation unit (and keep all of the ones
23856 that *are* referenced later on) but we aren't clairvoyant, so we have
23857 no idea which functions will be referenced in the future (i.e. later
23858 on within the current translation unit). So here we just ignore all
23859 file-scope function declarations which are not also definitions. If
23860 and when the debugger needs to know something about these functions,
23861 it will have to hunt around and find the DWARF information associated
23862 with the definition of the function.
23863
23864 We can't just check DECL_EXTERNAL to find out which FUNCTION_DECL
23865 nodes represent definitions and which ones represent mere
23866 declarations. We have to check DECL_INITIAL instead. That's because
23867 the C front-end supports some weird semantics for "extern inline"
23868 function definitions. These can get inlined within the current
23869 translation unit (and thus, we need to generate Dwarf info for their
23870 abstract instances so that the Dwarf info for the concrete inlined
23871 instances can have something to refer to) but the compiler never
23872 	 generates any out-of-line instances of such things (despite the fact
23873 that they *are* definitions).
23874
23875 The important point is that the C front-end marks these "extern
23876 inline" functions as DECL_EXTERNAL, but we need to generate DWARF for
23877 them anyway. Note that the C++ front-end also plays some similar games
23878 for inline function definitions appearing within include files which
23879 also contain `#pragma interface' pragmas.
23880
23881 If we are called from dwarf2out_abstract_function output a DIE
23882 anyway. We can end up here this way with early inlining and LTO
23883 where the inlined function is output in a different LTRANS unit
23884 or not at all. */
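      /* Illustrative example (GNU C89 "extern inline" semantics): a
	 definition like
	   extern inline int sq (int x) { return x * x; }
	 can be inlined into callers here yet never produce an out-of-line
	 copy in this translation unit, so DWARF for its abstract instance
	 is still needed. */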
23885 if (DECL_INITIAL (decl) == NULL_TREE
23886 && ! DECL_ABSTRACT_P (decl))
23887 return;
23888
23889 /* If we're a nested function, initially use a parent of NULL; if we're
23890 a plain function, this will be fixed up in decls_for_scope. If
23891 we're a method, it will be ignored, since we already have a DIE. */
23892 if (decl_function_context (decl)
23893 /* But if we're in terse mode, we don't care about scope. */
23894 && debug_info_level > DINFO_LEVEL_TERSE)
23895 context_die = NULL;
23896 break;
23897
23898 case VAR_DECL:
23899       /* For local statics, look up the proper context DIE. */
23900 if (local_function_static (decl))
23901 context_die = lookup_decl_die (DECL_CONTEXT (decl));
23902
23903 /* If we are in terse mode, don't generate any DIEs to represent any
23904 variable declarations or definitions. */
23905 if (debug_info_level <= DINFO_LEVEL_TERSE)
23906 return;
23907 break;
23908
23909 case CONST_DECL:
23910 if (debug_info_level <= DINFO_LEVEL_TERSE)
23911 return;
23912 if (!is_fortran () && !is_ada ())
23913 return;
23914 if (TREE_STATIC (decl) && decl_function_context (decl))
23915 context_die = lookup_decl_die (DECL_CONTEXT (decl));
23916 break;
23917
23918 case NAMESPACE_DECL:
23919 case IMPORTED_DECL:
23920 if (debug_info_level <= DINFO_LEVEL_TERSE)
23921 return;
23922 if (lookup_decl_die (decl) != NULL)
23923 return;
23924 break;
23925
23926 case TYPE_DECL:
23927 /* Don't emit stubs for types unless they are needed by other DIEs. */
23928 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
23929 return;
23930
23931 /* Don't bother trying to generate any DIEs to represent any of the
23932 normal built-in types for the language we are compiling. */
23933 if (DECL_IS_BUILTIN (decl))
23934 return;
23935
23936 /* If we are in terse mode, don't generate any DIEs for types. */
23937 if (debug_info_level <= DINFO_LEVEL_TERSE)
23938 return;
23939
23940 /* If we're a function-scope tag, initially use a parent of NULL;
23941 this will be fixed up in decls_for_scope. */
23942 if (decl_function_context (decl))
23943 context_die = NULL;
23944
23945 break;
23946
23947 case NAMELIST_DECL:
23948 break;
23949
23950 default:
23951 return;
23952 }
23953
23954 gen_decl_die (decl, NULL, NULL, context_die);
23955
23956 if (flag_checking)
23957 {
23958 dw_die_ref die = lookup_decl_die (decl);
23959 if (die)
23960 check_die (die);
23961 }
23962 }
23963
23964 /* Write the debugging output for DECL. */
23965
23966 static void
23967 dwarf2out_function_decl (tree decl)
23968 {
23969 dwarf2out_decl (decl);
23970 call_arg_locations = NULL;
23971 call_arg_loc_last = NULL;
23972 call_site_count = -1;
23973 tail_call_site_count = -1;
23974 decl_loc_table->empty ();
23975 cached_dw_loc_list_table->empty ();
23976 }
23977
23978 /* Output a marker (i.e. a label) for the beginning of the generated code for
23979 a lexical block. */
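/* The label combines BLOCK_BEGIN_LABEL with BLOCKNUM, so the assembly output
   typically contains something like ".LBB<n>:" (exact spelling is
   target-dependent; this is only an illustration). */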
23980
23981 static void
23982 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
23983 unsigned int blocknum)
23984 {
23985 switch_to_section (current_function_section ());
23986 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
23987 }
23988
23989 /* Output a marker (i.e. a label) for the end of the generated code for a
23990 lexical block. */
23991
23992 static void
23993 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
23994 {
23995 switch_to_section (current_function_section ());
23996 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
23997 }
23998
23999 /* Returns nonzero if it is appropriate not to emit any debugging
24000 information for BLOCK, because it doesn't contain any instructions.
24001
24002 Don't allow this for blocks with nested functions or local classes
24003 as we would end up with orphans, and in the presence of scheduling
24004 we may end up calling them anyway. */
24005
24006 static bool
24007 dwarf2out_ignore_block (const_tree block)
24008 {
24009 tree decl;
24010 unsigned int i;
24011
24012 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
24013 if (TREE_CODE (decl) == FUNCTION_DECL
24014 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
24015 return 0;
24016 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
24017 {
24018 decl = BLOCK_NONLOCALIZED_VAR (block, i);
24019 if (TREE_CODE (decl) == FUNCTION_DECL
24020 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
24021 return 0;
24022 }
24023
24024 return 1;
24025 }
24026
24027 /* Hash table routines for file_hash. */
24028
24029 bool
24030 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
24031 {
24032 return filename_cmp (p1->filename, p2) == 0;
24033 }
24034
24035 hashval_t
24036 dwarf_file_hasher::hash (dwarf_file_data *p)
24037 {
24038 return htab_hash_string (p->filename);
24039 }
24040
24041 /* Lookup FILE_NAME (in the list of filenames that we know about here in
24042 dwarf2out.c) and return its "index". The index of each (known) filename is
24043 just a unique number which is associated with only that one filename. We
24044 need such numbers for the sake of generating labels (in the .debug_sfnames
24045 section) and references to those files numbers (in the .debug_srcinfo
24046 and .debug_macinfo sections). If the filename given as an argument is not
24047 found in our current list, add it to the list and assign it the next
24048 available unique index number. */
24049
24050 static struct dwarf_file_data *
24051 lookup_filename (const char *file_name)
24052 {
24053 struct dwarf_file_data * created;
24054
24055 if (!file_name)
24056 return NULL;
24057
24058 dwarf_file_data **slot
24059 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
24060 INSERT);
24061 if (*slot)
24062 return *slot;
24063
24064 created = ggc_alloc<dwarf_file_data> ();
24065 created->filename = file_name;
24066 created->emitted_number = 0;
24067 *slot = created;
24068 return created;
24069 }
24070
24071 /* If the assembler will construct the file table, then translate the compiler
24072 internal file table number into the assembler file table number, and emit
24073 a .file directive if we haven't already emitted one. The file table
24074 numbers are different because we prune debug info for unused variables and
24075 types, which may include filenames. */
24076
24077 static int
24078 maybe_emit_file (struct dwarf_file_data * fd)
24079 {
24080 if (! fd->emitted_number)
24081 {
24082 if (last_emitted_file)
24083 fd->emitted_number = last_emitted_file->emitted_number + 1;
24084 else
24085 fd->emitted_number = 1;
24086 last_emitted_file = fd;
24087
24088 if (DWARF2_ASM_LINE_DEBUG_INFO)
24089 {
24090 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
24091 output_quoted_string (asm_out_file,
24092 remap_debug_filename (fd->filename));
24093 fputc ('\n', asm_out_file);
24094 }
24095 }
24096
24097 return fd->emitted_number;
24098 }
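
/* As a sketch of the assembler-constructed case: the first time a
   hypothetical source file "demo.c" is seen with DWARF2_ASM_LINE_DEBUG_INFO
   enabled, the code above would emit roughly

	.file 1 "demo.c"

   and every later lookup of the same dwarf_file_data simply returns the
   cached emitted_number 1.  */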
24099
24100 /* Schedule generation of a DW_AT_const_value attribute to DIE.
24101 That generation should happen after function debug info has been
24102 generated. The value of the attribute is the constant value of ARG. */
24103
24104 static void
24105 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
24106 {
24107 die_arg_entry entry;
24108
24109 if (!die || !arg)
24110 return;
24111
24112 gcc_assert (early_dwarf);
24113
24114 if (!tmpl_value_parm_die_table)
24115 vec_alloc (tmpl_value_parm_die_table, 32);
24116
24117 entry.die = die;
24118 entry.arg = arg;
24119 vec_safe_push (tmpl_value_parm_die_table, entry);
24120 }
24121
24122 /* Return TRUE if T is an instance of a generic type, FALSE
24123 otherwise. */
24124
24125 static bool
24126 generic_type_p (tree t)
24127 {
24128 if (t == NULL_TREE || !TYPE_P (t))
24129 return false;
24130 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
24131 }
24132
24133 /* Schedule the generation of the generic parameter dies for the
24134 instance of generic type T. The proper generation itself is later
24135 done by gen_scheduled_generic_parms_dies. */
24136
24137 static void
24138 schedule_generic_params_dies_gen (tree t)
24139 {
24140 if (!generic_type_p (t))
24141 return;
24142
24143 gcc_assert (early_dwarf);
24144
24145 if (!generic_type_instances)
24146 vec_alloc (generic_type_instances, 256);
24147
24148 vec_safe_push (generic_type_instances, t);
24149 }
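
/* As an example of the scheduling above (front-end dependent, so only a
   sketch): for a C++ instantiation such as S<int>,
   lang_hooks.get_innermost_generic_parms returns its template parameter
   list, so the instance is queued here and gen_scheduled_generic_parms_dies
   later lets gen_generic_params_dies add the children DIEs (typically
   DW_TAG_template_type_parameter or DW_TAG_template_value_parameter) once
   the type is known to be complete.  */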
24150
24151 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
24152 by append_entry_to_tmpl_value_parm_die_table. This function must
24153 be called after function DIEs have been generated. */
24154
24155 static void
24156 gen_remaining_tmpl_value_param_die_attribute (void)
24157 {
24158 if (tmpl_value_parm_die_table)
24159 {
24160 unsigned i, j;
24161 die_arg_entry *e;
24162
24163 /* We do this in two phases - first get the cases we can
24164 handle during early-finish, preserving those we cannot
24165 (containing symbolic constants where we don't yet know
24166 whether we are going to output the referenced symbols).
24167 For those we try again at late-finish. */
24168 j = 0;
24169 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
24170 {
24171 if (!tree_add_const_value_attribute (e->die, e->arg))
24172 (*tmpl_value_parm_die_table)[j++] = *e;
24173 }
24174 tmpl_value_parm_die_table->truncate (j);
24175 }
24176 }
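
/* Note on the loop above: it compacts the vector in place.  Entries whose
   constant value could already be attached are dropped; the rest are copied
   down to index j, and the final truncate leaves only those deferred
   entries, which are retried at late-finish.  */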
24177
24178 /* Generate generic parameters DIEs for instances of generic types
24179 that have been previously scheduled by
24180 schedule_generic_params_dies_gen. This function must be called
24181 after all the types of the CU have been laid out. */
24182
24183 static void
24184 gen_scheduled_generic_parms_dies (void)
24185 {
24186 unsigned i;
24187 tree t;
24188
24189 if (!generic_type_instances)
24190 return;
24191
24192 /* We end up "recursing" into schedule_generic_params_dies_gen, so
24193 pretend this generation is part of "early dwarf" as well. */
24194 set_early_dwarf s;
24195
24196 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
24197 if (COMPLETE_TYPE_P (t))
24198 gen_generic_params_dies (t);
24199
24200 generic_type_instances = NULL;
24201 }
24202
24203
24204 /* Replace DW_AT_name for the decl with name. */
24205
24206 static void
24207 dwarf2out_set_name (tree decl, tree name)
24208 {
24209 dw_die_ref die;
24210 dw_attr_node *attr;
24211 const char *dname;
24212
24213 die = TYPE_SYMTAB_DIE (decl);
24214 if (!die)
24215 return;
24216
24217 dname = dwarf2_name (name, 0);
24218 if (!dname)
24219 return;
24220
24221 attr = get_AT (die, DW_AT_name);
24222 if (attr)
24223 {
24224 struct indirect_string_node *node;
24225
24226 node = find_AT_string (dname);
24227 /* Replace the string. */
24228 attr->dw_attr_val.v.val_str = node;
24229 }
24230
24231 else
24232 add_name_attribute (die, dname);
24233 }
24234
24235 /* True if before or during processing of the first function being emitted. */
24236 static bool in_first_function_p = true;
24237 /* True if loc_note during dwarf2out_var_location call might still be
24238 before first real instruction at address equal to .Ltext0. */
24239 static bool maybe_at_text_label_p = true;
24240 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
24241 static unsigned int first_loclabel_num_not_at_text_label;
24242
24243 /* Called by the final INSN scan whenever we see a var location. We
24244 use it to drop labels in the right places, and throw the location in
24245 our lookup table. */
24246
24247 static void
24248 dwarf2out_var_location (rtx_insn *loc_note)
24249 {
24250 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
24251 struct var_loc_node *newloc;
24252 rtx_insn *next_real, *next_note;
24253 rtx_insn *call_insn = NULL;
24254 static const char *last_label;
24255 static const char *last_postcall_label;
24256 static bool last_in_cold_section_p;
24257 static rtx_insn *expected_next_loc_note;
24258 tree decl;
24259 bool var_loc_p;
24260
24261 if (!NOTE_P (loc_note))
24262 {
24263 if (CALL_P (loc_note))
24264 {
24265 call_site_count++;
24266 if (SIBLING_CALL_P (loc_note))
24267 tail_call_site_count++;
24268 if (optimize == 0 && !flag_var_tracking)
24269 {
24270 /* When the var-tracking pass is not running, there is no note
24271 for indirect calls whose target is compile-time known. In this
24272 case, process such calls specifically so that we generate call
24273 sites for them anyway. */
24274 rtx x = PATTERN (loc_note);
24275 if (GET_CODE (x) == PARALLEL)
24276 x = XVECEXP (x, 0, 0);
24277 if (GET_CODE (x) == SET)
24278 x = SET_SRC (x);
24279 if (GET_CODE (x) == CALL)
24280 x = XEXP (x, 0);
24281 if (!MEM_P (x)
24282 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
24283 || !SYMBOL_REF_DECL (XEXP (x, 0))
24284 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
24285 != FUNCTION_DECL))
24286 {
24287 call_insn = loc_note;
24288 loc_note = NULL;
24289 var_loc_p = false;
24290
24291 next_real = next_real_insn (call_insn);
24292 next_note = NULL;
24293 cached_next_real_insn = NULL;
24294 goto create_label;
24295 }
24296 }
24297 }
24298 return;
24299 }
24300
24301 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
24302 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
24303 return;
24304
24305 /* Optimize processing a large consecutive sequence of location
24306 notes so we don't spend too much time in next_real_insn. If the
24307 next insn is another location note, remember the next_real_insn
24308 calculation for next time. */
24309 next_real = cached_next_real_insn;
24310 if (next_real)
24311 {
24312 if (expected_next_loc_note != loc_note)
24313 next_real = NULL;
24314 }
24315
24316 next_note = NEXT_INSN (loc_note);
24317 if (! next_note
24318 || next_note->deleted ()
24319 || ! NOTE_P (next_note)
24320 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
24321 && NOTE_KIND (next_note) != NOTE_INSN_CALL_ARG_LOCATION))
24322 next_note = NULL;
24323
24324 if (! next_real)
24325 next_real = next_real_insn (loc_note);
24326
24327 if (next_note)
24328 {
24329 expected_next_loc_note = next_note;
24330 cached_next_real_insn = next_real;
24331 }
24332 else
24333 cached_next_real_insn = NULL;
24334
24335 /* If there are no instructions which would be affected by this note,
24336 don't do anything. */
24337 if (var_loc_p
24338 && next_real == NULL_RTX
24339 && !NOTE_DURING_CALL_P (loc_note))
24340 return;
24341
24342 create_label:
24343
24344 if (next_real == NULL_RTX)
24345 next_real = get_last_insn ();
24346
24347 /* If there were any real insns between the note we processed last time
24348 and this note (or if it is the first note), clear
24349 last_{,postcall_}label so that they are not reused this time. */
24350 if (last_var_location_insn == NULL_RTX
24351 || last_var_location_insn != next_real
24352 || last_in_cold_section_p != in_cold_section_p)
24353 {
24354 last_label = NULL;
24355 last_postcall_label = NULL;
24356 }
24357
24358 if (var_loc_p)
24359 {
24360 decl = NOTE_VAR_LOCATION_DECL (loc_note);
24361 newloc = add_var_loc_to_decl (decl, loc_note,
24362 NOTE_DURING_CALL_P (loc_note)
24363 ? last_postcall_label : last_label);
24364 if (newloc == NULL)
24365 return;
24366 }
24367 else
24368 {
24369 decl = NULL_TREE;
24370 newloc = NULL;
24371 }
24372
24373 /* If there were no real insns between the note we processed last time
24374 and this note, use the label we emitted last time. Otherwise
24375 create a new label and emit it. */
24376 if (last_label == NULL)
24377 {
24378 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
24379 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
24380 loclabel_num++;
24381 last_label = ggc_strdup (loclabel);
24382 /* See if loclabel might be equal to .Ltext0. If yes,
24383 bump first_loclabel_num_not_at_text_label. */
24384 if (!have_multiple_function_sections
24385 && in_first_function_p
24386 && maybe_at_text_label_p)
24387 {
24388 static rtx_insn *last_start;
24389 rtx_insn *insn;
24390 for (insn = loc_note; insn; insn = previous_insn (insn))
24391 if (insn == last_start)
24392 break;
24393 else if (!NONDEBUG_INSN_P (insn))
24394 continue;
24395 else
24396 {
24397 rtx body = PATTERN (insn);
24398 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
24399 continue;
24400 /* Inline asm could occupy zero bytes. */
24401 else if (GET_CODE (body) == ASM_INPUT
24402 || asm_noperands (body) >= 0)
24403 continue;
24404 #ifdef HAVE_attr_length
24405 else if (get_attr_min_length (insn) == 0)
24406 continue;
24407 #endif
24408 else
24409 {
24410 /* Assume insn has non-zero length. */
24411 maybe_at_text_label_p = false;
24412 break;
24413 }
24414 }
24415 if (maybe_at_text_label_p)
24416 {
24417 last_start = loc_note;
24418 first_loclabel_num_not_at_text_label = loclabel_num;
24419 }
24420 }
24421 }
24422
24423 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
24424 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
24425
24426 if (!var_loc_p)
24427 {
24428 struct call_arg_loc_node *ca_loc
24429 = ggc_cleared_alloc<call_arg_loc_node> ();
24430 rtx_insn *prev
24431 = loc_note != NULL_RTX ? prev_real_insn (loc_note) : call_insn;
24432
24433 ca_loc->call_arg_loc_note = loc_note;
24434 ca_loc->next = NULL;
24435 ca_loc->label = last_label;
24436 gcc_assert (prev
24437 && (CALL_P (prev)
24438 || (NONJUMP_INSN_P (prev)
24439 && GET_CODE (PATTERN (prev)) == SEQUENCE
24440 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
24441 if (!CALL_P (prev))
24442 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
24443 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
24444
24445 /* Look for a SYMBOL_REF in the "prev" instruction. */
24446 rtx x = get_call_rtx_from (PATTERN (prev));
24447 if (x)
24448 {
24449 /* Try to get the call symbol, if any. */
24450 if (MEM_P (XEXP (x, 0)))
24451 x = XEXP (x, 0);
24452 /* First, look for a memory access to a symbol_ref. */
24453 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
24454 && SYMBOL_REF_DECL (XEXP (x, 0))
24455 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
24456 ca_loc->symbol_ref = XEXP (x, 0);
24457 /* Otherwise, look at a compile-time known user-level function
24458 declaration. */
24459 else if (MEM_P (x)
24460 && MEM_EXPR (x)
24461 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
24462 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
24463 }
24464
24465 ca_loc->block = insn_scope (prev);
24466 if (call_arg_locations)
24467 call_arg_loc_last->next = ca_loc;
24468 else
24469 call_arg_locations = ca_loc;
24470 call_arg_loc_last = ca_loc;
24471 }
24472 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
24473 newloc->label = last_label;
24474 else
24475 {
24476 if (!last_postcall_label)
24477 {
24478 sprintf (loclabel, "%s-1", last_label);
24479 last_postcall_label = ggc_strdup (loclabel);
24480 }
24481 newloc->label = last_postcall_label;
24482 }
24483
24484 last_var_location_insn = next_real;
24485 last_in_cold_section_p = in_cold_section_p;
24486 }
24487
24488 /* Called from finalize_size_functions for size functions so that their body
24489 can be encoded in the debug info to describe the layout of variable-length
24490 structures. */
24491
24492 static void
24493 dwarf2out_size_function (tree decl)
24494 {
24495 function_to_dwarf_procedure (decl);
24496 }
24497
24498 /* Note in one location list that text section has changed. */
24499
24500 int
24501 var_location_switch_text_section_1 (var_loc_list **slot, void *)
24502 {
24503 var_loc_list *list = *slot;
24504 if (list->first)
24505 list->last_before_switch
24506 = list->last->next ? list->last->next : list->last;
24507 return 1;
24508 }
24509
24510 /* Note in all location lists that text section has changed. */
24511
24512 static void
24513 var_location_switch_text_section (void)
24514 {
24515 if (decl_loc_table == NULL)
24516 return;
24517
24518 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
24519 }
24520
24521 /* Create a new line number table. */
24522
24523 static dw_line_info_table *
24524 new_line_info_table (void)
24525 {
24526 dw_line_info_table *table;
24527
24528 table = ggc_cleared_alloc<dw_line_info_table> ();
24529 table->file_num = 1;
24530 table->line_num = 1;
24531 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
24532
24533 return table;
24534 }
24535
24536 /* Look up the "current" table into which we emit line info, so
24537 that we don't have to do it for every source line. */
24538
24539 static void
24540 set_cur_line_info_table (section *sec)
24541 {
24542 dw_line_info_table *table;
24543
24544 if (sec == text_section)
24545 table = text_section_line_info;
24546 else if (sec == cold_text_section)
24547 {
24548 table = cold_text_section_line_info;
24549 if (!table)
24550 {
24551 cold_text_section_line_info = table = new_line_info_table ();
24552 table->end_label = cold_end_label;
24553 }
24554 }
24555 else
24556 {
24557 const char *end_label;
24558
24559 if (flag_reorder_blocks_and_partition)
24560 {
24561 if (in_cold_section_p)
24562 end_label = crtl->subsections.cold_section_end_label;
24563 else
24564 end_label = crtl->subsections.hot_section_end_label;
24565 }
24566 else
24567 {
24568 char label[MAX_ARTIFICIAL_LABEL_BYTES];
24569 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
24570 current_function_funcdef_no);
24571 end_label = ggc_strdup (label);
24572 }
24573
24574 table = new_line_info_table ();
24575 table->end_label = end_label;
24576
24577 vec_safe_push (separate_line_info, table);
24578 }
24579
24580 if (DWARF2_ASM_LINE_DEBUG_INFO)
24581 table->is_stmt = (cur_line_info_table
24582 ? cur_line_info_table->is_stmt
24583 : DWARF_LINE_DEFAULT_IS_STMT_START);
24584 cur_line_info_table = table;
24585 }
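
/* In short: the .text and cold text sections each keep a single cached line
   table, while any other (e.g. per-function) section gets a fresh table
   whose end_label records where that section's code ends; those extra
   tables are collected in separate_line_info for later output.  */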
24586
24587
24588 /* We need to reset the locations at the beginning of each
24589 function. We can't do this in the end_function hook, because the
24590 declarations that use the locations won't have been output when
24591 that hook is called. Also compute have_multiple_function_sections here. */
24592
24593 static void
24594 dwarf2out_begin_function (tree fun)
24595 {
24596 section *sec = function_section (fun);
24597
24598 if (sec != text_section)
24599 have_multiple_function_sections = true;
24600
24601 if (flag_reorder_blocks_and_partition && !cold_text_section)
24602 {
24603 gcc_assert (current_function_decl == fun);
24604 cold_text_section = unlikely_text_section ();
24605 switch_to_section (cold_text_section);
24606 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
24607 switch_to_section (sec);
24608 }
24609
24610 dwarf2out_note_section_used ();
24611 call_site_count = 0;
24612 tail_call_site_count = 0;
24613
24614 set_cur_line_info_table (sec);
24615 }
24616
24617 /* Helper function of dwarf2out_end_function, called only after emitting
24618 the very first function into assembly. Check if some .debug_loc range
24619 might end with a .LVL* label that could be equal to .Ltext0.
24620 In that case we must force using absolute addresses in .debug_loc ranges,
24621 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
24622 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
24623 list terminator.
24624 Set have_multiple_function_sections to true in that case and
24625 terminate htab traversal. */
24626
24627 int
24628 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
24629 {
24630 var_loc_list *entry = *slot;
24631 struct var_loc_node *node;
24632
24633 node = entry->first;
24634 if (node && node->next && node->next->label)
24635 {
24636 unsigned int i;
24637 const char *label = node->next->label;
24638 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
24639
24640 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
24641 {
24642 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
24643 if (strcmp (label, loclabel) == 0)
24644 {
24645 have_multiple_function_sections = true;
24646 return 0;
24647 }
24648 }
24649 }
24650 return 1;
24651 }
24652
24653 /* Hook called after emitting a function into assembly.
24654 This does something only for the very first function emitted. */
24655
24656 static void
24657 dwarf2out_end_function (unsigned int)
24658 {
24659 if (in_first_function_p
24660 && !have_multiple_function_sections
24661 && first_loclabel_num_not_at_text_label
24662 && decl_loc_table)
24663 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
24664 in_first_function_p = false;
24665 maybe_at_text_label_p = false;
24666 }
24667
24668 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
24669 front-ends register a translation unit even before dwarf2out_init is
24670 called. */
24671 static tree main_translation_unit = NULL_TREE;
24672
24673 /* Hook called by front-ends after they built their main translation unit.
24674 Associate comp_unit_die to UNIT. */
24675
24676 static void
24677 dwarf2out_register_main_translation_unit (tree unit)
24678 {
24679 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
24680 && main_translation_unit == NULL_TREE);
24681 main_translation_unit = unit;
24682 /* If dwarf2out_init has not been called yet, it will perform the association
24683 itself looking at main_translation_unit. */
24684 if (decl_die_table != NULL)
24685 equate_decl_number_to_die (unit, comp_unit_die ());
24686 }
24687
24688 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
24689
24690 static void
24691 push_dw_line_info_entry (dw_line_info_table *table,
24692 enum dw_line_info_opcode opcode, unsigned int val)
24693 {
24694 dw_line_info_entry e;
24695 e.opcode = opcode;
24696 e.val = val;
24697 vec_safe_push (table->entries, e);
24698 }
24699
24700 /* Output a label to mark the beginning of a source code line entry
24701 and record information relating to this source line, in
24702 'line_info_table' for later output of the .debug_line section. */
24703 /* ??? The discriminator parameter ought to be unsigned. */
24704
24705 static void
24706 dwarf2out_source_line (unsigned int line, const char *filename,
24707 int discriminator, bool is_stmt)
24708 {
24709 unsigned int file_num;
24710 dw_line_info_table *table;
24711
24712 if (debug_info_level < DINFO_LEVEL_TERSE || line == 0)
24713 return;
24714
24715 /* The discriminator column was added in DWARF 4. Simplify the code below
24716 by removing the discriminator if we're not supposed to output it. */
24717 if (dwarf_version < 4 && dwarf_strict)
24718 discriminator = 0;
24719
24720 table = cur_line_info_table;
24721 file_num = maybe_emit_file (lookup_filename (filename));
24722
24723 /* ??? TODO: Elide duplicate line number entries. Traditionally,
24724 the debugger has used the second (possibly duplicate) line number
24725 at the beginning of the function to mark the end of the prologue.
24726 We could eliminate any other duplicates within the function. For
24727 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
24728 that second line number entry. */
24729 /* Recall that this end-of-prologue indication is *not* the same thing
24730 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
24731 to which the hook corresponds, follows the last insn that was
24732 emitted by gen_prologue. What we need is to precede the first insn
24733 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
24734 insn that corresponds to something the user wrote. These may be
24735 very different locations once scheduling is enabled. */
24736
24737 if (0 && file_num == table->file_num
24738 && line == table->line_num
24739 && discriminator == table->discrim_num
24740 && is_stmt == table->is_stmt)
24741 return;
24742
24743 switch_to_section (current_function_section ());
24744
24745 /* If requested, emit something human-readable. */
24746 if (flag_debug_asm)
24747 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START, filename, line);
24748
24749 if (DWARF2_ASM_LINE_DEBUG_INFO)
24750 {
24751 /* Emit the .loc directive understood by GNU as. */
24752 /* "\t.loc %u %u 0 is_stmt %u discriminator %u",
24753 file_num, line, is_stmt, discriminator */
24754 fputs ("\t.loc ", asm_out_file);
24755 fprint_ul (asm_out_file, file_num);
24756 putc (' ', asm_out_file);
24757 fprint_ul (asm_out_file, line);
24758 putc (' ', asm_out_file);
24759 putc ('0', asm_out_file);
24760
24761 if (is_stmt != table->is_stmt)
24762 {
24763 fputs (" is_stmt ", asm_out_file);
24764 putc (is_stmt ? '1' : '0', asm_out_file);
24765 }
24766 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
24767 {
24768 gcc_assert (discriminator > 0);
24769 fputs (" discriminator ", asm_out_file);
24770 fprint_ul (asm_out_file, (unsigned long) discriminator);
24771 }
24772 putc ('\n', asm_out_file);
24773 }
24774 else
24775 {
24776 unsigned int label_num = ++line_info_label_num;
24777
24778 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
24779
24780 push_dw_line_info_entry (table, LI_set_address, label_num);
24781 if (file_num != table->file_num)
24782 push_dw_line_info_entry (table, LI_set_file, file_num);
24783 if (discriminator != table->discrim_num)
24784 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
24785 if (is_stmt != table->is_stmt)
24786 push_dw_line_info_entry (table, LI_negate_stmt, 0);
24787 push_dw_line_info_entry (table, LI_set_line, line);
24788 }
24789
24790 table->file_num = file_num;
24791 table->line_num = line;
24792 table->discrim_num = discriminator;
24793 table->is_stmt = is_stmt;
24794 table->in_use = true;
24795 }
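
/* A sketch of the DWARF2_ASM_LINE_DEBUG_INFO branch above, assuming file
   number 1, source line 42, a change of the is_stmt flag and a non-zero
   discriminator of 3; the directive emitted would look like

	.loc 1 42 0 is_stmt 1 discriminator 3

   The third operand is always 0 because no column information is tracked
   here.  */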
24796
24797 /* Record the beginning of a new source file. */
24798
24799 static void
24800 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
24801 {
24802 if (flag_eliminate_dwarf2_dups)
24803 {
24804 /* Record the beginning of the file for break_out_includes. */
24805 dw_die_ref bincl_die;
24806
24807 bincl_die = new_die (DW_TAG_GNU_BINCL, comp_unit_die (), NULL);
24808 add_AT_string (bincl_die, DW_AT_name, remap_debug_filename (filename));
24809 }
24810
24811 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
24812 {
24813 macinfo_entry e;
24814 e.code = DW_MACINFO_start_file;
24815 e.lineno = lineno;
24816 e.info = ggc_strdup (filename);
24817 vec_safe_push (macinfo_table, e);
24818 }
24819 }
24820
24821 /* Record the end of a source file. */
24822
24823 static void
24824 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
24825 {
24826 if (flag_eliminate_dwarf2_dups)
24827 /* Record the end of the file for break_out_includes. */
24828 new_die (DW_TAG_GNU_EINCL, comp_unit_die (), NULL);
24829
24830 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
24831 {
24832 macinfo_entry e;
24833 e.code = DW_MACINFO_end_file;
24834 e.lineno = lineno;
24835 e.info = NULL;
24836 vec_safe_push (macinfo_table, e);
24837 }
24838 }
24839
24840 /* Called from debug_define in toplev.c. The `buffer' parameter contains
24841 the tail part of the directive line, i.e. the part which is past the
24842 initial whitespace, #, whitespace, directive-name, whitespace part. */
24843
24844 static void
24845 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
24846 const char *buffer ATTRIBUTE_UNUSED)
24847 {
24848 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
24849 {
24850 macinfo_entry e;
24851 /* Insert a dummy first entry to be able to optimize the whole
24852 predefined macro block using DW_MACRO_GNU_transparent_include. */
24853 if (macinfo_table->is_empty () && lineno <= 1)
24854 {
24855 e.code = 0;
24856 e.lineno = 0;
24857 e.info = NULL;
24858 vec_safe_push (macinfo_table, e);
24859 }
24860 e.code = DW_MACINFO_define;
24861 e.lineno = lineno;
24862 e.info = ggc_strdup (buffer);
24863 vec_safe_push (macinfo_table, e);
24864 }
24865 }
24866
24867 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
24868 the tail part of the directive line, i.e. the part which is past the
24869 initial whitespace, #, whitespace, directive-name, whitespace part. */
24870
24871 static void
24872 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
24873 const char *buffer ATTRIBUTE_UNUSED)
24874 {
24875 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
24876 {
24877 macinfo_entry e;
24878 /* Insert a dummy first entry to be able to optimize the whole
24879 predefined macro block using DW_MACRO_GNU_transparent_include. */
24880 if (macinfo_table->is_empty () && lineno <= 1)
24881 {
24882 e.code = 0;
24883 e.lineno = 0;
24884 e.info = NULL;
24885 vec_safe_push (macinfo_table, e);
24886 }
24887 e.code = DW_MACINFO_undef;
24888 e.lineno = lineno;
24889 e.info = ggc_strdup (buffer);
24890 vec_safe_push (macinfo_table, e);
24891 }
24892 }
24893
24894 /* Helpers to manipulate the hash table of macinfo entries. */
24895
24896 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
24897 {
24898 static inline hashval_t hash (const macinfo_entry *);
24899 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
24900 };
24901
24902 inline hashval_t
24903 macinfo_entry_hasher::hash (const macinfo_entry *entry)
24904 {
24905 return htab_hash_string (entry->info);
24906 }
24907
24908 inline bool
24909 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
24910 const macinfo_entry *entry2)
24911 {
24912 return !strcmp (entry1->info, entry2->info);
24913 }
24914
24915 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
24916
24917 /* Output a single .debug_macinfo entry. */
24918
24919 static void
24920 output_macinfo_op (macinfo_entry *ref)
24921 {
24922 int file_num;
24923 size_t len;
24924 struct indirect_string_node *node;
24925 char label[MAX_ARTIFICIAL_LABEL_BYTES];
24926 struct dwarf_file_data *fd;
24927
24928 switch (ref->code)
24929 {
24930 case DW_MACINFO_start_file:
24931 fd = lookup_filename (ref->info);
24932 file_num = maybe_emit_file (fd);
24933 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
24934 dw2_asm_output_data_uleb128 (ref->lineno,
24935 "Included from line number %lu",
24936 (unsigned long) ref->lineno);
24937 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
24938 break;
24939 case DW_MACINFO_end_file:
24940 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
24941 break;
24942 case DW_MACINFO_define:
24943 case DW_MACINFO_undef:
24944 len = strlen (ref->info) + 1;
24945 if (!dwarf_strict
24946 && len > DWARF_OFFSET_SIZE
24947 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
24948 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
24949 {
24950 ref->code = ref->code == DW_MACINFO_define
24951 ? DW_MACRO_GNU_define_indirect
24952 : DW_MACRO_GNU_undef_indirect;
24953 output_macinfo_op (ref);
24954 return;
24955 }
24956 dw2_asm_output_data (1, ref->code,
24957 ref->code == DW_MACINFO_define
24958 ? "Define macro" : "Undefine macro");
24959 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
24960 (unsigned long) ref->lineno);
24961 dw2_asm_output_nstring (ref->info, -1, "The macro");
24962 break;
24963 case DW_MACRO_GNU_define_indirect:
24964 case DW_MACRO_GNU_undef_indirect:
24965 node = find_AT_string (ref->info);
24966 gcc_assert (node
24967 && ((node->form == DW_FORM_strp)
24968 || (node->form == DW_FORM_GNU_str_index)));
24969 dw2_asm_output_data (1, ref->code,
24970 ref->code == DW_MACRO_GNU_define_indirect
24971 ? "Define macro indirect"
24972 : "Undefine macro indirect");
24973 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
24974 (unsigned long) ref->lineno);
24975 if (node->form == DW_FORM_strp)
24976 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
24977 debug_str_section, "The macro: \"%s\"",
24978 ref->info);
24979 else
24980 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
24981 ref->info);
24982 break;
24983 case DW_MACRO_GNU_transparent_include:
24984 dw2_asm_output_data (1, ref->code, "Transparent include");
24985 ASM_GENERATE_INTERNAL_LABEL (label,
24986 DEBUG_MACRO_SECTION_LABEL, ref->lineno);
24987 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
24988 break;
24989 default:
24990 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
24991 ASM_COMMENT_START, (unsigned long) ref->code);
24992 break;
24993 }
24994 }
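
/* A worked example of the DW_MACINFO_define case above (the macro itself is
   hypothetical): for "#define PI 3.14" seen on line 7, the entry consists of
   the opcode byte DW_MACINFO_define (0x01), the uleb128 line number 7, and
   the NUL-terminated string "PI 3.14" exactly as it was handed to
   dwarf2out_define.  */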
24995
24996 /* Attempt to make a sequence of define/undef macinfo ops shareable with
24997 other compilation units' .debug_macinfo sections. IDX is the index of
24998 the first define/undef op in the sequence; return the number of ops
24999 that should be emitted in a comdat .debug_macinfo section and emit
25000 a DW_MACRO_GNU_transparent_include entry referencing it.
25001 If the define/undef entry should be emitted normally, return 0. */
25002
25003 static unsigned
25004 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
25005 macinfo_hash_type **macinfo_htab)
25006 {
25007 macinfo_entry *first, *second, *cur, *inc;
25008 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
25009 unsigned char checksum[16];
25010 struct md5_ctx ctx;
25011 char *grp_name, *tail;
25012 const char *base;
25013 unsigned int i, count, encoded_filename_len, linebuf_len;
25014 macinfo_entry **slot;
25015
25016 first = &(*macinfo_table)[idx];
25017 second = &(*macinfo_table)[idx + 1];
25018
25019 /* Optimize only if there are at least two consecutive define/undef ops,
25020 and either all of them are before the first DW_MACINFO_start_file
25021 with lineno {0,1} (i.e. predefined macro block), or all of them are
25022 in some included header file. */
25023 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
25024 return 0;
25025 if (vec_safe_is_empty (files))
25026 {
25027 if (first->lineno > 1 || second->lineno > 1)
25028 return 0;
25029 }
25030 else if (first->lineno == 0)
25031 return 0;
25032
25033 /* Find the last define/undef entry that can be grouped together
25034 with first and at the same time compute the MD5 checksum of their
25035 codes, line numbers and strings. */
25036 md5_init_ctx (&ctx);
25037 for (i = idx; macinfo_table->iterate (i, &cur); i++)
25038 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
25039 break;
25040 else if (vec_safe_is_empty (files) && cur->lineno > 1)
25041 break;
25042 else
25043 {
25044 unsigned char code = cur->code;
25045 md5_process_bytes (&code, 1, &ctx);
25046 checksum_uleb128 (cur->lineno, &ctx);
25047 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
25048 }
25049 md5_finish_ctx (&ctx, checksum);
25050 count = i - idx;
25051
25052 /* From the containing include filename (if any) pick up just
25053 usable characters from its basename. */
25054 if (vec_safe_is_empty (files))
25055 base = "";
25056 else
25057 base = lbasename (files->last ().info);
25058 for (encoded_filename_len = 0, i = 0; base[i]; i++)
25059 if (ISIDNUM (base[i]) || base[i] == '.')
25060 encoded_filename_len++;
25061 /* Count the '.' that will be appended after the filename part. */
25062 if (encoded_filename_len)
25063 encoded_filename_len++;
25064
25065 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
25066 linebuf_len = strlen (linebuf);
25067
25068 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
25069 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
25070 + 16 * 2 + 1);
25071 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
25072 tail = grp_name + 4;
25073 if (encoded_filename_len)
25074 {
25075 for (i = 0; base[i]; i++)
25076 if (ISIDNUM (base[i]) || base[i] == '.')
25077 *tail++ = base[i];
25078 *tail++ = '.';
25079 }
25080 memcpy (tail, linebuf, linebuf_len);
25081 tail += linebuf_len;
25082 *tail++ = '.';
25083 for (i = 0; i < 16; i++)
25084 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
25085
25086 /* Construct a macinfo_entry for DW_MACRO_GNU_transparent_include
25087 in the empty vector entry before the first define/undef. */
25088 inc = &(*macinfo_table)[idx - 1];
25089 inc->code = DW_MACRO_GNU_transparent_include;
25090 inc->lineno = 0;
25091 inc->info = ggc_strdup (grp_name);
25092 if (!*macinfo_htab)
25093 *macinfo_htab = new macinfo_hash_type (10);
25094 /* Avoid emitting duplicates. */
25095 slot = (*macinfo_htab)->find_slot (inc, INSERT);
25096 if (*slot != NULL)
25097 {
25098 inc->code = 0;
25099 inc->info = NULL;
25100 /* If such an entry has been used before, just emit
25101 a DW_MACRO_GNU_transparent_include op. */
25102 inc = *slot;
25103 output_macinfo_op (inc);
25104 /* And clear all macinfo_entry in the range to avoid emitting them
25105 in the second pass. */
25106 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
25107 {
25108 cur->code = 0;
25109 cur->info = NULL;
25110 }
25111 }
25112 else
25113 {
25114 *slot = inc;
25115 inc->lineno = (*macinfo_htab)->elements ();
25116 output_macinfo_op (inc);
25117 }
25118 return count;
25119 }
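
/* Purely as an illustration of the group name built above: with a 4-byte
   DWARF offset size, a run of define/undef ops coming from a hypothetical
   header "config.h" whose first op is on line 12 would get a comdat key of
   the form

	wm4.config.h.12.<32 hex digits of the md5 checksum>

   so identical macro blocks from different translation units can share a
   single comdat copy of the macro section.  */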
25120
25121 /* Save any strings needed by the macinfo table in the debug str
25122 table. All strings must be collected into the table by the time
25123 index_string is called. */
25124
25125 static void
25126 save_macinfo_strings (void)
25127 {
25128 unsigned len;
25129 unsigned i;
25130 macinfo_entry *ref;
25131
25132 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
25133 {
25134 switch (ref->code)
25135 {
25136 /* Match the logic in output_macinfo_op to decide on
25137 indirect strings. */
25138 case DW_MACINFO_define:
25139 case DW_MACINFO_undef:
25140 len = strlen (ref->info) + 1;
25141 if (!dwarf_strict
25142 && len > DWARF_OFFSET_SIZE
25143 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
25144 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
25145 set_indirect_string (find_AT_string (ref->info));
25146 break;
25147 case DW_MACRO_GNU_define_indirect:
25148 case DW_MACRO_GNU_undef_indirect:
25149 set_indirect_string (find_AT_string (ref->info));
25150 break;
25151 default:
25152 break;
25153 }
25154 }
25155 }
25156
25157 /* Output macinfo section(s). */
25158
25159 static void
25160 output_macinfo (void)
25161 {
25162 unsigned i;
25163 unsigned long length = vec_safe_length (macinfo_table);
25164 macinfo_entry *ref;
25165 vec<macinfo_entry, va_gc> *files = NULL;
25166 macinfo_hash_type *macinfo_htab = NULL;
25167
25168 if (! length)
25169 return;
25170
25171 /* output_macinfo* uses these interchangeably. */
25172 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_GNU_define
25173 && (int) DW_MACINFO_undef == (int) DW_MACRO_GNU_undef
25174 && (int) DW_MACINFO_start_file == (int) DW_MACRO_GNU_start_file
25175 && (int) DW_MACINFO_end_file == (int) DW_MACRO_GNU_end_file);
25176
25177 /* For .debug_macro emit the section header. */
25178 if (!dwarf_strict)
25179 {
25180 dw2_asm_output_data (2, 4, "DWARF macro version number");
25181 if (DWARF_OFFSET_SIZE == 8)
25182 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
25183 else
25184 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
25185 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
25186 (!dwarf_split_debug_info ? debug_line_section_label
25187 : debug_skeleton_line_section_label),
25188 debug_line_section, NULL);
25189 }
25190
25191 /* The first loop emits the primary .debug_macinfo section and clears
25192 each macinfo_entry once its op has been output. If a longer range
25193 of define/undef ops can be optimized using
25194 DW_MACRO_GNU_transparent_include, the DW_MACRO_GNU_transparent_include
25195 op is emitted and kept in the vector slot just before the first
25196 define/undef of the range, while the define/undef ops themselves are
25197 not emitted here but are kept for the second loop below. */
25198 for (i = 0; macinfo_table->iterate (i, &ref); i++)
25199 {
25200 switch (ref->code)
25201 {
25202 case DW_MACINFO_start_file:
25203 vec_safe_push (files, *ref);
25204 break;
25205 case DW_MACINFO_end_file:
25206 if (!vec_safe_is_empty (files))
25207 files->pop ();
25208 break;
25209 case DW_MACINFO_define:
25210 case DW_MACINFO_undef:
25211 if (!dwarf_strict
25212 && HAVE_COMDAT_GROUP
25213 && vec_safe_length (files) != 1
25214 && i > 0
25215 && i + 1 < length
25216 && (*macinfo_table)[i - 1].code == 0)
25217 {
25218 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
25219 if (count)
25220 {
25221 i += count - 1;
25222 continue;
25223 }
25224 }
25225 break;
25226 case 0:
25227 /* A dummy entry may be inserted at the beginning to be able
25228 to optimize the whole block of predefined macros. */
25229 if (i == 0)
25230 continue;
25231 default:
25232 break;
25233 }
25234 output_macinfo_op (ref);
25235 ref->info = NULL;
25236 ref->code = 0;
25237 }
25238
25239 if (!macinfo_htab)
25240 return;
25241
25242 delete macinfo_htab;
25243 macinfo_htab = NULL;
25244
25245 /* If any DW_MACRO_GNU_transparent_include were used, on those
25246 DW_MACRO_GNU_transparent_include entries terminate the
25247 current chain and switch to a new comdat .debug_macinfo
25248 section and emit the define/undef entries within it. */
25249 for (i = 0; macinfo_table->iterate (i, &ref); i++)
25250 switch (ref->code)
25251 {
25252 case 0:
25253 continue;
25254 case DW_MACRO_GNU_transparent_include:
25255 {
25256 char label[MAX_ARTIFICIAL_LABEL_BYTES];
25257 tree comdat_key = get_identifier (ref->info);
25258 /* Terminate the previous .debug_macinfo section. */
25259 dw2_asm_output_data (1, 0, "End compilation unit");
25260 targetm.asm_out.named_section (DEBUG_MACRO_SECTION,
25261 SECTION_DEBUG
25262 | SECTION_LINKONCE,
25263 comdat_key);
25264 ASM_GENERATE_INTERNAL_LABEL (label,
25265 DEBUG_MACRO_SECTION_LABEL,
25266 ref->lineno);
25267 ASM_OUTPUT_LABEL (asm_out_file, label);
25268 ref->code = 0;
25269 ref->info = NULL;
25270 dw2_asm_output_data (2, 4, "DWARF macro version number");
25271 if (DWARF_OFFSET_SIZE == 8)
25272 dw2_asm_output_data (1, 1, "Flags: 64-bit");
25273 else
25274 dw2_asm_output_data (1, 0, "Flags: 32-bit");
25275 }
25276 break;
25277 case DW_MACINFO_define:
25278 case DW_MACINFO_undef:
25279 output_macinfo_op (ref);
25280 ref->code = 0;
25281 ref->info = NULL;
25282 break;
25283 default:
25284 gcc_unreachable ();
25285 }
25286 }
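
/* Summary of the second loop above: each kept
   DW_MACRO_GNU_transparent_include entry terminates the section currently
   being written with a 0 byte, switches to a SECTION_LINKONCE comdat macro
   section keyed by the group name, emits the label that the corresponding
   transparent_include op references, and re-emits the version/flags header
   (this time without a lineptr) before the following define/undef ops are
   written into that section.  */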
25287
25288 /* Set up for Dwarf output at the start of compilation. */
25289
25290 static void
25291 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
25292 {
25293 /* This option is currently broken; see PR53118 and PR46102. */
25294 if (flag_eliminate_dwarf2_dups
25295 && strstr (lang_hooks.name, "C++"))
25296 {
25297 warning (0, "-feliminate-dwarf2-dups is broken for C++, ignoring");
25298 flag_eliminate_dwarf2_dups = 0;
25299 }
25300
25301 /* Allocate the file_table. */
25302 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
25303
25304 #ifndef DWARF2_LINENO_DEBUGGING_INFO
25305 /* Allocate the decl_die_table. */
25306 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
25307
25308 /* Allocate the decl_loc_table. */
25309 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
25310
25311 /* Allocate the cached_dw_loc_list_table. */
25312 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
25313
25314 /* Allocate the initial hunk of the decl_scope_table. */
25315 vec_alloc (decl_scope_table, 256);
25316
25317 /* Allocate the initial hunk of the abbrev_die_table. */
25318 abbrev_die_table = ggc_cleared_vec_alloc<dw_die_ref>
25319 (ABBREV_DIE_TABLE_INCREMENT);
25320 abbrev_die_table_allocated = ABBREV_DIE_TABLE_INCREMENT;
25321 /* Zero-th entry is allocated, but unused. */
25322 abbrev_die_table_in_use = 1;
25323
25324 /* Allocate the dwarf_proc_stack_usage_map. */
25325 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
25326
25327 /* Allocate the pubtypes and pubnames vectors. */
25328 vec_alloc (pubname_table, 32);
25329 vec_alloc (pubtype_table, 32);
25330
25331 vec_alloc (incomplete_types, 64);
25332
25333 vec_alloc (used_rtx_array, 32);
25334
25335 if (!dwarf_split_debug_info)
25336 {
25337 debug_info_section = get_section (DEBUG_INFO_SECTION,
25338 SECTION_DEBUG, NULL);
25339 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
25340 SECTION_DEBUG, NULL);
25341 debug_loc_section = get_section (DEBUG_LOC_SECTION,
25342 SECTION_DEBUG, NULL);
25343 }
25344 else
25345 {
25346 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
25347 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
25348 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
25349 SECTION_DEBUG | SECTION_EXCLUDE,
25350 NULL);
25351 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
25352 SECTION_DEBUG, NULL);
25353 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
25354 SECTION_DEBUG, NULL);
25355 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
25356 SECTION_DEBUG, NULL);
25357 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
25358 DEBUG_SKELETON_ABBREV_SECTION_LABEL, 0);
25359
25360 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections stay in
25361 the main .o, but the skeleton_line goes into the split-off .dwo. */
25362 debug_skeleton_line_section
25363 = get_section (DEBUG_DWO_LINE_SECTION,
25364 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
25365 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
25366 DEBUG_SKELETON_LINE_SECTION_LABEL, 0);
25367 debug_str_offsets_section = get_section (DEBUG_STR_OFFSETS_SECTION,
25368 SECTION_DEBUG | SECTION_EXCLUDE,
25369 NULL);
25370 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
25371 DEBUG_SKELETON_INFO_SECTION_LABEL, 0);
25372 debug_loc_section = get_section (DEBUG_DWO_LOC_SECTION,
25373 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
25374 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
25375 DEBUG_STR_DWO_SECTION_FLAGS, NULL);
25376 }
25377 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
25378 SECTION_DEBUG, NULL);
25379 debug_macinfo_section = get_section (dwarf_strict
25380 ? DEBUG_MACINFO_SECTION
25381 : DEBUG_MACRO_SECTION,
25382 DEBUG_MACRO_SECTION_FLAGS, NULL);
25383 debug_line_section = get_section (DEBUG_LINE_SECTION,
25384 SECTION_DEBUG, NULL);
25385 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
25386 SECTION_DEBUG, NULL);
25387 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
25388 SECTION_DEBUG, NULL);
25389 debug_str_section = get_section (DEBUG_STR_SECTION,
25390 DEBUG_STR_SECTION_FLAGS, NULL);
25391 debug_ranges_section = get_section (DEBUG_RANGES_SECTION,
25392 SECTION_DEBUG, NULL);
25393 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
25394 SECTION_DEBUG, NULL);
25395
25396 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
25397 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
25398 DEBUG_ABBREV_SECTION_LABEL, 0);
25399 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
25400 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
25401 COLD_TEXT_SECTION_LABEL, 0);
25402 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
25403
25404 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
25405 DEBUG_INFO_SECTION_LABEL, 0);
25406 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
25407 DEBUG_LINE_SECTION_LABEL, 0);
25408 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
25409 DEBUG_RANGES_SECTION_LABEL, 0);
25410 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
25411 DEBUG_ADDR_SECTION_LABEL, 0);
25412 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
25413 dwarf_strict
25414 ? DEBUG_MACINFO_SECTION_LABEL
25415 : DEBUG_MACRO_SECTION_LABEL, 0);
25416 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL, 0);
25417
25418 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
25419 vec_alloc (macinfo_table, 64);
25420
25421 switch_to_section (text_section);
25422 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
25423 #endif
25424
25425 /* Make sure the line number table for .text always exists. */
25426 text_section_line_info = new_line_info_table ();
25427 text_section_line_info->end_label = text_end_label;
25428
25429 #ifdef DWARF2_LINENO_DEBUGGING_INFO
25430 cur_line_info_table = text_section_line_info;
25431 #endif
25432
25433 /* If front-ends already registered a main translation unit but we were not
25434 ready to perform the association, do this now. */
25435 if (main_translation_unit != NULL_TREE)
25436 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
25437 }
25438
25439 /* Called before compile () starts outputting functions, variables
25440 and toplevel asms into assembly. */
25441
25442 static void
25443 dwarf2out_assembly_start (void)
25444 {
25445 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
25446 && dwarf2out_do_cfi_asm ()
25447 && (!(flag_unwind_tables || flag_exceptions)
25448 || targetm_common.except_unwind_info (&global_options) != UI_DWARF2))
25449 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
25450 }
25451
25452 /* A helper function for dwarf2out_finish called through
25453 htab_traverse. Assign a string its index. All strings must be
25454 collected into the table by the time index_string is called,
25455 because the indexing code relies on htab_traverse to traverse nodes
25456 in the same order for each run. */
25457
25458 int
25459 index_string (indirect_string_node **h, unsigned int *index)
25460 {
25461 indirect_string_node *node = *h;
25462
25463 find_string_form (node);
25464 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
25465 {
25466 gcc_assert (node->index == NO_INDEX_ASSIGNED);
25467 node->index = *index;
25468 *index += 1;
25469 }
25470 return 1;
25471 }
25472
25473 /* A helper function for output_indirect_strings called through
25474 htab_traverse. Output the offset to a string and update the
25475 current offset. */
25476
25477 int
25478 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
25479 {
25480 indirect_string_node *node = *h;
25481
25482 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
25483 {
25484 /* Assert that this node has been assigned an index. */
25485 gcc_assert (node->index != NO_INDEX_ASSIGNED
25486 && node->index != NOT_INDEXED);
25487 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
25488 "indexed string 0x%x: %s", node->index, node->str);
25489 *offset += strlen (node->str) + 1;
25490 }
25491 return 1;
25492 }
25493
25494 /* A helper function for dwarf2out_finish called through
25495 htab_traverse. Output the indexed string. */
25496
25497 int
25498 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
25499 {
25500 struct indirect_string_node *node = *h;
25501
25502 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
25503 {
25504 /* Assert that the strings are output in the same order as their
25505 indexes were assigned. */
25506 gcc_assert (*cur_idx == node->index);
25507 assemble_string (node->str, strlen (node->str) + 1);
25508 *cur_idx += 1;
25509 }
25510 return 1;
25511 }
25512
25513 /* A helper function for dwarf2out_finish called through
25514 htab_traverse. Emit one queued .debug_str string. */
25515
25516 int
25517 output_indirect_string (indirect_string_node **h, void *)
25518 {
25519 struct indirect_string_node *node = *h;
25520
25521 node->form = find_string_form (node);
25522 if (node->form == DW_FORM_strp && node->refcount > 0)
25523 {
25524 ASM_OUTPUT_LABEL (asm_out_file, node->label);
25525 assemble_string (node->str, strlen (node->str) + 1);
25526 }
25527
25528 return 1;
25529 }
25530
25531 /* Output the indexed string table. */
25532
25533 static void
25534 output_indirect_strings (void)
25535 {
25536 switch_to_section (debug_str_section);
25537 if (!dwarf_split_debug_info)
25538 debug_str_hash->traverse<void *, output_indirect_string> (NULL);
25539 else
25540 {
25541 unsigned int offset = 0;
25542 unsigned int cur_idx = 0;
25543
25544 skeleton_debug_str_hash->traverse<void *, output_indirect_string> (NULL);
25545
25546 switch_to_section (debug_str_offsets_section);
25547 debug_str_hash->traverse_noresize
25548 <unsigned int *, output_index_string_offset> (&offset);
25549 switch_to_section (debug_str_dwo_section);
25550 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
25551 (&cur_idx);
25552 }
25553 }
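
/* A small worked example of the dwarf_split_debug_info branch above, with
   two hypothetical indexed strings "a" and "bcd" assigned indexes 0 and 1:
   .debug_str_offsets.dwo receives the offsets 0 and 2 (each previous
   string's length plus its terminating NUL), and .debug_str.dwo then
   receives the strings themselves in the same index order.  */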
25554
25555 /* Callback for htab_traverse to assign an index to an entry in the
25556 table, and to write that entry to the .debug_addr section. */
25557
25558 int
25559 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
25560 {
25561 addr_table_entry *entry = *slot;
25562
25563 if (entry->refcount == 0)
25564 {
25565 gcc_assert (entry->index == NO_INDEX_ASSIGNED
25566 || entry->index == NOT_INDEXED);
25567 return 1;
25568 }
25569
25570 gcc_assert (entry->index == *cur_index);
25571 (*cur_index)++;
25572
25573 switch (entry->kind)
25574 {
25575 case ate_kind_rtx:
25576 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
25577 "0x%x", entry->index);
25578 break;
25579 case ate_kind_rtx_dtprel:
25580 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
25581 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
25582 DWARF2_ADDR_SIZE,
25583 entry->addr.rtl);
25584 fputc ('\n', asm_out_file);
25585 break;
25586 case ate_kind_label:
25587 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
25588 "0x%x", entry->index);
25589 break;
25590 default:
25591 gcc_unreachable ();
25592 }
25593 return 1;
25594 }
25595
25596 /* Produce the .debug_addr section. */
25597
25598 static void
25599 output_addr_table (void)
25600 {
25601 unsigned int index = 0;
25602 if (addr_index_table == NULL || addr_index_table->size () == 0)
25603 return;
25604
25605 switch_to_section (debug_addr_section);
25606 addr_index_table
25607 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
25608 }
25609
25610 #if ENABLE_ASSERT_CHECKING
25611 /* Verify that all marks are clear. */
25612
25613 static void
25614 verify_marks_clear (dw_die_ref die)
25615 {
25616 dw_die_ref c;
25617
25618 gcc_assert (! die->die_mark);
25619 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
25620 }
25621 #endif /* ENABLE_ASSERT_CHECKING */
25622
25623 /* Clear the marks for a die and its children.
25624 It is fine if the mark isn't set. */
25625
25626 static void
25627 prune_unmark_dies (dw_die_ref die)
25628 {
25629 dw_die_ref c;
25630
25631 if (die->die_mark)
25632 die->die_mark = 0;
25633 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
25634 }
25635
25636 /* Given LOC that is referenced by a DIE we're marking as used, find all
25637 DWARF procedures it references and mark them as used. */
25638
25639 static void
25640 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
25641 {
25642 for (; loc != NULL; loc = loc->dw_loc_next)
25643 switch (loc->dw_loc_opc)
25644 {
25645 case DW_OP_GNU_implicit_pointer:
25646 case DW_OP_GNU_convert:
25647 case DW_OP_GNU_reinterpret:
25648 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
25649 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
25650 break;
25651 case DW_OP_call2:
25652 case DW_OP_call4:
25653 case DW_OP_call_ref:
25654 case DW_OP_GNU_const_type:
25655 case DW_OP_GNU_parameter_ref:
25656 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
25657 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
25658 break;
25659 case DW_OP_GNU_regval_type:
25660 case DW_OP_GNU_deref_type:
25661 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
25662 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
25663 break;
25664 case DW_OP_GNU_entry_value:
25665 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
25666 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
25667 break;
25668 default:
25669 break;
25670 }
25671 }
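
/* The operators handled above are the ones whose operands can embed DIE
   references: call targets (i.e. DWARF procedures), typed-operation base
   types, and implicit pointer / parameter references.  Marking those DIEs
   here keeps them from being pruned even when no regular attribute points
   at them.  */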
25672
25673 /* Given DIE that we're marking as used, find any other dies
25674 it references as attributes and mark them as used. */
25675
25676 static void
25677 prune_unused_types_walk_attribs (dw_die_ref die)
25678 {
25679 dw_attr_node *a;
25680 unsigned ix;
25681
25682 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
25683 {
25684 switch (AT_class (a))
25685 {
25686 /* Make sure DWARF procedures referenced by location descriptions will
25687 get emitted. */
25688 case dw_val_class_loc:
25689 prune_unused_types_walk_loc_descr (AT_loc (a));
25690 break;
25691 case dw_val_class_loc_list:
25692 for (dw_loc_list_ref list = AT_loc_list (a);
25693 list != NULL;
25694 list = list->dw_loc_next)
25695 prune_unused_types_walk_loc_descr (list->expr);
25696 break;
25697
25698 case dw_val_class_die_ref:
25699 /* A reference to another DIE.
25700 Make sure that it will get emitted.
25701 If it was broken out into a comdat group, don't follow it. */
25702 if (! AT_ref (a)->comdat_type_p
25703 || a->dw_attr == DW_AT_specification)
25704 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
25705 break;
25706
25707 case dw_val_class_str:
25708 /* Set the string's refcount to 0 so that prune_unused_types_mark
25709 accounts properly for it. */
25710 a->dw_attr_val.v.val_str->refcount = 0;
25711 break;
25712
25713 default:
25714 break;
25715 }
25716 }
25717 }
25718
25719 /* Mark the children DIEs of DIE that describe its generic parameters and arguments. */
25720
25721 static void
25722 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
25723 {
25724 dw_die_ref c;
25725
25726 if (die == NULL || die->die_child == NULL)
25727 return;
25728 c = die->die_child;
25729 do
25730 {
25731 if (is_template_parameter (c))
25732 prune_unused_types_mark (c, 1);
25733 c = c->die_sib;
25734 } while (c && c != die->die_child);
25735 }
25736
25737 /* Mark DIE as being used. If DOKIDS is true, then walk down
25738 to DIE's children. */
25739
25740 static void
25741 prune_unused_types_mark (dw_die_ref die, int dokids)
25742 {
25743 dw_die_ref c;
25744
25745 if (die->die_mark == 0)
25746 {
25747 /* We haven't done this node yet. Mark it as used. */
25748 die->die_mark = 1;
25749 /* If this is the DIE of a generic type instantiation,
25750 mark the children DIEs that describe its generic parms and
25751 args. */
25752 prune_unused_types_mark_generic_parms_dies (die);
25753
25754 /* We also have to mark its parents as used.
25755 (But we don't want to mark our parent's kids due to this,
25756 unless it is a class.) */
25757 if (die->die_parent)
25758 prune_unused_types_mark (die->die_parent,
25759 class_scope_p (die->die_parent));
25760
25761 /* Mark any referenced nodes. */
25762 prune_unused_types_walk_attribs (die);
25763
25764 /* If this node is a specification,
25765 also mark the definition, if it exists. */
25766 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
25767 prune_unused_types_mark (die->die_definition, 1);
25768 }
25769
25770 if (dokids && die->die_mark != 2)
25771 {
25772 /* We need to walk the children, but haven't done so yet.
25773 Remember that we've walked the kids. */
25774 die->die_mark = 2;
25775
25776 /* If this is an array type, we need to make sure our
25777 kids get marked, even if they're types. If we're
25778 breaking out types into comdat sections, do this
25779 for all type definitions. */
25780 if (die->die_tag == DW_TAG_array_type
25781 || (use_debug_types
25782 && is_type_die (die) && ! is_declaration_die (die)))
25783 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
25784 else
25785 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
25786 }
25787 }
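
/* Note on the marking scheme: die_mark acts as a small state machine
   during this pass, roughly:
     0 - the DIE is not (yet) known to be needed;
     1 - the DIE itself is needed, but its children have not been walked;
     2 - the DIE is needed and its children have been walked as well.
   prune_unused_types_prune later keeps exactly the DIEs with a nonzero
   mark, and prune_unmark_dies clears every mark again afterwards.  */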
25788
25789 /* For local classes, check whether any static member functions were emitted
25790 and, if so, mark them. */
25791
25792 static void
25793 prune_unused_types_walk_local_classes (dw_die_ref die)
25794 {
25795 dw_die_ref c;
25796
25797 if (die->die_mark == 2)
25798 return;
25799
25800 switch (die->die_tag)
25801 {
25802 case DW_TAG_structure_type:
25803 case DW_TAG_union_type:
25804 case DW_TAG_class_type:
25805 break;
25806
25807 case DW_TAG_subprogram:
25808 if (!get_AT_flag (die, DW_AT_declaration)
25809 || die->die_definition != NULL)
25810 prune_unused_types_mark (die, 1);
25811 return;
25812
25813 default:
25814 return;
25815 }
25816
25817 /* Mark children. */
25818 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
25819 }
25820
25821 /* Walk the tree DIE and mark types that we actually use. */
25822
25823 static void
25824 prune_unused_types_walk (dw_die_ref die)
25825 {
25826 dw_die_ref c;
25827
25828 /* Don't do anything if this node is already marked and
25829 children have been marked as well. */
25830 if (die->die_mark == 2)
25831 return;
25832
25833 switch (die->die_tag)
25834 {
25835 case DW_TAG_structure_type:
25836 case DW_TAG_union_type:
25837 case DW_TAG_class_type:
25838 if (die->die_perennial_p)
25839 break;
25840
25841 for (c = die->die_parent; c; c = c->die_parent)
25842 if (c->die_tag == DW_TAG_subprogram)
25843 break;
25844
25845 /* Finding used static member functions inside of classes
25846 is needed just for local classes, because for other classes
25847 static member function DIEs with DW_AT_specification
25848 are emitted outside of the DW_TAG_*_type. If we ever change
25849 it, we'd need to call this even for non-local classes. */
25850 if (c)
25851 prune_unused_types_walk_local_classes (die);
25852
25853 /* It's a type node --- don't mark it. */
25854 return;
25855
25856 case DW_TAG_const_type:
25857 case DW_TAG_packed_type:
25858 case DW_TAG_pointer_type:
25859 case DW_TAG_reference_type:
25860 case DW_TAG_rvalue_reference_type:
25861 case DW_TAG_volatile_type:
25862 case DW_TAG_typedef:
25863 case DW_TAG_array_type:
25864 case DW_TAG_interface_type:
25865 case DW_TAG_friend:
25866 case DW_TAG_enumeration_type:
25867 case DW_TAG_subroutine_type:
25868 case DW_TAG_string_type:
25869 case DW_TAG_set_type:
25870 case DW_TAG_subrange_type:
25871 case DW_TAG_ptr_to_member_type:
25872 case DW_TAG_file_type:
25873 /* Type nodes are useful only when other DIEs reference them --- don't
25874 mark them. */
25875 /* FALLTHROUGH */
25876
25877 case DW_TAG_dwarf_procedure:
25878 /* Likewise for DWARF procedures. */
25879
25880 if (die->die_perennial_p)
25881 break;
25882
25883 return;
25884
25885 default:
25886 /* Mark everything else. */
25887 break;
25888 }
25889
25890 if (die->die_mark == 0)
25891 {
25892 die->die_mark = 1;
25893
25894 /* Now, mark any dies referenced from here. */
25895 prune_unused_types_walk_attribs (die);
25896 }
25897
25898 die->die_mark = 2;
25899
25900 /* Mark children. */
25901 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
25902 }
25903
25904 /* Increment the string counts on strings referred to from DIE's
25905 attributes. */
25906
25907 static void
25908 prune_unused_types_update_strings (dw_die_ref die)
25909 {
25910 dw_attr_node *a;
25911 unsigned ix;
25912
25913 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
25914 if (AT_class (a) == dw_val_class_str)
25915 {
25916 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
25917 s->refcount++;
25918 /* Avoid unnecessarily putting strings that are used less than
25919 twice in the hash table. */
25920 if (s->refcount
25921 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
25922 {
25923 indirect_string_node **slot
25924 = debug_str_hash->find_slot_with_hash (s->str,
25925 htab_hash_string (s->str),
25926 INSERT);
25927 gcc_assert (*slot == NULL);
25928 *slot = s;
25929 }
25930 }
25931 }
25932
25933 /* Remove from the tree DIE any dies that aren't marked. */
25934
25935 static void
25936 prune_unused_types_prune (dw_die_ref die)
25937 {
25938 dw_die_ref c;
25939
25940 gcc_assert (die->die_mark);
25941 prune_unused_types_update_strings (die);
25942
25943 if (! die->die_child)
25944 return;
25945
25946 c = die->die_child;
25947 do {
25948 dw_die_ref prev = c;
25949 for (c = c->die_sib; ! c->die_mark; c = c->die_sib)
25950 if (c == die->die_child)
25951 {
25952 /* No marked children between 'prev' and the end of the list. */
25953 if (prev == c)
25954 /* No marked children at all. */
25955 die->die_child = NULL;
25956 else
25957 {
25958 prev->die_sib = c->die_sib;
25959 die->die_child = prev;
25960 }
25961 return;
25962 }
25963
25964 if (c != prev->die_sib)
25965 prev->die_sib = c;
25966 prune_unused_types_prune (c);
25967 } while (c != die->die_child);
25968 }
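
/* A reminder of the child list layout this loop relies on: the children
   of a DIE form a circular singly-linked list through die_sib, with
   die_child pointing at the last child, so the first child is
   die->die_child->die_sib.  For a parent P with children C1, C2 and C3:

     P->die_child == C3,  C3->die_sib == C1,
     C1->die_sib == C2,   C2->die_sib == C3

   Unmarked children are therefore spliced out by rewriting die_sib links,
   and when a trailing run of children is dropped, die_child is reset to
   the last child that was kept.  */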
25969
25970 /* Remove dies representing declarations that we never use. */
25971
25972 static void
25973 prune_unused_types (void)
25974 {
25975 unsigned int i;
25976 limbo_die_node *node;
25977 comdat_type_node *ctnode;
25978 pubname_entry *pub;
25979 dw_die_ref base_type;
25980
25981 #if ENABLE_ASSERT_CHECKING
25982 /* All the marks should already be clear. */
25983 verify_marks_clear (comp_unit_die ());
25984 for (node = limbo_die_list; node; node = node->next)
25985 verify_marks_clear (node->die);
25986 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
25987 verify_marks_clear (ctnode->root_die);
25988 #endif /* ENABLE_ASSERT_CHECKING */
25989
25990 /* Mark types that are used in global variables. */
25991 premark_types_used_by_global_vars ();
25992
25993 /* Set the mark on nodes that are actually used. */
25994 prune_unused_types_walk (comp_unit_die ());
25995 for (node = limbo_die_list; node; node = node->next)
25996 prune_unused_types_walk (node->die);
25997 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
25998 {
25999 prune_unused_types_walk (ctnode->root_die);
26000 prune_unused_types_mark (ctnode->type_die, 1);
26001 }
26002
26003 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
26004 are unusual in that they are pubnames that are the children of pubtypes.
26005 They should only be marked via their parent DW_TAG_enumeration_type die,
26006 not as roots in themselves. */
26007 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
26008 if (pub->die->die_tag != DW_TAG_enumerator)
26009 prune_unused_types_mark (pub->die, 1);
26010 for (i = 0; base_types.iterate (i, &base_type); i++)
26011 prune_unused_types_mark (base_type, 1);
26012
26013 if (debug_str_hash)
26014 debug_str_hash->empty ();
26015 if (skeleton_debug_str_hash)
26016 skeleton_debug_str_hash->empty ();
26017 prune_unused_types_prune (comp_unit_die ());
26018 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
26019 {
26020 node = *pnode;
26021 if (!node->die->die_mark)
26022 *pnode = node->next;
26023 else
26024 {
26025 prune_unused_types_prune (node->die);
26026 pnode = &node->next;
26027 }
26028 }
26029 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
26030 prune_unused_types_prune (ctnode->root_die);
26031
26032 /* Leave the marks clear. */
26033 prune_unmark_dies (comp_unit_die ());
26034 for (node = limbo_die_list; node; node = node->next)
26035 prune_unmark_dies (node->die);
26036 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
26037 prune_unmark_dies (ctnode->root_die);
26038 }
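
/* In short, this pass runs in three phases: mark everything reachable
   (starting from the compilation unit, the limbo list, the comdat type
   units, the pubnames and the marked base types), then rebuild string
   refcounts and splice out unmarked DIEs with prune_unused_types_prune,
   and finally clear all marks so later passes can reuse die_mark.  */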
26039
26040 /* Set the parameter to true if there are any relative pathnames in
26041 the file table. */
26042 int
26043 file_table_relative_p (dwarf_file_data **slot, bool *p)
26044 {
26045 struct dwarf_file_data *d = *slot;
26046 if (!IS_ABSOLUTE_PATH (d->filename))
26047 {
26048 *p = true;
26049 return 0;
26050 }
26051 return 1;
26052 }
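
/* This is a hash_table::traverse callback; returning zero stops the
   traversal, so the walk over the file table ends as soon as the first
   relative pathname has been found.  */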
26053
26054 /* Helpers to manipulate hash table of comdat type units. */
26055
26056 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
26057 {
26058 static inline hashval_t hash (const comdat_type_node *);
26059 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
26060 };
26061
26062 inline hashval_t
26063 comdat_type_hasher::hash (const comdat_type_node *type_node)
26064 {
26065 hashval_t h;
26066 memcpy (&h, type_node->signature, sizeof (h));
26067 return h;
26068 }
26069
26070 inline bool
26071 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
26072 const comdat_type_node *type_node_2)
26073 {
26074 return (! memcmp (type_node_1->signature, type_node_2->signature,
26075 DWARF_TYPE_SIGNATURE_SIZE));
26076 }
26077
26078 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to dw_die_ref
26079 to the location where it would have been added had we known its
26080 DECL_ASSEMBLER_NAME when we added the other attributes. This will
26081 probably improve the compactness of debug info by removing equivalent
26082 abbrevs, and it hides any differences caused by deferring the
26083 computation of the assembler name, triggered by e.g. PCH. */
26084
26085 static inline void
26086 move_linkage_attr (dw_die_ref die)
26087 {
26088 unsigned ix = vec_safe_length (die->die_attr);
26089 dw_attr_node linkage = (*die->die_attr)[ix - 1];
26090
26091 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
26092 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
26093
26094 while (--ix > 0)
26095 {
26096 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
26097
26098 if (prev->dw_attr == DW_AT_decl_line || prev->dw_attr == DW_AT_name)
26099 break;
26100 }
26101
26102 if (ix != vec_safe_length (die->die_attr) - 1)
26103 {
26104 die->die_attr->pop ();
26105 die->die_attr->quick_insert (ix, linkage);
26106 }
26107 }
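
/* For example, if the attribute vector of a subprogram DIE ended up as

     [ DW_AT_name, DW_AT_decl_file, DW_AT_decl_line, DW_AT_type,
       DW_AT_external, DW_AT_linkage_name ]

   because the linkage name was computed late, the code above moves the
   last element so that the vector reads

     [ DW_AT_name, DW_AT_decl_file, DW_AT_decl_line, DW_AT_linkage_name,
       DW_AT_type, DW_AT_external ]

   i.e. the order it would have had if the assembler name had been known
   up front.  (The exact attribute mix here is only an illustration.)  */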
26108
26109 /* Helper function for resolve_addr, mark DW_TAG_base_type nodes
26110 referenced from typed stack ops and count how often they are used. */
26111
26112 static void
26113 mark_base_types (dw_loc_descr_ref loc)
26114 {
26115 dw_die_ref base_type = NULL;
26116
26117 for (; loc; loc = loc->dw_loc_next)
26118 {
26119 switch (loc->dw_loc_opc)
26120 {
26121 case DW_OP_GNU_regval_type:
26122 case DW_OP_GNU_deref_type:
26123 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
26124 break;
26125 case DW_OP_GNU_convert:
26126 case DW_OP_GNU_reinterpret:
26127 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
26128 continue;
26129 /* FALLTHRU */
26130 case DW_OP_GNU_const_type:
26131 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
26132 break;
26133 case DW_OP_GNU_entry_value:
26134 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
26135 continue;
26136 default:
26137 continue;
26138 }
26139 gcc_assert (base_type->die_parent == comp_unit_die ());
26140 if (base_type->die_mark)
26141 base_type->die_mark++;
26142 else
26143 {
26144 base_types.safe_push (base_type);
26145 base_type->die_mark = 1;
26146 }
26147 }
26148 }
26149
26150 /* Comparison function for sorting marked base types. */
26151
26152 static int
26153 base_type_cmp (const void *x, const void *y)
26154 {
26155 dw_die_ref dx = *(const dw_die_ref *) x;
26156 dw_die_ref dy = *(const dw_die_ref *) y;
26157 unsigned int byte_size1, byte_size2;
26158 unsigned int encoding1, encoding2;
26159 if (dx->die_mark > dy->die_mark)
26160 return -1;
26161 if (dx->die_mark < dy->die_mark)
26162 return 1;
26163 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
26164 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
26165 if (byte_size1 < byte_size2)
26166 return 1;
26167 if (byte_size1 > byte_size2)
26168 return -1;
26169 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
26170 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
26171 if (encoding1 < encoding2)
26172 return 1;
26173 if (encoding1 > encoding2)
26174 return -1;
26175 return 0;
26176 }
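
/* mark_base_types reuses die_mark as a use count, so after
   base_types.qsort (base_type_cmp) the vector is ordered by decreasing
   use count, with ties broken by decreasing DW_AT_byte_size and then
   decreasing DW_AT_encoding.  E.g. a 4-byte int used 12 times sorts
   before an 8-byte double used 3 times, while among equally used types
   the 8-byte one comes first.  */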
26177
26178 /* Move base types marked by mark_base_types as early as possible
26179 in the CU, sorted by decreasing usage count both to make the
26180 uleb128 references as small as possible and to make sure they
26181 will have die_offset already computed by calc_die_sizes when
26182 the sizes of typed stack loc ops are computed. */
26183
26184 static void
26185 move_marked_base_types (void)
26186 {
26187 unsigned int i;
26188 dw_die_ref base_type, die, c;
26189
26190 if (base_types.is_empty ())
26191 return;
26192
26193 /* Sort by decreasing usage count, they will be added again in that
26194 order later on. */
26195 base_types.qsort (base_type_cmp);
26196 die = comp_unit_die ();
26197 c = die->die_child;
26198 do
26199 {
26200 dw_die_ref prev = c;
26201 c = c->die_sib;
26202 while (c->die_mark)
26203 {
26204 remove_child_with_prev (c, prev);
26205 /* As base types got marked, there must be at least
26206 one node other than DW_TAG_base_type. */
26207 gcc_assert (c != c->die_sib);
26208 c = c->die_sib;
26209 }
26210 }
26211 while (c != die->die_child);
26212 gcc_assert (die->die_child);
26213 c = die->die_child;
26214 for (i = 0; base_types.iterate (i, &base_type); i++)
26215 {
26216 base_type->die_mark = 0;
26217 base_type->die_sib = c->die_sib;
26218 c->die_sib = base_type;
26219 c = base_type;
26220 }
26221 }
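
/* Because die_child points at the last child and die_child->die_sib is
   the first, the insertion loop above effectively makes the sorted base
   types the first children of the comp unit DIE:

     before:  [ other children ..., base types scattered among them ]
     after:   [ most used base type, ..., least used, other children ]

   so their small die_offsets are fixed early and the uleb128 references
   from typed stack ops stay short.  */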
26222
26223 /* Helper function for resolve_addr; attempt to resolve
26224 one CONST_STRING and return true if successful. Similarly verify that
26225 SYMBOL_REFs refer to variables emitted in the current CU. */
26226
26227 static bool
26228 resolve_one_addr (rtx *addr)
26229 {
26230 rtx rtl = *addr;
26231
26232 if (GET_CODE (rtl) == CONST_STRING)
26233 {
26234 size_t len = strlen (XSTR (rtl, 0)) + 1;
26235 tree t = build_string (len, XSTR (rtl, 0));
26236 tree tlen = size_int (len - 1);
26237 TREE_TYPE (t)
26238 = build_array_type (char_type_node, build_index_type (tlen));
26239 rtl = lookup_constant_def (t);
26240 if (!rtl || !MEM_P (rtl))
26241 return false;
26242 rtl = XEXP (rtl, 0);
26243 if (GET_CODE (rtl) == SYMBOL_REF
26244 && SYMBOL_REF_DECL (rtl)
26245 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
26246 return false;
26247 vec_safe_push (used_rtx_array, rtl);
26248 *addr = rtl;
26249 return true;
26250 }
26251
26252 if (GET_CODE (rtl) == SYMBOL_REF
26253 && SYMBOL_REF_DECL (rtl))
26254 {
26255 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
26256 {
26257 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
26258 return false;
26259 }
26260 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
26261 return false;
26262 }
26263
26264 if (GET_CODE (rtl) == CONST)
26265 {
26266 subrtx_ptr_iterator::array_type array;
26267 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
26268 if (!resolve_one_addr (*iter))
26269 return false;
26270 }
26271
26272 return true;
26273 }
26274
26275 /* For STRING_CST, return SYMBOL_REF of its constant pool entry,
26276 if possible, and create DW_TAG_dwarf_procedure that can be referenced
26277 from DW_OP_GNU_implicit_pointer if the string hasn't been seen yet. */
26278
26279 static rtx
26280 string_cst_pool_decl (tree t)
26281 {
26282 rtx rtl = output_constant_def (t, 1);
26283 unsigned char *array;
26284 dw_loc_descr_ref l;
26285 tree decl;
26286 size_t len;
26287 dw_die_ref ref;
26288
26289 if (!rtl || !MEM_P (rtl))
26290 return NULL_RTX;
26291 rtl = XEXP (rtl, 0);
26292 if (GET_CODE (rtl) != SYMBOL_REF
26293 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
26294 return NULL_RTX;
26295
26296 decl = SYMBOL_REF_DECL (rtl);
26297 if (!lookup_decl_die (decl))
26298 {
26299 len = TREE_STRING_LENGTH (t);
26300 vec_safe_push (used_rtx_array, rtl);
26301 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
26302 array = ggc_vec_alloc<unsigned char> (len);
26303 memcpy (array, TREE_STRING_POINTER (t), len);
26304 l = new_loc_descr (DW_OP_implicit_value, len, 0);
26305 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
26306 l->dw_loc_oprnd2.v.val_vec.length = len;
26307 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
26308 l->dw_loc_oprnd2.v.val_vec.array = array;
26309 add_AT_loc (ref, DW_AT_location, l);
26310 equate_decl_number_to_die (decl, ref);
26311 }
26312 return rtl;
26313 }
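
/* E.g. for a string literal "abc" that reaches the constant pool, the
   DIE created above looks roughly like

     DW_TAG_dwarf_procedure
       DW_AT_location: DW_OP_implicit_value 4: 'a' 'b' 'c' '\0'

   and is equated to the pool SYMBOL_REF's decl, so that a later
   DW_OP_GNU_implicit_pointer can still expose the string's bytes even
   though the literal has no address in the debuggee.  */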
26314
26315 /* Helper function of resolve_addr_in_expr. LOC is
26316 a DW_OP_addr followed by DW_OP_stack_value, either at the start
26317 of exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
26318 resolved. Replace it (both DW_OP_addr and DW_OP_stack_value)
26319 with DW_OP_GNU_implicit_pointer if possible
26320 and return true; if unsuccessful, return false. */
26321
26322 static bool
26323 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
26324 {
26325 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
26326 HOST_WIDE_INT offset = 0;
26327 dw_die_ref ref = NULL;
26328 tree decl;
26329
26330 if (GET_CODE (rtl) == CONST
26331 && GET_CODE (XEXP (rtl, 0)) == PLUS
26332 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
26333 {
26334 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
26335 rtl = XEXP (XEXP (rtl, 0), 0);
26336 }
26337 if (GET_CODE (rtl) == CONST_STRING)
26338 {
26339 size_t len = strlen (XSTR (rtl, 0)) + 1;
26340 tree t = build_string (len, XSTR (rtl, 0));
26341 tree tlen = size_int (len - 1);
26342
26343 TREE_TYPE (t)
26344 = build_array_type (char_type_node, build_index_type (tlen));
26345 rtl = string_cst_pool_decl (t);
26346 if (!rtl)
26347 return false;
26348 }
26349 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
26350 {
26351 decl = SYMBOL_REF_DECL (rtl);
26352 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
26353 {
26354 ref = lookup_decl_die (decl);
26355 if (ref && (get_AT (ref, DW_AT_location)
26356 || get_AT (ref, DW_AT_const_value)))
26357 {
26358 loc->dw_loc_opc = DW_OP_GNU_implicit_pointer;
26359 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
26360 loc->dw_loc_oprnd1.val_entry = NULL;
26361 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
26362 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
26363 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
26364 loc->dw_loc_oprnd2.v.val_int = offset;
26365 return true;
26366 }
26367 }
26368 }
26369 return false;
26370 }
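
/* E.g. a location expression saying "the value is the address of VAR
   plus 4" where VAR itself was never emitted,

     DW_OP_addr <VAR+4>; DW_OP_stack_value

   is rewritten, provided VAR's DIE still has a DW_AT_location or
   DW_AT_const_value, into the single op

     DW_OP_GNU_implicit_pointer <DIE of VAR> 4

   which lets a consumer reconstruct what the pointer points to even
   though the pointee has no address.  */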
26371
26372 /* Helper function for resolve_addr; handle one location
26373 expression and return false if at least one CONST_STRING or SYMBOL_REF in
26374 the location list couldn't be resolved. */
26375
26376 static bool
26377 resolve_addr_in_expr (dw_loc_descr_ref loc)
26378 {
26379 dw_loc_descr_ref keep = NULL;
26380 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
26381 switch (loc->dw_loc_opc)
26382 {
26383 case DW_OP_addr:
26384 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
26385 {
26386 if ((prev == NULL
26387 || prev->dw_loc_opc == DW_OP_piece
26388 || prev->dw_loc_opc == DW_OP_bit_piece)
26389 && loc->dw_loc_next
26390 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
26391 && !dwarf_strict
26392 && optimize_one_addr_into_implicit_ptr (loc))
26393 break;
26394 return false;
26395 }
26396 break;
26397 case DW_OP_GNU_addr_index:
26398 case DW_OP_GNU_const_index:
26399 if (loc->dw_loc_opc == DW_OP_GNU_addr_index
26400 || (loc->dw_loc_opc == DW_OP_GNU_const_index && loc->dtprel))
26401 {
26402 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
26403 if (!resolve_one_addr (&rtl))
26404 return false;
26405 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
26406 loc->dw_loc_oprnd1.val_entry =
26407 add_addr_table_entry (rtl, ate_kind_rtx);
26408 }
26409 break;
26410 case DW_OP_const4u:
26411 case DW_OP_const8u:
26412 if (loc->dtprel
26413 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
26414 return false;
26415 break;
26416 case DW_OP_plus_uconst:
26417 if (size_of_loc_descr (loc)
26418 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
26419 + 1
26420 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
26421 {
26422 dw_loc_descr_ref repl
26423 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
26424 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
26425 add_loc_descr (&repl, loc->dw_loc_next);
26426 *loc = *repl;
26427 }
26428 break;
26429 case DW_OP_implicit_value:
26430 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
26431 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
26432 return false;
26433 break;
26434 case DW_OP_GNU_implicit_pointer:
26435 case DW_OP_GNU_parameter_ref:
26436 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
26437 {
26438 dw_die_ref ref
26439 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
26440 if (ref == NULL)
26441 return false;
26442 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
26443 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
26444 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
26445 }
26446 break;
26447 case DW_OP_GNU_const_type:
26448 case DW_OP_GNU_regval_type:
26449 case DW_OP_GNU_deref_type:
26450 case DW_OP_GNU_convert:
26451 case DW_OP_GNU_reinterpret:
26452 while (loc->dw_loc_next
26453 && loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert)
26454 {
26455 dw_die_ref base1, base2;
26456 unsigned enc1, enc2, size1, size2;
26457 if (loc->dw_loc_opc == DW_OP_GNU_regval_type
26458 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
26459 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
26460 else if (loc->dw_loc_oprnd1.val_class
26461 == dw_val_class_unsigned_const)
26462 break;
26463 else
26464 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
26465 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
26466 == dw_val_class_unsigned_const)
26467 break;
26468 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
26469 gcc_assert (base1->die_tag == DW_TAG_base_type
26470 && base2->die_tag == DW_TAG_base_type);
26471 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
26472 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
26473 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
26474 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
26475 if (size1 == size2
26476 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
26477 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
26478 && loc != keep)
26479 || enc1 == enc2))
26480 {
26481 /* Optimize away next DW_OP_GNU_convert after
26482 adjusting LOC's base type die reference. */
26483 if (loc->dw_loc_opc == DW_OP_GNU_regval_type
26484 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
26485 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
26486 else
26487 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
26488 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
26489 continue;
26490 }
26491 /* Don't change integer DW_OP_GNU_convert after e.g. floating
26492 point typed stack entry. */
26493 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
26494 keep = loc->dw_loc_next;
26495 break;
26496 }
26497 break;
26498 default:
26499 break;
26500 }
26501 return true;
26502 }
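
/* The DW_OP_GNU_convert loop above also shortens conversion chains: when
   a typed op is followed by a DW_OP_GNU_convert whose base type has the
   same byte size and a compatible (integral, or identical) encoding, e.g.

     DW_OP_GNU_regval_type <reg> <int32 DIE>; DW_OP_GNU_convert <uint32 DIE>

   the convert is dropped and the first op is retargeted at the final base
   type, unless doing so would change the meaning of a conversion applied
   to a non-integral (e.g. floating point) typed value.  */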
26503
26504 /* Helper function of resolve_addr. DIE had DW_AT_location of
26505 DW_OP_addr alone, which referred to DECL in DW_OP_addr's operand
26506 and DW_OP_addr couldn't be resolved. resolve_addr has already
26507 removed the DW_AT_location attribute. This function attempts to
26508 add a new DW_AT_location attribute with DW_OP_GNU_implicit_pointer
26509 to it or DW_AT_const_value attribute, if possible. */
26510
26511 static void
26512 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
26513 {
26514 if (TREE_CODE (decl) != VAR_DECL
26515 || lookup_decl_die (decl) != die
26516 || DECL_EXTERNAL (decl)
26517 || !TREE_STATIC (decl)
26518 || DECL_INITIAL (decl) == NULL_TREE
26519 || DECL_P (DECL_INITIAL (decl))
26520 || get_AT (die, DW_AT_const_value))
26521 return;
26522
26523 tree init = DECL_INITIAL (decl);
26524 HOST_WIDE_INT offset = 0;
26525 /* For variables that have been optimized away and thus
26526 don't have a memory location, see if we can emit
26527 DW_AT_const_value instead. */
26528 if (tree_add_const_value_attribute (die, init))
26529 return;
26530 if (dwarf_strict)
26531 return;
26532 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
26533 and ADDR_EXPR refers to a decl that has DW_AT_location or
26534 DW_AT_const_value (but isn't addressable, otherwise
26535 resolving the original DW_OP_addr wouldn't fail), see if
26536 we can add DW_OP_GNU_implicit_pointer. */
26537 STRIP_NOPS (init);
26538 if (TREE_CODE (init) == POINTER_PLUS_EXPR
26539 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
26540 {
26541 offset = tree_to_shwi (TREE_OPERAND (init, 1));
26542 init = TREE_OPERAND (init, 0);
26543 STRIP_NOPS (init);
26544 }
26545 if (TREE_CODE (init) != ADDR_EXPR)
26546 return;
26547 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
26548 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
26549 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
26550 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
26551 && TREE_OPERAND (init, 0) != decl))
26552 {
26553 dw_die_ref ref;
26554 dw_loc_descr_ref l;
26555
26556 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
26557 {
26558 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
26559 if (!rtl)
26560 return;
26561 decl = SYMBOL_REF_DECL (rtl);
26562 }
26563 else
26564 decl = TREE_OPERAND (init, 0);
26565 ref = lookup_decl_die (decl);
26566 if (ref == NULL
26567 || (!get_AT (ref, DW_AT_location)
26568 && !get_AT (ref, DW_AT_const_value)))
26569 return;
26570 l = new_loc_descr (DW_OP_GNU_implicit_pointer, 0, offset);
26571 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
26572 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
26573 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
26574 add_AT_loc (die, DW_AT_location, l);
26575 }
26576 }
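
/* E.g. for a file-scope pointer whose storage is optimized away:

     static int x;
     static int *p = &x;     (assume p itself is never emitted)

   the initializer is an ADDR_EXPR of x, so if x's DIE has a location,
   p's DIE gets

     DW_AT_location: DW_OP_GNU_implicit_pointer <DIE of x> 0

   and an initializer of the form "&x plus a constant", represented as a
   POINTER_PLUS_EXPR, contributes that constant as the offset operand.
   (Whether p is really optimized away depends on the rest of the
   translation unit; the snippet is only an illustration.)  */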
26577
26578 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
26579 an address in .rodata section if the string literal is emitted there,
26580 or remove the containing location list or replace DW_AT_const_value
26581 with DW_AT_location and empty location expression, if it isn't found
26582 in .rodata. Similarly for SYMBOL_REFs, keep only those that refer
26583 to something that has been emitted in the current CU. */
26584
26585 static void
26586 resolve_addr (dw_die_ref die)
26587 {
26588 dw_die_ref c;
26589 dw_attr_node *a;
26590 dw_loc_list_ref *curr, *start, loc;
26591 unsigned ix;
26592
26593 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
26594 switch (AT_class (a))
26595 {
26596 case dw_val_class_loc_list:
26597 start = curr = AT_loc_list_ptr (a);
26598 loc = *curr;
26599 gcc_assert (loc);
26600 /* The same list can be referenced more than once. See if we have
26601 already recorded the result from a previous pass. */
26602 if (loc->replaced)
26603 *curr = loc->dw_loc_next;
26604 else if (!loc->resolved_addr)
26605 {
26606 /* As things stand, we do not expect or allow one die to
26607 reference a suffix of another die's location list chain.
26608 References must be identical or completely separate.
26609 There is therefore no need to cache the result of this
26610 pass on any list other than the first; doing so
26611 would lead to unnecessary writes. */
26612 while (*curr)
26613 {
26614 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
26615 if (!resolve_addr_in_expr ((*curr)->expr))
26616 {
26617 dw_loc_list_ref next = (*curr)->dw_loc_next;
26618 dw_loc_descr_ref l = (*curr)->expr;
26619
26620 if (next && (*curr)->ll_symbol)
26621 {
26622 gcc_assert (!next->ll_symbol);
26623 next->ll_symbol = (*curr)->ll_symbol;
26624 }
26625 if (dwarf_split_debug_info)
26626 remove_loc_list_addr_table_entries (l);
26627 *curr = next;
26628 }
26629 else
26630 {
26631 mark_base_types ((*curr)->expr);
26632 curr = &(*curr)->dw_loc_next;
26633 }
26634 }
26635 if (loc == *start)
26636 loc->resolved_addr = 1;
26637 else
26638 {
26639 loc->replaced = 1;
26640 loc->dw_loc_next = *start;
26641 }
26642 }
26643 if (!*start)
26644 {
26645 remove_AT (die, a->dw_attr);
26646 ix--;
26647 }
26648 break;
26649 case dw_val_class_loc:
26650 {
26651 dw_loc_descr_ref l = AT_loc (a);
26652 /* For -gdwarf-2 don't attempt to optimize
26653 DW_AT_data_member_location containing
26654 DW_OP_plus_uconst - older consumers might
26655 rely on it being that op instead of a more complex,
26656 but shorter, location description. */
26657 if ((dwarf_version > 2
26658 || a->dw_attr != DW_AT_data_member_location
26659 || l == NULL
26660 || l->dw_loc_opc != DW_OP_plus_uconst
26661 || l->dw_loc_next != NULL)
26662 && !resolve_addr_in_expr (l))
26663 {
26664 if (dwarf_split_debug_info)
26665 remove_loc_list_addr_table_entries (l);
26666 if (l != NULL
26667 && l->dw_loc_next == NULL
26668 && l->dw_loc_opc == DW_OP_addr
26669 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
26670 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
26671 && a->dw_attr == DW_AT_location)
26672 {
26673 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
26674 remove_AT (die, a->dw_attr);
26675 ix--;
26676 optimize_location_into_implicit_ptr (die, decl);
26677 break;
26678 }
26679 remove_AT (die, a->dw_attr);
26680 ix--;
26681 }
26682 else
26683 mark_base_types (l);
26684 }
26685 break;
26686 case dw_val_class_addr:
26687 if (a->dw_attr == DW_AT_const_value
26688 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
26689 {
26690 if (AT_index (a) != NOT_INDEXED)
26691 remove_addr_table_entry (a->dw_attr_val.val_entry);
26692 remove_AT (die, a->dw_attr);
26693 ix--;
26694 }
26695 if (die->die_tag == DW_TAG_GNU_call_site
26696 && a->dw_attr == DW_AT_abstract_origin)
26697 {
26698 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
26699 dw_die_ref tdie = lookup_decl_die (tdecl);
26700 dw_die_ref cdie;
26701 if (tdie == NULL
26702 && DECL_EXTERNAL (tdecl)
26703 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
26704 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
26705 {
26706 /* Creating a full DIE for tdecl is overly expensive and
26707 at this point even wrong when in the LTO phase, as it can
26708 end up generating new type DIEs we didn't output, and thus
26709 optimize_external_refs would crash. */
26710 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
26711 add_AT_flag (tdie, DW_AT_external, 1);
26712 add_AT_flag (tdie, DW_AT_declaration, 1);
26713 add_linkage_attr (tdie, tdecl);
26714 add_name_and_src_coords_attributes (tdie, tdecl);
26715 equate_decl_number_to_die (tdecl, tdie);
26716 }
26717 if (tdie)
26718 {
26719 a->dw_attr_val.val_class = dw_val_class_die_ref;
26720 a->dw_attr_val.v.val_die_ref.die = tdie;
26721 a->dw_attr_val.v.val_die_ref.external = 0;
26722 }
26723 else
26724 {
26725 if (AT_index (a) != NOT_INDEXED)
26726 remove_addr_table_entry (a->dw_attr_val.val_entry);
26727 remove_AT (die, a->dw_attr);
26728 ix--;
26729 }
26730 }
26731 break;
26732 default:
26733 break;
26734 }
26735
26736 FOR_EACH_CHILD (die, c, resolve_addr (c));
26737 }
26738 \f
26739 /* Helper routines for optimize_location_lists.
26740 This pass tries to share identical local lists in .debug_loc
26741 section. */
26742
26743 /* Iteratively hash operands of LOC opcode into HSTATE. */
26744
26745 static void
26746 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
26747 {
26748 dw_val_ref val1 = &loc->dw_loc_oprnd1;
26749 dw_val_ref val2 = &loc->dw_loc_oprnd2;
26750
26751 switch (loc->dw_loc_opc)
26752 {
26753 case DW_OP_const4u:
26754 case DW_OP_const8u:
26755 if (loc->dtprel)
26756 goto hash_addr;
26757 /* FALLTHRU */
26758 case DW_OP_const1u:
26759 case DW_OP_const1s:
26760 case DW_OP_const2u:
26761 case DW_OP_const2s:
26762 case DW_OP_const4s:
26763 case DW_OP_const8s:
26764 case DW_OP_constu:
26765 case DW_OP_consts:
26766 case DW_OP_pick:
26767 case DW_OP_plus_uconst:
26768 case DW_OP_breg0:
26769 case DW_OP_breg1:
26770 case DW_OP_breg2:
26771 case DW_OP_breg3:
26772 case DW_OP_breg4:
26773 case DW_OP_breg5:
26774 case DW_OP_breg6:
26775 case DW_OP_breg7:
26776 case DW_OP_breg8:
26777 case DW_OP_breg9:
26778 case DW_OP_breg10:
26779 case DW_OP_breg11:
26780 case DW_OP_breg12:
26781 case DW_OP_breg13:
26782 case DW_OP_breg14:
26783 case DW_OP_breg15:
26784 case DW_OP_breg16:
26785 case DW_OP_breg17:
26786 case DW_OP_breg18:
26787 case DW_OP_breg19:
26788 case DW_OP_breg20:
26789 case DW_OP_breg21:
26790 case DW_OP_breg22:
26791 case DW_OP_breg23:
26792 case DW_OP_breg24:
26793 case DW_OP_breg25:
26794 case DW_OP_breg26:
26795 case DW_OP_breg27:
26796 case DW_OP_breg28:
26797 case DW_OP_breg29:
26798 case DW_OP_breg30:
26799 case DW_OP_breg31:
26800 case DW_OP_regx:
26801 case DW_OP_fbreg:
26802 case DW_OP_piece:
26803 case DW_OP_deref_size:
26804 case DW_OP_xderef_size:
26805 hstate.add_object (val1->v.val_int);
26806 break;
26807 case DW_OP_skip:
26808 case DW_OP_bra:
26809 {
26810 int offset;
26811
26812 gcc_assert (val1->val_class == dw_val_class_loc);
26813 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
26814 hstate.add_object (offset);
26815 }
26816 break;
26817 case DW_OP_implicit_value:
26818 hstate.add_object (val1->v.val_unsigned);
26819 switch (val2->val_class)
26820 {
26821 case dw_val_class_const:
26822 hstate.add_object (val2->v.val_int);
26823 break;
26824 case dw_val_class_vec:
26825 {
26826 unsigned int elt_size = val2->v.val_vec.elt_size;
26827 unsigned int len = val2->v.val_vec.length;
26828
26829 hstate.add_int (elt_size);
26830 hstate.add_int (len);
26831 hstate.add (val2->v.val_vec.array, len * elt_size);
26832 }
26833 break;
26834 case dw_val_class_const_double:
26835 hstate.add_object (val2->v.val_double.low);
26836 hstate.add_object (val2->v.val_double.high);
26837 break;
26838 case dw_val_class_wide_int:
26839 hstate.add (val2->v.val_wide->get_val (),
26840 get_full_len (*val2->v.val_wide)
26841 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
26842 break;
26843 case dw_val_class_addr:
26844 inchash::add_rtx (val2->v.val_addr, hstate);
26845 break;
26846 default:
26847 gcc_unreachable ();
26848 }
26849 break;
26850 case DW_OP_bregx:
26851 case DW_OP_bit_piece:
26852 hstate.add_object (val1->v.val_int);
26853 hstate.add_object (val2->v.val_int);
26854 break;
26855 case DW_OP_addr:
26856 hash_addr:
26857 if (loc->dtprel)
26858 {
26859 unsigned char dtprel = 0xd1;
26860 hstate.add_object (dtprel);
26861 }
26862 inchash::add_rtx (val1->v.val_addr, hstate);
26863 break;
26864 case DW_OP_GNU_addr_index:
26865 case DW_OP_GNU_const_index:
26866 {
26867 if (loc->dtprel)
26868 {
26869 unsigned char dtprel = 0xd1;
26870 hstate.add_object (dtprel);
26871 }
26872 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
26873 }
26874 break;
26875 case DW_OP_GNU_implicit_pointer:
26876 hstate.add_int (val2->v.val_int);
26877 break;
26878 case DW_OP_GNU_entry_value:
26879 hstate.add_object (val1->v.val_loc);
26880 break;
26881 case DW_OP_GNU_regval_type:
26882 case DW_OP_GNU_deref_type:
26883 {
26884 unsigned int byte_size
26885 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
26886 unsigned int encoding
26887 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
26888 hstate.add_object (val1->v.val_int);
26889 hstate.add_object (byte_size);
26890 hstate.add_object (encoding);
26891 }
26892 break;
26893 case DW_OP_GNU_convert:
26894 case DW_OP_GNU_reinterpret:
26895 if (val1->val_class == dw_val_class_unsigned_const)
26896 {
26897 hstate.add_object (val1->v.val_unsigned);
26898 break;
26899 }
26900 /* FALLTHRU */
26901 case DW_OP_GNU_const_type:
26902 {
26903 unsigned int byte_size
26904 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
26905 unsigned int encoding
26906 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
26907 hstate.add_object (byte_size);
26908 hstate.add_object (encoding);
26909 if (loc->dw_loc_opc != DW_OP_GNU_const_type)
26910 break;
26911 hstate.add_object (val2->val_class);
26912 switch (val2->val_class)
26913 {
26914 case dw_val_class_const:
26915 hstate.add_object (val2->v.val_int);
26916 break;
26917 case dw_val_class_vec:
26918 {
26919 unsigned int elt_size = val2->v.val_vec.elt_size;
26920 unsigned int len = val2->v.val_vec.length;
26921
26922 hstate.add_object (elt_size);
26923 hstate.add_object (len);
26924 hstate.add (val2->v.val_vec.array, len * elt_size);
26925 }
26926 break;
26927 case dw_val_class_const_double:
26928 hstate.add_object (val2->v.val_double.low);
26929 hstate.add_object (val2->v.val_double.high);
26930 break;
26931 case dw_val_class_wide_int:
26932 hstate.add (val2->v.val_wide->get_val (),
26933 get_full_len (*val2->v.val_wide)
26934 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
26935 break;
26936 default:
26937 gcc_unreachable ();
26938 }
26939 }
26940 break;
26941
26942 default:
26943 /* Other codes have no operands. */
26944 break;
26945 }
26946 }
26947
26948 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
26949
26950 static inline void
26951 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
26952 {
26953 dw_loc_descr_ref l;
26954 bool sizes_computed = false;
26955 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
26956 size_of_locs (loc);
26957
26958 for (l = loc; l != NULL; l = l->dw_loc_next)
26959 {
26960 enum dwarf_location_atom opc = l->dw_loc_opc;
26961 hstate.add_object (opc);
26962 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
26963 {
26964 size_of_locs (loc);
26965 sizes_computed = true;
26966 }
26967 hash_loc_operands (l, hstate);
26968 }
26969 }
26970
26971 /* Compute hash of the whole location list LIST_HEAD. */
26972
26973 static inline void
26974 hash_loc_list (dw_loc_list_ref list_head)
26975 {
26976 dw_loc_list_ref curr = list_head;
26977 inchash::hash hstate;
26978
26979 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
26980 {
26981 hstate.add (curr->begin, strlen (curr->begin) + 1);
26982 hstate.add (curr->end, strlen (curr->end) + 1);
26983 if (curr->section)
26984 hstate.add (curr->section, strlen (curr->section) + 1);
26985 hash_locs (curr->expr, hstate);
26986 }
26987 list_head->hash = hstate.end ();
26988 }
26989
26990 /* Return true if X and Y opcodes have the same operands. */
26991
26992 static inline bool
26993 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
26994 {
26995 dw_val_ref valx1 = &x->dw_loc_oprnd1;
26996 dw_val_ref valx2 = &x->dw_loc_oprnd2;
26997 dw_val_ref valy1 = &y->dw_loc_oprnd1;
26998 dw_val_ref valy2 = &y->dw_loc_oprnd2;
26999
27000 switch (x->dw_loc_opc)
27001 {
27002 case DW_OP_const4u:
27003 case DW_OP_const8u:
27004 if (x->dtprel)
27005 goto hash_addr;
27006 /* FALLTHRU */
27007 case DW_OP_const1u:
27008 case DW_OP_const1s:
27009 case DW_OP_const2u:
27010 case DW_OP_const2s:
27011 case DW_OP_const4s:
27012 case DW_OP_const8s:
27013 case DW_OP_constu:
27014 case DW_OP_consts:
27015 case DW_OP_pick:
27016 case DW_OP_plus_uconst:
27017 case DW_OP_breg0:
27018 case DW_OP_breg1:
27019 case DW_OP_breg2:
27020 case DW_OP_breg3:
27021 case DW_OP_breg4:
27022 case DW_OP_breg5:
27023 case DW_OP_breg6:
27024 case DW_OP_breg7:
27025 case DW_OP_breg8:
27026 case DW_OP_breg9:
27027 case DW_OP_breg10:
27028 case DW_OP_breg11:
27029 case DW_OP_breg12:
27030 case DW_OP_breg13:
27031 case DW_OP_breg14:
27032 case DW_OP_breg15:
27033 case DW_OP_breg16:
27034 case DW_OP_breg17:
27035 case DW_OP_breg18:
27036 case DW_OP_breg19:
27037 case DW_OP_breg20:
27038 case DW_OP_breg21:
27039 case DW_OP_breg22:
27040 case DW_OP_breg23:
27041 case DW_OP_breg24:
27042 case DW_OP_breg25:
27043 case DW_OP_breg26:
27044 case DW_OP_breg27:
27045 case DW_OP_breg28:
27046 case DW_OP_breg29:
27047 case DW_OP_breg30:
27048 case DW_OP_breg31:
27049 case DW_OP_regx:
27050 case DW_OP_fbreg:
27051 case DW_OP_piece:
27052 case DW_OP_deref_size:
27053 case DW_OP_xderef_size:
27054 return valx1->v.val_int == valy1->v.val_int;
27055 case DW_OP_skip:
27056 case DW_OP_bra:
27057 /* If splitting debug info, the use of DW_OP_GNU_addr_index
27058 can cause irrelevant differences in dw_loc_addr. */
27059 gcc_assert (valx1->val_class == dw_val_class_loc
27060 && valy1->val_class == dw_val_class_loc
27061 && (dwarf_split_debug_info
27062 || x->dw_loc_addr == y->dw_loc_addr));
27063 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
27064 case DW_OP_implicit_value:
27065 if (valx1->v.val_unsigned != valy1->v.val_unsigned
27066 || valx2->val_class != valy2->val_class)
27067 return false;
27068 switch (valx2->val_class)
27069 {
27070 case dw_val_class_const:
27071 return valx2->v.val_int == valy2->v.val_int;
27072 case dw_val_class_vec:
27073 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
27074 && valx2->v.val_vec.length == valy2->v.val_vec.length
27075 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
27076 valx2->v.val_vec.elt_size
27077 * valx2->v.val_vec.length) == 0;
27078 case dw_val_class_const_double:
27079 return valx2->v.val_double.low == valy2->v.val_double.low
27080 && valx2->v.val_double.high == valy2->v.val_double.high;
27081 case dw_val_class_wide_int:
27082 return *valx2->v.val_wide == *valy2->v.val_wide;
27083 case dw_val_class_addr:
27084 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
27085 default:
27086 gcc_unreachable ();
27087 }
27088 case DW_OP_bregx:
27089 case DW_OP_bit_piece:
27090 return valx1->v.val_int == valy1->v.val_int
27091 && valx2->v.val_int == valy2->v.val_int;
27092 case DW_OP_addr:
27093 hash_addr:
27094 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
27095 case DW_OP_GNU_addr_index:
27096 case DW_OP_GNU_const_index:
27097 {
27098 rtx ax1 = valx1->val_entry->addr.rtl;
27099 rtx ay1 = valy1->val_entry->addr.rtl;
27100 return rtx_equal_p (ax1, ay1);
27101 }
27102 case DW_OP_GNU_implicit_pointer:
27103 return valx1->val_class == dw_val_class_die_ref
27104 && valx1->val_class == valy1->val_class
27105 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
27106 && valx2->v.val_int == valy2->v.val_int;
27107 case DW_OP_GNU_entry_value:
27108 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
27109 case DW_OP_GNU_const_type:
27110 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
27111 || valx2->val_class != valy2->val_class)
27112 return false;
27113 switch (valx2->val_class)
27114 {
27115 case dw_val_class_const:
27116 return valx2->v.val_int == valy2->v.val_int;
27117 case dw_val_class_vec:
27118 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
27119 && valx2->v.val_vec.length == valy2->v.val_vec.length
27120 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
27121 valx2->v.val_vec.elt_size
27122 * valx2->v.val_vec.length) == 0;
27123 case dw_val_class_const_double:
27124 return valx2->v.val_double.low == valy2->v.val_double.low
27125 && valx2->v.val_double.high == valy2->v.val_double.high;
27126 case dw_val_class_wide_int:
27127 return *valx2->v.val_wide == *valy2->v.val_wide;
27128 default:
27129 gcc_unreachable ();
27130 }
27131 case DW_OP_GNU_regval_type:
27132 case DW_OP_GNU_deref_type:
27133 return valx1->v.val_int == valy1->v.val_int
27134 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
27135 case DW_OP_GNU_convert:
27136 case DW_OP_GNU_reinterpret:
27137 if (valx1->val_class != valy1->val_class)
27138 return false;
27139 if (valx1->val_class == dw_val_class_unsigned_const)
27140 return valx1->v.val_unsigned == valy1->v.val_unsigned;
27141 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
27142 case DW_OP_GNU_parameter_ref:
27143 return valx1->val_class == dw_val_class_die_ref
27144 && valx1->val_class == valy1->val_class
27145 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
27146 default:
27147 /* Other codes have no operands. */
27148 return true;
27149 }
27150 }
27151
27152 /* Return true if DWARF location expressions X and Y are the same. */
27153
27154 static inline bool
27155 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
27156 {
27157 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
27158 if (x->dw_loc_opc != y->dw_loc_opc
27159 || x->dtprel != y->dtprel
27160 || !compare_loc_operands (x, y))
27161 break;
27162 return x == NULL && y == NULL;
27163 }
27164
27165 /* Hashtable helpers. */
27166
27167 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
27168 {
27169 static inline hashval_t hash (const dw_loc_list_struct *);
27170 static inline bool equal (const dw_loc_list_struct *,
27171 const dw_loc_list_struct *);
27172 };
27173
27174 /* Return precomputed hash of location list X. */
27175
27176 inline hashval_t
27177 loc_list_hasher::hash (const dw_loc_list_struct *x)
27178 {
27179 return x->hash;
27180 }
27181
27182 /* Return true if location lists A and B are the same. */
27183
27184 inline bool
27185 loc_list_hasher::equal (const dw_loc_list_struct *a,
27186 const dw_loc_list_struct *b)
27187 {
27188 if (a == b)
27189 return 1;
27190 if (a->hash != b->hash)
27191 return 0;
27192 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
27193 if (strcmp (a->begin, b->begin) != 0
27194 || strcmp (a->end, b->end) != 0
27195 || (a->section == NULL) != (b->section == NULL)
27196 || (a->section && strcmp (a->section, b->section) != 0)
27197 || !compare_locs (a->expr, b->expr))
27198 break;
27199 return a == NULL && b == NULL;
27200 }
27201
27202 typedef hash_table<loc_list_hasher> loc_list_hash_type;
27203
27204
27205 /* Recursively optimize location lists referenced from DIE
27206 children and share them whenever possible. */
27207
27208 static void
27209 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
27210 {
27211 dw_die_ref c;
27212 dw_attr_node *a;
27213 unsigned ix;
27214 dw_loc_list_struct **slot;
27215
27216 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
27217 if (AT_class (a) == dw_val_class_loc_list)
27218 {
27219 dw_loc_list_ref list = AT_loc_list (a);
27220 /* TODO: perform some optimizations here, before hashing
27221 it and storing into the hash table. */
27222 hash_loc_list (list);
27223 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
27224 if (*slot == NULL)
27225 *slot = list;
27226 else
27227 a->dw_attr_val.v.val_loc_list = *slot;
27228 }
27229
27230 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
27231 }
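
/* The net effect is that DIEs whose location lists are structurally
   identical (same begin/end labels, same section, same expressions as
   checked by compare_locs) share a single dw_loc_list_struct: the first
   list seen becomes the canonical hash table entry and later attributes
   are redirected to it, so only one copy needs to be emitted into
   .debug_loc.  */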
27232
27233
27234 /* Recursively assign each location list a unique index into the debug_addr
27235 section. */
27236
27237 static void
27238 index_location_lists (dw_die_ref die)
27239 {
27240 dw_die_ref c;
27241 dw_attr_node *a;
27242 unsigned ix;
27243
27244 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
27245 if (AT_class (a) == dw_val_class_loc_list)
27246 {
27247 dw_loc_list_ref list = AT_loc_list (a);
27248 dw_loc_list_ref curr;
27249 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
27250 {
27251 /* Don't index an entry that has already been indexed
27252 or won't be output. */
27253 if (curr->begin_entry != NULL
27254 || (strcmp (curr->begin, curr->end) == 0 && !curr->force))
27255 continue;
27256
27257 curr->begin_entry
27258 = add_addr_table_entry (xstrdup (curr->begin),
27259 ate_kind_label);
27260 }
27261 }
27262
27263 FOR_EACH_CHILD (die, c, index_location_lists (c));
27264 }
27265
27266 /* Optimize location lists referenced from DIE
27267 children and share them whenever possible. */
27268
27269 static void
27270 optimize_location_lists (dw_die_ref die)
27271 {
27272 loc_list_hash_type htab (500);
27273 optimize_location_lists_1 (die, &htab);
27274 }
27275 \f
27276 /* Traverse the limbo die list, and add parent/child links. The only
27277 dies without parents that should be here are concrete instances of
27278 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
27279 For concrete instances, we can get the parent die from the abstract
27280 instance. */
27281
27282 static void
27283 flush_limbo_die_list (void)
27284 {
27285 limbo_die_node *node;
27286
27287 /* get_context_die calls force_decl_die, which can put new DIEs on the
27288 limbo list in LTO mode when nested functions are put in a different
27289 partition than that of their parent function. */
27290 while ((node = limbo_die_list))
27291 {
27292 dw_die_ref die = node->die;
27293 limbo_die_list = node->next;
27294
27295 if (die->die_parent == NULL)
27296 {
27297 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
27298
27299 if (origin && origin->die_parent)
27300 add_child_die (origin->die_parent, die);
27301 else if (is_cu_die (die))
27302 ;
27303 else if (seen_error ())
27304 /* It's OK to be confused by errors in the input. */
27305 add_child_die (comp_unit_die (), die);
27306 else
27307 {
27308 /* In certain situations, the lexical block containing a
27309 nested function can be optimized away, which results
27310 in the nested function die being orphaned. Likewise
27311 with the return type of that nested function. Force
27312 this to be a child of the containing function.
27313
27314 It may happen that even the containing function got fully
27315 inlined and optimized out. In that case we are lost and
27316 assign it to the compilation unit DIE instead. This should not be
27317 a big issue, as the function is likely unreachable too. */
27318 gcc_assert (node->created_for);
27319
27320 if (DECL_P (node->created_for))
27321 origin = get_context_die (DECL_CONTEXT (node->created_for));
27322 else if (TYPE_P (node->created_for))
27323 origin = scope_die_for (node->created_for, comp_unit_die ());
27324 else
27325 origin = comp_unit_die ();
27326
27327 add_child_die (origin, die);
27328 }
27329 }
27330 }
27331 }
27332
27333 /* Output stuff that dwarf requires at the end of every file,
27334 and generate the DWARF-2 debugging info. */
27335
27336 static void
27337 dwarf2out_finish (const char *filename)
27338 {
27339 comdat_type_node *ctnode;
27340 dw_die_ref main_comp_unit_die;
27341
27342 /* Flush out any latecomers to the limbo party. */
27343 flush_limbo_die_list ();
27344
27345 /* We shouldn't have any symbols with delayed asm names for
27346 DIEs generated after early finish. */
27347 gcc_assert (deferred_asm_name == NULL);
27348
27349 /* PCH might result in DW_AT_producer string being restored from the
27350 header compilation, so always fill it with an empty string initially
27351 and overwrite it only here. */
27352 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
27353 producer_string = gen_producer_string ();
27354 producer->dw_attr_val.v.val_str->refcount--;
27355 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
27356
27357 gen_remaining_tmpl_value_param_die_attribute ();
27358
27359 /* Add the name for the main input file now. We delayed this from
27360 dwarf2out_init to avoid complications with PCH.
27361 For LTO-produced units, use a fixed artificial name to avoid
27362 leaking tempfile names into the DWARF. */
27363 if (!in_lto_p)
27364 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
27365 else
27366 add_name_attribute (comp_unit_die (), "<artificial>");
27367 if (!IS_ABSOLUTE_PATH (filename) || targetm.force_at_comp_dir)
27368 add_comp_dir_attribute (comp_unit_die ());
27369 else if (get_AT (comp_unit_die (), DW_AT_comp_dir) == NULL)
27370 {
27371 bool p = false;
27372 file_table->traverse<bool *, file_table_relative_p> (&p);
27373 if (p)
27374 add_comp_dir_attribute (comp_unit_die ());
27375 }
27376
27377 #if ENABLE_ASSERT_CHECKING
27378 {
27379 dw_die_ref die = comp_unit_die (), c;
27380 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
27381 }
27382 #endif
27383 resolve_addr (comp_unit_die ());
27384 move_marked_base_types ();
27385
27386 /* Walk through the list of incomplete types again, trying once more to
27387 emit full debugging info for them. */
27388 retry_incomplete_types ();
27389
27390 if (flag_eliminate_unused_debug_types)
27391 prune_unused_types ();
27392
27393 /* Generate separate COMDAT sections for type DIEs. */
27394 if (use_debug_types)
27395 {
27396 break_out_comdat_types (comp_unit_die ());
27397
27398 /* Each new type_unit DIE was added to the limbo die list when created.
27399 Since these have all been added to comdat_type_list, clear the
27400 limbo die list. */
27401 limbo_die_list = NULL;
27402
27403 /* For each new comdat type unit, copy declarations for incomplete
27404 types to make the new unit self-contained (i.e., no direct
27405 references to the main compile unit). */
27406 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
27407 copy_decls_for_unworthy_types (ctnode->root_die);
27408 copy_decls_for_unworthy_types (comp_unit_die ());
27409
27410 /* In the process of copying declarations from one unit to another,
27411 we may have left some declarations behind that are no longer
27412 referenced. Prune them. */
27413 prune_unused_types ();
27414 }
27415
27416 /* Generate separate CUs for each of the include files we've seen.
27417 They will go into limbo_die_list. */
27418 if (flag_eliminate_dwarf2_dups)
27419 break_out_includes (comp_unit_die ());
27420
27421 /* Traverse the DIE's and add sibling attributes to those DIE's that
27422 have children. */
27423 add_sibling_attributes (comp_unit_die ());
27424 limbo_die_node *node;
27425 for (node = limbo_die_list; node; node = node->next)
27426 add_sibling_attributes (node->die);
27427 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
27428 add_sibling_attributes (ctnode->root_die);
27429
27430 /* When splitting DWARF info, we put some attributes in the
27431 skeleton compile_unit DIE that remains in the .o, while
27432 most attributes go in the DWO compile_unit_die. */
27433 if (dwarf_split_debug_info)
27434 main_comp_unit_die = gen_compile_unit_die (NULL);
27435 else
27436 main_comp_unit_die = comp_unit_die ();
27437
27438 /* Output a terminator label for the .text section. */
27439 switch_to_section (text_section);
27440 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
27441 if (cold_text_section)
27442 {
27443 switch_to_section (cold_text_section);
27444 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
27445 }
27446
27447 /* We can only use the low/high_pc attributes if all of the code was
27448 in .text. */
27449 if (!have_multiple_function_sections
27450 || (dwarf_version < 3 && dwarf_strict))
27451 {
27452 /* Don't add if the CU has no associated code. */
27453 if (text_section_used)
27454 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
27455 text_end_label, true);
27456 }
27457 else
27458 {
27459 unsigned fde_idx;
27460 dw_fde_ref fde;
27461 bool range_list_added = false;
27462
27463 if (text_section_used)
27464 add_ranges_by_labels (main_comp_unit_die, text_section_label,
27465 text_end_label, &range_list_added, true);
27466 if (cold_text_section_used)
27467 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
27468 cold_end_label, &range_list_added, true);
27469
27470 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
27471 {
27472 if (DECL_IGNORED_P (fde->decl))
27473 continue;
27474 if (!fde->in_std_section)
27475 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
27476 fde->dw_fde_end, &range_list_added,
27477 true);
27478 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
27479 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
27480 fde->dw_fde_second_end, &range_list_added,
27481 true);
27482 }
27483
27484 if (range_list_added)
27485 {
27486 /* We need to give .debug_loc and .debug_ranges an appropriate
27487 "base address". Use zero so that these addresses become
27488 absolute. Historically, we've emitted the unexpected
27489 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
27490 Emit both to give time for other tools to adapt. */
27491 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
27492 if (! dwarf_strict && dwarf_version < 4)
27493 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
27494
27495 add_ranges (NULL);
27496 }
27497 }
27498
27499 if (debug_info_level >= DINFO_LEVEL_TERSE)
27500 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
27501 debug_line_section_label);
27502
27503 if (have_macinfo)
27504 add_AT_macptr (comp_unit_die (),
27505 dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros,
27506 macinfo_section_label);
27507
27508 if (dwarf_split_debug_info)
27509 {
27510 /* optimize_location_lists calculates the size of the lists,
27511 so index them first, and assign indices to the entries.
27512 Although optimize_location_lists will remove entries from
27513 the table, it only does so for duplicates, and therefore
27514 only reduces ref_counts to 1. */
27515 index_location_lists (comp_unit_die ());
27516
27517 if (addr_index_table != NULL)
27518 {
27519 unsigned int index = 0;
27520 addr_index_table
27521 ->traverse_noresize<unsigned int *, index_addr_table_entry>
27522 (&index);
27523 }
27524 }
27525
27526 if (have_location_lists)
27527 optimize_location_lists (comp_unit_die ());
27528
27529 save_macinfo_strings ();
27530
27531 if (dwarf_split_debug_info)
27532 {
27533 unsigned int index = 0;
27534
27535 /* Add attributes common to skeleton compile_units and
27536 type_units. Because these attributes include strings, it
27537 must be done before freezing the string table. Top-level
27538 skeleton die attrs are added when the skeleton type unit is
27539 created, so ensure it is created by this point. */
27540 add_top_level_skeleton_die_attrs (main_comp_unit_die);
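/* Assign an index to each entry in the string table; with split DWARF,
   strings are referenced by index rather than by a direct offset into
   .debug_str.  */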
27541 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
27542 }
27543
27544 /* Output all of the compilation units. We put the main one last so that
27545 the offsets are available to output_pubnames. */
27546 for (node = limbo_die_list; node; node = node->next)
27547 output_comp_unit (node->die, 0);
27548
27549 hash_table<comdat_type_hasher> comdat_type_table (100);
27550 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
27551 {
27552 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
27553
27554 /* Don't output duplicate types. */
27555 if (*slot != HTAB_EMPTY_ENTRY)
27556 continue;
27557
27558 /* Add a pointer to the line table for the main compilation unit
27559 so that the debugger can make sense of DW_AT_decl_file
27560 attributes. */
27561 if (debug_info_level >= DINFO_LEVEL_TERSE)
27562 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
27563 (!dwarf_split_debug_info
27564 ? debug_line_section_label
27565 : debug_skeleton_line_section_label));
27566
27567 output_comdat_type_unit (ctnode);
27568 *slot = ctnode;
27569 }
27570
27571 /* The AT_pubnames attribute needs to go in all skeleton dies, including
27572 both the main_cu and all skeleton TUs. Making this call unconditional
27573 would end up either adding a second copy of the AT_pubnames attribute, or
27574 requiring a special case in add_top_level_skeleton_die_attrs. */
27575 if (!dwarf_split_debug_info)
27576 add_AT_pubnames (comp_unit_die ());
27577
27578 if (dwarf_split_debug_info)
27579 {
27580 int mark;
27581 unsigned char checksum[16];
27582 struct md5_ctx ctx;
27583
27584 /* Compute a checksum of the comp_unit to use as the dwo_id. */
27585 md5_init_ctx (&ctx);
27586 mark = 0;
27587 die_checksum (comp_unit_die (), &ctx, &mark);
27588 unmark_all_dies (comp_unit_die ());
27589 md5_finish_ctx (&ctx, checksum);
27590
27591 /* Use the first 8 bytes of the checksum as the dwo_id,
27592 and add it to both comp-unit DIEs. */
27593 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
27594 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
27595
27596 /* Add the base offset of the ranges table to the skeleton
27597 comp-unit DIE. */
27598 if (ranges_table_in_use)
27599 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
27600 ranges_section_label);
27601
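/* Emit the address table that the indices assigned above refer to.  */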
27602 switch_to_section (debug_addr_section);
27603 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
27604 output_addr_table ();
27605 }
27606
27607 /* Output the main compilation unit if non-empty or if .debug_macinfo
27608 or .debug_macro will be emitted. */
27609 output_comp_unit (comp_unit_die (), have_macinfo);
27610
27611 if (dwarf_split_debug_info && info_section_emitted)
27612 output_skeleton_debug_sections (main_comp_unit_die);
27613
27614 /* Output the abbreviation table. */
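/* Abbreviation codes start at 1 (code 0 terminates a sibling chain), so
   slot zero of the table is never used and a count of one means no
   abbreviations were created.  */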
27615 if (abbrev_die_table_in_use != 1)
27616 {
27617 switch_to_section (debug_abbrev_section);
27618 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
27619 output_abbrev_section ();
27620 }
27621
27622 /* Output location list section if necessary. */
27623 if (have_location_lists)
27624 {
27625 /* Output the location lists info. */
27626 switch_to_section (debug_loc_section);
27627 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
27628 output_location_lists (comp_unit_die ());
27629 }
27630
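/* Emit the pubnames and pubtypes sections, if they were requested.  */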
27631 output_pubtables ();
27632
27633 /* Output the address range information if a CU (.debug_info section)
27634 was emitted. We output an empty table even if we had no functions
27635 to put in it. This is because the consumer has no way to tell the
27636 difference between an empty table that we omitted and failure to
27637 generate a table that would have contained data. */
27638 if (info_section_emitted)
27639 {
27640 switch_to_section (debug_aranges_section);
27641 output_aranges ();
27642 }
27643
27644 /* Output ranges section if necessary. */
27645 if (ranges_table_in_use)
27646 {
27647 switch_to_section (debug_ranges_section);
27648 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
27649 output_ranges ();
27650 }
27651
27652 /* Have to end the macro section. */
27653 if (have_macinfo)
27654 {
27655 switch_to_section (debug_macinfo_section);
27656 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
27657 output_macinfo ();
27658 dw2_asm_output_data (1, 0, "End compilation unit");
27659 }
27660
27661 /* Output the source line correspondence table. We must do this
27662 even if there is no line information. Otherwise, on an empty
27663 translation unit, we will generate a present, but empty,
27664 .debug_info section. IRIX 6.5 `nm' will then complain when
27665 examining the file. This is done late so that any filenames
27666 used by the debug_info section are marked as 'used'. */
27667 switch_to_section (debug_line_section);
27668 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
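/* If the assembler is building the line table itself from .file/.loc
   directives, there is nothing more to emit for it here.  */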
27669 if (! DWARF2_ASM_LINE_DEBUG_INFO)
27670 output_line_info (false);
27671
27672 if (dwarf_split_debug_info && info_section_emitted)
27673 {
27674 switch_to_section (debug_skeleton_line_section);
27675 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
27676 output_line_info (true);
27677 }
27678
27679 /* If we emitted any indirect strings, output the string table too. */
27680 if (debug_str_hash || skeleton_debug_str_hash)
27681 output_indirect_strings ();
27682 }
27683
27684 /* Perform any cleanups needed after the early debug generation pass
27685 has run. */
27686
27687 static void
27688 dwarf2out_early_finish (void)
27689 {
27690 /* The point here is to flush out the limbo list so that it is empty
27691 and we don't need to stream it for LTO. */
27692 flush_limbo_die_list ();
27693
27694 gen_scheduled_generic_parms_dies ();
27695 gen_remaining_tmpl_value_param_die_attribute ();
27696
27697 /* Add DW_AT_linkage_name for all deferred DIEs. */
27698 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
27699 {
27700 tree decl = node->created_for;
27701 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
27702 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
27703 ended up in deferred_asm_name before we knew it was
27704 constant and never written to disk. */
27705 && DECL_ASSEMBLER_NAME (decl))
27706 {
27707 add_linkage_attr (node->die, decl);
27708 move_linkage_attr (node->die);
27709 }
27710 }
27711 deferred_asm_name = NULL;
27712 }
27713
27714 /* Reset all state within dwarf2out.c so that we can rerun the compiler
27715 within the same process. For use by toplev::finalize. */
27716
27717 void
27718 dwarf2out_c_finalize (void)
27719 {
27720 last_var_location_insn = NULL;
27721 cached_next_real_insn = NULL;
27722 used_rtx_array = NULL;
27723 incomplete_types = NULL;
27724 decl_scope_table = NULL;
27725 debug_info_section = NULL;
27726 debug_skeleton_info_section = NULL;
27727 debug_abbrev_section = NULL;
27728 debug_skeleton_abbrev_section = NULL;
27729 debug_aranges_section = NULL;
27730 debug_addr_section = NULL;
27731 debug_macinfo_section = NULL;
27732 debug_line_section = NULL;
27733 debug_skeleton_line_section = NULL;
27734 debug_loc_section = NULL;
27735 debug_pubnames_section = NULL;
27736 debug_pubtypes_section = NULL;
27737 debug_str_section = NULL;
27738 debug_str_dwo_section = NULL;
27739 debug_str_offsets_section = NULL;
27740 debug_ranges_section = NULL;
27741 debug_frame_section = NULL;
27742 fde_vec = NULL;
27743 debug_str_hash = NULL;
27744 skeleton_debug_str_hash = NULL;
27745 dw2_string_counter = 0;
27746 have_multiple_function_sections = false;
27747 text_section_used = false;
27748 cold_text_section_used = false;
27749 cold_text_section = NULL;
27750 current_unit_personality = NULL;
27751
27752 next_die_offset = 0;
27753 single_comp_unit_die = NULL;
27754 comdat_type_list = NULL;
27755 limbo_die_list = NULL;
27756 file_table = NULL;
27757 decl_die_table = NULL;
27758 common_block_die_table = NULL;
27759 decl_loc_table = NULL;
27760 call_arg_locations = NULL;
27761 call_arg_loc_last = NULL;
27762 call_site_count = -1;
27763 tail_call_site_count = -1;
27764 cached_dw_loc_list_table = NULL;
27765 abbrev_die_table = NULL;
27766 abbrev_die_table_allocated = 0;
27767 abbrev_die_table_in_use = 0;
27768 delete dwarf_proc_stack_usage_map;
27769 dwarf_proc_stack_usage_map = NULL;
27770 line_info_label_num = 0;
27771 cur_line_info_table = NULL;
27772 text_section_line_info = NULL;
27773 cold_text_section_line_info = NULL;
27774 separate_line_info = NULL;
27775 info_section_emitted = false;
27776 pubname_table = NULL;
27777 pubtype_table = NULL;
27778 macinfo_table = NULL;
27779 ranges_table = NULL;
27780 ranges_table_allocated = 0;
27781 ranges_table_in_use = 0;
27782 ranges_by_label = 0;
27783 ranges_by_label_allocated = 0;
27784 ranges_by_label_in_use = 0;
27785 have_location_lists = false;
27786 loclabel_num = 0;
27787 poc_label_num = 0;
27788 last_emitted_file = NULL;
27789 label_num = 0;
27790 tmpl_value_parm_die_table = NULL;
27791 generic_type_instances = NULL;
27792 frame_pointer_fb_offset = 0;
27793 frame_pointer_fb_offset_valid = false;
27794 base_types.release ();
27795 XDELETEVEC (producer_string);
27796 producer_string = NULL;
27797 }
27798
27799 #include "gt-dwarf2out.h"