/* Output Dwarf2 format symbol table information from GCC.
- Copyright (C) 1992-2015 Free Software Foundation, Inc.
+ Copyright (C) 1992-2016 Free Software Foundation, Inc.
Contributed by Gary Funck (gary@intrepid.com).
Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
Extensively modified by Jason Merrill (jason@cygnus.com).
dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
descr->dw_loc_opc = op;
-#if ENABLE_CHECKING
- descr->dw_loc_frame_offset = -1;
-#endif
descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
descr->dw_loc_oprnd1.val_entry = NULL;
descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
/* The current table to which we should emit line number information
for the current function. This will be set up at the beginning of
assembly for the function. */
-static dw_line_info_table *cur_line_info_table;
+static GTY(()) dw_line_info_table *cur_line_info_table;
/* The two default tables of line number info. */
static GTY(()) dw_line_info_table *text_section_line_info;
static vec<dw_die_ref> base_types;
+/* Pointer to vector of DW_TAG_string_type DIEs that need finalization
+ once all arguments are parsed. */
+static vec<dw_die_ref> *string_types;
+
/* Flags to represent a set of attribute classes for attributes that represent
a scalar value (bounds, pointers, ...). */
enum dw_scalar_form
static dw_line_info_table *new_line_info_table (void);
static void output_line_info (bool);
static void output_file_names (void);
-static dw_die_ref base_type_die (tree);
+static dw_die_ref base_type_die (tree, bool);
static int is_base_type (tree);
-static dw_die_ref subrange_type_die (tree, tree, tree, dw_die_ref);
+static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
static int decl_quals (const_tree);
-static dw_die_ref modified_type_die (tree, int, dw_die_ref);
+static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
static int type_is_enum (const_tree);
static inline int local_scope_p (dw_die_ref);
static inline int class_scope_p (dw_die_ref);
static inline int class_or_namespace_scope_p (dw_die_ref);
-static void add_type_attribute (dw_die_ref, tree, int, dw_die_ref);
+static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
static void add_calling_convention_attribute (dw_die_ref, tree);
static const char *type_tag (const_tree);
static tree member_declared_type (const_tree);
die->die_child = child_die;
}
+/* Like add_child_die, but put CHILD_DIE after AFTER_DIE instead of at
+   the end of DIE's child list.  DIE must already have at least one
+   child (AFTER_DIE must be one of them), and CHILD_DIE must not be DIE
+   itself.  */
+
+static void
+add_child_die_after (dw_die_ref die, dw_die_ref child_die,
+ dw_die_ref after_die)
+{
+ gcc_assert (die
+ && child_die
+ && after_die
+ && die->die_child
+ && die != child_die);
+
+ child_die->die_parent = die;
+ /* Splice CHILD_DIE into the sibling chain immediately after
+ AFTER_DIE.  */
+ child_die->die_sib = after_die->die_sib;
+ after_die->die_sib = child_die;
+ /* die_child tracks the most recently appended child (see
+ add_child_die); if AFTER_DIE held that position, the new child
+ takes it over.  */
+ if (die->die_child == after_die)
+ die->die_child = child_die;
+}
+
/* Unassociate CHILD from its parent, and make its parent be
NEW_PARENT. */
dw_attr_node *at_small;
dw_attr_node *at_segment;
dw_attr_node *at_string_length;
+ dw_attr_node *at_string_length_bit_size;
+ dw_attr_node *at_string_length_byte_size;
dw_attr_node *at_threads_scaled;
dw_attr_node *at_upper_bound;
dw_attr_node *at_use_location;
case DW_AT_string_length:
attrs->at_string_length = a;
break;
+ case DW_AT_string_length_bit_size:
+ attrs->at_string_length_bit_size = a;
+ break;
+ case DW_AT_string_length_byte_size:
+ attrs->at_string_length_byte_size = a;
+ break;
case DW_AT_threads_scaled:
attrs->at_threads_scaled = a;
break;
CHECKSUM_ATTR (attrs.at_small);
CHECKSUM_ATTR (attrs.at_segment);
CHECKSUM_ATTR (attrs.at_string_length);
+ CHECKSUM_ATTR (attrs.at_string_length_bit_size);
+ CHECKSUM_ATTR (attrs.at_string_length_byte_size);
CHECKSUM_ATTR (attrs.at_threads_scaled);
CHECKSUM_ATTR (attrs.at_upper_bound);
CHECKSUM_ATTR (attrs.at_use_location);
comdat_type_node *type_node,
hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
{
- /* We do this for COMDAT section, which is DWARFv4 specific, so
- DWARF procedure are always DW_TAG_dwarf_procedure DIEs (unlike
- DW_TAG_variable in DWARFv3). */
gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
/* DWARF procedures are not supposed to have children... */
ASM_OUTPUT_LABEL (asm_out_file, l2);
}
\f
+/* Return true if DW_AT_endianity should be emitted according to REVERSE,
+   i.e. if the scalar is stored in the reverse storage order and the
+   attribute is expressible (DW_AT_endianity is a DWARF 3 addition, so it
+   is emitted only for DWARF 3+ or when not restricted to strict DWARF). */
+
+static inline bool
+need_endianity_attribute_p (bool reverse)
+{
+ return reverse && (dwarf_version >= 3 || !dwarf_strict);
+}
+
/* Given a pointer to a tree node for some base type, return a pointer to
- a DIE that describes the given type.
+ a DIE that describes the given type. REVERSE is true if the type is
+ to be interpreted in the reverse storage order wrt the target order.
This routine must only be called for GCC type nodes that correspond to
Dwarf base (fundamental) types. */
static dw_die_ref
-base_type_die (tree type)
+base_type_die (tree type, bool reverse)
{
dw_die_ref base_type_result;
enum dwarf_type encoding;
bool fpt_used = false;
struct fixed_point_type_info fpt_info;
+ tree type_bias = NULL_TREE;
if (TREE_CODE (type) == ERROR_MARK || TREE_CODE (type) == VOID_TYPE)
return 0;
encoding = DW_ATE_unsigned;
else
encoding = DW_ATE_signed;
+
+ if (!dwarf_strict
+ && lang_hooks.types.get_type_bias)
+ type_bias = lang_hooks.types.get_type_bias (type);
break;
case REAL_TYPE:
int_size_in_bytes (type));
add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
+ if (need_endianity_attribute_p (reverse))
+ add_AT_unsigned (base_type_result, DW_AT_endianity,
+ BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
+
if (fpt_used)
{
switch (fpt_info.scale_factor_kind)
gcc_unreachable ();
}
}
+
+ if (type_bias)
+ add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
+ dw_scalar_form_constant
+ | dw_scalar_form_exprloc
+ | dw_scalar_form_reference,
+ NULL);
+
add_pubtype (type, base_type_result);
return base_type_result;
to a DIE that describes the given type. */
static dw_die_ref
-subrange_type_die (tree type, tree low, tree high, dw_die_ref context_die)
+subrange_type_die (tree type, tree low, tree high, tree bias,
+ dw_die_ref context_die)
{
dw_die_ref subrange_die;
const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
if (high)
add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
+ if (bias && !dwarf_strict)
+ add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
+ dw_scalar_form_constant
+ | dw_scalar_form_exprloc
+ | dw_scalar_form_reference,
+ NULL);
return subrange_die;
}
decl_quals (const_tree decl)
{
return ((TREE_READONLY (decl)
+ /* The C++ front-end correctly marks reference-typed
+ variables as readonly, but from a language (and debug
+ info) standpoint they are not const-qualified. */
+ && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
| (TREE_THIS_VOLATILE (decl)
? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
return best_qual;
}
+/* Mapping of TYPE_QUAL_* flags to the DWARF tag of the corresponding
+   qualified-type DIE, in the canonical emission order used when
+   chaining qualifier DIEs.  */
+struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
+static const dwarf_qual_info_t dwarf_qual_info[] =
+{
+ { TYPE_QUAL_CONST, DW_TAG_const_type },
+ { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
+ { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
+ { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
+};
+static const unsigned int dwarf_qual_info_size
+ = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
+
+/* If DIE is a qualified DIE of some base DIE with the same parent,
+ return the base DIE, otherwise return NULL. Set MASK to the
+ qualifiers added compared to the returned DIE. Recurse through at
+ most DEPTH further levels of qualifier DIEs. */
+
+static dw_die_ref
+qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
+{
+ unsigned int i;
+ /* DIE itself must carry one of the known qualifier tags.  */
+ for (i = 0; i < dwarf_qual_info_size; i++)
+ if (die->die_tag == dwarf_qual_info[i].t)
+ break;
+ if (i == dwarf_qual_info_size)
+ return NULL;
+ /* A pure qualifier DIE has exactly one attribute: DW_AT_type.  */
+ if (vec_safe_length (die->die_attr) != 1)
+ return NULL;
+ dw_die_ref type = get_AT_ref (die, DW_AT_type);
+ /* The qualified DIE and its base must share the same parent scope.  */
+ if (type == NULL || type->die_parent != die->die_parent)
+ return NULL;
+ *mask |= dwarf_qual_info[i].q;
+ /* Peel further qualifier layers, accumulating their bits in MASK.  */
+ if (depth)
+ {
+ dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
+ if (ret)
+ return ret;
+ }
+ return type;
+}
+
/* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
- entry that chains various modifiers in front of the given type. */
+ entry that chains the modifiers specified by CV_QUALS in front of the
+ given type. REVERSE is true if the type is to be interpreted in the
+ reverse storage order wrt the target order. */
static dw_die_ref
-modified_type_die (tree type, int cv_quals, dw_die_ref context_die)
+modified_type_die (tree type, int cv_quals, bool reverse,
+ dw_die_ref context_die)
{
enum tree_code code = TREE_CODE (type);
dw_die_ref mod_type_die;
tree debug_type = lang_hooks.types.get_debug_type (type);
if (debug_type != NULL_TREE && debug_type != type)
- return modified_type_die (debug_type, cv_quals, context_die);
+ return modified_type_die (debug_type, cv_quals, reverse, context_die);
}
cv_quals &= cv_qual_mask;
if (qualified_type)
{
mod_type_die = lookup_type_die (qualified_type);
- if (mod_type_die)
+
+ /* DW_AT_endianity doesn't come from a qualifier on the type. */
+ if (mod_type_die
+ && (!need_endianity_attribute_p (reverse)
+ || !is_base_type (type)
+ || get_AT_unsigned (mod_type_die, DW_AT_endianity)))
return mod_type_die;
}
|| (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
/* cv-unqualified version of named type. Just use
the unnamed type to which it refers. */
- return modified_type_die (DECL_ORIGINAL_TYPE (name),
- cv_quals, context_die);
+ return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
+ reverse, context_die);
/* Else cv-qualified version of named type; fall through. */
}
}
if (cv_quals)
{
- struct qual_info { int q; enum dwarf_tag t; };
- static const struct qual_info qual_info[] =
- {
- { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type },
- { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
- { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
- { TYPE_QUAL_CONST, DW_TAG_const_type },
- };
- int sub_quals;
+ int sub_quals = 0, first_quals = 0;
unsigned i;
+ dw_die_ref first = NULL, last = NULL;
/* Determine a lesser qualified type that most closely matches
this one. Then generate DW_TAG_* entries for the remaining
qualifiers. */
sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
cv_qual_mask);
- mod_type_die = modified_type_die (type, sub_quals, context_die);
+ if (sub_quals && use_debug_types)
+ {
+ bool needed = false;
+ /* If emitting type units, make sure the order of qualifiers
+ is canonical. Thus, start from unqualified type if
+ an earlier qualifier is missing in sub_quals, but some later
+ one is present there. */
+ for (i = 0; i < dwarf_qual_info_size; i++)
+ if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
+ needed = true;
+ else if (needed && (dwarf_qual_info[i].q & cv_quals))
+ {
+ sub_quals = 0;
+ break;
+ }
+ }
+ mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
+ if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
+ {
+ /* As not all intermediate qualified DIEs have corresponding
+ tree types, ensure that qualified DIEs in the same scope
+ as their DW_AT_type are emitted after their DW_AT_type,
+ only with other qualified DIEs for the same type possibly
+ in between them. Determine the range of such qualified
+ DIEs now (first being the base type, last being corresponding
+ last qualified DIE for it). */
+ unsigned int count = 0;
+ first = qualified_die_p (mod_type_die, &first_quals,
+ dwarf_qual_info_size);
+ if (first == NULL)
+ first = mod_type_die;
+ gcc_assert ((first_quals & ~sub_quals) == 0);
+ for (count = 0, last = first;
+ count < (1U << dwarf_qual_info_size);
+ count++, last = last->die_sib)
+ {
+ int quals = 0;
+ if (last == mod_scope->die_child)
+ break;
+ if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
+ != first)
+ break;
+ }
+ }
- for (i = 0; i < sizeof (qual_info) / sizeof (qual_info[0]); i++)
- if (qual_info[i].q & cv_quals & ~sub_quals)
+ for (i = 0; i < dwarf_qual_info_size; i++)
+ if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
{
- dw_die_ref d = new_die (qual_info[i].t, mod_scope, type);
+ dw_die_ref d;
+ if (first && first != last)
+ {
+ for (d = first->die_sib; ; d = d->die_sib)
+ {
+ int quals = 0;
+ qualified_die_p (d, &quals, dwarf_qual_info_size);
+ if (quals == (first_quals | dwarf_qual_info[i].q))
+ break;
+ if (d == last)
+ {
+ d = NULL;
+ break;
+ }
+ }
+ if (d)
+ {
+ mod_type_die = d;
+ continue;
+ }
+ }
+ if (first)
+ {
+ d = ggc_cleared_alloc<die_node> ();
+ d->die_tag = dwarf_qual_info[i].t;
+ add_child_die_after (mod_scope, d, last);
+ last = d;
+ }
+ else
+ d = new_die (dwarf_qual_info[i].t, mod_scope, type);
if (mod_type_die)
add_AT_die_ref (d, DW_AT_type, mod_type_die);
mod_type_die = d;
+ first_quals |= dwarf_qual_info[i].q;
}
}
else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
&& TREE_TYPE (type) != NULL_TREE
&& subrange_type_for_debug_p (type, &low, &high))
{
- mod_type_die = subrange_type_die (type, low, high, context_die);
+ tree bias = NULL_TREE;
+ if (lang_hooks.types.get_type_bias)
+ bias = lang_hooks.types.get_type_bias (type);
+ mod_type_die = subrange_type_die (type, low, high, bias, context_die);
item_type = TREE_TYPE (type);
}
else if (is_base_type (type))
- mod_type_die = base_type_die (type);
+ mod_type_die = base_type_die (type, reverse);
else
{
gen_type_die (type, context_die);
types are possible in Ada. */
sub_die = modified_type_die (item_type,
TYPE_QUALS_NO_ADDR_SPACE (item_type),
+ reverse,
context_die);
if (sub_die != NULL)
add_type_attribute (tmpl_die, tmpl_type,
(TREE_THIS_VOLATILE (tmpl_type)
? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
- parent_die);
+ false, parent_die);
}
else
{
}
type_die = lookup_type_die (type);
if (!type_die)
- type_die = modified_type_die (type, TYPE_UNQUALIFIED, comp_unit_die ());
+ type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
+ comp_unit_die ());
if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
return NULL;
return type_die;
return NULL;
if (dwarf_strict
- && (GET_MODE_CLASS (op_mode) != MODE_INT
+ && (!SCALAR_INT_MODE_P (op_mode)
|| GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE))
return NULL;
if (op0 == NULL || op1 == NULL)
return NULL;
- if (GET_MODE_CLASS (op_mode) != MODE_INT
+ if (!SCALAR_INT_MODE_P (op_mode)
|| GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
return compare_loc_descriptor (op, op0, op1);
op_mode = GET_MODE (XEXP (rtl, 1));
if (op_mode == VOIDmode)
return NULL;
- if (GET_MODE_CLASS (op_mode) != MODE_INT)
+ if (!SCALAR_INT_MODE_P (op_mode))
return NULL;
if (dwarf_strict && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
dw_loc_descr_ref bra_node, drop_node;
if (dwarf_strict
- && (GET_MODE_CLASS (mode) != MODE_INT
+ && (!SCALAR_INT_MODE_P (mode)
|| GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE))
return NULL;
add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
}
}
- else if (GET_MODE_CLASS (mode) == MODE_INT
+ else if (SCALAR_INT_MODE_P (mode)
&& GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
{
int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (mode)) * BITS_PER_UNIT;
add_loc_descr (&op1, int_loc_descriptor (shift));
add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
}
- else if (GET_MODE_CLASS (mode) == MODE_INT
+ else if (SCALAR_INT_MODE_P (mode)
&& GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE)
{
dw_die_ref type_die = base_type_for_mode (mode, 0);
bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
- && GET_MODE_CLASS (mode) == MODE_INT
+ && SCALAR_INT_MODE_P (mode)
&& GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE)
ret = convert_descriptor_to_mode (mode, ret);
return ret;
dw_loc_descr_ref l4jump, l4label;
rtx msb;
- if (GET_MODE_CLASS (mode) != MODE_INT
+ if (!SCALAR_INT_MODE_P (mode)
|| GET_MODE (XEXP (rtl, 0)) != mode)
return NULL;
if (GET_CODE (rtl) != CLZ)
msb = const1_rtx;
else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
- msb = GEN_INT ((unsigned HOST_WIDE_INT) 1
+ msb = GEN_INT (HOST_WIDE_INT_1U
<< (GET_MODE_BITSIZE (mode) - 1));
else
msb = immed_wide_int_const
dw_loc_descr_ref l1jump, l1label;
dw_loc_descr_ref l2jump, l2label;
- if (GET_MODE_CLASS (mode) != MODE_INT
+ if (!SCALAR_INT_MODE_P (mode)
|| GET_MODE (XEXP (rtl, 0)) != mode)
return NULL;
dw_loc_descr_ref l1jump, l1label;
dw_loc_descr_ref l2jump, l2label;
- if (GET_MODE_CLASS (mode) != MODE_INT
+ if (!SCALAR_INT_MODE_P (mode)
|| BITS_PER_UNIT != 8
|| (GET_MODE_BITSIZE (mode) != 32
&& GET_MODE_BITSIZE (mode) != 64))
dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
int i;
- if (GET_MODE_CLASS (mode) != MODE_INT)
+ if (!SCALAR_INT_MODE_P (mode))
return NULL;
if (GET_MODE (rtlop1) != VOIDmode
if (!subreg_lowpart_p (rtl))
break;
inner = SUBREG_REG (rtl);
+ /* FALLTHRU */
case TRUNCATE:
if (inner == NULL_RTX)
inner = XEXP (rtl, 0);
- if (GET_MODE_CLASS (mode) == MODE_INT
- && GET_MODE_CLASS (GET_MODE (inner)) == MODE_INT
+ if (SCALAR_INT_MODE_P (mode)
+ && SCALAR_INT_MODE_P (GET_MODE (inner))
&& (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE
#ifdef POINTERS_EXTEND_UNSIGNED
|| (mode == Pmode && mem_mode != VOIDmode)
if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (inner)))
break;
if (GET_MODE_SIZE (mode) != GET_MODE_SIZE (GET_MODE (inner))
- && (GET_MODE_CLASS (mode) != MODE_INT
- || GET_MODE_CLASS (GET_MODE (inner)) != MODE_INT))
+ && (!SCALAR_INT_MODE_P (mode)
+ || !SCALAR_INT_MODE_P (GET_MODE (inner))))
break;
else
{
mem_mode, initialized);
if (mem_loc_result == NULL)
break;
- type_die = base_type_for_mode (mode,
- GET_MODE_CLASS (mode) == MODE_INT);
+ type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
if (type_die == NULL)
{
mem_loc_result = NULL;
cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
add_loc_descr (&mem_loc_result, cvt);
+ if (SCALAR_INT_MODE_P (mode)
+ && GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
+ {
+ /* Convert it to untyped afterwards. */
+ cvt = new_loc_descr (DW_OP_GNU_convert, 0, 0);
+ add_loc_descr (&mem_loc_result, cvt);
+ }
}
break;
case REG:
- if (GET_MODE_CLASS (mode) != MODE_INT
+ if (! SCALAR_INT_MODE_P (mode)
|| (GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE
&& rtl != arg_pointer_rtx
&& rtl != frame_pointer_rtx
break;
if (REGNO (rtl) > FIRST_PSEUDO_REGISTER)
break;
- type_die = base_type_for_mode (mode,
- GET_MODE_CLASS (mode) == MODE_INT);
+ type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
if (type_die == NULL)
break;
dbx_regnum = dbx_reg_number (rtl);
if (dbx_regnum == IGNORED_DWARF_REGNUM)
break;
- mem_loc_result = new_loc_descr (DW_OP_GNU_regval_type,
+ mem_loc_result = new_loc_descr (DW_OP_GNU_regval_type,
dbx_regnum, 0);
mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
case SIGN_EXTEND:
case ZERO_EXTEND:
- if (GET_MODE_CLASS (mode) != MODE_INT)
+ if (!SCALAR_INT_MODE_P (mode))
break;
op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
mem_mode, VAR_INIT_STATUS_INITIALIZED);
if (mem_loc_result != NULL)
{
if (GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE
- || GET_MODE_CLASS (mode) != MODE_INT)
+ || !SCALAR_INT_MODE_P (mode))
{
dw_die_ref type_die;
dw_loc_descr_ref deref;
if (dwarf_strict)
return NULL;
type_die
- = base_type_for_mode (mode, GET_MODE_CLASS (mode) == MODE_INT);
+ = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
if (type_die == NULL)
return NULL;
deref = new_loc_descr (DW_OP_GNU_deref_type,
pool. */
case CONST:
case SYMBOL_REF:
- if ((GET_MODE_CLASS (mode) != MODE_INT
- && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
+ if (!SCALAR_INT_MODE_P (mode)
|| (GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE
#ifdef POINTERS_EXTEND_UNSIGNED
&& (mode != Pmode || mem_mode == VOIDmode)
return NULL;
if (REG_P (ENTRY_VALUE_EXP (rtl)))
{
- if (GET_MODE_CLASS (mode) != MODE_INT
+ if (!SCALAR_INT_MODE_P (mode)
|| GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE)
op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
VOIDmode, VAR_INIT_STATUS_INITIALIZED);
: -GET_MODE_UNIT_SIZE (mem_mode),
mode));
- /* ... fall through ... */
+ /* fall through */
case PLUS:
plus:
&& (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE
|| XEXP (rtl, 0) == arg_pointer_rtx
|| XEXP (rtl, 0) == frame_pointer_rtx)
- && GET_MODE_CLASS (mode) == MODE_INT)
+ && SCALAR_INT_MODE_P (mode))
mem_loc_result = based_loc_descr (XEXP (rtl, 0),
INTVAL (XEXP (rtl, 1)),
VAR_INIT_STATUS_INITIALIZED);
case DIV:
if (!dwarf_strict
- && GET_MODE_CLASS (mode) == MODE_INT
+ && SCALAR_INT_MODE_P (mode)
&& GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE)
{
mem_loc_result = typed_binop (DW_OP_div, rtl,
goto do_shift;
do_shift:
- if (GET_MODE_CLASS (mode) != MODE_INT)
+ if (!SCALAR_INT_MODE_P (mode))
break;
op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
VAR_INIT_STATUS_INITIALIZED);
break;
case UDIV:
- if (!dwarf_strict && GET_MODE_CLASS (mode) == MODE_INT)
+ if (!dwarf_strict && SCALAR_INT_MODE_P (mode))
{
if (GET_MODE_SIZE (mode) > DWARF2_ADDR_SIZE)
{
|| (GET_MODE (rtl) == VOIDmode
&& GET_MODE_BITSIZE (mode) != HOST_BITS_PER_DOUBLE_INT))
break;
- type_die = base_type_for_mode (mode,
- GET_MODE_CLASS (mode) == MODE_INT);
+ type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
if (type_die == NULL)
return NULL;
mem_loc_result = new_loc_descr (DW_OP_GNU_const_type, 0, 0);
{
dw_die_ref type_die;
- type_die = base_type_for_mode (mode,
- GET_MODE_CLASS (mode) == MODE_INT);
+ type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
if (type_die == NULL)
return NULL;
mem_loc_result = new_loc_descr (DW_OP_GNU_const_type, 0, 0);
case UMIN:
case UMAX:
- if (GET_MODE_CLASS (mode) != MODE_INT)
+ if (!SCALAR_INT_MODE_P (mode))
break;
/* FALLTHRU */
case SMIN:
&& ((unsigned) INTVAL (XEXP (rtl, 1))
+ (unsigned) INTVAL (XEXP (rtl, 2))
<= GET_MODE_BITSIZE (mode))
- && GET_MODE_CLASS (mode) == MODE_INT
+ && SCALAR_INT_MODE_P (mode)
&& GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE
&& GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0))) <= DWARF2_ADDR_SIZE)
{
mem_mode, VAR_INIT_STATUS_INITIALIZED);
if (op0 == NULL)
break;
- if (GET_MODE_CLASS (GET_MODE (XEXP (rtl, 0))) == MODE_INT
+ if (SCALAR_INT_MODE_P (GET_MODE (XEXP (rtl, 0)))
&& (GET_CODE (rtl) == FLOAT
|| GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0)))
<= DWARF2_ADDR_SIZE))
cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
add_loc_descr (&op0, cvt);
- if (GET_MODE_CLASS (mode) == MODE_INT
+ if (SCALAR_INT_MODE_P (mode)
&& (GET_CODE (rtl) == FIX
|| GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE))
{
/* FALLTHRU */
do_default:
default:
- if ((GET_MODE_CLASS (mode) == MODE_INT && GET_MODE (rtl) == mode
+ if ((SCALAR_INT_MODE_P (mode)
+ && GET_MODE (rtl) == mode
&& GET_MODE_SIZE (GET_MODE (rtl)) <= DWARF2_ADDR_SIZE
&& dwarf_version >= 4)
|| (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
obj = get_inner_reference (TREE_OPERAND (loc, 0),
&bitsize, &bitpos, &offset, &mode,
- &unsignedp, &reversep, &volatilep, false);
+ &unsignedp, &reversep, &volatilep);
STRIP_NOPS (obj);
if (bitpos % BITS_PER_UNIT)
{
new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
dw_die_ref parent_die)
{
- const bool dwarf_proc_supported = dwarf_version >= 4;
dw_die_ref dwarf_proc_die;
if ((dwarf_version < 3 && dwarf_strict)
|| location == NULL)
return NULL;
- dwarf_proc_die = new_die (dwarf_proc_supported
- ? DW_TAG_dwarf_procedure
- : DW_TAG_variable,
- parent_die,
- fndecl);
+ dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
if (fndecl)
equate_decl_number_to_die (fndecl, dwarf_proc_die);
- if (!dwarf_proc_supported)
- add_AT_flag (dwarf_proc_die, DW_AT_artificial, 1);
add_AT_loc (dwarf_proc_die, DW_AT_location, location);
return dwarf_proc_die;
}
&& int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
}
-/* Helper for resolve_args_picking. Stop when coming across VISITED nodes. */
+/* Helper for resolve_args_picking: do the same but stop when coming across
+ visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
+ offset *before* evaluating the corresponding operation. */
static bool
resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
struct dwarf_procedure_info *dpi,
- hash_set<dw_loc_descr_ref> &visited)
+ hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
{
/* The "frame_offset" identifier is already used to name a macro... */
unsigned frame_offset_ = initial_frame_offset;
for (l = loc; l != NULL;)
{
+ bool existed;
+ unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
+
/* If we already met this node, there is nothing to compute anymore. */
- if (visited.add (l))
+ if (existed)
{
-#if ENABLE_CHECKING
/* Make sure that the stack size is consistent wherever the execution
flow comes from. */
- gcc_assert ((unsigned) l->dw_loc_frame_offset == frame_offset_);
-#endif
+ gcc_assert ((unsigned) l_frame_offset == frame_offset_);
break;
}
-#if ENABLE_CHECKING
- l->dw_loc_frame_offset = frame_offset_;
-#endif
+ l_frame_offset = frame_offset_;
/* If needed, relocate the picking offset with respect to the frame
offset. */
case DW_OP_swap:
case DW_OP_rot:
case DW_OP_abs:
+ case DW_OP_neg:
case DW_OP_not:
case DW_OP_plus_uconst:
case DW_OP_skip:
case DW_OP_minus:
case DW_OP_mod:
case DW_OP_mul:
- case DW_OP_neg:
case DW_OP_or:
case DW_OP_plus:
case DW_OP_shl:
if (stack_usage == NULL)
return false;
- frame_offset += *stack_usage;
+ frame_offset_ += *stack_usage;
break;
}
{
case DW_OP_bra:
if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
- visited))
+ frame_offsets))
return false;
- /* Fall through... */
+ /* Fall through. */
case DW_OP_skip:
l = l->dw_loc_oprnd1.v.val_loc;
/* Make a DFS over operations reachable through LOC (i.e. follow branch
operations) in order to resolve the operand of DW_OP_pick operations that
- target DWARF procedure arguments (DPI). Stop at already visited nodes.
- INITIAL_FRAME_OFFSET is the frame offset *before* LOC is executed. Return
- if all relocations were successful. */
+ target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
+ offset *before* LOC is executed. Return if all relocations were
+ successful. */
static bool
resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
struct dwarf_procedure_info *dpi)
{
- hash_set<dw_loc_descr_ref> visited;
+ /* Associate to all visited operations the frame offset *before* evaluating
+ this operation. */
+ hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
- return resolve_args_picking_1 (loc, initial_frame_offset, dpi, visited);
+ return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
+ frame_offsets);
}
/* Try to generate a DWARF procedure that computes the same result as FNDECL.
if (dwarf_proc_die != NULL)
return dwarf_proc_die;
- /* DWARF procedures are available starting with the DWARFv3 standard, but
- it's the DWARFv4 standard that introduces the DW_TAG_dwarf_procedure
- DIE. */
+ /* DWARF procedures are available starting with the DWARFv3 standard. */
if (dwarf_version < 3 && dwarf_strict)
return NULL;
int unsignedp, reversep, volatilep = 0;
obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
- &unsignedp, &reversep, &volatilep, false);
+ &unsignedp, &reversep, &volatilep);
gcc_assert (obj != loc);
case COMPLEX_CST:
if ((ret = cst_pool_loc_descr (loc)))
have_address = 1;
+ else if (TREE_CODE (loc) == CONSTRUCTOR)
+ {
+ tree type = TREE_TYPE (loc);
+ unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
+ unsigned HOST_WIDE_INT offset = 0;
+ unsigned HOST_WIDE_INT cnt;
+ constructor_elt *ce;
+
+ if (TREE_CODE (type) == RECORD_TYPE)
+ {
+ /* This is very limited, but it's enough to output
+ pointers to member functions, as long as the
+ referenced function is defined in the current
+ translation unit. */
+ FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
+ {
+ tree val = ce->value;
+
+ tree field = ce->index;
+
+ if (val)
+ STRIP_NOPS (val);
+
+ if (!field || DECL_BIT_FIELD (field))
+ {
+ expansion_failed (loc, NULL_RTX,
+ "bitfield in record type constructor");
+ size = offset = (unsigned HOST_WIDE_INT)-1;
+ ret = NULL;
+ break;
+ }
+
+ HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
+ unsigned HOST_WIDE_INT pos = int_byte_position (field);
+ gcc_assert (pos + fieldsize <= size);
+ if (pos < offset)
+ {
+ expansion_failed (loc, NULL_RTX,
+ "out-of-order fields in record constructor");
+ size = offset = (unsigned HOST_WIDE_INT)-1;
+ ret = NULL;
+ break;
+ }
+ if (pos > offset)
+ {
+ ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
+ add_loc_descr (&ret, ret1);
+ offset = pos;
+ }
+ if (val && fieldsize != 0)
+ {
+ ret1 = loc_descriptor_from_tree (val, want_address, context);
+ if (!ret1)
+ {
+ expansion_failed (loc, NULL_RTX,
+ "unsupported expression in field");
+ size = offset = (unsigned HOST_WIDE_INT)-1;
+ ret = NULL;
+ break;
+ }
+ add_loc_descr (&ret, ret1);
+ }
+ if (fieldsize)
+ {
+ ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
+ add_loc_descr (&ret, ret1);
+ offset = pos + fieldsize;
+ }
+ }
+
+ if (offset != size)
+ {
+ ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
+ add_loc_descr (&ret, ret1);
+ offset = size;
+ }
+
+ have_address = !!want_address;
+ }
+ else
+ expansion_failed (loc, NULL_RTX,
+ "constructor of non-record type");
+ }
else
/* We can construct small constants here using int_loc_descriptor. */
expansion_failed (loc, NULL_RTX,
TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
}
- /* ... fall through ... */
+ /* fall through */
case COND_EXPR:
{
{
loc_descr = field_byte_offset (decl, ctx, &offset);
- /* Data member location evalutation start with the base address on the
+ /* If loc_descr is available then we know the field offset is dynamic.
+ However, GDB does not handle dynamic field offsets very well at the
+ moment. */
+ if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
+ {
+ loc_descr = NULL;
+ offset = 0;
+ }
+
+ /* Data member location evaluation starts with the base address on the
stack. Compute the field offset and add it to this base address. */
- if (loc_descr != NULL)
+ else if (loc_descr != NULL)
add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
}
- /* If loc_descr is available then we know the field offset is dynamic.
- However, GDB does not handle dynamic field offsets very well at the
- moment. */
- if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
- {
- loc_descr = NULL;
- offset = 0;
- }
-
if (! loc_descr)
{
if (dwarf_version > 2)
return NULL_TREE;
cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
- &unsignedp, &reversep, &volatilep, true);
+ &unsignedp, &reversep, &volatilep);
if (cvar == NULL_TREE
|| TREE_CODE (cvar) != VAR_DECL
fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
pos = int_byte_position (field);
gcc_assert (pos + fieldsize <= size);
- if (val
+ if (val && fieldsize != 0
&& !native_encode_initializer (val, array + pos, fieldsize))
return false;
}
add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
int forms, const struct loc_descr_context *context)
{
- dw_die_ref ctx, decl_die;
+ dw_die_ref context_die, decl_die;
dw_loc_list_ref list;
bool strip_conversions = true;
return;
if (current_function_decl == 0)
- ctx = comp_unit_die ();
+ context_die = comp_unit_die ();
else
- ctx = lookup_decl_die (current_function_decl);
+ context_die = lookup_decl_die (current_function_decl);
- decl_die = new_die (DW_TAG_variable, ctx, value);
+ decl_die = new_die (DW_TAG_variable, context_die, value);
add_AT_flag (decl_die, DW_AT_artificial, 1);
- add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, ctx);
+ add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
+ context_die);
add_AT_location_description (decl_die, DW_AT_location, list);
add_AT_die_ref (die, attr, decl_die);
}
;
else
add_type_attribute (subrange_die, TREE_TYPE (domain),
- TYPE_UNQUALIFIED, type_die);
+ TYPE_UNQUALIFIED, false, type_die);
}
/* ??? If upper is NULL, the array has unspecified length,
}
/* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
- by looking in either the type declaration or object declaration
- equate table. */
+ by looking in the type declaration, the object declaration equate table or
+ the block mapping. */
static inline dw_die_ref
add_abstract_origin_attribute (dw_die_ref die, tree origin)
{
dw_die_ref origin_die = NULL;
- if (TREE_CODE (origin) != FUNCTION_DECL)
+ if (TREE_CODE (origin) != FUNCTION_DECL
+ && TREE_CODE (origin) != BLOCK)
{
/* We may have gotten separated from the block for the inlined
function, if we're in an exception handler or some such; make
origin_die = lookup_decl_die (origin);
else if (TYPE_P (origin))
origin_die = lookup_type_die (origin);
+ else if (TREE_CODE (origin) == BLOCK)
+ origin_die = BLOCK_DIE (origin);
/* XXX: Functions that are never lowered don't always have correct block
trees (in the case of java, they simply have no block tree, in some other
/* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
+static void
+add_linkage_name_raw (dw_die_ref die, tree decl)
+{
+ /* Defer until we have an assembler name set. */
+ if (!DECL_ASSEMBLER_NAME_SET_P (decl))
+ {
+ limbo_die_node *asm_name;
+
+ /* Queue DIE on deferred_asm_name; the linkage name is added
+ later (in dwarf2out_early_finish) once the assembler name of
+ DECL has been computed. */
+ asm_name = ggc_cleared_alloc<limbo_die_node> ();
+ asm_name->die = die;
+ asm_name->created_for = decl;
+ asm_name->next = deferred_asm_name;
+ deferred_asm_name = asm_name;
+ }
+ else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
+ /* Only emit the attribute when the linkage name actually differs
+ from the source name. */
+ add_linkage_attr (die, decl);
+}
+
+/* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
+
static void
add_linkage_name (dw_die_ref die, tree decl)
{
&& TREE_PUBLIC (decl)
&& !(TREE_CODE (decl) == VAR_DECL && DECL_REGISTER (decl))
&& die->die_tag != DW_TAG_member)
- {
- /* Defer until we have an assembler name set. */
- if (!DECL_ASSEMBLER_NAME_SET_P (decl))
- {
- limbo_die_node *asm_name;
-
- asm_name = ggc_cleared_alloc<limbo_die_node> ();
- asm_name->die = die;
- asm_name->created_for = decl;
- asm_name->next = deferred_asm_name;
- deferred_asm_name = asm_name;
- }
- else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
- add_linkage_attr (die, decl);
- }
+ add_linkage_name_raw (die, decl);
}
/* Add a DW_AT_name attribute and source coordinate attribute for the
static void
add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
- dw_die_ref context_die)
+ bool reverse, dw_die_ref context_die)
{
enum tree_code code = TREE_CODE (type);
dw_die_ref type_die = NULL;
type_die = modified_type_die (type,
cv_quals | TYPE_QUALS_NO_ADDR_SPACE (type),
+ reverse,
context_die);
if (type_die != NULL)
if (size >= 0)
add_AT_unsigned (array_die, DW_AT_byte_size, size);
else if (TYPE_DOMAIN (type) != NULL_TREE
- && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE
- && DECL_P (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
+ && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
{
tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
- dw_loc_list_ref loc = loc_list_from_tree (szdecl, 2, NULL);
+ tree rszdecl = szdecl;
+ HOST_WIDE_INT rsize = 0;
size = int_size_in_bytes (TREE_TYPE (szdecl));
- if (loc && size > 0)
+ if (!DECL_P (szdecl))
+ {
+ if (TREE_CODE (szdecl) == INDIRECT_REF
+ && DECL_P (TREE_OPERAND (szdecl, 0)))
+ {
+ rszdecl = TREE_OPERAND (szdecl, 0);
+ rsize = int_size_in_bytes (TREE_TYPE (rszdecl));
+ if (rsize <= 0)
+ size = 0;
+ }
+ else
+ size = 0;
+ }
+ if (size > 0)
{
- add_AT_location_description (array_die, DW_AT_string_length, loc);
- if (size != DWARF2_ADDR_SIZE)
- add_AT_unsigned (array_die, DW_AT_byte_size, size);
+ dw_loc_list_ref loc = loc_list_from_tree (szdecl, 2, NULL);
+ if (loc == NULL
+ && early_dwarf
+ && current_function_decl
+ && DECL_CONTEXT (rszdecl) == current_function_decl)
+ {
+ dw_die_ref ref = lookup_decl_die (rszdecl);
+ dw_loc_descr_ref l = NULL;
+ if (ref)
+ {
+ l = new_loc_descr (DW_OP_call4, 0, 0);
+ l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
+ l->dw_loc_oprnd1.v.val_die_ref.die = ref;
+ l->dw_loc_oprnd1.v.val_die_ref.external = 0;
+ }
+ else if (TREE_CODE (rszdecl) == PARM_DECL
+ && string_types)
+ {
+ l = new_loc_descr (DW_OP_call4, 0, 0);
+ l->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
+ l->dw_loc_oprnd1.v.val_decl_ref = rszdecl;
+ string_types->safe_push (array_die);
+ }
+ if (l && rszdecl != szdecl)
+ {
+ if (rsize == DWARF2_ADDR_SIZE)
+ add_loc_descr (&l, new_loc_descr (DW_OP_deref,
+ 0, 0));
+ else
+ add_loc_descr (&l, new_loc_descr (DW_OP_deref_size,
+ rsize, 0));
+ }
+ if (l)
+ loc = new_loc_list (l, NULL, NULL, NULL);
+ }
+ if (loc)
+ {
+ add_AT_location_description (array_die, DW_AT_string_length,
+ loc);
+ if (size != DWARF2_ADDR_SIZE)
+ add_AT_unsigned (array_die, dwarf_version >= 5
+ ? DW_AT_string_length_byte_size
+ : DW_AT_byte_size, size);
+ }
}
}
return;
element_type = TREE_TYPE (element_type);
}
- add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED, context_die);
+ add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
+ TREE_CODE (type) == ARRAY_TYPE
+ && TYPE_REVERSE_STORAGE_ORDER (type),
+ context_die);
add_gnat_descriptive_type_attribute (array_die, type, context_die);
if (TYPE_ARTIFICIAL (type))
add_pubtype (type, array_die);
}
+/* After all arguments are created, adjust any DW_TAG_string_type
+ DIEs DW_AT_string_length attributes. */
+
+static void
+adjust_string_types (void)
+{
+ dw_die_ref array_die;
+ unsigned int i;
+ FOR_EACH_VEC_ELT (*string_types, i, array_die)
+ {
+ dw_attr_node *a = get_AT (array_die, DW_AT_string_length);
+ if (a == NULL)
+ continue;
+ dw_loc_descr_ref loc = AT_loc (a);
+ /* Entries pushed onto string_types always carry a DW_OP_call4
+ whose operand is still a decl reference (a PARM_DECL that had
+ no DIE yet when the string type was created). */
+ gcc_assert (loc->dw_loc_opc == DW_OP_call4
+ && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref);
+ dw_die_ref ref = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
+ if (ref)
+ {
+ /* The argument now has a DIE; retarget the DW_OP_call4 from
+ the decl to that DIE. */
+ loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
+ loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
+ loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
+ }
+ else
+ {
+ /* Still no DIE for the decl: drop the attribute (and its
+ matching byte-size attribute) rather than keep a dangling
+ reference. */
+ remove_AT (array_die, DW_AT_string_length);
+ remove_AT (array_die, dwarf_version >= 5
+ ? DW_AT_string_length_byte_size
+ : DW_AT_byte_size);
+ }
+ }
+}
+
/* This routine generates DIE for array with hidden descriptor, details
are filled into *info by a langhook. */
if (info->dimen[dim].bounds_type)
add_type_attribute (subrange_die,
- info->dimen[dim].bounds_type, 0,
- context_die);
+ info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
+ false, context_die);
if (info->dimen[dim].lower_bound)
add_bound_info (subrange_die, DW_AT_lower_bound,
info->dimen[dim].lower_bound, &context);
gen_type_die (info->element_type, context_die);
add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
+ TREE_CODE (type) == ARRAY_TYPE
+ && TYPE_REVERSE_STORAGE_ORDER (type),
context_die);
if (get_AT (array_die, DW_AT_name))
{
add_name_and_src_coords_attributes (decl_die, decl);
add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
- TYPE_UNQUALIFIED, context_die);
+ TYPE_UNQUALIFIED, false, context_die);
}
if (DECL_ABSTRACT_P (decl))
static void
retry_incomplete_types (void)
{
+ set_early_dwarf s;
int i;
for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
gen_type_die ((*incomplete_types)[i], comp_unit_die ());
+ vec_safe_truncate (incomplete_types, 0);
}
/* Determine what tag to use for a record type. */
if (dwarf_version >= 3 || !dwarf_strict)
{
tree underlying = lang_hooks.types.enum_underlying_base_type (type);
- add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED,
+ add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
context_die);
}
if (TYPE_STUB_DECL (type) != NULL_TREE)
tree type = TREE_TYPE (node_or_origin);
if (decl_by_reference_p (node_or_origin))
add_type_attribute (parm_die, TREE_TYPE (type),
- TYPE_UNQUALIFIED, context_die);
+ TYPE_UNQUALIFIED,
+ false, context_die);
else
add_type_attribute (parm_die, type,
decl_quals (node_or_origin),
- context_die);
+ false, context_die);
}
if (origin == NULL && DECL_ARTIFICIAL (node))
add_AT_flag (parm_die, DW_AT_artificial, 1);
case tcc_type:
/* We were called with some kind of a ..._TYPE node. */
- add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED,
+ add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
context_die);
break;
dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
if (die == auto_die || die == decltype_auto_die)
add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
- TYPE_UNQUALIFIED, context_die);
+ TYPE_UNQUALIFIED, false, context_die);
}
+
+ /* When we process the method declaration, we haven't seen
+ the out-of-class defaulted definition yet, so we have to
+ recheck now. */
+ int defaulted = lang_hooks.decls.function_decl_defaulted (decl);
+ if (defaulted && (dwarf_version >= 5 || ! dwarf_strict)
+ && !get_AT (subr_die, DW_AT_defaulted))
+ switch (defaulted)
+ {
+ case 2:
+ add_AT_unsigned (subr_die, DW_AT_defaulted,
+ DW_DEFAULTED_out_of_class);
+ break;
+
+ case 1: /* This must have been handled before. */
+ default:
+ gcc_unreachable ();
+ }
}
}
/* Create a fresh DIE for anything else. */
{
add_prototyped_attribute (subr_die, TREE_TYPE (decl));
add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
- TYPE_UNQUALIFIED, context_die);
+ TYPE_UNQUALIFIED, false, context_die);
}
add_pure_or_virtual_attribute (subr_die, decl);
add_AT_flag (subr_die, DW_AT_explicit, 1);
/* If this is a C++11 deleted special function member then generate
- a DW_AT_GNU_deleted attribute. */
+ a DW_AT_deleted attribute. */
if (lang_hooks.decls.function_decl_deleted_p (decl)
- && (! dwarf_strict))
- add_AT_flag (subr_die, DW_AT_GNU_deleted, 1);
+ && (dwarf_version >= 5 || ! dwarf_strict))
+ add_AT_flag (subr_die, DW_AT_deleted, 1);
+
+ /* If this is a C++11 defaulted special function member then
+ generate a DW_AT_GNU_defaulted attribute. */
+ int defaulted = lang_hooks.decls.function_decl_defaulted (decl);
+ if (defaulted && (dwarf_version >= 5 || ! dwarf_strict))
+ switch (defaulted)
+ {
+ case 1:
+ add_AT_unsigned (subr_die, DW_AT_defaulted,
+ DW_DEFAULTED_in_class);
+ break;
+
+ /* It is likely that this will never hit, since we
+ don't have the out-of-class definition yet when we
+ process the class definition and the method
+ declaration. We recheck elsewhere, but leave it
+ here just in case. */
+ case 2:
+ add_AT_unsigned (subr_die, DW_AT_defaulted,
+ DW_DEFAULTED_out_of_class);
+ break;
+
+ default:
+ gcc_unreachable ();
+ }
}
}
/* Tag abstract instances with DW_AT_inline. */
tree generic_decl_parm = generic_decl
? DECL_ARGUMENTS (generic_decl)
: NULL;
+ auto_vec<dw_die_ref> string_types_vec;
+ if (string_types == NULL)
+ string_types = &string_types_vec;
/* Now we want to walk the list of parameters of the function and
emit their relevant DIEs.
void_type_node 2) an unprototyped function declaration (not a
definition). This just means that we have no info about the
parameters at all. */
- if (prototype_p (TREE_TYPE (decl)))
+ if (early_dwarf)
{
- /* This is the prototyped case, check for.... */
- if (stdarg_p (TREE_TYPE (decl)))
+ if (prototype_p (TREE_TYPE (decl)))
+ {
+ /* This is the prototyped case, check for.... */
+ if (stdarg_p (TREE_TYPE (decl)))
+ gen_unspecified_parameters_die (decl, subr_die);
+ }
+ else if (DECL_INITIAL (decl) == NULL_TREE)
gen_unspecified_parameters_die (decl, subr_die);
}
- else if (DECL_INITIAL (decl) == NULL_TREE)
- gen_unspecified_parameters_die (decl, subr_die);
+
+ /* Adjust DW_TAG_string_type DIEs if needed, now that all arguments
+ have DIEs. */
+ if (string_types == &string_types_vec)
+ {
+ adjust_string_types ();
+ string_types = NULL;
+ }
}
if (subr_die != old_die)
DW_TAG_common_block and DW_TAG_variable. */
loc = loc_list_from_tree (com_decl, 2, NULL);
}
- else if (DECL_EXTERNAL (decl))
+ else if (DECL_EXTERNAL (decl_or_origin))
add_AT_flag (com_die, DW_AT_declaration, 1);
if (want_pubnames ())
add_pubname_string (cnam, com_die); /* ??? needed? */
remove_AT (com_die, DW_AT_declaration);
}
var_die = new_die (DW_TAG_variable, com_die, decl);
- add_name_and_src_coords_attributes (var_die, decl);
- add_type_attribute (var_die, TREE_TYPE (decl), decl_quals (decl),
+ add_name_and_src_coords_attributes (var_die, decl_or_origin);
+ add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
+ decl_quals (decl_or_origin), false,
context_die);
add_AT_flag (var_die, DW_AT_external, 1);
if (loc)
}
add_AT_location_description (var_die, DW_AT_location, loc);
}
- else if (DECL_EXTERNAL (decl))
+ else if (DECL_EXTERNAL (decl_or_origin))
add_AT_flag (var_die, DW_AT_declaration, 1);
- equate_decl_number_to_die (decl, var_die);
+ if (decl)
+ equate_decl_number_to_die (decl, var_die);
return;
}
tree type = TREE_TYPE (decl_or_origin);
if (decl_by_reference_p (decl_or_origin))
- add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED,
+ add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
context_die);
else
- add_type_attribute (var_die, type, decl_quals (decl_or_origin),
+ add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
context_die);
}
const_die = new_die (DW_TAG_constant, context_die, decl);
equate_decl_number_to_die (decl, const_die);
add_name_and_src_coords_attributes (const_die, decl);
- add_type_attribute (const_die, type, TYPE_QUAL_CONST, context_die);
+ add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
if (TREE_PUBLIC (decl))
add_AT_flag (const_die, DW_AT_external, 1);
if (DECL_ARTIFICIAL (decl))
BLOCK_DIE (stmt) = stmt_die;
old_die = NULL;
}
+
+ tree origin = block_ultimate_origin (stmt);
+ if (origin != NULL_TREE && origin != stmt)
+ add_abstract_origin_attribute (stmt_die, origin);
}
if (old_die)
decl_die = new_die (DW_TAG_member, context_die, decl);
add_name_and_src_coords_attributes (decl_die, decl);
- add_type_attribute (decl_die, member_declared_type (decl),
- decl_quals (decl), context_die);
+ add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
+ TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
+ context_die);
if (DECL_BIT_FIELD_TYPE (decl))
{
= new_die (DW_TAG_pointer_type, scope_die_for (type, context_die), type);
equate_type_number_to_die (type, ptr_die);
- add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED,
+ add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
context_die);
add_AT_unsigned (mod_type_die, DW_AT_byte_size, PTR_SIZE);
}
ref_die = new_die (DW_TAG_reference_type, scope_die, type);
equate_type_number_to_die (type, ref_die);
- add_type_attribute (ref_die, TREE_TYPE (type), TYPE_UNQUALIFIED,
+ add_type_attribute (ref_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
context_die);
add_AT_unsigned (mod_type_die, DW_AT_byte_size, PTR_SIZE);
}
equate_type_number_to_die (type, ptr_die);
add_AT_die_ref (ptr_die, DW_AT_containing_type,
lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
- add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED,
+ add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
context_die);
}
case OPT_fpreprocessed:
case OPT_fltrans_output_list_:
case OPT_fresolution_:
+ case OPT_fdebug_prefix_map_:
/* Ignore these. */
continue;
default:
dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
struct vlr_context ctx = { type, NULL };
- add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, context_die);
+ add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
+ context_die);
add_data_member_location_attribute (die, binfo, &ctx);
if (BINFO_VIRTUAL_P (binfo))
equate_type_number_to_die (type, subr_die);
add_prototyped_attribute (subr_die, type);
- add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, context_die);
+ add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
+ context_die);
gen_formal_types_die (type, subr_die);
if (get_AT (subr_die, DW_AT_name))
is the name of the typedef decl naming the anonymous
struct. This greatly eases the work of consumers of
this debug info. */
- add_linkage_attr (lookup_type_die (type), decl);
+ add_linkage_name_raw (lookup_type_die (type), decl);
}
}
- add_type_attribute (type_die, type, decl_quals (decl), context_die);
+ add_type_attribute (type_die, type, decl_quals (decl), false,
+ context_die);
if (is_naming_typedef_decl (decl))
/* We want that all subsequent calls to lookup_type_die with
memset (&info, 0, sizeof (info));
if (lang_hooks.types.get_array_descr_info (type, &info))
{
+ /* Fortran sometimes emits array types with no dimension. */
+ gcc_assert (info.ndimensions >= 0
+ && (info.ndimensions
+ <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
gen_descr_array_type_die (type, &info, context_die);
TREE_ASM_WRITTEN (type) = 1;
return;
{
if (decl == NULL_TREE
|| TREE_CODE (decl) != TYPE_DECL
+ || DECL_NAMELESS (decl)
|| !is_tagged_type (TREE_TYPE (decl))
|| DECL_IS_BUILTIN (decl)
|| is_redundant_typedef (decl)
dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
- context_die);
+ false, context_die);
gcc_assert (type_die);
}
return type_die;
FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
{
if (!tree_add_const_value_attribute (e->die, e->arg))
- (*tmpl_value_parm_die_table)[j++] = *e;
+ {
+ dw_loc_descr_ref loc = NULL;
+ if (! early_dwarf
+ && (dwarf_version >= 5 || !dwarf_strict))
+ loc = loc_descriptor_from_tree (e->arg, 2, NULL);
+ if (loc)
+ add_AT_loc (e->die, DW_AT_location, loc);
+ else
+ (*tmpl_value_parm_die_table)[j++] = *e;
+ }
}
tmpl_value_parm_die_table->truncate (j);
}
if (!generic_type_instances)
return;
- /* We end up "recursing" into schedule_generic_params_dies_gen, so
- pretend this generation is part of "early dwarf" as well. */
- set_early_dwarf s;
-
FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
if (COMPLETE_TYPE_P (t))
gen_generic_params_dies (t);
for (; loc != NULL; loc = loc->dw_loc_next)
switch (loc->dw_loc_opc)
{
+ case DW_OP_GNU_implicit_pointer:
+ case DW_OP_GNU_convert:
+ case DW_OP_GNU_reinterpret:
+ if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
+ prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
+ break;
case DW_OP_call2:
case DW_OP_call4:
case DW_OP_call_ref:
+ case DW_OP_GNU_const_type:
+ case DW_OP_GNU_parameter_ref:
+ gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
break;
+ case DW_OP_GNU_regval_type:
+ case DW_OP_GNU_deref_type:
+ gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
+ prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
+ break;
+ case DW_OP_GNU_entry_value:
+ gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
+ prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
+ break;
default:
break;
}
}
}
+/* Return NULL if l is a DWARF expression, or first op that is not
+ valid DWARF expression. */
+
+static dw_loc_descr_ref
+non_dwarf_expression (dw_loc_descr_ref l)
+{
+ while (l)
+ {
+ /* DW_OP_reg0 .. DW_OP_reg31 name a register location, not a
+ computation, so they terminate the DWARF expression proper. */
+ if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
+ return l;
+ switch (l->dw_loc_opc)
+ {
+ /* Likewise: register, implicit-value and piece operations
+ are location-description ops, not expression ops. */
+ case DW_OP_regx:
+ case DW_OP_implicit_value:
+ case DW_OP_stack_value:
+ case DW_OP_GNU_implicit_pointer:
+ case DW_OP_GNU_parameter_ref:
+ case DW_OP_piece:
+ case DW_OP_bit_piece:
+ return l;
+ default:
+ break;
+ }
+ l = l->dw_loc_next;
+ }
+ return NULL;
+}
+
+/* Return adjusted copy of EXPR:
+ If it is empty DWARF expression, return it.
+ If it is valid non-empty DWARF expression,
+ return copy of EXPR with copy of DEREF appended to it.
+ If it is DWARF expression followed by DW_OP_reg{N,x}, return
+ copy of the DWARF expression with DW_OP_breg{N,x} <0> appended
+ and no DEREF.
+ If it is DWARF expression followed by DW_OP_stack_value, return
+ copy of the DWARF expression without anything appended.
+ Otherwise, return NULL. */
+
+static dw_loc_descr_ref
+copy_deref_exprloc (dw_loc_descr_ref expr, dw_loc_descr_ref deref)
+{
+
+ if (expr == NULL)
+ return NULL;
+
+ /* L is the first non-expression op in EXPR, if any. Anything after
+ such an op can't be handled here. */
+ dw_loc_descr_ref l = non_dwarf_expression (expr);
+ if (l && l->dw_loc_next)
+ return NULL;
+
+ if (l)
+ {
+ /* A trailing DW_OP_reg{N,x} means the value lives in a register;
+ read it via the corresponding DW_OP_breg{N,x} 0 instead of
+ dereferencing memory. */
+ if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
+ deref = new_loc_descr ((enum dwarf_location_atom)
+ (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
+ 0, 0);
+ else
+ switch (l->dw_loc_opc)
+ {
+ case DW_OP_regx:
+ deref = new_loc_descr (DW_OP_bregx,
+ l->dw_loc_oprnd1.v.val_unsigned, 0);
+ break;
+ case DW_OP_stack_value:
+ /* Value is already on the expression stack; no deref. */
+ deref = NULL;
+ break;
+ default:
+ return NULL;
+ }
+ }
+ else
+ /* Pure DWARF expression: append a fresh copy of DEREF. */
+ deref = new_loc_descr (deref->dw_loc_opc,
+ deref->dw_loc_oprnd1.v.val_int, 0);
+
+ /* Copy EXPR up to (but not including) L, then append the chosen
+ replacement op (or NULL for DW_OP_stack_value). */
+ dw_loc_descr_ref ret = NULL, *p = &ret;
+ while (expr != l)
+ {
+ *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
+ (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
+ (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
+ p = &(*p)->dw_loc_next;
+ expr = expr->dw_loc_next;
+ }
+ *p = deref;
+ return ret;
+}
+
+/* For DW_AT_string_length attribute with DW_OP_call4 reference to a variable
+ or argument, adjust it if needed and return:
+ -1 if the DW_AT_string_length attribute and DW_AT_{string_length_,}byte_size
+ attribute if present should be removed
+ 0 keep the attribute as is if the referenced var or argument has
+ only DWARF expression that covers all ranges
+ 1 if the attribute has been successfully adjusted. */
+
+static int
+optimize_string_length (dw_attr_node *a)
+{
+ dw_loc_descr_ref l = AT_loc (a), lv;
+ dw_die_ref die = l->dw_loc_oprnd1.v.val_die_ref.die;
+ dw_attr_node *av = get_AT (die, DW_AT_location);
+ dw_loc_list_ref d;
+ bool non_dwarf_expr = false;
+
+ if (av == NULL)
+ return -1;
+ /* Determine whether any range of the referenced DIE's location uses
+ ops that are not valid inside a DWARF expression (DW_OP_call4 can
+ only target a DWARF expression). */
+ switch (AT_class (av))
+ {
+ case dw_val_class_loc_list:
+ for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
+ if (d->expr && non_dwarf_expression (d->expr))
+ non_dwarf_expr = true;
+ break;
+ case dw_val_class_loc:
+ lv = AT_loc (av);
+ if (lv == NULL)
+ return -1;
+ if (non_dwarf_expression (lv))
+ non_dwarf_expr = true;
+ break;
+ default:
+ return -1;
+ }
+
+ /* If it is safe to keep DW_OP_call4 in, keep it. */
+ if (!non_dwarf_expr
+ && (l->dw_loc_next == NULL || AT_class (av) == dw_val_class_loc))
+ return 0;
+
+ /* If not dereferencing the DW_OP_call4 afterwards, we can just
+ copy over the DW_AT_location attribute from die to a. */
+ if (l->dw_loc_next == NULL)
+ {
+ a->dw_attr_val = av->dw_attr_val;
+ return 1;
+ }
+
+ /* Otherwise inline the referenced location into A, appending the
+ deref op (l->dw_loc_next) to each usable range. */
+ dw_loc_list_ref list, *p;
+ switch (AT_class (av))
+ {
+ case dw_val_class_loc_list:
+ p = &list;
+ list = NULL;
+ for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
+ {
+ lv = copy_deref_exprloc (d->expr, l->dw_loc_next);
+ if (lv)
+ {
+ *p = new_loc_list (lv, d->begin, d->end, d->section);
+ p = &(*p)->dw_loc_next;
+ }
+ }
+ /* No range could be converted: caller should drop the attribute. */
+ if (list == NULL)
+ return -1;
+ a->dw_attr_val.val_class = dw_val_class_loc_list;
+ gen_llsym (list);
+ *AT_loc_list_ptr (a) = list;
+ return 1;
+ case dw_val_class_loc:
+ lv = copy_deref_exprloc (AT_loc (av), l->dw_loc_next);
+ if (lv == NULL)
+ return -1;
+ a->dw_attr_val.v.val_loc = lv;
+ return 1;
+ default:
+ gcc_unreachable ();
+ }
+}
+
/* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
an address in .rodata section if the string literal is emitted there,
or remove the containing location list or replace DW_AT_const_value
dw_attr_node *a;
dw_loc_list_ref *curr, *start, loc;
unsigned ix;
+ bool remove_AT_byte_size = false;
FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
switch (AT_class (a))
case dw_val_class_loc:
{
dw_loc_descr_ref l = AT_loc (a);
+ /* Using DW_OP_call4 or DW_OP_call4 DW_OP_deref in
+ DW_AT_string_length is only a rough approximation; unfortunately
+ DW_AT_string_length can't be a reference to a DIE. DW_OP_call4
+ needs a DWARF expression, while DW_AT_location of the referenced
+ variable or argument might be any location description. */
+ /* Match only the two forms emitted for string lengths: a lone
+ DW_OP_call4, or DW_OP_call4 followed by exactly one DW_OP_deref
+ or DW_OP_deref_size. The comparison against DW_OP_deref_size
+ must be ==, not !=; otherwise any stray trailing opcode would be
+ accepted and the legitimate DW_OP_deref_size case rejected. */
+ if (a->dw_attr == DW_AT_string_length
+ && l
+ && l->dw_loc_opc == DW_OP_call4
+ && l->dw_loc_oprnd1.val_class == dw_val_class_die_ref
+ && (l->dw_loc_next == NULL
+ || (l->dw_loc_next->dw_loc_next == NULL
+ && (l->dw_loc_next->dw_loc_opc == DW_OP_deref
+ || l->dw_loc_next->dw_loc_opc == DW_OP_deref_size))))
+ {
+ switch (optimize_string_length (a))
+ {
+ case -1:
+ remove_AT (die, a->dw_attr);
+ ix--;
+ /* If we drop DW_AT_string_length, we need to drop also
+ DW_AT_{string_length_,}byte_size. */
+ remove_AT_byte_size = true;
+ continue;
+ default:
+ break;
+ case 1:
+ /* Even if we keep the optimized DW_AT_string_length,
+ it might have changed AT_class, so process it again. */
+ ix--;
+ continue;
+ }
+ }
/* For -gdwarf-2 don't attempt to optimize
DW_AT_data_member_location containing
DW_OP_plus_uconst - older consumers might
break;
}
+ if (remove_AT_byte_size)
+ remove_AT (die, dwarf_version >= 5
+ ? DW_AT_string_length_byte_size
+ : DW_AT_byte_size);
+
FOR_EACH_CHILD (die, c, resolve_addr (c));
}
\f
static void
flush_limbo_die_list (void)
{
- limbo_die_node *node, *next_node;
+ limbo_die_node *node;
- for (node = limbo_die_list; node; node = next_node)
+ /* get_context_die calls force_decl_die, which can put new DIEs on the
+ limbo list in LTO mode when nested functions are put in a different
+ partition than that of their parent function. */
+ while ((node = limbo_die_list))
{
dw_die_ref die = node->die;
- next_node = node->next;
+ limbo_die_list = node->next;
if (die->die_parent == NULL)
{
}
}
}
-
- limbo_die_list = NULL;
}
/* Output stuff that dwarf requires at the end of every file,
resolve_addr (comp_unit_die ());
move_marked_base_types ();
- /* Walk through the list of incomplete types again, trying once more to
- emit full debugging info for them. */
- retry_incomplete_types ();
-
if (flag_eliminate_unused_debug_types)
prune_unused_types ();
static void
dwarf2out_early_finish (void)
{
- limbo_die_node *node;
+ set_early_dwarf s;
+
+ /* Walk through the list of incomplete types again, trying once more to
+ emit full debugging info for them. */
+ retry_incomplete_types ();
+
+ /* The point here is to flush out the limbo list so that it is empty
+ and we don't need to stream it for LTO. */
+ flush_limbo_die_list ();
+
+ gen_scheduled_generic_parms_dies ();
+ gen_remaining_tmpl_value_param_die_attribute ();
/* Add DW_AT_linkage_name for all deferred DIEs. */
- for (node = deferred_asm_name; node; node = node->next)
+ for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
{
tree decl = node->created_for;
if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
}
}
deferred_asm_name = NULL;
-
- /* The point here is to flush out the limbo list so that it is empty
- and we don't need to stream it for LTO. */
- flush_limbo_die_list ();
-
- gen_scheduled_generic_parms_dies ();
- gen_remaining_tmpl_value_param_die_attribute ();
}
/* Reset all state within dwarf2out.c so that we can rerun the compiler