1 /* Basic IPA utilities for type inheritance graph construction and
3 Copyright (C) 2013-2015 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
40 This is the Gimple representation of type information of a polymorphic call.
41 It contains two parameters:
42 otr_type is a type of class whose method is called.
43 otr_token is the index into virtual table where address is taken.
46 This is the type inheritance information attached to each tree
47 RECORD_TYPE by the C++ frontend. It provides information about base
48 types and virtual tables.
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
54 Base types of a given type are enumerated by BINFO_BASE_BINFO
55 vector. Members of this vectors are not BINFOs associated
56 with a base type. Rather they are new copies of BINFOs
57 (base BINFOs). Their virtual tables may differ from
58 virtual table of the base type. Also BINFO_OFFSET specifies
59 offset of the base within the type.
61 In the case of single inheritance, the virtual table is shared
62 and BINFO_VTABLE of base BINFO is NULL. In the case of multiple
63 inheritance the individual virtual tables are pointed to by
64 BINFO_VTABLE of base binfos (that differs from BINFO_VTABLE of the
65 binfo associated with the base type).
67 BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
73 This is an index of virtual method in virtual table associated
74 to the type defining it. Token can be looked up from OBJ_TYPE_REF
75 or from DECL_VINDEX of a given virtual table.
77 polymorphic (indirect) call
78 This is callgraph representation of virtual method call. Every
79 polymorphic call contains otr_type and otr_token taken from
80 original OBJ_TYPE_REF at callgraph construction time.
84 build_type_inheritance_graph triggers a construction of the type inheritance
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
89 inserted into the graph. Also types without virtual methods are not
90 represented at all, though it may be easy to add this.
92 The inheritance graph is represented as follows:
94 Vertices are structures odr_type. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by ODR rule.
96 (the multiple type nodes appear only with linktime optimization)
98 Edges are represented by odr_type->base and odr_type->derived_types.
99 At the moment we do not track offsets of types for multiple inheritance.
102 possible_polymorphic_call_targets returns, given the parameters found in an
103 indirect polymorphic edge, all possible polymorphic call targets of the call.
105 pass_ipa_devirt performs simple speculative devirtualization.
110 #include "coretypes.h"
115 #include "fold-const.h"
116 #include "print-tree.h"
119 #include "basic-block.h"
120 #include "hard-reg-set.h"
121 #include "function.h"
125 #include "insn-config.h"
129 #include "emit-rtl.h"
133 #include "tree-pass.h"
135 #include "tree-pretty-print.h"
136 #include "ipa-utils.h"
137 #include "tree-ssa-alias.h"
138 #include "internal-fn.h"
139 #include "gimple-fold.h"
140 #include "gimple-expr.h"
142 #include "alloc-pool.h"
143 #include "symbol-summary.h"
144 #include "ipa-prop.h"
145 #include "ipa-inline.h"
146 #include "diagnostic.h"
147 #include "tree-dfa.h"
148 #include "demangle.h"
150 #include "gimple-pretty-print.h"
151 #include "stor-layout.h"
153 #include "streamer-hooks.h"
154 #include "lto-streamer.h"
156 /* Hash based set of pairs of types. */
164 struct default_hash_traits
<type_pair
> : typed_noop_remove
<type_pair
>
166 typedef type_pair value_type
;
167 typedef type_pair compare_type
;
171 return TYPE_UID (p
.first
) ^ TYPE_UID (p
.second
);
174 is_empty (type_pair p
)
176 return p
.first
== NULL
;
179 is_deleted (type_pair p ATTRIBUTE_UNUSED
)
184 equal (const type_pair
&a
, const type_pair
&b
)
186 return a
.first
==b
.first
&& a
.second
== b
.second
;
189 mark_empty (type_pair
&e
)
195 static bool odr_types_equivalent_p (tree
, tree
, bool, bool *,
196 hash_set
<type_pair
> *,
197 location_t
, location_t
);
199 static bool odr_violation_reported
= false;
202 /* Pointer set of all call targets appearing in the cache. */
203 static hash_set
<cgraph_node
*> *cached_polymorphic_call_targets
;
205 /* The node of type inheritance graph. For each type unique in
206 One Definition Rule (ODR) sense, we produce one node linking all
207 main variants of types equivalent to it, bases and derived types. */
209 struct GTY(()) odr_type_d
213 /* All bases; built only for main variants of types. */
214 vec
<odr_type
> GTY((skip
)) bases
;
215 /* All derived types with virtual methods seen in unit;
216 built only for main variants of types. */
217 vec
<odr_type
> GTY((skip
)) derived_types
;
219 /* All equivalent types, if more than one. */
220 vec
<tree
, va_gc
> *types
;
221 /* Set of all equivalent types, if NON-NULL. */
222 hash_set
<tree
> * GTY((skip
)) types_set
;
224 /* Unique ID indexing the type in odr_types array. */
226 /* Is it in anonymous namespace? */
227 bool anonymous_namespace
;
228 /* Do we know about all derivations of given type? */
229 bool all_derivations_known
;
230 /* Did we report ODR violation here? */
232 /* Set when virtual table without RTTI previaled table with. */
236 /* Return true if T is a type with linkage defined. */
/* NOTE(review): this chunk was mangled by extraction -- statements are split
   across lines and several interior lines (the function braces, the in_lto_p
   guard, and the early return statements) are missing.  Code tokens below are
   preserved byte-identically; only comments were added or fixed.  */
239 type_with_linkage_p (const_tree t
)
241 /* Builtin types do not define linkage, their TYPE_CONTEXT is NULL. */
242 if (!TYPE_CONTEXT (t
)
243 || !TYPE_NAME (t
) || TREE_CODE (TYPE_NAME (t
)) != TYPE_DECL
244 || !TYPE_STUB_DECL (t
))
247 /* In LTO do not get confused by non-C++ produced types or types built
248 with -fno-lto-odr-type-merging. */
251 /* To support -fno-lto-odr-type-merging recognize types with vtables
   (a polymorphic RECORD/UNION whose TYPE_BINFO has BINFO_VTABLE) as having
   linkage.  */
253 if (RECORD_OR_UNION_TYPE_P (t
)
254 && TYPE_BINFO (t
) && BINFO_VTABLE (TYPE_BINFO (t
)))
256 /* Do not accept any other types - we do not know if they were produced
   by the C++ front end (no assembler name was ever set).  */
258 if (!DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t
)))
/* Only RECORD/UNION/ENUMERAL types are considered to have linkage in the
   ODR sense.  */
262 return (RECORD_OR_UNION_TYPE_P (t
)
|| TREE_CODE (t
) == ENUMERAL_TYPE
);
266 /* Return true if T is in anonymous namespace.
267 This works only on those C++ types with linkage defined. */
270 type_in_anonymous_namespace_p (const_tree t
)
272 gcc_assert (type_with_linkage_p (t
));
274 /* Keep -fno-lto-odr-type-merging working by recognizing classes with vtables
275 properly into anonymous namespaces. */
276 if (RECORD_OR_UNION_TYPE_P (t
)
277 && TYPE_BINFO (t
) && BINFO_VTABLE (TYPE_BINFO (t
)))
278 return (TYPE_STUB_DECL (t
) && !TREE_PUBLIC (TYPE_STUB_DECL (t
)));
280 if (TYPE_STUB_DECL (t
) && !TREE_PUBLIC (TYPE_STUB_DECL (t
)))
282 /* C++ FE uses magic <anon> as assembler names of anonymous types.
283 verify that this match with type_in_anonymous_namespace_p. */
284 #ifdef ENABLE_CHECKING
286 gcc_assert (!strcmp ("<anon>",
287 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (TYPE_NAME (t
)))));
294 /* Return true of T is type with One Definition Rule info attached.
295 It means that either it is anonymous type or it has assembler name
299 odr_type_p (const_tree t
)
301 /* We do not have this information when not in LTO, but we do not need
302 to care, since it is used only for type merging. */
303 gcc_checking_assert (in_lto_p
|| flag_lto
);
305 /* To support -fno-lto-odr-type-merging consider types with vtables ODR. */
306 if (type_with_linkage_p (t
) && type_in_anonymous_namespace_p (t
))
309 if (TYPE_NAME (t
) && TREE_CODE (TYPE_NAME (t
)) == TYPE_DECL
310 && (DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t
))))
312 #ifdef ENABLE_CHECKING
313 /* C++ FE uses magic <anon> as assembler names of anonymous types.
314 verify that this match with type_in_anonymous_namespace_p. */
315 gcc_assert (!type_with_linkage_p (t
)
318 (DECL_ASSEMBLER_NAME (TYPE_NAME (t
))))
319 || type_in_anonymous_namespace_p (t
));
326 /* Return TRUE if all derived types of T are known and thus
327 we may consider the walk of derived type complete.
329 This is typically true only for final anonymous namespace types and types
330 defined within functions (that may be COMDAT and thus shared across units,
331 but with the same set of derived types). */
334 type_all_derivations_known_p (const_tree t
)
336 if (TYPE_FINAL_P (t
))
340 /* Non-C++ types may have IDENTIFIER_NODE here, do not crash. */
341 if (!TYPE_NAME (t
) || TREE_CODE (TYPE_NAME (t
)) != TYPE_DECL
)
343 if (type_in_anonymous_namespace_p (t
))
345 return (decl_function_context (TYPE_NAME (t
)) != NULL
);
348 /* Return TRUE if type's constructors are all visible. */
351 type_all_ctors_visible_p (tree t
)
354 && symtab
->state
>= CONSTRUCTION
355 /* We can not always use type_all_derivations_known_p.
356 For function local types we must assume case where
357 the function is COMDAT and shared in between units.
359 TODO: These cases are quite easy to get, but we need
360 to keep track of C++ privatizing via -Wno-weak
361 as well as the IPA privatizing. */
362 && type_in_anonymous_namespace_p (t
);
365 /* Return TRUE if type may have instance. */
/* NOTE(review): extraction dropped interior lines here (function braces and
   the declarations of the locals `vtable' and `vnode', plus an early return).
   Code tokens are preserved byte-identically; only comments were added.  */
368 type_possibly_instantiated_p (tree t
)
373 /* TODO: Add abstract types here. */
/* If not all constructors are visible, conservatively assume the type may be
   instantiated in some other unit.  */
374 if (!type_all_ctors_visible_p (t
))
/* Look up the vtable expression; when it is a POINTER_PLUS_EXPR (presumably
   an offset into a shared vtable -- TODO confirm against BINFO docs), strip
   two operand levels to reach the underlying vtable VAR_DECL.  */
377 vtable
= BINFO_VTABLE (TYPE_BINFO (t
));
378 if (TREE_CODE (vtable
) == POINTER_PLUS_EXPR
)
379 vtable
= TREE_OPERAND (TREE_OPERAND (vtable
, 0), 0);
380 vnode
= varpool_node::get (vtable
);
/* The type can have instances only when its vtable is actually defined in
   the symbol table.  */
381 return vnode
&& vnode
->definition
;
384 /* Hash used to unify ODR types based on their mangled name and for anonymous
387 struct odr_name_hasher
: pointer_hash
<odr_type_d
>
389 typedef union tree_node
*compare_type
;
390 static inline hashval_t
hash (const odr_type_d
*);
391 static inline bool equal (const odr_type_d
*, const tree_node
*);
392 static inline void remove (odr_type_d
*);
395 /* Has used to unify ODR types based on their associated virtual table.
396 This hash is needed to keep -fno-lto-odr-type-merging to work and contains
397 only polymorphic types. Types with mangled names are inserted to both. */
399 struct odr_vtable_hasher
:odr_name_hasher
401 static inline hashval_t
hash (const odr_type_d
*);
402 static inline bool equal (const odr_type_d
*, const tree_node
*);
405 /* Return type that was declared with T's name so that T is an
406 qualified variant of it. */
409 main_odr_variant (const_tree t
)
411 if (TYPE_NAME (t
) && TREE_CODE (TYPE_NAME (t
)) == TYPE_DECL
)
412 return TREE_TYPE (TYPE_NAME (t
));
413 /* Unnamed types and non-C++ produced types can be compared by variants. */
415 return TYPE_MAIN_VARIANT (t
);
419 can_be_name_hashed_p (tree t
)
421 return (!in_lto_p
|| odr_type_p (t
));
424 /* Hash type by its ODR name. */
427 hash_odr_name (const_tree t
)
429 gcc_checking_assert (main_odr_variant (t
) == t
);
431 /* If not in LTO, all main variants are unique, so we can do
434 return htab_hash_pointer (t
);
436 /* Anonymous types are unique. */
437 if (type_with_linkage_p (t
) && type_in_anonymous_namespace_p (t
))
438 return htab_hash_pointer (t
);
440 gcc_checking_assert (TYPE_NAME (t
)
441 && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t
)));
442 return IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (TYPE_NAME (t
)));
445 /* Return the computed hashcode for ODR_TYPE. */
448 odr_name_hasher::hash (const odr_type_d
*odr_type
)
450 return hash_odr_name (odr_type
->type
);
454 can_be_vtable_hashed_p (tree t
)
456 /* vtable hashing can distinguish only main variants. */
457 if (TYPE_MAIN_VARIANT (t
) != t
)
459 /* Anonymous namespace types are always handled by name hash. */
460 if (type_with_linkage_p (t
) && type_in_anonymous_namespace_p (t
))
462 return (TREE_CODE (t
) == RECORD_TYPE
463 && TYPE_BINFO (t
) && BINFO_VTABLE (TYPE_BINFO (t
)));
466 /* Hash type by assembler name of its vtable. */
469 hash_odr_vtable (const_tree t
)
471 tree v
= BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (t
)));
472 inchash::hash hstate
;
474 gcc_checking_assert (in_lto_p
);
475 gcc_checking_assert (!type_in_anonymous_namespace_p (t
));
476 gcc_checking_assert (TREE_CODE (t
) == RECORD_TYPE
477 && TYPE_BINFO (t
) && BINFO_VTABLE (TYPE_BINFO (t
)));
478 gcc_checking_assert (main_odr_variant (t
) == t
);
480 if (TREE_CODE (v
) == POINTER_PLUS_EXPR
)
482 add_expr (TREE_OPERAND (v
, 1), hstate
);
483 v
= TREE_OPERAND (TREE_OPERAND (v
, 0), 0);
486 hstate
.add_wide_int (IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (v
)));
487 return hstate
.end ();
490 /* Return the computed hashcode for ODR_TYPE. */
493 odr_vtable_hasher::hash (const odr_type_d
*odr_type
)
495 return hash_odr_vtable (odr_type
->type
);
498 /* For languages with One Definition Rule, work out if
499 types are the same based on their name.
501 This is non-trivial for LTO where minor differences in
502 the type representation may have prevented type merging
503 to merge two copies of otherwise equivalent type.
505 Until we start streaming mangled type names, this function works
506 only for polymorphic types.
508 When STRICT is true, we compare types by their names for purposes of
509 ODR violation warnings. When strict is false, we consider variants
510 equivalent, becuase it is all that matters for devirtualization machinery.
514 types_same_for_odr (const_tree type1
, const_tree type2
, bool strict
)
516 gcc_checking_assert (TYPE_P (type1
) && TYPE_P (type2
));
518 type1
= main_odr_variant (type1
);
519 type2
= main_odr_variant (type2
);
522 type1
= TYPE_MAIN_VARIANT (type1
);
523 type2
= TYPE_MAIN_VARIANT (type2
);
532 /* Check for anonymous namespaces. Those have !TREE_PUBLIC
533 on the corresponding TYPE_STUB_DECL. */
534 if ((type_with_linkage_p (type1
) && type_in_anonymous_namespace_p (type1
))
535 || (type_with_linkage_p (type2
) && type_in_anonymous_namespace_p (type2
)))
539 /* ODR name of the type is set in DECL_ASSEMBLER_NAME of its TYPE_NAME.
541 Ideally we should never need types without ODR names here. It can however
544 1) for builtin types that are not streamed but rebuilt in lto/lto-lang.c
545 Here testing for equivalence is safe, since their MAIN_VARIANTs are
547 2) for units streamed with -fno-lto-odr-type-merging. Here we can't
548 establish precise ODR equivalency, but for correctness we care only
549 about equivalency on complete polymorphic types. For these we can
550 compare assembler names of their virtual tables. */
551 if ((!TYPE_NAME (type1
) || !DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type1
)))
552 || (!TYPE_NAME (type2
) || !DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type2
))))
554 /* See if types are obviously different (i.e. different codes
555 or polymorphic wrt non-polymorphic). This is not strictly correct
556 for ODR violating programs, but we can't do better without streaming
558 if (TREE_CODE (type1
) != TREE_CODE (type2
))
560 if (TREE_CODE (type1
) == RECORD_TYPE
561 && (TYPE_BINFO (type1
) == NULL_TREE
)
562 != (TYPE_BINFO (type1
) == NULL_TREE
))
564 if (TREE_CODE (type1
) == RECORD_TYPE
&& TYPE_BINFO (type1
)
565 && (BINFO_VTABLE (TYPE_BINFO (type1
)) == NULL_TREE
)
566 != (BINFO_VTABLE (TYPE_BINFO (type2
)) == NULL_TREE
))
569 /* At the moment we have no way to establish ODR equivalence at LTO
570 other than comparing virtual table pointers of polymorphic types.
571 Eventually we should start saving mangled names in TYPE_NAME.
572 Then this condition will become non-trivial. */
574 if (TREE_CODE (type1
) == RECORD_TYPE
575 && TYPE_BINFO (type1
) && TYPE_BINFO (type2
)
576 && BINFO_VTABLE (TYPE_BINFO (type1
))
577 && BINFO_VTABLE (TYPE_BINFO (type2
)))
579 tree v1
= BINFO_VTABLE (TYPE_BINFO (type1
));
580 tree v2
= BINFO_VTABLE (TYPE_BINFO (type2
));
581 gcc_assert (TREE_CODE (v1
) == POINTER_PLUS_EXPR
582 && TREE_CODE (v2
) == POINTER_PLUS_EXPR
);
583 return (operand_equal_p (TREE_OPERAND (v1
, 1),
584 TREE_OPERAND (v2
, 1), 0)
585 && DECL_ASSEMBLER_NAME
586 (TREE_OPERAND (TREE_OPERAND (v1
, 0), 0))
587 == DECL_ASSEMBLER_NAME
588 (TREE_OPERAND (TREE_OPERAND (v2
, 0), 0)));
592 return (DECL_ASSEMBLER_NAME (TYPE_NAME (type1
))
593 == DECL_ASSEMBLER_NAME (TYPE_NAME (type2
)));
596 /* Return true if we can decide on ODR equivalency.
598 In non-LTO it is always decide, in LTO however it depends in the type has
601 When STRICT is false, compare main variants. */
604 types_odr_comparable (tree t1
, tree t2
, bool strict
)
607 || (strict
? (main_odr_variant (t1
) == main_odr_variant (t2
)
608 && main_odr_variant (t1
))
609 : TYPE_MAIN_VARIANT (t1
) == TYPE_MAIN_VARIANT (t2
))
610 || (odr_type_p (t1
) && odr_type_p (t2
))
611 || (TREE_CODE (t1
) == RECORD_TYPE
&& TREE_CODE (t2
) == RECORD_TYPE
612 && TYPE_BINFO (t1
) && TYPE_BINFO (t2
)
613 && polymorphic_type_binfo_p (TYPE_BINFO (t1
))
614 && polymorphic_type_binfo_p (TYPE_BINFO (t2
))));
617 /* Return true if T1 and T2 are ODR equivalent. If ODR equivalency is not
618 known, be conservative and return false. */
621 types_must_be_same_for_odr (tree t1
, tree t2
)
623 if (types_odr_comparable (t1
, t2
))
624 return types_same_for_odr (t1
, t2
);
626 return TYPE_MAIN_VARIANT (t1
) == TYPE_MAIN_VARIANT (t2
);
629 /* If T is compound type, return type it is based on. */
632 compound_type_base (const_tree t
)
634 if (TREE_CODE (t
) == ARRAY_TYPE
635 || POINTER_TYPE_P (t
)
636 || TREE_CODE (t
) == COMPLEX_TYPE
637 || VECTOR_TYPE_P (t
))
638 return TREE_TYPE (t
);
639 if (TREE_CODE (t
) == METHOD_TYPE
)
640 return TYPE_METHOD_BASETYPE (t
);
641 if (TREE_CODE (t
) == OFFSET_TYPE
)
642 return TYPE_OFFSET_BASETYPE (t
);
646 /* Return true if T is either ODR type or compound type based from it.
647 If the function return true, we know that T is a type originating from C++
648 source even at link-time. */
651 odr_or_derived_type_p (const_tree t
)
657 /* Function type is a tricky one. Basically we can consider it
658 ODR derived if return type or any of the parameters is.
659 We need to check all parameters because LTO streaming merges
660 common types (such as void) and they are not considered ODR then. */
661 if (TREE_CODE (t
) == FUNCTION_TYPE
)
663 if (TYPE_METHOD_BASETYPE (t
))
664 t
= TYPE_METHOD_BASETYPE (t
);
667 if (TREE_TYPE (t
) && odr_or_derived_type_p (TREE_TYPE (t
)))
669 for (t
= TYPE_ARG_TYPES (t
); t
; t
= TREE_CHAIN (t
))
670 if (odr_or_derived_type_p (TREE_VALUE (t
)))
676 t
= compound_type_base (t
);
682 /* Compare types T1 and T2 and return true if they are
686 odr_name_hasher::equal (const odr_type_d
*o1
, const tree_node
*t2
)
690 gcc_checking_assert (main_odr_variant (t2
) == t2
);
691 gcc_checking_assert (main_odr_variant (t1
) == t1
);
696 /* Check for anonymous namespaces. Those have !TREE_PUBLIC
697 on the corresponding TYPE_STUB_DECL. */
698 if ((type_with_linkage_p (t1
) && type_in_anonymous_namespace_p (t1
))
699 || (type_with_linkage_p (t2
) && type_in_anonymous_namespace_p (t2
)))
701 gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t1
)));
702 gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t2
)));
703 return (DECL_ASSEMBLER_NAME (TYPE_NAME (t1
))
704 == DECL_ASSEMBLER_NAME (TYPE_NAME (t2
)));
707 /* Compare types T1 and T2 and return true if they are
   equivalent by the assembler name of their virtual table.  Used by the
   vtable-based ODR hash (see odr_vtable_hasher), which keeps
   -fno-lto-odr-type-merging working.
   NOTE(review): extraction dropped interior lines here (the function braces,
   the `tree t1 = o1->type;' declaration, and an early-equality fast path);
   code tokens below are preserved byte-identically, only comments added.  */
711 odr_vtable_hasher::equal (const odr_type_d
*o1
, const tree_node
*t2
)
/* Both sides must already be main ODR variants.  */
715 gcc_checking_assert (main_odr_variant (t2
) == t2
);
716 gcc_checking_assert (main_odr_variant (t1
) == t1
);
/* Vtable-based comparison is meaningful only during LTO.  */
717 gcc_checking_assert (in_lto_p
);
718 t1
= TYPE_MAIN_VARIANT (t1
);
719 t2
= TYPE_MAIN_VARIANT (t2
);
/* Equal when both the offset into the vtable (operand 1 of the
   POINTER_PLUS_EXPR) and the assembler name of the underlying vtable
   VAR_DECL match.  */
722 tree v1
= BINFO_VTABLE (TYPE_BINFO (t1
));
723 tree v2
= BINFO_VTABLE (TYPE_BINFO (t2
));
724 return (operand_equal_p (TREE_OPERAND (v1
, 1),
725 TREE_OPERAND (v2
, 1), 0)
726 && DECL_ASSEMBLER_NAME
727 (TREE_OPERAND (TREE_OPERAND (v1
, 0), 0))
728 == DECL_ASSEMBLER_NAME
729 (TREE_OPERAND (TREE_OPERAND (v2
, 0), 0)));
732 /* Free ODR type V. */
735 odr_name_hasher::remove (odr_type_d
*v
)
738 v
->derived_types
.release ();
744 /* ODR type hash used to look up ODR type based on tree type node. */
746 typedef hash_table
<odr_name_hasher
> odr_hash_type
;
747 static odr_hash_type
*odr_hash
;
748 typedef hash_table
<odr_vtable_hasher
> odr_vtable_hash_type
;
749 static odr_vtable_hash_type
*odr_vtable_hash
;
751 /* ODR types are also stored into ODR_TYPE vector to allow consistent
752 walking. Bases appear before derived types. Vector is garbage collected
753 so we won't end up visiting empty types. */
755 static GTY(()) vec
<odr_type
, va_gc
> *odr_types_ptr
;
756 #define odr_types (*odr_types_ptr)
758 /* Set TYPE_BINFO of TYPE and its variants to BINFO. */
760 set_type_binfo (tree type
, tree binfo
)
762 for (; type
; type
= TYPE_NEXT_VARIANT (type
))
763 if (COMPLETE_TYPE_P (type
))
764 TYPE_BINFO (type
) = binfo
;
766 gcc_assert (!TYPE_BINFO (type
));
769 /* Compare T2 and T2 based on name or structure. */
772 odr_subtypes_equivalent_p (tree t1
, tree t2
,
773 hash_set
<type_pair
> *visited
,
774 location_t loc1
, location_t loc2
)
777 /* This can happen in incomplete types that should be handled earlier. */
778 gcc_assert (t1
&& t2
);
780 t1
= main_odr_variant (t1
);
781 t2
= main_odr_variant (t2
);
785 /* Anonymous namespace types must match exactly. */
786 if ((type_with_linkage_p (t1
) && type_in_anonymous_namespace_p (t1
))
787 || (type_with_linkage_p (t2
) && type_in_anonymous_namespace_p (t2
)))
790 /* For ODR types be sure to compare their names.
791 To support -wno-odr-type-merging we allow one type to be non-ODR
792 and other ODR even though it is a violation. */
793 if (types_odr_comparable (t1
, t2
, true))
795 if (!types_same_for_odr (t1
, t2
, true))
797 /* Limit recursion: If subtypes are ODR types and we know
798 that they are same, be happy. */
799 if (!odr_type_p (t1
) || !get_odr_type (t1
, true)->odr_violated
)
803 /* Component types, builtins and possibly violating ODR types
804 have to be compared structurally. */
805 if (TREE_CODE (t1
) != TREE_CODE (t2
))
807 if (AGGREGATE_TYPE_P (t1
)
808 && (TYPE_NAME (t1
) == NULL_TREE
) != (TYPE_NAME (t2
) == NULL_TREE
))
811 type_pair pair
={t1
,t2
};
812 if (TYPE_UID (t1
) > TYPE_UID (t2
))
817 if (visited
->add (pair
))
819 return odr_types_equivalent_p (t1
, t2
, false, NULL
, visited
, loc1
, loc2
);
822 /* Compare two virtual tables, PREVAILING and VTABLE and output ODR
823 violation warnings. */
826 compare_virtual_tables (varpool_node
*prevailing
, varpool_node
*vtable
)
830 if (DECL_VIRTUAL_P (prevailing
->decl
) != DECL_VIRTUAL_P (vtable
->decl
))
832 odr_violation_reported
= true;
833 if (DECL_VIRTUAL_P (prevailing
->decl
))
835 varpool_node
*tmp
= prevailing
;
839 if (warning_at (DECL_SOURCE_LOCATION
840 (TYPE_NAME (DECL_CONTEXT (vtable
->decl
))),
842 "virtual table of type %qD violates one definition rule",
843 DECL_CONTEXT (vtable
->decl
)))
844 inform (DECL_SOURCE_LOCATION (prevailing
->decl
),
845 "variable of same assembler name as the virtual table is "
846 "defined in another translation unit");
849 if (!prevailing
->definition
|| !vtable
->definition
)
852 /* If we do not stream ODR type info, do not bother to do useful compare. */
853 if (!TYPE_BINFO (DECL_CONTEXT (vtable
->decl
))
854 || !polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (vtable
->decl
))))
857 odr_type class_type
= get_odr_type (DECL_CONTEXT (vtable
->decl
), true);
859 if (class_type
->odr_violated
)
862 for (n1
= 0, n2
= 0; true; n1
++, n2
++)
864 struct ipa_ref
*ref1
, *ref2
;
867 end1
= !prevailing
->iterate_reference (n1
, ref1
);
868 end2
= !vtable
->iterate_reference (n2
, ref2
);
870 /* !DECL_VIRTUAL_P means RTTI entry;
871 We warn when RTTI is lost because non-RTTI previals; we silently
872 accept the other case. */
875 || (DECL_ASSEMBLER_NAME (ref1
->referred
->decl
)
876 != DECL_ASSEMBLER_NAME (ref2
->referred
->decl
)
877 && TREE_CODE (ref1
->referred
->decl
) == FUNCTION_DECL
))
878 && TREE_CODE (ref2
->referred
->decl
) != FUNCTION_DECL
)
880 if (!class_type
->rtti_broken
881 && warning_at (DECL_SOURCE_LOCATION
882 (TYPE_NAME (DECL_CONTEXT (vtable
->decl
))),
884 "virtual table of type %qD contains RTTI "
886 DECL_CONTEXT (vtable
->decl
)))
888 inform (DECL_SOURCE_LOCATION
889 (TYPE_NAME (DECL_CONTEXT (prevailing
->decl
))),
890 "but is prevailed by one without from other translation "
892 inform (DECL_SOURCE_LOCATION
893 (TYPE_NAME (DECL_CONTEXT (prevailing
->decl
))),
894 "RTTI will not work on this type");
895 class_type
->rtti_broken
= true;
898 end2
= !vtable
->iterate_reference (n2
, ref2
);
902 || (DECL_ASSEMBLER_NAME (ref2
->referred
->decl
)
903 != DECL_ASSEMBLER_NAME (ref1
->referred
->decl
)
904 && TREE_CODE (ref2
->referred
->decl
) == FUNCTION_DECL
))
905 && TREE_CODE (ref1
->referred
->decl
) != FUNCTION_DECL
)
908 end1
= !prevailing
->iterate_reference (n1
, ref1
);
914 /* Extra paranoia; compare the sizes. We do not have information
915 about virtual inheritance offsets, so just be sure that these
917 Do this as very last check so the not very informative error
918 is not output too often. */
919 if (DECL_SIZE (prevailing
->decl
) != DECL_SIZE (vtable
->decl
))
921 class_type
->odr_violated
= true;
922 if (warning_at (DECL_SOURCE_LOCATION
923 (TYPE_NAME (DECL_CONTEXT (vtable
->decl
))),
925 "virtual table of type %qD violates "
926 "one definition rule ",
927 DECL_CONTEXT (vtable
->decl
)))
929 inform (DECL_SOURCE_LOCATION
930 (TYPE_NAME (DECL_CONTEXT (prevailing
->decl
))),
931 "the conflicting type defined in another translation "
932 "unit has virtual table of different size");
940 if (DECL_ASSEMBLER_NAME (ref1
->referred
->decl
)
941 == DECL_ASSEMBLER_NAME (ref2
->referred
->decl
))
944 class_type
->odr_violated
= true;
946 /* If the loops above stopped on non-virtual pointer, we have
947 mismatch in RTTI information mangling. */
948 if (TREE_CODE (ref1
->referred
->decl
) != FUNCTION_DECL
949 && TREE_CODE (ref2
->referred
->decl
) != FUNCTION_DECL
)
951 if (warning_at (DECL_SOURCE_LOCATION
952 (TYPE_NAME (DECL_CONTEXT (vtable
->decl
))),
954 "virtual table of type %qD violates "
955 "one definition rule ",
956 DECL_CONTEXT (vtable
->decl
)))
958 inform (DECL_SOURCE_LOCATION
959 (TYPE_NAME (DECL_CONTEXT (prevailing
->decl
))),
960 "the conflicting type defined in another translation "
961 "unit with different RTTI information");
965 /* At this point both REF1 and REF2 points either to virtual table
966 or virtual method. If one points to virtual table and other to
967 method we can complain the same way as if one table was shorter
968 than other pointing out the extra method. */
969 if (TREE_CODE (ref1
->referred
->decl
)
970 != TREE_CODE (ref2
->referred
->decl
))
972 if (TREE_CODE (ref1
->referred
->decl
) == VAR_DECL
)
974 else if (TREE_CODE (ref2
->referred
->decl
) == VAR_DECL
)
979 class_type
->odr_violated
= true;
981 /* Complain about size mismatch. Either we have too many virutal
982 functions or too many virtual table pointers. */
987 varpool_node
*tmp
= prevailing
;
992 if (warning_at (DECL_SOURCE_LOCATION
993 (TYPE_NAME (DECL_CONTEXT (vtable
->decl
))),
995 "virtual table of type %qD violates "
996 "one definition rule",
997 DECL_CONTEXT (vtable
->decl
)))
999 if (TREE_CODE (ref1
->referring
->decl
) == FUNCTION_DECL
)
1001 inform (DECL_SOURCE_LOCATION
1002 (TYPE_NAME (DECL_CONTEXT (prevailing
->decl
))),
1003 "the conflicting type defined in another translation "
1005 inform (DECL_SOURCE_LOCATION
1006 (TYPE_NAME (DECL_CONTEXT (ref1
->referring
->decl
))),
1007 "contains additional virtual method %qD",
1008 ref1
->referred
->decl
);
1012 inform (DECL_SOURCE_LOCATION
1013 (TYPE_NAME (DECL_CONTEXT (prevailing
->decl
))),
1014 "the conflicting type defined in another translation "
1015 "unit has virtual table table with more entries");
1021 /* And in the last case we have either mistmatch in between two virtual
1022 methods or two virtual table pointers. */
1023 if (warning_at (DECL_SOURCE_LOCATION
1024 (TYPE_NAME (DECL_CONTEXT (vtable
->decl
))), OPT_Wodr
,
1025 "virtual table of type %qD violates "
1026 "one definition rule ",
1027 DECL_CONTEXT (vtable
->decl
)))
1029 if (TREE_CODE (ref1
->referred
->decl
) == FUNCTION_DECL
)
1031 inform (DECL_SOURCE_LOCATION
1032 (TYPE_NAME (DECL_CONTEXT (prevailing
->decl
))),
1033 "the conflicting type defined in another translation "
1035 gcc_assert (TREE_CODE (ref2
->referred
->decl
)
1037 inform (DECL_SOURCE_LOCATION (ref1
->referred
->decl
),
1038 "virtual method %qD", ref1
->referred
->decl
);
1039 inform (DECL_SOURCE_LOCATION (ref2
->referred
->decl
),
1040 "ought to match virtual method %qD but does not",
1041 ref2
->referred
->decl
);
1044 inform (DECL_SOURCE_LOCATION
1045 (TYPE_NAME (DECL_CONTEXT (prevailing
->decl
))),
1046 "the conflicting type defined in another translation "
1047 "unit has virtual table table with different contents");
1053 /* Output ODR violation warning about T1 and T2 with REASON.
1054 Display location of ST1 and ST2 if REASON speaks about field or
1056 If WARN is false, do nothing. Set WARNED if warning was indeed
1060 warn_odr (tree t1
, tree t2
, tree st1
, tree st2
,
1061 bool warn
, bool *warned
, const char *reason
)
1063 tree decl2
= TYPE_NAME (t2
);
1067 if (!warn
|| !TYPE_NAME(t1
))
1070 /* ODR warnings are output druing LTO streaming; we must apply location
1071 cache for potential warnings to be output correctly. */
1072 if (lto_location_cache::current_cache
)
1073 lto_location_cache::current_cache
->apply_location_cache ();
1075 if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (t1
)), OPT_Wodr
,
1076 "type %qT violates the C++ One Definition Rule",
1081 /* For FIELD_DECL support also case where one of fields is
1082 NULL - this is used when the structures have mismatching number of
1084 else if (!st1
|| TREE_CODE (st1
) == FIELD_DECL
)
1086 inform (DECL_SOURCE_LOCATION (decl2
),
1087 "a different type is defined in another translation unit");
1093 inform (DECL_SOURCE_LOCATION (st1
),
1094 "the first difference of corresponding definitions is field %qD",
1099 else if (TREE_CODE (st1
) == FUNCTION_DECL
)
1101 inform (DECL_SOURCE_LOCATION (decl2
),
1102 "a different type is defined in another translation unit");
1103 inform (DECL_SOURCE_LOCATION (st1
),
1104 "the first difference of corresponding definitions is method %qD",
1110 inform (DECL_SOURCE_LOCATION (decl2
), reason
);
1116 /* Return ture if T1 and T2 are incompatible and we want to recusively
1117 dive into them from warn_type_mismatch to give sensible answer. */
1120 type_mismatch_p (tree t1
, tree t2
)
1122 if (odr_or_derived_type_p (t1
) && odr_or_derived_type_p (t2
)
1123 && !odr_types_equivalent_p (t1
, t2
))
1125 return !types_compatible_p (t1
, t2
);
1129 /* Types T1 and T2 was found to be incompatible in a context they can't
1130 (either used to declare a symbol of same assembler name or unified by
1131 ODR rule). We already output warning about this, but if possible, output
1132 extra information on how the types mismatch.
1134 This is hard to do in general. We basically handle the common cases.
1136 If LOC1 and LOC2 are meaningful locations, use it in the case the types
1137 themselves do no thave one.*/
1140 warn_types_mismatch (tree t1
, tree t2
, location_t loc1
, location_t loc2
)
1142 /* Location of type is known only if it has TYPE_NAME and the name is
1144 location_t loc_t1
= TYPE_NAME (t1
) && TREE_CODE (TYPE_NAME (t1
)) == TYPE_DECL
1145 ? DECL_SOURCE_LOCATION (TYPE_NAME (t1
))
1147 location_t loc_t2
= TYPE_NAME (t2
) && TREE_CODE (TYPE_NAME (t2
)) == TYPE_DECL
1148 ? DECL_SOURCE_LOCATION (TYPE_NAME (t2
))
1150 bool loc_t2_useful
= false;
1152 /* With LTO it is a common case that the location of both types match.
1153 See if T2 has a location that is different from T1. If so, we will
1154 inform user about the location.
1155 Do not consider the location passed to us in LOC1/LOC2 as those are
1157 if (loc_t2
> BUILTINS_LOCATION
&& loc_t2
!= loc_t1
)
1159 if (loc_t1
<= BUILTINS_LOCATION
)
1160 loc_t2_useful
= true;
1163 expanded_location xloc1
= expand_location (loc_t1
);
1164 expanded_location xloc2
= expand_location (loc_t2
);
1166 if (strcmp (xloc1
.file
, xloc2
.file
)
1167 || xloc1
.line
!= xloc2
.line
1168 || xloc1
.column
!= xloc2
.column
)
1169 loc_t2_useful
= true;
1173 if (loc_t1
<= BUILTINS_LOCATION
)
1175 if (loc_t2
<= BUILTINS_LOCATION
)
1178 location_t loc
= loc_t1
<= BUILTINS_LOCATION
? loc_t2
: loc_t1
;
1180 /* It is a quite common bug to reference anonymous namespace type in
1181 non-anonymous namespace class. */
1182 if ((type_with_linkage_p (t1
) && type_in_anonymous_namespace_p (t1
))
1183 || (type_with_linkage_p (t2
) && type_in_anonymous_namespace_p (t2
)))
1185 if (type_with_linkage_p (t1
) && !type_in_anonymous_namespace_p (t1
))
1188 std::swap (loc_t1
, loc_t2
);
1190 gcc_assert (TYPE_NAME (t1
) && TYPE_NAME (t2
)
1191 && TREE_CODE (TYPE_NAME (t1
)) == TYPE_DECL
1192 && TREE_CODE (TYPE_NAME (t2
)) == TYPE_DECL
);
1193 /* Most of the time, the type names will match, do not be unnecesarily
1195 if (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (t1
)))
1196 != IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (t2
))))
1198 "type %qT defined in anonymous namespace can not match "
1199 "type %qT across the translation unit boundary",
1203 "type %qT defined in anonymous namespace can not match "
1204 "across the translation unit boundary",
1208 "the incompatible type defined in another translation unit");
1211 /* If types have mangled ODR names and they are different, it is most
1212 informative to output those.
1213 This also covers types defined in different namespaces. */
1214 if (TYPE_NAME (t1
) && TYPE_NAME (t2
)
1215 && TREE_CODE (TYPE_NAME (t1
)) == TYPE_DECL
1216 && TREE_CODE (TYPE_NAME (t2
)) == TYPE_DECL
1217 && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t1
))
1218 && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t2
))
1219 && DECL_ASSEMBLER_NAME (TYPE_NAME (t1
))
1220 != DECL_ASSEMBLER_NAME (TYPE_NAME (t2
)))
1222 char *name1
= xstrdup (cplus_demangle
1223 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (TYPE_NAME (t1
))),
1224 DMGL_PARAMS
| DMGL_ANSI
| DMGL_TYPES
));
1225 char *name2
= cplus_demangle
1226 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (TYPE_NAME (t2
))),
1227 DMGL_PARAMS
| DMGL_ANSI
| DMGL_TYPES
);
1228 if (name1
&& name2
&& strcmp (name1
, name2
))
1231 "type name %<%s%> should match type name %<%s%>",
1235 "the incompatible type is defined here");
1241 /* A tricky case are compound types. Often they appear the same in source
1242 code and the mismatch is dragged in by type they are build from.
1243 Look for those differences in subtypes and try to be informative. In other
1244 cases just output nothing because the source code is probably different
1245 and in this case we already output a all necessary info. */
1246 if (!TYPE_NAME (t1
) || !TYPE_NAME (t2
))
1248 if (TREE_CODE (t1
) == TREE_CODE (t2
))
1250 if (TREE_CODE (t1
) == ARRAY_TYPE
1251 && COMPLETE_TYPE_P (t1
) && COMPLETE_TYPE_P (t2
))
1253 tree i1
= TYPE_DOMAIN (t1
);
1254 tree i2
= TYPE_DOMAIN (t2
);
1257 && TYPE_MAX_VALUE (i1
)
1258 && TYPE_MAX_VALUE (i2
)
1259 && !operand_equal_p (TYPE_MAX_VALUE (i1
),
1260 TYPE_MAX_VALUE (i2
), 0))
1263 "array types have different bounds");
1267 if ((POINTER_TYPE_P (t1
) || TREE_CODE (t1
) == ARRAY_TYPE
)
1268 && type_mismatch_p (TREE_TYPE (t1
), TREE_TYPE (t2
)))
1269 warn_types_mismatch (TREE_TYPE (t1
), TREE_TYPE (t2
), loc_t1
, loc_t2
);
1270 else if (TREE_CODE (t1
) == METHOD_TYPE
1271 || TREE_CODE (t1
) == FUNCTION_TYPE
)
1273 tree parms1
= NULL
, parms2
= NULL
;
1276 if (type_mismatch_p (TREE_TYPE (t1
), TREE_TYPE (t2
)))
1278 inform (loc
, "return value type mismatch");
1279 warn_types_mismatch (TREE_TYPE (t1
), TREE_TYPE (t2
), loc_t1
,
1283 if (prototype_p (t1
) && prototype_p (t2
))
1284 for (parms1
= TYPE_ARG_TYPES (t1
), parms2
= TYPE_ARG_TYPES (t2
);
1286 parms1
= TREE_CHAIN (parms1
), parms2
= TREE_CHAIN (parms2
),
1289 if (type_mismatch_p (TREE_VALUE (parms1
), TREE_VALUE (parms2
)))
1291 if (count
== 1 && TREE_CODE (t1
) == METHOD_TYPE
)
1293 "implicit this pointer type mismatch");
1296 "type mismatch in parameter %i",
1297 count
- (TREE_CODE (t1
) == METHOD_TYPE
));
1298 warn_types_mismatch (TREE_VALUE (parms1
),
1299 TREE_VALUE (parms2
),
1304 if (parms1
|| parms2
)
1307 "types have different parameter counts");
1315 if (types_odr_comparable (t1
, t2
, true)
1316 && types_same_for_odr (t1
, t2
, true))
1318 "type %qT itself violate the C++ One Definition Rule", t1
);
1319 /* Prevent pointless warnings like "struct aa" should match "struct aa". */
1320 else if (TYPE_NAME (t1
) == TYPE_NAME (t2
)
1321 && TREE_CODE (t1
) == TREE_CODE (t2
) && !loc_t2_useful
)
1324 inform (loc_t1
, "type %qT should match type %qT",
1327 inform (loc_t2
, "the incompatible type is defined here");
1330 /* Compare T1 and T2, report ODR violations if WARN is true and set
1331 WARNED to true if anything is reported. Return true if types match.
1332 If true is returned, the types are also compatible in the sense of
1333 gimple_canonical_types_compatible_p.
1334 If LOC1 and LOC2 is not UNKNOWN_LOCATION it may be used to output a warning
1335 about the type if the type itself do not have location. */
1338 odr_types_equivalent_p (tree t1
, tree t2
, bool warn
, bool *warned
,
1339 hash_set
<type_pair
> *visited
,
1340 location_t loc1
, location_t loc2
)
1342 /* Check first for the obvious case of pointer identity. */
1345 gcc_assert (!type_with_linkage_p (t1
) || !type_in_anonymous_namespace_p (t1
));
1346 gcc_assert (!type_with_linkage_p (t2
) || !type_in_anonymous_namespace_p (t2
));
1348 /* Can't be the same type if the types don't have the same code. */
1349 if (TREE_CODE (t1
) != TREE_CODE (t2
))
1351 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1352 G_("a different type is defined in another translation unit"));
1356 if (TYPE_QUALS (t1
) != TYPE_QUALS (t2
))
1358 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1359 G_("a type with different qualifiers is defined in another "
1360 "translation unit"));
1364 if ((type_with_linkage_p (t1
) && type_in_anonymous_namespace_p (t1
))
1365 || (type_with_linkage_p (t2
) && type_in_anonymous_namespace_p (t2
)))
1367 /* We can not trip this when comparing ODR types, only when trying to
1368 match different ODR derivations from different declarations.
1369 So WARN should be always false. */
1374 if (comp_type_attributes (t1
, t2
) != 1)
1376 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1377 G_("a type with different attributes "
1378 "is defined in another translation unit"));
1382 if (TREE_CODE (t1
) == ENUMERAL_TYPE
1383 && TYPE_VALUES (t1
) && TYPE_VALUES (t2
))
1386 for (v1
= TYPE_VALUES (t1
), v2
= TYPE_VALUES (t2
);
1387 v1
&& v2
; v1
= TREE_CHAIN (v1
), v2
= TREE_CHAIN (v2
))
1389 if (TREE_PURPOSE (v1
) != TREE_PURPOSE (v2
))
1391 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1392 G_("an enum with different value name"
1393 " is defined in another translation unit"));
1396 if (TREE_VALUE (v1
) != TREE_VALUE (v2
)
1397 && !operand_equal_p (DECL_INITIAL (TREE_VALUE (v1
)),
1398 DECL_INITIAL (TREE_VALUE (v2
)), 0))
1400 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1401 G_("an enum with different values is defined"
1402 " in another translation unit"));
1408 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1409 G_("an enum with mismatching number of values "
1410 "is defined in another translation unit"));
1415 /* Non-aggregate types can be handled cheaply. */
1416 if (INTEGRAL_TYPE_P (t1
)
1417 || SCALAR_FLOAT_TYPE_P (t1
)
1418 || FIXED_POINT_TYPE_P (t1
)
1419 || TREE_CODE (t1
) == VECTOR_TYPE
1420 || TREE_CODE (t1
) == COMPLEX_TYPE
1421 || TREE_CODE (t1
) == OFFSET_TYPE
1422 || POINTER_TYPE_P (t1
))
1424 if (TYPE_PRECISION (t1
) != TYPE_PRECISION (t2
))
1426 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1427 G_("a type with different precision is defined "
1428 "in another translation unit"));
1431 if (TYPE_UNSIGNED (t1
) != TYPE_UNSIGNED (t2
))
1433 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1434 G_("a type with different signedness is defined "
1435 "in another translation unit"));
1439 if (TREE_CODE (t1
) == INTEGER_TYPE
1440 && TYPE_STRING_FLAG (t1
) != TYPE_STRING_FLAG (t2
))
1442 /* char WRT uint_8? */
1443 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1444 G_("a different type is defined in another "
1445 "translation unit"));
1449 /* For canonical type comparisons we do not want to build SCCs
1450 so we cannot compare pointed-to types. But we can, for now,
1451 require the same pointed-to type kind and match what
1452 useless_type_conversion_p would do. */
1453 if (POINTER_TYPE_P (t1
))
1455 if (TYPE_ADDR_SPACE (TREE_TYPE (t1
))
1456 != TYPE_ADDR_SPACE (TREE_TYPE (t2
)))
1458 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1459 G_("it is defined as a pointer in different address "
1460 "space in another translation unit"));
1464 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1
), TREE_TYPE (t2
),
1465 visited
, loc1
, loc2
))
1467 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1468 G_("it is defined as a pointer to different type "
1469 "in another translation unit"));
1471 warn_types_mismatch (TREE_TYPE (t1
), TREE_TYPE (t2
),
1477 if ((TREE_CODE (t1
) == VECTOR_TYPE
|| TREE_CODE (t1
) == COMPLEX_TYPE
)
1478 && !odr_subtypes_equivalent_p (TREE_TYPE (t1
), TREE_TYPE (t2
),
1479 visited
, loc1
, loc2
))
1481 /* Probably specific enough. */
1482 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1483 G_("a different type is defined "
1484 "in another translation unit"));
1486 warn_types_mismatch (TREE_TYPE (t1
), TREE_TYPE (t2
), loc1
, loc2
);
1490 /* Do type-specific comparisons. */
1491 else switch (TREE_CODE (t1
))
1495 /* Array types are the same if the element types are the same and
1496 the number of elements are the same. */
1497 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1
), TREE_TYPE (t2
),
1498 visited
, loc1
, loc2
))
1500 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1501 G_("a different type is defined in another "
1502 "translation unit"));
1504 warn_types_mismatch (TREE_TYPE (t1
), TREE_TYPE (t2
), loc1
, loc2
);
1506 gcc_assert (TYPE_STRING_FLAG (t1
) == TYPE_STRING_FLAG (t2
));
1507 gcc_assert (TYPE_NONALIASED_COMPONENT (t1
)
1508 == TYPE_NONALIASED_COMPONENT (t2
));
1510 tree i1
= TYPE_DOMAIN (t1
);
1511 tree i2
= TYPE_DOMAIN (t2
);
1513 /* For an incomplete external array, the type domain can be
1514 NULL_TREE. Check this condition also. */
1515 if (i1
== NULL_TREE
|| i2
== NULL_TREE
)
1518 tree min1
= TYPE_MIN_VALUE (i1
);
1519 tree min2
= TYPE_MIN_VALUE (i2
);
1520 tree max1
= TYPE_MAX_VALUE (i1
);
1521 tree max2
= TYPE_MAX_VALUE (i2
);
1523 /* In C++, minimums should be always 0. */
1524 gcc_assert (min1
== min2
);
1525 if (!operand_equal_p (max1
, max2
, 0))
1527 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1528 G_("an array of different size is defined "
1529 "in another translation unit"));
1537 /* Function types are the same if the return type and arguments types
1539 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1
), TREE_TYPE (t2
),
1540 visited
, loc1
, loc2
))
1542 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1543 G_("has different return value "
1544 "in another translation unit"));
1546 warn_types_mismatch (TREE_TYPE (t1
), TREE_TYPE (t2
), loc1
, loc2
);
1550 if (TYPE_ARG_TYPES (t1
) == TYPE_ARG_TYPES (t2
)
1551 || !prototype_p (t1
) || !prototype_p (t2
))
1555 tree parms1
, parms2
;
1557 for (parms1
= TYPE_ARG_TYPES (t1
), parms2
= TYPE_ARG_TYPES (t2
);
1559 parms1
= TREE_CHAIN (parms1
), parms2
= TREE_CHAIN (parms2
))
1561 if (!odr_subtypes_equivalent_p
1562 (TREE_VALUE (parms1
), TREE_VALUE (parms2
), visited
,
1565 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1566 G_("has different parameters in another "
1567 "translation unit"));
1569 warn_types_mismatch (TREE_VALUE (parms1
),
1570 TREE_VALUE (parms2
), loc1
, loc2
);
1575 if (parms1
|| parms2
)
1577 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1578 G_("has different parameters "
1579 "in another translation unit"));
1588 case QUAL_UNION_TYPE
:
1592 /* For aggregate types, all the fields must be the same. */
1593 if (COMPLETE_TYPE_P (t1
) && COMPLETE_TYPE_P (t2
))
1595 if (TYPE_BINFO (t1
) && TYPE_BINFO (t2
)
1596 && polymorphic_type_binfo_p (TYPE_BINFO (t1
))
1597 != polymorphic_type_binfo_p (TYPE_BINFO (t2
)))
1599 if (polymorphic_type_binfo_p (TYPE_BINFO (t1
)))
1600 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1601 G_("a type defined in another translation unit "
1602 "is not polymorphic"));
1604 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1605 G_("a type defined in another translation unit "
1609 for (f1
= TYPE_FIELDS (t1
), f2
= TYPE_FIELDS (t2
);
1611 f1
= TREE_CHAIN (f1
), f2
= TREE_CHAIN (f2
))
1613 /* Skip non-fields. */
1614 while (f1
&& TREE_CODE (f1
) != FIELD_DECL
)
1615 f1
= TREE_CHAIN (f1
);
1616 while (f2
&& TREE_CODE (f2
) != FIELD_DECL
)
1617 f2
= TREE_CHAIN (f2
);
1620 if (DECL_VIRTUAL_P (f1
) != DECL_VIRTUAL_P (f2
))
1622 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1623 G_("a type with different virtual table pointers"
1624 " is defined in another translation unit"));
1627 if (DECL_ARTIFICIAL (f1
) != DECL_ARTIFICIAL (f2
))
1629 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1630 G_("a type with different bases is defined "
1631 "in another translation unit"));
1634 if (DECL_NAME (f1
) != DECL_NAME (f2
)
1635 && !DECL_ARTIFICIAL (f1
))
1637 warn_odr (t1
, t2
, f1
, f2
, warn
, warned
,
1638 G_("a field with different name is defined "
1639 "in another translation unit"));
1642 if (!odr_subtypes_equivalent_p (TREE_TYPE (f1
),
1643 TREE_TYPE (f2
), visited
,
1646 /* Do not warn about artificial fields and just go into
1647 generic field mismatch warning. */
1648 if (DECL_ARTIFICIAL (f1
))
1651 warn_odr (t1
, t2
, f1
, f2
, warn
, warned
,
1652 G_("a field of same name but different type "
1653 "is defined in another translation unit"));
1655 warn_types_mismatch (TREE_TYPE (f1
), TREE_TYPE (f2
), loc1
, loc2
);
1658 if (!gimple_compare_field_offset (f1
, f2
))
1660 /* Do not warn about artificial fields and just go into
1661 generic field mismatch warning. */
1662 if (DECL_ARTIFICIAL (f1
))
1664 warn_odr (t1
, t2
, f1
, f2
, warn
, warned
,
1665 G_("fields has different layout "
1666 "in another translation unit"));
1669 gcc_assert (DECL_NONADDRESSABLE_P (f1
)
1670 == DECL_NONADDRESSABLE_P (f2
));
1673 /* If one aggregate has more fields than the other, they
1674 are not the same. */
1677 if ((f1
&& DECL_VIRTUAL_P (f1
)) || (f2
&& DECL_VIRTUAL_P (f2
)))
1678 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1679 G_("a type with different virtual table pointers"
1680 " is defined in another translation unit"));
1681 else if ((f1
&& DECL_ARTIFICIAL (f1
))
1682 || (f2
&& DECL_ARTIFICIAL (f2
)))
1683 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1684 G_("a type with different bases is defined "
1685 "in another translation unit"));
1687 warn_odr (t1
, t2
, f1
, f2
, warn
, warned
,
1688 G_("a type with different number of fields "
1689 "is defined in another translation unit"));
1693 if ((TYPE_MAIN_VARIANT (t1
) == t1
|| TYPE_MAIN_VARIANT (t2
) == t2
)
1694 && COMPLETE_TYPE_P (TYPE_MAIN_VARIANT (t1
))
1695 && COMPLETE_TYPE_P (TYPE_MAIN_VARIANT (t2
))
1696 && odr_type_p (TYPE_MAIN_VARIANT (t1
))
1697 && odr_type_p (TYPE_MAIN_VARIANT (t2
))
1698 && (TYPE_METHODS (TYPE_MAIN_VARIANT (t1
))
1699 != TYPE_METHODS (TYPE_MAIN_VARIANT (t2
))))
1701 /* Currently free_lang_data sets TYPE_METHODS to error_mark_node
1702 if it is non-NULL so this loop will never realy execute. */
1703 if (TYPE_METHODS (TYPE_MAIN_VARIANT (t1
)) != error_mark_node
1704 && TYPE_METHODS (TYPE_MAIN_VARIANT (t2
)) != error_mark_node
)
1705 for (f1
= TYPE_METHODS (TYPE_MAIN_VARIANT (t1
)),
1706 f2
= TYPE_METHODS (TYPE_MAIN_VARIANT (t2
));
1707 f1
&& f2
; f1
= DECL_CHAIN (f1
), f2
= DECL_CHAIN (f2
))
1709 if (DECL_ASSEMBLER_NAME (f1
) != DECL_ASSEMBLER_NAME (f2
))
1711 warn_odr (t1
, t2
, f1
, f2
, warn
, warned
,
1712 G_("a different method of same type "
1713 "is defined in another "
1714 "translation unit"));
1717 if (DECL_VIRTUAL_P (f1
) != DECL_VIRTUAL_P (f2
))
1719 warn_odr (t1
, t2
, f1
, f2
, warn
, warned
,
1720 G_("s definition that differs by virtual "
1721 "keyword in another translation unit"));
1724 if (DECL_VINDEX (f1
) != DECL_VINDEX (f2
))
1726 warn_odr (t1
, t2
, f1
, f2
, warn
, warned
,
1727 G_("virtual table layout differs "
1728 "in another translation unit"));
1731 if (odr_subtypes_equivalent_p (TREE_TYPE (f1
),
1732 TREE_TYPE (f2
), visited
,
1735 warn_odr (t1
, t2
, f1
, f2
, warn
, warned
,
1736 G_("method with incompatible type is "
1737 "defined in another translation unit"));
1741 if ((f1
== NULL
) != (f2
== NULL
))
1743 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1744 G_("a type with different number of methods "
1745 "is defined in another translation unit"));
1761 /* Those are better to come last as they are utterly uninformative. */
1762 if (TYPE_SIZE (t1
) && TYPE_SIZE (t2
)
1763 && !operand_equal_p (TYPE_SIZE (t1
), TYPE_SIZE (t2
), 0))
1765 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1766 G_("a type with different size "
1767 "is defined in another translation unit"));
1770 if (COMPLETE_TYPE_P (t1
) && COMPLETE_TYPE_P (t2
)
1771 && TYPE_ALIGN (t1
) != TYPE_ALIGN (t2
))
1773 warn_odr (t1
, t2
, NULL
, NULL
, warn
, warned
,
1774 G_("a type with different alignment "
1775 "is defined in another translation unit"));
1778 gcc_assert (!TYPE_SIZE_UNIT (t1
) || !TYPE_SIZE_UNIT (t2
)
1779 || operand_equal_p (TYPE_SIZE_UNIT (t1
),
1780 TYPE_SIZE_UNIT (t2
), 0));
1784 /* Return true if TYPE1 and TYPE2 are equivalent for One Definition Rule. */
1787 odr_types_equivalent_p (tree type1
, tree type2
)
1789 hash_set
<type_pair
> visited
;
1791 #ifdef ENABLE_CHECKING
1792 gcc_assert (odr_or_derived_type_p (type1
) && odr_or_derived_type_p (type2
));
1794 return odr_types_equivalent_p (type1
, type2
, false, NULL
,
1795 &visited
, UNKNOWN_LOCATION
, UNKNOWN_LOCATION
);
1798 /* TYPE is equivalent to VAL by ODR, but its tree representation differs
1799 from VAL->type. This may happen in LTO where tree merging did not merge
1800 all variants of the same type or due to ODR violation.
1802 Analyze and report ODR violations and add type to duplicate list.
1803 If TYPE is more specified than VAL->type, prevail VAL->type. Also if
1804 this is first time we see definition of a class return true so the
1805 base types are analyzed. */
1808 add_type_duplicate (odr_type val
, tree type
)
1810 bool build_bases
= false;
1811 bool prevail
= false;
1812 bool odr_must_violate
= false;
1814 if (!val
->types_set
)
1815 val
->types_set
= new hash_set
<tree
>;
1817 /* Chose polymorphic type as leader (this happens only in case of ODR
1819 if ((TREE_CODE (type
) == RECORD_TYPE
&& TYPE_BINFO (type
)
1820 && polymorphic_type_binfo_p (TYPE_BINFO (type
)))
1821 && (TREE_CODE (val
->type
) != RECORD_TYPE
|| !TYPE_BINFO (val
->type
)
1822 || !polymorphic_type_binfo_p (TYPE_BINFO (val
->type
))))
1827 /* Always prefer complete type to be the leader. */
1828 else if (!COMPLETE_TYPE_P (val
->type
) && COMPLETE_TYPE_P (type
))
1831 build_bases
= TYPE_BINFO (type
);
1833 else if (COMPLETE_TYPE_P (val
->type
) && !COMPLETE_TYPE_P (type
))
1835 else if (TREE_CODE (val
->type
) == ENUMERAL_TYPE
1836 && TREE_CODE (type
) == ENUMERAL_TYPE
1837 && !TYPE_VALUES (val
->type
) && TYPE_VALUES (type
))
1839 else if (TREE_CODE (val
->type
) == RECORD_TYPE
1840 && TREE_CODE (type
) == RECORD_TYPE
1841 && TYPE_BINFO (type
) && !TYPE_BINFO (val
->type
))
1843 gcc_assert (!val
->bases
.length ());
1849 std::swap (val
->type
, type
);
1851 val
->types_set
->add (type
);
1853 /* If we now have a mangled name, be sure to record it to val->type
1854 so ODR hash can work. */
1856 if (can_be_name_hashed_p (type
) && !can_be_name_hashed_p (val
->type
))
1857 SET_DECL_ASSEMBLER_NAME (TYPE_NAME (val
->type
),
1858 DECL_ASSEMBLER_NAME (TYPE_NAME (type
)));
1861 bool base_mismatch
= false;
1863 bool warned
= false;
1864 hash_set
<type_pair
> visited
;
1866 gcc_assert (in_lto_p
);
1867 vec_safe_push (val
->types
, type
);
1869 /* If both are class types, compare the bases. */
1870 if (COMPLETE_TYPE_P (type
) && COMPLETE_TYPE_P (val
->type
)
1871 && TREE_CODE (val
->type
) == RECORD_TYPE
1872 && TREE_CODE (type
) == RECORD_TYPE
1873 && TYPE_BINFO (val
->type
) && TYPE_BINFO (type
))
1875 if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type
))
1876 != BINFO_N_BASE_BINFOS (TYPE_BINFO (val
->type
)))
1878 if (!flag_ltrans
&& !warned
&& !val
->odr_violated
)
1881 warn_odr (type
, val
->type
, NULL
, NULL
, !warned
, &warned
,
1882 "a type with the same name but different "
1883 "number of polymorphic bases is "
1884 "defined in another translation unit");
1887 if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type
))
1888 > BINFO_N_BASE_BINFOS (TYPE_BINFO (val
->type
)))
1889 extra_base
= BINFO_BASE_BINFO
1891 BINFO_N_BASE_BINFOS (TYPE_BINFO (val
->type
)));
1893 extra_base
= BINFO_BASE_BINFO
1894 (TYPE_BINFO (val
->type
),
1895 BINFO_N_BASE_BINFOS (TYPE_BINFO (type
)));
1896 tree extra_base_type
= BINFO_TYPE (extra_base
);
1897 inform (DECL_SOURCE_LOCATION (TYPE_NAME (extra_base_type
)),
1898 "the extra base is defined here");
1901 base_mismatch
= true;
1904 for (i
= 0; i
< BINFO_N_BASE_BINFOS (TYPE_BINFO (type
)); i
++)
1906 tree base1
= BINFO_BASE_BINFO (TYPE_BINFO (type
), i
);
1907 tree base2
= BINFO_BASE_BINFO (TYPE_BINFO (val
->type
), i
);
1908 tree type1
= BINFO_TYPE (base1
);
1909 tree type2
= BINFO_TYPE (base2
);
1911 if (types_odr_comparable (type1
, type2
))
1913 if (!types_same_for_odr (type1
, type2
))
1914 base_mismatch
= true;
1917 if (!odr_types_equivalent_p (type1
, type2
))
1918 base_mismatch
= true;
1921 if (!warned
&& !val
->odr_violated
)
1923 warn_odr (type
, val
->type
, NULL
, NULL
,
1925 "a type with the same name but different base "
1926 "type is defined in another translation unit");
1928 warn_types_mismatch (type1
, type2
,
1929 UNKNOWN_LOCATION
, UNKNOWN_LOCATION
);
1933 if (BINFO_OFFSET (base1
) != BINFO_OFFSET (base2
))
1935 base_mismatch
= true;
1936 if (!warned
&& !val
->odr_violated
)
1937 warn_odr (type
, val
->type
, NULL
, NULL
,
1939 "a type with the same name but different base "
1940 "layout is defined in another translation unit");
1943 /* One of bases is not of complete type. */
1944 if (!TYPE_BINFO (type1
) != !TYPE_BINFO (type2
))
1946 /* If we have a polymorphic type info specified for TYPE1
1947 but not for TYPE2 we possibly missed a base when recording
1949 Be sure this does not happen. */
1950 if (TYPE_BINFO (type1
)
1951 && polymorphic_type_binfo_p (TYPE_BINFO (type1
))
1953 odr_must_violate
= true;
1956 /* One base is polymorphic and the other not.
1957 This ought to be diagnosed earlier, but do not ICE in the
1959 else if (TYPE_BINFO (type1
)
1960 && polymorphic_type_binfo_p (TYPE_BINFO (type1
))
1961 != polymorphic_type_binfo_p (TYPE_BINFO (type2
)))
1963 if (!warned
&& !val
->odr_violated
)
1964 warn_odr (type
, val
->type
, NULL
, NULL
,
1966 "a base of the type is polymorphic only in one "
1967 "translation unit");
1968 base_mismatch
= true;
1975 odr_violation_reported
= true;
1976 val
->odr_violated
= true;
1978 if (symtab
->dump_file
)
1980 fprintf (symtab
->dump_file
, "ODR base violation\n");
1982 print_node (symtab
->dump_file
, "", val
->type
, 0);
1983 putc ('\n',symtab
->dump_file
);
1984 print_node (symtab
->dump_file
, "", type
, 0);
1985 putc ('\n',symtab
->dump_file
);
1990 /* Next compare memory layout. */
1991 if (!odr_types_equivalent_p (val
->type
, type
,
1992 !flag_ltrans
&& !val
->odr_violated
&& !warned
,
1994 DECL_SOURCE_LOCATION (TYPE_NAME (val
->type
)),
1995 DECL_SOURCE_LOCATION (TYPE_NAME (type
))))
1998 odr_violation_reported
= true;
1999 val
->odr_violated
= true;
2000 if (symtab
->dump_file
)
2002 fprintf (symtab
->dump_file
, "ODR violation\n");
2004 print_node (symtab
->dump_file
, "", val
->type
, 0);
2005 putc ('\n',symtab
->dump_file
);
2006 print_node (symtab
->dump_file
, "", type
, 0);
2007 putc ('\n',symtab
->dump_file
);
2010 gcc_assert (val
->odr_violated
|| !odr_must_violate
);
2011 /* Sanity check that all bases will be build same way again. */
2012 #ifdef ENABLE_CHECKING
2013 if (COMPLETE_TYPE_P (type
) && COMPLETE_TYPE_P (val
->type
)
2014 && TREE_CODE (val
->type
) == RECORD_TYPE
2015 && TREE_CODE (type
) == RECORD_TYPE
2016 && TYPE_BINFO (val
->type
) && TYPE_BINFO (type
)
2017 && !val
->odr_violated
2018 && !base_mismatch
&& val
->bases
.length ())
2020 unsigned int num_poly_bases
= 0;
2023 for (i
= 0; i
< BINFO_N_BASE_BINFOS (TYPE_BINFO (type
)); i
++)
2024 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
2025 (TYPE_BINFO (type
), i
)))
2027 gcc_assert (num_poly_bases
== val
->bases
.length ());
2028 for (j
= 0, i
= 0; i
< BINFO_N_BASE_BINFOS (TYPE_BINFO (type
));
2030 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
2031 (TYPE_BINFO (type
), i
)))
2033 odr_type base
= get_odr_type
2035 (BINFO_BASE_BINFO (TYPE_BINFO (type
),
2038 gcc_assert (val
->bases
[j
] == base
);
2045 /* Regularize things a little. During LTO same types may come with
2046 different BINFOs. Either because their virtual table was
2047 not merged by tree merging and only later at decl merging or
2048 because one type comes with external vtable, while other
2049 with internal. We want to merge equivalent binfos to conserve
2050 memory and streaming overhead.
2052 The external vtables are more harmful: they contain references
2053 to external declarations of methods that may be defined in the
2054 merged LTO unit. For this reason we absolutely need to remove
2055 them and replace by internal variants. Not doing so will lead
2056 to incomplete answers from possible_polymorphic_call_targets.
2058 FIXME: disable for now; because ODR types are now build during
2059 streaming in, the variants do not need to be linked to the type,
2060 yet. We need to do the merging in cleanup pass to be implemented
2062 if (!flag_ltrans
&& merge
2064 && TREE_CODE (val
->type
) == RECORD_TYPE
2065 && TREE_CODE (type
) == RECORD_TYPE
2066 && TYPE_BINFO (val
->type
) && TYPE_BINFO (type
)
2067 && TYPE_MAIN_VARIANT (type
) == type
2068 && TYPE_MAIN_VARIANT (val
->type
) == val
->type
2069 && BINFO_VTABLE (TYPE_BINFO (val
->type
))
2070 && BINFO_VTABLE (TYPE_BINFO (type
)))
2072 tree master_binfo
= TYPE_BINFO (val
->type
);
2073 tree v1
= BINFO_VTABLE (master_binfo
);
2074 tree v2
= BINFO_VTABLE (TYPE_BINFO (type
));
2076 if (TREE_CODE (v1
) == POINTER_PLUS_EXPR
)
2078 gcc_assert (TREE_CODE (v2
) == POINTER_PLUS_EXPR
2079 && operand_equal_p (TREE_OPERAND (v1
, 1),
2080 TREE_OPERAND (v2
, 1), 0));
2081 v1
= TREE_OPERAND (TREE_OPERAND (v1
, 0), 0);
2082 v2
= TREE_OPERAND (TREE_OPERAND (v2
, 0), 0);
2084 gcc_assert (DECL_ASSEMBLER_NAME (v1
)
2085 == DECL_ASSEMBLER_NAME (v2
));
2087 if (DECL_EXTERNAL (v1
) && !DECL_EXTERNAL (v2
))
2091 set_type_binfo (val
->type
, TYPE_BINFO (type
));
2092 for (i
= 0; i
< val
->types
->length (); i
++)
2094 if (TYPE_BINFO ((*val
->types
)[i
])
2096 set_type_binfo ((*val
->types
)[i
], TYPE_BINFO (type
));
2098 BINFO_TYPE (TYPE_BINFO (type
)) = val
->type
;
2101 set_type_binfo (type
, master_binfo
);
2106 /* Get ODR type hash entry for TYPE. If INSERT is true, create
2107 possibly new entry. */
2110 get_odr_type (tree type
, bool insert
)
2112 odr_type_d
**slot
= NULL
;
2113 odr_type_d
**vtable_slot
= NULL
;
2114 odr_type val
= NULL
;
2116 bool build_bases
= false;
2117 bool insert_to_odr_array
= false;
2120 type
= main_odr_variant (type
);
2122 gcc_checking_assert (can_be_name_hashed_p (type
)
2123 || can_be_vtable_hashed_p (type
));
2125 /* Lookup entry, first try name hash, fallback to vtable hash. */
2126 if (can_be_name_hashed_p (type
))
2128 hash
= hash_odr_name (type
);
2129 slot
= odr_hash
->find_slot_with_hash (type
, hash
,
2130 insert
? INSERT
: NO_INSERT
);
2132 if ((!slot
|| !*slot
) && in_lto_p
&& can_be_vtable_hashed_p (type
))
2134 hash
= hash_odr_vtable (type
);
2135 vtable_slot
= odr_vtable_hash
->find_slot_with_hash (type
, hash
,
2136 insert
? INSERT
: NO_INSERT
);
2139 if (!slot
&& !vtable_slot
)
2142 /* See if we already have entry for type. */
2143 if ((slot
&& *slot
) || (vtable_slot
&& *vtable_slot
))
2148 #ifdef ENABLE_CHECKING
2149 if (in_lto_p
&& can_be_vtable_hashed_p (type
))
2151 hash
= hash_odr_vtable (type
);
2152 vtable_slot
= odr_vtable_hash
->find_slot_with_hash (type
, hash
,
2154 gcc_assert (!vtable_slot
|| *vtable_slot
== *slot
);
2159 else if (*vtable_slot
)
2162 if (val
->type
!= type
2163 && (!val
->types_set
|| !val
->types_set
->add (type
)))
2165 gcc_assert (insert
);
2166 /* We have type duplicate, but it may introduce vtable name or
2167 mangled name; be sure to keep hashes in sync. */
2168 if (in_lto_p
&& can_be_vtable_hashed_p (type
)
2169 && (!vtable_slot
|| !*vtable_slot
))
2173 hash
= hash_odr_vtable (type
);
2174 vtable_slot
= odr_vtable_hash
->find_slot_with_hash
2175 (type
, hash
, INSERT
);
2176 gcc_checking_assert (!*vtable_slot
|| *vtable_slot
== val
);
2182 build_bases
= add_type_duplicate (val
, type
);
2187 val
= ggc_cleared_alloc
<odr_type_d
> ();
2190 val
->derived_types
= vNULL
;
2191 if (type_with_linkage_p (type
))
2192 val
->anonymous_namespace
= type_in_anonymous_namespace_p (type
);
2194 val
->anonymous_namespace
= 0;
2195 build_bases
= COMPLETE_TYPE_P (val
->type
);
2196 insert_to_odr_array
= true;
2203 if (build_bases
&& TREE_CODE (type
) == RECORD_TYPE
&& TYPE_BINFO (type
)
2204 && type_with_linkage_p (type
)
2205 && type
== TYPE_MAIN_VARIANT (type
))
2207 tree binfo
= TYPE_BINFO (type
);
2210 gcc_assert (BINFO_TYPE (TYPE_BINFO (val
->type
)) == type
);
2212 val
->all_derivations_known
= type_all_derivations_known_p (type
);
2213 for (i
= 0; i
< BINFO_N_BASE_BINFOS (binfo
); i
++)
2214 /* For now record only polymorphic types. other are
2215 pointless for devirtualization and we can not precisely
2216 determine ODR equivalency of these during LTO. */
2217 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo
, i
)))
2219 tree base_type
= BINFO_TYPE (BINFO_BASE_BINFO (binfo
, i
));
2220 odr_type base
= get_odr_type (base_type
, true);
2221 gcc_assert (TYPE_MAIN_VARIANT (base_type
) == base_type
);
2222 base
->derived_types
.safe_push (val
);
2223 val
->bases
.safe_push (base
);
2224 if (base
->id
> base_id
)
2228 /* Ensure that type always appears after bases. */
2229 if (insert_to_odr_array
)
2232 val
->id
= odr_types
.length ();
2233 vec_safe_push (odr_types_ptr
, val
);
2235 else if (base_id
> val
->id
)
2237 odr_types
[val
->id
] = 0;
2238 /* Be sure we did not recorded any derived types; these may need
2240 gcc_assert (val
->derived_types
.length() == 0);
2242 val
->id
= odr_types
.length ();
2243 vec_safe_push (odr_types_ptr
, val
);
2248 /* Add TYPE od ODR type hash. */
2251 register_odr_type (tree type
)
2255 odr_hash
= new odr_hash_type (23);
2257 odr_vtable_hash
= new odr_vtable_hash_type (23);
2259 /* Arrange things to be nicer and insert main variants first.
2260 ??? fundamental prerecorded types do not have mangled names; this
2261 makes it possible that non-ODR type is main_odr_variant of ODR type.
2262 Things may get smoother if LTO FE set mangled name of those types same
2263 way as C++ FE does. */
2264 if (odr_type_p (main_odr_variant (TYPE_MAIN_VARIANT (type
)))
2265 && odr_type_p (TYPE_MAIN_VARIANT (type
)))
2266 get_odr_type (TYPE_MAIN_VARIANT (type
), true);
2267 if (TYPE_MAIN_VARIANT (type
) != type
&& odr_type_p (main_odr_variant (type
)))
2268 get_odr_type (type
, true);
2271 /* Return true if type is known to have no derivations. */
2274 type_known_to_have_no_derivations_p (tree t
)
2276 return (type_all_derivations_known_p (t
)
2277 && (TYPE_FINAL_P (t
)
2279 && !get_odr_type (t
, true)->derived_types
.length())));
2282 /* Dump ODR type T and all its derived types. INDENT specifies indentation for
2283 recursive printing. */
2286 dump_odr_type (FILE *f
, odr_type t
, int indent
=0)
2289 fprintf (f
, "%*s type %i: ", indent
* 2, "", t
->id
);
2290 print_generic_expr (f
, t
->type
, TDF_SLIM
);
2291 fprintf (f
, "%s", t
->anonymous_namespace
? " (anonymous namespace)":"");
2292 fprintf (f
, "%s\n", t
->all_derivations_known
? " (derivations known)":"");
2293 if (TYPE_NAME (t
->type
))
2295 /*fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
2296 DECL_SOURCE_FILE (TYPE_NAME (t->type)),
2297 DECL_SOURCE_LINE (TYPE_NAME (t->type)));*/
2298 if (DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t
->type
)))
2299 fprintf (f
, "%*s mangled name: %s\n", indent
* 2, "",
2301 (DECL_ASSEMBLER_NAME (TYPE_NAME (t
->type
))));
2303 if (t
->bases
.length ())
2305 fprintf (f
, "%*s base odr type ids: ", indent
* 2, "");
2306 for (i
= 0; i
< t
->bases
.length (); i
++)
2307 fprintf (f
, " %i", t
->bases
[i
]->id
);
2310 if (t
->derived_types
.length ())
2312 fprintf (f
, "%*s derived types:\n", indent
* 2, "");
2313 for (i
= 0; i
< t
->derived_types
.length (); i
++)
2314 dump_odr_type (f
, t
->derived_types
[i
], indent
+ 1);
2319 /* Dump the type inheritance graph. */
2322 dump_type_inheritance_graph (FILE *f
)
2327 fprintf (f
, "\n\nType inheritance graph:\n");
2328 for (i
= 0; i
< odr_types
.length (); i
++)
2330 if (odr_types
[i
] && odr_types
[i
]->bases
.length () == 0)
2331 dump_odr_type (f
, odr_types
[i
]);
2333 for (i
= 0; i
< odr_types
.length (); i
++)
2335 if (odr_types
[i
] && odr_types
[i
]->types
&& odr_types
[i
]->types
->length ())
2338 fprintf (f
, "Duplicate tree types for odr type %i\n", i
);
2339 print_node (f
, "", odr_types
[i
]->type
, 0);
2340 for (j
= 0; j
< odr_types
[i
]->types
->length (); j
++)
2343 fprintf (f
, "duplicate #%i\n", j
);
2344 print_node (f
, "", (*odr_types
[i
]->types
)[j
], 0);
2345 t
= (*odr_types
[i
]->types
)[j
];
2346 while (TYPE_P (t
) && TYPE_CONTEXT (t
))
2348 t
= TYPE_CONTEXT (t
);
2349 print_node (f
, "", t
, 0);
2357 /* Initialize IPA devirt and build inheritance tree graph. */
2360 build_type_inheritance_graph (void)
2362 struct symtab_node
*n
;
2363 FILE *inheritance_dump_file
;
2368 timevar_push (TV_IPA_INHERITANCE
);
2369 inheritance_dump_file
= dump_begin (TDI_inheritance
, &flags
);
2370 odr_hash
= new odr_hash_type (23);
2372 odr_vtable_hash
= new odr_vtable_hash_type (23);
2374 /* We reconstruct the graph starting of types of all methods seen in the
2377 if (is_a
<cgraph_node
*> (n
)
2378 && DECL_VIRTUAL_P (n
->decl
)
2379 && n
->real_symbol_p ())
2380 get_odr_type (TYPE_METHOD_BASETYPE (TREE_TYPE (n
->decl
)), true);
2382 /* Look also for virtual tables of types that do not define any methods.
2384 We need it in a case where class B has virtual base of class A
2385 re-defining its virtual method and there is class C with no virtual
2386 methods with B as virtual base.
2388 Here we output B's virtual method in two variant - for non-virtual
2389 and virtual inheritance. B's virtual table has non-virtual version,
2390 while C's has virtual.
2392 For this reason we need to know about C in order to include both
2393 variants of B. More correctly, record_target_from_binfo should
2394 add both variants of the method when walking B, but we have no
2395 link in between them.
2397 We rely on fact that either the method is exported and thus we
2398 assume it is called externally or C is in anonymous namespace and
2399 thus we will see the vtable. */
2401 else if (is_a
<varpool_node
*> (n
)
2402 && DECL_VIRTUAL_P (n
->decl
)
2403 && TREE_CODE (DECL_CONTEXT (n
->decl
)) == RECORD_TYPE
2404 && TYPE_BINFO (DECL_CONTEXT (n
->decl
))
2405 && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n
->decl
))))
2406 get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n
->decl
)), true);
2407 if (inheritance_dump_file
)
2409 dump_type_inheritance_graph (inheritance_dump_file
);
2410 dump_end (TDI_inheritance
, inheritance_dump_file
);
2412 timevar_pop (TV_IPA_INHERITANCE
);
2415 /* Return true if N has reference from live virtual table
2416 (and thus can be a destination of polymorphic call).
2417 Be conservatively correct when callgraph is not built or
2418 if the method may be referred externally. */
2421 referenced_from_vtable_p (struct cgraph_node
*node
)
2424 struct ipa_ref
*ref
;
2427 if (node
->externally_visible
2428 || DECL_EXTERNAL (node
->decl
)
2429 || node
->used_from_other_partition
)
2432 /* Keep this test constant time.
2433 It is unlikely this can happen except for the case where speculative
2434 devirtualization introduced many speculative edges to this node.
2435 In this case the target is very likely alive anyway. */
2436 if (node
->ref_list
.referring
.length () > 100)
2439 /* We need references built. */
2440 if (symtab
->state
<= CONSTRUCTION
)
2443 for (i
= 0; node
->iterate_referring (i
, ref
); i
++)
2444 if ((ref
->use
== IPA_REF_ALIAS
2445 && referenced_from_vtable_p (dyn_cast
<cgraph_node
*> (ref
->referring
)))
2446 || (ref
->use
== IPA_REF_ADDR
2447 && TREE_CODE (ref
->referring
->decl
) == VAR_DECL
2448 && DECL_VIRTUAL_P (ref
->referring
->decl
)))
2456 /* If TARGET has associated node, record it in the NODES array.
2457 CAN_REFER specify if program can refer to the target directly.
2458 if TARGET is unknown (NULL) or it can not be inserted (for example because
2459 its body was already removed and there is no way to refer to it), clear
2463 maybe_record_node (vec
<cgraph_node
*> &nodes
,
2464 tree target
, hash_set
<tree
> *inserted
,
2468 struct cgraph_node
*target_node
, *alias_target
;
2469 enum availability avail
;
2471 /* cxa_pure_virtual and __builtin_unreachable do not need to be added into
2472 list of targets; the runtime effect of calling them is undefined.
2473 Only "real" virtual methods should be accounted. */
2474 if (target
&& TREE_CODE (TREE_TYPE (target
)) != METHOD_TYPE
)
2479 /* The only case when method of anonymous namespace becomes unreferable
2480 is when we completely optimized it out. */
2483 || !type_in_anonymous_namespace_p (DECL_CONTEXT (target
)))
2491 target_node
= cgraph_node::get (target
);
2493 /* Prefer alias target over aliases, so we do not get confused by
2497 alias_target
= target_node
->ultimate_alias_target (&avail
);
2498 if (target_node
!= alias_target
2499 && avail
>= AVAIL_AVAILABLE
2500 && target_node
->get_availability ())
2501 target_node
= alias_target
;
2504 /* Method can only be called by polymorphic call if any
2505 of vtables referring to it are alive.
2507 While this holds for non-anonymous functions, too, there are
2508 cases where we want to keep them in the list; for example
2509 inline functions with -fno-weak are static, but we still
2510 may devirtualize them when instance comes from other unit.
2511 The same holds for LTO.
2513 Currently we ignore these functions in speculative devirtualization.
2514 ??? Maybe it would make sense to be more aggressive for LTO even
2517 && type_in_anonymous_namespace_p (DECL_CONTEXT (target
))
2519 || !referenced_from_vtable_p (target_node
)))
2521 /* See if TARGET is useful function we can deal with. */
2522 else if (target_node
!= NULL
2523 && (TREE_PUBLIC (target
)
2524 || DECL_EXTERNAL (target
)
2525 || target_node
->definition
)
2526 && target_node
->real_symbol_p ())
2528 gcc_assert (!target_node
->global
.inlined_to
);
2529 gcc_assert (target_node
->real_symbol_p ());
2530 if (!inserted
->add (target
))
2532 cached_polymorphic_call_targets
->add (target_node
);
2533 nodes
.safe_push (target_node
);
2537 && (!type_in_anonymous_namespace_p
2538 (DECL_CONTEXT (target
))
2543 /* See if BINFO's type matches OUTER_TYPE. If so, look up
2544 BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
2545 method in vtable and insert method to NODES array
2546 or BASES_TO_CONSIDER if this array is non-NULL.
2547 Otherwise recurse to base BINFOs.
2548 This matches what get_binfo_at_offset does, but with offset
2551 TYPE_BINFOS is a stack of BINFOS of types with defined
2552 virtual table seen on way from class type to BINFO.
2554 MATCHED_VTABLES tracks virtual tables we already did lookup
2555 for virtual function in. INSERTED tracks nodes we already
2558 ANONYMOUS is true if BINFO is part of anonymous namespace.
2560 Clear COMPLETEP when we hit unreferable target.
2564 record_target_from_binfo (vec
<cgraph_node
*> &nodes
,
2565 vec
<tree
> *bases_to_consider
,
2568 vec
<tree
> &type_binfos
,
2569 HOST_WIDE_INT otr_token
,
2571 HOST_WIDE_INT offset
,
2572 hash_set
<tree
> *inserted
,
2573 hash_set
<tree
> *matched_vtables
,
2577 tree type
= BINFO_TYPE (binfo
);
2582 if (BINFO_VTABLE (binfo
))
2583 type_binfos
.safe_push (binfo
);
2584 if (types_same_for_odr (type
, outer_type
))
2587 tree type_binfo
= NULL
;
2589 /* Look up BINFO with virtual table. For normal types it is always last
2591 for (i
= type_binfos
.length () - 1; i
>= 0; i
--)
2592 if (BINFO_OFFSET (type_binfos
[i
]) == BINFO_OFFSET (binfo
))
2594 type_binfo
= type_binfos
[i
];
2597 if (BINFO_VTABLE (binfo
))
2599 /* If this is duplicated BINFO for base shared by virtual inheritance,
2600 we may not have its associated vtable. This is not a problem, since
2601 we will walk it on the other path. */
2604 tree inner_binfo
= get_binfo_at_offset (type_binfo
,
2608 gcc_assert (odr_violation_reported
);
2611 /* For types in anonymous namespace first check if the respective vtable
2612 is alive. If not, we know the type can't be called. */
2613 if (!flag_ltrans
&& anonymous
)
2615 tree vtable
= BINFO_VTABLE (inner_binfo
);
2616 varpool_node
*vnode
;
2618 if (TREE_CODE (vtable
) == POINTER_PLUS_EXPR
)
2619 vtable
= TREE_OPERAND (TREE_OPERAND (vtable
, 0), 0);
2620 vnode
= varpool_node::get (vtable
);
2621 if (!vnode
|| !vnode
->definition
)
2624 gcc_assert (inner_binfo
);
2625 if (bases_to_consider
2626 ? !matched_vtables
->contains (BINFO_VTABLE (inner_binfo
))
2627 : !matched_vtables
->add (BINFO_VTABLE (inner_binfo
)))
2630 tree target
= gimple_get_virt_method_for_binfo (otr_token
,
2633 if (!bases_to_consider
)
2634 maybe_record_node (nodes
, target
, inserted
, can_refer
, completep
);
2635 /* Destructors are never called via construction vtables. */
2636 else if (!target
|| !DECL_CXX_DESTRUCTOR_P (target
))
2637 bases_to_consider
->safe_push (target
);
2643 for (i
= 0; BINFO_BASE_ITERATE (binfo
, i
, base_binfo
); i
++)
2644 /* Walking bases that have no virtual method is pointless exercise. */
2645 if (polymorphic_type_binfo_p (base_binfo
))
2646 record_target_from_binfo (nodes
, bases_to_consider
, base_binfo
, otr_type
,
2648 otr_token
, outer_type
, offset
, inserted
,
2649 matched_vtables
, anonymous
, completep
);
2650 if (BINFO_VTABLE (binfo
))
2654 /* Look up virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
2655 of TYPE, insert them to NODES, recurse into derived nodes.
2656 INSERTED is used to avoid duplicate insertions of methods into NODES.
2657 MATCHED_VTABLES are used to avoid duplicate walking vtables.
2658 Clear COMPLETEP if unreferable target is found.
2660 If CONSIDER_CONSTRUCTION is true, record to BASES_TO_CONSIDER
2661 all cases where BASE_SKIPPED is true (because the base is abstract
2665 possible_polymorphic_call_targets_1 (vec
<cgraph_node
*> &nodes
,
2666 hash_set
<tree
> *inserted
,
2667 hash_set
<tree
> *matched_vtables
,
2670 HOST_WIDE_INT otr_token
,
2672 HOST_WIDE_INT offset
,
2674 vec
<tree
> &bases_to_consider
,
2675 bool consider_construction
)
2677 tree binfo
= TYPE_BINFO (type
->type
);
2679 auto_vec
<tree
, 8> type_binfos
;
2680 bool possibly_instantiated
= type_possibly_instantiated_p (type
->type
);
2682 /* We may need to consider types w/o instances because of possible derived
2683 types using their methods either directly or via construction vtables.
2684 We are safe to skip them when all derivations are known, since we will
2686 This is done by recording them to BASES_TO_CONSIDER array. */
2687 if (possibly_instantiated
|| consider_construction
)
2689 record_target_from_binfo (nodes
,
2690 (!possibly_instantiated
2691 && type_all_derivations_known_p (type
->type
))
2692 ? &bases_to_consider
: NULL
,
2693 binfo
, otr_type
, type_binfos
, otr_token
,
2695 inserted
, matched_vtables
,
2696 type
->anonymous_namespace
, completep
);
2698 for (i
= 0; i
< type
->derived_types
.length (); i
++)
2699 possible_polymorphic_call_targets_1 (nodes
, inserted
,
2702 type
->derived_types
[i
],
2703 otr_token
, outer_type
, offset
, completep
,
2704 bases_to_consider
, consider_construction
);
2707 /* Cache of queries for polymorphic call targets.
2709 Enumerating all call targets may get expensive when there are many
2710 polymorphic calls in the program, so we memoize all the previous
2711 queries and avoid duplicated work. */
2713 struct polymorphic_call_target_d
2715 HOST_WIDE_INT otr_token
;
2716 ipa_polymorphic_call_context context
;
2718 vec
<cgraph_node
*> targets
;
2725 /* Polymorphic call target cache helpers. */
2727 struct polymorphic_call_target_hasher
2728 : pointer_hash
<polymorphic_call_target_d
>
2730 static inline hashval_t
hash (const polymorphic_call_target_d
*);
2731 static inline bool equal (const polymorphic_call_target_d
*,
2732 const polymorphic_call_target_d
*);
2733 static inline void remove (polymorphic_call_target_d
*);
2736 /* Return the computed hashcode for ODR_QUERY. */
2739 polymorphic_call_target_hasher::hash (const polymorphic_call_target_d
*odr_query
)
2741 inchash::hash
hstate (odr_query
->otr_token
);
2743 hstate
.add_wide_int (odr_query
->type
->id
);
2744 hstate
.merge_hash (TYPE_UID (odr_query
->context
.outer_type
));
2745 hstate
.add_wide_int (odr_query
->context
.offset
);
2747 if (odr_query
->context
.speculative_outer_type
)
2749 hstate
.merge_hash (TYPE_UID (odr_query
->context
.speculative_outer_type
));
2750 hstate
.add_wide_int (odr_query
->context
.speculative_offset
);
2752 hstate
.add_flag (odr_query
->speculative
);
2753 hstate
.add_flag (odr_query
->context
.maybe_in_construction
);
2754 hstate
.add_flag (odr_query
->context
.maybe_derived_type
);
2755 hstate
.add_flag (odr_query
->context
.speculative_maybe_derived_type
);
2756 hstate
.commit_flag ();
2757 return hstate
.end ();
2760 /* Compare cache entries T1 and T2. */
2763 polymorphic_call_target_hasher::equal (const polymorphic_call_target_d
*t1
,
2764 const polymorphic_call_target_d
*t2
)
2766 return (t1
->type
== t2
->type
&& t1
->otr_token
== t2
->otr_token
2767 && t1
->speculative
== t2
->speculative
2768 && t1
->context
.offset
== t2
->context
.offset
2769 && t1
->context
.speculative_offset
== t2
->context
.speculative_offset
2770 && t1
->context
.outer_type
== t2
->context
.outer_type
2771 && t1
->context
.speculative_outer_type
== t2
->context
.speculative_outer_type
2772 && t1
->context
.maybe_in_construction
2773 == t2
->context
.maybe_in_construction
2774 && t1
->context
.maybe_derived_type
== t2
->context
.maybe_derived_type
2775 && (t1
->context
.speculative_maybe_derived_type
2776 == t2
->context
.speculative_maybe_derived_type
));
2779 /* Remove entry in polymorphic call target cache hash. */
2782 polymorphic_call_target_hasher::remove (polymorphic_call_target_d
*v
)
2784 v
->targets
.release ();
2788 /* Polymorphic call target query cache. */
2790 typedef hash_table
<polymorphic_call_target_hasher
>
2791 polymorphic_call_target_hash_type
;
2792 static polymorphic_call_target_hash_type
*polymorphic_call_target_hash
;
2794 /* Destroy polymorphic call target query cache. */
2797 free_polymorphic_call_targets_hash ()
2799 if (cached_polymorphic_call_targets
)
2801 delete polymorphic_call_target_hash
;
2802 polymorphic_call_target_hash
= NULL
;
2803 delete cached_polymorphic_call_targets
;
2804 cached_polymorphic_call_targets
= NULL
;
2808 /* When virtual function is removed, we may need to flush the cache. */
2811 devirt_node_removal_hook (struct cgraph_node
*n
, void *d ATTRIBUTE_UNUSED
)
2813 if (cached_polymorphic_call_targets
2814 && cached_polymorphic_call_targets
->contains (n
))
2815 free_polymorphic_call_targets_hash ();
2818 /* Look up base of BINFO that has virtual table VTABLE with OFFSET. */
2821 subbinfo_with_vtable_at_offset (tree binfo
, unsigned HOST_WIDE_INT offset
,
2824 tree v
= BINFO_VTABLE (binfo
);
2827 unsigned HOST_WIDE_INT this_offset
;
2831 if (!vtable_pointer_value_to_vtable (v
, &v
, &this_offset
))
2834 if (offset
== this_offset
2835 && DECL_ASSEMBLER_NAME (v
) == DECL_ASSEMBLER_NAME (vtable
))
2839 for (i
= 0; BINFO_BASE_ITERATE (binfo
, i
, base_binfo
); i
++)
2840 if (polymorphic_type_binfo_p (base_binfo
))
2842 base_binfo
= subbinfo_with_vtable_at_offset (base_binfo
, offset
, vtable
);
2849 /* T is known constant value of virtual table pointer.
2850 Store virtual table to V and its offset to OFFSET.
2851 Return false if T does not look like virtual table reference. */
2854 vtable_pointer_value_to_vtable (const_tree t
, tree
*v
,
2855 unsigned HOST_WIDE_INT
*offset
)
2857 /* We expect &MEM[(void *)&virtual_table + 16B].
2858 We obtain object's BINFO from the context of the virtual table.
2859 This one contains pointer to virtual table represented via
2860 POINTER_PLUS_EXPR. Verify that this pointer matches what
2861 we propagated through.
2863 In the case of virtual inheritance, the virtual tables may
2864 be nested, i.e. the offset may be different from 16 and we may
2865 need to dive into the type representation. */
2866 if (TREE_CODE (t
) == ADDR_EXPR
2867 && TREE_CODE (TREE_OPERAND (t
, 0)) == MEM_REF
2868 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t
, 0), 0)) == ADDR_EXPR
2869 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t
, 0), 1)) == INTEGER_CST
2870 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t
, 0), 0), 0))
2872 && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
2873 (TREE_OPERAND (t
, 0), 0), 0)))
2875 *v
= TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t
, 0), 0), 0);
2876 *offset
= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t
, 0), 1));
2880 /* Alternative representation, used by C++ frontend is POINTER_PLUS_EXPR.
2881 We need to handle it when T comes from static variable initializer or
2883 if (TREE_CODE (t
) == POINTER_PLUS_EXPR
)
2885 *offset
= tree_to_uhwi (TREE_OPERAND (t
, 1));
2886 t
= TREE_OPERAND (t
, 0);
2891 if (TREE_CODE (t
) != ADDR_EXPR
)
2893 *v
= TREE_OPERAND (t
, 0);
2897 /* T is known constant value of virtual table pointer. Return BINFO of the
2901 vtable_pointer_value_to_binfo (const_tree t
)
2904 unsigned HOST_WIDE_INT offset
;
2906 if (!vtable_pointer_value_to_vtable (t
, &vtable
, &offset
))
2909 /* FIXME: for stores of construction vtables we return NULL,
2910 because we do not have BINFO for those. Eventually we should fix
2911 our representation to allow this case to be handled, too.
2912 In the case we see store of BINFO we however may assume
2913 that standard folding will be able to cope with it. */
2914 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable
)),
2918 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
2919 Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
2920 and insert them in NODES.
2922 MATCHED_VTABLES and INSERTED is used to avoid duplicated work. */
2925 record_targets_from_bases (tree otr_type
,
2926 HOST_WIDE_INT otr_token
,
2928 HOST_WIDE_INT offset
,
2929 vec
<cgraph_node
*> &nodes
,
2930 hash_set
<tree
> *inserted
,
2931 hash_set
<tree
> *matched_vtables
,
2936 HOST_WIDE_INT pos
, size
;
2940 if (types_same_for_odr (outer_type
, otr_type
))
2943 for (fld
= TYPE_FIELDS (outer_type
); fld
; fld
= DECL_CHAIN (fld
))
2945 if (TREE_CODE (fld
) != FIELD_DECL
)
2948 pos
= int_bit_position (fld
);
2949 size
= tree_to_shwi (DECL_SIZE (fld
));
2950 if (pos
<= offset
&& (pos
+ size
) > offset
2951 /* Do not get confused by zero sized bases. */
2952 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld
))))
2955 /* Within a class type we should always find corresponding fields. */
2956 gcc_assert (fld
&& TREE_CODE (TREE_TYPE (fld
)) == RECORD_TYPE
);
2958 /* Nonbase types should have been stripped by outer_class_type. */
2959 gcc_assert (DECL_ARTIFICIAL (fld
));
2961 outer_type
= TREE_TYPE (fld
);
2964 base_binfo
= get_binfo_at_offset (TYPE_BINFO (outer_type
),
2968 gcc_assert (odr_violation_reported
);
2971 gcc_assert (base_binfo
);
2972 if (!matched_vtables
->add (BINFO_VTABLE (base_binfo
)))
2975 tree target
= gimple_get_virt_method_for_binfo (otr_token
,
2978 if (!target
|| ! DECL_CXX_DESTRUCTOR_P (target
))
2979 maybe_record_node (nodes
, target
, inserted
, can_refer
, completep
);
2980 matched_vtables
->add (BINFO_VTABLE (base_binfo
));
2985 /* When virtual table is removed, we may need to flush the cache. */
2988 devirt_variable_node_removal_hook (varpool_node
*n
,
2989 void *d ATTRIBUTE_UNUSED
)
2991 if (cached_polymorphic_call_targets
2992 && DECL_VIRTUAL_P (n
->decl
)
2993 && type_in_anonymous_namespace_p (DECL_CONTEXT (n
->decl
)))
2994 free_polymorphic_call_targets_hash ();
2997 /* Record about how many calls would benefit from given type to be final. */
2999 struct odr_type_warn_count
3003 gcov_type dyn_count
;
3006 /* Record about how many calls would benefit from given method to be final. */
3008 struct decl_warn_count
3012 gcov_type dyn_count
;
3015 /* Information about type and decl warnings. */
3017 struct final_warning_record
3019 gcov_type dyn_count
;
3020 vec
<odr_type_warn_count
> type_warnings
;
3021 hash_map
<tree
, decl_warn_count
> decl_warnings
;
3023 struct final_warning_record
*final_warning_records
;
3025 /* Return vector containing possible targets of polymorphic call of type
3026 OTR_TYPE calling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
3027 If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containing
3028 OTR_TYPE and include their virtual method. This is useful for types
3029 possibly in construction or destruction where the virtual table may
3030 temporarily change to one of base types. INCLUDE_DERIVER_TYPES make
3031 us to walk the inheritance graph for all derivations.
3033 If COMPLETEP is non-NULL, store true if the list is complete.
3034 CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
3035 in the target cache. If user needs to visit every target list
3036 just once, it can memoize them.
3038 If SPECULATIVE is set, the list will not contain targets that
3039 are not speculatively taken.
3041 Returned vector is placed into cache. It is NOT caller's responsibility
3042 to free it. The vector can be freed on cgraph_remove_node call if
3043 the particular node is a virtual function present in the cache. */
3046 possible_polymorphic_call_targets (tree otr_type
,
3047 HOST_WIDE_INT otr_token
,
3048 ipa_polymorphic_call_context context
,
3053 static struct cgraph_node_hook_list
*node_removal_hook_holder
;
3054 vec
<cgraph_node
*> nodes
= vNULL
;
3055 auto_vec
<tree
, 8> bases_to_consider
;
3056 odr_type type
, outer_type
;
3057 polymorphic_call_target_d key
;
3058 polymorphic_call_target_d
**slot
;
3062 bool can_refer
= false;
3063 bool skipped
= false;
3065 otr_type
= TYPE_MAIN_VARIANT (otr_type
);
3067 /* If ODR is not initialized or the context is invalid, return empty
3069 if (!odr_hash
|| context
.invalid
|| !TYPE_BINFO (otr_type
))
3072 *completep
= context
.invalid
;
3074 *cache_token
= NULL
;
3078 /* Do not bother to compute speculative info when user do not asks for it. */
3079 if (!speculative
|| !context
.speculative_outer_type
)
3080 context
.clear_speculation ();
3082 type
= get_odr_type (otr_type
, true);
3084 /* Recording type variants would waste results cache. */
3085 gcc_assert (!context
.outer_type
3086 || TYPE_MAIN_VARIANT (context
.outer_type
) == context
.outer_type
);
3088 /* Look up the outer class type we want to walk.
3089 If we fail to do so, the context is invalid. */
3090 if ((context
.outer_type
|| context
.speculative_outer_type
)
3091 && !context
.restrict_to_inner_class (otr_type
))
3096 *cache_token
= NULL
;
3099 gcc_assert (!context
.invalid
);
3101 /* Check that restrict_to_inner_class kept the main variant. */
3102 gcc_assert (!context
.outer_type
3103 || TYPE_MAIN_VARIANT (context
.outer_type
) == context
.outer_type
);
3105 /* We canonicalize our query, so we do not need extra hashtable entries. */
3107 /* Without outer type, we have no use for offset. Just do the
3108 basic search from inner type. */
3109 if (!context
.outer_type
)
3110 context
.clear_outer_type (otr_type
);
3111 /* We need to update our hierarchy if the type does not exist. */
3112 outer_type
= get_odr_type (context
.outer_type
, true);
3113 /* If the type is complete, there are no derivations. */
3114 if (TYPE_FINAL_P (outer_type
->type
))
3115 context
.maybe_derived_type
= false;
3117 /* Initialize query cache. */
3118 if (!cached_polymorphic_call_targets
)
3120 cached_polymorphic_call_targets
= new hash_set
<cgraph_node
*>;
3121 polymorphic_call_target_hash
3122 = new polymorphic_call_target_hash_type (23);
3123 if (!node_removal_hook_holder
)
3125 node_removal_hook_holder
=
3126 symtab
->add_cgraph_removal_hook (&devirt_node_removal_hook
, NULL
);
3127 symtab
->add_varpool_removal_hook (&devirt_variable_node_removal_hook
,
3134 if (context
.outer_type
!= otr_type
)
3136 = get_odr_type (context
.outer_type
, true)->type
;
3137 if (context
.speculative_outer_type
)
3138 context
.speculative_outer_type
3139 = get_odr_type (context
.speculative_outer_type
, true)->type
;
3142 /* Look up cached answer. */
3144 key
.otr_token
= otr_token
;
3145 key
.speculative
= speculative
;
3146 key
.context
= context
;
3147 slot
= polymorphic_call_target_hash
->find_slot (&key
, INSERT
);
3149 *cache_token
= (void *)*slot
;
3153 *completep
= (*slot
)->complete
;
3154 if ((*slot
)->type_warning
&& final_warning_records
)
3156 final_warning_records
->type_warnings
[(*slot
)->type_warning
- 1].count
++;
3157 final_warning_records
->type_warnings
[(*slot
)->type_warning
- 1].dyn_count
3158 += final_warning_records
->dyn_count
;
3160 if (!speculative
&& (*slot
)->decl_warning
&& final_warning_records
)
3162 struct decl_warn_count
*c
=
3163 final_warning_records
->decl_warnings
.get ((*slot
)->decl_warning
);
3165 c
->dyn_count
+= final_warning_records
->dyn_count
;
3167 return (*slot
)->targets
;
3172 /* Do actual search. */
3173 timevar_push (TV_IPA_VIRTUAL_CALL
);
3174 *slot
= XCNEW (polymorphic_call_target_d
);
3176 *cache_token
= (void *)*slot
;
3177 (*slot
)->type
= type
;
3178 (*slot
)->otr_token
= otr_token
;
3179 (*slot
)->context
= context
;
3180 (*slot
)->speculative
= speculative
;
3182 hash_set
<tree
> inserted
;
3183 hash_set
<tree
> matched_vtables
;
3185 /* First insert targets we speculatively identified as likely. */
3186 if (context
.speculative_outer_type
)
3188 odr_type speculative_outer_type
;
3189 bool speculation_complete
= true;
3191 /* First insert target from type itself and check if it may have
3193 speculative_outer_type
= get_odr_type (context
.speculative_outer_type
, true);
3194 if (TYPE_FINAL_P (speculative_outer_type
->type
))
3195 context
.speculative_maybe_derived_type
= false;
3196 binfo
= get_binfo_at_offset (TYPE_BINFO (speculative_outer_type
->type
),
3197 context
.speculative_offset
, otr_type
);
3199 target
= gimple_get_virt_method_for_binfo (otr_token
, binfo
,
3204 /* In the case we get complete method, we don't need
3205 to walk derivations. */
3206 if (target
&& DECL_FINAL_P (target
))
3207 context
.speculative_maybe_derived_type
= false;
3208 if (type_possibly_instantiated_p (speculative_outer_type
->type
))
3209 maybe_record_node (nodes
, target
, &inserted
, can_refer
, &speculation_complete
);
3211 matched_vtables
.add (BINFO_VTABLE (binfo
));
3214 /* Next walk recursively all derived types. */
3215 if (context
.speculative_maybe_derived_type
)
3216 for (i
= 0; i
< speculative_outer_type
->derived_types
.length(); i
++)
3217 possible_polymorphic_call_targets_1 (nodes
, &inserted
,
3220 speculative_outer_type
->derived_types
[i
],
3221 otr_token
, speculative_outer_type
->type
,
3222 context
.speculative_offset
,
3223 &speculation_complete
,
3228 if (!speculative
|| !nodes
.length ())
3230 /* First see virtual method of type itself. */
3231 binfo
= get_binfo_at_offset (TYPE_BINFO (outer_type
->type
),
3232 context
.offset
, otr_type
);
3234 target
= gimple_get_virt_method_for_binfo (otr_token
, binfo
,
3238 gcc_assert (odr_violation_reported
);
3242 /* Destructors are never called through construction virtual tables,
3243 because the type is always known. */
3244 if (target
&& DECL_CXX_DESTRUCTOR_P (target
))
3245 context
.maybe_in_construction
= false;
3249 /* In the case we get complete method, we don't need
3250 to walk derivations. */
3251 if (DECL_FINAL_P (target
))
3252 context
.maybe_derived_type
= false;
3255 /* If OUTER_TYPE is abstract, we know we are not seeing its instance. */
3256 if (type_possibly_instantiated_p (outer_type
->type
))
3257 maybe_record_node (nodes
, target
, &inserted
, can_refer
, &complete
);
3262 matched_vtables
.add (BINFO_VTABLE (binfo
));
3264 /* Next walk recursively all derived types. */
3265 if (context
.maybe_derived_type
)
3267 for (i
= 0; i
< outer_type
->derived_types
.length(); i
++)
3268 possible_polymorphic_call_targets_1 (nodes
, &inserted
,
3271 outer_type
->derived_types
[i
],
3272 otr_token
, outer_type
->type
,
3273 context
.offset
, &complete
,
3275 context
.maybe_in_construction
);
3277 if (!outer_type
->all_derivations_known
)
3279 if (!speculative
&& final_warning_records
)
3282 && nodes
.length () == 1
3283 && warn_suggest_final_types
3284 && !outer_type
->derived_types
.length ())
3286 if (outer_type
->id
>= (int)final_warning_records
->type_warnings
.length ())
3287 final_warning_records
->type_warnings
.safe_grow_cleared
3288 (odr_types
.length ());
3289 final_warning_records
->type_warnings
[outer_type
->id
].count
++;
3290 final_warning_records
->type_warnings
[outer_type
->id
].dyn_count
3291 += final_warning_records
->dyn_count
;
3292 final_warning_records
->type_warnings
[outer_type
->id
].type
3294 (*slot
)->type_warning
= outer_type
->id
+ 1;
3297 && warn_suggest_final_methods
3298 && nodes
.length () == 1
3299 && types_same_for_odr (DECL_CONTEXT (nodes
[0]->decl
),
3303 struct decl_warn_count
&c
=
3304 final_warning_records
->decl_warnings
.get_or_insert
3305 (nodes
[0]->decl
, &existed
);
3310 c
.dyn_count
+= final_warning_records
->dyn_count
;
3315 c
.dyn_count
= final_warning_records
->dyn_count
;
3316 c
.decl
= nodes
[0]->decl
;
3318 (*slot
)->decl_warning
= nodes
[0]->decl
;
3327 /* Destructors are never called through construction virtual tables,
3328 because the type is always known. One of entries may be
3329 cxa_pure_virtual so look to at least two of them. */
3330 if (context
.maybe_in_construction
)
3331 for (i
=0 ; i
< MIN (nodes
.length (), 2); i
++)
3332 if (DECL_CXX_DESTRUCTOR_P (nodes
[i
]->decl
))
3333 context
.maybe_in_construction
= false;
3334 if (context
.maybe_in_construction
)
3336 if (type
!= outer_type
3338 || (context
.maybe_derived_type
3339 && !type_all_derivations_known_p (outer_type
->type
))))
3340 record_targets_from_bases (otr_type
, otr_token
, outer_type
->type
,
3341 context
.offset
, nodes
, &inserted
,
3342 &matched_vtables
, &complete
);
3344 maybe_record_node (nodes
, target
, &inserted
, can_refer
, &complete
);
3345 for (i
= 0; i
< bases_to_consider
.length(); i
++)
3346 maybe_record_node (nodes
, bases_to_consider
[i
], &inserted
, can_refer
, &complete
);
3351 (*slot
)->targets
= nodes
;
3352 (*slot
)->complete
= complete
;
3354 *completep
= complete
;
3356 timevar_pop (TV_IPA_VIRTUAL_CALL
);
3361 add_decl_warning (const tree
&key ATTRIBUTE_UNUSED
, const decl_warn_count
&value
,
3362 vec
<const decl_warn_count
*> *vec
)
3364 vec
->safe_push (&value
);
3368 /* Dump target list TARGETS into FILE. */
3371 dump_targets (FILE *f
, vec
<cgraph_node
*> targets
)
3375 for (i
= 0; i
< targets
.length (); i
++)
3379 name
= cplus_demangle_v3 (targets
[i
]->asm_name (), 0);
3380 fprintf (f
, " %s/%i", name
? name
: targets
[i
]->name (), targets
[i
]->order
);
3383 if (!targets
[i
]->definition
)
3384 fprintf (f
, " (no definition%s)",
3385 DECL_DECLARED_INLINE_P (targets
[i
]->decl
)
3391 /* Dump all possible targets of a polymorphic call. */
3394 dump_possible_polymorphic_call_targets (FILE *f
,
3396 HOST_WIDE_INT otr_token
,
3397 const ipa_polymorphic_call_context
&ctx
)
3399 vec
<cgraph_node
*> targets
;
3401 odr_type type
= get_odr_type (TYPE_MAIN_VARIANT (otr_type
), false);
3406 targets
= possible_polymorphic_call_targets (otr_type
, otr_token
,
3408 &final
, NULL
, false);
3409 fprintf (f
, " Targets of polymorphic call of type %i:", type
->id
);
3410 print_generic_expr (f
, type
->type
, TDF_SLIM
);
3411 fprintf (f
, " token %i\n", (int)otr_token
);
3415 fprintf (f
, " %s%s%s%s\n ",
3416 final
? "This is a complete list." :
3417 "This is partial list; extra targets may be defined in other units.",
3418 ctx
.maybe_in_construction
? " (base types included)" : "",
3419 ctx
.maybe_derived_type
? " (derived types included)" : "",
3420 ctx
.speculative_maybe_derived_type
? " (speculative derived types included)" : "");
3421 len
= targets
.length ();
3422 dump_targets (f
, targets
);
3424 targets
= possible_polymorphic_call_targets (otr_type
, otr_token
,
3426 &final
, NULL
, true);
3427 if (targets
.length () != len
)
3429 fprintf (f
, " Speculative targets:");
3430 dump_targets (f
, targets
);
3432 gcc_assert (targets
.length () <= len
);
3437 /* Return true if N can be possibly target of a polymorphic call of
3438 OTR_TYPE/OTR_TOKEN. */
3441 possible_polymorphic_call_target_p (tree otr_type
,
3442 HOST_WIDE_INT otr_token
,
3443 const ipa_polymorphic_call_context
&ctx
,
3444 struct cgraph_node
*n
)
3446 vec
<cgraph_node
*> targets
;
3448 enum built_in_function fcode
;
3451 if (TREE_CODE (TREE_TYPE (n
->decl
)) == FUNCTION_TYPE
3452 && ((fcode
= DECL_FUNCTION_CODE (n
->decl
))
3453 == BUILT_IN_UNREACHABLE
3454 || fcode
== BUILT_IN_TRAP
))
3459 targets
= possible_polymorphic_call_targets (otr_type
, otr_token
, ctx
, &final
);
3460 for (i
= 0; i
< targets
.length (); i
++)
3461 if (n
->semantically_equivalent_p (targets
[i
]))
3464 /* At a moment we allow middle end to dig out new external declarations
3465 as a targets of polymorphic calls. */
3466 if (!final
&& !n
->definition
)
3473 /* Return true if N can be possibly target of a polymorphic call of
3474 OBJ_TYPE_REF expression REF in STMT. */
3477 possible_polymorphic_call_target_p (tree ref
,
3479 struct cgraph_node
*n
)
3481 ipa_polymorphic_call_context
context (current_function_decl
, ref
, stmt
);
3482 tree call_fn
= gimple_call_fn (stmt
);
3484 return possible_polymorphic_call_target_p (obj_type_ref_class (call_fn
),
3486 (OBJ_TYPE_REF_TOKEN (call_fn
)),
3492 /* After callgraph construction new external nodes may appear.
3493 Add them into the graph. */
3496 update_type_inheritance_graph (void)
3498 struct cgraph_node
*n
;
3502 free_polymorphic_call_targets_hash ();
3503 timevar_push (TV_IPA_INHERITANCE
);
3504 /* We reconstruct the graph starting from types of all methods seen in the
3506 FOR_EACH_FUNCTION (n
)
3507 if (DECL_VIRTUAL_P (n
->decl
)
3509 && n
->real_symbol_p ())
3510 get_odr_type (TYPE_METHOD_BASETYPE (TREE_TYPE (n
->decl
)), true);
3511 timevar_pop (TV_IPA_INHERITANCE
);
3515 /* Return true if N looks like likely target of a polymorphic call.
3516 Rule out cxa_pure_virtual, noreturns, function declared cold and
3517 other obvious cases. */
3520 likely_target_p (struct cgraph_node
*n
)
3523 /* cxa_pure_virtual and similar things are not likely. */
3524 if (TREE_CODE (TREE_TYPE (n
->decl
)) != METHOD_TYPE
)
3526 flags
= flags_from_decl_or_type (n
->decl
);
3527 if (flags
& ECF_NORETURN
)
3529 if (lookup_attribute ("cold",
3530 DECL_ATTRIBUTES (n
->decl
)))
3532 if (n
->frequency
< NODE_FREQUENCY_NORMAL
)
3534 /* If there are no live virtual tables referring the target,
3535 the only way the target can be called is an instance coming from other
3536 compilation unit; speculative devirtualization is built around an
3537 assumption that won't happen. */
3538 if (!referenced_from_vtable_p (n
))
3543 /* Compare type warning records P1 and P2 and choose one with larger count;
3544 helper for qsort. */
3547 type_warning_cmp (const void *p1
, const void *p2
)
3549 const odr_type_warn_count
*t1
= (const odr_type_warn_count
*)p1
;
3550 const odr_type_warn_count
*t2
= (const odr_type_warn_count
*)p2
;
3552 if (t1
->dyn_count
< t2
->dyn_count
)
3554 if (t1
->dyn_count
> t2
->dyn_count
)
3556 return t2
->count
- t1
->count
;
3559 /* Compare decl warning records P1 and P2 and choose one with larger count;
3560 helper for qsort. */
3563 decl_warning_cmp (const void *p1
, const void *p2
)
3565 const decl_warn_count
*t1
= *(const decl_warn_count
* const *)p1
;
3566 const decl_warn_count
*t2
= *(const decl_warn_count
* const *)p2
;
3568 if (t1
->dyn_count
< t2
->dyn_count
)
3570 if (t1
->dyn_count
> t2
->dyn_count
)
3572 return t2
->count
- t1
->count
;
3576 /* Try to speculatively devirtualize call to OTR_TYPE with OTR_TOKEN with
3579 struct cgraph_node
*
3580 try_speculative_devirtualization (tree otr_type
, HOST_WIDE_INT otr_token
,
3581 ipa_polymorphic_call_context ctx
)
3583 vec
<cgraph_node
*>targets
3584 = possible_polymorphic_call_targets
3585 (otr_type
, otr_token
, ctx
, NULL
, NULL
, true);
3587 struct cgraph_node
*likely_target
= NULL
;
3589 for (i
= 0; i
< targets
.length (); i
++)
3590 if (likely_target_p (targets
[i
]))
3594 likely_target
= targets
[i
];
3597 ||!likely_target
->definition
3598 || DECL_EXTERNAL (likely_target
->decl
))
3601 /* Don't use an implicitly-declared destructor (c++/58678). */
3602 struct cgraph_node
*non_thunk_target
3603 = likely_target
->function_symbol ();
3604 if (DECL_ARTIFICIAL (non_thunk_target
->decl
))
3606 if (likely_target
->get_availability () <= AVAIL_INTERPOSABLE
3607 && likely_target
->can_be_discarded_p ())
3609 return likely_target
;
3612 /* The ipa-devirt pass.
3613 When polymorphic call has only one likely target in the unit,
3614 turn it into a speculative call. */
3619 struct cgraph_node
*n
;
3620 hash_set
<void *> bad_call_targets
;
3621 struct cgraph_edge
*e
;
3623 int npolymorphic
= 0, nspeculated
= 0, nconverted
= 0, ncold
= 0;
3624 int nmultiple
= 0, noverwritable
= 0, ndevirtualized
= 0, nnotdefined
= 0;
3625 int nwrong
= 0, nok
= 0, nexternal
= 0, nartificial
= 0;
3632 dump_type_inheritance_graph (dump_file
);
3634 /* We can output -Wsuggest-final-methods and -Wsuggest-final-types warnings.
3635 This is implemented by setting up final_warning_records that are updated
3636 by get_polymorphic_call_targets.
3637 We need to clear cache in this case to trigger recomputation of all
3639 if (warn_suggest_final_methods
|| warn_suggest_final_types
)
3641 final_warning_records
= new (final_warning_record
);
3642 final_warning_records
->type_warnings
= vNULL
;
3643 final_warning_records
->type_warnings
.safe_grow_cleared (odr_types
.length ());
3644 free_polymorphic_call_targets_hash ();
3647 FOR_EACH_DEFINED_FUNCTION (n
)
3649 bool update
= false;
3650 if (!opt_for_fn (n
->decl
, flag_devirtualize
))
3652 if (dump_file
&& n
->indirect_calls
)
3653 fprintf (dump_file
, "\n\nProcesing function %s/%i\n",
3654 n
->name (), n
->order
);
3655 for (e
= n
->indirect_calls
; e
; e
= e
->next_callee
)
3656 if (e
->indirect_info
->polymorphic
)
3658 struct cgraph_node
*likely_target
= NULL
;
3662 if (final_warning_records
)
3663 final_warning_records
->dyn_count
= e
->count
;
3665 vec
<cgraph_node
*>targets
3666 = possible_polymorphic_call_targets
3667 (e
, &final
, &cache_token
, true);
3670 /* Trigger warnings by calculating non-speculative targets. */
3671 if (warn_suggest_final_methods
|| warn_suggest_final_types
)
3672 possible_polymorphic_call_targets (e
);
3675 dump_possible_polymorphic_call_targets
3680 /* See if the call can be devirtualized by means of ipa-prop's
3681 polymorphic call context propagation. If not, we can just
3682 forget about this call being polymorphic and avoid some heavy
3683 lifting in remove_unreachable_nodes that will otherwise try to
3684 keep all possible targets alive until inlining and in the inliner
3687 This may need to be revisited once we add further ways to use
3688 the may edges, but it is a resonable thing to do right now. */
3690 if ((e
->indirect_info
->param_index
== -1
3691 || (!opt_for_fn (n
->decl
, flag_devirtualize_speculatively
)
3692 && e
->indirect_info
->vptr_changed
))
3693 && !flag_ltrans_devirtualize
)
3695 e
->indirect_info
->polymorphic
= false;
3698 fprintf (dump_file
, "Dropping polymorphic call info;"
3699 " it can not be used by ipa-prop\n");
3702 if (!opt_for_fn (n
->decl
, flag_devirtualize_speculatively
))
3705 if (!e
->maybe_hot_p ())
3708 fprintf (dump_file
, "Call is cold\n\n");
3715 fprintf (dump_file
, "Call is already speculated\n\n");
3718 /* When dumping see if we agree with speculation. */
3722 if (bad_call_targets
.contains (cache_token
))
3725 fprintf (dump_file
, "Target list is known to be useless\n\n");
3729 for (i
= 0; i
< targets
.length (); i
++)
3730 if (likely_target_p (targets
[i
]))
3734 likely_target
= NULL
;
3736 fprintf (dump_file
, "More than one likely target\n\n");
3740 likely_target
= targets
[i
];
3744 bad_call_targets
.add (cache_token
);
3747 /* This is reached only when dumping; check if we agree or disagree
3748 with the speculation. */
3751 struct cgraph_edge
*e2
;
3752 struct ipa_ref
*ref
;
3753 e
->speculative_call_info (e2
, e
, ref
);
3754 if (e2
->callee
->ultimate_alias_target ()
3755 == likely_target
->ultimate_alias_target ())
3757 fprintf (dump_file
, "We agree with speculation\n\n");
3762 fprintf (dump_file
, "We disagree with speculation\n\n");
3767 if (!likely_target
->definition
)
3770 fprintf (dump_file
, "Target is not a definition\n\n");
3774 /* Do not introduce new references to external symbols. While we
3775 can handle these just well, it is common for programs to
3776 incorrectly with headers defining methods they are linked
3778 if (DECL_EXTERNAL (likely_target
->decl
))
3781 fprintf (dump_file
, "Target is external\n\n");
3785 /* Don't use an implicitly-declared destructor (c++/58678). */
3786 struct cgraph_node
*non_thunk_target
3787 = likely_target
->function_symbol ();
3788 if (DECL_ARTIFICIAL (non_thunk_target
->decl
))
3791 fprintf (dump_file
, "Target is artificial\n\n");
3795 if (likely_target
->get_availability () <= AVAIL_INTERPOSABLE
3796 && likely_target
->can_be_discarded_p ())
3799 fprintf (dump_file
, "Target is overwritable\n\n");
3803 else if (dbg_cnt (devirt
))
3805 if (dump_enabled_p ())
3807 location_t locus
= gimple_location_safe (e
->call_stmt
);
3808 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, locus
,
3809 "speculatively devirtualizing call in %s/%i to %s/%i\n",
3810 n
->name (), n
->order
,
3811 likely_target
->name (),
3812 likely_target
->order
);
3814 if (!likely_target
->can_be_discarded_p ())
3817 alias
= dyn_cast
<cgraph_node
*> (likely_target
->noninterposable_alias ());
3819 likely_target
= alias
;
3824 (likely_target
, e
->count
* 8 / 10, e
->frequency
* 8 / 10);
3828 inline_update_overall_summary (n
);
3830 if (warn_suggest_final_methods
|| warn_suggest_final_types
)
3832 if (warn_suggest_final_types
)
3834 final_warning_records
->type_warnings
.qsort (type_warning_cmp
);
3835 for (unsigned int i
= 0;
3836 i
< final_warning_records
->type_warnings
.length (); i
++)
3837 if (final_warning_records
->type_warnings
[i
].count
)
3839 tree type
= final_warning_records
->type_warnings
[i
].type
;
3840 int count
= final_warning_records
->type_warnings
[i
].count
;
3842 = final_warning_records
->type_warnings
[i
].dyn_count
;
3845 warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type
)),
3846 OPT_Wsuggest_final_types
, count
,
3847 "Declaring type %qD final "
3848 "would enable devirtualization of %i call",
3849 "Declaring type %qD final "
3850 "would enable devirtualization of %i calls",
3854 warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type
)),
3855 OPT_Wsuggest_final_types
, count
,
3856 "Declaring type %qD final "
3857 "would enable devirtualization of %i call "
3858 "executed %lli times",
3859 "Declaring type %qD final "
3860 "would enable devirtualization of %i calls "
3861 "executed %lli times",
3868 if (warn_suggest_final_methods
)
3870 vec
<const decl_warn_count
*> decl_warnings_vec
= vNULL
;
3872 final_warning_records
->decl_warnings
.traverse
3873 <vec
<const decl_warn_count
*> *, add_decl_warning
> (&decl_warnings_vec
);
3874 decl_warnings_vec
.qsort (decl_warning_cmp
);
3875 for (unsigned int i
= 0; i
< decl_warnings_vec
.length (); i
++)
3877 tree decl
= decl_warnings_vec
[i
]->decl
;
3878 int count
= decl_warnings_vec
[i
]->count
;
3879 long long dyn_count
= decl_warnings_vec
[i
]->dyn_count
;
3882 if (DECL_CXX_DESTRUCTOR_P (decl
))
3883 warning_n (DECL_SOURCE_LOCATION (decl
),
3884 OPT_Wsuggest_final_methods
, count
,
3885 "Declaring virtual destructor of %qD final "
3886 "would enable devirtualization of %i call",
3887 "Declaring virtual destructor of %qD final "
3888 "would enable devirtualization of %i calls",
3889 DECL_CONTEXT (decl
), count
);
3891 warning_n (DECL_SOURCE_LOCATION (decl
),
3892 OPT_Wsuggest_final_methods
, count
,
3893 "Declaring method %qD final "
3894 "would enable devirtualization of %i call",
3895 "Declaring method %qD final "
3896 "would enable devirtualization of %i calls",
3898 else if (DECL_CXX_DESTRUCTOR_P (decl
))
3899 warning_n (DECL_SOURCE_LOCATION (decl
),
3900 OPT_Wsuggest_final_methods
, count
,
3901 "Declaring virtual destructor of %qD final "
3902 "would enable devirtualization of %i call "
3903 "executed %lli times",
3904 "Declaring virtual destructor of %qD final "
3905 "would enable devirtualization of %i calls "
3906 "executed %lli times",
3907 DECL_CONTEXT (decl
), count
, dyn_count
);
3909 warning_n (DECL_SOURCE_LOCATION (decl
),
3910 OPT_Wsuggest_final_methods
, count
,
3911 "Declaring method %qD final "
3912 "would enable devirtualization of %i call "
3913 "executed %lli times",
3914 "Declaring method %qD final "
3915 "would enable devirtualization of %i calls "
3916 "executed %lli times",
3917 decl
, count
, dyn_count
);
3921 delete (final_warning_records
);
3922 final_warning_records
= 0;
3927 "%i polymorphic calls, %i devirtualized,"
3928 " %i speculatively devirtualized, %i cold\n"
3929 "%i have multiple targets, %i overwritable,"
3930 " %i already speculated (%i agree, %i disagree),"
3931 " %i external, %i not defined, %i artificial, %i infos dropped\n",
3932 npolymorphic
, ndevirtualized
, nconverted
, ncold
,
3933 nmultiple
, noverwritable
, nspeculated
, nok
, nwrong
,
3934 nexternal
, nnotdefined
, nartificial
, ndropped
);
3935 return ndevirtualized
|| ndropped
? TODO_remove_functions
: 0;
3940 const pass_data pass_data_ipa_devirt
=
3942 IPA_PASS
, /* type */
3943 "devirt", /* name */
3944 OPTGROUP_NONE
, /* optinfo_flags */
3945 TV_IPA_DEVIRT
, /* tv_id */
3946 0, /* properties_required */
3947 0, /* properties_provided */
3948 0, /* properties_destroyed */
3949 0, /* todo_flags_start */
3950 ( TODO_dump_symtab
), /* todo_flags_finish */
3953 class pass_ipa_devirt
: public ipa_opt_pass_d
3956 pass_ipa_devirt (gcc::context
*ctxt
)
3957 : ipa_opt_pass_d (pass_data_ipa_devirt
, ctxt
,
3958 NULL
, /* generate_summary */
3959 NULL
, /* write_summary */
3960 NULL
, /* read_summary */
3961 NULL
, /* write_optimization_summary */
3962 NULL
, /* read_optimization_summary */
3963 NULL
, /* stmt_fixup */
3964 0, /* function_transform_todo_flags_start */
3965 NULL
, /* function_transform */
3966 NULL
) /* variable_transform */
3969 /* opt_pass methods: */
3970 virtual bool gate (function
*)
3972 /* In LTO, always run the IPA passes and decide on function basis if the
3976 return (flag_devirtualize
3977 && (flag_devirtualize_speculatively
3978 || (warn_suggest_final_methods
3979 || warn_suggest_final_types
))
3983 virtual unsigned int execute (function
*) { return ipa_devirt (); }
3985 }; // class pass_ipa_devirt
3990 make_pass_ipa_devirt (gcc::context
*ctxt
)
3992 return new pass_ipa_devirt (ctxt
);
3995 #include "gt-ipa-devirt.h"