[multiple changes]
[gcc.git] / gcc / ipa-devirt.c
1 /* Basic IPA utilities for type inheritance graph construction and
2 devirtualization.
3 Copyright (C) 2013-2015 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* Brief vocabulary:
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
38
39 OTR = OBJ_TYPE_REF
40 This is the Gimple representation of type information of a polymorphic call.
41 It contains two parameters:
42 otr_type is a type of class whose method is called.
43 otr_token is the index into virtual table where address is taken.
44
45 BINFO
46 This is the type inheritance information attached to each tree
47 RECORD_TYPE by the C++ frontend. It provides information about base
48 types and virtual tables.
49
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
52 BINFO_VTABLE.
53
54 Base types of a given type are enumerated by BINFO_BASE_BINFO
55 vector. Members of this vectors are not BINFOs associated
56 with a base type. Rather they are new copies of BINFOs
57 (base BINFOs). Their virtual tables may differ from
58 virtual table of the base type. Also BINFO_OFFSET specifies
59 offset of the base within the type.
60
61 In the case of single inheritance, the virtual table is shared
62 and BINFO_VTABLE of base BINFO is NULL. In the case of multiple
63 inheritance the individual virtual tables are pointed to by
64 BINFO_VTABLE of base binfos (which differs from the BINFO_VTABLE of
65 the binfo associated with the base type).
66
67 BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
70 base type.
71
72 token
73 This is an index of virtual method in virtual table associated
74 to the type defining it. Token can be looked up from OBJ_TYPE_REF
75 or from DECL_VINDEX of a given virtual table.
76
77 polymorphic (indirect) call
78 This is callgraph representation of virtual method call. Every
79 polymorphic call contains otr_type and otr_token taken from
80 original OBJ_TYPE_REF at callgraph construction time.
81
82 What we do here:
83
84 build_type_inheritance_graph triggers a construction of the type inheritance
85 graph.
86
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
89 inserted into the graph. Also types without virtual methods are not
90 represented at all, though it may be easy to add this.
91
92 The inheritance graph is represented as follows:
93
94 Vertices are structures odr_type. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by ODR rule.
96 (the multiple type nodes appear only with linktime optimization)
97
98 Edges are represented by odr_type->base and odr_type->derived_types.
99 At the moment we do not track offsets of types for multiple inheritance.
100 Adding this is easy.
101
102 possible_polymorphic_call_targets returns, given the parameters found in
103 an indirect polymorphic edge, all possible polymorphic call targets of the call.
104
105 pass_ipa_devirt performs simple speculative devirtualization.
106 */
107
108 #include "config.h"
109 #include "system.h"
110 #include "coretypes.h"
111 #include "tm.h"
112 #include "hash-set.h"
113 #include "machmode.h"
114 #include "hash-map.h"
115 #include "vec.h"
116 #include "double-int.h"
117 #include "input.h"
118 #include "alias.h"
119 #include "symtab.h"
120 #include "wide-int.h"
121 #include "inchash.h"
122 #include "tree.h"
123 #include "fold-const.h"
124 #include "print-tree.h"
125 #include "calls.h"
126 #include "predict.h"
127 #include "basic-block.h"
128 #include "is-a.h"
129 #include "plugin-api.h"
130 #include "hard-reg-set.h"
131 #include "function.h"
132 #include "ipa-ref.h"
133 #include "cgraph.h"
134 #include "hashtab.h"
135 #include "rtl.h"
136 #include "flags.h"
137 #include "statistics.h"
138 #include "real.h"
139 #include "fixed-value.h"
140 #include "insn-config.h"
141 #include "expmed.h"
142 #include "dojump.h"
143 #include "explow.h"
144 #include "emit-rtl.h"
145 #include "varasm.h"
146 #include "stmt.h"
147 #include "expr.h"
148 #include "tree-pass.h"
149 #include "target.h"
150 #include "hash-table.h"
151 #include "tree-pretty-print.h"
152 #include "ipa-utils.h"
153 #include "tree-ssa-alias.h"
154 #include "internal-fn.h"
155 #include "gimple-fold.h"
156 #include "gimple-expr.h"
157 #include "gimple.h"
158 #include "alloc-pool.h"
159 #include "symbol-summary.h"
160 #include "ipa-prop.h"
161 #include "ipa-inline.h"
162 #include "diagnostic.h"
163 #include "tree-dfa.h"
164 #include "demangle.h"
165 #include "dbgcnt.h"
166 #include "gimple-pretty-print.h"
167 #include "stor-layout.h"
168 #include "intl.h"
169 #include "streamer-hooks.h"
170 #include "lto-streamer.h"
171
172 /* Hash based set of pairs of types. */
173 typedef struct
174 {
175 tree first;
176 tree second;
177 } type_pair;
178
179 struct pair_traits : default_hashset_traits
180 {
181 static hashval_t
182 hash (type_pair p)
183 {
184 return TYPE_UID (p.first) ^ TYPE_UID (p.second);
185 }
186 static bool
187 is_empty (type_pair p)
188 {
189 return p.first == NULL;
190 }
191 static bool
192 is_deleted (type_pair p ATTRIBUTE_UNUSED)
193 {
194 return false;
195 }
196 static bool
197 equal (const type_pair &a, const type_pair &b)
198 {
199 return a.first==b.first && a.second == b.second;
200 }
201 static void
202 mark_empty (type_pair &e)
203 {
204 e.first = NULL;
205 }
206 };
207
/* Forward declaration; structural type equivalence test used by
   odr_subtypes_equivalent_p.  */
static bool odr_types_equivalent_p (tree, tree, bool, bool *,
				    hash_set<type_pair,pair_traits> *);

/* Set to true once an ODR violation warning has been output.  */
static bool odr_violation_reported = false;


/* Pointer set of all call targets appearing in the cache.  */
static hash_set<cgraph_node *> *cached_polymorphic_call_targets;
216
/* The node of type inheritance graph.  For each type unique in
   One Definition Rule (ODR) sense, we produce one node linking all
   main variants of types equivalent to it, bases and derived types.  */

struct GTY(()) odr_type_d
{
  /* Leader type.  */
  tree type;
  /* All bases; built only for main variants of types.  */
  vec<odr_type> GTY((skip)) bases;
  /* All derived types with virtual methods seen in unit;
     built only for main variants of types.  */
  vec<odr_type> GTY((skip)) derived_types;

  /* All equivalent types, if more than one.  */
  vec<tree, va_gc> *types;
  /* Set of all equivalent types, if NON-NULL.  */
  hash_set<tree> * GTY((skip)) types_set;

  /* Unique ID indexing the type in odr_types array.  */
  int id;
  /* Is it in anonymous namespace?  */
  bool anonymous_namespace;
  /* Do we know about all derivations of given type?  */
  bool all_derivations_known;
  /* Did we report ODR violation here?  */
  bool odr_violated;
  /* Set when a virtual table without RTTI prevailed over a table with it.  */
  bool rtti_broken;
};
247
/* Return true if T is a type with linkage defined.  */

bool
type_with_linkage_p (const_tree t)
{
  /* Builtin types do not define linkage, their TYPE_CONTEXT is NULL.  */
  if (!TYPE_CONTEXT (t)
      || !TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) != TYPE_DECL
      || !TYPE_STUB_DECL (t))
    return false;

  /* In LTO do not get confused by non-C++ produced types or types built
     with -fno-lto-odr-type-merging.  */
  if (in_lto_p)
    {
      /* To support -fno-lto-odr-type-merging recognize types with vtables
	 to have linkage.  */
      if (RECORD_OR_UNION_TYPE_P (t)
	  && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
	return true;
      /* Do not accept any other types - we do not know if they were produced
	 by C++ FE.  */
      if (!DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t)))
	return false;
    }

  return (RECORD_OR_UNION_TYPE_P (t)
	  || TREE_CODE (t) == ENUMERAL_TYPE);
}
277
/* Return true if T is in anonymous namespace.
   This works only on those C++ types with linkage defined.  */

bool
type_in_anonymous_namespace_p (const_tree t)
{
  gcc_assert (type_with_linkage_p (t));

  /* Keep -fno-lto-odr-type-merging working by recognizing classes with
     vtables properly into anonymous namespaces: for those, rely purely on
     TREE_PUBLIC of the stub decl.  */
  if (RECORD_OR_UNION_TYPE_P (t)
      && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
    return (TYPE_STUB_DECL (t) && !TREE_PUBLIC (TYPE_STUB_DECL (t)));

  if (TYPE_STUB_DECL (t) && !TREE_PUBLIC (TYPE_STUB_DECL (t)))
    {
      /* C++ FE uses magic <anon> as assembler names of anonymous types.
	 Verify that this matches type_in_anonymous_namespace_p.  */
#ifdef ENABLE_CHECKING
      if (in_lto_p)
	gcc_assert (!strcmp ("<anon>",
			     IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (TYPE_NAME (t)))));
#endif
      return true;
    }
  return false;
}
305
/* Return true if T is type with One Definition Rule info attached.
   It means that either it is anonymous type or it has assembler name
   set.  */

bool
odr_type_p (const_tree t)
{
  /* We do not have this information when not in LTO, but we do not need
     to care, since it is used only for type merging.  */
  gcc_checking_assert (in_lto_p || flag_lto);

  /* To support -fno-lto-odr-type-merging consider types with vtables ODR.  */
  if (type_with_linkage_p (t) && type_in_anonymous_namespace_p (t))
    return true;

  /* Otherwise ODR info is carried by the mangled name stored as the
     assembler name of TYPE_NAME.  */
  if (TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
      && (DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t))))
    {
#ifdef ENABLE_CHECKING
      /* C++ FE uses magic <anon> as assembler names of anonymous types.
	 Verify that this matches type_in_anonymous_namespace_p.  */
      gcc_assert (!type_with_linkage_p (t)
		  || strcmp ("<anon>",
			     IDENTIFIER_POINTER
				(DECL_ASSEMBLER_NAME (TYPE_NAME (t))))
		  || type_in_anonymous_namespace_p (t));
#endif
      return true;
    }
  return false;
}
337
338 /* Return TRUE if all derived types of T are known and thus
339 we may consider the walk of derived type complete.
340
341 This is typically true only for final anonymous namespace types and types
342 defined within functions (that may be COMDAT and thus shared across units,
343 but with the same set of derived types). */
344
345 bool
346 type_all_derivations_known_p (const_tree t)
347 {
348 if (TYPE_FINAL_P (t))
349 return true;
350 if (flag_ltrans)
351 return false;
352 /* Non-C++ types may have IDENTIFIER_NODE here, do not crash. */
353 if (!TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) != TYPE_DECL)
354 return true;
355 if (type_in_anonymous_namespace_p (t))
356 return true;
357 return (decl_function_context (TYPE_NAME (t)) != NULL);
358 }
359
360 /* Return TRUE if type's constructors are all visible. */
361
362 static bool
363 type_all_ctors_visible_p (tree t)
364 {
365 return !flag_ltrans
366 && symtab->state >= CONSTRUCTION
367 /* We can not always use type_all_derivations_known_p.
368 For function local types we must assume case where
369 the function is COMDAT and shared in between units.
370
371 TODO: These cases are quite easy to get, but we need
372 to keep track of C++ privatizing via -Wno-weak
373 as well as the IPA privatizing. */
374 && type_in_anonymous_namespace_p (t);
375 }
376
377 /* Return TRUE if type may have instance. */
378
379 static bool
380 type_possibly_instantiated_p (tree t)
381 {
382 tree vtable;
383 varpool_node *vnode;
384
385 /* TODO: Add abstract types here. */
386 if (!type_all_ctors_visible_p (t))
387 return true;
388
389 vtable = BINFO_VTABLE (TYPE_BINFO (t));
390 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
391 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
392 vnode = varpool_node::get (vtable);
393 return vnode && vnode->definition;
394 }
395
/* Hash used to unify ODR types based on their mangled name and for anonymous
   namespace types.  */

struct odr_name_hasher
{
  typedef odr_type_d *value_type;
  typedef union tree_node *compare_type;
  /* Hash an ODR type node by the mangled name of its leader type.  */
  static inline hashval_t hash (const odr_type_d *);
  /* Compare a stored ODR type node against a candidate tree type.  */
  static inline bool equal (const odr_type_d *, const tree_node *);
  /* Free the node when removed from the table.  */
  static inline void remove (odr_type_d *);
};
407
/* Hash used to unify ODR types based on their associated virtual table.
   This hash is needed to keep -fno-lto-odr-type-merging to work and contains
   only polymorphic types.  Types with mangled names are inserted to both.  */

struct odr_vtable_hasher:odr_name_hasher
{
  /* Hash by the assembler name of the virtual table (and offset).  */
  static inline hashval_t hash (const odr_type_d *);
  /* Compare two types by their virtual tables.  */
  static inline bool equal (const odr_type_d *, const tree_node *);
};
417
418 /* Return type that was declared with T's name so that T is an
419 qualified variant of it. */
420
421 static inline tree
422 main_odr_variant (const_tree t)
423 {
424 if (TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
425 return TREE_TYPE (TYPE_NAME (t));
426 /* Unnamed types and non-C++ produced types can be compared by variants. */
427 else
428 return TYPE_MAIN_VARIANT (t);
429 }
430
431 static bool
432 can_be_name_hashed_p (tree t)
433 {
434 return (!in_lto_p || odr_type_p (t));
435 }
436
/* Hash type by its ODR name.  */

static hashval_t
hash_odr_name (const_tree t)
{
  gcc_checking_assert (main_odr_variant (t) == t);

  /* If not in LTO, all main variants are unique, so we can do
     pointer hash.  */
  if (!in_lto_p)
    return htab_hash_pointer (t);

  /* Anonymous types are unique.  */
  if (type_with_linkage_p (t) && type_in_anonymous_namespace_p (t))
    return htab_hash_pointer (t);

  /* Otherwise hash the mangled name stored as the assembler name of
     TYPE_NAME so equivalent types from different units collide.  */
  gcc_checking_assert (TYPE_NAME (t)
		       && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t)));
  return IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (TYPE_NAME (t)));
}
457
/* Return the computed hashcode for ODR_TYPE.  */

inline hashval_t
odr_name_hasher::hash (const odr_type_d *odr_type)
{
  /* Delegate to the name-based hash of the leader type.  */
  return hash_odr_name (odr_type->type);
}
465
466 static bool
467 can_be_vtable_hashed_p (tree t)
468 {
469 /* vtable hashing can distinguish only main variants. */
470 if (TYPE_MAIN_VARIANT (t) != t)
471 return false;
472 /* Anonymous namespace types are always handled by name hash. */
473 if (type_with_linkage_p (t) && type_in_anonymous_namespace_p (t))
474 return false;
475 return (TREE_CODE (t) == RECORD_TYPE
476 && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)));
477 }
478
/* Hash type by assembler name of its vtable.  */

static hashval_t
hash_odr_vtable (const_tree t)
{
  tree v = BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (t)));
  inchash::hash hstate;

  /* This hash is only meaningful in LTO for polymorphic record main
     variants outside anonymous namespaces.  */
  gcc_checking_assert (in_lto_p);
  gcc_checking_assert (!type_in_anonymous_namespace_p (t));
  gcc_checking_assert (TREE_CODE (t) == RECORD_TYPE
		       && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)));
  gcc_checking_assert (main_odr_variant (t) == t);

  /* BINFO_VTABLE may be an offsetted pointer (POINTER_PLUS_EXPR); mix the
     offset into the hash and strip down to the vtable declaration.  */
  if (TREE_CODE (v) == POINTER_PLUS_EXPR)
    {
      add_expr (TREE_OPERAND (v, 1), hstate);
      v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
    }

  hstate.add_wide_int (IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (v)));
  return hstate.end ();
}
502
/* Return the computed hashcode for ODR_TYPE.  */

inline hashval_t
odr_vtable_hasher::hash (const odr_type_d *odr_type)
{
  /* Delegate to the vtable-based hash of the leader type.  */
  return hash_odr_vtable (odr_type->type);
}
510
511 /* For languages with One Definition Rule, work out if
512 types are the same based on their name.
513
514 This is non-trivial for LTO where minor differences in
515 the type representation may have prevented type merging
516 to merge two copies of otherwise equivalent type.
517
518 Until we start streaming mangled type names, this function works
519 only for polymorphic types.
520
521 When STRICT is true, we compare types by their names for purposes of
522 ODR violation warnings. When strict is false, we consider variants
523 equivalent, becuase it is all that matters for devirtualization machinery.
524 */
525
526 bool
527 types_same_for_odr (const_tree type1, const_tree type2, bool strict)
528 {
529 gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));
530
531 type1 = main_odr_variant (type1);
532 type2 = main_odr_variant (type2);
533 if (!strict)
534 {
535 type1 = TYPE_MAIN_VARIANT (type1);
536 type2 = TYPE_MAIN_VARIANT (type2);
537 }
538
539 if (type1 == type2)
540 return true;
541
542 if (!in_lto_p)
543 return false;
544
545 /* Check for anonymous namespaces. Those have !TREE_PUBLIC
546 on the corresponding TYPE_STUB_DECL. */
547 if ((type_with_linkage_p (type1) && type_in_anonymous_namespace_p (type1))
548 || (type_with_linkage_p (type2) && type_in_anonymous_namespace_p (type2)))
549 return false;
550
551
552 /* ODR name of the type is set in DECL_ASSEMBLER_NAME of its TYPE_NAME.
553
554 Ideally we should never need types without ODR names here. It can however
555 happen in two cases:
556
557 1) for builtin types that are not streamed but rebuilt in lto/lto-lang.c
558 Here testing for equivalence is safe, since their MAIN_VARIANTs are
559 unique.
560 2) for units streamed with -fno-lto-odr-type-merging. Here we can't
561 establish precise ODR equivalency, but for correctness we care only
562 about equivalency on complete polymorphic types. For these we can
563 compare assembler names of their virtual tables. */
564 if ((!TYPE_NAME (type1) || !DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type1)))
565 || (!TYPE_NAME (type2) || !DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type2))))
566 {
567 /* See if types are obviously different (i.e. different codes
568 or polymorphic wrt non-polymorphic). This is not strictly correct
569 for ODR violating programs, but we can't do better without streaming
570 ODR names. */
571 if (TREE_CODE (type1) != TREE_CODE (type2))
572 return false;
573 if (TREE_CODE (type1) == RECORD_TYPE
574 && (TYPE_BINFO (type1) == NULL_TREE)
575 != (TYPE_BINFO (type1) == NULL_TREE))
576 return false;
577 if (TREE_CODE (type1) == RECORD_TYPE && TYPE_BINFO (type1)
578 && (BINFO_VTABLE (TYPE_BINFO (type1)) == NULL_TREE)
579 != (BINFO_VTABLE (TYPE_BINFO (type2)) == NULL_TREE))
580 return false;
581
582 /* At the moment we have no way to establish ODR equivalence at LTO
583 other than comparing virtual table pointers of polymorphic types.
584 Eventually we should start saving mangled names in TYPE_NAME.
585 Then this condition will become non-trivial. */
586
587 if (TREE_CODE (type1) == RECORD_TYPE
588 && TYPE_BINFO (type1) && TYPE_BINFO (type2)
589 && BINFO_VTABLE (TYPE_BINFO (type1))
590 && BINFO_VTABLE (TYPE_BINFO (type2)))
591 {
592 tree v1 = BINFO_VTABLE (TYPE_BINFO (type1));
593 tree v2 = BINFO_VTABLE (TYPE_BINFO (type2));
594 gcc_assert (TREE_CODE (v1) == POINTER_PLUS_EXPR
595 && TREE_CODE (v2) == POINTER_PLUS_EXPR);
596 return (operand_equal_p (TREE_OPERAND (v1, 1),
597 TREE_OPERAND (v2, 1), 0)
598 && DECL_ASSEMBLER_NAME
599 (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
600 == DECL_ASSEMBLER_NAME
601 (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
602 }
603 gcc_unreachable ();
604 }
605 return (DECL_ASSEMBLER_NAME (TYPE_NAME (type1))
606 == DECL_ASSEMBLER_NAME (TYPE_NAME (type2)));
607 }
608
/* Return true if we can decide on ODR equivalency.

   In non-LTO it is always possible to decide; in LTO it depends on
   whether the types have ODR info attached (or are both polymorphic, so
   their virtual tables can be compared).

   When STRICT is false, compare main variants.  */

bool
types_odr_comparable (tree t1, tree t2, bool strict)
{
  return (!in_lto_p
	  || (strict ? main_odr_variant (t1) == main_odr_variant (t2)
	     : TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
	  || (odr_type_p (t1) && odr_type_p (t2))
	  || (TREE_CODE (t1) == RECORD_TYPE && TREE_CODE (t2) == RECORD_TYPE
	      && TYPE_BINFO (t1) && TYPE_BINFO (t2)
	      && polymorphic_type_binfo_p (TYPE_BINFO (t1))
	      && polymorphic_type_binfo_p (TYPE_BINFO (t2))));
}
628
629 /* Return true if T1 and T2 are ODR equivalent. If ODR equivalency is not
630 known, be conservative and return false. */
631
632 bool
633 types_must_be_same_for_odr (tree t1, tree t2)
634 {
635 if (types_odr_comparable (t1, t2))
636 return types_same_for_odr (t1, t2);
637 else
638 return TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2);
639 }
640
641 /* If T is compound type, return type it is based on. */
642
643 static tree
644 compound_type_base (const_tree t)
645 {
646 if (TREE_CODE (t) == ARRAY_TYPE
647 || POINTER_TYPE_P (t)
648 || TREE_CODE (t) == COMPLEX_TYPE
649 || VECTOR_TYPE_P (t))
650 return TREE_TYPE (t);
651 if (TREE_CODE (t) == METHOD_TYPE)
652 return TYPE_METHOD_BASETYPE (t);
653 if (TREE_CODE (t) == OFFSET_TYPE)
654 return TYPE_OFFSET_BASETYPE (t);
655 return NULL_TREE;
656 }
657
/* Return true if T is either ODR type or compound type based from it.
   If the function return true, we know that T is a type originating from C++
   source even at link-time.  */

bool
odr_or_derived_type_p (const_tree t)
{
  /* Peel compound layers (pointer, array, method, ...) until an ODR type
     is found or no further base type exists.  */
  do
    {
      if (odr_type_p (t))
	return true;
      /* Function type is a tricky one. Basically we can consider it
	 ODR derived if return type or any of the parameters is.
	 We need to check all parameters because LTO streaming merges
	 common types (such as void) and they are not considered ODR then.  */
      if (TREE_CODE (t) == FUNCTION_TYPE)
	{
	  if (TYPE_METHOD_BASETYPE (t))
	    t = TYPE_METHOD_BASETYPE (t);
	  else
	   {
	    if (TREE_TYPE (t) && odr_or_derived_type_p (TREE_TYPE (t)))
	      return true;
	    for (t = TYPE_ARG_TYPES (t); t; t = TREE_CHAIN (t))
	      if (odr_or_derived_type_p (TREE_VALUE (t)))
		return true;
	    return false;
	   }
	}
      else
	t = compound_type_base (t);
    }
  while (t);
  /* T is NULL here: all compound layers peeled without finding an ODR
     type, so this converts to false.  */
  return t;
}
693
/* Compare types T1 and T2 and return true if they are
   equivalent.  */

inline bool
odr_name_hasher::equal (const odr_type_d *o1, const tree_node *t2)
{
  tree t1 = o1->type;

  gcc_checking_assert (main_odr_variant (t2) == t2);
  gcc_checking_assert (main_odr_variant (t1) == t1);
  if (t1 == t2)
    return true;
  /* Outside LTO distinct trees are distinct types.  */
  if (!in_lto_p)
    return false;
  /* Check for anonymous namespaces. Those have !TREE_PUBLIC
     on the corresponding TYPE_STUB_DECL.  */
  if ((type_with_linkage_p (t1) && type_in_anonymous_namespace_p (t1))
      || (type_with_linkage_p (t2) && type_in_anonymous_namespace_p (t2)))
    return false;
  /* In LTO two distinct trees name the same ODR type when their mangled
     names (assembler names of TYPE_NAME) are identical.  */
  gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t1)));
  gcc_checking_assert (DECL_ASSEMBLER_NAME (TYPE_NAME (t2)));
  return (DECL_ASSEMBLER_NAME (TYPE_NAME (t1))
	  == DECL_ASSEMBLER_NAME (TYPE_NAME (t2)));
}
718
/* Compare types T1 and T2 and return true if they are
   equivalent.  */

inline bool
odr_vtable_hasher::equal (const odr_type_d *o1, const tree_node *t2)
{
  tree t1 = o1->type;

  gcc_checking_assert (main_odr_variant (t2) == t2);
  gcc_checking_assert (main_odr_variant (t1) == t1);
  gcc_checking_assert (in_lto_p);
  t1 = TYPE_MAIN_VARIANT (t1);
  t2 = TYPE_MAIN_VARIANT (t2);
  if (t1 == t2)
    return true;
  /* Equal when both the offset within the vtable (operand 1) and the
     assembler name of the underlying vtable declaration match.  */
  tree v1 = BINFO_VTABLE (TYPE_BINFO (t1));
  tree v2 = BINFO_VTABLE (TYPE_BINFO (t2));
  return (operand_equal_p (TREE_OPERAND (v1, 1),
			   TREE_OPERAND (v2, 1), 0)
	  && DECL_ASSEMBLER_NAME
		 (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
	     == DECL_ASSEMBLER_NAME
		 (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
}
743
744 /* Free ODR type V. */
745
746 inline void
747 odr_name_hasher::remove (odr_type_d *v)
748 {
749 v->bases.release ();
750 v->derived_types.release ();
751 if (v->types_set)
752 delete v->types_set;
753 ggc_free (v);
754 }
755
/* ODR type hash used to look up ODR type based on tree type node.  */

typedef hash_table<odr_name_hasher> odr_hash_type;
static odr_hash_type *odr_hash;
/* Secondary hash keyed by virtual table, for -fno-lto-odr-type-merging.  */
typedef hash_table<odr_vtable_hasher> odr_vtable_hash_type;
static odr_vtable_hash_type *odr_vtable_hash;

/* ODR types are also stored into ODR_TYPE vector to allow consistent
   walking.  Bases appear before derived types.  Vector is garbage collected
   so we won't end up visiting empty types.  */

static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
#define odr_types (*odr_types_ptr)
769
770 /* Set TYPE_BINFO of TYPE and its variants to BINFO. */
771 void
772 set_type_binfo (tree type, tree binfo)
773 {
774 for (; type; type = TYPE_NEXT_VARIANT (type))
775 if (COMPLETE_TYPE_P (type))
776 TYPE_BINFO (type) = binfo;
777 else
778 gcc_assert (!TYPE_BINFO (type));
779 }
780
/* Compare T1 and T2 based on name or structure.  */

static bool
odr_subtypes_equivalent_p (tree t1, tree t2,
			   hash_set<type_pair,pair_traits> *visited)
{

  /* This can happen in incomplete types that should be handled earlier.  */
  gcc_assert (t1 && t2);

  t1 = main_odr_variant (t1);
  t2 = main_odr_variant (t2);
  if (t1 == t2)
    return true;

  /* Anonymous namespace types must match exactly.  */
  if ((type_with_linkage_p (t1) && type_in_anonymous_namespace_p (t1))
      || (type_with_linkage_p (t2) && type_in_anonymous_namespace_p (t2)))
    return false;

  /* For ODR types be sure to compare their names.
     To support -Wno-odr-type-merging we allow one type to be non-ODR
     and other ODR even though it is a violation.  */
  if (types_odr_comparable (t1, t2, true))
    {
      if (!types_same_for_odr (t1, t2, true))
	return false;
      /* Limit recursion: If subtypes are ODR types and we know
	 that they are same, be happy.  */
      if (!odr_type_p (t1) || !get_odr_type (t1, true)->odr_violated)
	return true;
    }

  /* Component types, builtins and possibly violating ODR types
     have to be compared structurally.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;
  if (AGGREGATE_TYPE_P (t1)
      && (TYPE_NAME (t1) == NULL_TREE) != (TYPE_NAME (t2) == NULL_TREE))
    return false;

  /* Canonicalize the pair by TYPE_UID so (t1,t2) and (t2,t1) map to the
     same VISITED entry; if we already started comparing this pair, assume
     equivalence to break the recursion.  */
  type_pair pair={t1,t2};
  if (TYPE_UID (t1) > TYPE_UID (t2))
    {
      pair.first = t2;
      pair.second = t1;
    }
  if (visited->add (pair))
    return true;
  return odr_types_equivalent_p (t1, t2, false, NULL, visited);
}
832
833 /* Compare two virtual tables, PREVAILING and VTABLE and output ODR
834 violation warnings. */
835
836 void
837 compare_virtual_tables (varpool_node *prevailing, varpool_node *vtable)
838 {
839 int n1, n2;
840
841 if (DECL_VIRTUAL_P (prevailing->decl) != DECL_VIRTUAL_P (vtable->decl))
842 {
843 odr_violation_reported = true;
844 if (DECL_VIRTUAL_P (prevailing->decl))
845 {
846 varpool_node *tmp = prevailing;
847 prevailing = vtable;
848 vtable = tmp;
849 }
850 if (warning_at (DECL_SOURCE_LOCATION
851 (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
852 OPT_Wodr,
853 "virtual table of type %qD violates one definition rule",
854 DECL_CONTEXT (vtable->decl)))
855 inform (DECL_SOURCE_LOCATION (prevailing->decl),
856 "variable of same assembler name as the virtual table is "
857 "defined in another translation unit");
858 return;
859 }
860 if (!prevailing->definition || !vtable->definition)
861 return;
862
863 /* If we do not stream ODR type info, do not bother to do useful compare. */
864 if (!TYPE_BINFO (DECL_CONTEXT (vtable->decl))
865 || !polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (vtable->decl))))
866 return;
867
868 odr_type class_type = get_odr_type (DECL_CONTEXT (vtable->decl), true);
869
870 if (class_type->odr_violated)
871 return;
872
873 for (n1 = 0, n2 = 0; true; n1++, n2++)
874 {
875 struct ipa_ref *ref1, *ref2;
876 bool end1, end2;
877
878 end1 = !prevailing->iterate_reference (n1, ref1);
879 end2 = !vtable->iterate_reference (n2, ref2);
880
881 /* !DECL_VIRTUAL_P means RTTI entry;
882 We warn when RTTI is lost because non-RTTI previals; we silently
883 accept the other case. */
884 while (!end2
885 && (end1
886 || (DECL_ASSEMBLER_NAME (ref1->referred->decl)
887 != DECL_ASSEMBLER_NAME (ref2->referred->decl)
888 && TREE_CODE (ref1->referred->decl) == FUNCTION_DECL))
889 && TREE_CODE (ref2->referred->decl) != FUNCTION_DECL)
890 {
891 if (!class_type->rtti_broken
892 && warning_at (DECL_SOURCE_LOCATION
893 (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
894 OPT_Wodr,
895 "virtual table of type %qD contains RTTI "
896 "information",
897 DECL_CONTEXT (vtable->decl)))
898 {
899 inform (DECL_SOURCE_LOCATION
900 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
901 "but is prevailed by one without from other translation "
902 "unit");
903 inform (DECL_SOURCE_LOCATION
904 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
905 "RTTI will not work on this type");
906 class_type->rtti_broken = true;
907 }
908 n2++;
909 end2 = !vtable->iterate_reference (n2, ref2);
910 }
911 while (!end1
912 && (end2
913 || (DECL_ASSEMBLER_NAME (ref2->referred->decl)
914 != DECL_ASSEMBLER_NAME (ref1->referred->decl)
915 && TREE_CODE (ref2->referred->decl) == FUNCTION_DECL))
916 && TREE_CODE (ref1->referred->decl) != FUNCTION_DECL)
917 {
918 n1++;
919 end1 = !prevailing->iterate_reference (n1, ref1);
920 }
921
922 /* Finished? */
923 if (end1 && end2)
924 {
925 /* Extra paranoia; compare the sizes. We do not have information
926 about virtual inheritance offsets, so just be sure that these
927 match.
928 Do this as very last check so the not very informative error
929 is not output too often. */
930 if (DECL_SIZE (prevailing->decl) != DECL_SIZE (vtable->decl))
931 {
932 class_type->odr_violated = true;
933 if (warning_at (DECL_SOURCE_LOCATION
934 (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
935 OPT_Wodr,
936 "virtual table of type %qD violates "
937 "one definition rule ",
938 DECL_CONTEXT (vtable->decl)))
939 {
940 inform (DECL_SOURCE_LOCATION
941 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
942 "the conflicting type defined in another translation "
943 "unit has virtual table of different size");
944 }
945 }
946 return;
947 }
948
949 if (!end1 && !end2)
950 {
951 if (DECL_ASSEMBLER_NAME (ref1->referred->decl)
952 == DECL_ASSEMBLER_NAME (ref2->referred->decl))
953 continue;
954
955 class_type->odr_violated = true;
956
957 /* If the loops above stopped on non-virtual pointer, we have
958 mismatch in RTTI information mangling. */
959 if (TREE_CODE (ref1->referred->decl) != FUNCTION_DECL
960 && TREE_CODE (ref2->referred->decl) != FUNCTION_DECL)
961 {
962 if (warning_at (DECL_SOURCE_LOCATION
963 (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
964 OPT_Wodr,
965 "virtual table of type %qD violates "
966 "one definition rule ",
967 DECL_CONTEXT (vtable->decl)))
968 {
969 inform (DECL_SOURCE_LOCATION
970 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
971 "the conflicting type defined in another translation "
972 "unit with different RTTI information");
973 }
974 return;
975 }
976 /* At this point both REF1 and REF2 points either to virtual table
977 or virtual method. If one points to virtual table and other to
978 method we can complain the same way as if one table was shorter
979 than other pointing out the extra method. */
980 if (TREE_CODE (ref1->referred->decl)
981 != TREE_CODE (ref2->referred->decl))
982 {
983 if (TREE_CODE (ref1->referred->decl) == VAR_DECL)
984 end1 = true;
985 else if (TREE_CODE (ref2->referred->decl) == VAR_DECL)
986 end2 = true;
987 }
988 }
989
990 class_type->odr_violated = true;
991
992 /* Complain about size mismatch. Either we have too many virutal
993 functions or too many virtual table pointers. */
994 if (end1 || end2)
995 {
996 if (end1)
997 {
998 varpool_node *tmp = prevailing;
999 prevailing = vtable;
1000 vtable = tmp;
1001 ref1 = ref2;
1002 }
1003 if (warning_at (DECL_SOURCE_LOCATION
1004 (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
1005 OPT_Wodr,
1006 "virtual table of type %qD violates "
1007 "one definition rule",
1008 DECL_CONTEXT (vtable->decl)))
1009 {
1010 if (TREE_CODE (ref1->referring->decl) == FUNCTION_DECL)
1011 {
1012 inform (DECL_SOURCE_LOCATION
1013 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
1014 "the conflicting type defined in another translation "
1015 "unit");
1016 inform (DECL_SOURCE_LOCATION
1017 (TYPE_NAME (DECL_CONTEXT (ref1->referring->decl))),
1018 "contains additional virtual method %qD",
1019 ref1->referred->decl);
1020 }
1021 else
1022 {
1023 inform (DECL_SOURCE_LOCATION
1024 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
1025 "the conflicting type defined in another translation "
1026 "unit has virtual table table with more entries");
1027 }
1028 }
1029 return;
1030 }
1031
1032 /* And in the last case we have either mistmatch in between two virtual
1033 methods or two virtual table pointers. */
1034 if (warning_at (DECL_SOURCE_LOCATION
1035 (TYPE_NAME (DECL_CONTEXT (vtable->decl))), OPT_Wodr,
1036 "virtual table of type %qD violates "
1037 "one definition rule ",
1038 DECL_CONTEXT (vtable->decl)))
1039 {
1040 if (TREE_CODE (ref1->referred->decl) == FUNCTION_DECL)
1041 {
1042 inform (DECL_SOURCE_LOCATION
1043 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
1044 "the conflicting type defined in another translation "
1045 "unit");
1046 gcc_assert (TREE_CODE (ref2->referred->decl)
1047 == FUNCTION_DECL);
1048 inform (DECL_SOURCE_LOCATION (ref1->referred->decl),
1049 "virtual method %qD", ref1->referred->decl);
1050 inform (DECL_SOURCE_LOCATION (ref2->referred->decl),
1051 "ought to match virtual method %qD but does not",
1052 ref2->referred->decl);
1053 }
1054 else
1055 inform (DECL_SOURCE_LOCATION
1056 (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
1057 "the conflicting type defined in another translation "
1058 "unit has virtual table table with different contents");
1059 return;
1060 }
1061 }
1062 }
1063
/* Output ODR violation warning about T1 and T2 with REASON.
   Display location of ST1 and ST2 if REASON speaks about field or
   method of the type.
   If WARN is false, do nothing.  Set WARNED if warning was indeed
   output.  */

void
warn_odr (tree t1, tree t2, tree st1, tree st2,
	  bool warn, bool *warned, const char *reason)
{
  tree decl2 = TYPE_NAME (t2);
  /* Start pessimistically; *WARNED is set only after a diagnostic was
     actually emitted.  */
  if (warned)
    *warned = false;

  /* Nothing to do when warnings are disabled or when T1 has no name to
     anchor the diagnostic location to.  */
  if (!warn || !TYPE_NAME(t1))
    return;

  /* ODR warnings are output during LTO streaming; we must apply location
     cache for potential warnings to be output correctly.  */
  if (lto_location_cache::current_cache)
    lto_location_cache::current_cache->apply_location_cache ();

  /* Primary warning.  If it was not printed (suppressed), skip the
     follow-up notes as well.  */
  if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (t1)), OPT_Wodr,
		   "type %qT violates one definition rule",
		   t1))
    return;
  if (!st1 && !st2)
    ;
  /* For FIELD_DECL support also case where one of fields is
     NULL - this is used when the structures have mismatching number of
     elements.  */
  else if (!st1 || TREE_CODE (st1) == FIELD_DECL)
    {
      inform (DECL_SOURCE_LOCATION (decl2),
	      "a different type is defined in another translation unit");
      /* Canonicalize so ST1 is the non-NULL field we can report.  */
      if (!st1)
	{
	  st1 = st2;
	  st2 = NULL;
	}
      inform (DECL_SOURCE_LOCATION (st1),
	      "the first difference of corresponding definitions is field %qD",
	      st1);
      /* Point the final REASON note at the other field when we have it.  */
      if (st2)
        decl2 = st2;
    }
  else if (TREE_CODE (st1) == FUNCTION_DECL)
    {
      inform (DECL_SOURCE_LOCATION (decl2),
	      "a different type is defined in another translation unit");
      inform (DECL_SOURCE_LOCATION (st1),
	      "the first difference of corresponding definitions is method %qD",
	      st1);
      decl2 = st2;
    }
  else
    return;
  /* Finally explain why the types conflict, at the most specific location
     determined above.  */
  inform (DECL_SOURCE_LOCATION (decl2), reason);

  if (warned)
    *warned = true;
}
1126
1127 /* We already warned about ODR mismatch. T1 and T2 ought to be equivalent
1128 because they are used on same place in ODR matching types.
1129 They are not; inform the user. */
1130
1131 void
1132 warn_types_mismatch (tree t1, tree t2)
1133 {
1134 /* If types have names and they are different, it is most informative to
1135 output those. */
1136 if (TYPE_NAME (t1) && TYPE_NAME (t2)
1137 && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t1))
1138 && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t2))
1139 && DECL_ASSEMBLER_NAME (TYPE_NAME (t1))
1140 != DECL_ASSEMBLER_NAME (TYPE_NAME (t2)))
1141 {
1142 char *name1 = xstrdup (cplus_demangle
1143 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (TYPE_NAME (t1))),
1144 DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES));
1145 char *name2 = cplus_demangle
1146 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (TYPE_NAME (t2))),
1147 DMGL_PARAMS | DMGL_ANSI | DMGL_TYPES);
1148 if (name1 && name2 && strcmp (name1, name2))
1149 {
1150 inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
1151 "type name %<%s%> should match type name %<%s%>",
1152 name1, name2);
1153 inform (DECL_SOURCE_LOCATION (TYPE_NAME (t2)),
1154 "the incompatible type is defined here");
1155 free (name1);
1156 return;
1157 }
1158 free (name1);
1159 }
1160 /* It is a quite common bug to reference anonymous namespace type in
1161 non-anonymous namespace class. */
1162 if ((type_with_linkage_p (t1) && type_in_anonymous_namespace_p (t1))
1163 || (type_with_linkage_p (t2) && type_in_anonymous_namespace_p (t2)))
1164 {
1165 if (type_with_linkage_p (t1) && !type_in_anonymous_namespace_p (t1))
1166 {
1167 tree tmp = t1;;
1168 t1 = t2;
1169 t2 = tmp;
1170 }
1171 if (TYPE_NAME (t1) && TYPE_NAME (t2)
1172 && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
1173 && TREE_CODE (TYPE_NAME (t2)) == TYPE_DECL)
1174 {
1175 inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
1176 "type %qT defined in anonymous namespace can not match "
1177 "type %qT",
1178 t1, t2);
1179 inform (DECL_SOURCE_LOCATION (TYPE_NAME (t2)),
1180 "the incompatible type defined in anonymous namespace in "
1181 "another translation unit");
1182 }
1183 else
1184 inform (UNKNOWN_LOCATION,
1185 "types in anonymous namespace does not match across "
1186 "translation unit boundary");
1187 return;
1188 }
1189 /* A tricky case are component types. Often they appear the same in source
1190 code and the mismatch is dragged in by type they are build from.
1191 Look for those differences in subtypes and try to be informative. In other
1192 cases just output nothing because the source code is probably different
1193 and in this case we already output a all necessary info. */
1194 if (!TYPE_NAME (t1) || !TYPE_NAME (t2))
1195 {
1196 if (TREE_CODE (t1) == TREE_CODE (t2))
1197 {
1198 hash_set<type_pair,pair_traits> visited;
1199 if (TREE_CODE (t1) == ARRAY_TYPE
1200 && COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
1201 {
1202 tree i1 = TYPE_DOMAIN (t1);
1203 tree i2 = TYPE_DOMAIN (t2);
1204
1205 if (i1 && i2
1206 && TYPE_MAX_VALUE (i1)
1207 && TYPE_MAX_VALUE (i2)
1208 && !operand_equal_p (TYPE_MAX_VALUE (i1),
1209 TYPE_MAX_VALUE (i2), 0))
1210 {
1211 inform (UNKNOWN_LOCATION,
1212 "array types have different bounds");
1213 return;
1214 }
1215 }
1216 if ((POINTER_TYPE_P (t1) || TREE_CODE (t1) == ARRAY_TYPE)
1217 && !odr_subtypes_equivalent_p (TREE_TYPE (t1),
1218 TREE_TYPE (t2),
1219 &visited))
1220 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
1221 else if (TREE_CODE (t1) == METHOD_TYPE
1222 || TREE_CODE (t1) == FUNCTION_TYPE)
1223 {
1224 tree parms1 = NULL, parms2 = NULL;
1225 int count = 1;
1226
1227 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
1228 &visited))
1229 {
1230 inform (UNKNOWN_LOCATION, "return value type mismatch");
1231 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
1232 return;
1233 }
1234 if (prototype_p (t1) && prototype_p (t2))
1235 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
1236 parms1 && parms2;
1237 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2),
1238 count++)
1239 {
1240 if (!odr_subtypes_equivalent_p
1241 (TREE_VALUE (parms1), TREE_VALUE (parms2), &visited))
1242 {
1243 if (count == 1 && TREE_CODE (t1) == METHOD_TYPE)
1244 inform (UNKNOWN_LOCATION,
1245 "implicit this pointer type mismatch");
1246 else
1247 inform (UNKNOWN_LOCATION,
1248 "type mismatch in parameter %i",
1249 count - (TREE_CODE (t1) == METHOD_TYPE));
1250 warn_types_mismatch (TREE_VALUE (parms1),
1251 TREE_VALUE (parms2));
1252 return;
1253 }
1254 }
1255 if (parms1 || parms2)
1256 {
1257 inform (UNKNOWN_LOCATION,
1258 "types have different parameter counts");
1259 return;
1260 }
1261 }
1262 }
1263 return;
1264 }
1265 /* This should not happen but if it does, the warning would not be helpful.
1266 TODO: turn it into assert next stage1. */
1267 if (TYPE_NAME (t1) == TYPE_NAME (t2))
1268 return;
1269 /* In Firefox it is a common bug to have same types but in
1270 different namespaces. Be a bit more informative on
1271 this. */
1272 if (TYPE_CONTEXT (t1) && TYPE_CONTEXT (t2)
1273 && (((TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL)
1274 != (TREE_CODE (TYPE_CONTEXT (t2)) == NAMESPACE_DECL))
1275 || (TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL
1276 && (DECL_NAME (TYPE_CONTEXT (t1)) !=
1277 DECL_NAME (TYPE_CONTEXT (t2))))))
1278 inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
1279 "type %qT should match type %qT but is defined "
1280 "in different namespace ",
1281 t1, t2);
1282 else if (types_odr_comparable (t1, t2, true)
1283 && types_same_for_odr (t1, t2, true))
1284 inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
1285 "type %qT should match type %qT that itself violate "
1286 "one definition rule",
1287 t1, t2);
1288 else
1289 inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
1290 "type %qT should match type %qT",
1291 t1, t2);
1292 if (DECL_SOURCE_LOCATION (TYPE_NAME (t2)) > BUILTINS_LOCATION)
1293 inform (DECL_SOURCE_LOCATION (TYPE_NAME (t2)),
1294 "the incompatible type is defined here");
1295 }
1296
1297 /* Compare T1 and T2, report ODR violations if WARN is true and set
1298 WARNED to true if anything is reported. Return true if types match.
1299 If true is returned, the types are also compatible in the sense of
1300 gimple_canonical_types_compatible_p. */
1301
1302 static bool
1303 odr_types_equivalent_p (tree t1, tree t2, bool warn, bool *warned,
1304 hash_set<type_pair,pair_traits> *visited)
1305 {
1306 /* Check first for the obvious case of pointer identity. */
1307 if (t1 == t2)
1308 return true;
1309 gcc_assert (!type_with_linkage_p (t1) || !type_in_anonymous_namespace_p (t1));
1310 gcc_assert (!type_with_linkage_p (t2) || !type_in_anonymous_namespace_p (t2));
1311
1312 /* Can't be the same type if the types don't have the same code. */
1313 if (TREE_CODE (t1) != TREE_CODE (t2))
1314 {
1315 warn_odr (t1, t2, NULL, NULL, warn, warned,
1316 G_("a different type is defined in another translation unit"));
1317 return false;
1318 }
1319
1320 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
1321 {
1322 warn_odr (t1, t2, NULL, NULL, warn, warned,
1323 G_("a type with different qualifiers is defined in another "
1324 "translation unit"));
1325 return false;
1326 }
1327
1328 if ((type_with_linkage_p (t1) && type_in_anonymous_namespace_p (t1))
1329 || (type_with_linkage_p (t2) && type_in_anonymous_namespace_p (t2)))
1330 {
1331 /* We can not trip this when comparing ODR types, only when trying to
1332 match different ODR derivations from different declarations.
1333 So WARN should be always false. */
1334 gcc_assert (!warn);
1335 return false;
1336 }
1337
1338 if (comp_type_attributes (t1, t2) != 1)
1339 {
1340 warn_odr (t1, t2, NULL, NULL, warn, warned,
1341 G_("a type with different attributes "
1342 "is defined in another translation unit"));
1343 return false;
1344 }
1345
1346 if (TREE_CODE (t1) == ENUMERAL_TYPE
1347 && TYPE_VALUES (t1) && TYPE_VALUES (t2))
1348 {
1349 tree v1, v2;
1350 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
1351 v1 && v2 ; v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
1352 {
1353 if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
1354 {
1355 warn_odr (t1, t2, NULL, NULL, warn, warned,
1356 G_("an enum with different value name"
1357 " is defined in another translation unit"));
1358 return false;
1359 }
1360 if (TREE_VALUE (v1) != TREE_VALUE (v2)
1361 && !operand_equal_p (DECL_INITIAL (TREE_VALUE (v1)),
1362 DECL_INITIAL (TREE_VALUE (v2)), 0))
1363 {
1364 warn_odr (t1, t2, NULL, NULL, warn, warned,
1365 G_("an enum with different values is defined"
1366 " in another translation unit"));
1367 return false;
1368 }
1369 }
1370 if (v1 || v2)
1371 {
1372 warn_odr (t1, t2, NULL, NULL, warn, warned,
1373 G_("an enum with mismatching number of values "
1374 "is defined in another translation unit"));
1375 return false;
1376 }
1377 }
1378
1379 /* Non-aggregate types can be handled cheaply. */
1380 if (INTEGRAL_TYPE_P (t1)
1381 || SCALAR_FLOAT_TYPE_P (t1)
1382 || FIXED_POINT_TYPE_P (t1)
1383 || TREE_CODE (t1) == VECTOR_TYPE
1384 || TREE_CODE (t1) == COMPLEX_TYPE
1385 || TREE_CODE (t1) == OFFSET_TYPE
1386 || POINTER_TYPE_P (t1))
1387 {
1388 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
1389 {
1390 warn_odr (t1, t2, NULL, NULL, warn, warned,
1391 G_("a type with different precision is defined "
1392 "in another translation unit"));
1393 return false;
1394 }
1395 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
1396 {
1397 warn_odr (t1, t2, NULL, NULL, warn, warned,
1398 G_("a type with different signedness is defined "
1399 "in another translation unit"));
1400 return false;
1401 }
1402
1403 if (TREE_CODE (t1) == INTEGER_TYPE
1404 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
1405 {
1406 /* char WRT uint_8? */
1407 warn_odr (t1, t2, NULL, NULL, warn, warned,
1408 G_("a different type is defined in another "
1409 "translation unit"));
1410 return false;
1411 }
1412
1413 /* For canonical type comparisons we do not want to build SCCs
1414 so we cannot compare pointed-to types. But we can, for now,
1415 require the same pointed-to type kind and match what
1416 useless_type_conversion_p would do. */
1417 if (POINTER_TYPE_P (t1))
1418 {
1419 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
1420 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
1421 {
1422 warn_odr (t1, t2, NULL, NULL, warn, warned,
1423 G_("it is defined as a pointer in different address "
1424 "space in another translation unit"));
1425 return false;
1426 }
1427
1428 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
1429 {
1430 warn_odr (t1, t2, NULL, NULL, warn, warned,
1431 G_("it is defined as a pointer to different type "
1432 "in another translation unit"));
1433 if (warn && warned)
1434 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
1435 return false;
1436 }
1437 }
1438
1439 if ((TREE_CODE (t1) == VECTOR_TYPE || TREE_CODE (t1) == COMPLEX_TYPE)
1440 && !odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
1441 {
1442 /* Probably specific enough. */
1443 warn_odr (t1, t2, NULL, NULL, warn, warned,
1444 G_("a different type is defined "
1445 "in another translation unit"));
1446 if (warn && warned)
1447 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
1448 return false;
1449 }
1450 }
1451 /* Do type-specific comparisons. */
1452 else switch (TREE_CODE (t1))
1453 {
1454 case ARRAY_TYPE:
1455 {
1456 /* Array types are the same if the element types are the same and
1457 the number of elements are the same. */
1458 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
1459 {
1460 warn_odr (t1, t2, NULL, NULL, warn, warned,
1461 G_("a different type is defined in another "
1462 "translation unit"));
1463 if (warn && warned)
1464 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
1465 }
1466 gcc_assert (TYPE_STRING_FLAG (t1) == TYPE_STRING_FLAG (t2));
1467 gcc_assert (TYPE_NONALIASED_COMPONENT (t1)
1468 == TYPE_NONALIASED_COMPONENT (t2));
1469
1470 tree i1 = TYPE_DOMAIN (t1);
1471 tree i2 = TYPE_DOMAIN (t2);
1472
1473 /* For an incomplete external array, the type domain can be
1474 NULL_TREE. Check this condition also. */
1475 if (i1 == NULL_TREE || i2 == NULL_TREE)
1476 return true;
1477
1478 tree min1 = TYPE_MIN_VALUE (i1);
1479 tree min2 = TYPE_MIN_VALUE (i2);
1480 tree max1 = TYPE_MAX_VALUE (i1);
1481 tree max2 = TYPE_MAX_VALUE (i2);
1482
1483 /* In C++, minimums should be always 0. */
1484 gcc_assert (min1 == min2);
1485 if (!operand_equal_p (max1, max2, 0))
1486 {
1487 warn_odr (t1, t2, NULL, NULL, warn, warned,
1488 G_("an array of different size is defined "
1489 "in another translation unit"));
1490 return false;
1491 }
1492 }
1493 break;
1494
1495 case METHOD_TYPE:
1496 case FUNCTION_TYPE:
1497 /* Function types are the same if the return type and arguments types
1498 are the same. */
1499 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
1500 {
1501 warn_odr (t1, t2, NULL, NULL, warn, warned,
1502 G_("has different return value "
1503 "in another translation unit"));
1504 if (warn && warned)
1505 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
1506 return false;
1507 }
1508
1509 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2)
1510 || !prototype_p (t1) || !prototype_p (t2))
1511 return true;
1512 else
1513 {
1514 tree parms1, parms2;
1515
1516 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
1517 parms1 && parms2;
1518 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
1519 {
1520 if (!odr_subtypes_equivalent_p
1521 (TREE_VALUE (parms1), TREE_VALUE (parms2), visited))
1522 {
1523 warn_odr (t1, t2, NULL, NULL, warn, warned,
1524 G_("has different parameters in another "
1525 "translation unit"));
1526 if (warn && warned)
1527 warn_types_mismatch (TREE_VALUE (parms1),
1528 TREE_VALUE (parms2));
1529 return false;
1530 }
1531 }
1532
1533 if (parms1 || parms2)
1534 {
1535 warn_odr (t1, t2, NULL, NULL, warn, warned,
1536 G_("has different parameters "
1537 "in another translation unit"));
1538 return false;
1539 }
1540
1541 return true;
1542 }
1543
1544 case RECORD_TYPE:
1545 case UNION_TYPE:
1546 case QUAL_UNION_TYPE:
1547 {
1548 tree f1, f2;
1549
1550 /* For aggregate types, all the fields must be the same. */
1551 if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
1552 {
1553 if (TYPE_BINFO (t1) && TYPE_BINFO (t2)
1554 && polymorphic_type_binfo_p (TYPE_BINFO (t1))
1555 != polymorphic_type_binfo_p (TYPE_BINFO (t2)))
1556 {
1557 if (polymorphic_type_binfo_p (TYPE_BINFO (t1)))
1558 warn_odr (t1, t2, NULL, NULL, warn, warned,
1559 G_("a type defined in another translation unit "
1560 "is not polymorphic"));
1561 else
1562 warn_odr (t1, t2, NULL, NULL, warn, warned,
1563 G_("a type defined in another translation unit "
1564 "is polymorphic"));
1565 return false;
1566 }
1567 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
1568 f1 || f2;
1569 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
1570 {
1571 /* Skip non-fields. */
1572 while (f1 && TREE_CODE (f1) != FIELD_DECL)
1573 f1 = TREE_CHAIN (f1);
1574 while (f2 && TREE_CODE (f2) != FIELD_DECL)
1575 f2 = TREE_CHAIN (f2);
1576 if (!f1 || !f2)
1577 break;
1578 if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
1579 {
1580 warn_odr (t1, t2, NULL, NULL, warn, warned,
1581 G_("a type with different virtual table pointers"
1582 " is defined in another translation unit"));
1583 return false;
1584 }
1585 if (DECL_ARTIFICIAL (f1) != DECL_ARTIFICIAL (f2))
1586 {
1587 warn_odr (t1, t2, NULL, NULL, warn, warned,
1588 G_("a type with different bases is defined "
1589 "in another translation unit"));
1590 return false;
1591 }
1592 if (DECL_NAME (f1) != DECL_NAME (f2)
1593 && !DECL_ARTIFICIAL (f1))
1594 {
1595 warn_odr (t1, t2, f1, f2, warn, warned,
1596 G_("a field with different name is defined "
1597 "in another translation unit"));
1598 return false;
1599 }
1600 if (!odr_subtypes_equivalent_p (TREE_TYPE (f1),
1601 TREE_TYPE (f2), visited))
1602 {
1603 /* Do not warn about artificial fields and just go into
1604 generic field mismatch warning. */
1605 if (DECL_ARTIFICIAL (f1))
1606 break;
1607
1608 warn_odr (t1, t2, f1, f2, warn, warned,
1609 G_("a field of same name but different type "
1610 "is defined in another translation unit"));
1611 if (warn && warned)
1612 warn_types_mismatch (TREE_TYPE (f1), TREE_TYPE (f2));
1613 return false;
1614 }
1615 if (!gimple_compare_field_offset (f1, f2))
1616 {
1617 /* Do not warn about artificial fields and just go into
1618 generic field mismatch warning. */
1619 if (DECL_ARTIFICIAL (f1))
1620 break;
1621 warn_odr (t1, t2, f1, f2, warn, warned,
1622 G_("fields has different layout "
1623 "in another translation unit"));
1624 return false;
1625 }
1626 gcc_assert (DECL_NONADDRESSABLE_P (f1)
1627 == DECL_NONADDRESSABLE_P (f2));
1628 }
1629
1630 /* If one aggregate has more fields than the other, they
1631 are not the same. */
1632 if (f1 || f2)
1633 {
1634 if ((f1 && DECL_VIRTUAL_P (f1)) || (f2 && DECL_VIRTUAL_P (f2)))
1635 warn_odr (t1, t2, NULL, NULL, warn, warned,
1636 G_("a type with different virtual table pointers"
1637 " is defined in another translation unit"));
1638 else if ((f1 && DECL_ARTIFICIAL (f1))
1639 || (f2 && DECL_ARTIFICIAL (f2)))
1640 warn_odr (t1, t2, NULL, NULL, warn, warned,
1641 G_("a type with different bases is defined "
1642 "in another translation unit"));
1643 else
1644 warn_odr (t1, t2, f1, f2, warn, warned,
1645 G_("a type with different number of fields "
1646 "is defined in another translation unit"));
1647
1648 return false;
1649 }
1650 if ((TYPE_MAIN_VARIANT (t1) == t1 || TYPE_MAIN_VARIANT (t2) == t2)
1651 && COMPLETE_TYPE_P (TYPE_MAIN_VARIANT (t1))
1652 && COMPLETE_TYPE_P (TYPE_MAIN_VARIANT (t2))
1653 && odr_type_p (TYPE_MAIN_VARIANT (t1))
1654 && odr_type_p (TYPE_MAIN_VARIANT (t2))
1655 && (TYPE_METHODS (TYPE_MAIN_VARIANT (t1))
1656 != TYPE_METHODS (TYPE_MAIN_VARIANT (t2))))
1657 {
1658 /* Currently free_lang_data sets TYPE_METHODS to error_mark_node
1659 if it is non-NULL so this loop will never realy execute. */
1660 if (TYPE_METHODS (TYPE_MAIN_VARIANT (t1)) != error_mark_node
1661 && TYPE_METHODS (TYPE_MAIN_VARIANT (t2)) != error_mark_node)
1662 for (f1 = TYPE_METHODS (TYPE_MAIN_VARIANT (t1)),
1663 f2 = TYPE_METHODS (TYPE_MAIN_VARIANT (t2));
1664 f1 && f2 ; f1 = DECL_CHAIN (f1), f2 = DECL_CHAIN (f2))
1665 {
1666 if (DECL_ASSEMBLER_NAME (f1) != DECL_ASSEMBLER_NAME (f2))
1667 {
1668 warn_odr (t1, t2, f1, f2, warn, warned,
1669 G_("a different method of same type "
1670 "is defined in another "
1671 "translation unit"));
1672 return false;
1673 }
1674 if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
1675 {
1676 warn_odr (t1, t2, f1, f2, warn, warned,
1677 G_("s definition that differs by virtual "
1678 "keyword in another translation unit"));
1679 return false;
1680 }
1681 if (DECL_VINDEX (f1) != DECL_VINDEX (f2))
1682 {
1683 warn_odr (t1, t2, f1, f2, warn, warned,
1684 G_("virtual table layout differs "
1685 "in another translation unit"));
1686 return false;
1687 }
1688 if (odr_subtypes_equivalent_p (TREE_TYPE (f1),
1689 TREE_TYPE (f2), visited))
1690 {
1691 warn_odr (t1, t2, f1, f2, warn, warned,
1692 G_("method with incompatible type is "
1693 "defined in another translation unit"));
1694 return false;
1695 }
1696 }
1697 if ((f1 == NULL) != (f2 == NULL))
1698 {
1699 warn_odr (t1, t2, NULL, NULL, warn, warned,
1700 G_("a type with different number of methods "
1701 "is defined in another translation unit"));
1702 return false;
1703 }
1704 }
1705 }
1706 break;
1707 }
1708 case VOID_TYPE:
1709 case NULLPTR_TYPE:
1710 break;
1711
1712 default:
1713 debug_tree (t1);
1714 gcc_unreachable ();
1715 }
1716
1717 /* Those are better to come last as they are utterly uninformative. */
1718 if (TYPE_SIZE (t1) && TYPE_SIZE (t2)
1719 && !operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0))
1720 {
1721 warn_odr (t1, t2, NULL, NULL, warn, warned,
1722 G_("a type with different size "
1723 "is defined in another translation unit"));
1724 return false;
1725 }
1726 if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2)
1727 && TYPE_ALIGN (t1) != TYPE_ALIGN (t2))
1728 {
1729 warn_odr (t1, t2, NULL, NULL, warn, warned,
1730 G_("a type with different alignment "
1731 "is defined in another translation unit"));
1732 return false;
1733 }
1734 gcc_assert (!TYPE_SIZE_UNIT (t1) || !TYPE_SIZE_UNIT (t2)
1735 || operand_equal_p (TYPE_SIZE_UNIT (t1),
1736 TYPE_SIZE_UNIT (t2), 0));
1737 return true;
1738 }
1739
1740 /* Return true if TYPE1 and TYPE2 are equivalent for One Definition Rule. */
1741
1742 bool
1743 odr_types_equivalent_p (tree type1, tree type2)
1744 {
1745 hash_set<type_pair,pair_traits> visited;
1746
1747 #ifdef ENABLE_CHECKING
1748 gcc_assert (odr_or_derived_type_p (type1) && odr_or_derived_type_p (type2));
1749 #endif
1750 return odr_types_equivalent_p (type1, type2, false, NULL,
1751 &visited);
1752 }
1753
1754 /* TYPE is equivalent to VAL by ODR, but its tree representation differs
1755 from VAL->type. This may happen in LTO where tree merging did not merge
1756 all variants of the same type or due to ODR violation.
1757
1758 Analyze and report ODR violations and add type to duplicate list.
1759 If TYPE is more specified than VAL->type, prevail VAL->type. Also if
1760 this is first time we see definition of a class return true so the
1761 base types are analyzed. */
1762
1763 static bool
1764 add_type_duplicate (odr_type val, tree type)
1765 {
1766 bool build_bases = false;
1767 bool prevail = false;
1768 bool odr_must_violate = false;
1769
1770 if (!val->types_set)
1771 val->types_set = new hash_set<tree>;
1772
1773 /* Chose polymorphic type as leader (this happens only in case of ODR
1774 violations. */
1775 if ((TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
1776 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
1777 && (TREE_CODE (val->type) != RECORD_TYPE || !TYPE_BINFO (val->type)
1778 || !polymorphic_type_binfo_p (TYPE_BINFO (val->type))))
1779 {
1780 prevail = true;
1781 build_bases = true;
1782 }
1783 /* Always prefer complete type to be the leader. */
1784 else if (!COMPLETE_TYPE_P (val->type) && COMPLETE_TYPE_P (type))
1785 {
1786 prevail = true;
1787 build_bases = TYPE_BINFO (type);
1788 }
1789 else if (COMPLETE_TYPE_P (val->type) && !COMPLETE_TYPE_P (type))
1790 ;
1791 else if (TREE_CODE (val->type) == ENUMERAL_TYPE
1792 && TREE_CODE (type) == ENUMERAL_TYPE
1793 && !TYPE_VALUES (val->type) && TYPE_VALUES (type))
1794 prevail = true;
1795 else if (TREE_CODE (val->type) == RECORD_TYPE
1796 && TREE_CODE (type) == RECORD_TYPE
1797 && TYPE_BINFO (type) && !TYPE_BINFO (val->type))
1798 {
1799 gcc_assert (!val->bases.length ());
1800 build_bases = true;
1801 prevail = true;
1802 }
1803
1804 if (prevail)
1805 {
1806 tree tmp = type;
1807
1808 type = val->type;
1809 val->type = tmp;
1810 }
1811
1812 val->types_set->add (type);
1813
1814 /* If we now have a mangled name, be sure to record it to val->type
1815 so ODR hash can work. */
1816
1817 if (can_be_name_hashed_p (type) && !can_be_name_hashed_p (val->type))
1818 SET_DECL_ASSEMBLER_NAME (TYPE_NAME (val->type),
1819 DECL_ASSEMBLER_NAME (TYPE_NAME (type)));
1820
1821 bool merge = true;
1822 bool base_mismatch = false;
1823 unsigned int i;
1824 bool warned = false;
1825 hash_set<type_pair,pair_traits> visited;
1826
1827 gcc_assert (in_lto_p);
1828 vec_safe_push (val->types, type);
1829
1830 /* If both are class types, compare the bases. */
1831 if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
1832 && TREE_CODE (val->type) == RECORD_TYPE
1833 && TREE_CODE (type) == RECORD_TYPE
1834 && TYPE_BINFO (val->type) && TYPE_BINFO (type))
1835 {
1836 if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
1837 != BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)))
1838 {
1839 if (!flag_ltrans && !warned && !val->odr_violated)
1840 {
1841 tree extra_base;
1842 warn_odr (type, val->type, NULL, NULL, !warned, &warned,
1843 "a type with the same name but different "
1844 "number of polymorphic bases is "
1845 "defined in another translation unit");
1846 if (warned)
1847 {
1848 if (BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
1849 > BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)))
1850 extra_base = BINFO_BASE_BINFO
1851 (TYPE_BINFO (type),
1852 BINFO_N_BASE_BINFOS (TYPE_BINFO (val->type)));
1853 else
1854 extra_base = BINFO_BASE_BINFO
1855 (TYPE_BINFO (val->type),
1856 BINFO_N_BASE_BINFOS (TYPE_BINFO (type)));
1857 tree extra_base_type = BINFO_TYPE (extra_base);
1858 inform (DECL_SOURCE_LOCATION (TYPE_NAME (extra_base_type)),
1859 "the extra base is defined here");
1860 }
1861 }
1862 base_mismatch = true;
1863 }
1864 else
1865 for (i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
1866 {
1867 tree base1 = BINFO_BASE_BINFO (TYPE_BINFO (type), i);
1868 tree base2 = BINFO_BASE_BINFO (TYPE_BINFO (val->type), i);
1869 tree type1 = BINFO_TYPE (base1);
1870 tree type2 = BINFO_TYPE (base2);
1871
1872 if (types_odr_comparable (type1, type2))
1873 {
1874 if (!types_same_for_odr (type1, type2))
1875 base_mismatch = true;
1876 }
1877 else
1878 if (!odr_types_equivalent_p (type1, type2))
1879 base_mismatch = true;
1880 if (base_mismatch)
1881 {
1882 if (!warned && !val->odr_violated)
1883 {
1884 warn_odr (type, val->type, NULL, NULL,
1885 !warned, &warned,
1886 "a type with the same name but different base "
1887 "type is defined in another translation unit");
1888 if (warned)
1889 warn_types_mismatch (type1, type2);
1890 }
1891 break;
1892 }
1893 if (BINFO_OFFSET (base1) != BINFO_OFFSET (base2))
1894 {
1895 base_mismatch = true;
1896 if (!warned && !val->odr_violated)
1897 warn_odr (type, val->type, NULL, NULL,
1898 !warned, &warned,
1899 "a type with the same name but different base "
1900 "layout is defined in another translation unit");
1901 break;
1902 }
1903 /* One of bases is not of complete type. */
1904 if (!TYPE_BINFO (type1) != !TYPE_BINFO (type2))
1905 {
1906 /* If we have a polymorphic type info specified for TYPE1
1907 but not for TYPE2 we possibly missed a base when recording
1908 VAL->type earlier.
1909 Be sure this does not happen. */
1910 if (TYPE_BINFO (type1)
1911 && polymorphic_type_binfo_p (TYPE_BINFO (type1))
1912 && !build_bases)
1913 odr_must_violate = true;
1914 break;
1915 }
1916 /* One base is polymorphic and the other not.
1917 This ought to be diagnosed earlier, but do not ICE in the
1918 checking bellow. */
1919 else if (TYPE_BINFO (type1)
1920 && polymorphic_type_binfo_p (TYPE_BINFO (type1))
1921 != polymorphic_type_binfo_p (TYPE_BINFO (type2)))
1922 {
1923 if (!warned && !val->odr_violated)
1924 warn_odr (type, val->type, NULL, NULL,
1925 !warned, &warned,
1926 "a base of the type is polymorphic only in one "
1927 "translation unit");
1928 base_mismatch = true;
1929 break;
1930 }
1931 }
1932 if (base_mismatch)
1933 {
1934 merge = false;
1935 odr_violation_reported = true;
1936 val->odr_violated = true;
1937
1938 if (symtab->dump_file)
1939 {
1940 fprintf (symtab->dump_file, "ODR base violation\n");
1941
1942 print_node (symtab->dump_file, "", val->type, 0);
1943 putc ('\n',symtab->dump_file);
1944 print_node (symtab->dump_file, "", type, 0);
1945 putc ('\n',symtab->dump_file);
1946 }
1947 }
1948 }
1949
1950 /* Next compare memory layout. */
1951 if (!odr_types_equivalent_p (val->type, type,
1952 !flag_ltrans && !val->odr_violated && !warned,
1953 &warned, &visited))
1954 {
1955 merge = false;
1956 odr_violation_reported = true;
1957 val->odr_violated = true;
1958 if (symtab->dump_file)
1959 {
1960 fprintf (symtab->dump_file, "ODR violation\n");
1961
1962 print_node (symtab->dump_file, "", val->type, 0);
1963 putc ('\n',symtab->dump_file);
1964 print_node (symtab->dump_file, "", type, 0);
1965 putc ('\n',symtab->dump_file);
1966 }
1967 }
1968 gcc_assert (val->odr_violated || !odr_must_violate);
1969 /* Sanity check that all bases will be build same way again. */
1970 #ifdef ENABLE_CHECKING
1971 if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
1972 && TREE_CODE (val->type) == RECORD_TYPE
1973 && TREE_CODE (type) == RECORD_TYPE
1974 && TYPE_BINFO (val->type) && TYPE_BINFO (type)
1975 && !val->odr_violated
1976 && !base_mismatch && val->bases.length ())
1977 {
1978 unsigned int num_poly_bases = 0;
1979 unsigned int j;
1980
1981 for (i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
1982 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
1983 (TYPE_BINFO (type), i)))
1984 num_poly_bases++;
1985 gcc_assert (num_poly_bases == val->bases.length ());
1986 for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type));
1987 i++)
1988 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
1989 (TYPE_BINFO (type), i)))
1990 {
1991 odr_type base = get_odr_type
1992 (BINFO_TYPE
1993 (BINFO_BASE_BINFO (TYPE_BINFO (type),
1994 i)),
1995 true);
1996 gcc_assert (val->bases[j] == base);
1997 j++;
1998 }
1999 }
2000 #endif
2001
2002
2003 /* Regularize things a little. During LTO same types may come with
2004 different BINFOs. Either because their virtual table was
2005 not merged by tree merging and only later at decl merging or
2006 because one type comes with external vtable, while other
2007 with internal. We want to merge equivalent binfos to conserve
2008 memory and streaming overhead.
2009
2010 The external vtables are more harmful: they contain references
2011 to external declarations of methods that may be defined in the
2012 merged LTO unit. For this reason we absolutely need to remove
2013 them and replace by internal variants. Not doing so will lead
2014 to incomplete answers from possible_polymorphic_call_targets.
2015
2016 FIXME: disable for now; because ODR types are now build during
2017 streaming in, the variants do not need to be linked to the type,
2018 yet. We need to do the merging in cleanup pass to be implemented
2019 soon. */
2020 if (!flag_ltrans && merge
2021 && 0
2022 && TREE_CODE (val->type) == RECORD_TYPE
2023 && TREE_CODE (type) == RECORD_TYPE
2024 && TYPE_BINFO (val->type) && TYPE_BINFO (type)
2025 && TYPE_MAIN_VARIANT (type) == type
2026 && TYPE_MAIN_VARIANT (val->type) == val->type
2027 && BINFO_VTABLE (TYPE_BINFO (val->type))
2028 && BINFO_VTABLE (TYPE_BINFO (type)))
2029 {
2030 tree master_binfo = TYPE_BINFO (val->type);
2031 tree v1 = BINFO_VTABLE (master_binfo);
2032 tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
2033
2034 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
2035 {
2036 gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
2037 && operand_equal_p (TREE_OPERAND (v1, 1),
2038 TREE_OPERAND (v2, 1), 0));
2039 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
2040 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
2041 }
2042 gcc_assert (DECL_ASSEMBLER_NAME (v1)
2043 == DECL_ASSEMBLER_NAME (v2));
2044
2045 if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
2046 {
2047 unsigned int i;
2048
2049 set_type_binfo (val->type, TYPE_BINFO (type));
2050 for (i = 0; i < val->types->length (); i++)
2051 {
2052 if (TYPE_BINFO ((*val->types)[i])
2053 == master_binfo)
2054 set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
2055 }
2056 BINFO_TYPE (TYPE_BINFO (type)) = val->type;
2057 }
2058 else
2059 set_type_binfo (type, master_binfo);
2060 }
2061 return build_bases;
2062 }
2063
/* Get ODR type hash entry for TYPE.  If INSERT is true, create
   possibly new entry.

   Lookup is done first by mangled name and, during LTO, falls back to
   the vtable hash for types whose name can not be hashed.  When a
   duplicate definition of an existing ODR type is seen, it is merged
   via add_type_duplicate and both hashes are kept in sync.  On success
   the entry is (re)inserted into the global odr_types array so that a
   type always appears after all of its bases.  */

odr_type
get_odr_type (tree type, bool insert)
{
  odr_type_d **slot = NULL;
  odr_type_d **vtable_slot = NULL;
  odr_type val = NULL;
  hashval_t hash;
  bool build_bases = false;
  bool insert_to_odr_array = false;
  int base_id = -1;

  type = main_odr_variant (type);

  gcc_checking_assert (can_be_name_hashed_p (type)
		       || can_be_vtable_hashed_p (type));

  /* Lookup entry, first try name hash, fallback to vtable hash.  */
  if (can_be_name_hashed_p (type))
    {
      hash = hash_odr_name (type);
      slot = odr_hash->find_slot_with_hash (type, hash,
					    insert ? INSERT : NO_INSERT);
    }
  if ((!slot || !*slot) && in_lto_p && can_be_vtable_hashed_p (type))
    {
      hash = hash_odr_vtable (type);
      vtable_slot = odr_vtable_hash->find_slot_with_hash (type, hash,
					   insert ? INSERT : NO_INSERT);
    }

  /* With NO_INSERT both lookups may have failed; nothing to return.  */
  if (!slot && !vtable_slot)
    return NULL;

  /* See if we already have entry for type.  */
  if ((slot && *slot) || (vtable_slot && *vtable_slot))
    {
      if (slot && *slot)
	{
	  val = *slot;
#ifdef ENABLE_CHECKING
	  /* Verify the name hash and vtable hash agree on the entry.  */
	  if (in_lto_p && can_be_vtable_hashed_p (type))
	    {
	      hash = hash_odr_vtable (type);
	      vtable_slot = odr_vtable_hash->find_slot_with_hash (type, hash,
								  NO_INSERT);
	      gcc_assert (!vtable_slot || *vtable_slot == *slot);
	      vtable_slot = NULL;
	    }
#endif
	}
      else if (*vtable_slot)
	val = *vtable_slot;

      /* TYPE is a duplicate tree of an already-recorded ODR type; record
	 it once in VAL->types_set (add returns false on first insert).  */
      if (val->type != type
	  && (!val->types_set || !val->types_set->add (type)))
	{
	  gcc_assert (insert);
	  /* We have type duplicate, but it may introduce vtable name or
	     mangled name; be sure to keep hashes in sync.  */
	  if (in_lto_p && can_be_vtable_hashed_p (type)
	      && (!vtable_slot || !*vtable_slot))
	    {
	      if (!vtable_slot)
		{
		  hash = hash_odr_vtable (type);
		  vtable_slot = odr_vtable_hash->find_slot_with_hash
			     (type, hash, INSERT);
		  gcc_checking_assert (!*vtable_slot || *vtable_slot == val);
		}
	      *vtable_slot = val;
	    }
	  if (slot && !*slot)
	    *slot = val;
	  build_bases = add_type_duplicate (val, type);
	}
    }
  else
    {
      /* First time we see this ODR type; allocate a fresh entry.  */
      val = ggc_cleared_alloc<odr_type_d> ();
      val->type = type;
      val->bases = vNULL;
      val->derived_types = vNULL;
      if (type_with_linkage_p (type))
	val->anonymous_namespace = type_in_anonymous_namespace_p (type);
      else
	val->anonymous_namespace = 0;
      /* Bases can only be recorded once the type is complete.  */
      build_bases = COMPLETE_TYPE_P (val->type);
      insert_to_odr_array = true;
      if (slot)
	*slot = val;
      if (vtable_slot)
	*vtable_slot = val;
    }

  /* Link VAL into the inheritance graph by recording its polymorphic
     bases (recursively creating their entries first).  */
  if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
      && type_with_linkage_p (type)
      && type == TYPE_MAIN_VARIANT (type))
    {
      tree binfo = TYPE_BINFO (type);
      unsigned int i;

      gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) == type);

      val->all_derivations_known = type_all_derivations_known_p (type);
      for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
	/* For now record only polymorphic types. other are
	   pointless for devirtualization and we can not precisely
	   determine ODR equivalency of these during LTO.  */
	if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
	  {
	    tree base_type= BINFO_TYPE (BINFO_BASE_BINFO (binfo, i));
	    odr_type base = get_odr_type (base_type, true);
	    gcc_assert (TYPE_MAIN_VARIANT (base_type) == base_type);
	    base->derived_types.safe_push (val);
	    val->bases.safe_push (base);
	    /* Track the highest base ID so we can verify ordering.  */
	    if (base->id > base_id)
	      base_id = base->id;
	  }
    }
  /* Ensure that type always appears after bases.  */
  if (insert_to_odr_array)
    {
      if (odr_types_ptr)
	val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  else if (base_id > val->id)
    {
      /* A recursive call above assigned some base an ID larger than ours;
	 move VAL to the end of the array to restore the invariant.  */
      odr_types[val->id] = 0;
      /* Be sure we did not recorded any derived types; these may need
	 renumbering too.  */
      gcc_assert (val->derived_types.length() == 0);
      if (odr_types_ptr)
	val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  return val;
}
2205
/* Add TYPE to ODR type hash, creating the hash tables lazily on first
   use.  Main variants are registered before other variants so that the
   main variant gets the canonical entry.  */

void
register_odr_type (tree type)
{
  if (!odr_hash)
    {
      odr_hash = new odr_hash_type (23);
      /* The vtable hash is only needed for LTO, where mangled names may
	 be missing.  */
      if (in_lto_p)
	odr_vtable_hash = new odr_vtable_hash_type (23);
    }
  /* Arrange things to be nicer and insert main variants first.
     ??? fundamental prerecorded types do not have mangled names; this
     makes it possible that non-ODR type is main_odr_variant of ODR type.
     Things may get smoother if LTO FE set mangled name of those types same
     way as C++ FE does.  */
  if (odr_type_p (main_odr_variant (TYPE_MAIN_VARIANT (type)))
      && odr_type_p (TYPE_MAIN_VARIANT (type)))
    get_odr_type (TYPE_MAIN_VARIANT (type), true);
  if (TYPE_MAIN_VARIANT (type) != type && odr_type_p (main_odr_variant (type)))
    get_odr_type (type, true);
}
2228
2229 /* Return true if type is known to have no derivations. */
2230
2231 bool
2232 type_known_to_have_no_derivations_p (tree t)
2233 {
2234 return (type_all_derivations_known_p (t)
2235 && (TYPE_FINAL_P (t)
2236 || (odr_hash
2237 && !get_odr_type (t, true)->derived_types.length())));
2238 }
2239
2240 /* Dump ODR type T and all its derived types. INDENT specifies indentation for
2241 recursive printing. */
2242
2243 static void
2244 dump_odr_type (FILE *f, odr_type t, int indent=0)
2245 {
2246 unsigned int i;
2247 fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
2248 print_generic_expr (f, t->type, TDF_SLIM);
2249 fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
2250 fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
2251 if (TYPE_NAME (t->type))
2252 {
2253 /*fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
2254 DECL_SOURCE_FILE (TYPE_NAME (t->type)),
2255 DECL_SOURCE_LINE (TYPE_NAME (t->type)));*/
2256 if (DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t->type)))
2257 fprintf (f, "%*s mangled name: %s\n", indent * 2, "",
2258 IDENTIFIER_POINTER
2259 (DECL_ASSEMBLER_NAME (TYPE_NAME (t->type))));
2260 }
2261 if (t->bases.length ())
2262 {
2263 fprintf (f, "%*s base odr type ids: ", indent * 2, "");
2264 for (i = 0; i < t->bases.length (); i++)
2265 fprintf (f, " %i", t->bases[i]->id);
2266 fprintf (f, "\n");
2267 }
2268 if (t->derived_types.length ())
2269 {
2270 fprintf (f, "%*s derived types:\n", indent * 2, "");
2271 for (i = 0; i < t->derived_types.length (); i++)
2272 dump_odr_type (f, t->derived_types[i], indent + 1);
2273 }
2274 fprintf (f, "\n");
2275 }
2276
2277 /* Dump the type inheritance graph. */
2278
2279 static void
2280 dump_type_inheritance_graph (FILE *f)
2281 {
2282 unsigned int i;
2283 if (!odr_types_ptr)
2284 return;
2285 fprintf (f, "\n\nType inheritance graph:\n");
2286 for (i = 0; i < odr_types.length (); i++)
2287 {
2288 if (odr_types[i] && odr_types[i]->bases.length () == 0)
2289 dump_odr_type (f, odr_types[i]);
2290 }
2291 for (i = 0; i < odr_types.length (); i++)
2292 {
2293 if (odr_types[i] && odr_types[i]->types && odr_types[i]->types->length ())
2294 {
2295 unsigned int j;
2296 fprintf (f, "Duplicate tree types for odr type %i\n", i);
2297 print_node (f, "", odr_types[i]->type, 0);
2298 for (j = 0; j < odr_types[i]->types->length (); j++)
2299 {
2300 tree t;
2301 fprintf (f, "duplicate #%i\n", j);
2302 print_node (f, "", (*odr_types[i]->types)[j], 0);
2303 t = (*odr_types[i]->types)[j];
2304 while (TYPE_P (t) && TYPE_CONTEXT (t))
2305 {
2306 t = TYPE_CONTEXT (t);
2307 print_node (f, "", t, 0);
2308 }
2309 putc ('\n',f);
2310 }
2311 }
2312 }
2313 }
2314
/* Initialize IPA devirt and build inheritance tree graph.  Idempotent:
   returns immediately if the ODR hash already exists.  */

void
build_type_inheritance_graph (void)
{
  struct symtab_node *n;
  FILE *inheritance_dump_file;
  int flags;

  if (odr_hash)
    return;
  timevar_push (TV_IPA_INHERITANCE);
  inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
  odr_hash = new odr_hash_type (23);
  if (in_lto_p)
    odr_vtable_hash = new odr_vtable_hash_type (23);

  /* We reconstruct the graph starting from types of all methods seen in
     the unit.
     NOTE: the "else if" after the long comment below continues this
     same per-symbol conditional.  */
  FOR_EACH_SYMBOL (n)
    if (is_a <cgraph_node *> (n)
	&& DECL_VIRTUAL_P (n->decl)
	&& n->real_symbol_p ())
      get_odr_type (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl)), true);

    /* Look also for virtual tables of types that do not define any methods.

       We need it in a case where class B has virtual base of class A
       re-defining its virtual method and there is class C with no virtual
       methods with B as virtual base.

       Here we output B's virtual method in two variant - for non-virtual
       and virtual inheritance.  B's virtual table has non-virtual version,
       while C's has virtual.

       For this reason we need to know about C in order to include both
       variants of B.  More correctly, record_target_from_binfo should
       add both variants of the method when walking B, but we have no
       link in between them.

       We rely on fact that either the method is exported and thus we
       assume it is called externally or C is in anonymous namespace and
       thus we will see the vtable.  */

    else if (is_a <varpool_node *> (n)
	     && DECL_VIRTUAL_P (n->decl)
	     && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
	     && TYPE_BINFO (DECL_CONTEXT (n->decl))
	     && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
      get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true);
  if (inheritance_dump_file)
    {
      dump_type_inheritance_graph (inheritance_dump_file);
      dump_end (TDI_inheritance, inheritance_dump_file);
    }
  timevar_pop (TV_IPA_INHERITANCE);
}
2372
2373 /* Return true if N has reference from live virtual table
2374 (and thus can be a destination of polymorphic call).
2375 Be conservatively correct when callgraph is not built or
2376 if the method may be referred externally. */
2377
2378 static bool
2379 referenced_from_vtable_p (struct cgraph_node *node)
2380 {
2381 int i;
2382 struct ipa_ref *ref;
2383 bool found = false;
2384
2385 if (node->externally_visible
2386 || DECL_EXTERNAL (node->decl)
2387 || node->used_from_other_partition)
2388 return true;
2389
2390 /* Keep this test constant time.
2391 It is unlikely this can happen except for the case where speculative
2392 devirtualization introduced many speculative edges to this node.
2393 In this case the target is very likely alive anyway. */
2394 if (node->ref_list.referring.length () > 100)
2395 return true;
2396
2397 /* We need references built. */
2398 if (symtab->state <= CONSTRUCTION)
2399 return true;
2400
2401 for (i = 0; node->iterate_referring (i, ref); i++)
2402 if ((ref->use == IPA_REF_ALIAS
2403 && referenced_from_vtable_p (dyn_cast<cgraph_node *> (ref->referring)))
2404 || (ref->use == IPA_REF_ADDR
2405 && TREE_CODE (ref->referring->decl) == VAR_DECL
2406 && DECL_VIRTUAL_P (ref->referring->decl)))
2407 {
2408 found = true;
2409 break;
2410 }
2411 return found;
2412 }
2413
/* If TARGET has associated node, record it in the NODES array.
   CAN_REFER specify if program can refer to the target directly.
   if TARGET is unknown (NULL) or it can not be inserted (for example because
   its body was already removed and there is no way to refer to it), clear
   COMPLETEP.  */

static void
maybe_record_node (vec <cgraph_node *> &nodes,
		   tree target, hash_set<tree> *inserted,
		   bool can_refer,
		   bool *completep)
{
  struct cgraph_node *target_node, *alias_target;
  enum availability avail;

  /* cxa_pure_virtual and __builtin_unreachable do not need to be added into
     list of targets; the runtime effect of calling them is undefined.
     Only "real" virtual methods should be accounted.  */
  if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE)
    return;

  if (!can_refer)
    {
      /* The only case when method of anonymous namespace becomes unreferable
	 is when we completely optimized it out.  */
      if (flag_ltrans
	  || !target
	  || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
	*completep = false;
      return;
    }

  if (!target)
    return;

  target_node = cgraph_node::get (target);

  /* Prefer alias target over aliases, so we do not get confused by
     fake duplicates.  */
  if (target_node)
    {
      alias_target = target_node->ultimate_alias_target (&avail);
      /* NOTE(review): the last conjunct tests get_availability () for
	 non-zero rather than comparing against a specific availability
	 level; confirm this truthiness test is intentional.  */
      if (target_node != alias_target
	  && avail >= AVAIL_AVAILABLE
	  && target_node->get_availability ())
	target_node = alias_target;
    }

  /* Method can only be called by polymorphic call if any
     of vtables referring to it are alive.

     While this holds for non-anonymous functions, too, there are
     cases where we want to keep them in the list; for example
     inline functions with -fno-weak are static, but we still
     may devirtualize them when instance comes from other unit.
     The same holds for LTO.

     Currently we ignore these functions in speculative devirtualization.
     ??? Maybe it would make sense to be more aggressive for LTO even
     elsewhere.  */
  if (!flag_ltrans
      && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
      && (!target_node
	  || !referenced_from_vtable_p (target_node)))
    ;
  /* See if TARGET is useful function we can deal with.  */
  else if (target_node != NULL
	   && (TREE_PUBLIC (target)
	       || DECL_EXTERNAL (target)
	       || target_node->definition)
	   && target_node->real_symbol_p ())
    {
      gcc_assert (!target_node->global.inlined_to);
      gcc_assert (target_node->real_symbol_p ());
      /* Record each target only once; INSERTED deduplicates.  */
      if (!inserted->add (target))
	{
	  cached_polymorphic_call_targets->add (target_node);
	  nodes.safe_push (target_node);
	}
    }
  /* Otherwise the target list is incomplete, unless the type lives in an
     anonymous namespace of a non-ltrans unit (then nothing external can
     refer to it).  */
  else if (completep
	   && (!type_in_anonymous_namespace_p
		 (DECL_CONTEXT (target))
	       || flag_ltrans))
    *completep = false;
}
2500
/* See if BINFO's type matches OUTER_TYPE.  If so, look up
   BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
   method in vtable and insert method to NODES array
   or BASES_TO_CONSIDER if this array is non-NULL.
   Otherwise recurse to base BINFOs.
   This matches what get_binfo_at_offset does, but with offset
   being unknown.

   TYPE_BINFOS is a stack of BINFOS of types with defined
   virtual table seen on way from class type to BINFO.

   MATCHED_VTABLES tracks virtual tables we already did lookup
   for virtual function in.  INSERTED tracks nodes we already
   inserted.

   ANONYMOUS is true if BINFO is part of anonymous namespace.

   Clear COMPLETEP when we hit unreferable target.
  */

static void
record_target_from_binfo (vec <cgraph_node *> &nodes,
			  vec <tree> *bases_to_consider,
			  tree binfo,
			  tree otr_type,
			  vec <tree> &type_binfos,
			  HOST_WIDE_INT otr_token,
			  tree outer_type,
			  HOST_WIDE_INT offset,
			  hash_set<tree> *inserted,
			  hash_set<tree> *matched_vtables,
			  bool anonymous,
			  bool *completep)
{
  tree type = BINFO_TYPE (binfo);
  int i;
  tree base_binfo;


  /* Push BINFO on the stack while we (or recursive calls) are inside it.
     Note this push is matched by a pop on BOTH return paths below.  */
  if (BINFO_VTABLE (binfo))
    type_binfos.safe_push (binfo);
  if (types_same_for_odr (type, outer_type))
    {
      int i;
      tree type_binfo = NULL;

      /* Look up BINFO with virtual table.  For normal types it is always last
	 binfo on stack.  */
      for (i = type_binfos.length () - 1; i >= 0; i--)
	if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
	  {
	    type_binfo = type_binfos[i];
	    break;
	  }
      /* Pop before any early return on this path.  */
      if (BINFO_VTABLE (binfo))
	type_binfos.pop ();
      /* If this is duplicated BINFO for base shared by virtual inheritance,
	 we may not have its associated vtable.  This is not a problem, since
	 we will walk it on the other path.  */
      if (!type_binfo)
	return;
      tree inner_binfo = get_binfo_at_offset (type_binfo,
					      offset, otr_type);
      if (!inner_binfo)
	{
	  gcc_assert (odr_violation_reported);
	  return;
	}
      /* For types in anonymous namespace first check if the respective vtable
	 is alive.  If not, we know the type can't be called.  */
      if (!flag_ltrans && anonymous)
	{
	  tree vtable = BINFO_VTABLE (inner_binfo);
	  varpool_node *vnode;

	  /* Strip the POINTER_PLUS_EXPR wrapper to get the vtable decl.  */
	  if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
	    vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
	  vnode = varpool_node::get (vtable);
	  if (!vnode || !vnode->definition)
	    return;
	}
      gcc_assert (inner_binfo);
      /* Visit each vtable only once.  When collecting BASES_TO_CONSIDER we
	 only test membership (contains) so the caller can record the vtable
	 later; otherwise add marks it visited now.  */
      if (bases_to_consider
	  ? !matched_vtables->contains (BINFO_VTABLE (inner_binfo))
	  : !matched_vtables->add (BINFO_VTABLE (inner_binfo)))
	{
	  bool can_refer;
	  tree target = gimple_get_virt_method_for_binfo (otr_token,
							  inner_binfo,
							  &can_refer);
	  if (!bases_to_consider)
	    maybe_record_node (nodes, target, inserted, can_refer, completep);
	  /* Destructors are never called via construction vtables.  */
	  else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
	    bases_to_consider->safe_push (target);
	}
      return;
    }

  /* Walk bases.  */
  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    /* Walking bases that have no virtual method is pointless exercise.  */
    if (polymorphic_type_binfo_p (base_binfo))
      record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
				type_binfos,
				otr_token, outer_type, offset, inserted,
				matched_vtables, anonymous, completep);
  /* Matching pop for the push at function entry.  */
  if (BINFO_VTABLE (binfo))
    type_binfos.pop ();
}
2611
/* Look up virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
   of TYPE, insert them to NODES, recurse into derived nodes.
   INSERTED is used to avoid duplicate insertions of methods into NODES.
   MATCHED_VTABLES are used to avoid duplicate walking vtables.
   Clear COMPLETEP if unreferable target is found.

   If CONSIDER_CONSTRUCTION is true, record to BASES_TO_CONSIDER
   all cases where BASE_SKIPPED is true (because the base is abstract
   class).  */

static void
possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
				     hash_set<tree> *inserted,
				     hash_set<tree> *matched_vtables,
				     tree otr_type,
				     odr_type type,
				     HOST_WIDE_INT otr_token,
				     tree outer_type,
				     HOST_WIDE_INT offset,
				     bool *completep,
				     vec <tree> &bases_to_consider,
				     bool consider_construction)
{
  tree binfo = TYPE_BINFO (type->type);
  unsigned int i;
  /* Fresh per-type BINFO stack for record_target_from_binfo.  */
  auto_vec <tree, 8> type_binfos;
  bool possibly_instantiated = type_possibly_instantiated_p (type->type);

  /* We may need to consider types w/o instances because of possible derived
     types using their methods either directly or via construction vtables.
     We are safe to skip them when all derivations are known, since we will
     handle them later.
     This is done by recording them to BASES_TO_CONSIDER array.  */
  if (possibly_instantiated || consider_construction)
    {
      record_target_from_binfo (nodes,
				(!possibly_instantiated
				 && type_all_derivations_known_p (type->type))
				? &bases_to_consider : NULL,
				binfo, otr_type, type_binfos, otr_token,
				outer_type, offset,
				inserted, matched_vtables,
				type->anonymous_namespace, completep);
    }
  /* Recurse into the whole derivation subtree of TYPE.  */
  for (i = 0; i < type->derived_types.length (); i++)
    possible_polymorphic_call_targets_1 (nodes, inserted,
					 matched_vtables,
					 otr_type,
					 type->derived_types[i],
					 otr_token, outer_type, offset, completep,
					 bases_to_consider, consider_construction);
}
2664
/* Cache of queries for polymorphic call targets.

   Enumerating all call targets may get expensive when there are many
   polymorphic calls in the program, so we memoize all the previous
   queries and avoid duplicated work.  */

struct polymorphic_call_target_d
{
  /* Token identifying the called method; passed to
     gimple_get_virt_method_for_binfo for the vtable lookup.  */
  HOST_WIDE_INT otr_token;
  /* Context (outer type, offset, speculation) of the call.  */
  ipa_polymorphic_call_context context;
  /* ODR type of the class whose method is called.  */
  odr_type type;
  /* Computed list of possible call targets.  */
  vec <cgraph_node *> targets;
  /* State for "would benefit from final" warnings; presumably consumed
     via final_warning_records — see the warn-count structures below.  */
  tree decl_warning;
  int type_warning;
  /* True when TARGETS is known to list every possible target.  */
  bool complete;
  /* True when the query was done for speculative devirtualization.  */
  bool speculative;
};
2682
/* Polymorphic call target cache helpers.  Hasher traits for hash_table:
   entries are keyed by (type, token, context, speculative) and compared
   field-wise; remove releases the target vector and frees the entry.  */

struct polymorphic_call_target_hasher
{
  typedef polymorphic_call_target_d *value_type;
  typedef polymorphic_call_target_d *compare_type;
  static inline hashval_t hash (const polymorphic_call_target_d *);
  static inline bool equal (const polymorphic_call_target_d *,
			    const polymorphic_call_target_d *);
  static inline void remove (polymorphic_call_target_d *);
};
2694
/* Return the computed hashcode for ODR_QUERY.  Mixes every field that
   equal () compares, so equal entries hash identically.  */

inline hashval_t
polymorphic_call_target_hasher::hash (const polymorphic_call_target_d *odr_query)
{
  inchash::hash hstate (odr_query->otr_token);

  hstate.add_wide_int (odr_query->type->id);
  hstate.merge_hash (TYPE_UID (odr_query->context.outer_type));
  hstate.add_wide_int (odr_query->context.offset);

  /* Speculative part of the context is only mixed in when present.  */
  if (odr_query->context.speculative_outer_type)
    {
      hstate.merge_hash (TYPE_UID (odr_query->context.speculative_outer_type));
      hstate.add_wide_int (odr_query->context.speculative_offset);
    }
  hstate.add_flag (odr_query->speculative);
  hstate.add_flag (odr_query->context.maybe_in_construction);
  hstate.add_flag (odr_query->context.maybe_derived_type);
  hstate.add_flag (odr_query->context.speculative_maybe_derived_type);
  hstate.commit_flag ();
  return hstate.end ();
}
2718
2719 /* Compare cache entries T1 and T2. */
2720
2721 inline bool
2722 polymorphic_call_target_hasher::equal (const polymorphic_call_target_d *t1,
2723 const polymorphic_call_target_d *t2)
2724 {
2725 return (t1->type == t2->type && t1->otr_token == t2->otr_token
2726 && t1->speculative == t2->speculative
2727 && t1->context.offset == t2->context.offset
2728 && t1->context.speculative_offset == t2->context.speculative_offset
2729 && t1->context.outer_type == t2->context.outer_type
2730 && t1->context.speculative_outer_type == t2->context.speculative_outer_type
2731 && t1->context.maybe_in_construction
2732 == t2->context.maybe_in_construction
2733 && t1->context.maybe_derived_type == t2->context.maybe_derived_type
2734 && (t1->context.speculative_maybe_derived_type
2735 == t2->context.speculative_maybe_derived_type));
2736 }
2737
/* Remove entry in polymorphic call target cache hash.  Releases the
   heap-allocated target vector before freeing the entry itself.  */

inline void
polymorphic_call_target_hasher::remove (polymorphic_call_target_d *v)
{
  v->targets.release ();
  free (v);
}
2746
/* Polymorphic call target query cache.  Lazily allocated; freed by
   free_polymorphic_call_targets_hash below.  */

typedef hash_table<polymorphic_call_target_hasher>
   polymorphic_call_target_hash_type;
static polymorphic_call_target_hash_type *polymorphic_call_target_hash;
2752
2753 /* Destroy polymorphic call target query cache. */
2754
2755 static void
2756 free_polymorphic_call_targets_hash ()
2757 {
2758 if (cached_polymorphic_call_targets)
2759 {
2760 delete polymorphic_call_target_hash;
2761 polymorphic_call_target_hash = NULL;
2762 delete cached_polymorphic_call_targets;
2763 cached_polymorphic_call_targets = NULL;
2764 }
2765 }
2766
2767 /* When virtual function is removed, we may need to flush the cache. */
2768
2769 static void
2770 devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
2771 {
2772 if (cached_polymorphic_call_targets
2773 && cached_polymorphic_call_targets->contains (n))
2774 free_polymorphic_call_targets_hash ();
2775 }
2776
2777 /* Look up base of BINFO that has virtual table VTABLE with OFFSET. */
2778
2779 tree
2780 subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
2781 tree vtable)
2782 {
2783 tree v = BINFO_VTABLE (binfo);
2784 int i;
2785 tree base_binfo;
2786 unsigned HOST_WIDE_INT this_offset;
2787
2788 if (v)
2789 {
2790 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
2791 gcc_unreachable ();
2792
2793 if (offset == this_offset
2794 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
2795 return binfo;
2796 }
2797
2798 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
2799 if (polymorphic_type_binfo_p (base_binfo))
2800 {
2801 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
2802 if (base_binfo)
2803 return base_binfo;
2804 }
2805 return NULL;
2806 }
2807
/* T is known constant value of virtual table pointer.
   Store virtual table to V and its offset to OFFSET.
   Return false if T does not look like virtual table reference.  */

bool
vtable_pointer_value_to_vtable (const_tree t, tree *v,
				unsigned HOST_WIDE_INT *offset)
{
  /* We expect &MEM[(void *)&virtual_table + 16B].
     We obtain object's BINFO from the context of the virtual table.
     This one contains pointer to virtual table represented via
     POINTER_PLUS_EXPR.  Verify that this pointer matches what
     we propagated through.

     In the case of virtual inheritance, the virtual tables may
     be nested, i.e. the offset may be different from 16 and we may
     need to dive into the type representation.  */
  if (TREE_CODE (t) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
	  == VAR_DECL)
      && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
					 (TREE_OPERAND (t, 0), 0), 0)))
    {
      /* Matched &MEM[&vtable + CST]: extract the decl and the constant
	 byte offset.  */
      *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
      *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
      return true;
    }

  /* Alternative representation, used by C++ frontend is POINTER_PLUS_EXPR.
     We need to handle it when T comes from static variable initializer or
     BINFO.  */
  if (TREE_CODE (t) == POINTER_PLUS_EXPR)
    {
      *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
      t = TREE_OPERAND (t, 0);
    }
  else
    *offset = 0;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;
  *v = TREE_OPERAND (t, 0);
  return true;
}
2855
2856 /* T is known constant value of virtual table pointer. Return BINFO of the
2857 instance type. */
2858
2859 tree
2860 vtable_pointer_value_to_binfo (const_tree t)
2861 {
2862 tree vtable;
2863 unsigned HOST_WIDE_INT offset;
2864
2865 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
2866 return NULL_TREE;
2867
2868 /* FIXME: for stores of construction vtables we return NULL,
2869 because we do not have BINFO for those. Eventually we should fix
2870 our representation to allow this case to be handled, too.
2871 In the case we see store of BINFO we however may assume
2872 that standard folding will be able to cope with it. */
2873 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
2874 offset, vtable);
2875 }
2876
2877 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
2878 Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
2879 and insert them in NODES.
2880
2881 MATCHED_VTABLES and INSERTED is used to avoid duplicated work. */
2882
2883 static void
2884 record_targets_from_bases (tree otr_type,
2885 HOST_WIDE_INT otr_token,
2886 tree outer_type,
2887 HOST_WIDE_INT offset,
2888 vec <cgraph_node *> &nodes,
2889 hash_set<tree> *inserted,
2890 hash_set<tree> *matched_vtables,
2891 bool *completep)
2892 {
2893 while (true)
2894 {
2895 HOST_WIDE_INT pos, size;
2896 tree base_binfo;
2897 tree fld;
2898
2899 if (types_same_for_odr (outer_type, otr_type))
2900 return;
2901
2902 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
2903 {
2904 if (TREE_CODE (fld) != FIELD_DECL)
2905 continue;
2906
2907 pos = int_bit_position (fld);
2908 size = tree_to_shwi (DECL_SIZE (fld));
2909 if (pos <= offset && (pos + size) > offset
2910 /* Do not get confused by zero sized bases. */
2911 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
2912 break;
2913 }
2914 /* Within a class type we should always find corresponding fields. */
2915 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
2916
2917 /* Nonbase types should have been stripped by outer_class_type. */
2918 gcc_assert (DECL_ARTIFICIAL (fld));
2919
2920 outer_type = TREE_TYPE (fld);
2921 offset -= pos;
2922
2923 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
2924 offset, otr_type);
2925 if (!base_binfo)
2926 {
2927 gcc_assert (odr_violation_reported);
2928 return;
2929 }
2930 gcc_assert (base_binfo);
2931 if (!matched_vtables->add (BINFO_VTABLE (base_binfo)))
2932 {
2933 bool can_refer;
2934 tree target = gimple_get_virt_method_for_binfo (otr_token,
2935 base_binfo,
2936 &can_refer);
2937 if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
2938 maybe_record_node (nodes, target, inserted, can_refer, completep);
2939 matched_vtables->add (BINFO_VTABLE (base_binfo));
2940 }
2941 }
2942 }
2943
2944 /* When virtual table is removed, we may need to flush the cache. */
2945
2946 static void
2947 devirt_variable_node_removal_hook (varpool_node *n,
2948 void *d ATTRIBUTE_UNUSED)
2949 {
2950 if (cached_polymorphic_call_targets
2951 && DECL_VIRTUAL_P (n->decl)
2952 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
2953 free_polymorphic_call_targets_hash ();
2954 }
2955
/* Record about how many calls would benefit from given type to be final.  */

struct odr_type_warn_count
{
  tree type;            /* The ODR type that could be declared final.  */
  int count;            /* Number of polymorphic calls that would benefit.  */
  gcov_type dyn_count;  /* Sum of profile counts of those calls.  */
};
2964
/* Record about how many calls would benefit from given method to be final.  */

struct decl_warn_count
{
  tree decl;            /* The method that could be declared final.  */
  int count;            /* Number of polymorphic calls that would benefit.  */
  gcov_type dyn_count;  /* Sum of profile counts of those calls.  */
};
2973
/* Information about type and decl warnings.  */

struct final_warning_record
{
  /* Profile count of the polymorphic call currently being analyzed;
     set by ipa_devirt before querying possible targets.  */
  gcov_type dyn_count;
  /* Indexed by odr_type id; accumulates -Wsuggest-final-types candidates.  */
  vec<odr_type_warn_count> type_warnings;
  /* Keyed by method decl; accumulates -Wsuggest-final-methods candidates.  */
  hash_map<tree, decl_warn_count> decl_warnings;
};
/* Non-NULL only while ipa_devirt is collecting the warnings above.  */
struct final_warning_record *final_warning_records;
2983
/* Return vector containing possible targets of polymorphic call of type
   OTR_TYPE calling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
   If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containing
   OTR_TYPE and include their virtual method.  This is useful for types
   possibly in construction or destruction where the virtual table may
   temporarily change to one of base types.  INCLUDE_DERIVER_TYPES make
   us to walk the inheritance graph for all derivations.

   If COMPLETEP is non-NULL, store true if the list is complete.
   CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
   in the target cache.  If user needs to visit every target list
   just once, it can memoize them.

   If SPECULATIVE is set, the list will not contain targets that
   are not speculatively taken.

   Returned vector is placed into cache.  It is NOT caller's responsibility
   to free it.  The vector can be freed on cgraph_remove_node call if
   the particular node is a virtual function present in the cache.  */

vec <cgraph_node *>
possible_polymorphic_call_targets (tree otr_type,
				   HOST_WIDE_INT otr_token,
				   ipa_polymorphic_call_context context,
				   bool *completep,
				   void **cache_token,
				   bool speculative)
{
  static struct cgraph_node_hook_list *node_removal_hook_holder;
  vec <cgraph_node *> nodes = vNULL;
  auto_vec <tree, 8> bases_to_consider;
  odr_type type, outer_type;
  polymorphic_call_target_d key;
  polymorphic_call_target_d **slot;
  unsigned int i;
  tree binfo, target;
  bool complete;
  bool can_refer = false;
  bool skipped = false;

  otr_type = TYPE_MAIN_VARIANT (otr_type);

  /* If ODR is not initialized or the context is invalid, return empty
     incomplete list.  */
  if (!odr_hash || context.invalid || !TYPE_BINFO (otr_type))
    {
      if (completep)
	*completep = context.invalid;
      if (cache_token)
	*cache_token = NULL;
      return nodes;
    }

  /* Do not bother to compute speculative info when user do not asks for it.  */
  if (!speculative || !context.speculative_outer_type)
    context.clear_speculation ();

  type = get_odr_type (otr_type, true);

  /* Recording type variants would waste results cache.  */
  gcc_assert (!context.outer_type
	      || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);

  /* Look up the outer class type we want to walk.
     If we fail to do so, the context is invalid.  */
  if ((context.outer_type || context.speculative_outer_type)
      && !context.restrict_to_inner_class (otr_type))
    {
      if (completep)
	*completep = true;
      if (cache_token)
	*cache_token = NULL;
      return nodes;
    }
  gcc_assert (!context.invalid);

  /* Check that restrict_to_inner_class kept the main variant.  */
  gcc_assert (!context.outer_type
	      || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);

  /* We canonicalize our query, so we do not need extra hashtable entries.  */

  /* Without outer type, we have no use for offset.  Just do the
     basic search from inner type.  */
  if (!context.outer_type)
    context.clear_outer_type (otr_type);
  /* We need to update our hierarchy if the type does not exist.  */
  outer_type = get_odr_type (context.outer_type, true);
  /* If the type is complete, there are no derivations.  */
  if (TYPE_FINAL_P (outer_type->type))
    context.maybe_derived_type = false;

  /* Initialize query cache.  */
  if (!cached_polymorphic_call_targets)
    {
      cached_polymorphic_call_targets = new hash_set<cgraph_node *>;
      polymorphic_call_target_hash
	= new polymorphic_call_target_hash_type (23);
      /* Register removal hooks so stale cache entries get flushed when
	 virtual methods or vtables disappear from the symbol table.  */
      if (!node_removal_hook_holder)
	{
	  node_removal_hook_holder =
	    symtab->add_cgraph_removal_hook (&devirt_node_removal_hook, NULL);
	  symtab->add_varpool_removal_hook (&devirt_variable_node_removal_hook,
					    NULL);
	}
    }

  /* In LTO, canonicalize the (speculative) outer types to the ODR
     representatives so equivalent queries share a cache entry.  */
  if (in_lto_p)
    {
      if (context.outer_type != otr_type)
	context.outer_type
	  = get_odr_type (context.outer_type, true)->type;
      if (context.speculative_outer_type)
	context.speculative_outer_type
	  = get_odr_type (context.speculative_outer_type, true)->type;
    }

  /* Look up cached answer.  */
  key.type = type;
  key.otr_token = otr_token;
  key.speculative = speculative;
  key.context = context;
  slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
  if (cache_token)
    *cache_token = (void *)*slot;
  if (*slot)
    {
      /* Cache hit: besides returning the stored list, re-account the
	 -Wsuggest-final-* statistics for this call site.  */
      if (completep)
	*completep = (*slot)->complete;
      if ((*slot)->type_warning && final_warning_records)
	{
	  final_warning_records->type_warnings[(*slot)->type_warning - 1].count++;
	  final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
	    += final_warning_records->dyn_count;
	}
      if (!speculative && (*slot)->decl_warning && final_warning_records)
	{
	  struct decl_warn_count *c =
	     final_warning_records->decl_warnings.get ((*slot)->decl_warning);
	  c->count++;
	  c->dyn_count += final_warning_records->dyn_count;
	}
      return (*slot)->targets;
    }

  complete = true;

  /* Do actual search.  */
  timevar_push (TV_IPA_VIRTUAL_CALL);
  *slot = XCNEW (polymorphic_call_target_d);
  if (cache_token)
    *cache_token = (void *)*slot;
  (*slot)->type = type;
  (*slot)->otr_token = otr_token;
  (*slot)->context = context;
  (*slot)->speculative = speculative;

  hash_set<tree> inserted;
  hash_set<tree> matched_vtables;

  /* First insert targets we speculatively identified as likely.  */
  if (context.speculative_outer_type)
    {
      odr_type speculative_outer_type;
      bool speculation_complete = true;

      /* First insert target from type itself and check if it may have
	 derived types.  */
      speculative_outer_type = get_odr_type (context.speculative_outer_type, true);
      if (TYPE_FINAL_P (speculative_outer_type->type))
	context.speculative_maybe_derived_type = false;
      binfo = get_binfo_at_offset (TYPE_BINFO (speculative_outer_type->type),
				   context.speculative_offset, otr_type);
      if (binfo)
	target = gimple_get_virt_method_for_binfo (otr_token, binfo,
						   &can_refer);
      else
	target = NULL;

      /* In the case we get complete method, we don't need
	 to walk derivations.  */
      if (target && DECL_FINAL_P (target))
	context.speculative_maybe_derived_type = false;
      if (type_possibly_instantiated_p (speculative_outer_type->type))
	maybe_record_node (nodes, target, &inserted, can_refer, &speculation_complete);
      if (binfo)
	matched_vtables.add (BINFO_VTABLE (binfo));


      /* Next walk recursively all derived types.  */
      if (context.speculative_maybe_derived_type)
	for (i = 0; i < speculative_outer_type->derived_types.length(); i++)
	  possible_polymorphic_call_targets_1 (nodes, &inserted,
					       &matched_vtables,
					       otr_type,
					       speculative_outer_type->derived_types[i],
					       otr_token, speculative_outer_type->type,
					       context.speculative_offset,
					       &speculation_complete,
					       bases_to_consider,
					       false);
    }

  /* Non-speculative search; skipped when the speculation above already
     produced targets and only speculative ones were requested.  */
  if (!speculative || !nodes.length ())
    {
      /* First see virtual method of type itself.  */
      binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
				   context.offset, otr_type);
      if (binfo)
	target = gimple_get_virt_method_for_binfo (otr_token, binfo,
						   &can_refer);
      else
	{
	  gcc_assert (odr_violation_reported);
	  target = NULL;
	}

      /* Destructors are never called through construction virtual tables,
	 because the type is always known.  */
      if (target && DECL_CXX_DESTRUCTOR_P (target))
	context.maybe_in_construction = false;

      if (target)
	{
	  /* In the case we get complete method, we don't need
	     to walk derivations.  */
	  if (DECL_FINAL_P (target))
	    context.maybe_derived_type = false;
	}

      /* If OUTER_TYPE is abstract, we know we are not seeing its instance.  */
      if (type_possibly_instantiated_p (outer_type->type))
	maybe_record_node (nodes, target, &inserted, can_refer, &complete);
      else
	skipped = true;

      if (binfo)
	matched_vtables.add (BINFO_VTABLE (binfo));

      /* Next walk recursively all derived types.  */
      if (context.maybe_derived_type)
	{
	  for (i = 0; i < outer_type->derived_types.length(); i++)
	    possible_polymorphic_call_targets_1 (nodes, &inserted,
						 &matched_vtables,
						 otr_type,
						 outer_type->derived_types[i],
						 otr_token, outer_type->type,
						 context.offset, &complete,
						 bases_to_consider,
						 context.maybe_in_construction);

	  if (!outer_type->all_derivations_known)
	    {
	      /* Record -Wsuggest-final-types/-methods candidates: a unit
		 where the list would be complete if the type or the single
		 method was final.  */
	      if (!speculative && final_warning_records)
		{
		  if (complete
		      && nodes.length () == 1
		      && warn_suggest_final_types
		      && !outer_type->derived_types.length ())
		    {
		      if (outer_type->id >= (int)final_warning_records->type_warnings.length ())
			final_warning_records->type_warnings.safe_grow_cleared
			  (odr_types.length ());
		      final_warning_records->type_warnings[outer_type->id].count++;
		      final_warning_records->type_warnings[outer_type->id].dyn_count
			+= final_warning_records->dyn_count;
		      final_warning_records->type_warnings[outer_type->id].type
			= outer_type->type;
		      /* 0 means "no warning", hence the +1 bias.  */
		      (*slot)->type_warning = outer_type->id + 1;
		    }
		  if (complete
		      && warn_suggest_final_methods
		      && nodes.length () == 1
		      && types_same_for_odr (DECL_CONTEXT (nodes[0]->decl),
					     outer_type->type))
		    {
		      bool existed;
		      struct decl_warn_count &c =
			 final_warning_records->decl_warnings.get_or_insert
			  (nodes[0]->decl, &existed);

		      if (existed)
			{
			  c.count++;
			  c.dyn_count += final_warning_records->dyn_count;
			}
		      else
			{
			  c.count = 1;
			  c.dyn_count = final_warning_records->dyn_count;
			  c.decl = nodes[0]->decl;
			}
		      (*slot)->decl_warning = nodes[0]->decl;
		    }
		}
	      complete = false;
	    }
	}

      if (!speculative)
	{
	  /* Destructors are never called through construction virtual tables,
	     because the type is always known.  One of entries may be
	     cxa_pure_virtual so look to at least two of them.  */
	  if (context.maybe_in_construction)
	    for (i =0 ; i < MIN (nodes.length (), 2); i++)
	      if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
		context.maybe_in_construction = false;
	  if (context.maybe_in_construction)
	    {
	      if (type != outer_type
		  && (!skipped
		      || (context.maybe_derived_type
			  && !type_all_derivations_known_p (outer_type->type))))
		record_targets_from_bases (otr_type, otr_token, outer_type->type,
					   context.offset, nodes, &inserted,
					   &matched_vtables, &complete);
	      if (skipped)
		maybe_record_node (nodes, target, &inserted, can_refer, &complete);
	      for (i = 0; i < bases_to_consider.length(); i++)
		maybe_record_node (nodes, bases_to_consider[i], &inserted, can_refer, &complete);
	    }
	}
    }

  /* Store the result into the cache entry allocated above.  */
  (*slot)->targets = nodes;
  (*slot)->complete = complete;
  if (completep)
    *completep = complete;

  timevar_pop (TV_IPA_VIRTUAL_CALL);
  return nodes;
}
3318
/* Traversal callback for final_warning_records->decl_warnings: collect a
   pointer to each warning record into VEC.  Returns true so the hash_map
   traversal continues over all entries.  */

bool
add_decl_warning (const tree &key ATTRIBUTE_UNUSED, const decl_warn_count &value,
		  vec<const decl_warn_count*> *vec)
{
  vec->safe_push (&value);
  return true;
}
3326
3327 /* Dump target list TARGETS into FILE. */
3328
3329 static void
3330 dump_targets (FILE *f, vec <cgraph_node *> targets)
3331 {
3332 unsigned int i;
3333
3334 for (i = 0; i < targets.length (); i++)
3335 {
3336 char *name = NULL;
3337 if (in_lto_p)
3338 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
3339 fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
3340 if (in_lto_p)
3341 free (name);
3342 if (!targets[i]->definition)
3343 fprintf (f, " (no definition%s)",
3344 DECL_DECLARED_INLINE_P (targets[i]->decl)
3345 ? " inline" : "");
3346 }
3347 fprintf (f, "\n");
3348 }
3349
3350 /* Dump all possible targets of a polymorphic call. */
3351
3352 void
3353 dump_possible_polymorphic_call_targets (FILE *f,
3354 tree otr_type,
3355 HOST_WIDE_INT otr_token,
3356 const ipa_polymorphic_call_context &ctx)
3357 {
3358 vec <cgraph_node *> targets;
3359 bool final;
3360 odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false);
3361 unsigned int len;
3362
3363 if (!type)
3364 return;
3365 targets = possible_polymorphic_call_targets (otr_type, otr_token,
3366 ctx,
3367 &final, NULL, false);
3368 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
3369 print_generic_expr (f, type->type, TDF_SLIM);
3370 fprintf (f, " token %i\n", (int)otr_token);
3371
3372 ctx.dump (f);
3373
3374 fprintf (f, " %s%s%s%s\n ",
3375 final ? "This is a complete list." :
3376 "This is partial list; extra targets may be defined in other units.",
3377 ctx.maybe_in_construction ? " (base types included)" : "",
3378 ctx.maybe_derived_type ? " (derived types included)" : "",
3379 ctx.speculative_maybe_derived_type ? " (speculative derived types included)" : "");
3380 len = targets.length ();
3381 dump_targets (f, targets);
3382
3383 targets = possible_polymorphic_call_targets (otr_type, otr_token,
3384 ctx,
3385 &final, NULL, true);
3386 if (targets.length () != len)
3387 {
3388 fprintf (f, " Speculative targets:");
3389 dump_targets (f, targets);
3390 }
3391 gcc_assert (targets.length () <= len);
3392 fprintf (f, "\n");
3393 }
3394
3395
3396 /* Return true if N can be possibly target of a polymorphic call of
3397 OTR_TYPE/OTR_TOKEN. */
3398
3399 bool
3400 possible_polymorphic_call_target_p (tree otr_type,
3401 HOST_WIDE_INT otr_token,
3402 const ipa_polymorphic_call_context &ctx,
3403 struct cgraph_node *n)
3404 {
3405 vec <cgraph_node *> targets;
3406 unsigned int i;
3407 enum built_in_function fcode;
3408 bool final;
3409
3410 if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
3411 && ((fcode = DECL_FUNCTION_CODE (n->decl))
3412 == BUILT_IN_UNREACHABLE
3413 || fcode == BUILT_IN_TRAP))
3414 return true;
3415
3416 if (!odr_hash)
3417 return true;
3418 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
3419 for (i = 0; i < targets.length (); i++)
3420 if (n->semantically_equivalent_p (targets[i]))
3421 return true;
3422
3423 /* At a moment we allow middle end to dig out new external declarations
3424 as a targets of polymorphic calls. */
3425 if (!final && !n->definition)
3426 return true;
3427 return false;
3428 }
3429
3430
3431
3432 /* Return true if N can be possibly target of a polymorphic call of
3433 OBJ_TYPE_REF expression REF in STMT. */
3434
3435 bool
3436 possible_polymorphic_call_target_p (tree ref,
3437 gimple stmt,
3438 struct cgraph_node *n)
3439 {
3440 ipa_polymorphic_call_context context (current_function_decl, ref, stmt);
3441 tree call_fn = gimple_call_fn (stmt);
3442
3443 return possible_polymorphic_call_target_p (obj_type_ref_class (call_fn),
3444 tree_to_uhwi
3445 (OBJ_TYPE_REF_TOKEN (call_fn)),
3446 context,
3447 n);
3448 }
3449
3450
/* After callgraph construction new external nodes may appear.
   Add them into the graph.  */

void
update_type_inheritance_graph (void)
{
  struct cgraph_node *n;

  if (!odr_hash)
    return;
  /* Cached target lists may become stale once new types are added.  */
  free_polymorphic_call_targets_hash ();
  timevar_push (TV_IPA_INHERITANCE);
  /* We reconstruct the graph starting from types of all methods seen in
     the unit.  Only virtual declarations without a definition need to be
     added; defined ones were processed during the initial build.  */
  FOR_EACH_FUNCTION (n)
    if (DECL_VIRTUAL_P (n->decl)
	&& !n->definition
	&& n->real_symbol_p ())
      get_odr_type (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl)), true);
  timevar_pop (TV_IPA_INHERITANCE);
}
3472
3473
3474 /* Return true if N looks like likely target of a polymorphic call.
3475 Rule out cxa_pure_virtual, noreturns, function declared cold and
3476 other obvious cases. */
3477
3478 bool
3479 likely_target_p (struct cgraph_node *n)
3480 {
3481 int flags;
3482 /* cxa_pure_virtual and similar things are not likely. */
3483 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
3484 return false;
3485 flags = flags_from_decl_or_type (n->decl);
3486 if (flags & ECF_NORETURN)
3487 return false;
3488 if (lookup_attribute ("cold",
3489 DECL_ATTRIBUTES (n->decl)))
3490 return false;
3491 if (n->frequency < NODE_FREQUENCY_NORMAL)
3492 return false;
3493 /* If there are no live virtual tables referring the target,
3494 the only way the target can be called is an instance coming from other
3495 compilation unit; speculative devirtualization is built around an
3496 assumption that won't happen. */
3497 if (!referenced_from_vtable_p (n))
3498 return false;
3499 return true;
3500 }
3501
3502 /* Compare type warning records P1 and P2 and choose one with larger count;
3503 helper for qsort. */
3504
3505 int
3506 type_warning_cmp (const void *p1, const void *p2)
3507 {
3508 const odr_type_warn_count *t1 = (const odr_type_warn_count *)p1;
3509 const odr_type_warn_count *t2 = (const odr_type_warn_count *)p2;
3510
3511 if (t1->dyn_count < t2->dyn_count)
3512 return 1;
3513 if (t1->dyn_count > t2->dyn_count)
3514 return -1;
3515 return t2->count - t1->count;
3516 }
3517
3518 /* Compare decl warning records P1 and P2 and choose one with larger count;
3519 helper for qsort. */
3520
3521 int
3522 decl_warning_cmp (const void *p1, const void *p2)
3523 {
3524 const decl_warn_count *t1 = *(const decl_warn_count * const *)p1;
3525 const decl_warn_count *t2 = *(const decl_warn_count * const *)p2;
3526
3527 if (t1->dyn_count < t2->dyn_count)
3528 return 1;
3529 if (t1->dyn_count > t2->dyn_count)
3530 return -1;
3531 return t2->count - t1->count;
3532 }
3533
3534
3535 /* Try to speculatively devirtualize call to OTR_TYPE with OTR_TOKEN with
3536 context CTX. */
3537
3538 struct cgraph_node *
3539 try_speculative_devirtualization (tree otr_type, HOST_WIDE_INT otr_token,
3540 ipa_polymorphic_call_context ctx)
3541 {
3542 vec <cgraph_node *>targets
3543 = possible_polymorphic_call_targets
3544 (otr_type, otr_token, ctx, NULL, NULL, true);
3545 unsigned int i;
3546 struct cgraph_node *likely_target = NULL;
3547
3548 for (i = 0; i < targets.length (); i++)
3549 if (likely_target_p (targets[i]))
3550 {
3551 if (likely_target)
3552 return NULL;
3553 likely_target = targets[i];
3554 }
3555 if (!likely_target
3556 ||!likely_target->definition
3557 || DECL_EXTERNAL (likely_target->decl))
3558 return NULL;
3559
3560 /* Don't use an implicitly-declared destructor (c++/58678). */
3561 struct cgraph_node *non_thunk_target
3562 = likely_target->function_symbol ();
3563 if (DECL_ARTIFICIAL (non_thunk_target->decl))
3564 return NULL;
3565 if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
3566 && likely_target->can_be_discarded_p ())
3567 return NULL;
3568 return likely_target;
3569 }
3570
3571 /* The ipa-devirt pass.
3572 When polymorphic call has only one likely target in the unit,
3573 turn it into a speculative call. */
3574
3575 static unsigned int
3576 ipa_devirt (void)
3577 {
3578 struct cgraph_node *n;
3579 hash_set<void *> bad_call_targets;
3580 struct cgraph_edge *e;
3581
3582 int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
3583 int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
3584 int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
3585 int ndropped = 0;
3586
3587 if (!odr_types_ptr)
3588 return 0;
3589
3590 if (dump_file)
3591 dump_type_inheritance_graph (dump_file);
3592
3593 /* We can output -Wsuggest-final-methods and -Wsuggest-final-types warnings.
3594 This is implemented by setting up final_warning_records that are updated
3595 by get_polymorphic_call_targets.
3596 We need to clear cache in this case to trigger recomputation of all
3597 entries. */
3598 if (warn_suggest_final_methods || warn_suggest_final_types)
3599 {
3600 final_warning_records = new (final_warning_record);
3601 final_warning_records->type_warnings = vNULL;
3602 final_warning_records->type_warnings.safe_grow_cleared (odr_types.length ());
3603 free_polymorphic_call_targets_hash ();
3604 }
3605
3606 FOR_EACH_DEFINED_FUNCTION (n)
3607 {
3608 bool update = false;
3609 if (!opt_for_fn (n->decl, flag_devirtualize))
3610 continue;
3611 if (dump_file && n->indirect_calls)
3612 fprintf (dump_file, "\n\nProcesing function %s/%i\n",
3613 n->name (), n->order);
3614 for (e = n->indirect_calls; e; e = e->next_callee)
3615 if (e->indirect_info->polymorphic)
3616 {
3617 struct cgraph_node *likely_target = NULL;
3618 void *cache_token;
3619 bool final;
3620
3621 if (final_warning_records)
3622 final_warning_records->dyn_count = e->count;
3623
3624 vec <cgraph_node *>targets
3625 = possible_polymorphic_call_targets
3626 (e, &final, &cache_token, true);
3627 unsigned int i;
3628
3629 /* Trigger warnings by calculating non-speculative targets. */
3630 if (warn_suggest_final_methods || warn_suggest_final_types)
3631 possible_polymorphic_call_targets (e);
3632
3633 if (dump_file)
3634 dump_possible_polymorphic_call_targets
3635 (dump_file, e);
3636
3637 npolymorphic++;
3638
3639 /* See if the call can be devirtualized by means of ipa-prop's
3640 polymorphic call context propagation. If not, we can just
3641 forget about this call being polymorphic and avoid some heavy
3642 lifting in remove_unreachable_nodes that will otherwise try to
3643 keep all possible targets alive until inlining and in the inliner
3644 itself.
3645
3646 This may need to be revisited once we add further ways to use
3647 the may edges, but it is a resonable thing to do right now. */
3648
3649 if ((e->indirect_info->param_index == -1
3650 || (!opt_for_fn (n->decl, flag_devirtualize_speculatively)
3651 && e->indirect_info->vptr_changed))
3652 && !flag_ltrans_devirtualize)
3653 {
3654 e->indirect_info->polymorphic = false;
3655 ndropped++;
3656 if (dump_file)
3657 fprintf (dump_file, "Dropping polymorphic call info;"
3658 " it can not be used by ipa-prop\n");
3659 }
3660
3661 if (!opt_for_fn (n->decl, flag_devirtualize_speculatively))
3662 continue;
3663
3664 if (!e->maybe_hot_p ())
3665 {
3666 if (dump_file)
3667 fprintf (dump_file, "Call is cold\n\n");
3668 ncold++;
3669 continue;
3670 }
3671 if (e->speculative)
3672 {
3673 if (dump_file)
3674 fprintf (dump_file, "Call is already speculated\n\n");
3675 nspeculated++;
3676
3677 /* When dumping see if we agree with speculation. */
3678 if (!dump_file)
3679 continue;
3680 }
3681 if (bad_call_targets.contains (cache_token))
3682 {
3683 if (dump_file)
3684 fprintf (dump_file, "Target list is known to be useless\n\n");
3685 nmultiple++;
3686 continue;
3687 }
3688 for (i = 0; i < targets.length (); i++)
3689 if (likely_target_p (targets[i]))
3690 {
3691 if (likely_target)
3692 {
3693 likely_target = NULL;
3694 if (dump_file)
3695 fprintf (dump_file, "More than one likely target\n\n");
3696 nmultiple++;
3697 break;
3698 }
3699 likely_target = targets[i];
3700 }
3701 if (!likely_target)
3702 {
3703 bad_call_targets.add (cache_token);
3704 continue;
3705 }
3706 /* This is reached only when dumping; check if we agree or disagree
3707 with the speculation. */
3708 if (e->speculative)
3709 {
3710 struct cgraph_edge *e2;
3711 struct ipa_ref *ref;
3712 e->speculative_call_info (e2, e, ref);
3713 if (e2->callee->ultimate_alias_target ()
3714 == likely_target->ultimate_alias_target ())
3715 {
3716 fprintf (dump_file, "We agree with speculation\n\n");
3717 nok++;
3718 }
3719 else
3720 {
3721 fprintf (dump_file, "We disagree with speculation\n\n");
3722 nwrong++;
3723 }
3724 continue;
3725 }
3726 if (!likely_target->definition)
3727 {
3728 if (dump_file)
3729 fprintf (dump_file, "Target is not a definition\n\n");
3730 nnotdefined++;
3731 continue;
3732 }
3733 /* Do not introduce new references to external symbols. While we
3734 can handle these just well, it is common for programs to
3735 incorrectly with headers defining methods they are linked
3736 with. */
3737 if (DECL_EXTERNAL (likely_target->decl))
3738 {
3739 if (dump_file)
3740 fprintf (dump_file, "Target is external\n\n");
3741 nexternal++;
3742 continue;
3743 }
3744 /* Don't use an implicitly-declared destructor (c++/58678). */
3745 struct cgraph_node *non_thunk_target
3746 = likely_target->function_symbol ();
3747 if (DECL_ARTIFICIAL (non_thunk_target->decl))
3748 {
3749 if (dump_file)
3750 fprintf (dump_file, "Target is artificial\n\n");
3751 nartificial++;
3752 continue;
3753 }
3754 if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
3755 && likely_target->can_be_discarded_p ())
3756 {
3757 if (dump_file)
3758 fprintf (dump_file, "Target is overwritable\n\n");
3759 noverwritable++;
3760 continue;
3761 }
3762 else if (dbg_cnt (devirt))
3763 {
3764 if (dump_enabled_p ())
3765 {
3766 location_t locus = gimple_location_safe (e->call_stmt);
3767 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
3768 "speculatively devirtualizing call in %s/%i to %s/%i\n",
3769 n->name (), n->order,
3770 likely_target->name (),
3771 likely_target->order);
3772 }
3773 if (!likely_target->can_be_discarded_p ())
3774 {
3775 cgraph_node *alias;
3776 alias = dyn_cast<cgraph_node *> (likely_target->noninterposable_alias ());
3777 if (alias)
3778 likely_target = alias;
3779 }
3780 nconverted++;
3781 update = true;
3782 e->make_speculative
3783 (likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
3784 }
3785 }
3786 if (update)
3787 inline_update_overall_summary (n);
3788 }
3789 if (warn_suggest_final_methods || warn_suggest_final_types)
3790 {
3791 if (warn_suggest_final_types)
3792 {
3793 final_warning_records->type_warnings.qsort (type_warning_cmp);
3794 for (unsigned int i = 0;
3795 i < final_warning_records->type_warnings.length (); i++)
3796 if (final_warning_records->type_warnings[i].count)
3797 {
3798 tree type = final_warning_records->type_warnings[i].type;
3799 int count = final_warning_records->type_warnings[i].count;
3800 long long dyn_count
3801 = final_warning_records->type_warnings[i].dyn_count;
3802
3803 if (!dyn_count)
3804 warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
3805 OPT_Wsuggest_final_types, count,
3806 "Declaring type %qD final "
3807 "would enable devirtualization of %i call",
3808 "Declaring type %qD final "
3809 "would enable devirtualization of %i calls",
3810 type,
3811 count);
3812 else
3813 warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
3814 OPT_Wsuggest_final_types, count,
3815 "Declaring type %qD final "
3816 "would enable devirtualization of %i call "
3817 "executed %lli times",
3818 "Declaring type %qD final "
3819 "would enable devirtualization of %i calls "
3820 "executed %lli times",
3821 type,
3822 count,
3823 dyn_count);
3824 }
3825 }
3826
3827 if (warn_suggest_final_methods)
3828 {
3829 vec<const decl_warn_count*> decl_warnings_vec = vNULL;
3830
3831 final_warning_records->decl_warnings.traverse
3832 <vec<const decl_warn_count *> *, add_decl_warning> (&decl_warnings_vec);
3833 decl_warnings_vec.qsort (decl_warning_cmp);
3834 for (unsigned int i = 0; i < decl_warnings_vec.length (); i++)
3835 {
3836 tree decl = decl_warnings_vec[i]->decl;
3837 int count = decl_warnings_vec[i]->count;
3838 long long dyn_count = decl_warnings_vec[i]->dyn_count;
3839
3840 if (!dyn_count)
3841 if (DECL_CXX_DESTRUCTOR_P (decl))
3842 warning_n (DECL_SOURCE_LOCATION (decl),
3843 OPT_Wsuggest_final_methods, count,
3844 "Declaring virtual destructor of %qD final "
3845 "would enable devirtualization of %i call",
3846 "Declaring virtual destructor of %qD final "
3847 "would enable devirtualization of %i calls",
3848 DECL_CONTEXT (decl), count);
3849 else
3850 warning_n (DECL_SOURCE_LOCATION (decl),
3851 OPT_Wsuggest_final_methods, count,
3852 "Declaring method %qD final "
3853 "would enable devirtualization of %i call",
3854 "Declaring method %qD final "
3855 "would enable devirtualization of %i calls",
3856 decl, count);
3857 else if (DECL_CXX_DESTRUCTOR_P (decl))
3858 warning_n (DECL_SOURCE_LOCATION (decl),
3859 OPT_Wsuggest_final_methods, count,
3860 "Declaring virtual destructor of %qD final "
3861 "would enable devirtualization of %i call "
3862 "executed %lli times",
3863 "Declaring virtual destructor of %qD final "
3864 "would enable devirtualization of %i calls "
3865 "executed %lli times",
3866 DECL_CONTEXT (decl), count, dyn_count);
3867 else
3868 warning_n (DECL_SOURCE_LOCATION (decl),
3869 OPT_Wsuggest_final_methods, count,
3870 "Declaring method %qD final "
3871 "would enable devirtualization of %i call "
3872 "executed %lli times",
3873 "Declaring method %qD final "
3874 "would enable devirtualization of %i calls "
3875 "executed %lli times",
3876 decl, count, dyn_count);
3877 }
3878 }
3879
3880 delete (final_warning_records);
3881 final_warning_records = 0;
3882 }
3883
3884 if (dump_file)
3885 fprintf (dump_file,
3886 "%i polymorphic calls, %i devirtualized,"
3887 " %i speculatively devirtualized, %i cold\n"
3888 "%i have multiple targets, %i overwritable,"
3889 " %i already speculated (%i agree, %i disagree),"
3890 " %i external, %i not defined, %i artificial, %i infos dropped\n",
3891 npolymorphic, ndevirtualized, nconverted, ncold,
3892 nmultiple, noverwritable, nspeculated, nok, nwrong,
3893 nexternal, nnotdefined, nartificial, ndropped);
3894 return ndevirtualized || ndropped ? TODO_remove_functions : 0;
3895 }
3896
namespace {

const pass_data pass_data_ipa_devirt =
{
  IPA_PASS, /* type */
  "devirt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_DEVIRT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_dump_symtab ), /* todo_flags_finish */
};

/* Pass-manager wrapper around ipa_devirt; no summaries or transforms
   are needed, all work happens in execute.  */

class pass_ipa_devirt : public ipa_opt_pass_d
{
public:
  pass_ipa_devirt (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* In LTO, always run the IPA passes and decide on function basis if the
	 pass is enabled.  */
      if (in_lto_p)
	return true;
      /* Outside LTO the pass is useful only with speculative
	 devirtualization or the -Wsuggest-final-* warnings enabled.  */
      return (flag_devirtualize
	      && (flag_devirtualize_speculatively
		  || (warn_suggest_final_methods
		      || warn_suggest_final_types))
	      && optimize);
    }

  virtual unsigned int execute (function *) { return ipa_devirt (); }

}; // class pass_ipa_devirt

} // anon namespace
3947
/* Create an instance of the ipa-devirt pass for the pass manager.  */

ipa_opt_pass_d *
make_pass_ipa_devirt (gcc::context *ctxt)
{
  return new pass_ipa_devirt (ctxt);
}
3953
3954 #include "gt-ipa-devirt.h"