/* Basic IPA utilities for type inheritance graph construction and
   devirtualization.
   Copyright (C) 2013-2014 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Brief vocabulary:
     ODR = One Definition Rule
       In short, the ODR states that:
       1 In any translation unit, a template, type, function, or object can
         have no more than one definition.  Some of these can have any number
         of declarations.  A definition provides an instance.
       2 In the entire program, an object or non-inline function cannot have
         more than one definition; if an object or function is used, it must
         have exactly one definition.  You can declare an object or function
         that is never used, in which case you don't have to provide
         a definition.  In no event can there be more than one definition.
       3 Some things, like types, templates, and extern inline functions, can
         be defined in more than one translation unit.  For a given entity,
         each definition must be the same.  Non-extern objects and functions
         in different translation units are different entities, even if their
         names and types are the same.

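       For illustration, a minimal hypothetical violation of rule 3, where
       two translation units define the same class name with different
       layout:

         // tu1.C                // tu2.C
         struct S { int a; };    struct S { long a; };

       The -Wodr diagnostics implemented below detect such mismatches when
       the two units meet at link-time optimization.
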
     OTR = OBJ_TYPE_REF
       This is the Gimple representation of type information of a polymorphic call.
       It contains two parameters:
         otr_type is a type of class whose method is called.
         otr_token is the index into virtual table where address is taken.

     BINFO
       This is the type inheritance information attached to each tree
       RECORD_TYPE by the C++ frontend.  It provides information about base
       types and virtual tables.

       BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
       BINFO also links to its type by BINFO_TYPE and to the virtual table by
       BINFO_VTABLE.

       Base types of a given type are enumerated by the BINFO_BASE_BINFO
       vector.  Members of this vector are not BINFOs associated
       with a base type.  Rather they are new copies of BINFOs
       (base BINFOs).  Their virtual tables may differ from
       virtual table of the base type.  Also BINFO_OFFSET specifies
       offset of the base within the type.

       In the case of single inheritance, the virtual table is shared
       and BINFO_VTABLE of base BINFO is NULL.  In the case of multiple
       inheritance the individual virtual tables are pointed to by
       BINFO_VTABLE of base binfos (that differs from BINFO_VTABLE of
       the binfo associated with the base type).

       BINFO lookup for a given base type and offset can be done by
       get_binfo_at_offset.  It returns proper BINFO whose virtual table
       can be used for lookup of virtual methods associated with the
       base type.

     token
       This is an index of virtual method in virtual table associated
       with the type defining it.  Token can be looked up from OBJ_TYPE_REF
       or from DECL_VINDEX of a given virtual table.

     polymorphic (indirect) call
       This is the callgraph representation of a virtual method call.  Every
       polymorphic call contains otr_type and otr_token taken from the
       original OBJ_TYPE_REF at callgraph construction time.

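     For illustration, a hypothetical source-level view of these two
     parameters: given

       struct A { virtual int f (); virtual int g (); };
       int call (A *a) { return a->g (); }

     the call a->g () is represented with otr_type A and otr_token 1, g
     occupying the second slot of A's virtual table (the exact slot
     numbering is ABI-dependent).
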
   What we do here:

   build_type_inheritance_graph triggers a construction of the type inheritance
   graph.

     We reconstruct it based on types of methods we see in the unit.
     This means that the graph is not complete.  Types with no methods are not
     inserted into the graph.  Also types without virtual methods are not
     represented at all, though it may be easy to add this.

     The inheritance graph is represented as follows:

       Vertices are structures odr_type.  Every odr_type may correspond
       to one or more tree type nodes that are equivalent by ODR rule.
       (the multiple type nodes appear only with link-time optimization)

       Edges are represented by odr_type->base and odr_type->derived_types.
       At the moment we do not track offsets of types for multiple inheritance.
       Adding this is easy.

   possible_polymorphic_call_targets returns, given the parameters found in
   an indirect polymorphic edge, all possible polymorphic call targets of
   the call.

   pass_ipa_devirt performs simple speculative devirtualization.
*/
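
/* For illustration, a small hypothetical hierarchy

     struct A { virtual void f (); };
     struct B : A { void f (); };
     struct C : A { void f (); };

   yields three odr_type vertices; B and C record A in their bases vectors
   while A records B and C in derived_types, so a walk starting from
   otr_type A reaches A::f, B::f and C::f.  */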

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "print-tree.h"
#include "calls.h"
#include "cgraph.h"
#include "expr.h"
#include "tree-pass.h"
#include "hash-set.h"
#include "target.h"
#include "hash-table.h"
#include "inchash.h"
#include "tree-pretty-print.h"
#include "ipa-utils.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "ipa-inline.h"
#include "diagnostic.h"
#include "tree-dfa.h"
#include "demangle.h"
#include "dbgcnt.h"
#include "stor-layout.h"
#include "intl.h"
#include "hash-map.h"

static bool odr_types_equivalent_p (tree, tree, bool, bool *,
                                    hash_set<tree> *);

static bool odr_violation_reported = false;

/* Dummy polymorphic call context.  */

const ipa_polymorphic_call_context ipa_dummy_polymorphic_call_context
  = {0, 0, NULL, NULL, false, true, true};

/* Pointer set of all call targets appearing in the cache.  */
static hash_set<cgraph_node *> *cached_polymorphic_call_targets;

/* The node of type inheritance graph.  For each type unique in
   One Definition Rule (ODR) sense, we produce one node linking all
   main variants of types equivalent to it, bases and derived types.  */

struct GTY(()) odr_type_d
{
  /* Leader type.  */
  tree type;
  /* All bases; built only for main variants of types.  */
  vec<odr_type> GTY((skip)) bases;
  /* All derived types with virtual methods seen in unit;
     built only for main variants of types.  */
  vec<odr_type> GTY((skip)) derived_types;

  /* All equivalent types, if more than one.  */
  vec<tree, va_gc> *types;
  /* Set of all equivalent types, if NON-NULL.  */
  hash_set<tree> * GTY((skip)) types_set;

  /* Unique ID indexing the type in odr_types array.  */
  int id;
  /* Is it in anonymous namespace? */
  bool anonymous_namespace;
  /* Do we know about all derivations of given type?  */
  bool all_derivations_known;
  /* Did we report ODR violation here?  */
  bool odr_violated;
};

static bool contains_type_p (tree, HOST_WIDE_INT, tree);


/* Return true if BINFO corresponds to a type with virtual methods.

   Every type has several BINFOs.  One is the BINFO associated with the type
   while others represent bases of derived types.  The BINFOs representing
   bases do not have BINFO_VTABLE pointer set when this is the single
   inheritance (because vtables are shared).  Look up the BINFO of type
   and check presence of its vtable.  */

static inline bool
polymorphic_type_binfo_p (tree binfo)
{
  /* See if BINFO's type has a virtual table associated with it.  */
  return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo)));
}

/* Return TRUE if all derived types of T are known and thus
   we may consider the walk of derived type complete.

   This is typically true only for final anonymous namespace types and types
   defined within functions (that may be COMDAT and thus shared across units,
   but with the same set of derived types).  */

static bool
type_all_derivations_known_p (tree t)
{
  if (TYPE_FINAL_P (t))
    return true;
  if (flag_ltrans)
    return false;
  if (type_in_anonymous_namespace_p (t))
    return true;
  return (decl_function_context (TYPE_NAME (t)) != NULL);
}

/* Return TRUE if type's constructors are all visible.  */

static bool
type_all_ctors_visible_p (tree t)
{
  return !flag_ltrans
         && cgraph_state >= CGRAPH_STATE_CONSTRUCTION
         /* We can not always use type_all_derivations_known_p.
            For function local types we must assume case where
            the function is COMDAT and shared in between units.

            TODO: These cases are quite easy to get, but we need
            to keep track of C++ privatizing via -Wno-weak
            as well as the IPA privatizing.  */
         && type_in_anonymous_namespace_p (t);
}

/* Return TRUE if type may have instances.  */

static bool
type_possibly_instantiated_p (tree t)
{
  tree vtable;
  varpool_node *vnode;

  /* TODO: Add abstract types here.  */
  if (!type_all_ctors_visible_p (t))
    return true;

  vtable = BINFO_VTABLE (TYPE_BINFO (t));
  if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
    vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
  vnode = varpool_node::get (vtable);
  return vnode && vnode->definition;
}

/* One Definition Rule hashtable helpers.  */

struct odr_hasher
{
  typedef odr_type_d value_type;
  typedef union tree_node compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Return the type that was declared with T's name so that T is a
   qualified variant of it.  */

static inline tree
main_odr_variant (const_tree t)
{
  if (TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
    return TREE_TYPE (TYPE_NAME (t));
  /* Unnamed types and non-C++-produced types can be compared by variants.  */
  else
    return TYPE_MAIN_VARIANT (t);
}

/* Produce hash based on type name.  */

static hashval_t
hash_type_name (tree t)
{
  gcc_checking_assert (main_odr_variant (t) == t);

  /* If not in LTO, all main variants are unique, so we can do
     pointer hash.  */
  if (!in_lto_p)
    return htab_hash_pointer (t);

  /* Anonymous types are unique.  */
  if (type_in_anonymous_namespace_p (t))
    return htab_hash_pointer (t);

  /* For polymorphic types, we can simply hash the virtual table.  */
  if (TREE_CODE (t) == RECORD_TYPE
      && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
    {
      tree v = BINFO_VTABLE (TYPE_BINFO (t));
      hashval_t hash = 0;

      if (TREE_CODE (v) == POINTER_PLUS_EXPR)
        {
          hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
          v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
        }

      v = DECL_ASSEMBLER_NAME (v);
      hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
      return hash;
    }

  /* Rest is not implemented yet.  */
  gcc_unreachable ();
}

/* Return the computed hashcode for ODR_TYPE.  */

inline hashval_t
odr_hasher::hash (const value_type *odr_type)
{
  return hash_type_name (odr_type->type);
}

/* For languages with One Definition Rule, work out if
   types are the same based on their name.

   This is non-trivial for LTO where minor differences in
   the type representation may have prevented type merging
   from merging two copies of otherwise equivalent type.

   Until we start streaming mangled type names, this function works
   only for polymorphic types.  */

bool
types_same_for_odr (const_tree type1, const_tree type2)
{
  gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));

  type1 = main_odr_variant (type1);
  type2 = main_odr_variant (type2);

  if (type1 == type2)
    return true;

  if (!in_lto_p)
    return false;

  /* Check for anonymous namespaces.  Those have !TREE_PUBLIC
     on the corresponding TYPE_STUB_DECL.  */
  if (type_in_anonymous_namespace_p (type1)
      || type_in_anonymous_namespace_p (type2))
    return false;

  /* See if types are obviously different (i.e. different codes
     or polymorphic wrt non-polymorphic).  This is not strictly correct
     for ODR violating programs, but we can't do better without streaming
     ODR names.  */
  if (TREE_CODE (type1) != TREE_CODE (type2))
    return false;
  if (TREE_CODE (type1) == RECORD_TYPE
      && (TYPE_BINFO (type1) == NULL_TREE) != (TYPE_BINFO (type2) == NULL_TREE))
    return false;
  if (TREE_CODE (type1) == RECORD_TYPE && TYPE_BINFO (type1)
      && (BINFO_VTABLE (TYPE_BINFO (type1)) == NULL_TREE)
         != (BINFO_VTABLE (TYPE_BINFO (type2)) == NULL_TREE))
    return false;

  /* At the moment we have no way to establish ODR equivalence at LTO
     other than comparing virtual table pointers of polymorphic types.
     Eventually we should start saving mangled names in TYPE_NAME.
     Then this condition will become non-trivial.  */

  if (TREE_CODE (type1) == RECORD_TYPE
      && TYPE_BINFO (type1) && TYPE_BINFO (type2)
      && BINFO_VTABLE (TYPE_BINFO (type1))
      && BINFO_VTABLE (TYPE_BINFO (type2)))
    {
      tree v1 = BINFO_VTABLE (TYPE_BINFO (type1));
      tree v2 = BINFO_VTABLE (TYPE_BINFO (type2));
      gcc_assert (TREE_CODE (v1) == POINTER_PLUS_EXPR
                  && TREE_CODE (v2) == POINTER_PLUS_EXPR);
      return (operand_equal_p (TREE_OPERAND (v1, 1),
                               TREE_OPERAND (v2, 1), 0)
              && DECL_ASSEMBLER_NAME
                   (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
                 == DECL_ASSEMBLER_NAME
                   (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
    }
  gcc_unreachable ();
}


/* Compare types T1 and T2 and return true if they are
   equivalent.  */

inline bool
odr_hasher::equal (const value_type *t1, const compare_type *ct2)
{
  tree t2 = const_cast <tree> (ct2);

  gcc_checking_assert (main_odr_variant (t2) == t2);
  if (t1->type == t2)
    return true;
  if (!in_lto_p)
    return false;
  return types_same_for_odr (t1->type, t2);
}

/* Free ODR type V.  */

inline void
odr_hasher::remove (value_type *v)
{
  v->bases.release ();
  v->derived_types.release ();
  if (v->types_set)
    delete v->types_set;
  ggc_free (v);
}

/* ODR type hash used to look up ODR type based on tree type node.  */

typedef hash_table<odr_hasher> odr_hash_type;
static odr_hash_type *odr_hash;

/* ODR types are also stored into ODR_TYPE vector to allow consistent
   walking.  Bases appear before derived types.  Vector is garbage collected
   so we won't end up visiting empty types.  */

static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
#define odr_types (*odr_types_ptr)

/* Set TYPE_BINFO of TYPE and its variants to BINFO.  */
void
set_type_binfo (tree type, tree binfo)
{
  for (; type; type = TYPE_NEXT_VARIANT (type))
    if (COMPLETE_TYPE_P (type))
      TYPE_BINFO (type) = binfo;
    else
      gcc_assert (!TYPE_BINFO (type));
}

/* Compare T1 and T2 based on name or structure.  */

static bool
odr_subtypes_equivalent_p (tree t1, tree t2, hash_set<tree> *visited)
{
  bool an1, an2;

  /* This can happen in incomplete types that should be handled earlier.  */
  gcc_assert (t1 && t2);

  t1 = main_odr_variant (t1);
  t2 = main_odr_variant (t2);
  if (t1 == t2)
    return true;
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;
  if ((TYPE_NAME (t1) == NULL_TREE) != (TYPE_NAME (t2) == NULL_TREE))
    return false;
  if (TYPE_NAME (t1) && DECL_NAME (TYPE_NAME (t1)) != DECL_NAME (TYPE_NAME (t2)))
    return false;

  /* Anonymous namespace types must match exactly.  */
  an1 = type_in_anonymous_namespace_p (t1);
  an2 = type_in_anonymous_namespace_p (t2);
  if (an1 != an2 || an1)
    return false;

  /* For types where we can not establish ODR equivalency, recurse and deeply
     compare.  */
  if (TREE_CODE (t1) != RECORD_TYPE
      || !TYPE_BINFO (t1) || !TYPE_BINFO (t2)
      || !polymorphic_type_binfo_p (TYPE_BINFO (t1))
      || !polymorphic_type_binfo_p (TYPE_BINFO (t2)))
    {
      /* This should really be a pair hash, but for the moment we do not need
         100% reliability and it would be better to compare all ODR types so
         recursion here is needed only for component types.  */
      if (visited->add (t1))
        return true;
      return odr_types_equivalent_p (t1, t2, false, NULL, visited);
    }
  return types_same_for_odr (t1, t2);
}

/* Output ODR violation warning about T1 and T2 with REASON.
   Display location of ST1 and ST2 if REASON speaks about field or
   method of the type.
   If WARN is false, do nothing.  Set WARNED if warning was indeed
   output.  */

void
warn_odr (tree t1, tree t2, tree st1, tree st2,
          bool warn, bool *warned, const char *reason)
{
  tree decl2 = TYPE_NAME (t2);

  if (!warn)
    return;
  if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (t1)), OPT_Wodr,
                   "type %qT violates one definition rule",
                   t1))
    return;
  if (!st1)
    ;
  else if (TREE_CODE (st1) == FIELD_DECL)
    {
      inform (DECL_SOURCE_LOCATION (decl2),
              "a different type is defined in another translation unit");
      inform (DECL_SOURCE_LOCATION (st1),
              "the first difference of corresponding definitions is field %qD",
              st1);
      decl2 = st2;
    }
  else if (TREE_CODE (st1) == FUNCTION_DECL)
    {
      inform (DECL_SOURCE_LOCATION (decl2),
              "a different type is defined in another translation unit");
      inform (DECL_SOURCE_LOCATION (st1),
              "the first difference of corresponding definitions is method %qD",
              st1);
      decl2 = st2;
    }
  else
    return;
  inform (DECL_SOURCE_LOCATION (decl2), reason);

  if (warned)
    *warned = true;
}

/* We already warned about ODR mismatch.  T1 and T2 ought to be equivalent
   because they are used in the same place in ODR matching types.
   They are not; inform the user.  */

void
warn_types_mismatch (tree t1, tree t2)
{
  if (!TYPE_NAME (t1) || !TYPE_NAME (t2))
    return;
  /* In Firefox it is a common bug to have same types but in
     different namespaces.  Be a bit more informative on
     this.  */
  if (TYPE_CONTEXT (t1) && TYPE_CONTEXT (t2)
      && (((TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL)
           != (TREE_CODE (TYPE_CONTEXT (t2)) == NAMESPACE_DECL))
          || (TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL
              && (DECL_NAME (TYPE_CONTEXT (t1)) !=
                  DECL_NAME (TYPE_CONTEXT (t2))))))
    inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
            "type %qT should match type %qT but is defined "
            "in a different namespace",
            t1, t2);
  else
    inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
            "type %qT should match type %qT",
            t1, t2);
  inform (DECL_SOURCE_LOCATION (TYPE_NAME (t2)),
          "the incompatible type is defined here");
}
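
/* For illustration, a hypothetical instance of the namespace confusion
   handled above: translation unit 1 defines

     namespace n { struct S { int i; }; }
     struct Outer { n::S field; };

   while translation unit 2 defines Outer with a field of type ::S.  Both
   field types print as S, so the first inform call spells out that the
   two definitions live in different namespaces.  */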

/* Compare T1 and T2, report ODR violations if WARN is true and set
   WARNED to true if anything is reported.  Return true if types match.
   If true is returned, the types are also compatible in the sense of
   gimple_canonical_types_compatible_p.  */

static bool
odr_types_equivalent_p (tree t1, tree t2, bool warn, bool *warned,
                        hash_set<tree> *visited)
{
  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;
  gcc_assert (!type_in_anonymous_namespace_p (t1));
  gcc_assert (!type_in_anonymous_namespace_p (t2));

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
                G_("a different type is defined in another translation unit"));
      return false;
    }

  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
                G_("a type with different qualifiers is defined in another "
                   "translation unit"));
      return false;
    }

  if (comp_type_attributes (t1, t2) != 1)
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
                G_("a type with different attributes "
                   "is defined in another translation unit"));
      return false;
    }

  if (TREE_CODE (t1) == ENUMERAL_TYPE)
    {
      tree v1, v2;
      for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
           v1 && v2 ; v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
        {
          if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
            {
              warn_odr (t1, t2, NULL, NULL, warn, warned,
                        G_("an enum with different value name"
                           " is defined in another translation unit"));
              return false;
            }
          if (TREE_VALUE (v1) != TREE_VALUE (v2)
              && !operand_equal_p (DECL_INITIAL (TREE_VALUE (v1)),
                                   DECL_INITIAL (TREE_VALUE (v2)), 0))
            {
              warn_odr (t1, t2, NULL, NULL, warn, warned,
                        G_("an enum with different values is defined"
                           " in another translation unit"));
              return false;
            }
        }
      if (v1 || v2)
        {
          warn_odr (t1, t2, NULL, NULL, warn, warned,
                    G_("an enum with mismatching number of values "
                       "is defined in another translation unit"));
          return false;
        }
    }

  /* Non-aggregate types can be handled cheaply.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
        {
          warn_odr (t1, t2, NULL, NULL, warn, warned,
                    G_("a type with different precision is defined "
                       "in another translation unit"));
          return false;
        }
      if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
        {
          warn_odr (t1, t2, NULL, NULL, warn, warned,
                    G_("a type with different signedness is defined "
                       "in another translation unit"));
          return false;
        }

      if (TREE_CODE (t1) == INTEGER_TYPE
          && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
        {
          /* char WRT uint_8?  */
          warn_odr (t1, t2, NULL, NULL, warn, warned,
                    G_("a different type is defined in another "
                       "translation unit"));
          return false;
        }

      /* For canonical type comparisons we do not want to build SCCs
         so we cannot compare pointed-to types.  But we can, for now,
         require the same pointed-to type kind and match what
         useless_type_conversion_p would do.  */
      if (POINTER_TYPE_P (t1))
        {
          if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
              != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
            {
              warn_odr (t1, t2, NULL, NULL, warn, warned,
                        G_("it is defined as a pointer in different address "
                           "space in another translation unit"));
              return false;
            }

          if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
                                          visited))
            {
              warn_odr (t1, t2, NULL, NULL, warn, warned,
                        G_("it is defined as a pointer to different type "
                           "in another translation unit"));
              if (warn && warned)
                warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
              return false;
            }
        }

      /* Tail-recurse to components.  */
      if ((TREE_CODE (t1) == VECTOR_TYPE || TREE_CODE (t1) == COMPLEX_TYPE)
          && !odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
                                         visited))
        {
          /* Probably specific enough.  */
          warn_odr (t1, t2, NULL, NULL, warn, warned,
                    G_("a different type is defined "
                       "in another translation unit"));
          if (warn && warned)
            warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
          return false;
        }

      gcc_assert (operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0));
      gcc_assert (operand_equal_p (TYPE_SIZE_UNIT (t1),
                                   TYPE_SIZE_UNIT (t2), 0));
      gcc_assert (TYPE_MODE (t1) == TYPE_MODE (t2));

      return true;
    }

  /* Do type-specific comparisons.  */
  switch (TREE_CODE (t1))
    {
    case ARRAY_TYPE:
      {
        /* Array types are the same if the element types are the same and
           the number of elements are the same.  */
        if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
                                        visited))
          {
            warn_odr (t1, t2, NULL, NULL, warn, warned,
                      G_("a different type is defined in another "
                         "translation unit"));
            if (warn && warned)
              warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
          }
        gcc_assert (TYPE_STRING_FLAG (t1) == TYPE_STRING_FLAG (t2));
        gcc_assert (TYPE_NONALIASED_COMPONENT (t1)
                    == TYPE_NONALIASED_COMPONENT (t2));

        tree i1 = TYPE_DOMAIN (t1);
        tree i2 = TYPE_DOMAIN (t2);

        /* For an incomplete external array, the type domain can be
           NULL_TREE.  Check this condition also.  */
        if (i1 == NULL_TREE || i2 == NULL_TREE)
          return true;

        tree min1 = TYPE_MIN_VALUE (i1);
        tree min2 = TYPE_MIN_VALUE (i2);
        tree max1 = TYPE_MAX_VALUE (i1);
        tree max2 = TYPE_MAX_VALUE (i2);

        /* In C++, minimums should always be 0.  */
        gcc_assert (min1 == min2);
        if (!operand_equal_p (max1, max2, 0))
          {
            warn_odr (t1, t2, NULL, NULL, warn, warned,
                      G_("an array of different size is defined "
                         "in another translation unit"));
            return false;
          }
        gcc_assert (operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0));
        gcc_assert (operand_equal_p (TYPE_SIZE_UNIT (t1),
                                     TYPE_SIZE_UNIT (t2), 0));
      }
    return true;

    case METHOD_TYPE:
    case FUNCTION_TYPE:
      /* Function types are the same if the return type and arguments types
         are the same.  */
      if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
                                      visited))
        {
          warn_odr (t1, t2, NULL, NULL, warn, warned,
                    G_("has different return value "
                       "in another translation unit"));
          if (warn && warned)
            warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
          return false;
        }

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
        return true;
      else
        {
          tree parms1, parms2;

          for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
               parms1 && parms2;
               parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
            {
              if (!odr_subtypes_equivalent_p
                     (TREE_VALUE (parms1), TREE_VALUE (parms2), visited))
                {
                  warn_odr (t1, t2, NULL, NULL, warn, warned,
                            G_("has different parameters in another "
                               "translation unit"));
                  if (warn && warned)
                    warn_types_mismatch (TREE_VALUE (parms1),
                                         TREE_VALUE (parms2));
                  return false;
                }
            }

          if (parms1 || parms2)
            {
              warn_odr (t1, t2, NULL, NULL, warn, warned,
                        G_("has different parameters "
                           "in another translation unit"));
              return false;
            }

          return true;
        }

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f1, f2;

        /* For aggregate types, all the fields must be the same.  */
        if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
          {
            for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
                 f1 || f2;
                 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
              {
                /* Skip non-fields.  */
                while (f1 && TREE_CODE (f1) != FIELD_DECL)
                  f1 = TREE_CHAIN (f1);
                while (f2 && TREE_CODE (f2) != FIELD_DECL)
                  f2 = TREE_CHAIN (f2);
                if (!f1 || !f2)
                  break;
                if (DECL_ARTIFICIAL (f1) != DECL_ARTIFICIAL (f2))
                  break;
                if (DECL_NAME (f1) != DECL_NAME (f2)
                    && !DECL_ARTIFICIAL (f1))
                  {
                    warn_odr (t1, t2, f1, f2, warn, warned,
                              G_("a field with different name is defined "
                                 "in another translation unit"));
                    return false;
                  }
                if (!odr_subtypes_equivalent_p (TREE_TYPE (f1),
                                                TREE_TYPE (f2), visited))
                  {
                    /* Do not warn about artificial fields and just go into
                       generic field mismatch warning.  */
                    if (DECL_ARTIFICIAL (f1))
                      break;

                    warn_odr (t1, t2, f1, f2, warn, warned,
                              G_("a field of same name but different type "
                                 "is defined in another translation unit"));
                    if (warn && warned)
                      warn_types_mismatch (TREE_TYPE (f1), TREE_TYPE (f2));
                    return false;
                  }
                if (!gimple_compare_field_offset (f1, f2))
                  {
                    /* Do not warn about artificial fields and just go into
                       generic field mismatch warning.  */
                    if (DECL_ARTIFICIAL (f1))
                      break;
                    warn_odr (t1, t2, f1, f2, warn, warned,
                              G_("fields have different layout "
                                 "in another translation unit"));
                    return false;
                  }
                gcc_assert (DECL_NONADDRESSABLE_P (f1)
                            == DECL_NONADDRESSABLE_P (f2));
              }

            /* If one aggregate has more fields than the other, they
               are not the same.  */
            if (f1 || f2)
              {
                warn_odr (t1, t2, NULL, NULL, warn, warned,
                          G_("a type with different number of fields "
                             "is defined in another translation unit"));
                return false;
              }
            if ((TYPE_MAIN_VARIANT (t1) == t1 || TYPE_MAIN_VARIANT (t2) == t2)
                && (TYPE_METHODS (TYPE_MAIN_VARIANT (t1))
                    != TYPE_METHODS (TYPE_MAIN_VARIANT (t2))))
              {
                for (f1 = TYPE_METHODS (TYPE_MAIN_VARIANT (t1)),
                     f2 = TYPE_METHODS (TYPE_MAIN_VARIANT (t2));
                     f1 && f2 ; f1 = DECL_CHAIN (f1), f2 = DECL_CHAIN (f2))
                  {
                    if (DECL_ASSEMBLER_NAME (f1) != DECL_ASSEMBLER_NAME (f2))
                      {
                        warn_odr (t1, t2, f1, f2, warn, warned,
                                  G_("a different method of same type "
                                     "is defined in another translation unit"));
                        return false;
                      }
                    if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
                      {
                        warn_odr (t1, t2, f1, f2, warn, warned,
                                  G_("a definition that differs by virtual "
                                     "keyword in another translation unit"));
                        return false;
                      }
                    if (DECL_VINDEX (f1) != DECL_VINDEX (f2))
                      {
                        warn_odr (t1, t2, f1, f2, warn, warned,
                                  G_("virtual table layout differs in another "
                                     "translation unit"));
                        return false;
                      }
                    if (!odr_subtypes_equivalent_p (TREE_TYPE (f1),
                                                    TREE_TYPE (f2), visited))
                      {
                        warn_odr (t1, t2, f1, f2, warn, warned,
                                  G_("method with incompatible type is defined "
                                     "in another translation unit"));
                        return false;
                      }
                  }
                if (f1 || f2)
                  {
                    warn_odr (t1, t2, NULL, NULL, warn, warned,
                              G_("a type with different number of methods "
                                 "is defined in another translation unit"));
                    return false;
                  }
              }
            gcc_assert (operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0));
            gcc_assert (operand_equal_p (TYPE_SIZE_UNIT (t1),
                                         TYPE_SIZE_UNIT (t2), 0));
          }

        return true;
      }

    default:
      gcc_unreachable ();
    }
}
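
/* For illustration, a hypothetical pair of definitions that the field walk
   above rejects:

     // tu1.C                      // tu2.C
     struct S { int count; };      struct S { unsigned count; };

   The fields match by name, but recursing into their types hits the
   signedness check, so the mismatch is reported back through warn_odr
   and warn_types_mismatch.  */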

/* TYPE is equivalent to VAL by ODR, but its tree representation differs
   from VAL->type.  This may happen in LTO where tree merging did not merge
   all variants of the same type.  It may or may not mean an ODR violation.
   Add it to the list of duplicates and warn on some violations.  */

static bool
add_type_duplicate (odr_type val, tree type)
{
  bool build_bases = false;
  if (!val->types_set)
    val->types_set = new hash_set<tree>;

  /* Always prefer complete type to be the leader.  */
  if (!COMPLETE_TYPE_P (val->type)
      && COMPLETE_TYPE_P (type))
    {
      tree tmp = type;

      build_bases = true;
      type = val->type;
      val->type = tmp;
    }

  /* See if this duplicate is new.  */
  if (!val->types_set->add (type))
    {
      bool merge = true;
      bool base_mismatch = false;
      unsigned int i,j;
      bool warned = false;
      hash_set<tree> visited;

      gcc_assert (in_lto_p);
      vec_safe_push (val->types, type);

      /* First we compare memory layout.  */
      if (!odr_types_equivalent_p (val->type, type,
                                   !flag_ltrans && !val->odr_violated,
                                   &warned, &visited))
        {
          merge = false;
          odr_violation_reported = true;
          val->odr_violated = true;
          if (cgraph_dump_file)
            {
              fprintf (cgraph_dump_file, "ODR violation\n");

              print_node (cgraph_dump_file, "", val->type, 0);
              putc ('\n',cgraph_dump_file);
              print_node (cgraph_dump_file, "", type, 0);
              putc ('\n',cgraph_dump_file);
            }
        }

      /* Next, sanity check that bases are the same.  If not, we will end
         up producing wrong answers.  */
      if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
          && TREE_CODE (val->type) == RECORD_TYPE
          && TREE_CODE (type) == RECORD_TYPE
          && TYPE_BINFO (val->type) && TYPE_BINFO (type))
        {
          for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
            if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
                                            (TYPE_BINFO (type), i)))
              {
                odr_type base = get_odr_type
                                  (BINFO_TYPE
                                     (BINFO_BASE_BINFO (TYPE_BINFO (type),
                                                        i)),
                                   true);
                if (val->bases.length () <= j || val->bases[j] != base)
                  base_mismatch = true;
                j++;
              }
          if (base_mismatch)
            {
              merge = false;
              odr_violation_reported = true;

              if (!warned && !val->odr_violated)
                warn_odr (type, val->type, NULL, NULL, !warned, &warned,
                          "a type with the same name but different bases is "
                          "defined in another translation unit");
              val->odr_violated = true;
              if (cgraph_dump_file)
                {
                  fprintf (cgraph_dump_file,
                           "ODR base violation or merging bug?\n");

                  print_node (cgraph_dump_file, "", val->type, 0);
                  putc ('\n',cgraph_dump_file);
                  print_node (cgraph_dump_file, "", type, 0);
                  putc ('\n',cgraph_dump_file);
                }
            }
        }

      /* Regularize things a little.  During LTO same types may come with
         different BINFOs: either because their virtual table was
         not merged by tree merging and only later at decl merging, or
         because one type comes with an external vtable while the other
         comes with an internal one.  We want to merge equivalent binfos
         to conserve memory and streaming overhead.

         The external vtables are more harmful: they contain references
         to external declarations of methods that may be defined in the
         merged LTO unit.  For this reason we absolutely need to remove
         them and replace them with internal variants.  Not doing so will
         lead to incomplete answers from possible_polymorphic_call_targets.  */
      if (!flag_ltrans && merge
          && TREE_CODE (val->type) == RECORD_TYPE
          && TREE_CODE (type) == RECORD_TYPE
          && TYPE_BINFO (val->type) && TYPE_BINFO (type)
          && TYPE_MAIN_VARIANT (type) == type
          && TYPE_MAIN_VARIANT (val->type) == val->type
          && BINFO_VTABLE (TYPE_BINFO (val->type))
          && BINFO_VTABLE (TYPE_BINFO (type)))
        {
          tree master_binfo = TYPE_BINFO (val->type);
          tree v1 = BINFO_VTABLE (master_binfo);
          tree v2 = BINFO_VTABLE (TYPE_BINFO (type));

          if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
            {
              gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
                          && operand_equal_p (TREE_OPERAND (v1, 1),
                                              TREE_OPERAND (v2, 1), 0));
              v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
              v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
            }
          gcc_assert (DECL_ASSEMBLER_NAME (v1)
                      == DECL_ASSEMBLER_NAME (v2));

          if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
            {
              unsigned int i;

              set_type_binfo (val->type, TYPE_BINFO (type));
              for (i = 0; i < val->types->length (); i++)
                {
                  if (TYPE_BINFO ((*val->types)[i])
                      == master_binfo)
                    set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
                }
              BINFO_TYPE (TYPE_BINFO (type)) = val->type;
            }
          else
            set_type_binfo (type, master_binfo);
        }
    }
  return build_bases;
}

/* Get ODR type hash entry for TYPE.  If INSERT is true, create
   possibly new entry.  */

odr_type
get_odr_type (tree type, bool insert)
{
  odr_type_d **slot;
  odr_type val;
  hashval_t hash;
  bool build_bases = false;
  bool insert_to_odr_array = false;
  int base_id = -1;

  type = main_odr_variant (type);

  hash = hash_type_name (type);
  slot
    = odr_hash->find_slot_with_hash (type, hash, insert ? INSERT : NO_INSERT);
  if (!slot)
    return NULL;

  /* See if we already have entry for type.  */
  if (*slot)
    {
      val = *slot;

      /* With LTO we need to support multiple tree representation of
         the same ODR type.  */
      if (val->type != type)
        build_bases = add_type_duplicate (val, type);
    }
  else
    {
      val = ggc_cleared_alloc<odr_type_d> ();
      val->type = type;
      val->bases = vNULL;
      val->derived_types = vNULL;
      val->anonymous_namespace = type_in_anonymous_namespace_p (type);
      build_bases = COMPLETE_TYPE_P (val->type);
      insert_to_odr_array = true;
    }

  if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
      && type == TYPE_MAIN_VARIANT (type))
    {
      tree binfo = TYPE_BINFO (type);
      unsigned int i;

      gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) == type);

      val->all_derivations_known = type_all_derivations_known_p (type);
      *slot = val;
      for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
        /* For now record only polymorphic types; others are
           pointless for devirtualization and we can not precisely
           determine ODR equivalency of these during LTO.  */
        if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
          {
            odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
                                                                        i)),
                                          true);
            gcc_assert (TYPE_MAIN_VARIANT (base->type) == base->type);
            base->derived_types.safe_push (val);
            val->bases.safe_push (base);
            if (base->id > base_id)
              base_id = base->id;
          }
    }
  /* Ensure that type always appears after bases.  */
  if (insert_to_odr_array)
    {
      if (odr_types_ptr)
        val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  else if (base_id > val->id)
    {
      odr_types[val->id] = 0;
      /* Be sure we did not record any derived types; these may need
         renumbering too.  */
      gcc_assert (val->derived_types.length () == 0);
      if (odr_types_ptr)
        val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  return val;
}

/* Dump ODR type T and all its derived types.  INDENT specifies indentation
   for recursive printing.  */

static void
dump_odr_type (FILE *f, odr_type t, int indent=0)
{
  unsigned int i;
  fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
  print_generic_expr (f, t->type, TDF_SLIM);
  fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
  fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
  if (TYPE_NAME (t->type))
    {
      fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
               DECL_SOURCE_FILE (TYPE_NAME (t->type)),
               DECL_SOURCE_LINE (TYPE_NAME (t->type)));
    }
  if (t->bases.length ())
    {
      fprintf (f, "%*s base odr type ids: ", indent * 2, "");
      for (i = 0; i < t->bases.length (); i++)
        fprintf (f, " %i", t->bases[i]->id);
      fprintf (f, "\n");
    }
  if (t->derived_types.length ())
    {
      fprintf (f, "%*s derived types:\n", indent * 2, "");
      for (i = 0; i < t->derived_types.length (); i++)
        dump_odr_type (f, t->derived_types[i], indent + 1);
    }
  fprintf (f, "\n");
}

/* Dump the type inheritance graph.  */

static void
dump_type_inheritance_graph (FILE *f)
{
  unsigned int i;
  if (!odr_types_ptr)
    return;
  fprintf (f, "\n\nType inheritance graph:\n");
  for (i = 0; i < odr_types.length (); i++)
    {
      if (odr_types[i] && odr_types[i]->bases.length () == 0)
        dump_odr_type (f, odr_types[i]);
    }
  for (i = 0; i < odr_types.length (); i++)
    {
      if (odr_types[i] && odr_types[i]->types
          && odr_types[i]->types->length ())
        {
          unsigned int j;
          fprintf (f, "Duplicate tree types for odr type %i\n", i);
          print_node (f, "", odr_types[i]->type, 0);
          for (j = 0; j < odr_types[i]->types->length (); j++)
            {
              tree t;
              fprintf (f, "duplicate #%i\n", j);
              print_node (f, "", (*odr_types[i]->types)[j], 0);
              t = (*odr_types[i]->types)[j];
              while (TYPE_P (t) && TYPE_CONTEXT (t))
                {
                  t = TYPE_CONTEXT (t);
                  print_node (f, "", t, 0);
                }
              putc ('\n',f);
            }
        }
    }
}

/* Given method type T, return type of class it belongs to.
   Look up the type of the `this' pointer.  */

tree
method_class_type (const_tree t)
{
  tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
  gcc_assert (TREE_CODE (t) == METHOD_TYPE);

  return TREE_TYPE (first_parm_type);
}
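
/* For illustration: given a hypothetical

     struct A { int f (int); };

   the METHOD_TYPE of A::f carries `A *' as its first parameter type, so
   method_class_type returns the RECORD_TYPE of A.  */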

/* Initialize IPA devirt and build inheritance tree graph.  */

void
build_type_inheritance_graph (void)
{
  struct symtab_node *n;
  FILE *inheritance_dump_file;
  int flags;

  if (odr_hash)
    return;
  timevar_push (TV_IPA_INHERITANCE);
  inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
  odr_hash = new odr_hash_type (23);

  /* We reconstruct the graph starting from types of all methods seen in
     the unit.  */
  FOR_EACH_SYMBOL (n)
    if (is_a <cgraph_node *> (n)
        && DECL_VIRTUAL_P (n->decl)
        && n->real_symbol_p ())
      get_odr_type (TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (n->decl))),
                    true);

    /* Look also for virtual tables of types that do not define any methods.

       We need it in a case where class B has a virtual base of class A
       re-defining its virtual method and there is class C with no virtual
       methods with B as virtual base.

       Here we output B's virtual method in two variants - for non-virtual
       and virtual inheritance.  B's virtual table has non-virtual version,
       while C's has virtual.

       For this reason we need to know about C in order to include both
       variants of B.  More correctly, record_target_from_binfo should
       add both variants of the method when walking B, but we have no
       link in between them.

       We rely on the fact that either the method is exported and thus we
       assume it is called externally or C is in anonymous namespace and
       thus we will see the vtable.  */
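
    /* A hypothetical sketch of the hierarchy described above:

         struct A { virtual void f (); };
         struct B : virtual A { void f (); };
         struct C : B { };

       C defines no methods of its own, yet its virtual table refers to the
       virtual-inheritance variant of B::f, so C's vtable must be walked to
       record that variant.  */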

    else if (is_a <varpool_node *> (n)
             && DECL_VIRTUAL_P (n->decl)
             && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
             && TYPE_BINFO (DECL_CONTEXT (n->decl))
             && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
      get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true);
  if (inheritance_dump_file)
    {
      dump_type_inheritance_graph (inheritance_dump_file);
      dump_end (TDI_inheritance, inheritance_dump_file);
    }
  timevar_pop (TV_IPA_INHERITANCE);
}

/* Return true if N has reference from live virtual table
   (and thus can be a destination of polymorphic call).
   Be conservatively correct when callgraph is not built or
   if the method may be referred to externally.  */

static bool
referenced_from_vtable_p (struct cgraph_node *node)
{
  int i;
  struct ipa_ref *ref;
  bool found = false;

  if (node->externally_visible
      || node->used_from_other_partition)
    return true;

  /* Keep this test constant time.
     It is unlikely this can happen except for the case where speculative
     devirtualization introduced many speculative edges to this node.
     In this case the target is very likely alive anyway.  */
  if (node->ref_list.referring.length () > 100)
    return true;

  /* We need references built.  */
  if (cgraph_state <= CGRAPH_STATE_CONSTRUCTION)
    return true;

  for (i = 0; node->iterate_referring (i, ref); i++)
    if ((ref->use == IPA_REF_ALIAS
         && referenced_from_vtable_p (dyn_cast<cgraph_node *>
                                        (ref->referring)))
        || (ref->use == IPA_REF_ADDR
            && TREE_CODE (ref->referring->decl) == VAR_DECL
            && DECL_VIRTUAL_P (ref->referring->decl)))
      {
        found = true;
        break;
      }
  return found;
}

/* If TARGET has associated node, record it in the NODES array.
   CAN_REFER specifies whether the program can refer to the target
   directly.  If TARGET is unknown (NULL) or it can not be inserted
   (for example because its body was already removed and there is no
   way to refer to it), clear COMPLETEP.  */

static void
maybe_record_node (vec <cgraph_node *> &nodes,
                   tree target, hash_set<tree> *inserted,
                   bool can_refer,
                   bool *completep)
{
  struct cgraph_node *target_node, *alias_target;
  enum availability avail;

  /* cxa_pure_virtual and __builtin_unreachable do not need to be added into
     list of targets; the runtime effect of calling them is undefined.
     Only "real" virtual methods should be accounted.  */
  if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE)
    return;

  if (!can_refer)
    {
      /* The only case when a method of anonymous namespace becomes
         unreferable is when we completely optimized it out.  */
      if (flag_ltrans
          || !target
          || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
        *completep = false;
      return;
    }

  if (!target)
    return;

  target_node = cgraph_node::get (target);

  /* Prefer alias target over aliases, so we do not get confused by
     fake duplicates.  */
  if (target_node)
    {
      alias_target = target_node->ultimate_alias_target (&avail);
      if (target_node != alias_target
          && avail >= AVAIL_AVAILABLE
          && target_node->get_availability ())
        target_node = alias_target;
    }

  /* Method can only be called by polymorphic call if any
     of vtables referring to it are alive.

     While this holds for non-anonymous functions, too, there are
     cases where we want to keep them in the list; for example
     inline functions with -fno-weak are static, but we still
     may devirtualize them when instance comes from other unit.
     The same holds for LTO.

     Currently we ignore these functions in speculative devirtualization.
     ??? Maybe it would make sense to be more aggressive for LTO even
     elsewhere.  */
  if (!flag_ltrans
      && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
      && (!target_node
          || !referenced_from_vtable_p (target_node)))
    ;
  /* See if TARGET is a useful function we can deal with.  */
  else if (target_node != NULL
           && (TREE_PUBLIC (target)
               || DECL_EXTERNAL (target)
               || target_node->definition)
           && target_node->real_symbol_p ())
    {
      gcc_assert (!target_node->global.inlined_to);
      gcc_assert (target_node->real_symbol_p ());
      if (!inserted->add (target))
        {
          cached_polymorphic_call_targets->add (target_node);
          nodes.safe_push (target_node);
        }
    }
  else if (completep
           && (!type_in_anonymous_namespace_p
                  (DECL_CONTEXT (target))
               || flag_ltrans))
    *completep = false;
}

/* See if BINFO's type matches OUTER_TYPE.  If so, look up
   BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
   the method in the vtable and insert it into the NODES array
   or BASES_TO_CONSIDER if this array is non-NULL.
   Otherwise recurse to base BINFOs.
   This matches what get_binfo_at_offset does, but with offset
   being unknown.

   TYPE_BINFOS is a stack of BINFOs of types with defined
   virtual table seen on the way from class type to BINFO.

   MATCHED_VTABLES tracks virtual tables we already did lookup
   for virtual function in.  INSERTED tracks nodes we already
   inserted.

   ANONYMOUS is true if BINFO is part of anonymous namespace.

   Clear COMPLETEP when we hit an unreferable target.  */

static void
record_target_from_binfo (vec <cgraph_node *> &nodes,
                          vec <tree> *bases_to_consider,
                          tree binfo,
                          tree otr_type,
                          vec <tree> &type_binfos,
                          HOST_WIDE_INT otr_token,
                          tree outer_type,
                          HOST_WIDE_INT offset,
                          hash_set<tree> *inserted,
                          hash_set<tree> *matched_vtables,
                          bool anonymous,
                          bool *completep)
{
  tree type = BINFO_TYPE (binfo);
  int i;
  tree base_binfo;

  if (BINFO_VTABLE (binfo))
    type_binfos.safe_push (binfo);
  if (types_same_for_odr (type, outer_type))
    {
      int i;
      tree type_binfo = NULL;

      /* Look up BINFO with virtual table.  For normal types it is always
         the last binfo on stack.  */
      for (i = type_binfos.length () - 1; i >= 0; i--)
        if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
          {
            type_binfo = type_binfos[i];
            break;
          }
      if (BINFO_VTABLE (binfo))
        type_binfos.pop ();
      /* If this is a duplicated BINFO for base shared by virtual
         inheritance, we may not have its associated vtable.  This is not
         a problem, since we will walk it on the other path.  */
      if (!type_binfo)
        return;
      tree inner_binfo = get_binfo_at_offset (type_binfo,
                                              offset, otr_type);
      if (!inner_binfo)
        {
          gcc_assert (odr_violation_reported);
          return;
        }
      /* For types in anonymous namespace first check if the respective
         vtable is alive.  If not, we know the type can't be called.  */
      if (!flag_ltrans && anonymous)
        {
          tree vtable = BINFO_VTABLE (inner_binfo);
          varpool_node *vnode;

          if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
            vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
          vnode = varpool_node::get (vtable);
          if (!vnode || !vnode->definition)
            return;
        }
      gcc_assert (inner_binfo);
      if (bases_to_consider
          ? !matched_vtables->contains (BINFO_VTABLE (inner_binfo))
          : !matched_vtables->add (BINFO_VTABLE (inner_binfo)))
        {
          bool can_refer;
          tree target = gimple_get_virt_method_for_binfo (otr_token,
                                                          inner_binfo,
                                                          &can_refer);
          if (!bases_to_consider)
            maybe_record_node (nodes, target, inserted, can_refer, completep);
          /* Destructors are never called via construction vtables.  */
          else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
            bases_to_consider->safe_push (target);
        }
      return;
    }

  /* Walk bases.  */
  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    /* Walking bases that have no virtual method is a pointless exercise.  */
    if (polymorphic_type_binfo_p (base_binfo))
      record_target_from_binfo (nodes, bases_to_consider, base_binfo,
                                otr_type, type_binfos,
                                otr_token, outer_type, offset, inserted,
                                matched_vtables, anonymous, completep);
  if (BINFO_VTABLE (binfo))
    type_binfos.pop ();
}

/* Look up virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
   of TYPE, insert them to NODES, recurse into derived nodes.
   INSERTED is used to avoid duplicate insertions of methods into NODES.
   MATCHED_VTABLES are used to avoid duplicate walking of vtables.
   Clear COMPLETEP if an unreferable target is found.

   If CONSIDER_CONSTRUCTION is true, record to BASES_TO_CONSIDER
   all cases where BASE_SKIPPED is true (because the base is an abstract
   class).  */

static void
possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
                                     hash_set<tree> *inserted,
                                     hash_set<tree> *matched_vtables,
                                     tree otr_type,
                                     odr_type type,
                                     HOST_WIDE_INT otr_token,
                                     tree outer_type,
                                     HOST_WIDE_INT offset,
                                     bool *completep,
                                     vec <tree> &bases_to_consider,
                                     bool consider_construction)
{
  tree binfo = TYPE_BINFO (type->type);
  unsigned int i;
  vec <tree> type_binfos = vNULL;
  bool possibly_instantiated = type_possibly_instantiated_p (type->type);

  /* We may need to consider types w/o instances because of possible derived
     types using their methods either directly or via construction vtables.
     We are safe to skip them when all derivations are known, since we will
     handle them later.
     This is done by recording them to the BASES_TO_CONSIDER array.  */
  if (possibly_instantiated || consider_construction)
    {
      record_target_from_binfo (nodes,
                                (!possibly_instantiated
                                 && type_all_derivations_known_p (type->type))
                                ? &bases_to_consider : NULL,
                                binfo, otr_type, type_binfos, otr_token,
                                outer_type, offset,
                                inserted, matched_vtables,
                                type->anonymous_namespace, completep);
    }
  type_binfos.release ();
  for (i = 0; i < type->derived_types.length (); i++)
    possible_polymorphic_call_targets_1 (nodes, inserted,
                                         matched_vtables,
                                         otr_type,
                                         type->derived_types[i],
                                         otr_token, outer_type, offset,
                                         completep, bases_to_consider,
                                         consider_construction);
}

/* Cache of queries for polymorphic call targets.

   Enumerating all call targets may get expensive when there are many
   polymorphic calls in the program, so we memoize all the previous
   queries and avoid duplicated work.  */

struct polymorphic_call_target_d
{
  HOST_WIDE_INT otr_token;
  ipa_polymorphic_call_context context;
  odr_type type;
  vec <cgraph_node *> targets;
  int speculative_targets;
  bool complete;
  int type_warning;
  tree decl_warning;
};

/* Polymorphic call target cache helpers.  */

struct polymorphic_call_target_hasher
{
  typedef polymorphic_call_target_d value_type;
  typedef polymorphic_call_target_d compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Return the computed hashcode for ODR_QUERY.  */

inline hashval_t
polymorphic_call_target_hasher::hash (const value_type *odr_query)
{
  inchash::hash hstate (odr_query->otr_token);

  hstate.add_wide_int (odr_query->type->id);
  hstate.merge_hash (TYPE_UID (odr_query->context.outer_type));
  hstate.add_wide_int (odr_query->context.offset);

  if (odr_query->context.speculative_outer_type)
    {
      hstate.merge_hash (TYPE_UID (odr_query->context.speculative_outer_type));
      hstate.add_wide_int (odr_query->context.speculative_offset);
    }
  hstate.add_flag (odr_query->context.maybe_in_construction);
  hstate.add_flag (odr_query->context.maybe_derived_type);
  hstate.add_flag (odr_query->context.speculative_maybe_derived_type);
  hstate.commit_flag ();
  return hstate.end ();
}

/* Compare cache entries T1 and T2.  */

inline bool
polymorphic_call_target_hasher::equal (const value_type *t1,
                                       const compare_type *t2)
{
  return (t1->type == t2->type && t1->otr_token == t2->otr_token
          && t1->context.offset == t2->context.offset
          && t1->context.speculative_offset == t2->context.speculative_offset
          && t1->context.outer_type == t2->context.outer_type
          && t1->context.speculative_outer_type
             == t2->context.speculative_outer_type
          && t1->context.maybe_in_construction
             == t2->context.maybe_in_construction
          && t1->context.maybe_derived_type == t2->context.maybe_derived_type
          && (t1->context.speculative_maybe_derived_type
              == t2->context.speculative_maybe_derived_type));
}

/* Remove entry in polymorphic call target cache hash.  */

inline void
polymorphic_call_target_hasher::remove (value_type *v)
{
  v->targets.release ();
  free (v);
}

/* Polymorphic call target query cache.  */

typedef hash_table<polymorphic_call_target_hasher>
   polymorphic_call_target_hash_type;
static polymorphic_call_target_hash_type *polymorphic_call_target_hash;

/* Destroy polymorphic call target query cache.  */

static void
free_polymorphic_call_targets_hash ()
{
  if (cached_polymorphic_call_targets)
    {
      delete polymorphic_call_target_hash;
      polymorphic_call_target_hash = NULL;
      delete cached_polymorphic_call_targets;
      cached_polymorphic_call_targets = NULL;
    }
}

/* When a virtual function is removed, we may need to flush the cache.  */

static void
devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
{
  if (cached_polymorphic_call_targets
      && cached_polymorphic_call_targets->contains (n))
    free_polymorphic_call_targets_hash ();
}

/* Return true when TYPE contains a polymorphic type and thus is interesting
   for devirtualization machinery.  */

bool
contains_polymorphic_type_p (const_tree type)
{
  type = TYPE_MAIN_VARIANT (type);

  if (RECORD_OR_UNION_TYPE_P (type))
    {
      if (TYPE_BINFO (type)
          && polymorphic_type_binfo_p (TYPE_BINFO (type)))
        return true;
      for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
        if (TREE_CODE (fld) == FIELD_DECL
            && !DECL_ARTIFICIAL (fld)
            && contains_polymorphic_type_p (TREE_TYPE (fld)))
          return true;
      return false;
    }
  if (TREE_CODE (type) == ARRAY_TYPE)
    return contains_polymorphic_type_p (TREE_TYPE (type));
  return false;
}
1739
1740 /* Clear speculative info from CONTEXT. */
1741
1742 static void
1743 clear_speculation (ipa_polymorphic_call_context *context)
1744 {
1745 context->speculative_outer_type = NULL;
1746 context->speculative_offset = 0;
1747 context->speculative_maybe_derived_type = false;
1748 }
1749
1750 /* CONTEXT->OUTER_TYPE is the type of the memory object in which an object of
1751 EXPECTED_TYPE is contained at CONTEXT->OFFSET. Walk the memory representation
1752 of CONTEXT->OUTER_TYPE and find the outermost class type that matches
1753 EXPECTED_TYPE or contains EXPECTED_TYPE as a base. Update CONTEXT
1754 to represent it.
1755
1756 For example when CONTEXT represents type
1757 class A
1758 {
1759 int a;
1760 class B b;
1761 }
1762 and we look for type at offset sizeof(int), we end up with B and offset 0.
1763 If the same is produced by multiple inheritance, we end up with A and offset
1764 sizeof(int).
1765
1766 If we cannot find a corresponding class, give up by setting
1767 CONTEXT->OUTER_TYPE to EXPECTED_TYPE and CONTEXT->OFFSET to 0.
1768 Return true when the lookup was successful. */
1769
1770 static bool
1771 get_class_context (ipa_polymorphic_call_context *context,
1772 tree expected_type)
1773 {
1774 tree type = context->outer_type;
1775 HOST_WIDE_INT offset = context->offset;
1776 bool speculative = false;
1777 bool speculation_valid = false;
1778 bool valid = false;
1779
1780 if (!context->outer_type)
1781 {
1782 type = context->outer_type = expected_type;
1783 context->offset = offset = 0;
1784 }
1785
1786 if (context->speculative_outer_type == context->outer_type
1787 && (!context->maybe_derived_type
1788 || context->speculative_maybe_derived_type))
1789 {
1790 context->speculative_outer_type = NULL;
1791 context->speculative_offset = 0;
1792 context->speculative_maybe_derived_type = false;
1793 }
1794
1795 /* See if the speculative type seems to be derived from outer_type.
1796 Then speculation is valid only if it really is a derived type and derived
1797 types are allowed.
1798
1799 The test does not look only for derived types; it also accepts the case where
1800 outer_type is a field of speculative_outer_type. In that case either
1801 MAYBE_DERIVED_TYPE is false and we have full non-speculative information, or
1802 the loop below will correctly update SPECULATIVE_OUTER_TYPE
1803 and SPECULATIVE_MAYBE_DERIVED_TYPE. */
1804 if (context->speculative_outer_type
1805 && context->speculative_offset >= context->offset
1806 && contains_type_p (context->speculative_outer_type,
1807 context->offset - context->speculative_offset,
1808 context->outer_type))
1809 speculation_valid = context->maybe_derived_type;
1810 else
1811 clear_speculation (context);
1812
1813 /* Find the sub-object the constant actually refers to and mark whether it is
1814 an artificial one (as opposed to a user-defined one).
1815
1816 This loop is performed twice; first time for outer_type and second time
1817 for speculative_outer_type. The second iteration has SPECULATIVE set. */
1818 while (true)
1819 {
1820 HOST_WIDE_INT pos, size;
1821 tree fld;
1822
1823 /* On a match, just return what we found. */
1824 if (TREE_CODE (type) == TREE_CODE (expected_type)
1825 && (!in_lto_p
1826 || (TREE_CODE (type) == RECORD_TYPE
1827 && TYPE_BINFO (type)
1828 && polymorphic_type_binfo_p (TYPE_BINFO (type))))
1829 && types_same_for_odr (type, expected_type))
1830 {
1831 if (speculative)
1832 {
1833 gcc_assert (speculation_valid);
1834 gcc_assert (valid);
1835
1836 /* If we did not match the offset, or the speculation adds nothing over the non-speculative result, give up on speculation. */
1837 if (offset != 0
1838 || (types_same_for_odr (context->speculative_outer_type,
1839 context->outer_type)
1840 && (context->maybe_derived_type
1841 == context->speculative_maybe_derived_type)))
1842 clear_speculation (context);
1843 return true;
1844 }
1845 else
1846 {
1847 /* A type cannot contain itself at a non-zero offset. In that case
1848 just give up. */
1849 if (offset != 0)
1850 {
1851 valid = false;
1852 goto give_up;
1853 }
1854 valid = true;
1855 /* If speculation is not valid or we determined type precisely,
1856 we are done. */
1857 if (!speculation_valid
1858 || !context->maybe_derived_type)
1859 {
1860 clear_speculation (context);
1861 return true;
1862 }
1863 /* Otherwise look into speculation now. */
1864 else
1865 {
1866 speculative = true;
1867 type = context->speculative_outer_type;
1868 offset = context->speculative_offset;
1869 continue;
1870 }
1871 }
1872 }
1873
1874 /* Walk the fields and find the corresponding one at OFFSET. */
1875 if (TREE_CODE (type) == RECORD_TYPE)
1876 {
1877 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1878 {
1879 if (TREE_CODE (fld) != FIELD_DECL)
1880 continue;
1881
1882 pos = int_bit_position (fld);
1883 size = tree_to_uhwi (DECL_SIZE (fld));
1884 if (pos <= offset && (pos + size) > offset)
1885 break;
1886 }
1887
1888 if (!fld)
1889 goto give_up;
1890
1891 type = TYPE_MAIN_VARIANT (TREE_TYPE (fld));
1892 offset -= pos;
1893 /* DECL_ARTIFICIAL represents a basetype. */
1894 if (!DECL_ARTIFICIAL (fld))
1895 {
1896 if (!speculative)
1897 {
1898 context->outer_type = type;
1899 context->offset = offset;
1900 /* As soon as we see a field containing the type,
1901 we know we are not looking for derivations. */
1902 context->maybe_derived_type = false;
1903 }
1904 else
1905 {
1906 context->speculative_outer_type = type;
1907 context->speculative_offset = offset;
1908 context->speculative_maybe_derived_type = false;
1909 }
1910 }
1911 }
1912 else if (TREE_CODE (type) == ARRAY_TYPE)
1913 {
1914 tree subtype = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1915
1916 /* Give up if we don't know the element size or it is not positive. */
1917 if (!tree_fits_shwi_p (TYPE_SIZE (subtype))
1918 || tree_to_shwi (TYPE_SIZE (subtype)) <= 0)
1919 goto give_up;
1920 offset = offset % tree_to_shwi (TYPE_SIZE (subtype));
1921 type = subtype;
1922 if (!speculative)
1923 {
1924 context->outer_type = type;
1925 context->offset = offset;
1926 context->maybe_derived_type = false;
1927 }
1928 else
1929 {
1930 context->speculative_outer_type = type;
1931 context->speculative_offset = offset;
1932 context->speculative_maybe_derived_type = false;
1933 }
1934 }
1935 /* Give up on anything else. */
1936 else
1937 goto give_up;
1938 }
1939
1940 /* If we failed to find the subtype we were looking for, give up and fall
1941 back to the most generic query. */
1942 give_up:
1943 clear_speculation (context);
1944 if (valid)
1945 return true;
1946 context->outer_type = expected_type;
1947 context->offset = 0;
1948 context->maybe_derived_type = true;
1949 context->maybe_in_construction = true;
1950 /* A POD can be changed to an instance of a polymorphic type by
1951 placement new. Here we play it safe and assume that any
1952 non-polymorphic type is a POD. */
1953 if ((TREE_CODE (type) != RECORD_TYPE
1954 || !TYPE_BINFO (type)
1955 || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
1956 && (!TYPE_SIZE (type)
1957 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1958 || (offset + tree_to_uhwi (TYPE_SIZE (expected_type)) <=
1959 tree_to_uhwi (TYPE_SIZE (type)))))
1960 return true;
1961 return false;
1962 }
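
/* For illustration (a hypothetical example, not taken from any testcase):
   placement new can legally turn storage of a non-polymorphic type into a
   polymorphic instance, which is why the generic fallback above has to stay
   conservative for possibly-POD outer types:

     struct buffer { char data[64]; };	<- non-polymorphic, assumed POD
     struct A { virtual int f (); };

     buffer b;
     A *p = new (&b.data) A;
     p->f ();

   Here the dynamic type of the storage no longer follows from the declared
   type of b, so MAYBE_DERIVED_TYPE and MAYBE_IN_CONSTRUCTION must remain
   set.  */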
1963
1964 /* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET. */
1965
1966 static bool
1967 contains_type_p (tree outer_type, HOST_WIDE_INT offset,
1968 tree otr_type)
1969 {
1970 ipa_polymorphic_call_context context = {offset, 0,
1971 TYPE_MAIN_VARIANT (outer_type),
1972 NULL, false, true, false};
1973 return get_class_context (&context, otr_type);
1974 }
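
/* For example (hypothetical types; offsets are in bits, as everywhere in
   this file, and we assume no padding between the fields):

     struct B { virtual int f (); };
     struct A { int a; struct B b; };

   contains_type_p (A, sizeof (int) * BITS_PER_UNIT, B) returns true,
   because an object of type B lives at that offset within A.  */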
1975
1976 /* Lookup base of BINFO that has virtual table VTABLE with OFFSET. */
1977
1978 static tree
1979 subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
1980 tree vtable)
1981 {
1982 tree v = BINFO_VTABLE (binfo);
1983 int i;
1984 tree base_binfo;
1985 unsigned HOST_WIDE_INT this_offset;
1986
1987 if (v)
1988 {
1989 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
1990 gcc_unreachable ();
1991
1992 if (offset == this_offset
1993 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
1994 return binfo;
1995 }
1996
1997 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1998 if (polymorphic_type_binfo_p (base_binfo))
1999 {
2000 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
2001 if (base_binfo)
2002 return base_binfo;
2003 }
2004 return NULL;
2005 }
2006
2007 /* T is a known constant value of a virtual table pointer. Store the
2008 virtual table to V and its offset to OFFSET.
2009 Return false if T does not look like a virtual table reference. */
2010
2011 bool
2012 vtable_pointer_value_to_vtable (const_tree t, tree *v,
2013 unsigned HOST_WIDE_INT *offset)
2014 {
2015 /* We expect &MEM[(void *)&virtual_table + 16B].
2016 We obtain the object's BINFO from the context of the virtual table.
2017 This one contains a pointer to the virtual table represented via
2018 POINTER_PLUS_EXPR. Verify that this pointer matches what
2019 we propagated through.
2020
2021 In the case of virtual inheritance, the virtual tables may
2022 be nested, i.e. the offset may be different from 16 and we may
2023 need to dive into the type representation. */
2024 if (TREE_CODE (t) == ADDR_EXPR
2025 && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
2026 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
2027 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
2028 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
2029 == VAR_DECL)
2030 && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
2031 (TREE_OPERAND (t, 0), 0), 0)))
2032 {
2033 *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
2034 *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
2035 return true;
2036 }
2037
2038 /* An alternative representation, used by the C++ frontend, is
2039 POINTER_PLUS_EXPR. We need to handle it when T comes from a static
2040 variable initializer or a BINFO. */
2041 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
2042 {
2043 *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
2044 t = TREE_OPERAND (t, 0);
2045 }
2046 else
2047 *offset = 0;
2048
2049 if (TREE_CODE (t) != ADDR_EXPR)
2050 return false;
2051 *v = TREE_OPERAND (t, 0);
2052 return true;
2053 }
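
/* As an illustration (the class below is hypothetical; _ZTV1A is its
   mangled vtable name): for

     struct A { virtual void f (); };

   the constructor stores a vtable pointer of the form

     &MEM[(void *)&_ZTV1A + 16B]	<- ADDR_EXPR of a MEM_REF

   while the same value coming from a BINFO or a static initializer reads

     &_ZTV1A p+ 16			<- POINTER_PLUS_EXPR

   and both decompose to *V == _ZTV1A with *OFFSET == 16.  */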
2054
2055 /* T is known constant value of virtual table pointer. Return BINFO of the
2056 instance type. */
2057
2058 tree
2059 vtable_pointer_value_to_binfo (const_tree t)
2060 {
2061 tree vtable;
2062 unsigned HOST_WIDE_INT offset;
2063
2064 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
2065 return NULL_TREE;
2066
2067 /* FIXME: for stores of construction vtables we return NULL,
2068 because we do not have a BINFO for those. Eventually we should fix
2069 our representation to allow this case to be handled, too.
2070 In the case we see a store of a BINFO, however, we may assume
2071 that standard folding will be able to cope with it. */
2072 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
2073 offset, vtable);
2074 }
2075
2076 /* We know that the instance is stored in a variable or parameter
2077 (not dynamically allocated) and we want to disprove the fact
2078 that it may be in construction at the invocation of CALL.
2079
2080 For the variable to be in construction we actually need to
2081 be in the constructor of the corresponding global variable, or
2082 the inline stack of CALL must contain the constructor.
2083 Check this condition. This check works safely only before
2084 IPA passes, because inline stacks may become out of date
2085 later. */
2086
2087 bool
2088 decl_maybe_in_construction_p (tree base, tree outer_type,
2089 gimple call, tree function)
2090 {
2091 outer_type = TYPE_MAIN_VARIANT (outer_type);
2092 gcc_assert (DECL_P (base));
2093
2094 /* After inlining the code unification optimizations may invalidate
2095 inline stacks. Also we need to give up on global variables after
2096 IPA, because addresses of these may have been propagated to their
2097 constructors. */
2098 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
2099 return true;
2100
2101 /* Pure functions cannot change the dynamic type;
2102 that requires writing to memory. */
2103 if (!auto_var_in_fn_p (base, function)
2104 && flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
2105 return false;
2106
2107 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
2108 block = BLOCK_SUPERCONTEXT (block))
2109 if (BLOCK_ABSTRACT_ORIGIN (block)
2110 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
2111 {
2112 tree fn = BLOCK_ABSTRACT_ORIGIN (block);
2113
2114 if (TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
2115 || (!DECL_CXX_CONSTRUCTOR_P (fn)
2116 && !DECL_CXX_DESTRUCTOR_P (fn)))
2117 {
2118 /* Watch for clones where we constant propagated the first
2119 argument (pointer to the instance). */
2120 fn = DECL_ABSTRACT_ORIGIN (fn);
2121 if (!fn
2122 || !is_global_var (base)
2123 || TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
2124 || (!DECL_CXX_CONSTRUCTOR_P (fn)
2125 && !DECL_CXX_DESTRUCTOR_P (fn)))
2126 continue;
2127 }
2128 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
2129 continue;
2130
2131 /* FIXME: this can go away once we have ODR types equivalency on
2132 LTO level. */
2133 if (in_lto_p && !polymorphic_type_binfo_p (TYPE_BINFO (outer_type)))
2134 return true;
2135 tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (fn)));
2136 if (types_same_for_odr (type, outer_type))
2137 return true;
2138 }
2139
2140 if (TREE_CODE (base) == VAR_DECL
2141 && is_global_var (base))
2142 {
2143 if (TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
2144 || (!DECL_CXX_CONSTRUCTOR_P (function)
2145 && !DECL_CXX_DESTRUCTOR_P (function)))
2146 {
2147 if (!DECL_ABSTRACT_ORIGIN (function))
2148 return false;
2149 /* Watch for clones where we constant propagated the first
2150 argument (pointer to the instance). */
2151 function = DECL_ABSTRACT_ORIGIN (function);
2152 if (!function
2153 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
2154 || (!DECL_CXX_CONSTRUCTOR_P (function)
2155 && !DECL_CXX_DESTRUCTOR_P (function)))
2156 return false;
2157 }
2158 /* FIXME: this can go away once we have ODR types equivalency on
2159 LTO level. */
2160 if (in_lto_p && !polymorphic_type_binfo_p (TYPE_BINFO (outer_type)))
2161 return true;
2162 tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (function)));
2163 if (types_same_for_odr (type, outer_type))
2164 return true;
2165 }
2166 return false;
2167 }
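
/* For example (hypothetical source): given

     struct A { A (); virtual int f (); };
     struct B : A { B (); virtual int f (); };
     B global;

   a virtual call on &global that was inlined from A::A runs while the
   object is still in construction, where the vtable temporarily belongs
   to A, so A::f rather than B::f would be called; the inline stack walk
   above detects exactly this situation.  */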
2168
2169 /* Produce a polymorphic call context for a call to a method of an instance
2170 that is located within BASE (which is assumed to be a decl) at OFFSET. */
2171
2172 static void
2173 get_polymorphic_call_info_for_decl (ipa_polymorphic_call_context *context,
2174 tree base, HOST_WIDE_INT offset)
2175 {
2176 gcc_assert (DECL_P (base));
2177
2178 context->outer_type = TYPE_MAIN_VARIANT (TREE_TYPE (base));
2179 context->offset = offset;
2180 context->speculative_outer_type = NULL;
2181 context->speculative_offset = 0;
2182 context->speculative_maybe_derived_type = true;
2183 /* Make very conservative assumption that all objects
2184 may be in construction.
2185 TODO: ipa-prop already contains code to tell better.
2186 merge it later. */
2187 context->maybe_in_construction = true;
2188 context->maybe_derived_type = false;
2189 }
2190
2191 /* CST is an invariant (address of a decl); try to get a meaningful
2192 polymorphic call context for a polymorphic call to a method
2193 of an instance of OTR_TYPE that is located at OFFSET of this invariant.
2194 Return FALSE if nothing meaningful can be found. */
2195
2196 bool
2197 get_polymorphic_call_info_from_invariant (ipa_polymorphic_call_context *context,
2198 tree cst,
2199 tree otr_type,
2200 HOST_WIDE_INT offset)
2201 {
2202 HOST_WIDE_INT offset2, size, max_size;
2203 tree base;
2204
2205 if (TREE_CODE (cst) != ADDR_EXPR)
2206 return false;
2207
2208 cst = TREE_OPERAND (cst, 0);
2209 base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
2210 if (!DECL_P (base) || max_size == -1 || max_size != size)
2211 return false;
2212
2213 /* Only type-inconsistent programs can have an otr_type that is
2214 not part of the outer type. */
2215 if (!contains_type_p (TREE_TYPE (base), offset, otr_type))
2216 return false;
2217
2218 get_polymorphic_call_info_for_decl (context, base, offset);
2219 return true;
2220 }
2221
2222 /* See if OP is an SSA name initialized as a copy or by a single assignment.
2223 If so, walk the SSA graph up. */
2224
2225 static tree
2226 walk_ssa_copies (tree op)
2227 {
2228 STRIP_NOPS (op);
2229 while (TREE_CODE (op) == SSA_NAME
2230 && !SSA_NAME_IS_DEFAULT_DEF (op)
2231 && SSA_NAME_DEF_STMT (op)
2232 && gimple_assign_single_p (SSA_NAME_DEF_STMT (op)))
2233 {
2234 if (gimple_assign_load_p (SSA_NAME_DEF_STMT (op)))
2235 return op;
2236 op = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op));
2237 STRIP_NOPS (op);
2238 }
2239 return op;
2240 }
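
/* For example (hypothetical GIMPLE): for

     b_2 = a_1;
     c_3 = b_2;

   walk_ssa_copies (c_3) returns a_1, while a load such as

     d_4 = *p_5;

   stops the walk and d_4 itself is returned.  */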
2241
2242 /* Given the call reference REF in FNDECL, determine the class of the
2243 polymorphic call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
2244 CALL is an optional argument giving the actual statement (usually a call)
2245 where the context is used.
2246 Return a pointer to the object described by the context. */
2247
2248 tree
2249 get_polymorphic_call_info (tree fndecl,
2250 tree ref,
2251 tree *otr_type,
2252 HOST_WIDE_INT *otr_token,
2253 ipa_polymorphic_call_context *context,
2254 gimple call)
2255 {
2256 tree base_pointer;
2257 *otr_type = obj_type_ref_class (ref);
2258 *otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));
2259
2260 /* Set up basic info in case we find nothing interesting in the analysis. */
2261 context->speculative_outer_type = NULL;
2262 context->speculative_offset = 0;
2263 context->speculative_maybe_derived_type = true;
2264 context->outer_type = TYPE_MAIN_VARIANT (*otr_type);
2265 context->offset = 0;
2266 base_pointer = OBJ_TYPE_REF_OBJECT (ref);
2267 context->maybe_derived_type = true;
2268 context->maybe_in_construction = true;
2269
2270 /* Walk SSA for outer object. */
2271 do
2272 {
2273 base_pointer = walk_ssa_copies (base_pointer);
2274 if (TREE_CODE (base_pointer) == ADDR_EXPR)
2275 {
2276 HOST_WIDE_INT size, max_size;
2277 HOST_WIDE_INT offset2;
2278 tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
2279 &offset2, &size, &max_size);
2280
2281 /* If this is a varying address, punt; only bases with constant offset and size are handled below. */
2282 if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
2283 && max_size != -1
2284 && max_size == size)
2285 {
2286 /* We found a dereference of a pointer. The type of the pointer
2287 and of the MEM_REF is meaningless, but we can look further. */
2288 if (TREE_CODE (base) == MEM_REF)
2289 {
2290 base_pointer = TREE_OPERAND (base, 0);
2291 context->offset
2292 += offset2 + mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
2293 context->outer_type = NULL;
2294 }
2295 /* We found base object. In this case the outer_type
2296 is known. */
2297 else if (DECL_P (base))
2298 {
2299 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (base)));
2300
2301 /* Only type-inconsistent programs can have an otr_type that is
2302 not part of the outer type. */
2303 if (!contains_type_p (TREE_TYPE (base),
2304 context->offset + offset2, *otr_type))
2305 {
2306 /* Use OTR_TOKEN = INT_MAX as a marker of probably type-inconsistent
2307 code sequences; we arrange such calls to be redirected to
2308 builtin_unreachable later. */
2309 *otr_token = INT_MAX;
2310 return base_pointer;
2311 }
2312 get_polymorphic_call_info_for_decl (context, base,
2313 context->offset + offset2);
2314 if (context->maybe_in_construction && call)
2315 context->maybe_in_construction
2316 = decl_maybe_in_construction_p (base,
2317 context->outer_type,
2318 call,
2319 current_function_decl);
2320 return base_pointer;
2321 }
2322 else
2323 break;
2324 }
2325 else
2326 break;
2327 }
2328 else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
2329 && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
2330 {
2331 context->offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
2332 * BITS_PER_UNIT;
2333 base_pointer = TREE_OPERAND (base_pointer, 0);
2334 }
2335 else
2336 break;
2337 }
2338 while (true);
2339
2340 /* Try to determine type of the outer object. */
2341 if (TREE_CODE (base_pointer) == SSA_NAME
2342 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
2343 && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
2344 {
2345 /* See if parameter is THIS pointer of a method. */
2346 if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
2347 && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
2348 {
2349 context->outer_type
2350 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
2351 gcc_assert (TREE_CODE (context->outer_type) == RECORD_TYPE);
2352
2353 /* Dynamic casting has possibly upcast the type
2354 in the hierarchy. In this case the outer type is less
2355 informative than the inner type and we should forget
2356 about it. */
2357 if (!contains_type_p (context->outer_type, context->offset,
2358 *otr_type))
2359 {
2360 context->outer_type = NULL;
2361 return base_pointer;
2362 }
2363
2364 /* If the function is constructor or destructor, then
2365 the type is possibly in construction, but we know
2366 it is not derived type. */
2367 if (DECL_CXX_CONSTRUCTOR_P (fndecl)
2368 || DECL_CXX_DESTRUCTOR_P (fndecl))
2369 {
2370 context->maybe_in_construction = true;
2371 context->maybe_derived_type = false;
2372 }
2373 else
2374 {
2375 context->maybe_derived_type = true;
2376 context->maybe_in_construction = false;
2377 }
2378 return base_pointer;
2379 }
2380 /* Non-PODs passed by value are really passed by invisible
2381 reference. In this case we also know the type of the
2382 object. */
2383 if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
2384 {
2385 context->outer_type
2386 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
2387 gcc_assert (!POINTER_TYPE_P (context->outer_type));
2388 /* Only type-inconsistent programs can have an otr_type that is
2389 not part of the outer type. */
2390 if (!contains_type_p (context->outer_type, context->offset,
2391 *otr_type))
2392 {
2393 /* Use OTR_TOKEN = INT_MAX as a marker of probably type-inconsistent
2394 code sequences; we arrange such calls to be redirected to
2395 builtin_unreachable later. */
2396 *otr_token = INT_MAX;
2397 return base_pointer;
2398 }
2399 context->maybe_derived_type = false;
2400 context->maybe_in_construction = false;
2401 return base_pointer;
2402 }
2403 }
2404
2405 tree base_type = TREE_TYPE (base_pointer);
2406
2407 if (TREE_CODE (base_pointer) == SSA_NAME
2408 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
2409 && TREE_CODE (SSA_NAME_VAR (base_pointer)) != PARM_DECL)
2410 {
2411 /* Use OTR_TOKEN = INT_MAX as a marker of probably type-inconsistent
2412 code sequences; we arrange such calls to be redirected to
2413 builtin_unreachable later. */
2414 *otr_token = INT_MAX;
2415 return base_pointer;
2416 }
2417 if (TREE_CODE (base_pointer) == SSA_NAME
2418 && SSA_NAME_DEF_STMT (base_pointer)
2419 && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
2420 base_type = TREE_TYPE (gimple_assign_rhs1
2421 (SSA_NAME_DEF_STMT (base_pointer)));
2422
2423 if (POINTER_TYPE_P (base_type)
2424 && contains_type_p (TYPE_MAIN_VARIANT (TREE_TYPE (base_type)),
2425 context->offset,
2426 *otr_type))
2427 {
2428 context->speculative_outer_type = TYPE_MAIN_VARIANT
2429 (TREE_TYPE (base_type));
2430 context->speculative_offset = context->offset;
2431 context->speculative_maybe_derived_type = true;
2432 }
2433 /* TODO: There are multiple ways to derive a type. For instance,
2434 BASE_POINTER may be passed to a constructor call prior to our reference.
2435 We do not do this kind of flow-sensitive analysis yet. */
2436 return base_pointer;
2437 }
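
/* For example (hypothetical source): in

     struct A { virtual int f (); int call () { return f (); } };

   the call to f () inside A::call dispatches on the default definition of
   the THIS parameter, so the analysis above sets OUTER_TYPE to A with
   MAYBE_DERIVED_TYPE set (the caller may be any type derived from A) and
   MAYBE_IN_CONSTRUCTION cleared, because A::call is neither a constructor
   nor a destructor.  */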
2438
2439 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
2440 Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
2441 and insert them into NODES.
2442
2443 MATCHED_VTABLES and INSERTED are used to avoid duplicated work. */
2444
2445 static void
2446 record_targets_from_bases (tree otr_type,
2447 HOST_WIDE_INT otr_token,
2448 tree outer_type,
2449 HOST_WIDE_INT offset,
2450 vec <cgraph_node *> &nodes,
2451 hash_set<tree> *inserted,
2452 hash_set<tree> *matched_vtables,
2453 bool *completep)
2454 {
2455 while (true)
2456 {
2457 HOST_WIDE_INT pos, size;
2458 tree base_binfo;
2459 tree fld;
2460
2461 if (types_same_for_odr (outer_type, otr_type))
2462 return;
2463
2464 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
2465 {
2466 if (TREE_CODE (fld) != FIELD_DECL)
2467 continue;
2468
2469 pos = int_bit_position (fld);
2470 size = tree_to_shwi (DECL_SIZE (fld));
2471 if (pos <= offset && (pos + size) > offset
2472 /* Do not get confused by zero sized bases. */
2473 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
2474 break;
2475 }
2476 /* Within a class type we should always find the corresponding field. */
2477 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
2478
2479 /* Non-base types should have been stripped by outer_class_type. */
2480 gcc_assert (DECL_ARTIFICIAL (fld));
2481
2482 outer_type = TREE_TYPE (fld);
2483 offset -= pos;
2484
2485 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
2486 offset, otr_type);
2487 if (!base_binfo)
2488 {
2489 gcc_assert (odr_violation_reported);
2490 return;
2491 }
2492 gcc_assert (base_binfo);
2493 if (!matched_vtables->add (BINFO_VTABLE (base_binfo)))
2494 {
2495 bool can_refer;
2496 tree target = gimple_get_virt_method_for_binfo (otr_token,
2497 base_binfo,
2498 &can_refer);
2499 if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
2500 maybe_record_node (nodes, target, inserted, can_refer, completep);
2501 matched_vtables->add (BINFO_VTABLE (base_binfo));
2502 }
2503 }
2504 }
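
/* For example (hypothetical hierarchy):

     struct A { virtual int f (); };
     struct B : A { virtual int f (); };

   while B's constructor runs, the vtable pointer of the object under
   construction temporarily points to A's virtual table, so a call that
   normally resolves within B must also consider A::f; the base walk above
   records such targets.  */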
2505
2506 /* When a virtual table is removed, we may need to flush the cache. */
2507
2508 static void
2509 devirt_variable_node_removal_hook (varpool_node *n,
2510 void *d ATTRIBUTE_UNUSED)
2511 {
2512 if (cached_polymorphic_call_targets
2513 && DECL_VIRTUAL_P (n->decl)
2514 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
2515 free_polymorphic_call_targets_hash ();
2516 }
2517
2518 /* Records how many calls would benefit from a given type being final. */
2519 struct odr_type_warn_count
2520 {
2521 tree type;
2522 int count;
2523 gcov_type dyn_count;
2524 };
2525
2526 /* Records how many calls would benefit from a given method being final. */
2527 struct decl_warn_count
2528 {
2529 tree decl;
2530 int count;
2531 gcov_type dyn_count;
2532 };
2533
2534 /* Information about type and decl warnings. */
2535 struct final_warning_record
2536 {
2537 gcov_type dyn_count;
2538 vec<odr_type_warn_count> type_warnings;
2539 hash_map<tree, decl_warn_count> decl_warnings;
2540 };
2541 struct final_warning_record *final_warning_records;
2542
2543 /* Return a vector containing possible targets of a polymorphic call of type
2544 OTR_TYPE calling method OTR_TOKEN within a type of OTR_OUTER_TYPE at OFFSET.
2545 If INCLUDE_BASES is true, walk also the base types of OUTER_TYPES containing
2546 OTR_TYPE and include their virtual methods. This is useful for types
2547 possibly in construction or destruction, where the virtual table may
2548 temporarily change to one of the base types. INCLUDE_DERIVED_TYPES makes
2549 us walk the inheritance graph for all derivations.
2550
2551 OTR_TOKEN == INT_MAX is used to mark calls that are provably
2552 undefined and should be redirected to unreachable.
2553
2554 If COMPLETEP is non-NULL, store true if the list is complete.
2555 CACHE_TOKEN (if non-NULL) will get set to a unique ID of the entry
2556 in the target cache. If the user needs to visit every target list
2557 just once, it can memoize them.
2558
2559 SPECULATIVE_TARGETS specifies the number of targets that are speculatively
2560 likely. These include targets specified by the speculative part
2561 of the polymorphic call context and exclude all targets for classes
2562 in construction.
2563
2564 The returned vector is placed into the cache. It is NOT the caller's
2565 responsibility to free it. The vector can be freed on a cgraph_remove_node
2566 call if the particular node is a virtual function present in the cache. */
2567
2568 vec <cgraph_node *>
2569 possible_polymorphic_call_targets (tree otr_type,
2570 HOST_WIDE_INT otr_token,
2571 ipa_polymorphic_call_context context,
2572 bool *completep,
2573 void **cache_token,
2574 int *speculative_targetsp)
2575 {
2576 static struct cgraph_node_hook_list *node_removal_hook_holder;
2577 vec <cgraph_node *> nodes = vNULL;
2578 vec <tree> bases_to_consider = vNULL;
2579 odr_type type, outer_type;
2580 polymorphic_call_target_d key;
2581 polymorphic_call_target_d **slot;
2582 unsigned int i;
2583 tree binfo, target;
2584 bool complete;
2585 bool can_refer;
2586 bool skipped = false;
2587
2588 otr_type = TYPE_MAIN_VARIANT (otr_type);
2589
2590 /* If ODR is not initialized, return empty incomplete list. */
2591 if (!odr_hash)
2592 {
2593 if (completep)
2594 *completep = false;
2595 if (cache_token)
2596 *cache_token = NULL;
2597 if (speculative_targetsp)
2598 *speculative_targetsp = 0;
2599 return nodes;
2600 }
2601
2602 /* If we hit type inconsistency, just return empty list of targets. */
2603 if (otr_token == INT_MAX)
2604 {
2605 if (completep)
2606 *completep = true;
2607 if (cache_token)
2608 *cache_token = NULL;
2609 if (speculative_targetsp)
2610 *speculative_targetsp = 0;
2611 return nodes;
2612 }
2613
2614 /* Do not bother to compute speculative info when the user does not ask for it. */
2615 if (!speculative_targetsp || !context.speculative_outer_type)
2616 clear_speculation (&context);
2617
2618 type = get_odr_type (otr_type, true);
2619
2620 /* Recording type variants would waste the results cache. */
2621 gcc_assert (!context.outer_type
2622 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
2623
2624 /* Lookup the outer class type we want to walk. */
2625 if ((context.outer_type || context.speculative_outer_type)
2626 && !get_class_context (&context, otr_type))
2627 {
2628 if (completep)
2629 *completep = false;
2630 if (cache_token)
2631 *cache_token = NULL;
2632 if (speculative_targetsp)
2633 *speculative_targetsp = 0;
2634 return nodes;
2635 }
2636
2637 /* Check that get_class_context kept the main variant. */
2638 gcc_assert (!context.outer_type
2639 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
2640
2641 /* We canonicalize our query, so we do not need extra hashtable entries. */
2642
2643 /* Without an outer type, we have no use for the offset. Just do the
2644 basic search from the inner type. */
2645 if (!context.outer_type)
2646 {
2647 context.outer_type = otr_type;
2648 context.offset = 0;
2649 }
2650 /* We need to update our hierarchy if the type does not exist. */
2651 outer_type = get_odr_type (context.outer_type, true);
2652 /* If the type is final, there are no derivations. */
2653 if (TYPE_FINAL_P (outer_type->type))
2654 context.maybe_derived_type = false;
2655
2656 /* Initialize query cache. */
2657 if (!cached_polymorphic_call_targets)
2658 {
2659 cached_polymorphic_call_targets = new hash_set<cgraph_node *>;
2660 polymorphic_call_target_hash
2661 = new polymorphic_call_target_hash_type (23);
2662 if (!node_removal_hook_holder)
2663 {
2664 node_removal_hook_holder =
2665 cgraph_add_node_removal_hook (&devirt_node_removal_hook, NULL);
2666 varpool_add_node_removal_hook (&devirt_variable_node_removal_hook,
2667 NULL);
2668 }
2669 }
2670
2671 /* Lookup cached answer. */
2672 key.type = type;
2673 key.otr_token = otr_token;
2674 key.context = context;
2675 slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
2676 if (cache_token)
2677 *cache_token = (void *)*slot;
2678 if (*slot)
2679 {
2680 if (completep)
2681 *completep = (*slot)->complete;
2682 if (speculative_targetsp)
2683 *speculative_targetsp = (*slot)->speculative_targets;
2684 if ((*slot)->type_warning && final_warning_records)
2685 {
2686 final_warning_records->type_warnings[(*slot)->type_warning - 1].count++;
2687 final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
2688 += final_warning_records->dyn_count;
2689 }
2690 if ((*slot)->decl_warning && final_warning_records)
2691 {
2692 struct decl_warn_count *c =
2693 final_warning_records->decl_warnings.get ((*slot)->decl_warning);
2694 c->count++;
2695 c->dyn_count += final_warning_records->dyn_count;
2696 }
2697 return (*slot)->targets;
2698 }
2699
2700 complete = true;
2701
2702 /* Do actual search. */
2703 timevar_push (TV_IPA_VIRTUAL_CALL);
2704 *slot = XCNEW (polymorphic_call_target_d);
2705 if (cache_token)
2706 *cache_token = (void *)*slot;
2707 (*slot)->type = type;
2708 (*slot)->otr_token = otr_token;
2709 (*slot)->context = context;
2710 (*slot)->speculative_targets = 0;
2711
2712 hash_set<tree> inserted;
2713 hash_set<tree> matched_vtables;
2714
2715 /* First insert targets we speculatively identified as likely. */
2716 if (context.speculative_outer_type)
2717 {
2718 odr_type speculative_outer_type;
2719 bool speculation_complete = true;
2720
2721 /* First insert target from type itself and check if it may have derived types. */
2722 speculative_outer_type = get_odr_type (context.speculative_outer_type, true);
2723 if (TYPE_FINAL_P (speculative_outer_type->type))
2724 context.speculative_maybe_derived_type = false;
2725 binfo = get_binfo_at_offset (TYPE_BINFO (speculative_outer_type->type),
2726 context.speculative_offset, otr_type);
2727 if (binfo)
2728 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
2729 &can_refer);
2730 else
2731 target = NULL;
2732
2733 /* In the case we get a final method, we don't need
2734 to walk derivations. */
2735 if (target && DECL_FINAL_P (target))
2736 context.speculative_maybe_derived_type = false;
2737 if (type_possibly_instantiated_p (speculative_outer_type->type))
2738 maybe_record_node (nodes, target, &inserted, can_refer, &speculation_complete);
2739 if (binfo)
2740 matched_vtables.add (BINFO_VTABLE (binfo));
2741
2742
2743 /* Next walk recursively all derived types. */
2744 if (context.speculative_maybe_derived_type)
2745 for (i = 0; i < speculative_outer_type->derived_types.length(); i++)
2746 possible_polymorphic_call_targets_1 (nodes, &inserted,
2747 &matched_vtables,
2748 otr_type,
2749 speculative_outer_type->derived_types[i],
2750 otr_token, speculative_outer_type->type,
2751 context.speculative_offset,
2752 &speculation_complete,
2753 bases_to_consider,
2754 false);
2755 (*slot)->speculative_targets = nodes.length();
2756 }
2757
2758 /* First see virtual method of type itself. */
2759 binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
2760 context.offset, otr_type);
2761 if (binfo)
2762 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
2763 &can_refer);
2764 else
2765 {
2766 gcc_assert (odr_violation_reported);
2767 target = NULL;
2768 }
2769
2770 /* Destructors are never called through construction virtual tables,
2771 because the type is always known. */
2772 if (target && DECL_CXX_DESTRUCTOR_P (target))
2773 context.maybe_in_construction = false;
2774
2775 if (target)
2776 {
2777 /* In the case we get a final method, we don't need
2778 to walk derivations. */
2779 if (DECL_FINAL_P (target))
2780 context.maybe_derived_type = false;
2781 }
2782
2783 /* If OUTER_TYPE is abstract, we know we are not seeing its instance. */
2784 if (type_possibly_instantiated_p (outer_type->type))
2785 maybe_record_node (nodes, target, &inserted, can_refer, &complete);
2786 else
2787 {
2788 skipped = true;
2789 gcc_assert (in_lto_p || context.maybe_derived_type);
2790 }
2791
2792 if (binfo)
2793 matched_vtables.add (BINFO_VTABLE (binfo));
2794
2795 /* Next walk recursively all derived types. */
2796 if (context.maybe_derived_type)
2797 {
2798 for (i = 0; i < outer_type->derived_types.length(); i++)
2799 possible_polymorphic_call_targets_1 (nodes, &inserted,
2800 &matched_vtables,
2801 otr_type,
2802 outer_type->derived_types[i],
2803 otr_token, outer_type->type,
2804 context.offset, &complete,
2805 bases_to_consider,
2806 context.maybe_in_construction);
2807
2808 if (!outer_type->all_derivations_known)
2809 {
2810 if (final_warning_records)
2811 {
2812 if (complete
2813 && nodes.length () == 1
2814 && warn_suggest_final_types
2815 && !outer_type->derived_types.length ())
2816 {
2817 if (outer_type->id >= (int)final_warning_records->type_warnings.length ())
2818 final_warning_records->type_warnings.safe_grow_cleared
2819 (odr_types.length ());
2820 final_warning_records->type_warnings[outer_type->id].count++;
2821 final_warning_records->type_warnings[outer_type->id].dyn_count
2822 += final_warning_records->dyn_count;
2823 final_warning_records->type_warnings[outer_type->id].type
2824 = outer_type->type;
2825 (*slot)->type_warning = outer_type->id + 1;
2826 }
2827 if (complete
2828 && warn_suggest_final_methods
2829 && nodes.length () == 1
2830 && types_same_for_odr (DECL_CONTEXT (nodes[0]->decl),
2831 outer_type->type))
2832 {
2833 bool existed;
2834 struct decl_warn_count &c =
2835 final_warning_records->decl_warnings.get_or_insert
2836 (nodes[0]->decl, &existed);
2837
2838 if (existed)
2839 {
2840 c.count++;
2841 c.dyn_count += final_warning_records->dyn_count;
2842 }
2843 else
2844 {
2845 c.count = 1;
2846 c.dyn_count = final_warning_records->dyn_count;
2847 c.decl = nodes[0]->decl;
2848 }
2849 (*slot)->decl_warning = nodes[0]->decl;
2850 }
2851 }
2852 complete = false;
2853 }
2854 }
2855
2856 /* Finally walk bases, if asked to. */
2857 if (!(*slot)->speculative_targets)
2858 (*slot)->speculative_targets = nodes.length();
2859
2860 /* Destructors are never called through construction virtual tables,
2861 because the type is always known. One of the entries may be
2862 cxa_pure_virtual, so look at at least two of them. */
2863 if (context.maybe_in_construction)
2864 for (i = 0; i < MIN (nodes.length (), 2); i++)
2865 if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
2866 context.maybe_in_construction = false;
2867 if (context.maybe_in_construction)
2868 {
2869 if (type != outer_type
2870 && (!skipped
2871 || (context.maybe_derived_type
2872 && !type_all_derivations_known_p (outer_type->type))))
2873 record_targets_from_bases (otr_type, otr_token, outer_type->type,
2874 context.offset, nodes, &inserted,
2875 &matched_vtables, &complete);
2876 if (skipped)
2877 maybe_record_node (nodes, target, &inserted, can_refer, &complete);
2878 for (i = 0; i < bases_to_consider.length(); i++)
2879 maybe_record_node (nodes, bases_to_consider[i], &inserted, can_refer, &complete);
2880 }
2881 bases_to_consider.release();
2882
2883 (*slot)->targets = nodes;
2884 (*slot)->complete = complete;
2885 if (completep)
2886 *completep = complete;
2887 if (speculative_targetsp)
2888 *speculative_targetsp = (*slot)->speculative_targets;
2889
2890 timevar_pop (TV_IPA_VIRTUAL_CALL);
2891 return nodes;
2892 }
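
/* A minimal usage sketch of the function above (hypothetical caller;
   the variable names are illustrative only):

     bool final;
     void *cache_token;
     int speculative_targets;
     vec <cgraph_node *> targets
       = possible_polymorphic_call_targets (otr_type, otr_token, context,
					    &final, &cache_token,
					    &speculative_targets);
     if (final && targets.length () == 1)
       ... the call can be devirtualized to targets[0] ...

   Note that the caller must not free TARGETS; the vector lives in the
   cache.  */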
2893
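/* Traversal callback for final_warning_records->decl_warnings: collect
   a pointer to each record VALUE into VEC so the records can be sorted
   and reported; KEY is unused. */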
2894 bool
2895 add_decl_warning (const tree &key ATTRIBUTE_UNUSED, const decl_warn_count &value,
2896 vec<const decl_warn_count*> *vec)
2897 {
2898 vec->safe_push (&value);
2899 return true;
2900 }
2901
2902 /* Dump all possible targets of a polymorphic call. */
2903
2904 void
2905 dump_possible_polymorphic_call_targets (FILE *f,
2906 tree otr_type,
2907 HOST_WIDE_INT otr_token,
2908 const ipa_polymorphic_call_context &ctx)
2909 {
2910 vec <cgraph_node *> targets;
2911 bool final;
2912 odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false);
2913 unsigned int i;
2914 int speculative;
2915
2916 if (!type)
2917 return;
2918 targets = possible_polymorphic_call_targets (otr_type, otr_token,
2919 ctx,
2920 &final, NULL, &speculative);
2921 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
2922 print_generic_expr (f, type->type, TDF_SLIM);
2923 fprintf (f, " token %i\n", (int)otr_token);
2924 if (ctx.outer_type || ctx.offset)
2925 {
2926 fprintf (f, " Contained in type:");
2927 print_generic_expr (f, ctx.outer_type, TDF_SLIM);
2928 fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n",
2929 ctx.offset);
2930 }
2931 if (ctx.speculative_outer_type)
2932 {
2933 fprintf (f, " Speculatively contained in type:");
2934 print_generic_expr (f, ctx.speculative_outer_type, TDF_SLIM);
2935 fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n",
2936 ctx.speculative_offset);
2937 }
2938
2939 fprintf (f, " %s%s%s%s\n ",
2940 final ? "This is a complete list." :
2941 "This is a partial list; extra targets may be defined in other units.",
2942 ctx.maybe_in_construction ? " (base types included)" : "",
2943 ctx.maybe_derived_type ? " (derived types included)" : "",
2944 ctx.speculative_maybe_derived_type ? " (speculative derived types included)" : "");
2945 for (i = 0; i < targets.length (); i++)
2946 {
2947 char *name = NULL;
2948 if (i == (unsigned)speculative)
2949 fprintf (f, "\n Targets that are not likely:\n"
2950 " ");
2951 if (in_lto_p)
2952 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
2953 fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
2954 if (in_lto_p)
2955 free (name);
2956 if (!targets[i]->definition)
2957 fprintf (f, " (no definition%s)",
2958 DECL_DECLARED_INLINE_P (targets[i]->decl)
2959 ? " inline" : "");
2960 }
2961 fprintf (f, "\n\n");
2962 }
2963
2964
2965 /* Return true if N can be possibly target of a polymorphic call of
2966 OTR_TYPE/OTR_TOKEN. */
2967
2968 bool
2969 possible_polymorphic_call_target_p (tree otr_type,
2970 HOST_WIDE_INT otr_token,
2971 const ipa_polymorphic_call_context &ctx,
2972 struct cgraph_node *n)
2973 {
2974 vec <cgraph_node *> targets;
2975 unsigned int i;
2976 enum built_in_function fcode;
2977 bool final;
2978
2979 if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
2980 && ((fcode = DECL_FUNCTION_CODE (n->decl))
2981 == BUILT_IN_UNREACHABLE
2982 || fcode == BUILT_IN_TRAP))
2983 return true;
2984
2985 if (!odr_hash)
2986 return true;
2987 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
2988 for (i = 0; i < targets.length (); i++)
2989 if (n->semantically_equivalent_p (targets[i]))
2990 return true;
2991
2992 /* At the moment we allow the middle end to dig out new external declarations
2993 as targets of polymorphic calls. */
2994 if (!final && !n->definition)
2995 return true;
2996 return false;
2997 }
2998
2999
3000 /* After callgraph construction new external nodes may appear.
3001 Add them into the graph. */
3002
3003 void
3004 update_type_inheritance_graph (void)
3005 {
3006 struct cgraph_node *n;
3007
3008 if (!odr_hash)
3009 return;
3010 free_polymorphic_call_targets_hash ();
3011 timevar_push (TV_IPA_INHERITANCE);
3012 /* We reconstruct the graph starting from the types of all methods seen in
3013 the unit. */
3014 FOR_EACH_FUNCTION (n)
3015 if (DECL_VIRTUAL_P (n->decl)
3016 && !n->definition
3017 && n->real_symbol_p ())
3018 get_odr_type (method_class_type (TYPE_MAIN_VARIANT (TREE_TYPE (n->decl))),
3019 true);
3020 timevar_pop (TV_IPA_INHERITANCE);
3021 }
3022
3023
3024 /* Return true if N looks like a likely target of a polymorphic call.
3025 Rule out cxa_pure_virtual, noreturns, functions declared cold and
3026 other obvious cases. */
3027
3028 bool
3029 likely_target_p (struct cgraph_node *n)
3030 {
3031 int flags;
3032 /* cxa_pure_virtual and similar things are not likely. */
3033 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
3034 return false;
3035 flags = flags_from_decl_or_type (n->decl);
3036 if (flags & ECF_NORETURN)
3037 return false;
3038 if (lookup_attribute ("cold",
3039 DECL_ATTRIBUTES (n->decl)))
3040 return false;
3041 if (n->frequency < NODE_FREQUENCY_NORMAL)
3042 return false;
3043 /* If there is no live virtual table referring to the target,
3044 the only way the target can be called is via an instance coming from
3045 another compilation unit; speculative devirtualization is built around
3046 the assumption that this won't happen. */
3047 if (!referenced_from_vtable_p (n))
3048 return false;
3049 return true;
3050 }
3051
3052 /* Compare type warning records P1 and P2 and choose the one with the larger
3053 count; helper for qsort. */
3054
3055 int
3056 type_warning_cmp (const void *p1, const void *p2)
3057 {
3058 const odr_type_warn_count *t1 = (const odr_type_warn_count *)p1;
3059 const odr_type_warn_count *t2 = (const odr_type_warn_count *)p2;
3060
3061 if (t1->dyn_count < t2->dyn_count)
3062 return 1;
3063 if (t1->dyn_count > t2->dyn_count)
3064 return -1;
3065 return t2->count - t1->count;
3066 }
3067
3068 /* Compare decl warning records P1 and P2 and choose the one with the larger
3069 count; helper for qsort. */
3070
3071 int
3072 decl_warning_cmp (const void *p1, const void *p2)
3073 {
3074 const decl_warn_count *t1 = *(const decl_warn_count * const *)p1;
3075 const decl_warn_count *t2 = *(const decl_warn_count * const *)p2;
3076
3077 if (t1->dyn_count < t2->dyn_count)
3078 return 1;
3079 if (t1->dyn_count > t2->dyn_count)
3080 return -1;
3081 return t2->count - t1->count;
3082 }
3083
3084 /* The ipa-devirt pass.
3085 When a polymorphic call has only one likely target in the unit,
3086 turn it into a speculative call. */
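
/* For illustration (hypothetical source; the transformed call is shown as
   pseudo-source, while the actual transformation only creates a
   speculative call graph edge):

     struct A { virtual int f (); };
     int call (A *a) { return a->f (); }

   when A::f is the only likely target, the indirect call conceptually
   becomes

     if (a->vptr[token] == &A::f)
       return A::f (a);		<- direct call, now inlinable
     else
       return a->f ();		<- indirect fallback

   carrying 8/10 of the original count and frequency (see the call to
   cgraph_turn_edge_to_speculative below).  */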
3087
3088 static unsigned int
3089 ipa_devirt (void)
3090 {
3091 struct cgraph_node *n;
3092 hash_set<void *> bad_call_targets;
3093 struct cgraph_edge *e;
3094
3095 int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
3096 int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
3097 int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
3098
3099 /* We can output -Wsuggest-final-methods and -Wsuggest-final-types warnings.
3100 This is implemented by setting up final_warning_records that are updated
3101 by possible_polymorphic_call_targets.
3102 We need to clear the cache in this case to trigger recomputation of all
3103 entries. */
3104 if (warn_suggest_final_methods || warn_suggest_final_types)
3105 {
3106 final_warning_records = new (final_warning_record);
3107 final_warning_records->type_warnings = vNULL;
3108 final_warning_records->type_warnings.safe_grow_cleared (odr_types.length ());
3109 free_polymorphic_call_targets_hash ();
3110 }
3111
3112 FOR_EACH_DEFINED_FUNCTION (n)
3113 {
3114 bool update = false;
3115 if (dump_file && n->indirect_calls)
3116 fprintf (dump_file, "\n\nProcessing function %s/%i\n",
3117 n->name (), n->order);
3118 for (e = n->indirect_calls; e; e = e->next_callee)
3119 if (e->indirect_info->polymorphic)
3120 {
3121 struct cgraph_node *likely_target = NULL;
3122 void *cache_token;
3123 bool final;
3124 int speculative_targets;
3125
3126 if (final_warning_records)
3127 final_warning_records->dyn_count = e->count;
3128
3129 vec <cgraph_node *>targets
3130 = possible_polymorphic_call_targets
3131 (e, &final, &cache_token, &speculative_targets);
3132 unsigned int i;
3133
3134 if (dump_file)
3135 dump_possible_polymorphic_call_targets
3136 (dump_file, e);
3137
3138 npolymorphic++;
3139
3140 if (!flag_devirtualize_speculatively)
3141 continue;
3142
3143 if (!cgraph_maybe_hot_edge_p (e))
3144 {
3145 if (dump_file)
3146 fprintf (dump_file, "Call is cold\n\n");
3147 ncold++;
3148 continue;
3149 }
3150 if (e->speculative)
3151 {
3152 if (dump_file)
3153 fprintf (dump_file, "Call is already speculated\n\n");
3154 nspeculated++;
3155
3156 /* When dumping see if we agree with speculation. */
3157 if (!dump_file)
3158 continue;
3159 }
3160 if (bad_call_targets.contains (cache_token))
3161 {
3162 if (dump_file)
3163 fprintf (dump_file, "Target list is known to be useless\n\n");
3164 nmultiple++;
3165 continue;
3166 }
3167 for (i = 0; i < targets.length (); i++)
3168 if (likely_target_p (targets[i]))
3169 {
3170 if (likely_target)
3171 {
3172 if (i < (unsigned) speculative_targets)
3173 {
3174 likely_target = NULL;
3175 if (dump_file)
3176 fprintf (dump_file, "More than one likely target\n\n");
3177 nmultiple++;
3178 }
3179 break;
3180 }
3181 likely_target = targets[i];
3182 }
3183 if (!likely_target)
3184 {
3185 bad_call_targets.add (cache_token);
3186 continue;
3187 }
3188 /* This is reached only when dumping; check if we agree or disagree
3189 with the speculation. */
3190 if (e->speculative)
3191 {
3192 struct cgraph_edge *e2;
3193 struct ipa_ref *ref;
3194 cgraph_speculative_call_info (e, e2, e, ref);
3195 if (e2->callee->ultimate_alias_target ()
3196 == likely_target->ultimate_alias_target ())
3197 {
3198 fprintf (dump_file, "We agree with speculation\n\n");
3199 nok++;
3200 }
3201 else
3202 {
3203 fprintf (dump_file, "We disagree with speculation\n\n");
3204 nwrong++;
3205 }
3206 continue;
3207 }
3208 if (!likely_target->definition)
3209 {
3210 if (dump_file)
3211 fprintf (dump_file, "Target is not a definition\n\n");
3212 nnotdefined++;
3213 continue;
3214 }
3215 /* Do not introduce new references to external symbols. While we
3216 could handle these just fine, it is common for programs to be
3217 built incorrectly, with headers defining methods that are never
3218 linked in. */
3219 if (DECL_EXTERNAL (likely_target->decl))
3220 {
3221 if (dump_file)
3222 fprintf (dump_file, "Target is external\n\n");
3223 nexternal++;
3224 continue;
3225 }
3226 /* Don't use an implicitly-declared destructor (c++/58678). */
3227 struct cgraph_node *non_thunk_target
3228 = likely_target->function_symbol ();
3229 if (DECL_ARTIFICIAL (non_thunk_target->decl)
3230 && DECL_COMDAT (non_thunk_target->decl))
3231 {
3232 if (dump_file)
3233 fprintf (dump_file, "Target is artificial\n\n");
3234 nartificial++;
3235 continue;
3236 }
3237 if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
3238 && likely_target->can_be_discarded_p ())
3239 {
3240 if (dump_file)
3241 fprintf (dump_file, "Target is overwritable\n\n");
3242 noverwritable++;
3243 continue;
3244 }
3245 else if (dbg_cnt (devirt))
3246 {
3247 if (dump_enabled_p ())
3248 {
3249 location_t locus = gimple_location_safe (e->call_stmt);
3250 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
3251 "speculatively devirtualizing call in %s/%i to %s/%i\n",
3252 n->name (), n->order,
3253 likely_target->name (),
3254 likely_target->order);
3255 }
3256 if (!likely_target->can_be_discarded_p ())
3257 {
3258 cgraph_node *alias;
3259 alias = dyn_cast<cgraph_node *> (likely_target->noninterposable_alias ());
3260 if (alias)
3261 likely_target = alias;
3262 }
3263 nconverted++;
3264 update = true;
3265 cgraph_turn_edge_to_speculative
3266 (e, likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
3267 }
3268 }
3269 if (update)
3270 inline_update_overall_summary (n);
3271 }
3272 if (warn_suggest_final_methods || warn_suggest_final_types)
3273 {
3274 if (warn_suggest_final_types)
3275 {
3276 final_warning_records->type_warnings.qsort (type_warning_cmp);
3277 for (unsigned int i = 0;
3278 i < final_warning_records->type_warnings.length (); i++)
3279 if (final_warning_records->type_warnings[i].count)
3280 {
3281 tree type = final_warning_records->type_warnings[i].type;
3282 warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
3283 OPT_Wsuggest_final_types,
3284 "Declaring type %qD final "
3285 "would enable devirtualization of %i calls",
3286 type,
3287 final_warning_records->type_warnings[i].count);
3288 }
3289 }
3290
3291 if (warn_suggest_final_methods)
3292 {
3293 vec<const decl_warn_count*> decl_warnings_vec = vNULL;
3294
3295 final_warning_records->decl_warnings.traverse
3296 <vec<const decl_warn_count *> *, add_decl_warning> (&decl_warnings_vec);
3297 decl_warnings_vec.qsort (decl_warning_cmp);
3298 for (unsigned int i = 0; i < decl_warnings_vec.length (); i++)
3299 {
3300 tree decl = decl_warnings_vec[i]->decl;
3301 int count = decl_warnings_vec[i]->count;
3302
3303 if (DECL_CXX_DESTRUCTOR_P (decl))
3304 warning_at (DECL_SOURCE_LOCATION (decl),
3305 OPT_Wsuggest_final_methods,
3306 "Declaring virtual destructor of %qD final "
3307 "would enable devirtualization of %i calls",
3308 DECL_CONTEXT (decl), count);
3309 else
3310 warning_at (DECL_SOURCE_LOCATION (decl),
3311 OPT_Wsuggest_final_methods,
3312 "Declaring method %qD final "
3313 "would enable devirtualization of %i calls",
3314 decl, count);
3315 }
3316 }
3317
3318 delete (final_warning_records);
3319 final_warning_records = 0;
3320 }
3321
3322 if (dump_file)
3323 fprintf (dump_file,
3324 "%i polymorphic calls, %i devirtualized,"
3325 " %i speculatively devirtualized, %i cold\n"
3326 "%i have multiple targets, %i overwritable,"
3327 " %i already speculated (%i agree, %i disagree),"
3328 " %i external, %i not defined, %i artificial\n",
3329 npolymorphic, ndevirtualized, nconverted, ncold,
3330 nmultiple, noverwritable, nspeculated, nok, nwrong,
3331 nexternal, nnotdefined, nartificial);
3332 return ndevirtualized ? TODO_remove_functions : 0;
3333 }
3334
3335 namespace {
3336
3337 const pass_data pass_data_ipa_devirt =
3338 {
3339 IPA_PASS, /* type */
3340 "devirt", /* name */
3341 OPTGROUP_NONE, /* optinfo_flags */
3342 TV_IPA_DEVIRT, /* tv_id */
3343 0, /* properties_required */
3344 0, /* properties_provided */
3345 0, /* properties_destroyed */
3346 0, /* todo_flags_start */
3347 ( TODO_dump_symtab ), /* todo_flags_finish */
3348 };
3349
3350 class pass_ipa_devirt : public ipa_opt_pass_d
3351 {
3352 public:
3353 pass_ipa_devirt (gcc::context *ctxt)
3354 : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
3355 NULL, /* generate_summary */
3356 NULL, /* write_summary */
3357 NULL, /* read_summary */
3358 NULL, /* write_optimization_summary */
3359 NULL, /* read_optimization_summary */
3360 NULL, /* stmt_fixup */
3361 0, /* function_transform_todo_flags_start */
3362 NULL, /* function_transform */
3363 NULL) /* variable_transform */
3364 {}
3365
3366 /* opt_pass methods: */
3367 virtual bool gate (function *)
3368 {
3369 return (flag_devirtualize
3370 && (flag_devirtualize_speculatively
3371 || (warn_suggest_final_methods
3372 || warn_suggest_final_types))
3373 && optimize);
3374 }
3375
3376 virtual unsigned int execute (function *) { return ipa_devirt (); }
3377
3378 }; // class pass_ipa_devirt
3379
3380 } // anon namespace
3381
3382 ipa_opt_pass_d *
3383 make_pass_ipa_devirt (gcc::context *ctxt)
3384 {
3385 return new pass_ipa_devirt (ctxt);
3386 }
3387
3388 #include "gt-ipa-devirt.h"