ipa-devirt.c (get_polymorphic_call_info): Use fndecl instead of current_function_decl.
/* Basic IPA utilities for type inheritance graph construction and
   devirtualization.
   Copyright (C) 2013-2014 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Brief vocabulary:
     ODR = One Definition Rule
	In short, the ODR states that:
	1 In any translation unit, a template, type, function, or object can
	  have no more than one definition.  Some of these can have any number
	  of declarations.  A definition provides an instance.
	2 In the entire program, an object or non-inline function cannot have
	  more than one definition; if an object or function is used, it must
	  have exactly one definition.  You can declare an object or function
	  that is never used, in which case you don't have to provide
	  a definition.  In no event can there be more than one definition.
	3 Some things, like types, templates, and extern inline functions, can
	  be defined in more than one translation unit.  For a given entity,
	  each definition must be the same.  Non-extern objects and functions
	  in different translation units are different entities, even if their
	  names and types are the same.

     OTR = OBJ_TYPE_REF
       This is the GIMPLE representation of the type information of a
       polymorphic call.  It contains two parameters:
	 otr_type is the type of the class whose method is called.
	 otr_token is the index into the virtual table where the address
	 is taken.

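       For instance (an illustrative sketch, not code from this unit):

	 struct A { virtual int f (); };
	 int call (A *a) { return a->f (); }

       the call is represented in GIMPLE dumps roughly as
       OBJ_TYPE_REF (_2; (struct A) a_1(D)->0), i.e. otr_type is A and
       otr_token is 0, the index of f in A's virtual table.
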
     BINFO
       This is the type inheritance information attached to each tree
       RECORD_TYPE by the C++ frontend.  It provides information about base
       types and virtual tables.

       BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
       BINFO also links to its type by BINFO_TYPE and to the virtual table by
       BINFO_VTABLE.

       Base types of a given type are enumerated by the BINFO_BASE_BINFO
       vector.  Members of this vector are not the BINFOs associated
       with the base types themselves; rather, they are new copies of
       those BINFOs (base BINFOs).  Their virtual tables may differ from
       the virtual table of the base type.  Also BINFO_OFFSET specifies
       the offset of the base within the type.

       In the case of single inheritance, the virtual table is shared
       and BINFO_VTABLE of the base BINFO is NULL.  In the case of multiple
       inheritance the individual virtual tables are pointed to by
       BINFO_VTABLE of the base BINFOs (which differs from the BINFO_VTABLE
       of the BINFO associated with the base type).

       BINFO lookup for a given base type and offset can be done by
       get_binfo_at_offset.  It returns the BINFO whose virtual table
       can be used for lookup of virtual methods associated with the
       base type.

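       As a hedged illustration (hypothetical types):

	 struct A { virtual void f (); };
	 struct B : A { };		single inheritance: the vtable is
					shared, so the base BINFO for A
					inside B has NULL BINFO_VTABLE
	 struct X { virtual void g (); };
	 struct C : A, X { };		multiple inheritance: the base
					BINFOs carry their own BINFO_VTABLEs
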
     token
       This is an index of a virtual method in the virtual table associated
       with the type defining it.  The token can be looked up from an
       OBJ_TYPE_REF or from DECL_VINDEX of a given virtual method.

     polymorphic (indirect) call
       This is the callgraph representation of a virtual method call.  Every
       polymorphic call contains otr_type and otr_token taken from the
       original OBJ_TYPE_REF at callgraph construction time.

   What we do here:

   build_type_inheritance_graph triggers a construction of the type inheritance
   graph.

     We reconstruct it based on types of methods we see in the unit.
     This means that the graph is not complete.  Types with no methods are not
     inserted into the graph.  Also types without virtual methods are not
     represented at all, though it may be easy to add this.

     The inheritance graph is represented as follows:

       Vertices are structures odr_type.  Every odr_type may correspond
       to one or more tree type nodes that are equivalent by the ODR rule.
       (the multiple type nodes appear only with link-time optimization)

       Edges are represented by odr_type->base and odr_type->derived_types.
       At the moment we do not track offsets of types for multiple inheritance.
       Adding this is easy.

  possible_polymorphic_call_targets returns, given the parameters found on an
    indirect polymorphic edge, all possible call targets of the call.

  pass_ipa_devirt performs simple speculative devirtualization.
*/

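/* For example, a minimal sketch of how a client may query this machinery
   (an illustrative assumption about typical use, not code from this file):

     bool final;
     vec <cgraph_node *> targets
	= possible_polymorphic_call_targets (otr_type, otr_token,
					     context, &final);
     if (final && targets.length () == 1)
       devirtualize the call to targets[0];

   where OTR_TYPE, OTR_TOKEN and CONTEXT are taken from the polymorphic
   edge being analyzed.  */
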
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "print-tree.h"
#include "calls.h"
#include "cgraph.h"
#include "expr.h"
#include "tree-pass.h"
#include "hash-set.h"
#include "target.h"
#include "hash-table.h"
#include "inchash.h"
#include "tree-pretty-print.h"
#include "ipa-utils.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "ipa-inline.h"
#include "diagnostic.h"
#include "tree-dfa.h"
#include "demangle.h"
#include "dbgcnt.h"
#include "gimple-pretty-print.h"
#include "stor-layout.h"
#include "intl.h"
#include "hash-map.h"

static bool odr_types_equivalent_p (tree, tree, bool, bool *,
				    hash_set<tree> *);

static bool odr_violation_reported = false;

/* Dummy polymorphic call context.  */

const ipa_polymorphic_call_context ipa_dummy_polymorphic_call_context
   = {0, 0, NULL, NULL, false, true, true};

/* Pointer set of all call targets appearing in the cache.  */
static hash_set<cgraph_node *> *cached_polymorphic_call_targets;

/* The node of type inheritance graph.  For each type unique in the
   One Definition Rule (ODR) sense, we produce one node linking all
   main variants of types equivalent to it, bases and derived types.  */

struct GTY(()) odr_type_d
{
  /* Leader type.  */
  tree type;
  /* All bases; built only for main variants of types.  */
  vec<odr_type> GTY((skip)) bases;
  /* All derived types with virtual methods seen in unit;
     built only for main variants of types.  */
  vec<odr_type> GTY((skip)) derived_types;

  /* All equivalent types, if more than one.  */
  vec<tree, va_gc> *types;
  /* Set of all equivalent types, if NON-NULL.  */
  hash_set<tree> * GTY((skip)) types_set;

  /* Unique ID indexing the type in odr_types array.  */
  int id;
  /* Is it in anonymous namespace?  */
  bool anonymous_namespace;
  /* Do we know about all derivations of given type?  */
  bool all_derivations_known;
  /* Did we report ODR violation here?  */
  bool odr_violated;
};

static bool contains_type_p (tree, HOST_WIDE_INT, tree);

/* Return true if BINFO corresponds to a type with virtual methods.

   Every type has several BINFOs.  One is the BINFO associated with the
   type itself, while others represent it as a base of derived types.  The
   BINFOs representing bases do not have BINFO_VTABLE pointer set in the
   single-inheritance case (because vtables are shared).  Look up the
   BINFO of the type and check the presence of its vtable.  */

static inline bool
polymorphic_type_binfo_p (tree binfo)
{
  /* See if BINFO's type has a virtual table associated with it.  */
  return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo)));
}

/* Return TRUE if all derived types of T are known and thus
   we may consider the walk of derived types complete.

   This is typically true only for final anonymous namespace types and types
   defined within functions (that may be COMDAT and thus shared across units,
   but with the same set of derived types).  */

static bool
type_all_derivations_known_p (tree t)
{
  if (TYPE_FINAL_P (t))
    return true;
  if (flag_ltrans)
    return false;
  if (type_in_anonymous_namespace_p (t))
    return true;
  return (decl_function_context (TYPE_NAME (t)) != NULL);
}

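/* As a hedged illustration of when all derivations are known
   (hypothetical classes):

     struct B final { virtual void g (); };		TYPE_FINAL_P: nothing
							can derive from B
     namespace { struct A { virtual void f (); }; }	anonymous namespace:
							all derivations are
							visible in this unit

   while an ordinary exported class may still be derived from in another
   translation unit.  */
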
/* Return TRUE if type's constructors are all visible.  */

static bool
type_all_ctors_visible_p (tree t)
{
  return !flag_ltrans
	 && cgraph_state >= CGRAPH_STATE_CONSTRUCTION
	 /* We can not always use type_all_derivations_known_p.
	    For function local types we must assume the case where
	    the function is COMDAT and shared in between units.

	    TODO: These cases are quite easy to get, but we need
	    to keep track of C++ privatizing via -fno-weak
	    as well as the IPA privatizing.  */
	 && type_in_anonymous_namespace_p (t);
}

/* Return TRUE if type may have an instance.  */

static bool
type_possibly_instantiated_p (tree t)
{
  tree vtable;
  varpool_node *vnode;

  /* TODO: Add abstract types here.  */
  if (!type_all_ctors_visible_p (t))
    return true;

  vtable = BINFO_VTABLE (TYPE_BINFO (t));
  if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
    vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
  vnode = varpool_node::get (vtable);
  return vnode && vnode->definition;
}

/* One Definition Rule hashtable helpers.  */

struct odr_hasher
{
  typedef odr_type_d value_type;
  typedef union tree_node compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Return the type that was declared with T's name so that T is a
   qualified variant of it (for example, for "const A" return "A").  */

static inline tree
main_odr_variant (const_tree t)
{
  if (TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
    return TREE_TYPE (TYPE_NAME (t));
  /* Unnamed types and non-C++ produced types can be compared by variants.  */
  else
    return TYPE_MAIN_VARIANT (t);
}

/* Produce hash based on type name.  */

static hashval_t
hash_type_name (tree t)
{
  gcc_checking_assert (main_odr_variant (t) == t);

  /* If not in LTO, all main variants are unique, so we can do
     pointer hash.  */
  if (!in_lto_p)
    return htab_hash_pointer (t);

  /* Anonymous types are unique.  */
  if (type_in_anonymous_namespace_p (t))
    return htab_hash_pointer (t);

  /* For polymorphic types, we can simply hash the virtual table.  */
  if (TREE_CODE (t) == RECORD_TYPE
      && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
    {
      tree v = BINFO_VTABLE (TYPE_BINFO (t));
      hashval_t hash = 0;

      if (TREE_CODE (v) == POINTER_PLUS_EXPR)
	{
	  hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
	  v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
	}

      v = DECL_ASSEMBLER_NAME (v);
      hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
      return hash;
    }

  /* Rest is not implemented yet.  */
  gcc_unreachable ();
}

/* Return the computed hashcode for ODR_TYPE.  */

inline hashval_t
odr_hasher::hash (const value_type *odr_type)
{
  return hash_type_name (odr_type->type);
}

/* For languages with One Definition Rule, work out if
   types are the same based on their name.

   This is non-trivial for LTO where minor differences in
   the type representation may have prevented the type merging
   machinery from merging two copies of an otherwise equivalent type.

   Until we start streaming mangled type names, this function works
   only for polymorphic types.  */

bool
types_same_for_odr (const_tree type1, const_tree type2)
{
  gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));

  type1 = main_odr_variant (type1);
  type2 = main_odr_variant (type2);

  if (type1 == type2)
    return true;

  if (!in_lto_p)
    return false;

  /* Check for anonymous namespaces.  Those have !TREE_PUBLIC
     on the corresponding TYPE_STUB_DECL.  */
  if (type_in_anonymous_namespace_p (type1)
      || type_in_anonymous_namespace_p (type2))
    return false;

  /* See if types are obviously different (i.e. different codes
     or polymorphic vs. non-polymorphic).  This is not strictly correct
     for ODR violating programs, but we can't do better without streaming
     ODR names.  */
  if (TREE_CODE (type1) != TREE_CODE (type2))
    return false;
  if (TREE_CODE (type1) == RECORD_TYPE
      && (TYPE_BINFO (type1) == NULL_TREE) != (TYPE_BINFO (type2) == NULL_TREE))
    return false;
  if (TREE_CODE (type1) == RECORD_TYPE && TYPE_BINFO (type1)
      && (BINFO_VTABLE (TYPE_BINFO (type1)) == NULL_TREE)
	 != (BINFO_VTABLE (TYPE_BINFO (type2)) == NULL_TREE))
    return false;

  /* At the moment we have no way to establish ODR equivalence at LTO
     other than comparing virtual table pointers of polymorphic types.
     Eventually we should start saving mangled names in TYPE_NAME.
     Then this condition will become non-trivial.  */

  if (TREE_CODE (type1) == RECORD_TYPE
      && TYPE_BINFO (type1) && TYPE_BINFO (type2)
      && BINFO_VTABLE (TYPE_BINFO (type1))
      && BINFO_VTABLE (TYPE_BINFO (type2)))
    {
      tree v1 = BINFO_VTABLE (TYPE_BINFO (type1));
      tree v2 = BINFO_VTABLE (TYPE_BINFO (type2));
      gcc_assert (TREE_CODE (v1) == POINTER_PLUS_EXPR
		  && TREE_CODE (v2) == POINTER_PLUS_EXPR);
      return (operand_equal_p (TREE_OPERAND (v1, 1),
			       TREE_OPERAND (v2, 1), 0)
	      && DECL_ASSEMBLER_NAME
		     (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
		 == DECL_ASSEMBLER_NAME
		     (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
    }
  gcc_unreachable ();
}

/* Compare types T1 and T2 and return true if they are
   equivalent.  */

inline bool
odr_hasher::equal (const value_type *t1, const compare_type *ct2)
{
  tree t2 = const_cast <tree> (ct2);

  gcc_checking_assert (main_odr_variant (t2) == t2);
  if (t1->type == t2)
    return true;
  if (!in_lto_p)
    return false;
  return types_same_for_odr (t1->type, t2);
}

/* Free ODR type V.  */

inline void
odr_hasher::remove (value_type *v)
{
  v->bases.release ();
  v->derived_types.release ();
  if (v->types_set)
    delete v->types_set;
  ggc_free (v);
}

/* ODR type hash used to lookup ODR type based on tree type node.  */

typedef hash_table<odr_hasher> odr_hash_type;
static odr_hash_type *odr_hash;

/* ODR types are also stored into ODR_TYPE vector to allow consistent
   walking.  Bases appear before derived types.  Vector is garbage collected
   so we won't end up visiting empty types.  */

static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
#define odr_types (*odr_types_ptr)

/* Set TYPE_BINFO of TYPE and its variants to BINFO.  */
void
set_type_binfo (tree type, tree binfo)
{
  for (; type; type = TYPE_NEXT_VARIANT (type))
    if (COMPLETE_TYPE_P (type))
      TYPE_BINFO (type) = binfo;
    else
      gcc_assert (!TYPE_BINFO (type));
}

/* Compare T1 and T2 based on name or structure.  */

static bool
odr_subtypes_equivalent_p (tree t1, tree t2, hash_set<tree> *visited)
{
  bool an1, an2;

  /* This can happen in incomplete types that should be handled earlier.  */
  gcc_assert (t1 && t2);

  t1 = main_odr_variant (t1);
  t2 = main_odr_variant (t2);
  if (t1 == t2)
    return true;
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;
  if ((TYPE_NAME (t1) == NULL_TREE) != (TYPE_NAME (t2) == NULL_TREE))
    return false;
  if (TYPE_NAME (t1) && DECL_NAME (TYPE_NAME (t1)) != DECL_NAME (TYPE_NAME (t2)))
    return false;

  /* Anonymous namespace types must match exactly.  */
  an1 = type_in_anonymous_namespace_p (t1);
  an2 = type_in_anonymous_namespace_p (t2);
  if (an1 != an2 || an1)
    return false;

  /* For types where we can not establish ODR equivalence, recurse and deeply
     compare.  */
  if (TREE_CODE (t1) != RECORD_TYPE
      || !TYPE_BINFO (t1) || !TYPE_BINFO (t2)
      || !polymorphic_type_binfo_p (TYPE_BINFO (t1))
      || !polymorphic_type_binfo_p (TYPE_BINFO (t2)))
    {
      /* This should really be a pair hash, but for the moment we do not need
	 100% reliability and it would be better to compare all ODR types so
	 recursion here is needed only for component types.  */
      if (visited->add (t1))
	return true;
      return odr_types_equivalent_p (t1, t2, false, NULL, visited);
    }
  return types_same_for_odr (t1, t2);
}

/* Output ODR violation warning about T1 and T2 with REASON.
   Display location of ST1 and ST2 if REASON speaks about a field or
   method of the type.
   If WARN is false, do nothing.  Set WARNED if a warning was indeed
   output.  */

void
warn_odr (tree t1, tree t2, tree st1, tree st2,
	  bool warn, bool *warned, const char *reason)
{
  tree decl2 = TYPE_NAME (t2);

  if (!warn)
    return;
  if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (t1)), OPT_Wodr,
		   "type %qT violates one definition rule",
		   t1))
    return;
  if (!st1)
    ;
  else if (TREE_CODE (st1) == FIELD_DECL)
    {
      inform (DECL_SOURCE_LOCATION (decl2),
	      "a different type is defined in another translation unit");
      inform (DECL_SOURCE_LOCATION (st1),
	      "the first difference of corresponding definitions is field %qD",
	      st1);
      decl2 = st2;
    }
  else if (TREE_CODE (st1) == FUNCTION_DECL)
    {
      inform (DECL_SOURCE_LOCATION (decl2),
	      "a different type is defined in another translation unit");
      inform (DECL_SOURCE_LOCATION (st1),
	      "the first difference of corresponding definitions is method %qD",
	      st1);
      decl2 = st2;
    }
  else
    return;
  inform (DECL_SOURCE_LOCATION (decl2), reason);

  if (warned)
    *warned = true;
}

/* We already warned about ODR mismatch.  T1 and T2 ought to be equivalent
   because they are used in the same place in ODR-matching types.
   They are not; inform the user.  */

void
warn_types_mismatch (tree t1, tree t2)
{
  if (!TYPE_NAME (t1) || !TYPE_NAME (t2))
    return;
  /* In Firefox it is a common bug to have same types but in
     different namespaces.  Be a bit more informative on
     this.  */
  if (TYPE_CONTEXT (t1) && TYPE_CONTEXT (t2)
      && (((TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL)
	    != (TREE_CODE (TYPE_CONTEXT (t2)) == NAMESPACE_DECL))
	  || (TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL
	      && (DECL_NAME (TYPE_CONTEXT (t1)) !=
		  DECL_NAME (TYPE_CONTEXT (t2))))))
    inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
	    "type %qT should match type %qT but is defined "
	    "in a different namespace",
	    t1, t2);
  else
    inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
	    "type %qT should match type %qT",
	    t1, t2);
  inform (DECL_SOURCE_LOCATION (TYPE_NAME (t2)),
	  "the incompatible type is defined here");
}

/* Compare T1 and T2, report ODR violations if WARN is true and set
   WARNED to true if anything is reported.  Return true if types match.
   If true is returned, the types are also compatible in the sense of
   gimple_canonical_types_compatible_p.  */

static bool
odr_types_equivalent_p (tree t1, tree t2, bool warn, bool *warned,
			hash_set<tree> *visited)
{
  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;
  gcc_assert (!type_in_anonymous_namespace_p (t1));
  gcc_assert (!type_in_anonymous_namespace_p (t2));

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
		G_("a different type is defined in another translation unit"));
      return false;
    }

  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
		G_("a type with different qualifiers is defined in another "
		   "translation unit"));
      return false;
    }

  if (comp_type_attributes (t1, t2) != 1)
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
		G_("a type with different attributes "
		   "is defined in another translation unit"));
      return false;
    }

  if (TREE_CODE (t1) == ENUMERAL_TYPE)
    {
      tree v1, v2;
      for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
	   v1 && v2 ; v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
	{
	  if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
	    {
	      warn_odr (t1, t2, NULL, NULL, warn, warned,
			G_("an enum with different value name"
			   " is defined in another translation unit"));
	      return false;
	    }
	  if (TREE_VALUE (v1) != TREE_VALUE (v2)
	      && !operand_equal_p (DECL_INITIAL (TREE_VALUE (v1)),
				   DECL_INITIAL (TREE_VALUE (v2)), 0))
	    {
	      warn_odr (t1, t2, NULL, NULL, warn, warned,
			G_("an enum with different values is defined"
			   " in another translation unit"));
	      return false;
	    }
	}
      if (v1 || v2)
	{
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("an enum with mismatching number of values "
		       "is defined in another translation unit"));
	  return false;
	}
    }

  /* Non-aggregate types can be handled cheaply.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
	{
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("a type with different precision is defined "
		       "in another translation unit"));
	  return false;
	}
      if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
	{
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("a type with different signedness is defined "
		       "in another translation unit"));
	  return false;
	}

      if (TREE_CODE (t1) == INTEGER_TYPE
	  && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
	{
	  /* char vs. uint8_t?  */
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("a different type is defined in another "
		       "translation unit"));
	  return false;
	}

      /* For canonical type comparisons we do not want to build SCCs
	 so we cannot compare pointed-to types.  But we can, for now,
	 require the same pointed-to type kind and match what
	 useless_type_conversion_p would do.  */
      if (POINTER_TYPE_P (t1))
	{
	  if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
	      != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
	    {
	      warn_odr (t1, t2, NULL, NULL, warn, warned,
			G_("it is defined as a pointer in different address "
			   "space in another translation unit"));
	      return false;
	    }

	  if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
					  visited))
	    {
	      warn_odr (t1, t2, NULL, NULL, warn, warned,
			G_("it is defined as a pointer to different type "
			   "in another translation unit"));
	      if (warn && warned)
		warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
	      return false;
	    }
	}

      /* Tail-recurse to components.  */
      if ((TREE_CODE (t1) == VECTOR_TYPE || TREE_CODE (t1) == COMPLEX_TYPE)
	  && !odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
					 visited))
	{
	  /* Probably specific enough.  */
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("a different type is defined "
		       "in another translation unit"));
	  if (warn && warned)
	    warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
	  return false;
	}

      gcc_assert (operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0));
      gcc_assert (operand_equal_p (TYPE_SIZE_UNIT (t1),
				   TYPE_SIZE_UNIT (t2), 0));
      gcc_assert (TYPE_MODE (t1) == TYPE_MODE (t2));

      return true;
    }

  /* Do type-specific comparisons.  */
  switch (TREE_CODE (t1))
    {
    case ARRAY_TYPE:
      {
	/* Array types are the same if the element types are the same and
	   the number of elements is the same.  */
	if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
					visited))
	  {
	    warn_odr (t1, t2, NULL, NULL, warn, warned,
		      G_("a different type is defined in another "
			 "translation unit"));
	    if (warn && warned)
	      warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
	  }
	gcc_assert (TYPE_STRING_FLAG (t1) == TYPE_STRING_FLAG (t2));
	gcc_assert (TYPE_NONALIASED_COMPONENT (t1)
		    == TYPE_NONALIASED_COMPONENT (t2));

	tree i1 = TYPE_DOMAIN (t1);
	tree i2 = TYPE_DOMAIN (t2);

	/* For an incomplete external array, the type domain can be
	   NULL_TREE.  Check this condition also.  */
	if (i1 == NULL_TREE || i2 == NULL_TREE)
	  return true;

	tree min1 = TYPE_MIN_VALUE (i1);
	tree min2 = TYPE_MIN_VALUE (i2);
	tree max1 = TYPE_MAX_VALUE (i1);
	tree max2 = TYPE_MAX_VALUE (i2);

	/* In C++, the minimum should always be 0.  */
	gcc_assert (min1 == min2);
	if (!operand_equal_p (max1, max2, 0))
	  {
	    warn_odr (t1, t2, NULL, NULL, warn, warned,
		      G_("an array of different size is defined "
			 "in another translation unit"));
	    return false;
	  }
	gcc_assert (operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0));
	gcc_assert (operand_equal_p (TYPE_SIZE_UNIT (t1),
				     TYPE_SIZE_UNIT (t2), 0));
      }
    return true;

    case METHOD_TYPE:
    case FUNCTION_TYPE:
      /* Function types are the same if the return type and argument types
	 are the same.  */
      if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
	{
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("has different return value "
		       "in another translation unit"));
	  if (warn && warned)
	    warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
	  return false;
	}

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
	return true;
      else
	{
	  tree parms1, parms2;

	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
	       parms1 && parms2;
	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
	    {
	      if (!odr_subtypes_equivalent_p
		     (TREE_VALUE (parms1), TREE_VALUE (parms2), visited))
		{
		  warn_odr (t1, t2, NULL, NULL, warn, warned,
			    G_("has different parameters in another "
			       "translation unit"));
		  if (warn && warned)
		    warn_types_mismatch (TREE_VALUE (parms1),
					 TREE_VALUE (parms2));
		  return false;
		}
	    }

	  if (parms1 || parms2)
	    {
	      warn_odr (t1, t2, NULL, NULL, warn, warned,
			G_("has different parameters "
			   "in another translation unit"));
	      return false;
	    }

	  return true;
	}

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f1, f2;

	/* For aggregate types, all the fields must be the same.  */
	if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
	  {
	    for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
		 f1 || f2;
		 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	      {
		/* Skip non-fields.  */
		while (f1 && TREE_CODE (f1) != FIELD_DECL)
		  f1 = TREE_CHAIN (f1);
		while (f2 && TREE_CODE (f2) != FIELD_DECL)
		  f2 = TREE_CHAIN (f2);
		if (!f1 || !f2)
		  break;
		if (DECL_ARTIFICIAL (f1) != DECL_ARTIFICIAL (f2))
		  break;
		if (DECL_NAME (f1) != DECL_NAME (f2)
		    && !DECL_ARTIFICIAL (f1))
		  {
		    warn_odr (t1, t2, f1, f2, warn, warned,
			      G_("a field with different name is defined "
				 "in another translation unit"));
		    return false;
		  }
		if (!odr_subtypes_equivalent_p (TREE_TYPE (f1), TREE_TYPE (f2),
						visited))
		  {
		    /* Do not warn about artificial fields and just go into
		       generic field mismatch warning.  */
		    if (DECL_ARTIFICIAL (f1))
		      break;

		    warn_odr (t1, t2, f1, f2, warn, warned,
			      G_("a field of same name but different type "
				 "is defined in another translation unit"));
		    if (warn && warned)
		      warn_types_mismatch (TREE_TYPE (f1), TREE_TYPE (f2));
		    return false;
		  }
		if (!gimple_compare_field_offset (f1, f2))
		  {
		    /* Do not warn about artificial fields and just go into
		       generic field mismatch warning.  */
		    if (DECL_ARTIFICIAL (f1))
		      break;
		    warn_odr (t1, t2, t1, t2, warn, warned,
			      G_("fields have different layout "
				 "in another translation unit"));
		    return false;
		  }
		gcc_assert (DECL_NONADDRESSABLE_P (f1)
			    == DECL_NONADDRESSABLE_P (f2));
	      }

	    /* If one aggregate has more fields than the other, they
	       are not the same.  */
	    if (f1 || f2)
	      {
		warn_odr (t1, t2, NULL, NULL, warn, warned,
			  G_("a type with different number of fields "
			     "is defined in another translation unit"));
		return false;
	      }
	    if ((TYPE_MAIN_VARIANT (t1) == t1 || TYPE_MAIN_VARIANT (t2) == t2)
		&& (TYPE_METHODS (TYPE_MAIN_VARIANT (t1))
		    != TYPE_METHODS (TYPE_MAIN_VARIANT (t2))))
	      {
		for (f1 = TYPE_METHODS (TYPE_MAIN_VARIANT (t1)),
		     f2 = TYPE_METHODS (TYPE_MAIN_VARIANT (t2));
		     f1 && f2 ; f1 = DECL_CHAIN (f1), f2 = DECL_CHAIN (f2))
		  {
		    if (DECL_ASSEMBLER_NAME (f1) != DECL_ASSEMBLER_NAME (f2))
		      {
			warn_odr (t1, t2, f1, f2, warn, warned,
				  G_("a different method of the same type "
				     "is defined in another translation unit"));
			return false;
		      }
		    if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
		      {
			warn_odr (t1, t2, f1, f2, warn, warned,
				  G_("a definition that differs by virtual "
				     "keyword in another translation unit"));
			return false;
		      }
		    if (DECL_VINDEX (f1) != DECL_VINDEX (f2))
		      {
			warn_odr (t1, t2, f1, f2, warn, warned,
				  G_("virtual table layout differs in another "
				     "translation unit"));
			return false;
		      }
		    if (!odr_subtypes_equivalent_p (TREE_TYPE (f1),
						    TREE_TYPE (f2), visited))
		      {
			warn_odr (t1, t2, f1, f2, warn, warned,
				  G_("method with incompatible type is defined "
				     "in another translation unit"));
			return false;
		      }
		  }
		if (f1 || f2)
		  {
		    warn_odr (t1, t2, NULL, NULL, warn, warned,
			      G_("a type with different number of methods "
				 "is defined in another translation unit"));
		    return false;
		  }
	      }
	    gcc_assert (operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0));
	    gcc_assert (operand_equal_p (TYPE_SIZE_UNIT (t1),
					 TYPE_SIZE_UNIT (t2), 0));
	  }

	return true;
      }

    default:
      gcc_unreachable ();
    }
}

/* TYPE is equivalent to VAL by ODR, but its tree representation differs
   from VAL->type.  This may happen in LTO where tree merging did not merge
   all variants of the same type.  It may or may not indicate an ODR
   violation.  Add it to the list of duplicates and warn on some violations.  */

static bool
add_type_duplicate (odr_type val, tree type)
{
  bool build_bases = false;
  if (!val->types_set)
    val->types_set = new hash_set<tree>;

  /* Always prefer the complete type to be the leader.  */
  if (!COMPLETE_TYPE_P (val->type)
      && COMPLETE_TYPE_P (type))
    {
      tree tmp = type;

      build_bases = true;
      type = val->type;
      val->type = tmp;
    }

  /* See if this duplicate is new.  */
  if (!val->types_set->add (type))
    {
      bool merge = true;
      bool base_mismatch = false;
      unsigned int i,j;
      bool warned = false;
      hash_set<tree> visited;

      gcc_assert (in_lto_p);
      vec_safe_push (val->types, type);

      /* First we compare memory layout.  */
      if (!odr_types_equivalent_p (val->type, type,
				   !flag_ltrans && !val->odr_violated,
				   &warned, &visited))
	{
	  merge = false;
	  odr_violation_reported = true;
	  val->odr_violated = true;
	  if (cgraph_dump_file)
	    {
	      fprintf (cgraph_dump_file, "ODR violation\n");

	      print_node (cgraph_dump_file, "", val->type, 0);
	      putc ('\n',cgraph_dump_file);
	      print_node (cgraph_dump_file, "", type, 0);
	      putc ('\n',cgraph_dump_file);
	    }
	}

      /* Next, sanity check that the bases are the same.  If not, we will end
	 up producing wrong answers.  */
      if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
	  && TREE_CODE (val->type) == RECORD_TYPE
	  && TREE_CODE (type) == RECORD_TYPE
	  && TYPE_BINFO (val->type) && TYPE_BINFO (type))
	{
	  for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
	    if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type), i)))
	      {
		odr_type base = get_odr_type
				   (BINFO_TYPE
				      (BINFO_BASE_BINFO (TYPE_BINFO (type),
							 i)),
				    true);
		if (val->bases.length () <= j || val->bases[j] != base)
		  base_mismatch = true;
		j++;
	      }
	  if (base_mismatch)
	    {
	      merge = false;
	      odr_violation_reported = true;

	      if (!warned && !val->odr_violated)
		warn_odr (type, val->type, NULL, NULL, !warned, &warned,
			  "a type with the same name but different bases is "
			  "defined in another translation unit");
	      val->odr_violated = true;
	      if (cgraph_dump_file)
		{
		  fprintf (cgraph_dump_file,
			   "ODR base violation or merging bug?\n");

		  print_node (cgraph_dump_file, "", val->type, 0);
		  putc ('\n',cgraph_dump_file);
		  print_node (cgraph_dump_file, "", type, 0);
		  putc ('\n',cgraph_dump_file);
		}
	    }
	}

      /* Regularize things a little.  During LTO same types may come with
	 different BINFOs.  Either because their virtual table was
	 not merged by tree merging and only later at decl merging or
	 because one type comes with an external vtable, while the other
	 with an internal one.  We want to merge equivalent binfos to conserve
	 memory and streaming overhead.

	 The external vtables are more harmful: they contain references
	 to external declarations of methods that may be defined in the
	 merged LTO unit.  For this reason we absolutely need to remove
	 them and replace by internal variants.  Not doing so will lead
	 to incomplete answers from possible_polymorphic_call_targets.  */
      if (!flag_ltrans && merge
	  && TREE_CODE (val->type) == RECORD_TYPE
	  && TREE_CODE (type) == RECORD_TYPE
	  && TYPE_BINFO (val->type) && TYPE_BINFO (type)
	  && TYPE_MAIN_VARIANT (type) == type
	  && TYPE_MAIN_VARIANT (val->type) == val->type
	  && BINFO_VTABLE (TYPE_BINFO (val->type))
	  && BINFO_VTABLE (TYPE_BINFO (type)))
	{
	  tree master_binfo = TYPE_BINFO (val->type);
	  tree v1 = BINFO_VTABLE (master_binfo);
	  tree v2 = BINFO_VTABLE (TYPE_BINFO (type));

	  if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
	    {
	      gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
			  && operand_equal_p (TREE_OPERAND (v1, 1),
					      TREE_OPERAND (v2, 1), 0));
	      v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
	      v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
	    }
	  gcc_assert (DECL_ASSEMBLER_NAME (v1)
		      == DECL_ASSEMBLER_NAME (v2));

	  if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
	    {
	      unsigned int i;

	      set_type_binfo (val->type, TYPE_BINFO (type));
	      for (i = 0; i < val->types->length (); i++)
		{
		  if (TYPE_BINFO ((*val->types)[i])
		      == master_binfo)
		    set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
		}
	      BINFO_TYPE (TYPE_BINFO (type)) = val->type;
	    }
	  else
	    set_type_binfo (type, master_binfo);
	}
    }
  return build_bases;
}

/* Get ODR type hash entry for TYPE.  If INSERT is true, create a
   possibly new entry.  */

odr_type
get_odr_type (tree type, bool insert)
{
  odr_type_d **slot;
  odr_type val;
  hashval_t hash;
  bool build_bases = false;
  bool insert_to_odr_array = false;
  int base_id = -1;

  type = main_odr_variant (type);

  hash = hash_type_name (type);
  slot = odr_hash->find_slot_with_hash (type, hash,
					insert ? INSERT : NO_INSERT);
  if (!slot)
    return NULL;

  /* See if we already have an entry for the type.  */
  if (*slot)
    {
      val = *slot;

      /* With LTO we need to support multiple tree representations of
	 the same ODR type.  */
      if (val->type != type)
	build_bases = add_type_duplicate (val, type);
    }
  else
    {
      val = ggc_cleared_alloc<odr_type_d> ();
      val->type = type;
      val->bases = vNULL;
      val->derived_types = vNULL;
      val->anonymous_namespace = type_in_anonymous_namespace_p (type);
      build_bases = COMPLETE_TYPE_P (val->type);
      insert_to_odr_array = true;
    }

  if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
      && type == TYPE_MAIN_VARIANT (type))
    {
      tree binfo = TYPE_BINFO (type);
      unsigned int i;

      gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) == type);

      val->all_derivations_known = type_all_derivations_known_p (type);
      *slot = val;
      for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
	/* For now record only polymorphic types.  Others are
	   pointless for devirtualization and we can not precisely
	   determine ODR equivalency of these during LTO.  */
	if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
	  {
	    odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
									i)),
					  true);
	    gcc_assert (TYPE_MAIN_VARIANT (base->type) == base->type);
	    base->derived_types.safe_push (val);
	    val->bases.safe_push (base);
	    if (base->id > base_id)
	      base_id = base->id;
	  }
    }
  /* Ensure that type always appears after bases.  */
  if (insert_to_odr_array)
    {
      if (odr_types_ptr)
	val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  else if (base_id > val->id)
    {
      odr_types[val->id] = 0;
      /* Be sure we did not record any derived types; these may need
	 renumbering too.  */
      gcc_assert (val->derived_types.length () == 0);
      if (odr_types_ptr)
	val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  return val;
}

/* Dump ODR type T and all its derived types.  INDENT specifies the
   indentation for recursive printing.  */

static void
dump_odr_type (FILE *f, odr_type t, int indent = 0)
{
  unsigned int i;
  fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
  print_generic_expr (f, t->type, TDF_SLIM);
  fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
  fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
  if (TYPE_NAME (t->type))
    {
      fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
	       DECL_SOURCE_FILE (TYPE_NAME (t->type)),
	       DECL_SOURCE_LINE (TYPE_NAME (t->type)));
    }
  if (t->bases.length ())
    {
      fprintf (f, "%*s base odr type ids: ", indent * 2, "");
      for (i = 0; i < t->bases.length (); i++)
	fprintf (f, " %i", t->bases[i]->id);
      fprintf (f, "\n");
    }
  if (t->derived_types.length ())
    {
      fprintf (f, "%*s derived types:\n", indent * 2, "");
      for (i = 0; i < t->derived_types.length (); i++)
	dump_odr_type (f, t->derived_types[i], indent + 1);
    }
  fprintf (f, "\n");
}

/* Dump the type inheritance graph.  */

static void
dump_type_inheritance_graph (FILE *f)
{
  unsigned int i;
  if (!odr_types_ptr)
    return;
  fprintf (f, "\n\nType inheritance graph:\n");
  for (i = 0; i < odr_types.length (); i++)
    {
      if (odr_types[i] && odr_types[i]->bases.length () == 0)
	dump_odr_type (f, odr_types[i]);
    }
  for (i = 0; i < odr_types.length (); i++)
    {
      if (odr_types[i] && odr_types[i]->types && odr_types[i]->types->length ())
	{
	  unsigned int j;
	  fprintf (f, "Duplicate tree types for odr type %i\n", i);
	  print_node (f, "", odr_types[i]->type, 0);
	  for (j = 0; j < odr_types[i]->types->length (); j++)
	    {
	      tree t;
	      fprintf (f, "duplicate #%i\n", j);
	      print_node (f, "", (*odr_types[i]->types)[j], 0);
	      t = (*odr_types[i]->types)[j];
	      while (TYPE_P (t) && TYPE_CONTEXT (t))
		{
		  t = TYPE_CONTEXT (t);
		  print_node (f, "", t, 0);
		}
	      putc ('\n',f);
	    }
	}
    }
}

/* Given method type T, return the type of the class it belongs to.
   Look up the this pointer and get its type.  */

tree
method_class_type (const_tree t)
{
  gcc_assert (TREE_CODE (t) == METHOD_TYPE);
  tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));

  return TREE_TYPE (first_parm_type);
}

/* Initialize IPA devirt and build inheritance tree graph.  */

void
build_type_inheritance_graph (void)
{
  struct symtab_node *n;
  FILE *inheritance_dump_file;
  int flags;

  if (odr_hash)
    return;
  timevar_push (TV_IPA_INHERITANCE);
  inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
  odr_hash = new odr_hash_type (23);

  /* We reconstruct the graph starting from types of all methods seen in the
     unit.  */
  FOR_EACH_SYMBOL (n)
    if (is_a <cgraph_node *> (n)
	&& DECL_VIRTUAL_P (n->decl)
	&& n->real_symbol_p ())
      get_odr_type (TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (n->decl))),
		    true);

    /* Look also for virtual tables of types that do not define any methods.

       We need it in a case where class B has a virtual base of class A
       re-defining its virtual method and there is class C with no virtual
       methods with B as a virtual base.

       Here we output B's virtual method in two variants - for non-virtual
       and virtual inheritance.  B's virtual table has the non-virtual
       version, while C's has the virtual one.

       For this reason we need to know about C in order to include both
       variants of B.  More correctly, record_target_from_binfo should
       add both variants of the method when walking B, but we have no
       link in between them.

       We rely on the fact that either the method is exported and thus we
       assume it is called externally or C is in anonymous namespace and
       thus we will see the vtable.  */
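    /* An illustrative C++ sketch of the situation described above
       (hypothetical class names):

	 struct A { virtual void f (); };
	 struct B : virtual A { void f (); };
	 struct C : virtual B { };

       B's own virtual table uses the non-virtual-inheritance variant of
       B::f, while the variant for virtual inheritance appears only in
       C's virtual table.  */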

    else if (is_a <varpool_node *> (n)
	     && DECL_VIRTUAL_P (n->decl)
	     && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
	     && TYPE_BINFO (DECL_CONTEXT (n->decl))
	     && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
      get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true);
  if (inheritance_dump_file)
    {
      dump_type_inheritance_graph (inheritance_dump_file);
      dump_end (TDI_inheritance, inheritance_dump_file);
    }
  timevar_pop (TV_IPA_INHERITANCE);
}

/* Return true if N has a reference from a live virtual table
   (and thus can be a destination of a polymorphic call).
   Be conservatively correct when the callgraph is not built or
   if the method may be referred to externally.  */

static bool
referenced_from_vtable_p (struct cgraph_node *node)
{
  int i;
  struct ipa_ref *ref;
  bool found = false;

  if (node->externally_visible
      || DECL_EXTERNAL (node->decl)
      || node->used_from_other_partition)
    return true;

  /* Keep this test constant time.
     It is unlikely this can happen except for the case where speculative
     devirtualization introduced many speculative edges to this node.
     In this case the target is very likely alive anyway.  */
  if (node->ref_list.referring.length () > 100)
    return true;

  /* We need references built.  */
  if (cgraph_state <= CGRAPH_STATE_CONSTRUCTION)
    return true;

  for (i = 0; node->iterate_referring (i, ref); i++)
    if ((ref->use == IPA_REF_ALIAS
	 && referenced_from_vtable_p (dyn_cast<cgraph_node *> (ref->referring)))
	|| (ref->use == IPA_REF_ADDR
	    && TREE_CODE (ref->referring->decl) == VAR_DECL
	    && DECL_VIRTUAL_P (ref->referring->decl)))
      {
	found = true;
	break;
      }
  return found;
}

/* If TARGET has an associated node, record it in the NODES array.
   CAN_REFER specifies if the program can refer to the target directly.
   If TARGET is unknown (NULL) or if it can not be inserted (for example
   because its body was already removed and there is no way to refer to
   it), clear COMPLETEP.  */

static void
maybe_record_node (vec <cgraph_node *> &nodes,
		   tree target, hash_set<tree> *inserted,
		   bool can_refer,
		   bool *completep)
{
  struct cgraph_node *target_node, *alias_target;
  enum availability avail;

  /* __cxa_pure_virtual and __builtin_unreachable do not need to be added into
     the list of targets; the runtime effect of calling them is undefined.
     Only "real" virtual methods should be accounted.  */
  if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE)
    return;

  if (!can_refer)
    {
      /* The only case when a method of anonymous namespace becomes unreferable
	 is when we completely optimized it out.  */
      if (flag_ltrans
	  || !target
	  || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
	*completep = false;
      return;
    }

  if (!target)
    return;

  target_node = cgraph_node::get (target);

  /* Prefer the alias target over aliases, so we do not get confused by
     fake duplicates.  */
  if (target_node)
    {
      alias_target = target_node->ultimate_alias_target (&avail);
      if (target_node != alias_target
	  && avail >= AVAIL_AVAILABLE
	  && target_node->get_availability ())
	target_node = alias_target;
    }

  /* A method can only be called by a polymorphic call if any
     of the vtables referring to it are alive.

     While this holds for non-anonymous functions, too, there are
     cases where we want to keep them in the list; for example
     inline functions with -fno-weak are static, but we still
     may devirtualize them when an instance comes from another unit.
     The same holds for LTO.

     Currently we ignore these functions in speculative devirtualization.
     ??? Maybe it would make sense to be more aggressive for LTO even
     elsewhere.  */
  if (!flag_ltrans
      && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
      && (!target_node
	  || !referenced_from_vtable_p (target_node)))
    ;
  /* See if TARGET is a useful function we can deal with.  */
  else if (target_node != NULL
	   && (TREE_PUBLIC (target)
	       || DECL_EXTERNAL (target)
	       || target_node->definition)
	   && target_node->real_symbol_p ())
    {
      gcc_assert (!target_node->global.inlined_to);
      gcc_assert (target_node->real_symbol_p ());
      if (!inserted->add (target))
	{
	  cached_polymorphic_call_targets->add (target_node);
	  nodes.safe_push (target_node);
	}
    }
  else if (completep
	   && (!type_in_anonymous_namespace_p
		 (DECL_CONTEXT (target))
	       || flag_ltrans))
    *completep = false;
}

/* See if BINFO's type matches OUTER_TYPE.  If so, look up the
   BINFO of the subtype of OTR_TYPE at OFFSET, find the method
   in its vtable, and insert it into the NODES array
   or into BASES_TO_CONSIDER if that array is non-NULL.
   Otherwise recurse to base BINFOs.
   This matches what get_binfo_at_offset does, but with the offset
   being unknown.

   TYPE_BINFOS is a stack of BINFOs of types with defined
   virtual table seen on the way from the class type to BINFO.

   MATCHED_VTABLES tracks virtual tables we already did a lookup
   for a virtual function in.  INSERTED tracks nodes we already
   inserted.

   ANONYMOUS is true if BINFO is part of anonymous namespace.

   Clear COMPLETEP when we hit an unreferable target.  */

static void
record_target_from_binfo (vec <cgraph_node *> &nodes,
			  vec <tree> *bases_to_consider,
			  tree binfo,
			  tree otr_type,
			  vec <tree> &type_binfos,
			  HOST_WIDE_INT otr_token,
			  tree outer_type,
			  HOST_WIDE_INT offset,
			  hash_set<tree> *inserted,
			  hash_set<tree> *matched_vtables,
			  bool anonymous,
			  bool *completep)
{
  tree type = BINFO_TYPE (binfo);
  int i;
  tree base_binfo;

  if (BINFO_VTABLE (binfo))
    type_binfos.safe_push (binfo);
  if (types_same_for_odr (type, outer_type))
    {
      int i;
      tree type_binfo = NULL;

      /* Look up BINFO with virtual table.  For normal types it is always
	 the last BINFO on the stack.  */
      for (i = type_binfos.length () - 1; i >= 0; i--)
	if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
	  {
	    type_binfo = type_binfos[i];
	    break;
	  }
      if (BINFO_VTABLE (binfo))
	type_binfos.pop ();
      /* If this is a duplicated BINFO for a base shared by virtual
	 inheritance, we may not have its associated vtable.  This is not
	 a problem, since we will walk it on the other path.  */
      if (!type_binfo)
	return;
      tree inner_binfo = get_binfo_at_offset (type_binfo,
					      offset, otr_type);
      if (!inner_binfo)
	{
	  gcc_assert (odr_violation_reported);
	  return;
	}
      /* For types in anonymous namespace first check if the respective vtable
	 is alive.  If not, we know the methods of the type can not be
	 called.  */
      if (!flag_ltrans && anonymous)
	{
	  tree vtable = BINFO_VTABLE (inner_binfo);
	  varpool_node *vnode;

	  if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
	    vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
	  vnode = varpool_node::get (vtable);
	  if (!vnode || !vnode->definition)
	    return;
	}
      gcc_assert (inner_binfo);
      if (bases_to_consider
	  ? !matched_vtables->contains (BINFO_VTABLE (inner_binfo))
	  : !matched_vtables->add (BINFO_VTABLE (inner_binfo)))
	{
	  bool can_refer;
	  tree target = gimple_get_virt_method_for_binfo (otr_token,
							  inner_binfo,
							  &can_refer);
	  if (!bases_to_consider)
	    maybe_record_node (nodes, target, inserted, can_refer, completep);
	  /* Destructors are never called via construction vtables.  */
	  else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
	    bases_to_consider->safe_push (target);
	}
      return;
    }

  /* Walk bases.  */
  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    /* Walking bases that have no virtual method is a pointless exercise.  */
    if (polymorphic_type_binfo_p (base_binfo))
      record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
				type_binfos,
				otr_token, outer_type, offset, inserted,
				matched_vtables, anonymous, completep);
  if (BINFO_VTABLE (binfo))
    type_binfos.pop ();
}

/* Look up virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
   of TYPE, insert them into NODES, and recurse into derived nodes.
   INSERTED is used to avoid duplicate insertions of methods into NODES.
   MATCHED_VTABLES is used to avoid walking vtables more than once.
   Clear COMPLETEP if an unreferable target is found.

   If CONSIDER_CONSTRUCTION is true, record to BASES_TO_CONSIDER
   all cases where BASE_SKIPPED is true (because the base is an abstract
   class).  */

static void
possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
				     hash_set<tree> *inserted,
				     hash_set<tree> *matched_vtables,
				     tree otr_type,
				     odr_type type,
				     HOST_WIDE_INT otr_token,
				     tree outer_type,
				     HOST_WIDE_INT offset,
				     bool *completep,
				     vec <tree> &bases_to_consider,
				     bool consider_construction)
{
  tree binfo = TYPE_BINFO (type->type);
  unsigned int i;
  vec <tree> type_binfos = vNULL;
  bool possibly_instantiated = type_possibly_instantiated_p (type->type);

  /* We may need to consider types w/o instances because of possible derived
     types using their methods either directly or via construction vtables.
     We are safe to skip them when all derivations are known, since we will
     handle them later.
     This is done by recording them to the BASES_TO_CONSIDER array.  */
  if (possibly_instantiated || consider_construction)
    {
      record_target_from_binfo (nodes,
				(!possibly_instantiated
				 && type_all_derivations_known_p (type->type))
				? &bases_to_consider : NULL,
				binfo, otr_type, type_binfos, otr_token,
				outer_type, offset,
				inserted, matched_vtables,
				type->anonymous_namespace, completep);
    }
  type_binfos.release ();
  for (i = 0; i < type->derived_types.length (); i++)
    possible_polymorphic_call_targets_1 (nodes, inserted,
					 matched_vtables,
					 otr_type,
					 type->derived_types[i],
					 otr_token, outer_type, offset, completep,
					 bases_to_consider, consider_construction);
}

/* Cache of queries for polymorphic call targets.

   Enumerating all call targets may get expensive when there are many
   polymorphic calls in the program, so we memoize all the previous
   queries and avoid duplicated work.  */

struct polymorphic_call_target_d
{
  HOST_WIDE_INT otr_token;
  ipa_polymorphic_call_context context;
  odr_type type;
  vec <cgraph_node *> targets;
  int speculative_targets;
  bool complete;
  int type_warning;
  tree decl_warning;
};

/* Polymorphic call target cache helpers.  */

struct polymorphic_call_target_hasher
{
  typedef polymorphic_call_target_d value_type;
  typedef polymorphic_call_target_d compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Return the computed hashcode for ODR_QUERY.  */

inline hashval_t
polymorphic_call_target_hasher::hash (const value_type *odr_query)
{
  inchash::hash hstate (odr_query->otr_token);

  hstate.add_wide_int (odr_query->type->id);
  hstate.merge_hash (TYPE_UID (odr_query->context.outer_type));
  hstate.add_wide_int (odr_query->context.offset);

  if (odr_query->context.speculative_outer_type)
    {
      hstate.merge_hash (TYPE_UID (odr_query->context.speculative_outer_type));
      hstate.add_wide_int (odr_query->context.speculative_offset);
    }
  hstate.add_flag (odr_query->context.maybe_in_construction);
  hstate.add_flag (odr_query->context.maybe_derived_type);
  hstate.add_flag (odr_query->context.speculative_maybe_derived_type);
  hstate.commit_flag ();
  return hstate.end ();
}

/* Compare cache entries T1 and T2.  */

inline bool
polymorphic_call_target_hasher::equal (const value_type *t1,
				       const compare_type *t2)
{
  return (t1->type == t2->type && t1->otr_token == t2->otr_token
	  && t1->context.offset == t2->context.offset
	  && t1->context.speculative_offset == t2->context.speculative_offset
	  && t1->context.outer_type == t2->context.outer_type
	  && t1->context.speculative_outer_type
	     == t2->context.speculative_outer_type
	  && t1->context.maybe_in_construction
	     == t2->context.maybe_in_construction
	  && t1->context.maybe_derived_type == t2->context.maybe_derived_type
	  && (t1->context.speculative_maybe_derived_type
	      == t2->context.speculative_maybe_derived_type));
}

/* Remove an entry in the polymorphic call target cache hash.  */

inline void
polymorphic_call_target_hasher::remove (value_type *v)
{
  v->targets.release ();
  free (v);
}

/* Polymorphic call target query cache.  */

typedef hash_table<polymorphic_call_target_hasher>
   polymorphic_call_target_hash_type;
static polymorphic_call_target_hash_type *polymorphic_call_target_hash;

/* Destroy polymorphic call target query cache.  */

static void
free_polymorphic_call_targets_hash ()
{
  if (cached_polymorphic_call_targets)
    {
      delete polymorphic_call_target_hash;
      polymorphic_call_target_hash = NULL;
      delete cached_polymorphic_call_targets;
      cached_polymorphic_call_targets = NULL;
    }
}

/* When a virtual function is removed, we may need to flush the cache.  */

static void
devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
{
  if (cached_polymorphic_call_targets
      && cached_polymorphic_call_targets->contains (n))
    free_polymorphic_call_targets_hash ();
}

1717 /* Return true when TYPE contains an polymorphic type and thus is interesting
1718 for devirtualization machinery. */
1719
1720 bool
1721 contains_polymorphic_type_p (const_tree type)
1722 {
1723 type = TYPE_MAIN_VARIANT (type);
1724
1725 if (RECORD_OR_UNION_TYPE_P (type))
1726 {
1727 if (TYPE_BINFO (type)
1728 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
1729 return true;
1730 for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1731 if (TREE_CODE (fld) == FIELD_DECL
1732 && !DECL_ARTIFICIAL (fld)
1733 && contains_polymorphic_type_p (TREE_TYPE (fld)))
1734 return true;
1735 return false;
1736 }
1737 if (TREE_CODE (type) == ARRAY_TYPE)
1738 return contains_polymorphic_type_p (TREE_TYPE (type));
1739 return false;
1740 }
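
/* Editorial example (a sketch, not part of the original sources): given

     struct A { virtual void f (); };	// polymorphic itself
     struct B { int x; struct A a; };	// has a polymorphic field
     struct C { struct B b[4]; };	// array of such records
     struct D { int x; };		// plain data

   contains_polymorphic_type_p returns true for A, B and C but false for D.
   Artificial fields (base sub-objects) are skipped above because bases are
   already covered by the polymorphic_type_binfo_p check.  */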
1741
1742 /* Clear speculative info from CONTEXT. */
1743
1744 static void
1745 clear_speculation (ipa_polymorphic_call_context *context)
1746 {
1747 context->speculative_outer_type = NULL;
1748 context->speculative_offset = 0;
1749 context->speculative_maybe_derived_type = false;
1750 }
1751
1752 /* CONTEXT->OUTER_TYPE is the type of the memory object in which an object
1753 of EXPECTED_TYPE is contained at CONTEXT->OFFSET.  Walk the memory
1754 representation of CONTEXT->OUTER_TYPE and find the outermost class type
1755 that matches EXPECTED_TYPE or contains EXPECTED_TYPE as a base.  Update
1756 CONTEXT to represent it.
1757 
1758 For example, when CONTEXT represents type
1759 class A
1760 {
1761 int a;
1762 class B b;
1763 }
1764 and we look for the type at offset sizeof(int), we end up with B and
1765 offset 0.  If the same is produced by multiple inheritance, we end up
1766 with A and offset sizeof(int).
1767 
1768 If we cannot find the corresponding class, give up by setting
1769 CONTEXT->OUTER_TYPE to EXPECTED_TYPE and CONTEXT->OFFSET to 0.
1770 Return true when the lookup was successful. */
1771
1772 static bool
1773 get_class_context (ipa_polymorphic_call_context *context,
1774 tree expected_type)
1775 {
1776 tree type = context->outer_type;
1777 HOST_WIDE_INT offset = context->offset;
1778 bool speculative = false;
1779 bool speculation_valid = false;
1780 bool valid = false;
1781
1782 if (!context->outer_type)
1783 {
1784 type = context->outer_type = expected_type;
1785 context->offset = offset = 0;
1786 }
1787
1788 if (context->speculative_outer_type == context->outer_type
1789 && (!context->maybe_derived_type
1790 || context->speculative_maybe_derived_type))
1791 {
1792 context->speculative_outer_type = NULL;
1793 context->speculative_offset = 0;
1794 context->speculative_maybe_derived_type = false;
1795 }
1796
1797 /* See if the speculative type seems to be derived from outer_type.
1798 Then speculation is valid only if it really is a derived type and
1799 derived types are allowed.
1800 
1801 The test does not strictly check for derivation; it also accepts the case
1802 where outer_type is a field of speculative_outer_type.  In that case either
1803 MAYBE_DERIVED_TYPE is false and we have full non-speculative information
1804 or the loop below will correctly update SPECULATIVE_OUTER_TYPE
1805 and SPECULATIVE_MAYBE_DERIVED_TYPE. */
1806 if (context->speculative_outer_type
1807 && context->speculative_offset >= context->offset
1808 && contains_type_p (context->speculative_outer_type,
1809 context->offset - context->speculative_offset,
1810 context->outer_type))
1811 speculation_valid = context->maybe_derived_type;
1812 else
1813 clear_speculation (context);
1814
1815 /* Find the sub-object the constant actually refers to and mark whether it is
1816 an artificial one (as opposed to a user-defined one).
1817
1818 This loop is performed twice; first time for outer_type and second time
1819 for speculative_outer_type. The second iteration has SPECULATIVE set. */
1820 while (true)
1821 {
1822 HOST_WIDE_INT pos, size;
1823 tree fld;
1824
1825 /* On a match, just return what we found. */
1826 if (TREE_CODE (type) == TREE_CODE (expected_type)
1827 && (!in_lto_p
1828 || (TREE_CODE (type) == RECORD_TYPE
1829 && TYPE_BINFO (type)
1830 && polymorphic_type_binfo_p (TYPE_BINFO (type))))
1831 && types_same_for_odr (type, expected_type))
1832 {
1833 if (speculative)
1834 {
1835 gcc_assert (speculation_valid);
1836 gcc_assert (valid);
1837
1838 /* If the offset does not match, or the speculation merely duplicates the non-speculative result, give up on speculation. */
1839 if (offset != 0
1840 || (types_same_for_odr (context->speculative_outer_type,
1841 context->outer_type)
1842 && (context->maybe_derived_type
1843 == context->speculative_maybe_derived_type)))
1844 clear_speculation (context);
1845 return true;
1846 }
1847 else
1848 {
1849 /* A type cannot contain itself at a non-zero offset.  In that case
1850 just give up. */
1851 if (offset != 0)
1852 {
1853 valid = false;
1854 goto give_up;
1855 }
1856 valid = true;
1857 /* If speculation is not valid or we determined the type precisely,
1858 we are done. */
1859 if (!speculation_valid
1860 || !context->maybe_derived_type)
1861 {
1862 clear_speculation (context);
1863 return true;
1864 }
1865 /* Otherwise look into speculation now. */
1866 else
1867 {
1868 speculative = true;
1869 type = context->speculative_outer_type;
1870 offset = context->speculative_offset;
1871 continue;
1872 }
1873 }
1874 }
1875
1876 /* Walk the fields and find the one at OFFSET. */
1877 if (TREE_CODE (type) == RECORD_TYPE)
1878 {
1879 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1880 {
1881 if (TREE_CODE (fld) != FIELD_DECL)
1882 continue;
1883
1884 pos = int_bit_position (fld);
1885 size = tree_to_uhwi (DECL_SIZE (fld));
1886 if (pos <= offset && (pos + size) > offset)
1887 break;
1888 }
1889
1890 if (!fld)
1891 goto give_up;
1892
1893 type = TYPE_MAIN_VARIANT (TREE_TYPE (fld));
1894 offset -= pos;
1895 /* An artificial field (DECL_ARTIFICIAL) represents a base sub-object. */
1896 if (!DECL_ARTIFICIAL (fld))
1897 {
1898 if (!speculative)
1899 {
1900 context->outer_type = type;
1901 context->offset = offset;
1902 /* As soon as we see a field containing the type,
1903 we know we are not looking for derivations. */
1904 context->maybe_derived_type = false;
1905 }
1906 else
1907 {
1908 context->speculative_outer_type = type;
1909 context->speculative_offset = offset;
1910 context->speculative_maybe_derived_type = false;
1911 }
1912 }
1913 }
1914 else if (TREE_CODE (type) == ARRAY_TYPE)
1915 {
1916 tree subtype = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1917
1918 /* Give up if we don't know the array element size. */
1919 if (!tree_fits_shwi_p (TYPE_SIZE (subtype))
1920 || tree_to_shwi (TYPE_SIZE (subtype)) <= 0)
1921 goto give_up;
1922 offset = offset % tree_to_shwi (TYPE_SIZE (subtype));
1923 type = subtype;
1924 if (!speculative)
1925 {
1926 context->outer_type = type;
1927 context->offset = offset;
1928 context->maybe_derived_type = false;
1929 }
1930 else
1931 {
1932 context->speculative_outer_type = type;
1933 context->speculative_offset = offset;
1934 context->speculative_maybe_derived_type = false;
1935 }
1936 }
1937 /* Give up on anything else. */
1938 else
1939 goto give_up;
1940 }
1941
1942 /* If we failed to find the subtype we are looking for, give up and fall
1943 back to the most generic query. */
1944 give_up:
1945 clear_speculation (context);
1946 if (valid)
1947 return true;
1948 context->outer_type = expected_type;
1949 context->offset = 0;
1950 context->maybe_derived_type = true;
1951 context->maybe_in_construction = true;
1952 /* A POD can be changed to an instance of a polymorphic type by
1953 placement new.  Here we play it safe and assume that any
1954 non-polymorphic type is POD. */
1955 if ((TREE_CODE (type) != RECORD_TYPE
1956 || !TYPE_BINFO (type)
1957 || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
1958 && (!TYPE_SIZE (type)
1959 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1960 || (offset + tree_to_uhwi (TYPE_SIZE (expected_type)) <=
1961 tree_to_uhwi (TYPE_SIZE (type)))))
1962 return true;
1963 return false;
1964 }
1965
1966 /* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET. */
1967
1968 static bool
1969 contains_type_p (tree outer_type, HOST_WIDE_INT offset,
1970 tree otr_type)
1971 {
1972 ipa_polymorphic_call_context context = {offset, 0,
1973 TYPE_MAIN_VARIANT (outer_type),
1974 NULL, false, true, false};
1975 return get_class_context (&context, otr_type);
1976 }
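
/* Editorial example (a sketch, not part of the original sources): with

     class A { int a; class B b; };	// B is polymorphic

   contains_type_p (A, sizeof (int) * BITS_PER_UNIT, B) is true because
   get_class_context walks the fields of A and finds B at that offset.
   Offsets here are measured in bits.  On failure, get_class_context falls
   back to the most generic context, so the answer may stay conservatively
   true (placement new can legally change the type of POD storage) unless
   EXPECTED_TYPE provably cannot fit at the given offset.  */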
1977
1978 /* Look up the base of BINFO that has virtual table VTABLE at OFFSET. */
1979
1980 static tree
1981 subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
1982 tree vtable)
1983 {
1984 tree v = BINFO_VTABLE (binfo);
1985 int i;
1986 tree base_binfo;
1987 unsigned HOST_WIDE_INT this_offset;
1988
1989 if (v)
1990 {
1991 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
1992 gcc_unreachable ();
1993
1994 if (offset == this_offset
1995 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
1996 return binfo;
1997 }
1998
1999 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
2000 if (polymorphic_type_binfo_p (base_binfo))
2001 {
2002 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
2003 if (base_binfo)
2004 return base_binfo;
2005 }
2006 return NULL;
2007 }
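
/* Editorial example (a sketch, not part of the original sources): for

     struct A { virtual void f (); };
     struct B { virtual void g (); };
     struct C : A, B { };

   the virtual table _ZTV1C consists of fragments for the A and B bases;
   asking for _ZTV1C at the offset where B's fragment starts returns the
   base BINFO of B within C, whose BINFO_VTABLE can then be used for
   method lookup in that base.  */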
2008
2009 /* T is a known constant value of a virtual table pointer.
2010 Store the virtual table to V and its offset to OFFSET.
2011 Return false if T does not look like a virtual table reference. */
2012
2013 bool
2014 vtable_pointer_value_to_vtable (const_tree t, tree *v,
2015 unsigned HOST_WIDE_INT *offset)
2016 {
2017 /* We expect &MEM[(void *)&virtual_table + 16B].
2018 We obtain the object's BINFO from the context of the virtual table.
2019 This one contains a pointer to the virtual table represented via
2020 POINTER_PLUS_EXPR.  Verify that this pointer matches what
2021 we propagated through.
2022 
2023 In the case of virtual inheritance, the virtual tables may
2024 be nested, i.e. the offset may be different from 16 and we may
2025 need to dive into the type representation. */
2026 if (TREE_CODE (t) == ADDR_EXPR
2027 && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
2028 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
2029 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
2030 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
2031 == VAR_DECL)
2032 && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
2033 (TREE_OPERAND (t, 0), 0), 0)))
2034 {
2035 *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
2036 *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
2037 return true;
2038 }
2039
2040 /* An alternative representation, used by the C++ frontend, is
2041 POINTER_PLUS_EXPR.  We need to handle it when T comes from a static
2042 variable initializer or a BINFO. */
2043 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
2044 {
2045 *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
2046 t = TREE_OPERAND (t, 0);
2047 }
2048 else
2049 *offset = 0;
2050
2051 if (TREE_CODE (t) != ADDR_EXPR)
2052 return false;
2053 *v = TREE_OPERAND (t, 0);
2054 return true;
2055 }
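
/* Editorial example (a sketch, not part of the original sources): for a
   class A whose virtual table is _ZTV1A, both accepted shapes

     &MEM[(void *) &_ZTV1A + 16B]	// gimple form, matched first
     &_ZTV1A p+ 16			// POINTER_PLUS_EXPR form

   make this function store _ZTV1A to *V and 16 to *OFFSET.  An offset
   other than that of the primary vtable slot typically indicates
   virtual inheritance.  */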
2056
2057 /* T is a known constant value of a virtual table pointer.  Return the
2058 BINFO of the instance type. */
2059
2060 tree
2061 vtable_pointer_value_to_binfo (const_tree t)
2062 {
2063 tree vtable;
2064 unsigned HOST_WIDE_INT offset;
2065
2066 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
2067 return NULL_TREE;
2068
2069 /* FIXME: for stores of construction vtables we return NULL,
2070 because we do not have BINFO for those.  Eventually we should fix
2071 our representation to allow this case to be handled, too.
2072 In the case we see a store of a BINFO, we may however assume
2073 that standard folding will be able to cope with it. */
2074 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
2075 offset, vtable);
2076 }
2077
2078 /* We know that the instance is stored in a variable or parameter
2079 (not dynamically allocated) and we want to disprove the fact
2080 that it may be in construction at the invocation of CALL.
2081 
2082 For the variable to be in construction we actually need to
2083 be in the constructor of the corresponding global variable or
2084 the inline stack of CALL must contain the constructor.
2085 Check this condition.  This check works safely only before
2086 IPA passes, because inline stacks may become out of date
2087 later. */
2088
2089 bool
2090 decl_maybe_in_construction_p (tree base, tree outer_type,
2091 gimple call, tree function)
2092 {
2093 outer_type = TYPE_MAIN_VARIANT (outer_type);
2094 gcc_assert (DECL_P (base));
2095
2096 /* After inlining, code unification optimizations may invalidate
2097 inline stacks. Also we need to give up on global variables after
2098 IPA, because addresses of these may have been propagated to their
2099 constructors. */
2100 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
2101 return true;
2102
2103 /* Pure functions cannot change the dynamic type;
2104 that requires writing to memory. */
2105 if (!auto_var_in_fn_p (base, function)
2106 && flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
2107 return false;
2108
2109 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
2110 block = BLOCK_SUPERCONTEXT (block))
2111 if (BLOCK_ABSTRACT_ORIGIN (block)
2112 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
2113 {
2114 tree fn = BLOCK_ABSTRACT_ORIGIN (block);
2115
2116 if (TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
2117 || (!DECL_CXX_CONSTRUCTOR_P (fn)
2118 && !DECL_CXX_DESTRUCTOR_P (fn)))
2119 {
2120 /* Watch for clones where we constant propagated the first
2121 argument (pointer to the instance). */
2122 fn = DECL_ABSTRACT_ORIGIN (fn);
2123 if (!fn
2124 || !is_global_var (base)
2125 || TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
2126 || (!DECL_CXX_CONSTRUCTOR_P (fn)
2127 && !DECL_CXX_DESTRUCTOR_P (fn)))
2128 continue;
2129 }
2130 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
2131 continue;
2132
2133 /* FIXME: this can go away once we have ODR type equivalence at
2134 the LTO level. */
2135 if (in_lto_p && !polymorphic_type_binfo_p (TYPE_BINFO (outer_type)))
2136 return true;
2137 tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (fn)));
2138 if (types_same_for_odr (type, outer_type))
2139 return true;
2140 }
2141
2142 if (TREE_CODE (base) == VAR_DECL
2143 && is_global_var (base))
2144 {
2145 if (TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
2146 || (!DECL_CXX_CONSTRUCTOR_P (function)
2147 && !DECL_CXX_DESTRUCTOR_P (function)))
2148 {
2149 if (!DECL_ABSTRACT_ORIGIN (function))
2150 return false;
2151 /* Watch for clones where we constant propagated the first
2152 argument (pointer to the instance). */
2153 function = DECL_ABSTRACT_ORIGIN (function);
2154 if (!function
2155 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
2156 || (!DECL_CXX_CONSTRUCTOR_P (function)
2157 && !DECL_CXX_DESTRUCTOR_P (function)))
2158 return false;
2159 }
2160 /* FIXME: this can go away once we have ODR type equivalence at
2161 the LTO level. */
2162 if (in_lto_p && !polymorphic_type_binfo_p (TYPE_BINFO (outer_type)))
2163 return true;
2164 tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (function)));
2165 if (types_same_for_odr (type, outer_type))
2166 return true;
2167 }
2168 return false;
2169 }
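
/* Editorial example (a sketch, not part of the original sources): for

     struct A { virtual void f (); };
     static A a;			// statically allocated instance

   a virtual call through 'a' issued from A::A (or from code inlined from
   it, which is what the BLOCK_ABSTRACT_ORIGIN walk above detects) may
   observe the object in construction, while the same call made from an
   unrelated function with no constructor or destructor on its inline
   stack is proved not to be in construction.  */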
2170
2171 /* Produce a polymorphic call context for a call to a method of an instance
2172 that is located within BASE (assumed to be a decl) at OFFSET. */
2173
2174 static void
2175 get_polymorphic_call_info_for_decl (ipa_polymorphic_call_context *context,
2176 tree base, HOST_WIDE_INT offset)
2177 {
2178 gcc_assert (DECL_P (base));
2179
2180 context->outer_type = TYPE_MAIN_VARIANT (TREE_TYPE (base));
2181 context->offset = offset;
2182 context->speculative_outer_type = NULL;
2183 context->speculative_offset = 0;
2184 context->speculative_maybe_derived_type = true;
2185 /* Make the very conservative assumption that all objects
2186 may be in construction.
2187 TODO: ipa-prop already contains code to tell this better;
2188 merge it later. */
2189 context->maybe_in_construction = true;
2190 context->maybe_derived_type = false;
2191 }
2192
2193 /* CST is an invariant (address of a decl); try to get a meaningful
2194 polymorphic call context for a polymorphic call to a method of an
2195 instance of OTR_TYPE that is located at OFFSET of this invariant.
2196 Return FALSE if nothing meaningful can be found. */
2197
2198 bool
2199 get_polymorphic_call_info_from_invariant (ipa_polymorphic_call_context *context,
2200 tree cst,
2201 tree otr_type,
2202 HOST_WIDE_INT offset)
2203 {
2204 HOST_WIDE_INT offset2, size, max_size;
2205 tree base;
2206
2207 if (TREE_CODE (cst) != ADDR_EXPR)
2208 return false;
2209
2210 cst = TREE_OPERAND (cst, 0);
2211 base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
2212 if (!DECL_P (base) || max_size == -1 || max_size != size)
2213 return false;
2214
2215 /* Only type-inconsistent programs can have an otr_type that is
2216 not part of the outer type. */
2217 if (!contains_type_p (TREE_TYPE (base), offset, otr_type))
2218 return false;
2219
2220 get_polymorphic_call_info_for_decl (context, base, offset);
2221 return true;
2222 }
2223
2224 /* See if OP is an SSA name initialized as a copy or by a single
2225 assignment.  If so, walk the SSA graph up. */
2226
2227 static tree
2228 walk_ssa_copies (tree op)
2229 {
2230 STRIP_NOPS (op);
2231 while (TREE_CODE (op) == SSA_NAME
2232 && !SSA_NAME_IS_DEFAULT_DEF (op)
2233 && SSA_NAME_DEF_STMT (op)
2234 && gimple_assign_single_p (SSA_NAME_DEF_STMT (op)))
2235 {
2236 if (gimple_assign_load_p (SSA_NAME_DEF_STMT (op)))
2237 return op;
2238 op = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op));
2239 STRIP_NOPS (op);
2240 }
2241 return op;
2242 }
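
/* Editorial example (a sketch, not part of the original sources): for

     b_2 = a_1;
     c_3 = b_2;
     d_4 = MEM[(struct A **) p_5];

   walk_ssa_copies (c_3) returns a_1, following the copies up to the
   default definition, while walk_ssa_copies (d_4) returns d_4 itself,
   because its defining statement is a load and the walk deliberately
   stops there.  */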
2243
2244 /* Given the call REF in FNDECL, determine the class of the polymorphic
2245 call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
2246 CALL is an optional argument giving the actual statement (usually a call)
2247 where the context is used.
2248 Return a pointer to the object described by the context, or a declaration
2249 if we found the instance to be stored in static storage. */
2250
2251 tree
2252 get_polymorphic_call_info (tree fndecl,
2253 tree ref,
2254 tree *otr_type,
2255 HOST_WIDE_INT *otr_token,
2256 ipa_polymorphic_call_context *context,
2257 gimple call)
2258 {
2259 tree base_pointer;
2260 *otr_type = obj_type_ref_class (ref);
2261 *otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));
2262
2263 /* Set up basic info in case we find nothing interesting in the analysis. */
2264 context->speculative_outer_type = NULL;
2265 context->speculative_offset = 0;
2266 context->speculative_maybe_derived_type = true;
2267 context->outer_type = TYPE_MAIN_VARIANT (*otr_type);
2268 context->offset = 0;
2269 base_pointer = OBJ_TYPE_REF_OBJECT (ref);
2270 context->maybe_derived_type = true;
2271 context->maybe_in_construction = true;
2272
2273 /* Walk SSA for outer object. */
2274 do
2275 {
2276 base_pointer = walk_ssa_copies (base_pointer);
2277 if (TREE_CODE (base_pointer) == ADDR_EXPR)
2278 {
2279 HOST_WIDE_INT size, max_size;
2280 HOST_WIDE_INT offset2;
2281 tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
2282 &offset2, &size, &max_size);
2283
2284 /* If this is a varying address, punt. */
2285 if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
2286 && max_size != -1
2287 && max_size == size)
2288 {
2289 /* We found a dereference of a pointer.  The type of the pointer
2290 and the MEM_REF is meaningless, but we can look further. */
2291 if (TREE_CODE (base) == MEM_REF)
2292 {
2293 base_pointer = TREE_OPERAND (base, 0);
2294 context->offset
2295 += offset2 + mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
2296 context->outer_type = NULL;
2297 }
2298 /* We found the base object.  In this case the outer_type
2299 is known. */
2300 else if (DECL_P (base))
2301 {
2302 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (base)));
2303
2304 /* Only type-inconsistent programs can have an otr_type that is
2305 not part of the outer type. */
2306 if (!contains_type_p (TREE_TYPE (base),
2307 context->offset + offset2, *otr_type))
2308 {
2309 /* Use OTR_TOKEN = INT_MAX as a marker of probably type-inconsistent
2310 code sequences; we arrange for such calls to be redirected to
2311 builtin_unreachable later. */
2312 *otr_token = INT_MAX;
2313 return base_pointer;
2314 }
2315 get_polymorphic_call_info_for_decl (context, base,
2316 context->offset + offset2);
2317 if (context->maybe_in_construction && call)
2318 context->maybe_in_construction
2319 = decl_maybe_in_construction_p (base,
2320 context->outer_type,
2321 call,
2322 fndecl);
2323 return base;
2324 }
2325 else
2326 break;
2327 }
2328 else
2329 break;
2330 }
2331 else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
2332 && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
2333 {
2334 context->offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
2335 * BITS_PER_UNIT;
2336 base_pointer = TREE_OPERAND (base_pointer, 0);
2337 }
2338 else
2339 break;
2340 }
2341 while (true);
2342
2343 /* Try to determine the type of the outer object. */
2344 if (TREE_CODE (base_pointer) == SSA_NAME
2345 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
2346 && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
2347 {
2348 /* See if the parameter is the THIS pointer of a method. */
2349 if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
2350 && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
2351 {
2352 context->outer_type
2353 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
2354 gcc_assert (TREE_CODE (context->outer_type) == RECORD_TYPE);
2355
2356 /* Dynamic casting has possibly upcasted the type
2357 in the hierarchy.  In this case the outer type is less
2358 informative than the inner type and we should forget
2359 about it. */
2360 if (!contains_type_p (context->outer_type, context->offset,
2361 *otr_type))
2362 {
2363 context->outer_type = NULL;
2364 return base_pointer;
2365 }
2366
2367 /* If the function is constructor or destructor, then
2368 the type is possibly in construction, but we know
2369 it is not derived type. */
2370 if (DECL_CXX_CONSTRUCTOR_P (fndecl)
2371 || DECL_CXX_DESTRUCTOR_P (fndecl))
2372 {
2373 context->maybe_in_construction = true;
2374 context->maybe_derived_type = false;
2375 }
2376 else
2377 {
2378 context->maybe_derived_type = true;
2379 context->maybe_in_construction = false;
2380 }
2381 return base_pointer;
2382 }
2383 /* Non-PODs passed by value are really passed by invisible
2384 reference. In this case we also know the type of the
2385 object. */
2386 if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
2387 {
2388 context->outer_type
2389 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
2390 gcc_assert (!POINTER_TYPE_P (context->outer_type));
2391 /* Only type-inconsistent programs can have an otr_type that is
2392 not part of the outer type. */
2393 if (!contains_type_p (context->outer_type, context->offset,
2394 *otr_type))
2395 {
2396 /* Use OTR_TOKEN = INT_MAX as a marker of probably type-inconsistent
2397 code sequences; we arrange for such calls to be redirected to
2398 builtin_unreachable later. */
2399 *otr_token = INT_MAX;
2400 return base_pointer;
2401 }
2402 context->maybe_derived_type = false;
2403 context->maybe_in_construction = false;
2404 return base_pointer;
2405 }
2406 }
2407
2408 tree base_type = TREE_TYPE (base_pointer);
2409
2410 if (TREE_CODE (base_pointer) == SSA_NAME
2411 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
2412 && TREE_CODE (SSA_NAME_VAR (base_pointer)) != PARM_DECL)
2413 {
2414 /* Use OTR_TOKEN = INT_MAX as a marker of probably type-inconsistent
2415 code sequences; we arrange for such calls to be redirected to
2416 builtin_unreachable later. */
2417 *otr_token = INT_MAX;
2418 return base_pointer;
2419 }
2420 if (TREE_CODE (base_pointer) == SSA_NAME
2421 && SSA_NAME_DEF_STMT (base_pointer)
2422 && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
2423 base_type = TREE_TYPE (gimple_assign_rhs1
2424 (SSA_NAME_DEF_STMT (base_pointer)));
2425
2426 if (POINTER_TYPE_P (base_type)
2427 && contains_type_p (TYPE_MAIN_VARIANT (TREE_TYPE (base_type)),
2428 context->offset,
2429 *otr_type))
2430 {
2431 context->speculative_outer_type = TYPE_MAIN_VARIANT
2432 (TREE_TYPE (base_type));
2433 context->speculative_offset = context->offset;
2434 context->speculative_maybe_derived_type = true;
2435 }
2436 /* TODO: There are multiple ways to derive a type.  For instance,
2437 BASE_POINTER may be passed to a constructor call prior to our reference.
2438 We do not perform this kind of flow-sensitive analysis yet. */
2439 return base_pointer;
2440 }
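
/* Editorial example (a sketch, not part of the original sources): in

     void A::foo () { this->bar (); }	// bar is virtual

   the SSA walk above arrives at the default definition of the 'this'
   parameter, so CONTEXT->OUTER_TYPE becomes A at offset 0 with
   MAYBE_DERIVED_TYPE set; if A::foo were a constructor or destructor,
   MAYBE_IN_CONSTRUCTION would be set instead and derived types would be
   excluded.  */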
2441
2442 /* Structure to be passed in between detect_type_change and
2443 check_stmt_for_type_change. */
2444
2445 struct type_change_info
2446 {
2447 /* Offset into the object where there is the virtual method pointer we are
2448 looking for. */
2449 HOST_WIDE_INT offset;
2450 /* The declaration or SSA_NAME pointer of the base that we are checking for
2451 type change. */
2452 tree instance;
2453 /* The reference to virtual table pointer used. */
2454 tree vtbl_ptr_ref;
2455 tree otr_type;
2456 /* If we actually can tell the type that the object has changed to, it is
2457 stored in this field. Otherwise it remains NULL_TREE. */
2458 tree known_current_type;
2459 HOST_WIDE_INT known_current_offset;
2460
2461 /* Set to true if dynamic type change has been detected. */
2462 bool type_maybe_changed;
2463 /* Set to true if multiple types have been encountered. known_current_type
2464 must be disregarded in that case. */
2465 bool multiple_types_encountered;
2466 /* Set to true if we possibly missed some dynamic type changes and we should
2467 consider the set to be speculative. */
2468 bool speculative;
2469 bool seen_unanalyzed_store;
2470 };
2471
2472 /* Return true if STMT is not a call and may modify a virtual method table
2473 pointer.  We take advantage of the fact that vtable stores must appear
2474 within constructor and destructor functions. */
2475
2476 bool
2477 noncall_stmt_may_be_vtbl_ptr_store (gimple stmt)
2478 {
2479 if (is_gimple_assign (stmt))
2480 {
2481 tree lhs = gimple_assign_lhs (stmt);
2482
2483 if (gimple_clobber_p (stmt))
2484 return false;
2485 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
2486 {
2487 if (flag_strict_aliasing
2488 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
2489 return false;
2490
2491 if (TREE_CODE (lhs) == COMPONENT_REF
2492 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
2493 return false;
2494 /* In the future we might want to use get_base_ref_and_offset to find
2495 if there is a field corresponding to the offset and if so, proceed
2496 almost like if it was a component ref. */
2497 }
2498 }
2499
2500 /* Code unification may mess with inline stacks. */
2501 if (cfun->after_inlining)
2502 return true;
2503
2504 /* Walk the inline stack and watch out for ctors/dtors.
2505 TODO: Maybe we can require the store to appear in toplevel
2506 block of CTOR/DTOR. */
2507 for (tree block = gimple_block (stmt); block && TREE_CODE (block) == BLOCK;
2508 block = BLOCK_SUPERCONTEXT (block))
2509 if (BLOCK_ABSTRACT_ORIGIN (block)
2510 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
2511 {
2512 tree fn = BLOCK_ABSTRACT_ORIGIN (block);
2513
2514 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
2515 return false;
2516 return (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
2517 && (DECL_CXX_CONSTRUCTOR_P (fn)
2518 || DECL_CXX_DESTRUCTOR_P (fn)));
2519 }
2520 return (TREE_CODE (TREE_TYPE (current_function_decl)) == METHOD_TYPE
2521 && (DECL_CXX_CONSTRUCTOR_P (current_function_decl)
2522 || DECL_CXX_DESTRUCTOR_P (current_function_decl)));
2523 }
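
/* Editorial example (a sketch, not part of the original sources): the
   kind of store accepted above is the inlined vtable initialization

     MEM[(struct A *) this_1]._vptr.A = &_ZTV1A + 16;

   seen with a constructor or destructor on the inline stack; a store to
   an ordinary scalar field is typically rejected by the COMPONENT_REF
   and DECL_VIRTUAL_P tests (or by the type-based test when strict
   aliasing is enabled).  */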
2524
2525 /* If STMT can be proved to be an assignment to the virtual method table
2526 pointer of TCI->INSTANCE and the type associated with the new table
2527 can be identified, return the type.  Otherwise return NULL_TREE. */
2528
2529 static tree
2530 extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci,
2531 HOST_WIDE_INT *type_offset)
2532 {
2533 HOST_WIDE_INT offset, size, max_size;
2534 tree lhs, rhs, base, binfo;
2535
2536 if (!gimple_assign_single_p (stmt))
2537 return NULL_TREE;
2538
2539 lhs = gimple_assign_lhs (stmt);
2540 rhs = gimple_assign_rhs1 (stmt);
2541 if (TREE_CODE (lhs) != COMPONENT_REF
2542 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
2543 return NULL_TREE;
2544
2545 if (tci->vtbl_ptr_ref && operand_equal_p (lhs, tci->vtbl_ptr_ref, 0))
2546 ;
2547 else
2548 {
2549 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
2550 if (offset != tci->offset
2551 || size != POINTER_SIZE
2552 || max_size != POINTER_SIZE)
2553 return NULL_TREE;
2554 if (DECL_P (tci->instance))
2555 {
2556 if (base != tci->instance)
2557 return NULL_TREE;
2558 }
2559 else if (TREE_CODE (base) == MEM_REF)
2560 {
2561 if (!operand_equal_p (tci->instance, TREE_OPERAND (base, 0), 0)
2562 || !integer_zerop (TREE_OPERAND (base, 1)))
2563 return NULL_TREE;
2564 }
2565 else if (!operand_equal_p (tci->instance, base, 0)
2566 || tci->offset)
2567 return NULL_TREE;
2568 }
2569
2570 binfo = vtable_pointer_value_to_binfo (rhs);
2571
2572 if (!binfo)
2573 return NULL;
2574 *type_offset = tree_to_shwi (BINFO_OFFSET (binfo)) * BITS_PER_UNIT;
2575 if (TYPE_BINFO (BINFO_TYPE (binfo)) == binfo)
2576 return BINFO_TYPE (binfo);
2577
2578 /* TODO: Figure out the type containing BINFO. */
2579 return NULL;
2580 }
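
/* Editorial example (a sketch, not part of the original sources): for
   the constructor store

     this_1->_vptr.A = &_ZTV1A + 16;

   the LHS passes the DECL_VIRTUAL_P COMPONENT_REF test, the RHS decodes
   through vtable_pointer_value_to_binfo to the main BINFO of A, and the
   function returns A with *TYPE_OFFSET derived from BINFO_OFFSET.  */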
2581
2582 /* Record the dynamic type change of TCI to TYPE at OFFSET. */
2583
2584 void
2585 record_known_type (struct type_change_info *tci, tree type, HOST_WIDE_INT offset)
2586 {
2587 if (dump_file)
2588 {
2589 if (type)
2590 {
2591 fprintf (dump_file, " Recording type: ");
2592 print_generic_expr (dump_file, type, TDF_SLIM);
2593 fprintf (dump_file, " at offset %i\n", (int)offset);
2594 }
2595 else
2596 fprintf (dump_file, " Recording unknown type\n");
2597 }
2598 if (tci->type_maybe_changed
2599 && (type != tci->known_current_type
2600 || offset != tci->known_current_offset))
2601 tci->multiple_types_encountered = true;
2602 tci->known_current_type = type;
2603 tci->known_current_offset = offset;
2604 tci->type_maybe_changed = true;
2605 }
2606
2607 /* Callback of walk_aliased_vdefs and a helper function for
2608 detect_type_change to check whether a particular statement may modify
2609 the virtual table pointer, and if possible also determine the new type of
2610 the (sub-)object. It stores its result into DATA, which points to a
2611 type_change_info structure. */
2612
2613 static bool
2614 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
2615 {
2616 gimple stmt = SSA_NAME_DEF_STMT (vdef);
2617 struct type_change_info *tci = (struct type_change_info *) data;
2618 tree fn;
2619
2620 /* If we already gave up, just terminate the rest of the walk. */
2621 if (tci->multiple_types_encountered)
2622 return true;
2623
2624 if (is_gimple_call (stmt))
2625 {
2626 if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
2627 return false;
2628
2629 /* Check for a constructor call. */
2630 if ((fn = gimple_call_fndecl (stmt)) != NULL_TREE
2631 && DECL_CXX_CONSTRUCTOR_P (fn)
2632 && TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
2633 && gimple_call_num_args (stmt))
2634 {
2635 tree op = walk_ssa_copies (gimple_call_arg (stmt, 0));
2636 tree type = method_class_type (TREE_TYPE (fn));
2637 HOST_WIDE_INT offset = 0, size, max_size;
2638
2639 if (dump_file)
2640 {
2641 fprintf (dump_file, " Checking constructor call: ");
2642 print_gimple_stmt (dump_file, stmt, 0, 0);
2643 }
2644
2645 /* See if the THIS parameter seems like the instance pointer. */
2646 if (TREE_CODE (op) == ADDR_EXPR)
2647 {
2648 op = get_ref_base_and_extent (TREE_OPERAND (op, 0),
2649 &offset, &size, &max_size);
2650 if (size != max_size || max_size == -1)
2651 {
2652 tci->speculative = true;
2653 return false;
2654 }
2655 if (op && TREE_CODE (op) == MEM_REF)
2656 {
2657 if (!tree_fits_shwi_p (TREE_OPERAND (op, 1)))
2658 {
2659 tci->speculative = true;
2660 return false;
2661 }
2662 offset += tree_to_shwi (TREE_OPERAND (op, 1))
2663 * BITS_PER_UNIT;
2664 op = TREE_OPERAND (op, 0);
2665 }
2666 else
2667 {
2668 tci->speculative = true;
2669 return false;
2670 }
2671 op = walk_ssa_copies (op);
2672 }
2673 if (operand_equal_p (op, tci->instance, 0)
2674 && TYPE_SIZE (type)
2675 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2676 && tree_fits_shwi_p (TYPE_SIZE (type))
2677 && tree_to_shwi (TYPE_SIZE (type)) + offset > tci->offset)
2678 {
2679 record_known_type (tci, type, tci->offset - offset);
2680 return true;
2681 }
2682 }
2683 /* Calls may possibly change the dynamic type by placement new.  Assume
2684 this will not happen, but make the result speculative only. */
2685 if (dump_file)
2686 {
2687 fprintf (dump_file, " Function call may change dynamic type:");
2688 print_gimple_stmt (dump_file, stmt, 0, 0);
2689 }
2690 tci->speculative = true;
2691 return false;
2692 }
2693 /* Check for inlined virtual table store. */
2694 else if (noncall_stmt_may_be_vtbl_ptr_store (stmt))
2695 {
2696 tree type;
2697 HOST_WIDE_INT offset = 0;
2698 if (dump_file)
2699 {
2700 fprintf (dump_file, " Checking vtbl store: ");
2701 print_gimple_stmt (dump_file, stmt, 0, 0);
2702 }
2703
2704 type = extr_type_from_vtbl_ptr_store (stmt, tci, &offset);
2705 gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
2706 if (!type)
2707 {
2708 if (dump_file)
2709 fprintf (dump_file, " Unanalyzed store may change type.\n");
2710 tci->seen_unanalyzed_store = true;
2711 tci->speculative = true;
2712 }
2713 else
2714 record_known_type (tci, type, offset);
2715 return true;
2716 }
2717 else
2718 return false;
2719 }
2720
2721 /* CONTEXT is a polymorphic call context obtained from
2722 get_polymorphic_call_info.  OTR_OBJECT is a pointer to the instance
2723 returned by OBJ_TYPE_REF_OBJECT.  INSTANCE is a pointer to the outer
2724 instance as returned by get_polymorphic_call_info.  To avoid creation of
2725 temporary expressions, INSTANCE may also be a declaration if
2726 get_polymorphic_call_info found the value to be in static storage.
2727 
2728 If the type of the instance is not fully determined
2729 (either OUTER_TYPE is unknown or MAYBE_IN_CONSTRUCTION/MAYBE_DERIVED_TYPE
2730 is set), try to walk memory writes and find the actual construction of the
2731 instance.
2732 
2733 We do not include this analysis in the context analysis itself, because
2734 it needs memory SSA to be fully built and the walk may be expensive.
2735 So it is not suitable for use within fold_stmt and similar uses. */
2736
2737 bool
2738 get_dynamic_type (tree instance,
2739 ipa_polymorphic_call_context *context,
2740 tree otr_object,
2741 tree otr_type,
2742 gimple call)
2743 {
2744 struct type_change_info tci;
2745 ao_ref ao;
2746 bool function_entry_reached = false;
2747 tree instance_ref = NULL;
2748 gimple stmt = call;
2749
2750 if (!context->maybe_in_construction && !context->maybe_derived_type)
2751 return false;
2752
2753 /* We need to obtain a reference to the virtual table pointer.  It is better
2754 to look it up in the code rather than build our own.  This requires a bit
2755 of pattern matching, but we end up verifying that what we found is
2756 correct.
2757 
2758 What we pattern match is:
2759 
2760 tmp = instance->_vptr.A; // vtbl ptr load
2761 tmp2 = tmp[otr_token]; // vtable lookup
2762 OBJ_TYPE_REF(tmp2;instance->0) (instance);
2763 
2764 We want to start the alias oracle walk from the vtbl pointer load,
2765 but we may not be able to identify it, for example, when PRE moved the
2766 load around. */
2767
2768 if (gimple_code (call) == GIMPLE_CALL)
2769 {
2770 tree ref = gimple_call_fn (call);
2771 HOST_WIDE_INT offset2, size, max_size;
2772
2773 if (TREE_CODE (ref) == OBJ_TYPE_REF)
2774 {
2775 ref = OBJ_TYPE_REF_EXPR (ref);
2776 ref = walk_ssa_copies (ref);
2777
2778 /* Check if definition looks like vtable lookup. */
2779 if (TREE_CODE (ref) == SSA_NAME
2780 && !SSA_NAME_IS_DEFAULT_DEF (ref)
2781 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref))
2782 && TREE_CODE (gimple_assign_rhs1
2783 (SSA_NAME_DEF_STMT (ref))) == MEM_REF)
2784 {
2785 ref = get_base_address
2786 (TREE_OPERAND (gimple_assign_rhs1
2787 (SSA_NAME_DEF_STMT (ref)), 0));
2788 ref = walk_ssa_copies (ref);
2789 /* Find base address of the lookup and see if it looks like
2790 vptr load. */
2791 if (TREE_CODE (ref) == SSA_NAME
2792 && !SSA_NAME_IS_DEFAULT_DEF (ref)
2793 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref)))
2794 {
2795 tree ref_exp = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (ref));
2796 tree base_ref = get_ref_base_and_extent
2797 (ref_exp, &offset2, &size, &max_size);
2798
2799 /* Finally verify that what we found looks like a read from OTR_OBJECT
2800 or from INSTANCE at offset OFFSET. */
2801 if (base_ref
2802 && ((TREE_CODE (base_ref) == MEM_REF
2803 && ((offset2 == context->offset
2804 && TREE_OPERAND (base_ref, 0) == instance)
2805 || (!offset2 && TREE_OPERAND (base_ref, 0) == otr_object)))
2806 || (DECL_P (instance) && base_ref == instance
2807 && offset2 == context->offset)))
2808 {
2809 stmt = SSA_NAME_DEF_STMT (ref);
2810 instance_ref = ref_exp;
2811 }
2812 }
2813 }
2814 }
2815 }
2816
2817 /* If we failed to look up the reference in the code, build our own. */
2818 if (!instance_ref)
2819 {
2820 /* If the statement in question does not use memory, we can't tell
2821 anything. */
2822 if (!gimple_vuse (stmt))
2823 return false;
2824 ao_ref_init_from_ptr_and_size (&ao, otr_object, NULL);
2825 }
2826 else
2827 /* Otherwise use the real reference. */
2828 ao_ref_init (&ao, instance_ref);
2829
2830 /* We look for vtbl pointer read. */
2831 ao.size = POINTER_SIZE;
2832 ao.max_size = ao.size;
2833 ao.ref_alias_set
2834 = get_deref_alias_set (TREE_TYPE (BINFO_VTABLE (TYPE_BINFO (otr_type))));
2835
2836 if (dump_file)
2837 {
2838 fprintf (dump_file, "Determining dynamic type for call: ");
2839 print_gimple_stmt (dump_file, call, 0, 0);
2840 fprintf (dump_file, " Starting walk at: ");
2841 print_gimple_stmt (dump_file, stmt, 0, 0);
2842 fprintf (dump_file, " instance pointer: ");
2843 print_generic_expr (dump_file, otr_object, TDF_SLIM);
2844 fprintf (dump_file, " Outer instance pointer: ");
2845 print_generic_expr (dump_file, instance, TDF_SLIM);
2846 fprintf (dump_file, " offset: %i (bits)", (int)context->offset);
2847 fprintf (dump_file, " vtbl reference: ");
2848 print_generic_expr (dump_file, instance_ref, TDF_SLIM);
2849 fprintf (dump_file, "\n");
2850 }
2851
2852 tci.offset = context->offset;
2853 tci.instance = instance;
2854 tci.vtbl_ptr_ref = instance_ref;
2855 gcc_assert (TREE_CODE (instance) != MEM_REF);
2856 tci.known_current_type = NULL_TREE;
2857 tci.known_current_offset = 0;
2858 tci.otr_type = otr_type;
2859 tci.type_maybe_changed = false;
2860 tci.multiple_types_encountered = false;
2861 tci.speculative = false;
2862 tci.seen_unanalyzed_store = false;
2863
2864 walk_aliased_vdefs (&ao, gimple_vuse (stmt), check_stmt_for_type_change,
2865 &tci, NULL, &function_entry_reached);
2866
2867 /* If we did not find any type changing statements, we may still drop
2868 the maybe_in_construction flag if the context already has an outer type.
2869
2870 Here we make special assumptions about both constructors and
2871 destructors which are all the functions that are allowed to alter the
2872 VMT pointers. It assumes that destructors begin with assignment into
2873 all VMT pointers and that constructors essentially work in the
2874 following way:
2875
2876 1) The very first thing they do is that they call constructors of
2877 ancestor sub-objects that have them.
2878
2879 2) Then the VMT pointers of this type and of all its ancestors are set
2880 to new values corresponding to the type of the constructor.
2881
2882 3) Only afterwards, other stuff such as constructor of member
2883 sub-objects and the code written by the user is run. Only this may
2884 include calling virtual functions, directly or indirectly.
2885
2886 4) Placement new cannot be used to change the type of non-POD statically
2887 allocated variables.
2888
2889 There is no way to call a constructor of an ancestor sub-object in any
2890 other way.
2891
2892 This means that we do not have to care whether constructors get the
2893 correct type information because they will always change it (in fact,
2894 if we define the type to be given by the VMT pointer, it is undefined).
2895
2896 The most important fact to derive from the above is that if, for some
2897 statement in the section 3, we try to detect whether the dynamic type
2898 has changed, we can safely ignore all calls as we examine the function
2899 body backwards until we reach statements in section 2 because these
2900 calls cannot be ancestor constructors or destructors (if the input is
2901 not bogus) and so do not change the dynamic type (this holds true only
2902 for automatically allocated objects but at the moment we devirtualize
2903 only these). We then must detect that statements in section 2 change
2904 the dynamic type and can try to derive the new type. That is enough
2905 and we can stop, we will never see the calls into constructors of
2906 sub-objects in this code.
2907
2908 Therefore if the static outer type was found (context->outer_type)
2909 we can safely ignore tci.speculative that is set on calls and give up
2910 only if there was a dynamic type store that may affect the given
2911 variable (seen_unanalyzed_store). */
2912
2913 if (!tci.type_maybe_changed)
2914 {
2915 if (!context->outer_type || tci.seen_unanalyzed_store)
2916 return false;
2917 if (context->maybe_in_construction)
2918 context->maybe_in_construction = false;
2919 if (dump_file)
2920 fprintf (dump_file, " No dynamic type change found.\n");
2921 return true;
2922 }
2923
2924 if (tci.known_current_type
2925 && !function_entry_reached
2926 && !tci.multiple_types_encountered)
2927 {
2928 if (!tci.speculative
2929 /* Again, for instances located in static storage we are interested only
2930 in constructor stores. */
2931 || (context->outer_type
2932 && !tci.seen_unanalyzed_store
2933 && context->offset == tci.offset
2934 && types_same_for_odr (tci.known_current_type,
2935 context->outer_type)))
2936 {
2937 context->outer_type = tci.known_current_type;
2938 context->offset = tci.known_current_offset;
2939 context->maybe_in_construction = false;
2940 context->maybe_derived_type = false;
2941 if (dump_file)
2942 fprintf (dump_file, " Determined dynamic type.\n");
2943 }
2944 else if (!context->speculative_outer_type
2945 || context->speculative_maybe_derived_type)
2946 {
2947 context->speculative_outer_type = tci.known_current_type;
2948 context->speculative_offset = tci.known_current_offset;
2949 context->speculative_maybe_derived_type = false;
2950 if (dump_file)
2951 fprintf (dump_file, " Determined speculative dynamic type.\n");
2952 }
2953 }
2954 else if (dump_file)
2955 fprintf (dump_file, " Found multiple types.\n");
2956
2957 return true;
2958 }
2959
2960 /* Walk the bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
2961 Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
2962 and insert them into NODES.
2963 
2964 MATCHED_VTABLES and INSERTED are used to avoid duplicate work. */
2965
2966 static void
2967 record_targets_from_bases (tree otr_type,
2968 HOST_WIDE_INT otr_token,
2969 tree outer_type,
2970 HOST_WIDE_INT offset,
2971 vec <cgraph_node *> &nodes,
2972 hash_set<tree> *inserted,
2973 hash_set<tree> *matched_vtables,
2974 bool *completep)
2975 {
2976 while (true)
2977 {
2978 HOST_WIDE_INT pos, size;
2979 tree base_binfo;
2980 tree fld;
2981
2982 if (types_same_for_odr (outer_type, otr_type))
2983 return;
2984
2985 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
2986 {
2987 if (TREE_CODE (fld) != FIELD_DECL)
2988 continue;
2989
2990 pos = int_bit_position (fld);
2991 size = tree_to_shwi (DECL_SIZE (fld));
2992 if (pos <= offset && (pos + size) > offset
2993 /* Do not get confused by zero sized bases. */
2994 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
2995 break;
2996 }
2997 /* Within a class type we should always find a corresponding field. */
2998 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
2999
3000 /* Non-base types should have been stripped by outer_class_type. */
3001 gcc_assert (DECL_ARTIFICIAL (fld));
3002
3003 outer_type = TREE_TYPE (fld);
3004 offset -= pos;
3005
3006 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
3007 offset, otr_type);
3008 if (!base_binfo)
3009 {
3010 gcc_assert (odr_violation_reported);
3011 return;
3012 }
3014 if (!matched_vtables->add (BINFO_VTABLE (base_binfo)))
3015 {
3016 bool can_refer;
3017 tree target = gimple_get_virt_method_for_binfo (otr_token,
3018 base_binfo,
3019 &can_refer);
3020 if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
3021 maybe_record_node (nodes, target, inserted, can_refer, completep);
3023 }
3024 }
3025 }
3026
3027 /* When a virtual table is removed, we may need to flush the cache. */
3028
3029 static void
3030 devirt_variable_node_removal_hook (varpool_node *n,
3031 void *d ATTRIBUTE_UNUSED)
3032 {
3033 if (cached_polymorphic_call_targets
3034 && DECL_VIRTUAL_P (n->decl)
3035 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
3036 free_polymorphic_call_targets_hash ();
3037 }
3038
3039 /* Records how many calls would benefit from a given type being final. */
3040
3041 struct odr_type_warn_count
3042 {
3043 tree type;
3044 int count;
3045 gcov_type dyn_count;
3046 };
3047
3048 /* Records how many calls would benefit from a given method being final. */
3049
3050 struct decl_warn_count
3051 {
3052 tree decl;
3053 int count;
3054 gcov_type dyn_count;
3055 };
3056
3057 /* Information about type and decl warnings. */
3058
3059 struct final_warning_record
3060 {
3061 gcov_type dyn_count;
3062 vec<odr_type_warn_count> type_warnings;
3063 hash_map<tree, decl_warn_count> decl_warnings;
3064 };
3065 struct final_warning_record *final_warning_records;
3066
3067 /* Return a vector containing possible targets of a polymorphic call of type
3068 OTR_TYPE calling method OTR_TOKEN within type OTR_OUTER_TYPE at OFFSET.
3069 If INCLUDE_BASES is true, also walk base types of OUTER_TYPE containing
3070 OTR_TYPE and include their virtual methods.  This is useful for types
3071 possibly in construction or destruction, where the virtual table may
3072 temporarily change to one of the base types.  INCLUDE_DERIVED_TYPES makes
3073 us walk the inheritance graph for all derivations.
3074 
3075 OTR_TOKEN == INT_MAX is used to mark calls that are provably
3076 undefined and should be redirected to unreachable.
3077 
3078 If COMPLETEP is non-NULL, store true if the list is complete.
3079 CACHE_TOKEN (if non-NULL) will be set to a unique ID of the entry
3080 in the target cache.  If the user needs to visit every target list
3081 just once, it can memoize them.
3082 
3083 SPECULATIVE_TARGETSP, if non-NULL, receives the number of targets that
3084 are speculatively likely.  These include targets specified by the
3085 speculative part of the polymorphic call context and exclude all targets
3086 for classes in construction.
3087 
3088 The returned vector is placed into the cache.  It is NOT the caller's
3089 responsibility to free it.  The vector can be freed on a cgraph_remove_node
3090 call if the particular node is a virtual function present in the cache. */
3091
3092 vec <cgraph_node *>
3093 possible_polymorphic_call_targets (tree otr_type,
3094 HOST_WIDE_INT otr_token,
3095 ipa_polymorphic_call_context context,
3096 bool *completep,
3097 void **cache_token,
3098 int *speculative_targetsp)
3099 {
3100 static struct cgraph_node_hook_list *node_removal_hook_holder;
3101 vec <cgraph_node *> nodes = vNULL;
3102 vec <tree> bases_to_consider = vNULL;
3103 odr_type type, outer_type;
3104 polymorphic_call_target_d key;
3105 polymorphic_call_target_d **slot;
3106 unsigned int i;
3107 tree binfo, target;
3108 bool complete;
3109 bool can_refer;
3110 bool skipped = false;
3111
3112 otr_type = TYPE_MAIN_VARIANT (otr_type);
3113
3114 /* If ODR is not initialized, return an empty incomplete list. */
3115 if (!odr_hash)
3116 {
3117 if (completep)
3118 *completep = false;
3119 if (cache_token)
3120 *cache_token = NULL;
3121 if (speculative_targetsp)
3122 *speculative_targetsp = 0;
3123 return nodes;
3124 }
3125
3126 /* If we hit a type inconsistency, just return an empty list of targets. */
3127 if (otr_token == INT_MAX)
3128 {
3129 if (completep)
3130 *completep = true;
3131 if (cache_token)
3132 *cache_token = NULL;
3133 if (speculative_targetsp)
3134 *speculative_targetsp = 0;
3135 return nodes;
3136 }
3137
3138 /* Do not bother to compute speculative info when the user does not ask for it. */
3139 if (!speculative_targetsp || !context.speculative_outer_type)
3140 clear_speculation (&context);
3141
3142 type = get_odr_type (otr_type, true);
3143
3144 /* Recording type variants would waste the results cache. */
3145 gcc_assert (!context.outer_type
3146 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
3147
3148 /* Lookup the outer class type we want to walk. */
3149 if ((context.outer_type || context.speculative_outer_type)
3150 && !get_class_context (&context, otr_type))
3151 {
3152 if (completep)
3153 *completep = false;
3154 if (cache_token)
3155 *cache_token = NULL;
3156 if (speculative_targetsp)
3157 *speculative_targetsp = 0;
3158 return nodes;
3159 }
3160
3161 /* Check that get_class_context kept the main variant. */
3162 gcc_assert (!context.outer_type
3163 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
3164
3165 /* We canonicalize our query, so we do not need extra hashtable entries. */
3166
3167 /* Without an outer type, we have no use for the offset.  Just do the
3168 basic search from the inner type. */
3169 if (!context.outer_type)
3170 {
3171 context.outer_type = otr_type;
3172 context.offset = 0;
3173 }
3174 /* We need to update our hierarchy if the type does not exist. */
3175 outer_type = get_odr_type (context.outer_type, true);
3176 /* If the type is final, there are no derivations. */
3177 if (TYPE_FINAL_P (outer_type->type))
3178 context.maybe_derived_type = false;
3179
3180 /* Initialize query cache. */
3181 if (!cached_polymorphic_call_targets)
3182 {
3183 cached_polymorphic_call_targets = new hash_set<cgraph_node *>;
3184 polymorphic_call_target_hash
3185 = new polymorphic_call_target_hash_type (23);
3186 if (!node_removal_hook_holder)
3187 {
3188 node_removal_hook_holder =
3189 cgraph_add_node_removal_hook (&devirt_node_removal_hook, NULL);
3190 varpool_add_node_removal_hook (&devirt_variable_node_removal_hook,
3191 NULL);
3192 }
3193 }
3194
3195 /* Lookup cached answer. */
3196 key.type = type;
3197 key.otr_token = otr_token;
3198 key.context = context;
3199 slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
3200 if (cache_token)
3201 *cache_token = (void *)*slot;
3202 if (*slot)
3203 {
3204 if (completep)
3205 *completep = (*slot)->complete;
3206 if (speculative_targetsp)
3207 *speculative_targetsp = (*slot)->speculative_targets;
3208 if ((*slot)->type_warning && final_warning_records)
3209 {
3210 final_warning_records->type_warnings[(*slot)->type_warning - 1].count++;
3211 final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
3212 += final_warning_records->dyn_count;
3213 }
3214 if ((*slot)->decl_warning && final_warning_records)
3215 {
3216 struct decl_warn_count *c =
3217 final_warning_records->decl_warnings.get ((*slot)->decl_warning);
3218 c->count++;
3219 c->dyn_count += final_warning_records->dyn_count;
3220 }
3221 return (*slot)->targets;
3222 }
3223
3224 complete = true;
3225
3226 /* Do actual search. */
3227 timevar_push (TV_IPA_VIRTUAL_CALL);
3228 *slot = XCNEW (polymorphic_call_target_d);
3229 if (cache_token)
3230 *cache_token = (void *)*slot;
3231 (*slot)->type = type;
3232 (*slot)->otr_token = otr_token;
3233 (*slot)->context = context;
3234 (*slot)->speculative_targets = 0;
3235
3236 hash_set<tree> inserted;
3237 hash_set<tree> matched_vtables;
3238
3239 /* First insert targets we speculatively identified as likely. */
3240 if (context.speculative_outer_type)
3241 {
3242 odr_type speculative_outer_type;
3243 bool speculation_complete = true;
3244
3245 /* First insert the target from the type itself and check if it may have derived types. */
3246 speculative_outer_type = get_odr_type (context.speculative_outer_type, true);
3247 if (TYPE_FINAL_P (speculative_outer_type->type))
3248 context.speculative_maybe_derived_type = false;
3249 binfo = get_binfo_at_offset (TYPE_BINFO (speculative_outer_type->type),
3250 context.speculative_offset, otr_type);
3251 if (binfo)
3252 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
3253 &can_refer);
3254 else
3255 target = NULL;
3256
3257 /* If we get a final method, we don't need
3258 to walk derivations. */
3259 if (target && DECL_FINAL_P (target))
3260 context.speculative_maybe_derived_type = false;
3261 if (type_possibly_instantiated_p (speculative_outer_type->type))
3262 maybe_record_node (nodes, target, &inserted, can_refer, &speculation_complete);
3263 if (binfo)
3264 matched_vtables.add (BINFO_VTABLE (binfo));
3265
3266
3267 /* Next, recursively walk all derived types. */
3268 if (context.speculative_maybe_derived_type)
3269 for (i = 0; i < speculative_outer_type->derived_types.length(); i++)
3270 possible_polymorphic_call_targets_1 (nodes, &inserted,
3271 &matched_vtables,
3272 otr_type,
3273 speculative_outer_type->derived_types[i],
3274 otr_token, speculative_outer_type->type,
3275 context.speculative_offset,
3276 &speculation_complete,
3277 bases_to_consider,
3278 false);
3279 (*slot)->speculative_targets = nodes.length();
3280 }
3281
3282 /* First see the virtual method of the type itself. */
3283 binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
3284 context.offset, otr_type);
3285 if (binfo)
3286 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
3287 &can_refer);
3288 else
3289 {
3290 gcc_assert (odr_violation_reported);
3291 target = NULL;
3292 }
3293
3294 /* Destructors are never called through construction virtual tables,
3295 because the type is always known. */
3296 if (target && DECL_CXX_DESTRUCTOR_P (target))
3297 context.maybe_in_construction = false;
3298
3299 if (target)
3300 {
3301 /* If we get a final method, we don't need
3302 to walk derivations. */
3303 if (DECL_FINAL_P (target))
3304 context.maybe_derived_type = false;
3305 }
3306
3307 /* If OUTER_TYPE is abstract, we know we are not seeing its instance. */
3308 if (type_possibly_instantiated_p (outer_type->type))
3309 maybe_record_node (nodes, target, &inserted, can_refer, &complete);
3310 else
3311 {
3312 skipped = true;
3313 gcc_assert (in_lto_p || context.maybe_derived_type);
3314 }
3315
3316 if (binfo)
3317 matched_vtables.add (BINFO_VTABLE (binfo));
3318
3319 /* Next, recursively walk all derived types. */
3320 if (context.maybe_derived_type)
3321 {
3322 for (i = 0; i < outer_type->derived_types.length(); i++)
3323 possible_polymorphic_call_targets_1 (nodes, &inserted,
3324 &matched_vtables,
3325 otr_type,
3326 outer_type->derived_types[i],
3327 otr_token, outer_type->type,
3328 context.offset, &complete,
3329 bases_to_consider,
3330 context.maybe_in_construction);
3331
3332 if (!outer_type->all_derivations_known)
3333 {
3334 if (final_warning_records)
3335 {
3336 if (complete
3337 && nodes.length () == 1
3338 && warn_suggest_final_types
3339 && !outer_type->derived_types.length ())
3340 {
3341 if (outer_type->id >= (int)final_warning_records->type_warnings.length ())
3342 final_warning_records->type_warnings.safe_grow_cleared
3343 (odr_types.length ());
3344 final_warning_records->type_warnings[outer_type->id].count++;
3345 final_warning_records->type_warnings[outer_type->id].dyn_count
3346 += final_warning_records->dyn_count;
3347 final_warning_records->type_warnings[outer_type->id].type
3348 = outer_type->type;
3349 (*slot)->type_warning = outer_type->id + 1;
3350 }
3351 if (complete
3352 && warn_suggest_final_methods
3353 && nodes.length () == 1
3354 && types_same_for_odr (DECL_CONTEXT (nodes[0]->decl),
3355 outer_type->type))
3356 {
3357 bool existed;
3358 struct decl_warn_count &c =
3359 final_warning_records->decl_warnings.get_or_insert
3360 (nodes[0]->decl, &existed);
3361
3362 if (existed)
3363 {
3364 c.count++;
3365 c.dyn_count += final_warning_records->dyn_count;
3366 }
3367 else
3368 {
3369 c.count = 1;
3370 c.dyn_count = final_warning_records->dyn_count;
3371 c.decl = nodes[0]->decl;
3372 }
3373 (*slot)->decl_warning = nodes[0]->decl;
3374 }
3375 }
3376 complete = false;
3377 }
3378 }
3379
3380 /* Finally walk bases, if asked to. */
3381 if (!(*slot)->speculative_targets)
3382 (*slot)->speculative_targets = nodes.length();
3383
3384 /* Destructors are never called through construction virtual tables,
3385 because the type is always known.  One of the entries may be
3386 cxa_pure_virtual, so check at least two of them. */
3387 if (context.maybe_in_construction)
3388 for (i = 0; i < MIN (nodes.length (), 2); i++)
3389 if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
3390 context.maybe_in_construction = false;
3391 if (context.maybe_in_construction)
3392 {
3393 if (type != outer_type
3394 && (!skipped
3395 || (context.maybe_derived_type
3396 && !type_all_derivations_known_p (outer_type->type))))
3397 record_targets_from_bases (otr_type, otr_token, outer_type->type,
3398 context.offset, nodes, &inserted,
3399 &matched_vtables, &complete);
3400 if (skipped)
3401 maybe_record_node (nodes, target, &inserted, can_refer, &complete);
3402 for (i = 0; i < bases_to_consider.length(); i++)
3403 maybe_record_node (nodes, bases_to_consider[i], &inserted, can_refer, &complete);
3404 }
3405 bases_to_consider.release();
3406
3407 (*slot)->targets = nodes;
3408 (*slot)->complete = complete;
3409 if (completep)
3410 *completep = complete;
3411 if (speculative_targetsp)
3412 *speculative_targetsp = (*slot)->speculative_targets;
3413
3414 timevar_pop (TV_IPA_VIRTUAL_CALL);
3415 return nodes;
3416 }
3417
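/* Traversal callback for the decl_warnings hash map: push a pointer to each
   record into VEC so the -Wsuggest-final-methods code below can sort and
   report them.  */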
3418 bool
3419 add_decl_warning (const tree &key ATTRIBUTE_UNUSED, const decl_warn_count &value,
3420 vec<const decl_warn_count*> *vec)
3421 {
3422 vec->safe_push (&value);
3423 return true;
3424 }
3425
3426 /* Dump all possible targets of a polymorphic call. */
3427
3428 void
3429 dump_possible_polymorphic_call_targets (FILE *f,
3430 tree otr_type,
3431 HOST_WIDE_INT otr_token,
3432 const ipa_polymorphic_call_context &ctx)
3433 {
3434 vec <cgraph_node *> targets;
3435 bool final;
3436 odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false);
3437 unsigned int i;
3438 int speculative;
3439
3440 if (!type)
3441 return;
3442 targets = possible_polymorphic_call_targets (otr_type, otr_token,
3443 ctx,
3444 &final, NULL, &speculative);
3445 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
3446 print_generic_expr (f, type->type, TDF_SLIM);
3447 fprintf (f, " token %i\n", (int)otr_token);
3448 if (ctx.outer_type || ctx.offset)
3449 {
3450 fprintf (f, " Contained in type:");
3451 print_generic_expr (f, ctx.outer_type, TDF_SLIM);
3452 fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n",
3453 ctx.offset);
3454 }
3455 if (ctx.speculative_outer_type)
3456 {
3457 fprintf (f, " Speculatively contained in type:");
3458 print_generic_expr (f, ctx.speculative_outer_type, TDF_SLIM);
3459 fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n",
3460 ctx.speculative_offset);
3461 }
3462
3463 fprintf (f, " %s%s%s%s\n ",
3464 final ? "This is a complete list." :
3465 "This is partial list; extra targets may be defined in other units.",
3466 ctx.maybe_in_construction ? " (base types included)" : "",
3467 ctx.maybe_derived_type ? " (derived types included)" : "",
3468 ctx.speculative_maybe_derived_type ? " (speculative derived types included)" : "");
3469 for (i = 0; i < targets.length (); i++)
3470 {
3471 char *name = NULL;
3472 if (i == (unsigned)speculative)
3473 fprintf (f, "\n Targets that are not likely:\n"
3474 " ");
3475 if (in_lto_p)
3476 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
3477 fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
3478 if (in_lto_p)
3479 free (name);
3480 if (!targets[i]->definition)
3481 fprintf (f, " (no definition%s)",
3482 DECL_DECLARED_INLINE_P (targets[i]->decl)
3483 ? " inline" : "");
3484 }
3485 fprintf (f, "\n\n");
3486 }
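/* The dump produced above looks roughly as follows (values illustrative):

     Targets of polymorphic call of type 5:struct A token 2
       This is a complete list. (derived types included)
       B::f/14 C::f/15

   Targets past the speculative_targets cutoff are listed separately under
   "Targets that are not likely".  */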
3487
3488
3489 /* Return true if N can possibly be a target of a polymorphic call of
3490 OTR_TYPE/OTR_TOKEN. */
3491
3492 bool
3493 possible_polymorphic_call_target_p (tree otr_type,
3494 HOST_WIDE_INT otr_token,
3495 const ipa_polymorphic_call_context &ctx,
3496 struct cgraph_node *n)
3497 {
3498 vec <cgraph_node *> targets;
3499 unsigned int i;
3500 enum built_in_function fcode;
3501 bool final;
3502
3503 if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
3504 && ((fcode = DECL_FUNCTION_CODE (n->decl))
3505 == BUILT_IN_UNREACHABLE
3506 || fcode == BUILT_IN_TRAP))
3507 return true;
3508
3509 if (!odr_hash)
3510 return true;
3511 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
3512 for (i = 0; i < targets.length (); i++)
3513 if (n->semantically_equivalent_p (targets[i]))
3514 return true;
3515
3516 /* At the moment we allow the middle end to dig out new external declarations
3517 as targets of polymorphic calls. */
3518 if (!final && !n->definition)
3519 return true;
3520 return false;
3521 }
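/* Note that the predicate above errs on the permissive side (unknown or
   incomplete target lists yield true), which suits callers that merely
   sanity-check a devirtualized call rather than rely on it for
   optimization.  */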
3522
3523
3524 /* After callgraph construction new external nodes may appear.
3525 Add them into the graph. */
3526
3527 void
3528 update_type_inheritance_graph (void)
3529 {
3530 struct cgraph_node *n;
3531
3532 if (!odr_hash)
3533 return;
3534 free_polymorphic_call_targets_hash ();
3535 timevar_push (TV_IPA_INHERITANCE);
3536 /* We reconstruct the graph starting from types of all methods seen in the
3537 unit. */
3538 FOR_EACH_FUNCTION (n)
3539 if (DECL_VIRTUAL_P (n->decl)
3540 && !n->definition
3541 && n->real_symbol_p ())
3542 get_odr_type (method_class_type (TYPE_MAIN_VARIANT (TREE_TYPE (n->decl))),
3543 true);
3544 timevar_pop (TV_IPA_INHERITANCE);
3545 }
3546
3547
3548 /* Return true if N looks like a likely target of a polymorphic call.
3549 Rule out cxa_pure_virtual, noreturns, functions declared cold and
3550 other obvious cases. */
3551
3552 bool
3553 likely_target_p (struct cgraph_node *n)
3554 {
3555 int flags;
3556 /* cxa_pure_virtual and similar things are not likely. */
3557 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
3558 return false;
3559 flags = flags_from_decl_or_type (n->decl);
3560 if (flags & ECF_NORETURN)
3561 return false;
3562 if (lookup_attribute ("cold",
3563 DECL_ATTRIBUTES (n->decl)))
3564 return false;
3565 if (n->frequency < NODE_FREQUENCY_NORMAL)
3566 return false;
3567 /* If no live virtual table refers to the target,
3568 the only way the target can be called is via an instance coming from another
3569 compilation unit; speculative devirtualization is built around the
3570 assumption that this won't happen. */
3571 if (!referenced_from_vtable_p (n))
3572 return false;
3573 return true;
3574 }
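/* For instance, __cxa_pure_virtual, which fills vtable slots of
   unimplemented pure virtual methods, is a plain function rather than a
   method, so the METHOD_TYPE check above already rejects it.  */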
3575
3576 /* Compare type warning records P1 and P2 and choose the one with the larger count;
3577 helper for qsort. */
3578
3579 int
3580 type_warning_cmp (const void *p1, const void *p2)
3581 {
3582 const odr_type_warn_count *t1 = (const odr_type_warn_count *)p1;
3583 const odr_type_warn_count *t2 = (const odr_type_warn_count *)p2;
3584
3585 if (t1->dyn_count < t2->dyn_count)
3586 return 1;
3587 if (t1->dyn_count > t2->dyn_count)
3588 return -1;
3589 return t2->count - t1->count;
3590 }
3591
3592 /* Compare decl warning records P1 and P2 and choose the one with the larger count;
3593 helper for qsort. */
3594
3595 int
3596 decl_warning_cmp (const void *p1, const void *p2)
3597 {
3598 const decl_warn_count *t1 = *(const decl_warn_count * const *)p1;
3599 const decl_warn_count *t2 = *(const decl_warn_count * const *)p2;
3600
3601 if (t1->dyn_count < t2->dyn_count)
3602 return 1;
3603 if (t1->dyn_count > t2->dyn_count)
3604 return -1;
3605 return t2->count - t1->count;
3606 }
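/* Both comparators sort by dyn_count (profile weight) in descending order,
   then by count in descending order, so the hottest candidates are
   reported first.  */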
3607
3608 /* The ipa-devirt pass.
3609 When a polymorphic call has only one likely target in the unit,
3610 turn it into a speculative call. */
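/* A rough illustration of the transformation on hypothetical code:

     void g (A *a)
     {
       a->f ();               // B::f is the only likely target
     }

   becomes, conceptually:

     void g (A *a)
     {
       void (*fn) (A *) = <load from a's vtable>;
       if (fn == B::f)
         B::f (a);            // direct call, now inlinable
       else
         fn (a);              // original indirect call
     }

   The speculative edge receives 8/10 of the profiled count and frequency;
   see cgraph_turn_edge_to_speculative below.  */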
3611
3612 static unsigned int
3613 ipa_devirt (void)
3614 {
3615 struct cgraph_node *n;
3616 hash_set<void *> bad_call_targets;
3617 struct cgraph_edge *e;
3618
3619 int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
3620 int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
3621 int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
3622
3623 /* We can output -Wsuggest-final-methods and -Wsuggest-final-types warnings.
3624 This is implemented by setting up final_warning_records that are updated
3625 by possible_polymorphic_call_targets.
3626 We need to clear the cache in this case to trigger recomputation of all
3627 entries. */
3628 if (warn_suggest_final_methods || warn_suggest_final_types)
3629 {
3630 final_warning_records = new (final_warning_record);
3631 final_warning_records->type_warnings = vNULL;
3632 final_warning_records->type_warnings.safe_grow_cleared (odr_types.length ());
3633 free_polymorphic_call_targets_hash ();
3634 }
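/* On a hypothetical example of what these warnings suggest:

     struct A { virtual void f (); };   // no derived types in the program
     void g (A *a) { a->f (); }         // polymorphic call

   declaring A (or A::f) final would make the target list of a->f ()
   complete with a single entry, enabling full devirtualization; the
   counters gathered in final_warning_records are reported after the main
   loop below.  */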
3635
3636 FOR_EACH_DEFINED_FUNCTION (n)
3637 {
3638 bool update = false;
3639 if (dump_file && n->indirect_calls)
3640 fprintf (dump_file, "\n\nProcesing function %s/%i\n",
3641 n->name (), n->order);
3642 for (e = n->indirect_calls; e; e = e->next_callee)
3643 if (e->indirect_info->polymorphic)
3644 {
3645 struct cgraph_node *likely_target = NULL;
3646 void *cache_token;
3647 bool final;
3648 int speculative_targets;
3649
3650 if (final_warning_records)
3651 final_warning_records->dyn_count = e->count;
3652
3653 vec <cgraph_node *> targets
3654 = possible_polymorphic_call_targets
3655 (e, &final, &cache_token, &speculative_targets);
3656 unsigned int i;
3657
3658 if (dump_file)
3659 dump_possible_polymorphic_call_targets
3660 (dump_file, e);
3661
3662 npolymorphic++;
3663
3664 if (!flag_devirtualize_speculatively)
3665 continue;
3666
3667 if (!cgraph_maybe_hot_edge_p (e))
3668 {
3669 if (dump_file)
3670 fprintf (dump_file, "Call is cold\n\n");
3671 ncold++;
3672 continue;
3673 }
3674 if (e->speculative)
3675 {
3676 if (dump_file)
3677 fprintf (dump_file, "Call is aready speculated\n\n");
3678 nspeculated++;
3679
3680 /* When dumping, see if we agree with the speculation. */
3681 if (!dump_file)
3682 continue;
3683 }
3684 if (bad_call_targets.contains (cache_token))
3685 {
3686 if (dump_file)
3687 fprintf (dump_file, "Target list is known to be useless\n\n");
3688 nmultiple++;
3689 continue;
3690 }
3691 for (i = 0; i < targets.length (); i++)
3692 if (likely_target_p (targets[i]))
3693 {
3694 if (likely_target)
3695 {
3696 if (i < (unsigned) speculative_targets)
3697 {
3698 likely_target = NULL;
3699 if (dump_file)
3700 fprintf (dump_file, "More than one likely target\n\n");
3701 nmultiple++;
3702 }
3703 break;
3704 }
3705 likely_target = targets[i];
3706 }
3707 if (!likely_target)
3708 {
3709 bad_call_targets.add (cache_token);
3710 continue;
3711 }
3712 /* This is reached only when dumping; check if we agree or disagree
3713 with the speculation. */
3714 if (e->speculative)
3715 {
3716 struct cgraph_edge *e2;
3717 struct ipa_ref *ref;
3718 cgraph_speculative_call_info (e, e2, e, ref);
3719 if (e2->callee->ultimate_alias_target ()
3720 == likely_target->ultimate_alias_target ())
3721 {
3722 fprintf (dump_file, "We agree with speculation\n\n");
3723 nok++;
3724 }
3725 else
3726 {
3727 fprintf (dump_file, "We disagree with speculation\n\n");
3728 nwrong++;
3729 }
3730 continue;
3731 }
3732 if (!likely_target->definition)
3733 {
3734 if (dump_file)
3735 fprintf (dump_file, "Target is not an definition\n\n");
3736 nnotdefined++;
3737 continue;
3738 }
3739 /* Do not introduce new references to external symbols. While we
3740 could handle these just fine, it is common for programs to be
3741 built incorrectly with headers defining methods they are never
3742 actually linked with. */
3743 if (DECL_EXTERNAL (likely_target->decl))
3744 {
3745 if (dump_file)
3746 fprintf (dump_file, "Target is external\n\n");
3747 nexternal++;
3748 continue;
3749 }
3750 /* Don't use an implicitly-declared destructor (c++/58678). */
3751 struct cgraph_node *non_thunk_target
3752 = likely_target->function_symbol ();
3753 if (DECL_ARTIFICIAL (non_thunk_target->decl)
3754 && DECL_COMDAT (non_thunk_target->decl))
3755 {
3756 if (dump_file)
3757 fprintf (dump_file, "Target is artificial\n\n");
3758 nartificial++;
3759 continue;
3760 }
3761 if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
3762 && likely_target->can_be_discarded_p ())
3763 {
3764 if (dump_file)
3765 fprintf (dump_file, "Target is overwritable\n\n");
3766 noverwritable++;
3767 continue;
3768 }
3769 else if (dbg_cnt (devirt))
3770 {
3771 if (dump_enabled_p ())
3772 {
3773 location_t locus = gimple_location_safe (e->call_stmt);
3774 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
3775 "speculatively devirtualizing call in %s/%i to %s/%i\n",
3776 n->name (), n->order,
3777 likely_target->name (),
3778 likely_target->order);
3779 }
3780 if (!likely_target->can_be_discarded_p ())
3781 {
3782 cgraph_node *alias;
3783 alias = dyn_cast<cgraph_node *> (likely_target->noninterposable_alias ());
3784 if (alias)
3785 likely_target = alias;
3786 }
3787 nconverted++;
3788 update = true;
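/* 8/10 of the profiled count and frequency go to the new speculative
   direct edge; the remainder stays with the indirect fallback.  */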
3789 cgraph_turn_edge_to_speculative
3790 (e, likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
3791 }
3792 }
3793 if (update)
3794 inline_update_overall_summary (n);
3795 }
3796 if (warn_suggest_final_methods || warn_suggest_final_types)
3797 {
3798 if (warn_suggest_final_types)
3799 {
3800 final_warning_records->type_warnings.qsort (type_warning_cmp);
3801 for (unsigned int i = 0;
3802 i < final_warning_records->type_warnings.length (); i++)
3803 if (final_warning_records->type_warnings[i].count)
3804 {
3805 tree type = final_warning_records->type_warnings[i].type;
3806 warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
3807 OPT_Wsuggest_final_types,
3808 "Declaring type %qD final "
3809 "would enable devirtualization of %i calls",
3810 type,
3811 final_warning_records->type_warnings[i].count);
3812 }
3813 }
3814
3815 if (warn_suggest_final_methods)
3816 {
3817 vec<const decl_warn_count*> decl_warnings_vec = vNULL;
3818
3819 final_warning_records->decl_warnings.traverse
3820 <vec<const decl_warn_count *> *, add_decl_warning> (&decl_warnings_vec);
3821 decl_warnings_vec.qsort (decl_warning_cmp);
3822 for (unsigned int i = 0; i < decl_warnings_vec.length (); i++)
3823 {
3824 tree decl = decl_warnings_vec[i]->decl;
3825 int count = decl_warnings_vec[i]->count;
3826
3827 if (DECL_CXX_DESTRUCTOR_P (decl))
3828 warning_at (DECL_SOURCE_LOCATION (decl),
3829 OPT_Wsuggest_final_methods,
3830 "Declaring virtual destructor of %qD final "
3831 "would enable devirtualization of %i calls",
3832 DECL_CONTEXT (decl), count);
3833 else
3834 warning_at (DECL_SOURCE_LOCATION (decl),
3835 OPT_Wsuggest_final_methods,
3836 "Declaring method %qD final "
3837 "would enable devirtualization of %i calls",
3838 decl, count);
3839 }
3840 }
3841
3842 delete (final_warning_records);
3843 final_warning_records = 0;
3844 }
3845
3846 if (dump_file)
3847 fprintf (dump_file,
3848 "%i polymorphic calls, %i devirtualized,"
3849 " %i speculatively devirtualized, %i cold\n"
3850 "%i have multiple targets, %i overwritable,"
3851 " %i already speculated (%i agree, %i disagree),"
3852 " %i external, %i not defined, %i artificial\n",
3853 npolymorphic, ndevirtualized, nconverted, ncold,
3854 nmultiple, noverwritable, nspeculated, nok, nwrong,
3855 nexternal, nnotdefined, nartificial);
3856 return ndevirtualized ? TODO_remove_functions : 0;
3857 }
3858
3859 namespace {
3860
3861 const pass_data pass_data_ipa_devirt =
3862 {
3863 IPA_PASS, /* type */
3864 "devirt", /* name */
3865 OPTGROUP_NONE, /* optinfo_flags */
3866 TV_IPA_DEVIRT, /* tv_id */
3867 0, /* properties_required */
3868 0, /* properties_provided */
3869 0, /* properties_destroyed */
3870 0, /* todo_flags_start */
3871 ( TODO_dump_symtab ), /* todo_flags_finish */
3872 };
3873
3874 class pass_ipa_devirt : public ipa_opt_pass_d
3875 {
3876 public:
3877 pass_ipa_devirt (gcc::context *ctxt)
3878 : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
3879 NULL, /* generate_summary */
3880 NULL, /* write_summary */
3881 NULL, /* read_summary */
3882 NULL, /* write_optimization_summary */
3883 NULL, /* read_optimization_summary */
3884 NULL, /* stmt_fixup */
3885 0, /* function_transform_todo_flags_start */
3886 NULL, /* function_transform */
3887 NULL) /* variable_transform */
3888 {}
3889
3890 /* opt_pass methods: */
3891 virtual bool gate (function *)
3892 {
3893 return (flag_devirtualize
3894 && (flag_devirtualize_speculatively
3895 || (warn_suggest_final_methods
3896 || warn_suggest_final_types))
3897 && optimize);
3898 }
3899
3900 virtual unsigned int execute (function *) { return ipa_devirt (); }
3901
3902 }; // class pass_ipa_devirt
3903
3904 } // anon namespace
3905
3906 ipa_opt_pass_d *
3907 make_pass_ipa_devirt (gcc::context *ctxt)
3908 {
3909 return new pass_ipa_devirt (ctxt);
3910 }
3911
3912 #include "gt-ipa-devirt.h"