/* Basic IPA utilities for type inheritance graph construction and
   devirtualization.
   Copyright (C) 2013-2014 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Brief vocabulary:
     ODR = One Definition Rule
        In short, the ODR states that:
        1 In any translation unit, a template, type, function, or object can
          have no more than one definition.  Some of these can have any number
          of declarations.  A definition provides an instance.
        2 In the entire program, an object or non-inline function cannot have
          more than one definition; if an object or function is used, it must
          have exactly one definition.  You can declare an object or function
          that is never used, in which case you don't have to provide
          a definition.  In no event can there be more than one definition.
        3 Some things, like types, templates, and extern inline functions, can
          be defined in more than one translation unit.  For a given entity,
          each definition must be the same.  Non-extern objects and functions
          in different translation units are different entities, even if their
          names and types are the same.

     OTR = OBJ_TYPE_REF
       This is the Gimple representation of type information of a polymorphic call.
       It contains two parameters:
         otr_type is a type of class whose method is called.
         otr_token is the index into virtual table where address is taken.

     BINFO
       This is the type inheritance information attached to each tree
       RECORD_TYPE by the C++ front end.  It provides information about base
       types and virtual tables.

       BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
       BINFO also links to its type by BINFO_TYPE and to the virtual table by
       BINFO_VTABLE.

       Base types of a given type are enumerated by BINFO_BASE_BINFO
       vector.  Members of this vector are not BINFOs associated
       with a base type.  Rather they are new copies of BINFOs
       (base BINFOs).  Their virtual tables may differ from
       virtual table of the base type.  Also BINFO_OFFSET specifies
       offset of the base within the type.

       In the case of single inheritance, the virtual table is shared
       and BINFO_VTABLE of base BINFO is NULL.  In the case of multiple
       inheritance the individual virtual tables are pointed to by
       BINFO_VTABLE of base binfos (which differ from BINFO_VTABLE of
       the binfo associated to the base type).

       BINFO lookup for a given base type and offset can be done by
       get_binfo_at_offset.  It returns proper BINFO whose virtual table
       can be used for lookup of virtual methods associated with the
       base type.

     token
       This is an index of virtual method in virtual table associated
       to the type defining it.  Token can be looked up from OBJ_TYPE_REF
       or from DECL_VINDEX of a given virtual table.

     polymorphic (indirect) call
       This is the callgraph representation of a virtual method call.  Every
       polymorphic call contains otr_type and otr_token taken from the
       original OBJ_TYPE_REF at callgraph construction time.

   What we do here:

   build_type_inheritance_graph triggers a construction of the type inheritance
   graph.

     We reconstruct it based on types of methods we see in the unit.
     This means that the graph is not complete.  Types with no methods are not
     inserted into the graph.  Also types without virtual methods are not
     represented at all, though it may be easy to add this.

     The inheritance graph is represented as follows:

       Vertices are structures odr_type.  Every odr_type may correspond
       to one or more tree type nodes that are equivalent by ODR rule.
       (the multiple type nodes appear only with linktime optimization)

       Edges are represented by odr_type->base and odr_type->derived_types.
       At the moment we do not track offsets of types for multiple inheritance.
       Adding this is easy.

  possible_polymorphic_call_targets returns, given the parameters found in
  an indirect polymorphic edge, all possible polymorphic call targets of the
  call.

  pass_ipa_devirt performs simple speculative devirtualization.
*/
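
/* Illustrative sketch (an assumed example, not part of GCC itself): for
   the C++ hierarchy

     struct A { virtual int f (); };
     struct B : A { virtual int f (); };

     int call (A *a) { return a->f (); }

   the call "a->f ()" is represented in GIMPLE by an OBJ_TYPE_REF whose
   otr_type is A and whose otr_token is 0, the index of "f" in A's virtual
   table.  Assuming both A::f and B::f are defined in the unit,
   possible_polymorphic_call_targets for this call would enumerate
   {A::f, B::f}; if B were the only possibly instantiated type,
   pass_ipa_devirt could speculatively devirtualize the call to B::f.  */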

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "print-tree.h"
#include "calls.h"
#include "cgraph.h"
#include "expr.h"
#include "tree-pass.h"
#include "hash-set.h"
#include "target.h"
#include "hash-table.h"
#include "inchash.h"
#include "tree-pretty-print.h"
#include "ipa-utils.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "ipa-inline.h"
#include "diagnostic.h"
#include "tree-dfa.h"
#include "demangle.h"
#include "dbgcnt.h"
#include "gimple-pretty-print.h"
#include "stor-layout.h"
#include "intl.h"
#include "hash-map.h"

static bool odr_types_equivalent_p (tree, tree, bool, bool *,
				    hash_set<tree> *);

static bool odr_violation_reported = false;


/* Pointer set of all call targets appearing in the cache.  */
static hash_set<cgraph_node *> *cached_polymorphic_call_targets;

/* The node of type inheritance graph.  For each type unique in
   One Definition Rule (ODR) sense, we produce one node linking all
   main variants of types equivalent to it, bases and derived types.  */

struct GTY(()) odr_type_d
{
  /* leader type.  */
  tree type;
  /* All bases; built only for main variants of types.  */
  vec<odr_type> GTY((skip)) bases;
  /* All derived types with virtual methods seen in unit;
     built only for main variants of types.  */
  vec<odr_type> GTY((skip)) derived_types;

  /* All equivalent types, if more than one.  */
  vec<tree, va_gc> *types;
  /* Set of all equivalent types, if NON-NULL.  */
  hash_set<tree> * GTY((skip)) types_set;

  /* Unique ID indexing the type in odr_types array.  */
  int id;
  /* Is it in anonymous namespace? */
  bool anonymous_namespace;
  /* Do we know about all derivations of given type? */
  bool all_derivations_known;
  /* Did we report ODR violation here? */
  bool odr_violated;
};

static bool contains_type_p (tree, HOST_WIDE_INT, tree);


/* Return true if BINFO corresponds to a type with virtual methods.

   Every type has several BINFOs.  One is the BINFO associated by the type
   while other represents bases of derived types.  The BINFOs representing
   bases do not have BINFO_VTABLE pointer set when this is the single
   inheritance (because vtables are shared).  Look up the BINFO of type
   and check presence of its vtable.  */

static inline bool
polymorphic_type_binfo_p (tree binfo)
{
  /* See if BINFO's type has a virtual table associated with it.  */
  return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo)));
}

/* Return TRUE if all derived types of T are known and thus
   we may consider the walk of derived type complete.

   This is typically true only for final anonymous namespace types and types
   defined within functions (that may be COMDAT and thus shared across units,
   but with the same set of derived types).  */

static bool
type_all_derivations_known_p (tree t)
{
  if (TYPE_FINAL_P (t))
    return true;
  if (flag_ltrans)
    return false;
  if (type_in_anonymous_namespace_p (t))
    return true;
  return (decl_function_context (TYPE_NAME (t)) != NULL);
}
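
/* Illustrative sketch of the cases above (assumed example, not GCC code):
   all derivations are known for

     struct F final { virtual void f (); };          // TYPE_FINAL_P
     namespace { struct N { virtual void g (); }; }  // anonymous namespace
     void h () { struct L { virtual void l (); }; }  // function-local type

   but not for an ordinary exported class, since another translation unit
   may still derive from it.  */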

/* Return TRUE if type's constructors are all visible.  */

static bool
type_all_ctors_visible_p (tree t)
{
  return !flag_ltrans
	 && cgraph_state >= CGRAPH_STATE_CONSTRUCTION
	 /* We cannot always use type_all_derivations_known_p.
	    For function local types we must assume case where
	    the function is COMDAT and shared in between units.

	    TODO: These cases are quite easy to get, but we need
	    to keep track of C++ privatizing via -Wno-weak
	    as well as the IPA privatizing.  */
	 && type_in_anonymous_namespace_p (t);
}

/* Return TRUE if type may have instance.  */

static bool
type_possibly_instantiated_p (tree t)
{
  tree vtable;
  varpool_node *vnode;

  /* TODO: Add abstract types here.  */
  if (!type_all_ctors_visible_p (t))
    return true;

  vtable = BINFO_VTABLE (TYPE_BINFO (t));
  if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
    vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
  vnode = varpool_node::get (vtable);
  return vnode && vnode->definition;
}

/* One Definition Rule hashtable helpers.  */

struct odr_hasher
{
  typedef odr_type_d value_type;
  typedef union tree_node compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Return type that was declared with T's name so that T is a
   qualified variant of it.  */

static inline tree
main_odr_variant (const_tree t)
{
  if (TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
    return TREE_TYPE (TYPE_NAME (t));
  /* Unnamed types and non-C++ produced types can be compared by variants.  */
  else
    return TYPE_MAIN_VARIANT (t);
}

/* Produce hash based on type name.  */

static hashval_t
hash_type_name (tree t)
{
  gcc_checking_assert (main_odr_variant (t) == t);

  /* If not in LTO, all main variants are unique, so we can do
     pointer hash.  */
  if (!in_lto_p)
    return htab_hash_pointer (t);

  /* Anonymous types are unique.  */
  if (type_in_anonymous_namespace_p (t))
    return htab_hash_pointer (t);

  /* For polymorphic types, we can simply hash the virtual table.  */
  if (TREE_CODE (t) == RECORD_TYPE
      && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
    {
      tree v = BINFO_VTABLE (TYPE_BINFO (t));
      hashval_t hash = 0;

      if (TREE_CODE (v) == POINTER_PLUS_EXPR)
	{
	  hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
	  v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
	}

      v = DECL_ASSEMBLER_NAME (v);
      hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
      return hash;
    }

  /* Rest is not implemented yet.  */
  gcc_unreachable ();
}
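
/* Illustrative note (a sketch of the expected tree shape, assumed rather
   than quoted from the front end): for a polymorphic "struct A" the C++
   front end records BINFO_VTABLE as a POINTER_PLUS_EXPR of roughly the
   form

     &_ZTV1A + 16

   i.e. the address of the mangled vtable symbol plus the offset past the
   offset-to-top and RTTI slots.  hash_type_name above therefore combines
   the constant offset with a pointer hash of the vtable's assembler name,
   and types_same_for_odr below compares the same two components.  */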

/* Return the computed hashcode for ODR_TYPE.  */

inline hashval_t
odr_hasher::hash (const value_type *odr_type)
{
  return hash_type_name (odr_type->type);
}

/* For languages with One Definition Rule, work out if
   types are the same based on their name.

   This is non-trivial for LTO where minor differences in
   the type representation may have prevented type merging
   to merge two copies of otherwise equivalent type.

   Until we start streaming mangled type names, this function works
   only for polymorphic types.  */

bool
types_same_for_odr (const_tree type1, const_tree type2)
{
  gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));

  type1 = main_odr_variant (type1);
  type2 = main_odr_variant (type2);

  if (type1 == type2)
    return true;

  if (!in_lto_p)
    return false;

  /* Check for anonymous namespaces.  Those have !TREE_PUBLIC
     on the corresponding TYPE_STUB_DECL.  */
  if (type_in_anonymous_namespace_p (type1)
      || type_in_anonymous_namespace_p (type2))
    return false;

  /* See if types are obviously different (i.e. different codes
     or polymorphic wrt non-polymorphic).  This is not strictly correct
     for ODR violating programs, but we can't do better without streaming
     ODR names.  */
  if (TREE_CODE (type1) != TREE_CODE (type2))
    return false;
  if (TREE_CODE (type1) == RECORD_TYPE
      && (TYPE_BINFO (type1) == NULL_TREE) != (TYPE_BINFO (type2) == NULL_TREE))
    return false;
  if (TREE_CODE (type1) == RECORD_TYPE && TYPE_BINFO (type1)
      && (BINFO_VTABLE (TYPE_BINFO (type1)) == NULL_TREE)
	 != (BINFO_VTABLE (TYPE_BINFO (type2)) == NULL_TREE))
    return false;

  /* At the moment we have no way to establish ODR equivalence at LTO
     other than comparing virtual table pointers of polymorphic types.
     Eventually we should start saving mangled names in TYPE_NAME.
     Then this condition will become non-trivial.  */

  if (TREE_CODE (type1) == RECORD_TYPE
      && TYPE_BINFO (type1) && TYPE_BINFO (type2)
      && BINFO_VTABLE (TYPE_BINFO (type1))
      && BINFO_VTABLE (TYPE_BINFO (type2)))
    {
      tree v1 = BINFO_VTABLE (TYPE_BINFO (type1));
      tree v2 = BINFO_VTABLE (TYPE_BINFO (type2));
      gcc_assert (TREE_CODE (v1) == POINTER_PLUS_EXPR
		  && TREE_CODE (v2) == POINTER_PLUS_EXPR);
      return (operand_equal_p (TREE_OPERAND (v1, 1),
			       TREE_OPERAND (v2, 1), 0)
	      && DECL_ASSEMBLER_NAME
		     (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
		 == DECL_ASSEMBLER_NAME
		     (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
    }
  gcc_unreachable ();
}


/* Compare types T1 and T2 and return true if they are
   equivalent.  */

inline bool
odr_hasher::equal (const value_type *t1, const compare_type *ct2)
{
  tree t2 = const_cast <tree> (ct2);

  gcc_checking_assert (main_odr_variant (t2) == t2);
  if (t1->type == t2)
    return true;
  if (!in_lto_p)
    return false;
  return types_same_for_odr (t1->type, t2);
}

/* Free ODR type V.  */

inline void
odr_hasher::remove (value_type *v)
{
  v->bases.release ();
  v->derived_types.release ();
  if (v->types_set)
    delete v->types_set;
  ggc_free (v);
}

/* ODR type hash used to lookup ODR type based on tree type node.  */

typedef hash_table<odr_hasher> odr_hash_type;
static odr_hash_type *odr_hash;

/* ODR types are also stored into ODR_TYPE vector to allow consistent
   walking.  Bases appear before derived types.  Vector is garbage collected
   so we won't end up visiting empty types.  */

static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
#define odr_types (*odr_types_ptr)

/* Set TYPE_BINFO of TYPE and its variants to BINFO.  */
void
set_type_binfo (tree type, tree binfo)
{
  for (; type; type = TYPE_NEXT_VARIANT (type))
    if (COMPLETE_TYPE_P (type))
      TYPE_BINFO (type) = binfo;
    else
      gcc_assert (!TYPE_BINFO (type));
}

/* Compare T1 and T2 based on name or structure.  */

static bool
odr_subtypes_equivalent_p (tree t1, tree t2, hash_set<tree> *visited)
{
  bool an1, an2;

  /* This can happen in incomplete types that should be handled earlier.  */
  gcc_assert (t1 && t2);

  t1 = main_odr_variant (t1);
  t2 = main_odr_variant (t2);
  if (t1 == t2)
    return true;
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;
  if ((TYPE_NAME (t1) == NULL_TREE) != (TYPE_NAME (t2) == NULL_TREE))
    return false;
  if (TYPE_NAME (t1) && DECL_NAME (TYPE_NAME (t1)) != DECL_NAME (TYPE_NAME (t2)))
    return false;

  /* Anonymous namespace types must match exactly.  */
  an1 = type_in_anonymous_namespace_p (t1);
  an2 = type_in_anonymous_namespace_p (t2);
  if (an1 != an2 || an1)
    return false;

  /* For types where we cannot establish ODR equivalency, recurse and deeply
     compare.  */
  if (TREE_CODE (t1) != RECORD_TYPE
      || !TYPE_BINFO (t1) || !TYPE_BINFO (t2)
      || !polymorphic_type_binfo_p (TYPE_BINFO (t1))
      || !polymorphic_type_binfo_p (TYPE_BINFO (t2)))
    {
      /* This should really be a pair hash, but for the moment we do not need
	 100% reliability and it would be better to compare all ODR types so
	 recursion here is needed only for component types.  */
      if (visited->add (t1))
	return true;
      return odr_types_equivalent_p (t1, t2, false, NULL, visited);
    }
  return types_same_for_odr (t1, t2);
}

/* Compare two virtual tables, PREVAILING and VTABLE and output ODR
   violation warnings.  */

void
compare_virtual_tables (varpool_node *prevailing, varpool_node *vtable)
{
  int n1, n2;
  if (DECL_VIRTUAL_P (prevailing->decl) != DECL_VIRTUAL_P (vtable->decl))
    {
      odr_violation_reported = true;
      if (DECL_VIRTUAL_P (prevailing->decl))
	{
	  varpool_node *tmp = prevailing;
	  prevailing = vtable;
	  vtable = tmp;
	}
      if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
		      OPT_Wodr,
		      "virtual table of type %qD violates one definition rule",
		      DECL_CONTEXT (vtable->decl)))
	inform (DECL_SOURCE_LOCATION (prevailing->decl),
		"variable of same assembler name as the virtual table is "
		"defined in another translation unit");
      return;
    }
  if (!prevailing->definition || !vtable->definition)
    return;
  for (n1 = 0, n2 = 0; true; n1++, n2++)
    {
      struct ipa_ref *ref1, *ref2;
      bool end1, end2;
      end1 = !prevailing->iterate_reference (n1, ref1);
      end2 = !vtable->iterate_reference (n2, ref2);
      if (end1 && end2)
	return;
      if (!end1 && !end2
	  && DECL_ASSEMBLER_NAME (ref1->referred->decl)
	     != DECL_ASSEMBLER_NAME (ref2->referred->decl)
	  && !n2
	  && !DECL_VIRTUAL_P (ref2->referred->decl)
	  && DECL_VIRTUAL_P (ref1->referred->decl))
	{
	  if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (vtable->decl))), 0,
			  "virtual table of type %qD contains RTTI information",
			  DECL_CONTEXT (vtable->decl)))
	    {
	      inform (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
		      "but is prevailed by one without RTTI from another "
		      "translation unit");
	      inform (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
		      "RTTI will not work on this type");
	    }
	  n2++;
	  end2 = !vtable->iterate_reference (n2, ref2);
	}
      if (!end1 && !end2
	  && DECL_ASSEMBLER_NAME (ref1->referred->decl)
	     != DECL_ASSEMBLER_NAME (ref2->referred->decl)
	  && !n1
	  && !DECL_VIRTUAL_P (ref1->referred->decl)
	  && DECL_VIRTUAL_P (ref2->referred->decl))
	{
	  n1++;
	  end1 = !prevailing->iterate_reference (n1, ref1);
	}
      if (end1 || end2)
	{
	  if (end1)
	    {
	      varpool_node *tmp = prevailing;
	      prevailing = vtable;
	      vtable = tmp;
	      ref1 = ref2;
	    }
	  if (warning_at (DECL_SOURCE_LOCATION
			    (TYPE_NAME (DECL_CONTEXT (vtable->decl))), 0,
			  "virtual table of type %qD violates "
			  "one definition rule",
			  DECL_CONTEXT (vtable->decl)))
	    {
	      inform (DECL_SOURCE_LOCATION
			(TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
		      "the conflicting type defined in another translation "
		      "unit");
	      inform (DECL_SOURCE_LOCATION
			(TYPE_NAME (DECL_CONTEXT (ref1->referring->decl))),
		      "contains additional virtual method %qD",
		      ref1->referred->decl);
	    }
	  return;
	}
      if (DECL_ASSEMBLER_NAME (ref1->referred->decl)
	  != DECL_ASSEMBLER_NAME (ref2->referred->decl))
	{
	  if (warning_at (DECL_SOURCE_LOCATION
			    (TYPE_NAME (DECL_CONTEXT (vtable->decl))), 0,
			  "virtual table of type %qD violates "
			  "one definition rule",
			  DECL_CONTEXT (vtable->decl)))
	    {
	      inform (DECL_SOURCE_LOCATION
			(TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
		      "the conflicting type defined in another translation "
		      "unit");
	      inform (DECL_SOURCE_LOCATION (ref1->referred->decl),
		      "virtual method %qD", ref1->referred->decl);
	      inform (DECL_SOURCE_LOCATION (ref2->referred->decl),
		      "ought to match virtual method %qD but does not",
		      ref2->referred->decl);
	      return;
	    }
	}
    }
}
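
/* Illustrative sketch of the mismatch diagnosed above (assumed example,
   not part of GCC):

     // tu1.C
     struct A { virtual void f (); };
     void A::f () {}

     // tu2.C
     struct A { virtual void f (); virtual void g (); };
     void A::f () {}
     void A::g () {}

   Under -flto both units emit a vtable with assembler name _ZTV1A, but the
   reference list of the second contains an extra slot for A::g; the loop
   above walks the IPA references of both symbols in lockstep and reports
   the one definition rule violation at the first difference.  */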

/* Output ODR violation warning about T1 and T2 with REASON.
   Display location of ST1 and ST2 if REASON speaks about field or
   method of the type.
   If WARN is false, do nothing.  Set WARNED if warning was indeed
   output.  */

void
warn_odr (tree t1, tree t2, tree st1, tree st2,
	  bool warn, bool *warned, const char *reason)
{
  tree decl2 = TYPE_NAME (t2);

  if (!warn)
    return;
  if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (t1)), OPT_Wodr,
		   "type %qT violates one definition rule",
		   t1))
    return;
  if (!st1)
    ;
  else if (TREE_CODE (st1) == FIELD_DECL)
    {
      inform (DECL_SOURCE_LOCATION (decl2),
	      "a different type is defined in another translation unit");
      inform (DECL_SOURCE_LOCATION (st1),
	      "the first difference of corresponding definitions is field %qD",
	      st1);
      decl2 = st2;
    }
  else if (TREE_CODE (st1) == FUNCTION_DECL)
    {
      inform (DECL_SOURCE_LOCATION (decl2),
	      "a different type is defined in another translation unit");
      inform (DECL_SOURCE_LOCATION (st1),
	      "the first difference of corresponding definitions is method %qD",
	      st1);
      decl2 = st2;
    }
  else
    return;
  inform (DECL_SOURCE_LOCATION (decl2), reason);

  if (warned)
    *warned = true;
}

/* We already warned about ODR mismatch.  T1 and T2 ought to be equivalent
   because they are used in the same place in ODR matching types.
   They are not; inform the user.  */

void
warn_types_mismatch (tree t1, tree t2)
{
  if (!TYPE_NAME (t1) || !TYPE_NAME (t2))
    return;
  /* In Firefox it is a common bug to have same types but in
     different namespaces.  Be a bit more informative on
     this.  */
  if (TYPE_CONTEXT (t1) && TYPE_CONTEXT (t2)
      && (((TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL)
	   != (TREE_CODE (TYPE_CONTEXT (t2)) == NAMESPACE_DECL))
	  || (TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL
	      && (DECL_NAME (TYPE_CONTEXT (t1)) !=
		  DECL_NAME (TYPE_CONTEXT (t2))))))
    inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
	    "type %qT should match type %qT but is defined "
	    "in a different namespace",
	    t1, t2);
  else
    inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
	    "type %qT should match type %qT",
	    t1, t2);
  inform (DECL_SOURCE_LOCATION (TYPE_NAME (t2)),
	  "the incompatible type is defined here");
}
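
/* Illustrative sketch of the namespace case special-cased above (assumed
   example with made-up namespace names, not part of GCC): one unit defines

     namespace mozilla { struct Bar { int x; }; }
     struct Foo { mozilla::Bar b; };

   while another defines

     namespace js { struct Bar { int x; }; }
     struct Foo { js::Bar b; };

   Both types are named "Bar" and are structurally identical, so reporting
   only the name would be confusing; the code above additionally points out
   that the two Bars live in different namespaces.  */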

/* Compare T1 and T2, report ODR violations if WARN is true and set
   WARNED to true if anything is reported.  Return true if types match.
   If true is returned, the types are also compatible in the sense of
   gimple_canonical_types_compatible_p.  */

static bool
odr_types_equivalent_p (tree t1, tree t2, bool warn, bool *warned, hash_set<tree> *visited)
{
  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;
  gcc_assert (!type_in_anonymous_namespace_p (t1));
  gcc_assert (!type_in_anonymous_namespace_p (t2));

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
		G_("a different type is defined in another translation unit"));
      return false;
    }

  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
		G_("a type with different qualifiers is defined in another "
		   "translation unit"));
      return false;
    }

  if (comp_type_attributes (t1, t2) != 1)
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
		G_("a type with different attributes "
		   "is defined in another translation unit"));
      return false;
    }

  if (TREE_CODE (t1) == ENUMERAL_TYPE)
    {
      tree v1, v2;
      for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
	   v1 && v2 ; v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
	{
	  if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
	    {
	      warn_odr (t1, t2, NULL, NULL, warn, warned,
			G_("an enum with different value name"
			   " is defined in another translation unit"));
	      return false;
	    }
	  if (TREE_VALUE (v1) != TREE_VALUE (v2)
	      && !operand_equal_p (DECL_INITIAL (TREE_VALUE (v1)),
				   DECL_INITIAL (TREE_VALUE (v2)), 0))
	    {
	      warn_odr (t1, t2, NULL, NULL, warn, warned,
			G_("an enum with different values is defined"
			   " in another translation unit"));
	      return false;
	    }
	}
      if (v1 || v2)
	{
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("an enum with mismatching number of values "
		       "is defined in another translation unit"));
	  return false;
	}
    }

  /* Non-aggregate types can be handled cheaply.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
	{
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("a type with different precision is defined "
		       "in another translation unit"));
	  return false;
	}
      if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
	{
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("a type with different signedness is defined "
		       "in another translation unit"));
	  return false;
	}

      if (TREE_CODE (t1) == INTEGER_TYPE
	  && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
	{
	  /* char wrt uint8_t?  */
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("a different type is defined in another "
		       "translation unit"));
	  return false;
	}

      /* For canonical type comparisons we do not want to build SCCs
	 so we cannot compare pointed-to types.  But we can, for now,
	 require the same pointed-to type kind and match what
	 useless_type_conversion_p would do.  */
      if (POINTER_TYPE_P (t1))
	{
	  if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
	      != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
	    {
	      warn_odr (t1, t2, NULL, NULL, warn, warned,
			G_("it is defined as a pointer in different address "
			   "space in another translation unit"));
	      return false;
	    }

	  if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
	    {
	      warn_odr (t1, t2, NULL, NULL, warn, warned,
			G_("it is defined as a pointer to different type "
			   "in another translation unit"));
	      if (warn && warned)
		warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
	      return false;
	    }
	}

      /* Tail-recurse to components.  */
      if ((TREE_CODE (t1) == VECTOR_TYPE || TREE_CODE (t1) == COMPLEX_TYPE)
	  && !odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
	{
	  /* Probably specific enough.  */
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("a different type is defined "
		       "in another translation unit"));
	  if (warn && warned)
	    warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
	  return false;
	}

      gcc_assert (operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0));
      gcc_assert (operand_equal_p (TYPE_SIZE_UNIT (t1),
				   TYPE_SIZE_UNIT (t2), 0));
      gcc_assert (TYPE_MODE (t1) == TYPE_MODE (t2));

      return true;
    }

  /* Do type-specific comparisons.  */
  switch (TREE_CODE (t1))
    {
    case ARRAY_TYPE:
      {
	/* Array types are the same if the element types are the same and
	   the number of elements is the same.  */
	if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
	  {
	    warn_odr (t1, t2, NULL, NULL, warn, warned,
		      G_("a different type is defined in another "
			 "translation unit"));
	    if (warn && warned)
	      warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
	  }
	gcc_assert (TYPE_STRING_FLAG (t1) == TYPE_STRING_FLAG (t2));
	gcc_assert (TYPE_NONALIASED_COMPONENT (t1)
		    == TYPE_NONALIASED_COMPONENT (t2));

	tree i1 = TYPE_DOMAIN (t1);
	tree i2 = TYPE_DOMAIN (t2);

	/* For an incomplete external array, the type domain can be
	   NULL_TREE.  Check this condition also.  */
	if (i1 == NULL_TREE || i2 == NULL_TREE)
	  return true;

	tree min1 = TYPE_MIN_VALUE (i1);
	tree min2 = TYPE_MIN_VALUE (i2);
	tree max1 = TYPE_MAX_VALUE (i1);
	tree max2 = TYPE_MAX_VALUE (i2);

	/* In C++, minimums should be always 0.  */
	gcc_assert (min1 == min2);
	if (!operand_equal_p (max1, max2, 0))
	  {
	    warn_odr (t1, t2, NULL, NULL, warn, warned,
		      G_("an array of different size is defined "
			 "in another translation unit"));
	    return false;
	  }
	gcc_assert (operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0));
	gcc_assert (operand_equal_p (TYPE_SIZE_UNIT (t1),
				     TYPE_SIZE_UNIT (t2), 0));
      }
    return true;

    case METHOD_TYPE:
    case FUNCTION_TYPE:
      /* Function types are the same if the return type and arguments types
	 are the same.  */
      if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
	{
	  warn_odr (t1, t2, NULL, NULL, warn, warned,
		    G_("has different return value "
		       "in another translation unit"));
	  if (warn && warned)
	    warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
	  return false;
	}

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
	return true;
      else
	{
	  tree parms1, parms2;

	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
	       parms1 && parms2;
	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
	    {
	      if (!odr_subtypes_equivalent_p
		     (TREE_VALUE (parms1), TREE_VALUE (parms2), visited))
		{
		  warn_odr (t1, t2, NULL, NULL, warn, warned,
			    G_("has different parameters in another "
			       "translation unit"));
		  if (warn && warned)
		    warn_types_mismatch (TREE_VALUE (parms1),
					 TREE_VALUE (parms2));
		  return false;
		}
	    }

	  if (parms1 || parms2)
	    {
	      warn_odr (t1, t2, NULL, NULL, warn, warned,
			G_("has different parameters "
			   "in another translation unit"));
	      return false;
	    }

	  return true;
	}

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f1, f2;

	/* For aggregate types, all the fields must be the same.  */
	if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
	  {
	    for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
		 f1 || f2;
		 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	      {
		/* Skip non-fields.  */
		while (f1 && TREE_CODE (f1) != FIELD_DECL)
		  f1 = TREE_CHAIN (f1);
		while (f2 && TREE_CODE (f2) != FIELD_DECL)
		  f2 = TREE_CHAIN (f2);
		if (!f1 || !f2)
		  break;
		if (DECL_ARTIFICIAL (f1) != DECL_ARTIFICIAL (f2))
		  break;
		if (DECL_NAME (f1) != DECL_NAME (f2)
		    && !DECL_ARTIFICIAL (f1))
		  {
		    warn_odr (t1, t2, f1, f2, warn, warned,
			      G_("a field with different name is defined "
				 "in another translation unit"));
		    return false;
		  }
		if (!odr_subtypes_equivalent_p (TREE_TYPE (f1), TREE_TYPE (f2), visited))
		  {
		    /* Do not warn about artificial fields and just go into
		       generic field mismatch warning.  */
		    if (DECL_ARTIFICIAL (f1))
		      break;

		    warn_odr (t1, t2, f1, f2, warn, warned,
			      G_("a field of same name but different type "
				 "is defined in another translation unit"));
		    if (warn && warned)
		      warn_types_mismatch (TREE_TYPE (f1), TREE_TYPE (f2));
		    return false;
		  }
		if (!gimple_compare_field_offset (f1, f2))
		  {
		    /* Do not warn about artificial fields and just go into
		       generic field mismatch warning.  */
		    if (DECL_ARTIFICIAL (f1))
		      break;
		    warn_odr (t1, t2, t1, t2, warn, warned,
			      G_("a field has different layout "
				 "in another translation unit"));
		    return false;
		  }
		gcc_assert (DECL_NONADDRESSABLE_P (f1)
			    == DECL_NONADDRESSABLE_P (f2));
	      }

	    /* If one aggregate has more fields than the other, they
	       are not the same.  */
	    if (f1 || f2)
	      {
		warn_odr (t1, t2, NULL, NULL, warn, warned,
			  G_("a type with different number of fields "
			     "is defined in another translation unit"));
		return false;
	      }
	    if ((TYPE_MAIN_VARIANT (t1) == t1 || TYPE_MAIN_VARIANT (t2) == t2)
		&& (TYPE_METHODS (TYPE_MAIN_VARIANT (t1))
		    != TYPE_METHODS (TYPE_MAIN_VARIANT (t2))))
	      {
		for (f1 = TYPE_METHODS (TYPE_MAIN_VARIANT (t1)),
		     f2 = TYPE_METHODS (TYPE_MAIN_VARIANT (t2));
		     f1 && f2 ; f1 = DECL_CHAIN (f1), f2 = DECL_CHAIN (f2))
		  {
		    if (DECL_ASSEMBLER_NAME (f1) != DECL_ASSEMBLER_NAME (f2))
		      {
			warn_odr (t1, t2, f1, f2, warn, warned,
				  G_("a different method of same type "
				     "is defined in another translation unit"));
			return false;
		      }
		    if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
		      {
			warn_odr (t1, t2, f1, f2, warn, warned,
				  G_("a definition that differs by virtual "
				     "keyword in another translation unit"));
			return false;
		      }
		    if (DECL_VINDEX (f1) != DECL_VINDEX (f2))
		      {
			warn_odr (t1, t2, f1, f2, warn, warned,
				  G_("virtual table layout differs in another "
				     "translation unit"));
			return false;
		      }
		    if (!odr_subtypes_equivalent_p (TREE_TYPE (f1), TREE_TYPE (f2), visited))
		      {
			warn_odr (t1, t2, f1, f2, warn, warned,
				  G_("method with incompatible type is defined "
				     "in another translation unit"));
			return false;
		      }
		  }
		if (f1 || f2)
		  {
		    warn_odr (t1, t2, NULL, NULL, warn, warned,
			      G_("a type with different number of methods "
				 "is defined in another translation unit"));
		    return false;
		  }
	      }
	    gcc_assert (operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0));
	    gcc_assert (operand_equal_p (TYPE_SIZE_UNIT (t1),
					 TYPE_SIZE_UNIT (t2), 0));
	  }

	return true;
      }

    default:
      gcc_unreachable ();
    }
}
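
/* Illustrative sketch of a mismatch the comparison above reports (assumed
   example, not part of GCC): compiled with -flto -Wodr,

     // tu1.C
     struct S { int a; };

     // tu2.C
     struct S { long a; };

   yields "type 'struct S' violates one definition rule" followed by a note
   that the first difference of corresponding definitions is field 'a',
   produced by the FIELD_DECL walk in the RECORD_TYPE case above.  */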

/* TYPE is equivalent to VAL by ODR, but its tree representation differs
   from VAL->type.  This may happen in LTO where tree merging did not merge
   all variants of the same type.  It may or may not indicate an ODR
   violation.  Add it to the list of duplicates and warn on some violations.  */

static bool
add_type_duplicate (odr_type val, tree type)
{
  bool build_bases = false;
  if (!val->types_set)
    val->types_set = new hash_set<tree>;

  /* Always prefer complete type to be the leader.  */
  if (!COMPLETE_TYPE_P (val->type)
      && COMPLETE_TYPE_P (type))
    {
      tree tmp = type;

      build_bases = true;
      type = val->type;
      val->type = tmp;
    }

  /* See if this duplicate is new.  */
  if (!val->types_set->add (type))
    {
      bool merge = true;
      bool base_mismatch = false;
      unsigned int i, j;
      bool warned = false;
      hash_set<tree> visited;

      gcc_assert (in_lto_p);
      vec_safe_push (val->types, type);

      /* First we compare memory layout.  */
      if (!odr_types_equivalent_p (val->type, type, !flag_ltrans && !val->odr_violated,
				   &warned, &visited))
	{
	  merge = false;
	  odr_violation_reported = true;
	  val->odr_violated = true;
	  if (cgraph_dump_file)
	    {
	      fprintf (cgraph_dump_file, "ODR violation\n");

	      print_node (cgraph_dump_file, "", val->type, 0);
	      putc ('\n', cgraph_dump_file);
	      print_node (cgraph_dump_file, "", type, 0);
	      putc ('\n', cgraph_dump_file);
	    }
	}

      /* Next sanity check that bases are the same.  If not, we will end
	 up producing wrong answers.  */
      if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
	  && TREE_CODE (val->type) == RECORD_TYPE
	  && TREE_CODE (type) == RECORD_TYPE
	  && TYPE_BINFO (val->type) && TYPE_BINFO (type))
	{
	  for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
	    if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type), i)))
	      {
		odr_type base = get_odr_type
				   (BINFO_TYPE
				      (BINFO_BASE_BINFO (TYPE_BINFO (type),
							 i)),
				    true);
		if (val->bases.length () <= j || val->bases[j] != base)
		  base_mismatch = true;
		j++;
	      }
	  if (base_mismatch)
	    {
	      merge = false;
	      odr_violation_reported = true;

	      if (!warned && !val->odr_violated)
		warn_odr (type, val->type, NULL, NULL, !warned, &warned,
			  "a type with the same name but different bases is "
			  "defined in another translation unit");
	      val->odr_violated = true;
	      if (cgraph_dump_file)
		{
		  fprintf (cgraph_dump_file, "ODR base violation or merging bug?\n");

		  print_node (cgraph_dump_file, "", val->type, 0);
		  putc ('\n', cgraph_dump_file);
		  print_node (cgraph_dump_file, "", type, 0);
		  putc ('\n', cgraph_dump_file);
		}
	    }
	}

      /* Regularize things a little.  During LTO same types may come with
	 different BINFOs.  Either because their virtual table was
	 not merged by tree merging and only later at decl merging or
	 because one type comes with external vtable, while the other
	 with internal.  We want to merge equivalent binfos to conserve
	 memory and streaming overhead.

	 The external vtables are more harmful: they contain references
	 to external declarations of methods that may be defined in the
	 merged LTO unit.  For this reason we absolutely need to remove
	 them and replace by internal variants.  Not doing so will lead
	 to incomplete answers from possible_polymorphic_call_targets.  */
      if (!flag_ltrans && merge
	  && TREE_CODE (val->type) == RECORD_TYPE
	  && TREE_CODE (type) == RECORD_TYPE
	  && TYPE_BINFO (val->type) && TYPE_BINFO (type)
	  && TYPE_MAIN_VARIANT (type) == type
	  && TYPE_MAIN_VARIANT (val->type) == val->type
	  && BINFO_VTABLE (TYPE_BINFO (val->type))
	  && BINFO_VTABLE (TYPE_BINFO (type)))
	{
	  tree master_binfo = TYPE_BINFO (val->type);
	  tree v1 = BINFO_VTABLE (master_binfo);
	  tree v2 = BINFO_VTABLE (TYPE_BINFO (type));

	  if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
	    {
	      gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
			  && operand_equal_p (TREE_OPERAND (v1, 1),
					      TREE_OPERAND (v2, 1), 0));
	      v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
	      v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
	    }
	  gcc_assert (DECL_ASSEMBLER_NAME (v1)
		      == DECL_ASSEMBLER_NAME (v2));

	  if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
	    {
	      unsigned int i;

	      set_type_binfo (val->type, TYPE_BINFO (type));
	      for (i = 0; i < val->types->length (); i++)
		{
		  if (TYPE_BINFO ((*val->types)[i])
		      == master_binfo)
		    set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
		}
	      BINFO_TYPE (TYPE_BINFO (type)) = val->type;
	    }
	  else
	    set_type_binfo (type, master_binfo);
	}
    }
  return build_bases;
}
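
/* Illustrative note (an assumed scenario, not GCC code): when two LTO units
   both define "struct A { virtual void f (); };", tree merging may still
   leave two RECORD_TYPE nodes for A, one whose BINFO_VTABLE points to an
   external declaration of _ZTV1A and one pointing to the prevailing
   definition.  add_type_duplicate records the second node as a duplicate
   of the first and rewires every variant to the BINFO with the defined
   vtable, so later vtable walks see the complete method list.  */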

/* Get ODR type hash entry for TYPE.  If INSERT is true, create
   possibly new entry.  */

odr_type
get_odr_type (tree type, bool insert)
{
  odr_type_d **slot;
  odr_type val;
  hashval_t hash;
  bool build_bases = false;
  bool insert_to_odr_array = false;
  int base_id = -1;

  type = main_odr_variant (type);

  hash = hash_type_name (type);
  slot
     = odr_hash->find_slot_with_hash (type, hash, insert ? INSERT : NO_INSERT);
  if (!slot)
    return NULL;

  /* See if we already have entry for type.  */
  if (*slot)
    {
      val = *slot;

      /* With LTO we need to support multiple tree representation of
	 the same ODR type.  */
      if (val->type != type)
	build_bases = add_type_duplicate (val, type);
    }
  else
    {
      val = ggc_cleared_alloc<odr_type_d> ();
      val->type = type;
      val->bases = vNULL;
      val->derived_types = vNULL;
      val->anonymous_namespace = type_in_anonymous_namespace_p (type);
      build_bases = COMPLETE_TYPE_P (val->type);
      insert_to_odr_array = true;
    }

  if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
      && type == TYPE_MAIN_VARIANT (type))
    {
      tree binfo = TYPE_BINFO (type);
      unsigned int i;

      gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) == type);

      val->all_derivations_known = type_all_derivations_known_p (type);
      *slot = val;
      for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
	/* For now record only polymorphic types.  Others are
	   pointless for devirtualization and we cannot precisely
	   determine ODR equivalency of these during LTO.  */
	if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
	  {
	    odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
									i)),
					  true);
	    gcc_assert (TYPE_MAIN_VARIANT (base->type) == base->type);
	    base->derived_types.safe_push (val);
	    val->bases.safe_push (base);
	    if (base->id > base_id)
	      base_id = base->id;
	  }
    }
  /* Ensure that type always appears after bases.  */
  if (insert_to_odr_array)
    {
      if (odr_types_ptr)
	val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  else if (base_id > val->id)
    {
      odr_types[val->id] = 0;
      /* Be sure we did not record any derived types; these may need
	 renumbering too.  */
      gcc_assert (val->derived_types.length() == 0);
      if (odr_types_ptr)
	val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  return val;
}

/* Dump ODR type T and all its derived types.  INDENT specifies indentation
   for recursive printing.  */

static void
dump_odr_type (FILE *f, odr_type t, int indent=0)
{
  unsigned int i;
  fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
  print_generic_expr (f, t->type, TDF_SLIM);
  fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
  fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
  if (TYPE_NAME (t->type))
    {
      fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
	       DECL_SOURCE_FILE (TYPE_NAME (t->type)),
	       DECL_SOURCE_LINE (TYPE_NAME (t->type)));
    }
  if (t->bases.length ())
    {
      fprintf (f, "%*s base odr type ids: ", indent * 2, "");
      for (i = 0; i < t->bases.length (); i++)
	fprintf (f, " %i", t->bases[i]->id);
      fprintf (f, "\n");
    }
  if (t->derived_types.length ())
    {
      fprintf (f, "%*s derived types:\n", indent * 2, "");
      for (i = 0; i < t->derived_types.length (); i++)
	dump_odr_type (f, t->derived_types[i], indent + 1);
    }
  fprintf (f, "\n");
}

/* Dump the type inheritance graph.  */

static void
dump_type_inheritance_graph (FILE *f)
{
  unsigned int i;
  if (!odr_types_ptr)
    return;
  fprintf (f, "\n\nType inheritance graph:\n");
  for (i = 0; i < odr_types.length (); i++)
    {
      if (odr_types[i] && odr_types[i]->bases.length () == 0)
	dump_odr_type (f, odr_types[i]);
    }
  for (i = 0; i < odr_types.length (); i++)
    {
      if (odr_types[i] && odr_types[i]->types && odr_types[i]->types->length ())
	{
	  unsigned int j;
	  fprintf (f, "Duplicate tree types for odr type %i\n", i);
	  print_node (f, "", odr_types[i]->type, 0);
	  for (j = 0; j < odr_types[i]->types->length (); j++)
	    {
	      tree t;
	      fprintf (f, "duplicate #%i\n", j);
	      print_node (f, "", (*odr_types[i]->types)[j], 0);
	      t = (*odr_types[i]->types)[j];
	      while (TYPE_P (t) && TYPE_CONTEXT (t))
		{
		  t = TYPE_CONTEXT (t);
		  print_node (f, "", t, 0);
		}
	      putc ('\n',f);
	    }
	}
    }
}
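
/* Illustrative note (an assumed example; the exact spacing follows the
   fprintf calls above): for "struct A { virtual void f (); };
   struct B : A { };" the -fdump-ipa-inheritance dump looks roughly like

     Type inheritance graph:
      type 0: struct A
       defined at: t.C:1
       derived types:
        type 1: struct B
         defined at: t.C:2
         base odr type ids:  0
  */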

/* Given method type T, return type of class it belongs to.
   Lookup this pointer and get its type.  */

tree
method_class_type (const_tree t)
{
  tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
  gcc_assert (TREE_CODE (t) == METHOD_TYPE);

  return TREE_TYPE (first_parm_type);
}
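
/* Illustrative note (assumed example, not GCC code): for the method
   "void A::foo (int)" the METHOD_TYPE argument list begins with the
   implicit this pointer of type "A *", so method_class_type returns the
   RECORD_TYPE for A.  */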

/* Initialize IPA devirt and build inheritance tree graph.  */

void
build_type_inheritance_graph (void)
{
  struct symtab_node *n;
  FILE *inheritance_dump_file;
  int flags;

  if (odr_hash)
    return;
  timevar_push (TV_IPA_INHERITANCE);
  inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
  odr_hash = new odr_hash_type (23);

  /* We reconstruct the graph starting from types of all methods seen in the
     unit.  */
  FOR_EACH_SYMBOL (n)
    if (is_a <cgraph_node *> (n)
	&& DECL_VIRTUAL_P (n->decl)
	&& n->real_symbol_p ())
      get_odr_type (TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (n->decl))),
		    true);

    /* Look also for virtual tables of types that do not define any methods.

       We need it in a case where class B has virtual base of class A
       re-defining its virtual method and there is class C with no virtual
       methods with B as virtual base (see the sketch after this function).

       Here we output B's virtual method in two variants - for non-virtual
       and virtual inheritance.  B's virtual table has non-virtual version,
       while C's has virtual.

       For this reason we need to know about C in order to include both
       variants of B.  More correctly, record_target_from_binfo should
       add both variants of the method when walking B, but we have no
       link in between them.

       We rely on the fact that either the method is exported and thus we
       assume it is called externally or C is in anonymous namespace and
       thus we will see the vtable.  */

    else if (is_a <varpool_node *> (n)
	     && DECL_VIRTUAL_P (n->decl)
	     && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
	     && TYPE_BINFO (DECL_CONTEXT (n->decl))
	     && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
      get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true);
  if (inheritance_dump_file)
    {
      dump_type_inheritance_graph (inheritance_dump_file);
      dump_end (TDI_inheritance, inheritance_dump_file);
    }
  timevar_pop (TV_IPA_INHERITANCE);
}
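
/* Sketch of the virtual-inheritance situation described in
   build_type_inheritance_graph (assumed example, not GCC code):

     struct A { virtual void f (); };
     struct B : virtual A { virtual void f (); };
     struct C : virtual B { };   // no virtual methods of its own

   B::f is emitted in two variants, one for B's own (non-virtual) vtable and
   one used when B is a virtual base; C's vtable refers to the latter.
   Without scanning C's vtable variable the second variant would never be
   entered into the type inheritance graph.  */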

/* Return true if N has reference from live virtual table
   (and thus can be a destination of polymorphic call).
   Be conservatively correct when callgraph is not built or
   if the method may be referred externally.  */

static bool
referenced_from_vtable_p (struct cgraph_node *node)
{
  int i;
  struct ipa_ref *ref;
  bool found = false;

  if (node->externally_visible
      || DECL_EXTERNAL (node->decl)
      || node->used_from_other_partition)
    return true;

  /* Keep this test constant time.
     It is unlikely this can happen except for the case where speculative
     devirtualization introduced many speculative edges to this node.
     In this case the target is very likely alive anyway.  */
  if (node->ref_list.referring.length () > 100)
    return true;

  /* We need references built.  */
  if (cgraph_state <= CGRAPH_STATE_CONSTRUCTION)
    return true;

  for (i = 0; node->iterate_referring (i, ref); i++)
    if ((ref->use == IPA_REF_ALIAS
	 && referenced_from_vtable_p (dyn_cast<cgraph_node *> (ref->referring)))
	|| (ref->use == IPA_REF_ADDR
	    && TREE_CODE (ref->referring->decl) == VAR_DECL
	    && DECL_VIRTUAL_P (ref->referring->decl)))
      {
	found = true;
	break;
      }
  return found;
}

/* If TARGET has associated node, record it in the NODES array.
   CAN_REFER specifies whether the program can refer to the target directly.
   If TARGET is unknown (NULL) or it cannot be inserted (for example because
   its body was already removed and there is no way to refer to it), clear
   COMPLETEP.  */

static void
maybe_record_node (vec <cgraph_node *> &nodes,
		   tree target, hash_set<tree> *inserted,
		   bool can_refer,
		   bool *completep)
{
  struct cgraph_node *target_node, *alias_target;
  enum availability avail;

  /* cxa_pure_virtual and __builtin_unreachable do not need to be added into
     list of targets; the runtime effect of calling them is undefined.
     Only "real" virtual methods should be accounted.  */
  if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE)
    return;

  if (!can_refer)
    {
      /* The only case when method of anonymous namespace becomes unreferable
	 is when we completely optimized it out.  */
      if (flag_ltrans
	  || !target
	  || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
	*completep = false;
      return;
    }

  if (!target)
    return;

  target_node = cgraph_node::get (target);

  /* Prefer alias target over aliases, so we do not get confused by
     fake duplicates.  */
  if (target_node)
    {
      alias_target = target_node->ultimate_alias_target (&avail);
      if (target_node != alias_target
	  && avail >= AVAIL_AVAILABLE
	  && target_node->get_availability ())
	target_node = alias_target;
    }

  /* Method can only be called by polymorphic call if any
     of vtables referring to it are alive.

     While this holds for non-anonymous functions, too, there are
     cases where we want to keep them in the list; for example
     inline functions with -fno-weak are static, but we still
     may devirtualize them when instance comes from other unit.
     The same holds for LTO.

     Currently we ignore these functions in speculative devirtualization.
     ??? Maybe it would make sense to be more aggressive for LTO even
     elsewhere.  */
  if (!flag_ltrans
      && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
      && (!target_node
	  || !referenced_from_vtable_p (target_node)))
    ;
  /* See if TARGET is useful function we can deal with.  */
  else if (target_node != NULL
	   && (TREE_PUBLIC (target)
	       || DECL_EXTERNAL (target)
	       || target_node->definition)
	   && target_node->real_symbol_p ())
    {
      gcc_assert (!target_node->global.inlined_to);
      gcc_assert (target_node->real_symbol_p ());
      if (!inserted->add (target))
	{
	  cached_polymorphic_call_targets->add (target_node);
	  nodes.safe_push (target_node);
	}
    }
  else if (completep
	   && (!type_in_anonymous_namespace_p
		 (DECL_CONTEXT (target))
	       || flag_ltrans))
    *completep = false;
}

/* See if BINFO's type matches OUTER_TYPE.  If so, look up
   BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
   method in vtable and insert method to NODES array
   or BASES_TO_CONSIDER if this array is non-NULL.
   Otherwise recurse to base BINFOs.
   This matches what get_binfo_at_offset does, but with offset
   being unknown.

   TYPE_BINFOS is a stack of BINFOs of types with defined
   virtual table seen on the way from class type to BINFO.

   MATCHED_VTABLES tracks virtual tables we already did lookup
   for virtual function in.  INSERTED tracks nodes we already
   inserted.

   ANONYMOUS is true if BINFO is part of anonymous namespace.

   Clear COMPLETEP when we hit unreferable target.  */

static void
record_target_from_binfo (vec <cgraph_node *> &nodes,
			  vec <tree> *bases_to_consider,
			  tree binfo,
			  tree otr_type,
			  vec <tree> &type_binfos,
			  HOST_WIDE_INT otr_token,
			  tree outer_type,
			  HOST_WIDE_INT offset,
			  hash_set<tree> *inserted,
			  hash_set<tree> *matched_vtables,
			  bool anonymous,
			  bool *completep)
{
  tree type = BINFO_TYPE (binfo);
  int i;
  tree base_binfo;


  if (BINFO_VTABLE (binfo))
    type_binfos.safe_push (binfo);
  if (types_same_for_odr (type, outer_type))
    {
      int i;
      tree type_binfo = NULL;

      /* Look up BINFO with virtual table.  For normal types it is always last
	 binfo on stack.  */
      for (i = type_binfos.length () - 1; i >= 0; i--)
	if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
	  {
	    type_binfo = type_binfos[i];
	    break;
	  }
      if (BINFO_VTABLE (binfo))
	type_binfos.pop ();
      /* If this is duplicated BINFO for base shared by virtual inheritance,
	 we may not have its associated vtable.  This is not a problem, since
	 we will walk it on the other path.  */
      if (!type_binfo)
	return;
      tree inner_binfo = get_binfo_at_offset (type_binfo,
					      offset, otr_type);
      if (!inner_binfo)
	{
	  gcc_assert (odr_violation_reported);
	  return;
	}
      /* For types in anonymous namespace first check if the respective vtable
	 is alive.  If not, we know the type can't be called.  */
      if (!flag_ltrans && anonymous)
	{
	  tree vtable = BINFO_VTABLE (inner_binfo);
	  varpool_node *vnode;

	  if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
	    vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
	  vnode = varpool_node::get (vtable);
	  if (!vnode || !vnode->definition)
	    return;
	}
      gcc_assert (inner_binfo);
      if (bases_to_consider
	  ? !matched_vtables->contains (BINFO_VTABLE (inner_binfo))
	  : !matched_vtables->add (BINFO_VTABLE (inner_binfo)))
	{
	  bool can_refer;
	  tree target = gimple_get_virt_method_for_binfo (otr_token,
							  inner_binfo,
							  &can_refer);
	  if (!bases_to_consider)
	    maybe_record_node (nodes, target, inserted, can_refer, completep);
	  /* Destructors are never called via construction vtables.  */
	  else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
	    bases_to_consider->safe_push (target);
	}
      return;
    }

  /* Walk bases.  */
  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    /* Walking bases that have no virtual method is pointless exercise.  */
    if (polymorphic_type_binfo_p (base_binfo))
      record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
				type_binfos,
				otr_token, outer_type, offset, inserted,
				matched_vtables, anonymous, completep);
  if (BINFO_VTABLE (binfo))
    type_binfos.pop ();
}
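
/* Illustrative sketch of why the walk above carries a stack of
   vtable-providing BINFOs (assumed example, not GCC code): for

     struct A { virtual void f (); };
     struct B { virtual void g (); };
     struct C : A, B { };

   the base BINFO for B inside C sits at a nonzero BINFO_OFFSET and has its
   own BINFO_VTABLE (the secondary vtable for the B subobject of C), while
   in the single-inheritance chain C -> A the vtable is shared and the base
   BINFO's BINFO_VTABLE is NULL; the stack lets the walk recover the
   innermost BINFO that actually provides the virtual table for the matched
   OUTER_TYPE.  */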

/* Look up virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
   of TYPE, insert them to NODES, recurse into derived nodes.
   INSERTED is used to avoid duplicate insertions of methods into NODES.
   MATCHED_VTABLES are used to avoid duplicate walking of vtables.
   Clear COMPLETEP if unreferable target is found.

   If CONSIDER_CONSTRUCTION is true, record to BASES_TO_CONSIDER
   all cases where BASE_SKIPPED is true (because the base is an abstract
   class).  */

static void
possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
				     hash_set<tree> *inserted,
				     hash_set<tree> *matched_vtables,
				     tree otr_type,
				     odr_type type,
				     HOST_WIDE_INT otr_token,
				     tree outer_type,
				     HOST_WIDE_INT offset,
				     bool *completep,
				     vec <tree> &bases_to_consider,
				     bool consider_construction)
{
  tree binfo = TYPE_BINFO (type->type);
  unsigned int i;
  vec <tree> type_binfos = vNULL;
  bool possibly_instantiated = type_possibly_instantiated_p (type->type);

  /* We may need to consider types w/o instances because of possible derived
     types using their methods either directly or via construction vtables.
     We are safe to skip them when all derivations are known, since we will
     handle them later.
     This is done by recording them to BASES_TO_CONSIDER array.  */
  if (possibly_instantiated || consider_construction)
    {
      record_target_from_binfo (nodes,
				(!possibly_instantiated
				 && type_all_derivations_known_p (type->type))
				? &bases_to_consider : NULL,
				binfo, otr_type, type_binfos, otr_token,
				outer_type, offset,
				inserted, matched_vtables,
				type->anonymous_namespace, completep);
    }
  type_binfos.release ();
  for (i = 0; i < type->derived_types.length (); i++)
    possible_polymorphic_call_targets_1 (nodes, inserted,
					 matched_vtables,
					 otr_type,
					 type->derived_types[i],
					 otr_token, outer_type, offset, completep,
					 bases_to_consider, consider_construction);
}

/* Cache of queries for polymorphic call targets.

   Enumerating all call targets may get expensive when there are many
   polymorphic calls in the program, so we memoize all the previous
   queries and avoid duplicated work.  */
1723
1724 struct polymorphic_call_target_d
1725 {
1726 HOST_WIDE_INT otr_token;
1727 ipa_polymorphic_call_context context;
1728 odr_type type;
1729 vec <cgraph_node *> targets;
1730 int speculative_targets;
1731 bool complete;
1732 int type_warning;
1733 tree decl_warning;
1734 };
1735
1736 /* Polymorphic call target cache helpers. */
1737
1738 struct polymorphic_call_target_hasher
1739 {
1740 typedef polymorphic_call_target_d value_type;
1741 typedef polymorphic_call_target_d compare_type;
1742 static inline hashval_t hash (const value_type *);
1743 static inline bool equal (const value_type *, const compare_type *);
1744 static inline void remove (value_type *);
1745 };
1746
1747 /* Return the computed hashcode for ODR_QUERY. */
1748
1749 inline hashval_t
1750 polymorphic_call_target_hasher::hash (const value_type *odr_query)
1751 {
1752 inchash::hash hstate (odr_query->otr_token);
1753
1754 hstate.add_wide_int (odr_query->type->id);
1755 hstate.merge_hash (TYPE_UID (odr_query->context.outer_type));
1756 hstate.add_wide_int (odr_query->context.offset);
1757
1758 if (odr_query->context.speculative_outer_type)
1759 {
1760 hstate.merge_hash (TYPE_UID (odr_query->context.speculative_outer_type));
1761 hstate.add_wide_int (odr_query->context.speculative_offset);
1762 }
1763 hstate.add_flag (odr_query->context.maybe_in_construction);
1764 hstate.add_flag (odr_query->context.maybe_derived_type);
1765 hstate.add_flag (odr_query->context.speculative_maybe_derived_type);
1766 hstate.commit_flag ();
1767 return hstate.end ();
1768 }
1769
1770 /* Compare cache entries T1 and T2. */
1771
1772 inline bool
1773 polymorphic_call_target_hasher::equal (const value_type *t1,
1774 const compare_type *t2)
1775 {
1776 return (t1->type == t2->type && t1->otr_token == t2->otr_token
1777 && t1->context.offset == t2->context.offset
1778 && t1->context.speculative_offset == t2->context.speculative_offset
1779 && t1->context.outer_type == t2->context.outer_type
1780 && t1->context.speculative_outer_type == t2->context.speculative_outer_type
1781 && t1->context.maybe_in_construction
1782 == t2->context.maybe_in_construction
1783 && t1->context.maybe_derived_type == t2->context.maybe_derived_type
1784 && (t1->context.speculative_maybe_derived_type
1785 == t2->context.speculative_maybe_derived_type));
1786 }
1787
1788 /* Remove entry in polymorphic call target cache hash. */
1789
1790 inline void
1791 polymorphic_call_target_hasher::remove (value_type *v)
1792 {
1793 v->targets.release ();
1794 free (v);
1795 }
1796
1797 /* Polymorphic call target query cache. */
1798
1799 typedef hash_table<polymorphic_call_target_hasher>
1800 polymorphic_call_target_hash_type;
1801 static polymorphic_call_target_hash_type *polymorphic_call_target_hash;
1802
1803 /* Destroy polymorphic call target query cache. */
1804
1805 static void
1806 free_polymorphic_call_targets_hash ()
1807 {
1808 if (cached_polymorphic_call_targets)
1809 {
1810 delete polymorphic_call_target_hash;
1811 polymorphic_call_target_hash = NULL;
1812 delete cached_polymorphic_call_targets;
1813 cached_polymorphic_call_targets = NULL;
1814 }
1815 }
1816
1817 /* When a virtual function is removed, we may need to flush the cache. */
1818
1819 static void
1820 devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
1821 {
1822 if (cached_polymorphic_call_targets
1823 && cached_polymorphic_call_targets->contains (n))
1824 free_polymorphic_call_targets_hash ();
1825 }
1826
1827 /* Return true when TYPE contains a polymorphic type and thus is interesting
1828 for devirtualization machinery. */
1829
1830 bool
1831 contains_polymorphic_type_p (const_tree type)
1832 {
1833 type = TYPE_MAIN_VARIANT (type);
1834
1835 if (RECORD_OR_UNION_TYPE_P (type))
1836 {
1837 if (TYPE_BINFO (type)
1838 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
1839 return true;
1840 for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1841 if (TREE_CODE (fld) == FIELD_DECL
1842 && !DECL_ARTIFICIAL (fld)
1843 && contains_polymorphic_type_p (TREE_TYPE (fld)))
1844 return true;
1845 return false;
1846 }
1847 if (TREE_CODE (type) == ARRAY_TYPE)
1848 return contains_polymorphic_type_p (TREE_TYPE (type));
1849 return false;
1850 }
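
/* For illustration (made-up types, ours):

     struct V { virtual ~V (); };   // polymorphic
     struct S { int i; V v; };      // has a field of polymorphic type
     struct T { int i; double d; }; // nothing polymorphic

   contains_polymorphic_type_p returns true for V, S and the array type
   V[4], but false for T. */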
1851
1852 /* THIS->OUTER_TYPE is a type of memory object where object of EXPECTED_TYPE
1853 is contained at THIS->OFFSET. Walk the memory representation of
1854 THIS->OUTER_TYPE and find the outermost class type that matches
1855 EXPECTED_TYPE or contains EXPECTED_TYPE as a base. Update THIS
1856 to represent it.
1857
1858 For example when THIS represents type
1859 class A
1860 {
1861 int a;
1862 class B b;
1863 }
1864 and we look for type at offset sizeof(int), we end up with B and offset 0.
1865 If the same is produced by multiple inheritance, we end up with A and offset
1866 sizeof(int).
1867
1868 If we cannot find the corresponding class, give up by setting
1869 THIS->OUTER_TYPE to EXPECTED_TYPE and THIS->OFFSET to 0.
1870 Return true when the lookup was successful. */
1871
1872 bool
1873 ipa_polymorphic_call_context::restrict_to_inner_class (tree expected_type)
1874 {
1875 tree type = outer_type;
1876 HOST_WIDE_INT cur_offset = offset;
1877 bool speculative = false;
1878 bool speculation_valid = false;
1879 bool valid = false;
1880
1881 if (!outer_type)
1882 {
1883 type = outer_type = expected_type;
1884 offset = cur_offset = 0;
1885 }
1886
1887 if (speculative_outer_type == outer_type
1888 && (!maybe_derived_type
1889 || speculative_maybe_derived_type))
1890 {
1891 speculative_outer_type = NULL;
1892 speculative_offset = 0;
1893 speculative_maybe_derived_type = false;
1894 }
1895
1896 /* See if the speculative type seems to be derived from outer_type.
1897 Then the speculation is valid only if it really is a derived type and
1898 derived types are allowed.
1899 
1900 The test does not really check for derivation; it also accepts the case
1901 where outer_type is a field of speculative_outer_type. In this case either
1902 MAYBE_DERIVED_TYPE is false and we have full non-speculative information,
1903 or the loop below will correctly update SPECULATIVE_OUTER_TYPE
1904 and SPECULATIVE_MAYBE_DERIVED_TYPE. */
1905 if (speculative_outer_type
1906 && speculative_offset >= offset
1907 && contains_type_p (speculative_outer_type,
1908 offset - speculative_offset,
1909 outer_type))
1910 speculation_valid = maybe_derived_type;
1911 else
1912 clear_speculation ();
1913
1914 /* Find the sub-object the constant actually refers to and mark whether it is
1915 an artificial one (as opposed to a user-defined one).
1916
1917 This loop is performed twice; first time for outer_type and second time
1918 for speculative_outer_type. The second iteration has SPECULATIVE set. */
1919 while (true)
1920 {
1921 HOST_WIDE_INT pos, size;
1922 tree fld;
1923
1924 /* On a match, just return what we found. */
1925 if (TREE_CODE (type) == TREE_CODE (expected_type)
1926 && (!in_lto_p
1927 || (TREE_CODE (type) == RECORD_TYPE
1928 && TYPE_BINFO (type)
1929 && polymorphic_type_binfo_p (TYPE_BINFO (type))))
1930 && types_same_for_odr (type, expected_type))
1931 {
1932 if (speculative)
1933 {
1934 gcc_assert (speculation_valid);
1935 gcc_assert (valid);
1936
1937 /* If we did not match the offset, or the speculation adds no new information, give up on it. */
1938 if (cur_offset != 0
1939 || (types_same_for_odr (speculative_outer_type,
1940 outer_type)
1941 && (maybe_derived_type
1942 == speculative_maybe_derived_type)))
1943 clear_speculation ();
1944 return true;
1945 }
1946 else
1947 {
1948 /* A type cannot contain itself at a non-zero offset. In that case
1949 just give up. */
1950 if (cur_offset != 0)
1951 {
1952 valid = false;
1953 goto give_up;
1954 }
1955 valid = true;
1956 /* If speculation is not valid or we determined type precisely,
1957 we are done. */
1958 if (!speculation_valid
1959 || !maybe_derived_type)
1960 {
1961 clear_speculation ();
1962 return true;
1963 }
1964 /* Otherwise look into speculation now. */
1965 else
1966 {
1967 speculative = true;
1968 type = speculative_outer_type;
1969 cur_offset = speculative_offset;
1970 continue;
1971 }
1972 }
1973 }
1974
1975 /* Walk fields and find the corresponding one at OFFSET. */
1976 if (TREE_CODE (type) == RECORD_TYPE)
1977 {
1978 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1979 {
1980 if (TREE_CODE (fld) != FIELD_DECL)
1981 continue;
1982
1983 pos = int_bit_position (fld);
1984 size = tree_to_uhwi (DECL_SIZE (fld));
1985 if (pos <= cur_offset && (pos + size) > cur_offset)
1986 break;
1987 }
1988
1989 if (!fld)
1990 goto give_up;
1991
1992 type = TYPE_MAIN_VARIANT (TREE_TYPE (fld));
1993 cur_offset -= pos;
1994 /* DECL_ARTIFICIAL represents a basetype. */
1995 if (!DECL_ARTIFICIAL (fld))
1996 {
1997 if (!speculative)
1998 {
1999 outer_type = type;
2000 offset = cur_offset;
2001 /* As soon as we see a field containing the type,
2002 we know we are not looking for derivations. */
2003 maybe_derived_type = false;
2004 }
2005 else
2006 {
2007 speculative_outer_type = type;
2008 speculative_offset = cur_offset;
2009 speculative_maybe_derived_type = false;
2010 }
2011 }
2012 }
2013 else if (TREE_CODE (type) == ARRAY_TYPE)
2014 {
2015 tree subtype = TYPE_MAIN_VARIANT (TREE_TYPE (type));
2016
2017 /* Give up if we don't know the array element size or it is not positive. */
2018 if (!tree_fits_shwi_p (TYPE_SIZE (subtype))
2019 || tree_to_shwi (TYPE_SIZE (subtype)) <= 0)
2020 goto give_up;
2021 cur_offset = cur_offset % tree_to_shwi (TYPE_SIZE (subtype));
2022 type = subtype;
2023 if (!speculative)
2024 {
2025 outer_type = type;
2026 offset = cur_offset;
2027 maybe_derived_type = false;
2028 }
2029 else
2030 {
2031 speculative_outer_type = type;
2032 speculative_offset = cur_offset;
2033 speculative_maybe_derived_type = false;
2034 }
2035 }
2036 /* Give up on anything else. */
2037 else
2038 goto give_up;
2039 }
2040
2041 /* If we failed to find the subtype we were looking for, give up and fall back to the
2042 most generic query. */
2043 give_up:
2044 clear_speculation ();
2045 if (valid)
2046 return true;
2047 outer_type = expected_type;
2048 offset = 0;
2049 maybe_derived_type = true;
2050 maybe_in_construction = true;
2051 /* POD can be changed to an instance of a polymorphic type by
2052 placement new. Here we play it safe and assume that any
2053 non-polymorphic type is POD. */
2054 if ((TREE_CODE (type) != RECORD_TYPE
2055 || !TYPE_BINFO (type)
2056 || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
2057 && (!TYPE_SIZE (type)
2058 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
2059 || (cur_offset + tree_to_uhwi (TYPE_SIZE (expected_type)) <=
2060 tree_to_uhwi (TYPE_SIZE (type)))))
2061 return true;
2062 return false;
2063 }
2064
2065 /* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET. */
2066
2067 static bool
2068 contains_type_p (tree outer_type, HOST_WIDE_INT offset,
2069 tree otr_type)
2070 {
2071 ipa_polymorphic_call_context context;
2072 context.offset = offset;
2073 context.outer_type = TYPE_MAIN_VARIANT (outer_type);
2074 return context.restrict_to_inner_class (otr_type);
2075 }
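
/* A small usage sketch (hypothetical layout, assuming a 32-bit int and
   writing the types for their trees; offsets are in bits as elsewhere
   in this file):

     struct B { virtual void f (); };
     struct A { int i; B b; };      // B placed after the int

   contains_type_p (A, 32, B) is true because restrict_to_inner_class
   finds the field b at that position, while contains_type_p (A, 0, B)
   is false since offset 0 holds the int field. */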
2076
2077 /* Lookup base of BINFO that has virtual table VTABLE with OFFSET. */
2078
2079 static tree
2080 subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
2081 tree vtable)
2082 {
2083 tree v = BINFO_VTABLE (binfo);
2084 int i;
2085 tree base_binfo;
2086 unsigned HOST_WIDE_INT this_offset;
2087
2088 if (v)
2089 {
2090 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
2091 gcc_unreachable ();
2092
2093 if (offset == this_offset
2094 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
2095 return binfo;
2096 }
2097
2098 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
2099 if (polymorphic_type_binfo_p (base_binfo))
2100 {
2101 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
2102 if (base_binfo)
2103 return base_binfo;
2104 }
2105 return NULL;
2106 }
2107
2108 /* T is a known constant value of the virtual table pointer.
2109 Store the virtual table to *V and its offset to *OFFSET.
2110 Return false if T does not look like a virtual table reference. */
2111
2112 bool
2113 vtable_pointer_value_to_vtable (const_tree t, tree *v,
2114 unsigned HOST_WIDE_INT *offset)
2115 {
2116 /* We expect &MEM[(void *)&virtual_table + 16B].
2117 We obtain object's BINFO from the context of the virtual table.
2118 This one contains pointer to virtual table represented via
2119 POINTER_PLUS_EXPR. Verify that this pointer matches what
2120 we propagated through.
2121
2122 In the case of virtual inheritance, the virtual tables may
2123 be nested, i.e. the offset may be different from 16 and we may
2124 need to dive into the type representation. */
2125 if (TREE_CODE (t) == ADDR_EXPR
2126 && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
2127 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
2128 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
2129 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
2130 == VAR_DECL)
2131 && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
2132 (TREE_OPERAND (t, 0), 0), 0)))
2133 {
2134 *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
2135 *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
2136 return true;
2137 }
2138
2139 /* An alternative representation, used by the C++ frontend, is POINTER_PLUS_EXPR.
2140 We need to handle it when T comes from a static variable initializer or
2141 BINFO. */
2142 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
2143 {
2144 *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
2145 t = TREE_OPERAND (t, 0);
2146 }
2147 else
2148 *offset = 0;
2149
2150 if (TREE_CODE (t) != ADDR_EXPR)
2151 return false;
2152 *v = TREE_OPERAND (t, 0);
2153 return true;
2154 }
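
/* Sketch of the two accepted shapes (the vtable name and the constant 16
   are invented for illustration):

     &MEM[(void *) &_ZTV1A + 16B]   // ADDR_EXPR of a MEM_REF
     &_ZTV1A p+ 16                  // POINTER_PLUS_EXPR form

   Both store _ZTV1A to *V and 16 to *OFFSET; a plain &_ZTV1A yields
   *OFFSET == 0. */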
2155
2156 /* T is a known constant value of the virtual table pointer. Return BINFO of the
2157 instance type. */
2158
2159 tree
2160 vtable_pointer_value_to_binfo (const_tree t)
2161 {
2162 tree vtable;
2163 unsigned HOST_WIDE_INT offset;
2164
2165 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
2166 return NULL_TREE;
2167
2168 /* FIXME: for stores of construction vtables we return NULL,
2169 because we do not have BINFO for those. Eventually we should fix
2170 our representation to allow this case to be handled, too.
2171 In the case we see a store of a BINFO, however, we may assume
2172 that standard folding will be able to cope with it. */
2173 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
2174 offset, vtable);
2175 }
2176
2177 /* We know that the instance is stored in a variable or parameter
2178 (not dynamically allocated) and we want to disprove the fact
2179 that it may be in construction at the invocation of CALL.
2180
2181 For the variable to be in construction we actually need to
2182 be in the constructor of the corresponding global variable, or
2183 the inline stack of CALL must contain the constructor.
2184 Check this condition. This check works safely only before
2185 IPA passes, because inline stacks may become out of date
2186 later. */
2187
2188 bool
2189 decl_maybe_in_construction_p (tree base, tree outer_type,
2190 gimple call, tree function)
2191 {
2192 outer_type = TYPE_MAIN_VARIANT (outer_type);
2193 gcc_assert (DECL_P (base));
2194
2195 /* After inlining, code unification optimizations may invalidate
2196 inline stacks. Also we need to give up on global variables after
2197 IPA, because addresses of these may have been propagated to their
2198 constructors. */
2199 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
2200 return true;
2201
2202 /* Pure functions cannot make any changes to the dynamic type;
2203 that requires writing to memory. */
2204 if (!auto_var_in_fn_p (base, function)
2205 && flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
2206 return false;
2207
2208 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
2209 block = BLOCK_SUPERCONTEXT (block))
2210 if (BLOCK_ABSTRACT_ORIGIN (block)
2211 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
2212 {
2213 tree fn = BLOCK_ABSTRACT_ORIGIN (block);
2214
2215 if (TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
2216 || (!DECL_CXX_CONSTRUCTOR_P (fn)
2217 && !DECL_CXX_DESTRUCTOR_P (fn)))
2218 {
2219 /* Watch for clones where we constant propagated the first
2220 argument (pointer to the instance). */
2221 fn = DECL_ABSTRACT_ORIGIN (fn);
2222 if (!fn
2223 || !is_global_var (base)
2224 || TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
2225 || (!DECL_CXX_CONSTRUCTOR_P (fn)
2226 && !DECL_CXX_DESTRUCTOR_P (fn)))
2227 continue;
2228 }
2229 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
2230 continue;
2231
2232 /* FIXME: this can go away once we have ODR types equivalency on
2233 LTO level. */
2234 if (in_lto_p && !polymorphic_type_binfo_p (TYPE_BINFO (outer_type)))
2235 return true;
2236 tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (fn)));
2237 if (types_same_for_odr (type, outer_type))
2238 return true;
2239 }
2240
2241 if (TREE_CODE (base) == VAR_DECL
2242 && is_global_var (base))
2243 {
2244 if (TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
2245 || (!DECL_CXX_CONSTRUCTOR_P (function)
2246 && !DECL_CXX_DESTRUCTOR_P (function)))
2247 {
2248 if (!DECL_ABSTRACT_ORIGIN (function))
2249 return false;
2250 /* Watch for clones where we constant propagated the first
2251 argument (pointer to the instance). */
2252 function = DECL_ABSTRACT_ORIGIN (function);
2253 if (!function
2254 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
2255 || (!DECL_CXX_CONSTRUCTOR_P (function)
2256 && !DECL_CXX_DESTRUCTOR_P (function)))
2257 return false;
2258 }
2259 /* FIXME: this can go away once we have ODR types equivalency on
2260 LTO level. */
2261 if (in_lto_p && !polymorphic_type_binfo_p (TYPE_BINFO (outer_type)))
2262 return true;
2263 tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (function)));
2264 if (types_same_for_odr (type, outer_type))
2265 return true;
2266 }
2267 return false;
2268 }
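
/* Example of the distinction being made (hypothetical code, ours):

     struct A { virtual void f (); A () { f (); } };
     void g () { A a; a.f (); }

   If the call to f inside A::A is inlined into g, its inline stack
   contains the constructor and we must answer true. For the direct
   call a.f () in g, which can only run after A::A finished, the walk
   finds no ctor/dtor and we may answer false. */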
2269
2270 /* Produce a polymorphic call context for a call to a method of an instance
2271 that is located within BASE (which is assumed to be a decl) at OFFSET. */
2272
2273 static void
2274 get_polymorphic_call_info_for_decl (ipa_polymorphic_call_context *context,
2275 tree base, HOST_WIDE_INT offset)
2276 {
2277 gcc_assert (DECL_P (base));
2278
2279 context->outer_type = TYPE_MAIN_VARIANT (TREE_TYPE (base));
2280 context->offset = offset;
2281 context->speculative_outer_type = NULL;
2282 context->speculative_offset = 0;
2283 context->speculative_maybe_derived_type = true;
2284 /* Make the very conservative assumption that all objects
2285 may be in construction.
2286 TODO: ipa-prop already contains code that can tell better;
2287 merge it later. */
2288 context->maybe_in_construction = true;
2289 context->maybe_derived_type = false;
2290 }
2291
2292 /* CST is an invariant (address of a decl); try to get a meaningful
2293 polymorphic call context for a polymorphic call of a method
2294 of an instance of OTR_TYPE that is located at OFFSET within this invariant.
2295 Return FALSE if nothing meaningful can be found. */
2296
2297 bool
2298 get_polymorphic_call_info_from_invariant (ipa_polymorphic_call_context *context,
2299 tree cst,
2300 tree otr_type,
2301 HOST_WIDE_INT offset)
2302 {
2303 HOST_WIDE_INT offset2, size, max_size;
2304 tree base;
2305
2306 if (TREE_CODE (cst) != ADDR_EXPR)
2307 return false;
2308
2309 cst = TREE_OPERAND (cst, 0);
2310 base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
2311 if (!DECL_P (base) || max_size == -1 || max_size != size)
2312 return false;
2313
2314 /* Only type-inconsistent programs can have an otr_type that is
2315 not part of outer type. */
2316 if (!contains_type_p (TREE_TYPE (base), offset, otr_type))
2317 return false;
2318
2319 get_polymorphic_call_info_for_decl (context, base, offset);
2320 return true;
2321 }
2322
2323 /* See if OP is an SSA name initialized as a copy or by a single assignment.
2324 If so, walk the SSA graph up. */
2325
2326 static tree
2327 walk_ssa_copies (tree op)
2328 {
2329 STRIP_NOPS (op);
2330 while (TREE_CODE (op) == SSA_NAME
2331 && !SSA_NAME_IS_DEFAULT_DEF (op)
2332 && SSA_NAME_DEF_STMT (op)
2333 && gimple_assign_single_p (SSA_NAME_DEF_STMT (op)))
2334 {
2335 if (gimple_assign_load_p (SSA_NAME_DEF_STMT (op)))
2336 return op;
2337 op = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op));
2338 STRIP_NOPS (op);
2339 }
2340 return op;
2341 }
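
/* For instance (made-up gimple fragment):

     _1 = p_2(D);
     _3 = _1;
     _4 = MEM[(struct A *)_3].fld;

   walk_ssa_copies (_3) walks back through the copy and returns the
   default definition p_2(D), while walk_ssa_copies (_4) returns _4
   itself because its defining statement is a load. */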
2342
2343 /* Given a REF call in FNDECL, determine the class of the polymorphic
2344 call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
2345 CALL is an optional argument giving the actual statement (usually a call)
2346 where the context is used.
2347 Return a pointer to the object described by the context, or a declaration
2348 if we found the instance to be stored in static storage. */
2349
2350 tree
2351 get_polymorphic_call_info (tree fndecl,
2352 tree ref,
2353 tree *otr_type,
2354 HOST_WIDE_INT *otr_token,
2355 ipa_polymorphic_call_context *context,
2356 gimple call)
2357 {
2358 tree base_pointer;
2359 *otr_type = obj_type_ref_class (ref);
2360 *otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));
2361
2362 /* Set up basic info in case we find nothing interesting in the analysis. */
2363 context->speculative_outer_type = NULL;
2364 context->speculative_offset = 0;
2365 context->speculative_maybe_derived_type = true;
2366 context->outer_type = TYPE_MAIN_VARIANT (*otr_type);
2367 context->offset = 0;
2368 base_pointer = OBJ_TYPE_REF_OBJECT (ref);
2369 context->maybe_derived_type = true;
2370 context->maybe_in_construction = true;
2371
2372 /* Walk SSA for outer object. */
2373 do
2374 {
2375 base_pointer = walk_ssa_copies (base_pointer);
2376 if (TREE_CODE (base_pointer) == ADDR_EXPR)
2377 {
2378 HOST_WIDE_INT size, max_size;
2379 HOST_WIDE_INT offset2;
2380 tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
2381 &offset2, &size, &max_size);
2382
2383 /* If this is a varying address, punt. */
2384 if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
2385 && max_size != -1
2386 && max_size == size)
2387 {
2388 /* We found a dereference of a pointer. The type of the pointer
2389 and of the MEM_REF is meaningless, but we can look further. */
2390 if (TREE_CODE (base) == MEM_REF)
2391 {
2392 base_pointer = TREE_OPERAND (base, 0);
2393 context->offset
2394 += offset2 + mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
2395 context->outer_type = NULL;
2396 }
2397 /* We found the base object. In this case the outer_type
2398 is known. */
2399 else if (DECL_P (base))
2400 {
2401 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (base)));
2402
2403 /* Only type-inconsistent programs can have an otr_type that is
2404 not part of outer type. */
2405 if (!contains_type_p (TREE_TYPE (base),
2406 context->offset + offset2, *otr_type))
2407 {
2408 /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
2409 code sequences; we arrange the calls to be builtin_unreachable
2410 later. */
2411 *otr_token = INT_MAX;
2412 return base_pointer;
2413 }
2414 get_polymorphic_call_info_for_decl (context, base,
2415 context->offset + offset2);
2416 if (context->maybe_in_construction && call)
2417 context->maybe_in_construction
2418 = decl_maybe_in_construction_p (base,
2419 context->outer_type,
2420 call,
2421 fndecl);
2422 return base;
2423 }
2424 else
2425 break;
2426 }
2427 else
2428 break;
2429 }
2430 else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
2431 && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
2432 {
2433 context->offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
2434 * BITS_PER_UNIT;
2435 base_pointer = TREE_OPERAND (base_pointer, 0);
2436 }
2437 else
2438 break;
2439 }
2440 while (true);
2441
2442 /* Try to determine type of the outer object. */
2443 if (TREE_CODE (base_pointer) == SSA_NAME
2444 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
2445 && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
2446 {
2447 /* See if the parameter is the THIS pointer of a method. */
2448 if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
2449 && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
2450 {
2451 context->outer_type
2452 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
2453 gcc_assert (TREE_CODE (context->outer_type) == RECORD_TYPE);
2454
2455 /* Dynamic casting has possibly upcasted the type
2456 in the hierarchy. In this case the outer type is less
2457 informative than inner type and we should forget
2458 about it. */
2459 if (!contains_type_p (context->outer_type, context->offset,
2460 *otr_type))
2461 {
2462 context->outer_type = NULL;
2463 return base_pointer;
2464 }
2465
2466 /* If the function is a constructor or destructor, then
2467 the type is possibly in construction, but we know
2468 it is not a derived type. */
2469 if (DECL_CXX_CONSTRUCTOR_P (fndecl)
2470 || DECL_CXX_DESTRUCTOR_P (fndecl))
2471 {
2472 context->maybe_in_construction = true;
2473 context->maybe_derived_type = false;
2474 }
2475 else
2476 {
2477 context->maybe_derived_type = true;
2478 context->maybe_in_construction = false;
2479 }
2480 return base_pointer;
2481 }
2482 /* Non-PODs passed by value are really passed by invisible
2483 reference. In this case we also know the type of the
2484 object. */
2485 if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
2486 {
2487 context->outer_type
2488 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
2489 gcc_assert (!POINTER_TYPE_P (context->outer_type));
2490 /* Only type-inconsistent programs can have an otr_type that is
2491 not part of outer type. */
2492 if (!contains_type_p (context->outer_type, context->offset,
2493 *otr_type))
2494 {
2495 /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
2496 code sequences; we arrange the calls to be builtin_unreachable
2497 later. */
2498 *otr_token = INT_MAX;
2499 return base_pointer;
2500 }
2501 context->maybe_derived_type = false;
2502 context->maybe_in_construction = false;
2503 return base_pointer;
2504 }
2505 }
2506
2507 tree base_type = TREE_TYPE (base_pointer);
2508
2509 if (TREE_CODE (base_pointer) == SSA_NAME
2510 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
2511 && TREE_CODE (SSA_NAME_VAR (base_pointer)) != PARM_DECL)
2512 {
2513 /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
2514 code sequences; we arrange the calls to be builtin_unreachable
2515 later. */
2516 *otr_token = INT_MAX;
2517 return base_pointer;
2518 }
2519 if (TREE_CODE (base_pointer) == SSA_NAME
2520 && SSA_NAME_DEF_STMT (base_pointer)
2521 && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
2522 base_type = TREE_TYPE (gimple_assign_rhs1
2523 (SSA_NAME_DEF_STMT (base_pointer)));
2524
2525 if (POINTER_TYPE_P (base_type)
2526 && contains_type_p (TYPE_MAIN_VARIANT (TREE_TYPE (base_type)),
2527 context->offset,
2528 *otr_type))
2529 {
2530 context->speculative_outer_type = TYPE_MAIN_VARIANT
2531 (TREE_TYPE (base_type));
2532 context->speculative_offset = context->offset;
2533 context->speculative_maybe_derived_type = true;
2534 }
2535 /* TODO: There are multiple ways to derive a type. For instance
2536 if BASE_POINTER is passed to a constructor call prior to our reference.
2537 We do not do this kind of flow-sensitive analysis yet. */
2538 return base_pointer;
2539 }
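
/* A worked example of the THIS-pointer case above (hypothetical C++):

     void A::g () { this->f (); }   // f is virtual

   BASE_POINTER ends up being the default definition of the THIS
   parameter, so CONTEXT gets outer_type A at offset 0 with
   maybe_derived_type set (an object of a derived class may be passed),
   unless A::g is a constructor or destructor, in which case
   maybe_in_construction is set instead. */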
2540
2541 /* Structure to be passed in between detect_type_change and
2542 check_stmt_for_type_change. */
2543
2544 struct type_change_info
2545 {
2546 /* Offset into the object where there is the virtual method pointer we are
2547 looking for. */
2548 HOST_WIDE_INT offset;
2549 /* The declaration or SSA_NAME pointer of the base that we are checking for
2550 type change. */
2551 tree instance;
2552 /* The reference to virtual table pointer used. */
2553 tree vtbl_ptr_ref;
2554 tree otr_type;
2555 /* If we actually can tell the type that the object has changed to, it is
2556 stored in this field. Otherwise it remains NULL_TREE. */
2557 tree known_current_type;
2558 HOST_WIDE_INT known_current_offset;
2559
2560 /* Set to true if dynamic type change has been detected. */
2561 bool type_maybe_changed;
2562 /* Set to true if multiple types have been encountered. known_current_type
2563 must be disregarded in that case. */
2564 bool multiple_types_encountered;
2565 /* Set to true if we possibly missed some dynamic type changes and we should
2566 consider the set to be speculative. */
2567 bool speculative;
2568 bool seen_unanalyzed_store;
2569 };
2570
2571 /* Return true if STMT is not a call and can modify a virtual method table pointer.
2572 We take advantage of the fact that vtable stores must appear within constructor
2573 and destructor functions. */
2574
2575 bool
2576 noncall_stmt_may_be_vtbl_ptr_store (gimple stmt)
2577 {
2578 if (is_gimple_assign (stmt))
2579 {
2580 tree lhs = gimple_assign_lhs (stmt);
2581
2582 if (gimple_clobber_p (stmt))
2583 return false;
2584 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
2585 {
2586 if (flag_strict_aliasing
2587 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
2588 return false;
2589
2590 if (TREE_CODE (lhs) == COMPONENT_REF
2591 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
2592 return false;
2593 /* In the future we might want to use get_base_ref_and_offset to find
2594 if there is a field corresponding to the offset and if so, proceed
2595 almost as if it were a component ref. */
2596 }
2597 }
2598
2599 /* Code unification may mess with inline stacks. */
2600 if (cfun->after_inlining)
2601 return true;
2602
2603 /* Walk the inline stack and watch out for ctors/dtors.
2604 TODO: Maybe we can require the store to appear in the toplevel
2605 block of the CTOR/DTOR. */
2606 for (tree block = gimple_block (stmt); block && TREE_CODE (block) == BLOCK;
2607 block = BLOCK_SUPERCONTEXT (block))
2608 if (BLOCK_ABSTRACT_ORIGIN (block)
2609 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
2610 {
2611 tree fn = BLOCK_ABSTRACT_ORIGIN (block);
2612
2613 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
2614 return false;
2615 return (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
2616 && (DECL_CXX_CONSTRUCTOR_P (fn)
2617 || DECL_CXX_DESTRUCTOR_P (fn)));
2618 }
2619 return (TREE_CODE (TREE_TYPE (current_function_decl)) == METHOD_TYPE
2620 && (DECL_CXX_CONSTRUCTOR_P (current_function_decl)
2621 || DECL_CXX_DESTRUCTOR_P (current_function_decl)));
2622 }
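
/* For illustration (made-up gimple, ours): a store produced by an
   inlined constructor, such as

     MEM[(struct A *)this_1]._vptr.A = &_ZTV1A + 16;

   appearing in a BLOCK whose abstract origin is A::A passes the checks
   above, while an assignment to an ordinary non-pointer field in a
   plain function is rejected early. */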
2623
2624 /* If STMT can be proved to be an assignment to the virtual method table
2625 pointer of ANALYZED_OBJ and the type associated with the new table
2626 can be identified, return that type. Otherwise return NULL_TREE. */
2627
2628 static tree
2629 extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci,
2630 HOST_WIDE_INT *type_offset)
2631 {
2632 HOST_WIDE_INT offset, size, max_size;
2633 tree lhs, rhs, base, binfo;
2634
2635 if (!gimple_assign_single_p (stmt))
2636 return NULL_TREE;
2637
2638 lhs = gimple_assign_lhs (stmt);
2639 rhs = gimple_assign_rhs1 (stmt);
2640 if (TREE_CODE (lhs) != COMPONENT_REF
2641 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
2642 return NULL_TREE;
2643
2644 if (tci->vtbl_ptr_ref && operand_equal_p (lhs, tci->vtbl_ptr_ref, 0))
2645 ;
2646 else
2647 {
2648 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
2649 if (offset != tci->offset
2650 || size != POINTER_SIZE
2651 || max_size != POINTER_SIZE)
2652 return NULL_TREE;
2653 if (DECL_P (tci->instance))
2654 {
2655 if (base != tci->instance)
2656 return NULL_TREE;
2657 }
2658 else if (TREE_CODE (base) == MEM_REF)
2659 {
2660 if (!operand_equal_p (tci->instance, TREE_OPERAND (base, 0), 0)
2661 || !integer_zerop (TREE_OPERAND (base, 1)))
2662 return NULL_TREE;
2663 }
2664 else if (!operand_equal_p (tci->instance, base, 0)
2665 || tci->offset)
2666 return NULL_TREE;
2667 }
2668
2669 binfo = vtable_pointer_value_to_binfo (rhs);
2670
2671 if (!binfo)
2672 return NULL;
2673 *type_offset = tree_to_shwi (BINFO_OFFSET (binfo)) * BITS_PER_UNIT;
2674 if (TYPE_BINFO (BINFO_TYPE (binfo)) == binfo)
2675 return BINFO_TYPE (binfo);
2676
2677 /* TODO: Figure out the type containing BINFO. */
2678 return NULL;
2679 }
2680
2681 /* Record dynamic type change of TCI to TYPE. */
2682
2683 void
2684 record_known_type (struct type_change_info *tci, tree type, HOST_WIDE_INT offset)
2685 {
2686 if (dump_file)
2687 {
2688 if (type)
2689 {
2690 fprintf (dump_file, " Recording type: ");
2691 print_generic_expr (dump_file, type, TDF_SLIM);
2692 fprintf (dump_file, " at offset %i\n", (int)offset);
2693 }
2694 else
2695 fprintf (dump_file, " Recording unknown type\n");
2696 }
2697 if (tci->type_maybe_changed
2698 && (type != tci->known_current_type
2699 || offset != tci->known_current_offset))
2700 tci->multiple_types_encountered = true;
2701 tci->known_current_type = type;
2702 tci->known_current_offset = offset;
2703 tci->type_maybe_changed = true;
2704 }
2705
2706 /* Callback of walk_aliased_vdefs and a helper function for
2707 detect_type_change to check whether a particular statement may modify
2708 the virtual table pointer, and if possible also determine the new type of
2709 the (sub-)object. It stores its result into DATA, which points to a
2710 type_change_info structure. */
2711
2712 static bool
2713 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
2714 {
2715 gimple stmt = SSA_NAME_DEF_STMT (vdef);
2716 struct type_change_info *tci = (struct type_change_info *) data;
2717 tree fn;
2718
2719 /* If we already gave up, just terminate the rest of the walk. */
2720 if (tci->multiple_types_encountered)
2721 return true;
2722
2723 if (is_gimple_call (stmt))
2724 {
2725 if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
2726 return false;
2727
2728 /* Check for a constructor call. */
2729 if ((fn = gimple_call_fndecl (stmt)) != NULL_TREE
2730 && DECL_CXX_CONSTRUCTOR_P (fn)
2731 && TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
2732 && gimple_call_num_args (stmt))
2733 {
2734 tree op = walk_ssa_copies (gimple_call_arg (stmt, 0));
2735 tree type = method_class_type (TREE_TYPE (fn));
2736 HOST_WIDE_INT offset = 0, size, max_size;
2737
2738 if (dump_file)
2739 {
2740 fprintf (dump_file, " Checking constructor call: ");
2741 print_gimple_stmt (dump_file, stmt, 0, 0);
2742 }
2743
2744 /* See if the THIS parameter seems like an instance pointer. */
2745 if (TREE_CODE (op) == ADDR_EXPR)
2746 {
2747 op = get_ref_base_and_extent (TREE_OPERAND (op, 0),
2748 &offset, &size, &max_size);
2749 if (size != max_size || max_size == -1)
2750 {
2751 tci->speculative = true;
2752 return false;
2753 }
2754 if (op && TREE_CODE (op) == MEM_REF)
2755 {
2756 if (!tree_fits_shwi_p (TREE_OPERAND (op, 1)))
2757 {
2758 tci->speculative = true;
2759 return false;
2760 }
2761 offset += tree_to_shwi (TREE_OPERAND (op, 1))
2762 * BITS_PER_UNIT;
2763 op = TREE_OPERAND (op, 0);
2764 }
2765 else if (DECL_P (op))
2766 ;
2767 else
2768 {
2769 tci->speculative = true;
2770 return false;
2771 }
2772 op = walk_ssa_copies (op);
2773 }
2774 if (operand_equal_p (op, tci->instance, 0)
2775 && TYPE_SIZE (type)
2776 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2777 && tree_fits_shwi_p (TYPE_SIZE (type))
2778 && tree_to_shwi (TYPE_SIZE (type)) + offset > tci->offset)
2779 {
2780 record_known_type (tci, type, tci->offset - offset);
2781 return true;
2782 }
2783 }
2784 /* Calls may possibly change the dynamic type by placement new. Assume
2785 it will not happen, but make the result speculative only. */
2786 if (dump_file)
2787 {
2788 fprintf (dump_file, " Function call may change dynamic type:");
2789 print_gimple_stmt (dump_file, stmt, 0, 0);
2790 }
2791 tci->speculative = true;
2792 return false;
2793 }
2794 /* Check for inlined virtual table store. */
2795 else if (noncall_stmt_may_be_vtbl_ptr_store (stmt))
2796 {
2797 tree type;
2798 HOST_WIDE_INT offset = 0;
2799 if (dump_file)
2800 {
2801 fprintf (dump_file, " Checking vtbl store: ");
2802 print_gimple_stmt (dump_file, stmt, 0, 0);
2803 }
2804
2805 type = extr_type_from_vtbl_ptr_store (stmt, tci, &offset);
2806 gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
2807 if (!type)
2808 {
2809 if (dump_file)
2810 fprintf (dump_file, " Unanalyzed store may change type.\n");
2811 tci->seen_unanalyzed_store = true;
2812 tci->speculative = true;
2813 }
2814 else
2815 record_known_type (tci, type, offset);
2816 return true;
2817 }
2818 else
2819 return false;
2820 }
2821
2822 /* THIS is a polymorphic call context obtained from get_polymorphic_context.
2823 OTR_OBJECT is a pointer to the instance returned by OBJ_TYPE_REF_OBJECT.
2824 INSTANCE is a pointer to the outer instance as returned by
2825 get_polymorphic_context. To avoid creation of temporary expressions,
2826 INSTANCE may also be a declaration if get_polymorphic_context found the
2827 value to be in static storage.
2828
2829 If the type of instance is not fully determined
2830 (either OUTER_TYPE is unknown or MAYBE_IN_CONSTRUCTION/INCLUDE_DERIVED_TYPES
2831 is set), try to walk memory writes and find the actual construction of the
2832 instance.
2833
2834 We do not include this analysis in the context analysis itself, because
2835 it needs memory SSA to be fully built and the walk may be expensive.
2836 So it is not suitable for use within fold_stmt and similar uses. */
2837
2838 bool
2839 ipa_polymorphic_call_context::get_dynamic_type (tree instance,
2840 tree otr_object,
2841 tree otr_type,
2842 gimple call)
2843 {
2844 struct type_change_info tci;
2845 ao_ref ao;
2846 bool function_entry_reached = false;
2847 tree instance_ref = NULL;
2848 gimple stmt = call;
2849
2850 if (!maybe_in_construction && !maybe_derived_type)
2851 return false;
2852
2853 /* We need to obtain a reference to the virtual table pointer. It is better
2854 to look it up in the code rather than build our own. This requires a bit
2855 of pattern matching, but we end up verifying that what we found is
2856 correct.
2857
2858 What we pattern match is:
2859
2860 tmp = instance->_vptr.A; // vtbl ptr load
2861 tmp2 = tmp[otr_token]; // vtable lookup
2862 OBJ_TYPE_REF(tmp2;instance->0) (instance);
2863
2864 We want to start alias oracle walk from vtbl pointer load,
2865 but we may not be able to identify it, for example, when PRE moved the
2866 load around. */
2867
2868 if (gimple_code (call) == GIMPLE_CALL)
2869 {
2870 tree ref = gimple_call_fn (call);
2871 HOST_WIDE_INT offset2, size, max_size;
2872
2873 if (TREE_CODE (ref) == OBJ_TYPE_REF)
2874 {
2875 ref = OBJ_TYPE_REF_EXPR (ref);
2876 ref = walk_ssa_copies (ref);
2877
2878 /* Check if definition looks like vtable lookup. */
2879 if (TREE_CODE (ref) == SSA_NAME
2880 && !SSA_NAME_IS_DEFAULT_DEF (ref)
2881 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref))
2882 && TREE_CODE (gimple_assign_rhs1
2883 (SSA_NAME_DEF_STMT (ref))) == MEM_REF)
2884 {
2885 ref = get_base_address
2886 (TREE_OPERAND (gimple_assign_rhs1
2887 (SSA_NAME_DEF_STMT (ref)), 0));
2888 ref = walk_ssa_copies (ref);
2889 /* Find the base address of the lookup and see if it looks like
2890 a vptr load. */
2891 if (TREE_CODE (ref) == SSA_NAME
2892 && !SSA_NAME_IS_DEFAULT_DEF (ref)
2893 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref)))
2894 {
2895 tree ref_exp = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (ref));
2896 tree base_ref = get_ref_base_and_extent
2897 (ref_exp, &offset2, &size, &max_size);
2898
2899 /* Finally verify that what we found looks like a read from OTR_OBJECT
2900 or from INSTANCE with offset OFFSET. */
2901 if (base_ref
2902 && ((TREE_CODE (base_ref) == MEM_REF
2903 && ((offset2 == offset
2904 && TREE_OPERAND (base_ref, 0) == instance)
2905 || (!offset2 && TREE_OPERAND (base_ref, 0) == otr_object)))
2906 || (DECL_P (instance) && base_ref == instance
2907 && offset2 == offset)))
2908 {
2909 stmt = SSA_NAME_DEF_STMT (ref);
2910 instance_ref = ref_exp;
2911 }
2912 }
2913 }
2914 }
2915 }
2916
2917 /* If we failed to look up the reference in the code, build our own. */
2918 if (!instance_ref)
2919 {
2920 /* If the statement in question does not use memory, we can't tell
2921 anything. */
2922 if (!gimple_vuse (stmt))
2923 return false;
2924 ao_ref_init_from_ptr_and_size (&ao, otr_object, NULL);
2925 }
2926 else
2927 /* Otherwise use the real reference. */
2928 ao_ref_init (&ao, instance_ref);
2929
2930 /* We look for a vtbl pointer read. */
2931 ao.size = POINTER_SIZE;
2932 ao.max_size = ao.size;
2933 ao.ref_alias_set
2934 = get_deref_alias_set (TREE_TYPE (BINFO_VTABLE (TYPE_BINFO (otr_type))));
2935
2936 if (dump_file)
2937 {
2938 fprintf (dump_file, "Determining dynamic type for call: ");
2939 print_gimple_stmt (dump_file, call, 0, 0);
2940 fprintf (dump_file, " Starting walk at: ");
2941 print_gimple_stmt (dump_file, stmt, 0, 0);
2942 fprintf (dump_file, " instance pointer: ");
2943 print_generic_expr (dump_file, otr_object, TDF_SLIM);
2944 fprintf (dump_file, " Outer instance pointer: ");
2945 print_generic_expr (dump_file, instance, TDF_SLIM);
2946 fprintf (dump_file, " offset: %i (bits)", (int)offset);
2947 fprintf (dump_file, " vtbl reference: ");
2948 print_generic_expr (dump_file, instance_ref, TDF_SLIM);
2949 fprintf (dump_file, "\n");
2950 }
2951
2952 tci.offset = offset;
2953 tci.instance = instance;
2954 tci.vtbl_ptr_ref = instance_ref;
2955 gcc_assert (TREE_CODE (instance) != MEM_REF);
2956 tci.known_current_type = NULL_TREE;
2957 tci.known_current_offset = 0;
2958 tci.otr_type = otr_type;
2959 tci.type_maybe_changed = false;
2960 tci.multiple_types_encountered = false;
2961 tci.speculative = false;
2962 tci.seen_unanalyzed_store = false;
2963
2964 walk_aliased_vdefs (&ao, gimple_vuse (stmt), check_stmt_for_type_change,
2965 &tci, NULL, &function_entry_reached);
2966
2967 /* If we did not find any type changing statements, we may still drop
2968 the maybe_in_construction flag if the context already has an outer type.
2969
2970 Here we make special assumptions about both constructors and
2971 destructors which are all the functions that are allowed to alter the
2972 VMT pointers. It assumes that destructors begin with assignment into
2973 all VMT pointers and that constructors essentially look in the
2974 following way:
2975
2976 1) The very first thing they do is that they call constructors of
2977 ancestor sub-objects that have them.
2978
2979 2) Then VMT pointers of this and all its ancestors are set to new
2980 values corresponding to the type corresponding to the constructor.
2981
2982 3) Only afterwards, other stuff such as constructor of member
2983 sub-objects and the code written by the user is run. Only this may
2984 include calling virtual functions, directly or indirectly.
2985
2986 4) placement new cannot be used to change the type of non-POD
2987 allocated variables.
2988
2989 There is no way to call a constructor of an ancestor sub-object in any
2990 other way.
2991
2992 This means that we do not have to care whether constructors get the
2993 correct type information because they will always change it (in fact,
2994 if we define the type to be given by the VMT pointer, it is undefined).
2995
2996 The most important fact to derive from the above is that if, for some
2997 statement in the section 3, we try to detect whether the dynamic type
2998 has changed, we can safely ignore all calls as we examine the function
2999 body backwards until we reach statements in section 2 because these
3000 calls cannot be ancestor constructors or destructors (if the input is
3001 not bogus) and so do not change the dynamic type (this holds true only
3002 for automatically allocated objects but at the moment we devirtualize
3003 only these). We then must detect that statements in section 2 change
3004 the dynamic type and can try to derive the new type. That is enough
3005 and we can stop, we will never see the calls into constructors of
3006 sub-objects in this code.
3007
3008 Therefore if the static outer type was found (outer_type)
3009 we can safely ignore tci.speculative that is set on calls and give up
3010 only if there was a dynamic type store that may affect the given variable
3011 (seen_unanalyzed_store). */
3012
3013 if (!tci.type_maybe_changed)
3014 {
3015 if (!outer_type || tci.seen_unanalyzed_store)
3016 return false;
3017 if (maybe_in_construction)
3018 maybe_in_construction = false;
3019 if (dump_file)
3020 fprintf (dump_file, " No dynamic type change found.\n");
3021 return true;
3022 }
3023
3024 if (tci.known_current_type
3025 && !function_entry_reached
3026 && !tci.multiple_types_encountered)
3027 {
3028 if (!tci.speculative
3029 /* Again, for instances located in static storage we are interested only
3030 in constructor stores. */
3031 || (outer_type
3032 && !tci.seen_unanalyzed_store
3033 && offset == tci.offset
3034 && types_same_for_odr (tci.known_current_type,
3035 outer_type)))
3036 {
3037 outer_type = tci.known_current_type;
3038 offset = tci.known_current_offset;
3039 maybe_in_construction = false;
3040 maybe_derived_type = false;
3041 if (dump_file)
3042 fprintf (dump_file, " Determined dynamic type.\n");
3043 }
3044 else if (!speculative_outer_type
3045 || speculative_maybe_derived_type)
3046 {
3047 speculative_outer_type = tci.known_current_type;
3048 speculative_offset = tci.known_current_offset;
3049 speculative_maybe_derived_type = false;
3050 if (dump_file)
3051 fprintf (dump_file, " Determined speculative dynamic type.\n");
3052 }
3053 }
3054 else if (dump_file)
3055 fprintf (dump_file, " Found multiple types.\n");
3056
3057 return true;
3058 }
3059
3060 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
3061 Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
3062 and insert them into NODES.
3063 
3064 MATCHED_VTABLES and INSERTED are used to avoid duplicated work. */
3065
3066 static void
3067 record_targets_from_bases (tree otr_type,
3068 HOST_WIDE_INT otr_token,
3069 tree outer_type,
3070 HOST_WIDE_INT offset,
3071 vec <cgraph_node *> &nodes,
3072 hash_set<tree> *inserted,
3073 hash_set<tree> *matched_vtables,
3074 bool *completep)
3075 {
3076 while (true)
3077 {
3078 HOST_WIDE_INT pos, size;
3079 tree base_binfo;
3080 tree fld;
3081
3082 if (types_same_for_odr (outer_type, otr_type))
3083 return;
3084
3085 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
3086 {
3087 if (TREE_CODE (fld) != FIELD_DECL)
3088 continue;
3089
3090 pos = int_bit_position (fld);
3091 size = tree_to_shwi (DECL_SIZE (fld));
3092 if (pos <= offset && (pos + size) > offset
3093 /* Do not get confused by zero sized bases. */
3094 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
3095 break;
3096 }
3097 /* Within a class type we should always find the corresponding field. */
3098 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
3099
3100 /* Nonbasetypes should have been stripped by outer_class_type. */
3101 gcc_assert (DECL_ARTIFICIAL (fld));
3102
3103 outer_type = TREE_TYPE (fld);
3104 offset -= pos;
3105
3106 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
3107 offset, otr_type);
3108 if (!base_binfo)
3109 {
3110 gcc_assert (odr_violation_reported);
3111 return;
3112 }
3113 gcc_assert (base_binfo);
3114 if (!matched_vtables->add (BINFO_VTABLE (base_binfo)))
3115 {
3116 bool can_refer;
3117 tree target = gimple_get_virt_method_for_binfo (otr_token,
3118 base_binfo,
3119 &can_refer);
3120 if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
3121 maybe_record_node (nodes, target, inserted, can_refer, completep);
3122 matched_vtables->add (BINFO_VTABLE (base_binfo));
3123 }
3124 }
3125 }
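
/* Sketch of why this walk exists (hypothetical hierarchy):

     struct A { virtual void f (); };
     struct B : A { virtual void f (); };

   While a B object is in construction, its vtable pointer temporarily
   points to A's vtable, so a call that may happen in construction must
   record A::f in addition to B::f; this function finds such base
   methods of OUTER_TYPE. */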
3126
3127 /* When a virtual table is removed, we may need to flush the cache. */
3128
3129 static void
3130 devirt_variable_node_removal_hook (varpool_node *n,
3131 void *d ATTRIBUTE_UNUSED)
3132 {
3133 if (cached_polymorphic_call_targets
3134 && DECL_VIRTUAL_P (n->decl)
3135 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
3136 free_polymorphic_call_targets_hash ();
3137 }
3138
3139 /* Records how many calls would benefit from a given type being final. */
3140
3141 struct odr_type_warn_count
3142 {
3143 tree type;
3144 int count;
3145 gcov_type dyn_count;
3146 };
3147
3148 /* Records how many calls would benefit from a given method being final. */
3149
3150 struct decl_warn_count
3151 {
3152 tree decl;
3153 int count;
3154 gcov_type dyn_count;
3155 };
3156
3157 /* Information about type and decl warnings. */
3158
3159 struct final_warning_record
3160 {
3161 gcov_type dyn_count;
3162 vec<odr_type_warn_count> type_warnings;
3163 hash_map<tree, decl_warn_count> decl_warnings;
3164 };
3165 struct final_warning_record *final_warning_records;
3166
3167 /* Return a vector containing possible targets of a polymorphic call of type
3168 OTR_TYPE calling method OTR_TOKEN within type OTR_OUTER_TYPE at OFFSET.
3169 If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containing
3170 OTR_TYPE and include their virtual methods. This is useful for types
3171 possibly in construction or destruction where the virtual table may
3172 temporarily change to one of the base types. INCLUDE_DERIVED_TYPES makes
3173 us walk the inheritance graph for all derivations.
3174
3175 OTR_TOKEN == INT_MAX is used to mark calls that are provably
3176 undefined and should be redirected to unreachable.
3177
3178 If COMPLETEP is non-NULL, store true if the list is complete.
3179 CACHE_TOKEN (if non-NULL) will get stored to a unique ID of the entry
3180 in the target cache. If the user needs to visit every target list
3181 just once, it can memoize them.
3182
3183 SPECULATION_TARGETS specifies the number of targets that are speculatively
3184 likely. These include targets specified by the speculative part
3185 of the polymorphic call context and also exclude all targets for classes
3186 in construction.
3187
3188 The returned vector is placed into the cache. It is NOT the caller's
3189 responsibility to free it. The vector can be freed on a cgraph_remove_node call if
3190 the particular node is a virtual function present in the cache. */
3191
3192 vec <cgraph_node *>
3193 possible_polymorphic_call_targets (tree otr_type,
3194 HOST_WIDE_INT otr_token,
3195 ipa_polymorphic_call_context context,
3196 bool *completep,
3197 void **cache_token,
3198 int *speculative_targetsp)
3199 {
3200 static struct cgraph_node_hook_list *node_removal_hook_holder;
3201 vec <cgraph_node *> nodes = vNULL;
3202 vec <tree> bases_to_consider = vNULL;
3203 odr_type type, outer_type;
3204 polymorphic_call_target_d key;
3205 polymorphic_call_target_d **slot;
3206 unsigned int i;
3207 tree binfo, target;
3208 bool complete;
3209 bool can_refer;
3210 bool skipped = false;
3211
3212 otr_type = TYPE_MAIN_VARIANT (otr_type);
3213
3214 /* If ODR is not initialized, return empty incomplete list. */
3215 if (!odr_hash)
3216 {
3217 if (completep)
3218 *completep = false;
3219 if (cache_token)
3220 *cache_token = NULL;
3221 if (speculative_targetsp)
3222 *speculative_targetsp = 0;
3223 return nodes;
3224 }
3225
3226 /* If we hit type inconsistency, just return empty list of targets. */
3227 if (otr_token == INT_MAX)
3228 {
3229 if (completep)
3230 *completep = true;
3231 if (cache_token)
3232 *cache_token = NULL;
3233 if (speculative_targetsp)
3234 *speculative_targetsp = 0;
3235 return nodes;
3236 }
3237
3238 /* Do not bother to compute speculative info when the user does not ask for it. */
3239 if (!speculative_targetsp || !context.speculative_outer_type)
3240 context.clear_speculation ();
3241
3242 type = get_odr_type (otr_type, true);
3243
3244 /* Recording type variants would waste the results cache. */
3245 gcc_assert (!context.outer_type
3246 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
3247
3248 /* Lookup the outer class type we want to walk. */
3249 if ((context.outer_type || context.speculative_outer_type)
3250 && !context.restrict_to_inner_class (otr_type))
3251 {
3252 if (completep)
3253 *completep = false;
3254 if (cache_token)
3255 *cache_token = NULL;
3256 if (speculative_targetsp)
3257 *speculative_targetsp = 0;
3258 return nodes;
3259 }
3260
3261 /* Check that restrict_to_inner_class kept the main variant. */
3262 gcc_assert (!context.outer_type
3263 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
3264
3265 /* We canonicalize our query, so we do not need extra hashtable entries. */
3266
3267 /* Without outer type, we have no use for offset. Just do the
3268 basic search from the inner type. */
3269 if (!context.outer_type)
3270 {
3271 context.outer_type = otr_type;
3272 context.offset = 0;
3273 }
3274 /* We need to update our hierarchy if the type does not exist. */
3275 outer_type = get_odr_type (context.outer_type, true);
3276 /* If the type is final, there are no derivations. */
3277 if (TYPE_FINAL_P (outer_type->type))
3278 context.maybe_derived_type = false;
3279
3280 /* Initialize query cache. */
3281 if (!cached_polymorphic_call_targets)
3282 {
3283 cached_polymorphic_call_targets = new hash_set<cgraph_node *>;
3284 polymorphic_call_target_hash
3285 = new polymorphic_call_target_hash_type (23);
3286 if (!node_removal_hook_holder)
3287 {
3288 node_removal_hook_holder =
3289 cgraph_add_node_removal_hook (&devirt_node_removal_hook, NULL);
3290 varpool_add_node_removal_hook (&devirt_variable_node_removal_hook,
3291 NULL);
3292 }
3293 }
3294
3295 /* Lookup cached answer. */
3296 key.type = type;
3297 key.otr_token = otr_token;
3298 key.context = context;
3299 slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
3300 if (cache_token)
3301 *cache_token = (void *)*slot;
3302 if (*slot)
3303 {
3304 if (completep)
3305 *completep = (*slot)->complete;
3306 if (speculative_targetsp)
3307 *speculative_targetsp = (*slot)->speculative_targets;
3308 if ((*slot)->type_warning && final_warning_records)
3309 {
3310 final_warning_records->type_warnings[(*slot)->type_warning - 1].count++;
3311 final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
3312 += final_warning_records->dyn_count;
3313 }
3314 if ((*slot)->decl_warning && final_warning_records)
3315 {
3316 struct decl_warn_count *c =
3317 final_warning_records->decl_warnings.get ((*slot)->decl_warning);
3318 c->count++;
3319 c->dyn_count += final_warning_records->dyn_count;
3320 }
3321 return (*slot)->targets;
3322 }
3323
3324 complete = true;
3325
3326 /* Do actual search. */
3327 timevar_push (TV_IPA_VIRTUAL_CALL);
3328 *slot = XCNEW (polymorphic_call_target_d);
3329 if (cache_token)
3330 *cache_token = (void *)*slot;
3331 (*slot)->type = type;
3332 (*slot)->otr_token = otr_token;
3333 (*slot)->context = context;
3334 (*slot)->speculative_targets = 0;
3335
3336 hash_set<tree> inserted;
3337 hash_set<tree> matched_vtables;
3338
3339 /* First insert targets we speculatively identified as likely. */
3340 if (context.speculative_outer_type)
3341 {
3342 odr_type speculative_outer_type;
3343 bool speculation_complete = true;
3344
3345 /* First insert target from type itself and check if it may have derived types. */
3346 speculative_outer_type = get_odr_type (context.speculative_outer_type, true);
3347 if (TYPE_FINAL_P (speculative_outer_type->type))
3348 context.speculative_maybe_derived_type = false;
3349 binfo = get_binfo_at_offset (TYPE_BINFO (speculative_outer_type->type),
3350 context.speculative_offset, otr_type);
3351 if (binfo)
3352 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
3353 &can_refer);
3354 else
3355 target = NULL;
3356
3357 /* In the case we get a final method, we don't need
3358 to walk derivations. */
3359 if (target && DECL_FINAL_P (target))
3360 context.speculative_maybe_derived_type = false;
3361 if (type_possibly_instantiated_p (speculative_outer_type->type))
3362 maybe_record_node (nodes, target, &inserted, can_refer, &speculation_complete);
3363 if (binfo)
3364 matched_vtables.add (BINFO_VTABLE (binfo));
3365
3366
3367 /* Next walk recursively all derived types. */
3368 if (context.speculative_maybe_derived_type)
3369 for (i = 0; i < speculative_outer_type->derived_types.length(); i++)
3370 possible_polymorphic_call_targets_1 (nodes, &inserted,
3371 &matched_vtables,
3372 otr_type,
3373 speculative_outer_type->derived_types[i],
3374 otr_token, speculative_outer_type->type,
3375 context.speculative_offset,
3376 &speculation_complete,
3377 bases_to_consider,
3378 false);
3379 (*slot)->speculative_targets = nodes.length();
3380 }
3381
3382 /* First see virtual method of type itself. */
3383 binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
3384 context.offset, otr_type);
3385 if (binfo)
3386 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
3387 &can_refer);
3388 else
3389 {
3390 gcc_assert (odr_violation_reported);
3391 target = NULL;
3392 }
3393
3394 /* Destructors are never called through construction virtual tables,
3395 because the type is always known. */
3396 if (target && DECL_CXX_DESTRUCTOR_P (target))
3397 context.maybe_in_construction = false;
3398
3399 if (target)
3400 {
3401 /* In the case we get a final method, we don't need
3402 to walk derivations. */
3403 if (DECL_FINAL_P (target))
3404 context.maybe_derived_type = false;
3405 }
3406
3407 /* If OUTER_TYPE is abstract, we know we are not seeing its instance. */
3408 if (type_possibly_instantiated_p (outer_type->type))
3409 maybe_record_node (nodes, target, &inserted, can_refer, &complete);
3410 else
3411 {
3412 skipped = true;
3413 gcc_assert (in_lto_p || context.maybe_derived_type);
3414 }
3415
3416 if (binfo)
3417 matched_vtables.add (BINFO_VTABLE (binfo));
3418
3419 /* Next walk recursively all derived types. */
3420 if (context.maybe_derived_type)
3421 {
3422 for (i = 0; i < outer_type->derived_types.length(); i++)
3423 possible_polymorphic_call_targets_1 (nodes, &inserted,
3424 &matched_vtables,
3425 otr_type,
3426 outer_type->derived_types[i],
3427 otr_token, outer_type->type,
3428 context.offset, &complete,
3429 bases_to_consider,
3430 context.maybe_in_construction);
3431
3432 if (!outer_type->all_derivations_known)
3433 {
3434 if (final_warning_records)
3435 {
3436 if (complete
3437 && nodes.length () == 1
3438 && warn_suggest_final_types
3439 && !outer_type->derived_types.length ())
3440 {
3441 if (outer_type->id >= (int)final_warning_records->type_warnings.length ())
3442 final_warning_records->type_warnings.safe_grow_cleared
3443 (odr_types.length ());
3444 final_warning_records->type_warnings[outer_type->id].count++;
3445 final_warning_records->type_warnings[outer_type->id].dyn_count
3446 += final_warning_records->dyn_count;
3447 final_warning_records->type_warnings[outer_type->id].type
3448 = outer_type->type;
3449 (*slot)->type_warning = outer_type->id + 1;
3450 }
3451 if (complete
3452 && warn_suggest_final_methods
3453 && nodes.length () == 1
3454 && types_same_for_odr (DECL_CONTEXT (nodes[0]->decl),
3455 outer_type->type))
3456 {
3457 bool existed;
3458 struct decl_warn_count &c =
3459 final_warning_records->decl_warnings.get_or_insert
3460 (nodes[0]->decl, &existed);
3461
3462 if (existed)
3463 {
3464 c.count++;
3465 c.dyn_count += final_warning_records->dyn_count;
3466 }
3467 else
3468 {
3469 c.count = 1;
3470 c.dyn_count = final_warning_records->dyn_count;
3471 c.decl = nodes[0]->decl;
3472 }
3473 (*slot)->decl_warning = nodes[0]->decl;
3474 }
3475 }
3476 complete = false;
3477 }
3478 }
3479
3480 /* Finally walk bases, if asked to. */
3481 if (!(*slot)->speculative_targets)
3482 (*slot)->speculative_targets = nodes.length();
3483
3484 /* Destructors are never called through construction virtual tables,
3485 because the type is always known. One of the entries may be
3486 cxa_pure_virtual, so look at at least two of them. */
3487 if (context.maybe_in_construction)
3488 for (i = 0; i < MIN (nodes.length (), 2); i++)
3489 if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
3490 context.maybe_in_construction = false;
3491 if (context.maybe_in_construction)
3492 {
3493 if (type != outer_type
3494 && (!skipped
3495 || (context.maybe_derived_type
3496 && !type_all_derivations_known_p (outer_type->type))))
3497 record_targets_from_bases (otr_type, otr_token, outer_type->type,
3498 context.offset, nodes, &inserted,
3499 &matched_vtables, &complete);
3500 if (skipped)
3501 maybe_record_node (nodes, target, &inserted, can_refer, &complete);
3502 for (i = 0; i < bases_to_consider.length(); i++)
3503 maybe_record_node (nodes, bases_to_consider[i], &inserted, can_refer, &complete);
3504 }
3505 bases_to_consider.release();
3506
3507 (*slot)->targets = nodes;
3508 (*slot)->complete = complete;
3509 if (completep)
3510 *completep = complete;
3511 if (speculative_targetsp)
3512 *speculative_targetsp = (*slot)->speculative_targets;
3513
3514 timevar_pop (TV_IPA_VIRTUAL_CALL);
3515 return nodes;
3516 }
3517
3518 bool
3519 add_decl_warning (const tree &key ATTRIBUTE_UNUSED, const decl_warn_count &value,
3520 vec<const decl_warn_count*> *vec)
3521 {
3522 vec->safe_push (&value);
3523 return true;
3524 }
3525
3526 /* Dump all possible targets of a polymorphic call. */
3527
3528 void
3529 dump_possible_polymorphic_call_targets (FILE *f,
3530 tree otr_type,
3531 HOST_WIDE_INT otr_token,
3532 const ipa_polymorphic_call_context &ctx)
3533 {
3534 vec <cgraph_node *> targets;
3535 bool final;
3536 odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false);
3537 unsigned int i;
3538 int speculative;
3539
3540 if (!type)
3541 return;
3542 targets = possible_polymorphic_call_targets (otr_type, otr_token,
3543 ctx,
3544 &final, NULL, &speculative);
3545 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
3546 print_generic_expr (f, type->type, TDF_SLIM);
3547 fprintf (f, " token %i\n", (int)otr_token);
3548 if (ctx.outer_type || ctx.offset)
3549 {
3550 fprintf (f, " Contained in type:");
3551 print_generic_expr (f, ctx.outer_type, TDF_SLIM);
3552 fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n",
3553 ctx.offset);
3554 }
3555 if (ctx.speculative_outer_type)
3556 {
3557 fprintf (f, " Speculatively contained in type:");
3558 print_generic_expr (f, ctx.speculative_outer_type, TDF_SLIM);
3559 fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n",
3560 ctx.speculative_offset);
3561 }
3562
3563 fprintf (f, " %s%s%s%s\n ",
3564 final ? "This is a complete list." :
3565 "This is a partial list; extra targets may be defined in other units.",
3566 ctx.maybe_in_construction ? " (base types included)" : "",
3567 ctx.maybe_derived_type ? " (derived types included)" : "",
3568 ctx.speculative_maybe_derived_type ? " (speculative derived types included)" : "");
3569 for (i = 0; i < targets.length (); i++)
3570 {
3571 char *name = NULL;
3572 if (i == (unsigned)speculative)
3573 fprintf (f, "\n Targets that are not likely:\n"
3574 " ");
3575 if (in_lto_p)
3576 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
3577 fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
3578 if (in_lto_p)
3579 free (name);
3580 if (!targets[i]->definition)
3581 fprintf (f, " (no definition%s)",
3582 DECL_DECLARED_INLINE_P (targets[i]->decl)
3583 ? " inline" : "");
3584 }
3585 fprintf (f, "\n\n");
3586 }
3587
3588
3589 /* Return true if N can possibly be a target of a polymorphic call of
3590 OTR_TYPE/OTR_TOKEN. */
3591
3592 bool
3593 possible_polymorphic_call_target_p (tree otr_type,
3594 HOST_WIDE_INT otr_token,
3595 const ipa_polymorphic_call_context &ctx,
3596 struct cgraph_node *n)
3597 {
3598 vec <cgraph_node *> targets;
3599 unsigned int i;
3600 enum built_in_function fcode;
3601 bool final;
3602
3603 if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
3604 && ((fcode = DECL_FUNCTION_CODE (n->decl))
3605 == BUILT_IN_UNREACHABLE
3606 || fcode == BUILT_IN_TRAP))
3607 return true;
3608
3609 if (!odr_hash)
3610 return true;
3611 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
3612 for (i = 0; i < targets.length (); i++)
3613 if (n->semantically_equivalent_p (targets[i]))
3614 return true;
3615
3616 /* At the moment we allow the middle end to dig out new external declarations
3617 as targets of polymorphic calls. */
3618 if (!final && !n->definition)
3619 return true;
3620 return false;
3621 }
3622
3623
3624 /* After callgraph construction new external nodes may appear.
3625 Add them into the graph. */
3626
3627 void
3628 update_type_inheritance_graph (void)
3629 {
3630 struct cgraph_node *n;
3631
3632 if (!odr_hash)
3633 return;
3634 free_polymorphic_call_targets_hash ();
3635 timevar_push (TV_IPA_INHERITANCE);
3636 /* We reconstruct the graph starting from types of all methods seen in
3637 the unit. */
3638 FOR_EACH_FUNCTION (n)
3639 if (DECL_VIRTUAL_P (n->decl)
3640 && !n->definition
3641 && n->real_symbol_p ())
3642 get_odr_type (method_class_type (TYPE_MAIN_VARIANT (TREE_TYPE (n->decl))),
3643 true);
3644 timevar_pop (TV_IPA_INHERITANCE);
3645 }
3646
3647
3648 /* Return true if N looks like a likely target of a polymorphic call.
3649 Rule out cxa_pure_virtual, noreturns, functions declared cold, and
3650 other obvious cases. */
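/* For example (an illustrative sketch, not from the sources): for

     struct A { virtual void f () = 0; };

   the vtable slot of f holds __cxa_pure_virtual, which is a plain
   function (FUNCTION_TYPE rather than METHOD_TYPE) whose only job is
   to terminate the program, so speculating on it would never pay off;
   the METHOD_TYPE check below rules it out.  */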
3651
3652 bool
3653 likely_target_p (struct cgraph_node *n)
3654 {
3655 int flags;
3656 /* cxa_pure_virtual and similar things are not likely. */
3657 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
3658 return false;
3659 flags = flags_from_decl_or_type (n->decl);
3660 if (flags & ECF_NORETURN)
3661 return false;
3662 if (lookup_attribute ("cold",
3663 DECL_ATTRIBUTES (n->decl)))
3664 return false;
3665 if (n->frequency < NODE_FREQUENCY_NORMAL)
3666 return false;
3667 /* If there are no live virtual tables referring to the target,
3668 the only way the target can be called is via an instance coming from
3669 another compilation unit; speculative devirtualization is built around
3670 the assumption that this won't happen. */
3671 if (!referenced_from_vtable_p (n))
3672 return false;
3673 return true;
3674 }
3675
3676 /* Compare type warning records P1 and P2 and choose the one with the larger count;
3677 helper for qsort. */
3678
3679 int
3680 type_warning_cmp (const void *p1, const void *p2)
3681 {
3682 const odr_type_warn_count *t1 = (const odr_type_warn_count *)p1;
3683 const odr_type_warn_count *t2 = (const odr_type_warn_count *)p2;
3684
3685 if (t1->dyn_count < t2->dyn_count)
3686 return 1;
3687 if (t1->dyn_count > t2->dyn_count)
3688 return -1;
3689 return t2->count - t1->count;
3690 }
3691
3692 /* Compare decl warning records P1 and P2 and choose the one with the larger count;
3693 helper for qsort. */
3694
3695 int
3696 decl_warning_cmp (const void *p1, const void *p2)
3697 {
3698 const decl_warn_count *t1 = *(const decl_warn_count * const *)p1;
3699 const decl_warn_count *t2 = *(const decl_warn_count * const *)p2;
3700
3701 if (t1->dyn_count < t2->dyn_count)
3702 return 1;
3703 if (t1->dyn_count > t2->dyn_count)
3704 return -1;
3705 return t2->count - t1->count;
3706 }
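/* Both comparators above sort primarily by profile-estimated dynamic
   count, descending, breaking ties on the static count.  E.g. records
   with (dyn_count, count) of (100, 2), (100, 5) and (40, 9) come out
   as (100, 5), (100, 2), (40, 9), so the most frequently executed
   warning candidates are reported first (an illustrative example, not
   from the sources).  */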
3707
3708 /* The ipa-devirt pass.
3709 When a polymorphic call has only one likely target in the unit,
3710 turn it into a speculative call. */
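/* As an illustrative sketch (hypothetical user code; the actual
   transformation is performed on GIMPLE by
   cgraph_turn_edge_to_speculative):

     struct A { virtual void f (); };
     void g (A *a) { a->f (); }

   when A::f is the single likely target, the call becomes roughly

     if (a->vptr->f == &A::f)
       A::f (a);   // direct call; can be inlined later
     else
       a->f ();    // fallback keeps the code correct for
                   // unexpected targets

   with 80% of the original count and frequency moved to the direct
   edge (see the e->count * 8 / 10 below).  */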
3711
3712 static unsigned int
3713 ipa_devirt (void)
3714 {
3715 struct cgraph_node *n;
3716 hash_set<void *> bad_call_targets;
3717 struct cgraph_edge *e;
3718
3719 int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
3720 int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
3721 int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
3722
3723 /* We can output -Wsuggest-final-methods and -Wsuggest-final-types warnings.
3724 This is implemented by setting up final_warning_records that are updated
3725 by get_polymorphic_call_targets.
3726 We need to clear the cache in this case to trigger recomputation of all
3727 entries. */
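/* As an illustration (hypothetical user code, not part of GCC): for

     struct S { virtual void f (); };  // no derived types anywhere
     void g (S *s) { s->f (); }

   -Wsuggest-final-types suggests declaring S final and
   -Wsuggest-final-methods suggests declaring S::f final; either
   would let calls like s->f () be devirtualized.  */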
3728 if (warn_suggest_final_methods || warn_suggest_final_types)
3729 {
3730 final_warning_records = new (final_warning_record);
3731 final_warning_records->type_warnings = vNULL;
3732 final_warning_records->type_warnings.safe_grow_cleared (odr_types.length ());
3733 free_polymorphic_call_targets_hash ();
3734 }
3735
3736 FOR_EACH_DEFINED_FUNCTION (n)
3737 {
3738 bool update = false;
3739 if (dump_file && n->indirect_calls)
3740 fprintf (dump_file, "\n\nProcessing function %s/%i\n",
3741 n->name (), n->order);
3742 for (e = n->indirect_calls; e; e = e->next_callee)
3743 if (e->indirect_info->polymorphic)
3744 {
3745 struct cgraph_node *likely_target = NULL;
3746 void *cache_token;
3747 bool final;
3748 int speculative_targets;
3749
3750 if (final_warning_records)
3751 final_warning_records->dyn_count = e->count;
3752
3753 vec <cgraph_node *>targets
3754 = possible_polymorphic_call_targets
3755 (e, &final, &cache_token, &speculative_targets);
3756 unsigned int i;
3757
3758 if (dump_file)
3759 dump_possible_polymorphic_call_targets
3760 (dump_file, e);
3761
3762 npolymorphic++;
3763
3764 if (!flag_devirtualize_speculatively)
3765 continue;
3766
3767 if (!cgraph_maybe_hot_edge_p (e))
3768 {
3769 if (dump_file)
3770 fprintf (dump_file, "Call is cold\n\n");
3771 ncold++;
3772 continue;
3773 }
3774 if (e->speculative)
3775 {
3776 if (dump_file)
3777 fprintf (dump_file, "Call is already speculated\n\n");
3778 nspeculated++;
3779
3780 /* When dumping see if we agree with speculation. */
3781 if (!dump_file)
3782 continue;
3783 }
3784 if (bad_call_targets.contains (cache_token))
3785 {
3786 if (dump_file)
3787 fprintf (dump_file, "Target list is known to be useless\n\n");
3788 nmultiple++;
3789 continue;
3790 }
3791 for (i = 0; i < targets.length (); i++)
3792 if (likely_target_p (targets[i]))
3793 {
3794 if (likely_target)
3795 {
3796 if (i < (unsigned) speculative_targets)
3797 {
3798 likely_target = NULL;
3799 if (dump_file)
3800 fprintf (dump_file, "More than one likely target\n\n");
3801 nmultiple++;
3802 }
3803 break;
3804 }
3805 likely_target = targets[i];
3806 }
3807 if (!likely_target)
3808 {
3809 bad_call_targets.add (cache_token);
3810 continue;
3811 }
3812 /* This is reached only when dumping; check if we agree or disagree
3813 with the speculation. */
3814 if (e->speculative)
3815 {
3816 struct cgraph_edge *e2;
3817 struct ipa_ref *ref;
3818 cgraph_speculative_call_info (e, e2, e, ref);
3819 if (e2->callee->ultimate_alias_target ()
3820 == likely_target->ultimate_alias_target ())
3821 {
3822 fprintf (dump_file, "We agree with speculation\n\n");
3823 nok++;
3824 }
3825 else
3826 {
3827 fprintf (dump_file, "We disagree with speculation\n\n");
3828 nwrong++;
3829 }
3830 continue;
3831 }
3832 if (!likely_target->definition)
3833 {
3834 if (dump_file)
3835 fprintf (dump_file, "Target is not a definition\n\n");
3836 nnotdefined++;
3837 continue;
3838 }
3839 /* Do not introduce new references to external symbols. While we
3840 could handle these just fine, it is common for programs to be
3841 compiled against headers defining methods they are never actually
3842 linked with. */
3843 if (DECL_EXTERNAL (likely_target->decl))
3844 {
3845 if (dump_file)
3846 fprintf (dump_file, "Target is external\n\n");
3847 nexternal++;
3848 continue;
3849 }
3850 /* Don't use an implicitly-declared destructor (c++/58678). */
3851 struct cgraph_node *non_thunk_target
3852 = likely_target->function_symbol ();
3853 if (DECL_ARTIFICIAL (non_thunk_target->decl)
3854 && DECL_COMDAT (non_thunk_target->decl))
3855 {
3856 if (dump_file)
3857 fprintf (dump_file, "Target is artificial\n\n");
3858 nartificial++;
3859 continue;
3860 }
3861 if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
3862 && likely_target->can_be_discarded_p ())
3863 {
3864 if (dump_file)
3865 fprintf (dump_file, "Target is overwritable\n\n");
3866 noverwritable++;
3867 continue;
3868 }
3869 else if (dbg_cnt (devirt))
3870 {
3871 if (dump_enabled_p ())
3872 {
3873 location_t locus = gimple_location_safe (e->call_stmt);
3874 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
3875 "speculatively devirtualizing call in %s/%i to %s/%i\n",
3876 n->name (), n->order,
3877 likely_target->name (),
3878 likely_target->order);
3879 }
3880 if (!likely_target->can_be_discarded_p ())
3881 {
3882 cgraph_node *alias;
3883 alias = dyn_cast<cgraph_node *> (likely_target->noninterposable_alias ());
3884 if (alias)
3885 likely_target = alias;
3886 }
3887 nconverted++;
3888 update = true;
3889 cgraph_turn_edge_to_speculative
3890 (e, likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
3891 }
3892 }
3893 if (update)
3894 inline_update_overall_summary (n);
3895 }
3896 if (warn_suggest_final_methods || warn_suggest_final_types)
3897 {
3898 if (warn_suggest_final_types)
3899 {
3900 final_warning_records->type_warnings.qsort (type_warning_cmp);
3901 for (unsigned int i = 0;
3902 i < final_warning_records->type_warnings.length (); i++)
3903 if (final_warning_records->type_warnings[i].count)
3904 {
3905 tree type = final_warning_records->type_warnings[i].type;
3906 warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
3907 OPT_Wsuggest_final_types,
3908 "Declaring type %qD final "
3909 "would enable devirtualization of %i calls",
3910 type,
3911 final_warning_records->type_warnings[i].count);
3912 }
3913 }
3914
3915 if (warn_suggest_final_methods)
3916 {
3917 vec<const decl_warn_count*> decl_warnings_vec = vNULL;
3918
3919 final_warning_records->decl_warnings.traverse
3920 <vec<const decl_warn_count *> *, add_decl_warning> (&decl_warnings_vec);
3921 decl_warnings_vec.qsort (decl_warning_cmp);
3922 for (unsigned int i = 0; i < decl_warnings_vec.length (); i++)
3923 {
3924 tree decl = decl_warnings_vec[i]->decl;
3925 int count = decl_warnings_vec[i]->count;
3926
3927 if (DECL_CXX_DESTRUCTOR_P (decl))
3928 warning_at (DECL_SOURCE_LOCATION (decl),
3929 OPT_Wsuggest_final_methods,
3930 "Declaring virtual destructor of %qD final "
3931 "would enable devirtualization of %i calls",
3932 DECL_CONTEXT (decl), count);
3933 else
3934 warning_at (DECL_SOURCE_LOCATION (decl),
3935 OPT_Wsuggest_final_methods,
3936 "Declaring method %qD final "
3937 "would enable devirtualization of %i calls",
3938 decl, count);
3939 }
3940 }
3941
3942 delete (final_warning_records);
3943 final_warning_records = 0;
3944 }
3945
3946 if (dump_file)
3947 fprintf (dump_file,
3948 "%i polymorphic calls, %i devirtualized,"
3949 " %i speculatively devirtualized, %i cold\n"
3950 "%i have multiple targets, %i overwritable,"
3951 " %i already speculated (%i agree, %i disagree),"
3952 " %i external, %i not defined, %i artificial\n",
3953 npolymorphic, ndevirtualized, nconverted, ncold,
3954 nmultiple, noverwritable, nspeculated, nok, nwrong,
3955 nexternal, nnotdefined, nartificial);
3956 return ndevirtualized ? TODO_remove_functions : 0;
3957 }
3958
3959 namespace {
3960
3961 const pass_data pass_data_ipa_devirt =
3962 {
3963 IPA_PASS, /* type */
3964 "devirt", /* name */
3965 OPTGROUP_NONE, /* optinfo_flags */
3966 TV_IPA_DEVIRT, /* tv_id */
3967 0, /* properties_required */
3968 0, /* properties_provided */
3969 0, /* properties_destroyed */
3970 0, /* todo_flags_start */
3971 ( TODO_dump_symtab ), /* todo_flags_finish */
3972 };
3973
3974 class pass_ipa_devirt : public ipa_opt_pass_d
3975 {
3976 public:
3977 pass_ipa_devirt (gcc::context *ctxt)
3978 : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
3979 NULL, /* generate_summary */
3980 NULL, /* write_summary */
3981 NULL, /* read_summary */
3982 NULL, /* write_optimization_summary */
3983 NULL, /* read_optimization_summary */
3984 NULL, /* stmt_fixup */
3985 0, /* function_transform_todo_flags_start */
3986 NULL, /* function_transform */
3987 NULL) /* variable_transform */
3988 {}
3989
3990 /* opt_pass methods: */
3991 virtual bool gate (function *)
3992 {
3993 return (flag_devirtualize
3994 && (flag_devirtualize_speculatively
3995 || (warn_suggest_final_methods
3996 || warn_suggest_final_types))
3997 && optimize);
3998 }
3999
4000 virtual unsigned int execute (function *) { return ipa_devirt (); }
4001
4002 }; // class pass_ipa_devirt
4003
4004 } // anon namespace
4005
4006 ipa_opt_pass_d *
4007 make_pass_ipa_devirt (gcc::context *ctxt)
4008 {
4009 return new pass_ipa_devirt (ctxt);
4010 }
4011
4012 #include "gt-ipa-devirt.h"