devirt-34.C: New testcase.
[gcc.git] / gcc / ipa-devirt.c
1 /* Basic IPA utilities for type inheritance graph construction and
2 devirtualization.
3 Copyright (C) 2013-2014 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* Brief vocabulary:
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
38
39 OTR = OBJ_TYPE_REF
40 This is the Gimple representation of type information of a polymorphic call.
41 It contains two parameters:
42 otr_type is a type of class whose method is called.
43 otr_token is the index into virtual table where address is taken.
44
45 BINFO
46 This is the type inheritance information attached to each tree
47 RECORD_TYPE by the C++ frontend. It provides information about base
48 types and virtual tables.
49
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
52 BINFO_VTABLE.
53
54 Base types of a given type are enumerated by BINFO_BASE_BINFO
55 vector. Members of this vectors are not BINFOs associated
56 with a base type. Rather they are new copies of BINFOs
57 (base BINFOs). Their virtual tables may differ from
58 virtual table of the base type. Also BINFO_OFFSET specifies
59 offset of the base within the type.
60
61 In the case of single inheritance, the virtual table is shared
62 and BINFO_VTABLE of base BINFO is NULL. In the case of multiple
63 inheritance the individual virtual tables are pointer to by
64 BINFO_VTABLE of base binfos (that differs of BINFO_VTABLE of
65 binfo associated to the base type).
66
67 BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
70 base type.
71
72 token
73 This is an index of virtual method in virtual table associated
74 to the type defining it. Token can be looked up from OBJ_TYPE_REF
75 or from DECL_VINDEX of a given virtual table.
76
77 polymorphic (indirect) call
78 This is the callgraph representation of a virtual method call. Every
79 polymorphic call contains otr_type and otr_token taken from
80 original OBJ_TYPE_REF at callgraph construction time.
81
82 What we do here:
83
84 build_type_inheritance_graph triggers a construction of the type inheritance
85 graph.
86
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
89 inserted into the graph. Also types without virtual methods are not
90 represented at all, though it may be easy to add this.
91
92 The inheritance graph is represented as follows:
93
94 Vertices are structures odr_type. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by ODR rule.
96 (the multiple type nodes appear only with linktime optimization)
97
98 Edges are represented by odr_type->base and odr_type->derived_types.
99 At the moment we do not track offsets of types for multiple inheritance.
100 Adding this is easy.
101
102 possible_polymorphic_call_targets returns, given the parameters found in an
103 indirect polymorphic edge all possible polymorphic call targets of the call.
104
105 pass_ipa_devirt performs simple speculative devirtualization.
106 */
107
108 #include "config.h"
109 #include "system.h"
110 #include "coretypes.h"
111 #include "tm.h"
112 #include "tree.h"
113 #include "print-tree.h"
114 #include "calls.h"
115 #include "cgraph.h"
116 #include "expr.h"
117 #include "tree-pass.h"
118 #include "pointer-set.h"
119 #include "target.h"
120 #include "hash-table.h"
121 #include "inchash.h"
122 #include "tree-pretty-print.h"
123 #include "ipa-utils.h"
124 #include "tree-ssa-alias.h"
125 #include "internal-fn.h"
126 #include "gimple-fold.h"
127 #include "gimple-expr.h"
128 #include "gimple.h"
129 #include "ipa-inline.h"
130 #include "diagnostic.h"
131 #include "tree-dfa.h"
132 #include "demangle.h"
133 #include "dbgcnt.h"
134 #include "stor-layout.h"
135 #include "intl.h"
136
137 static bool odr_types_equivalent_p (tree, tree, bool, bool *, pointer_set_t *);
138
139 static bool odr_violation_reported = false;
140
141 /* Dummy polymorphic call context. */
142
143 const ipa_polymorphic_call_context ipa_dummy_polymorphic_call_context
144 = {0, 0, NULL, NULL, false, true, true};
145
146 /* Pointer set of all call targets appearing in the cache. */
147 static pointer_set_t *cached_polymorphic_call_targets;
148
/* The node of the type inheritance graph.  For each type unique in the
   One Definition Rule (ODR) sense, we produce one node linking all
   main variants of types equivalent to it, bases and derived types.  */

struct GTY(()) odr_type_d
{
  /* Leader type; the representative tree node of this ODR equivalence
     class.  */
  tree type;
  /* All bases; built only for main variants of types.  */
  vec<odr_type> GTY((skip)) bases;
  /* All derived types with virtual methods seen in unit;
     built only for main variants of types.  */
  vec<odr_type> GTY((skip)) derived_types;

  /* All equivalent types, if more than one.  */
  vec<tree, va_gc> *types;
  /* Set of all equivalent types, if NON-NULL.  Used for fast duplicate
     detection in add_type_duplicate.  */
  pointer_set_t * GTY((skip)) types_set;

  /* Unique ID indexing the type in the odr_types array.  */
  int id;
  /* Is it in anonymous namespace?  */
  bool anonymous_namespace;
  /* Do we know about all derivations of given type?  */
  bool all_derivations_known;
  /* Did we report ODR violation here?  */
  bool odr_violated;
};
177
178 static bool contains_type_p (tree, HOST_WIDE_INT, tree);
179
180
181 /* Return true if BINFO corresponds to a type with virtual methods.
182
183 Every type has several BINFOs. One is the BINFO associated by the type
184 while other represents bases of derived types. The BINFOs representing
185 bases do not have BINFO_VTABLE pointer set when this is the single
186 inheritance (because vtables are shared). Look up the BINFO of type
187 and check presence of its vtable. */
188
189 static inline bool
190 polymorphic_type_binfo_p (tree binfo)
191 {
192 /* See if BINFO's type has an virtual table associtated with it. */
193 return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo)));
194 }
195
196 /* Return TRUE if all derived types of T are known and thus
197 we may consider the walk of derived type complete.
198
199 This is typically true only for final anonymous namespace types and types
200 defined within functions (that may be COMDAT and thus shared across units,
201 but with the same set of derived types). */
202
203 static bool
204 type_all_derivations_known_p (tree t)
205 {
206 if (TYPE_FINAL_P (t))
207 return true;
208 if (flag_ltrans)
209 return false;
210 if (type_in_anonymous_namespace_p (t))
211 return true;
212 return (decl_function_context (TYPE_NAME (t)) != NULL);
213 }
214
215 /* Return TURE if type's constructors are all visible. */
216
217 static bool
218 type_all_ctors_visible_p (tree t)
219 {
220 return !flag_ltrans
221 && cgraph_state >= CGRAPH_STATE_CONSTRUCTION
222 /* We can not always use type_all_derivations_known_p.
223 For function local types we must assume case where
224 the function is COMDAT and shared in between units.
225
226 TODO: These cases are quite easy to get, but we need
227 to keep track of C++ privatizing via -Wno-weak
228 as well as the IPA privatizing. */
229 && type_in_anonymous_namespace_p (t);
230 }
231
232 /* Return TRUE if type may have instance. */
233
234 static bool
235 type_possibly_instantiated_p (tree t)
236 {
237 tree vtable;
238 varpool_node *vnode;
239
240 /* TODO: Add abstract types here. */
241 if (!type_all_ctors_visible_p (t))
242 return true;
243
244 vtable = BINFO_VTABLE (TYPE_BINFO (t));
245 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
246 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
247 vnode = varpool_node::get (vtable);
248 return vnode && vnode->definition;
249 }
250
/* One Definition Rule hashtable helpers.  */

struct odr_hasher
{
  /* Table entries are the ODR graph nodes themselves.  */
  typedef odr_type_d value_type;
  /* Lookups are performed with a plain tree type node.  */
  typedef union tree_node compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};
261
262 /* Return type that was declared with T's name so that T is an
263 qualified variant of it. */
264
265 static inline tree
266 main_odr_variant (const_tree t)
267 {
268 if (TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
269 return TREE_TYPE (TYPE_NAME (t));
270 /* Unnamed types and non-C++ produced types can be compared by variants. */
271 else
272 return TYPE_MAIN_VARIANT (t);
273 }
274
275 /* Produce hash based on type name. */
276
277 static hashval_t
278 hash_type_name (tree t)
279 {
280 gcc_checking_assert (main_odr_variant (t) == t);
281
282 /* If not in LTO, all main variants are unique, so we can do
283 pointer hash. */
284 if (!in_lto_p)
285 return htab_hash_pointer (t);
286
287 /* Anonymous types are unique. */
288 if (type_in_anonymous_namespace_p (t))
289 return htab_hash_pointer (t);
290
291 /* For polymorphic types, we can simply hash the virtual table. */
292 if (TREE_CODE (t) == RECORD_TYPE
293 && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
294 {
295 tree v = BINFO_VTABLE (TYPE_BINFO (t));
296 hashval_t hash = 0;
297
298 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
299 {
300 hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
301 v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
302 }
303
304 v = DECL_ASSEMBLER_NAME (v);
305 hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
306 return hash;
307 }
308
309 /* Rest is not implemented yet. */
310 gcc_unreachable ();
311 }
312
/* Return the computed hashcode for ODR_TYPE.  */

inline hashval_t
odr_hasher::hash (const value_type *odr_type)
{
  /* Delegate to the name-based hash so it stays consistent with
     odr_hasher::equal, which compares by ODR name.  */
  return hash_type_name (odr_type->type);
}
320
321 /* For languages with One Definition Rule, work out if
322 types are the same based on their name.
323
324 This is non-trivial for LTO where minnor differences in
325 the type representation may have prevented type merging
326 to merge two copies of otherwise equivalent type.
327
328 Until we start streaming mangled type names, this function works
329 only for polymorphic types. */
330
331 bool
332 types_same_for_odr (const_tree type1, const_tree type2)
333 {
334 gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));
335
336 type1 = main_odr_variant (type1);
337 type2 = main_odr_variant (type2);
338
339 if (type1 == type2)
340 return true;
341
342 if (!in_lto_p)
343 return false;
344
345 /* Check for anonymous namespaces. Those have !TREE_PUBLIC
346 on the corresponding TYPE_STUB_DECL. */
347 if (type_in_anonymous_namespace_p (type1)
348 || type_in_anonymous_namespace_p (type2))
349 return false;
350
351 /* See if types are obvoiusly different (i.e. different codes
352 or polymorphis wrt non-polymorphic). This is not strictly correct
353 for ODR violating programs, but we can't do better without streaming
354 ODR names. */
355 if (TREE_CODE (type1) != TREE_CODE (type2))
356 return false;
357 if (TREE_CODE (type1) == RECORD_TYPE
358 && (TYPE_BINFO (type1) == NULL_TREE) != (TYPE_BINFO (type1) == NULL_TREE))
359 return false;
360 if (TREE_CODE (type1) == RECORD_TYPE && TYPE_BINFO (type1)
361 && (BINFO_VTABLE (TYPE_BINFO (type1)) == NULL_TREE)
362 != (BINFO_VTABLE (TYPE_BINFO (type2)) == NULL_TREE))
363 return false;
364
365 /* At the moment we have no way to establish ODR equivlaence at LTO
366 other than comparing virtual table pointrs of polymorphic types.
367 Eventually we should start saving mangled names in TYPE_NAME.
368 Then this condition will become non-trivial. */
369
370 if (TREE_CODE (type1) == RECORD_TYPE
371 && TYPE_BINFO (type1) && TYPE_BINFO (type2)
372 && BINFO_VTABLE (TYPE_BINFO (type1))
373 && BINFO_VTABLE (TYPE_BINFO (type2)))
374 {
375 tree v1 = BINFO_VTABLE (TYPE_BINFO (type1));
376 tree v2 = BINFO_VTABLE (TYPE_BINFO (type2));
377 gcc_assert (TREE_CODE (v1) == POINTER_PLUS_EXPR
378 && TREE_CODE (v2) == POINTER_PLUS_EXPR);
379 return (operand_equal_p (TREE_OPERAND (v1, 1),
380 TREE_OPERAND (v2, 1), 0)
381 && DECL_ASSEMBLER_NAME
382 (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
383 == DECL_ASSEMBLER_NAME
384 (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
385 }
386 gcc_unreachable ();
387 }
388
389
390 /* Compare types T1 and T2 and return true if they are
391 equivalent. */
392
393 inline bool
394 odr_hasher::equal (const value_type *t1, const compare_type *ct2)
395 {
396 tree t2 = const_cast <tree> (ct2);
397
398 gcc_checking_assert (main_odr_variant (t2) == t2);
399 if (t1->type == t2)
400 return true;
401 if (!in_lto_p)
402 return false;
403 return types_same_for_odr (t1->type, t2);
404 }
405
/* Free ODR type V.  */

inline void
odr_hasher::remove (value_type *v)
{
  /* The vectors are GTY((skip)), so the GC does not manage them;
     release them explicitly.  */
  v->bases.release ();
  v->derived_types.release ();
  if (v->types_set)
    pointer_set_destroy (v->types_set);
  /* The node itself is GC-allocated.  */
  ggc_free (v);
}
417
/* ODR type hash used to look up an ODR type based on a tree type node.  */

typedef hash_table<odr_hasher> odr_hash_type;
static odr_hash_type *odr_hash;

/* ODR types are also stored into the ODR_TYPE vector to allow consistent
   walking.  Bases appear before derived types.  The vector is garbage
   collected so we won't end up visiting empty types.  */

static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
#define odr_types (*odr_types_ptr)
429
430 /* Set TYPE_BINFO of TYPE and its variants to BINFO. */
431 void
432 set_type_binfo (tree type, tree binfo)
433 {
434 for (; type; type = TYPE_NEXT_VARIANT (type))
435 if (COMPLETE_TYPE_P (type))
436 TYPE_BINFO (type) = binfo;
437 else
438 gcc_assert (!TYPE_BINFO (type));
439 }
440
441 /* Compare T2 and T2 based on name or structure. */
442
443 static bool
444 odr_subtypes_equivalent_p (tree t1, tree t2, pointer_set_t *visited)
445 {
446 bool an1, an2;
447
448 /* This can happen in incomplete types that should be handled earlier. */
449 gcc_assert (t1 && t2);
450
451 t1 = main_odr_variant (t1);
452 t2 = main_odr_variant (t2);
453 if (t1 == t2)
454 return true;
455 if (TREE_CODE (t1) != TREE_CODE (t2))
456 return false;
457 if ((TYPE_NAME (t1) == NULL_TREE) != (TYPE_NAME (t2) == NULL_TREE))
458 return false;
459 if (TYPE_NAME (t1) && DECL_NAME (TYPE_NAME (t1)) != DECL_NAME (TYPE_NAME (t2)))
460 return false;
461
462 /* Anonymous namespace types must match exactly. */
463 an1 = type_in_anonymous_namespace_p (t1);
464 an2 = type_in_anonymous_namespace_p (t2);
465 if (an1 != an2 || an1)
466 return false;
467
468 /* For types where we can not establish ODR equivalency, recurse and deeply
469 compare. */
470 if (TREE_CODE (t1) != RECORD_TYPE
471 || !TYPE_BINFO (t1) || !TYPE_BINFO (t2)
472 || !polymorphic_type_binfo_p (TYPE_BINFO (t1))
473 || !polymorphic_type_binfo_p (TYPE_BINFO (t2)))
474 {
475 /* This should really be a pair hash, but for the moment we do not need
476 100% reliability and it would be better to compare all ODR types so
477 recursion here is needed only for component types. */
478 if (pointer_set_insert (visited, t1))
479 return true;
480 return odr_types_equivalent_p (t1, t2, false, NULL, visited);
481 }
482 return types_same_for_odr (t1, t2);
483 }
484
485 /* Output ODR violation warning about T1 and T2 with REASON.
486 Display location of ST1 and ST2 if REASON speaks about field or
487 method of the type.
488 If WARN is false, do nothing. Set WARNED if warning was indeed
489 output. */
490
491 void
492 warn_odr (tree t1, tree t2, tree st1, tree st2,
493 bool warn, bool *warned, const char *reason)
494 {
495 tree decl2 = TYPE_NAME (t2);
496
497 if (!warn)
498 return;
499 if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (t1)), OPT_Wodr,
500 "type %qT violates one definition rule",
501 t1))
502 return;
503 if (!st1)
504 ;
505 else if (TREE_CODE (st1) == FIELD_DECL)
506 {
507 inform (DECL_SOURCE_LOCATION (decl2),
508 "a different type is defined in another translation unit");
509 inform (DECL_SOURCE_LOCATION (st1),
510 "the first difference of corresponding definitions is field %qD",
511 st1);
512 decl2 = st2;
513 }
514 else if (TREE_CODE (st1) == FUNCTION_DECL)
515 {
516 inform (DECL_SOURCE_LOCATION (decl2),
517 "a different type is defined in another translation unit");
518 inform (DECL_SOURCE_LOCATION (st1),
519 "the first difference of corresponding definitions is method %qD",
520 st1);
521 decl2 = st2;
522 }
523 else
524 return;
525 inform (DECL_SOURCE_LOCATION (decl2), reason);
526
527 if (warned)
528 *warned = true;
529 }
530
531 /* We already warned about ODR mismatch. T1 and T2 ought to be equivalent
532 because they are used on same place in ODR matching types.
533 They are not; inform the user. */
534
535 void
536 warn_types_mismatch (tree t1, tree t2)
537 {
538 if (!TYPE_NAME (t1) || !TYPE_NAME (t2))
539 return;
540 /* In Firefox it is a common bug to have same types but in
541 different namespaces. Be a bit more informative on
542 this. */
543 if (TYPE_CONTEXT (t1) && TYPE_CONTEXT (t2)
544 && (((TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL)
545 != (TREE_CODE (TYPE_CONTEXT (t2)) == NAMESPACE_DECL))
546 || (TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL
547 && (DECL_NAME (TYPE_CONTEXT (t1)) !=
548 DECL_NAME (TYPE_CONTEXT (t2))))))
549 inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
550 "type %qT should match type %qT but is defined "
551 "in different namespace ",
552 t1, t2);
553 else
554 inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
555 "type %qT should match type %qT",
556 t1, t2);
557 inform (DECL_SOURCE_LOCATION (TYPE_NAME (t2)),
558 "the incompatible type is defined here");
559 }
560
561 /* Compare T1 and T2, report ODR violations if WARN is true and set
562 WARNED to true if anything is reported. Return true if types match.
563 If true is returned, the types are also compatible in the sense of
564 gimple_canonical_types_compatible_p. */
565
566 static bool
567 odr_types_equivalent_p (tree t1, tree t2, bool warn, bool *warned, pointer_set_t *visited)
568 {
569 /* Check first for the obvious case of pointer identity. */
570 if (t1 == t2)
571 return true;
572 gcc_assert (!type_in_anonymous_namespace_p (t1));
573 gcc_assert (!type_in_anonymous_namespace_p (t2));
574
575 /* Can't be the same type if the types don't have the same code. */
576 if (TREE_CODE (t1) != TREE_CODE (t2))
577 {
578 warn_odr (t1, t2, NULL, NULL, warn, warned,
579 G_("a different type is defined in another translation unit"));
580 return false;
581 }
582
583 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
584 {
585 warn_odr (t1, t2, NULL, NULL, warn, warned,
586 G_("a type with different qualifiers is defined in another "
587 "translation unit"));
588 return false;
589 }
590
591 if (comp_type_attributes (t1, t2) != 1)
592 {
593 warn_odr (t1, t2, NULL, NULL, warn, warned,
594 G_("a type with attributes "
595 "is defined in another translation unit"));
596 return false;
597 }
598
599 if (TREE_CODE (t1) == ENUMERAL_TYPE)
600 {
601 tree v1, v2;
602 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
603 v1 && v2 ; v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
604 {
605 if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
606 {
607 warn_odr (t1, t2, NULL, NULL, warn, warned,
608 G_("an enum with different value name"
609 " is defined in another translation unit"));
610 return false;
611 }
612 if (TREE_VALUE (v1) != TREE_VALUE (v2)
613 && !operand_equal_p (DECL_INITIAL (TREE_VALUE (v1)),
614 DECL_INITIAL (TREE_VALUE (v2)), 0))
615 {
616 warn_odr (t1, t2, NULL, NULL, warn, warned,
617 G_("an enum with different values is defined"
618 " in another translation unit"));
619 return false;
620 }
621 }
622 if (v1 || v2)
623 {
624 warn_odr (t1, t2, NULL, NULL, warn, warned,
625 G_("an enum with mismatching number of values "
626 "is defined in another translation unit"));
627 return false;
628 }
629 }
630
631 /* Non-aggregate types can be handled cheaply. */
632 if (INTEGRAL_TYPE_P (t1)
633 || SCALAR_FLOAT_TYPE_P (t1)
634 || FIXED_POINT_TYPE_P (t1)
635 || TREE_CODE (t1) == VECTOR_TYPE
636 || TREE_CODE (t1) == COMPLEX_TYPE
637 || TREE_CODE (t1) == OFFSET_TYPE
638 || POINTER_TYPE_P (t1))
639 {
640 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
641 {
642 warn_odr (t1, t2, NULL, NULL, warn, warned,
643 G_("a type with different precision is defined "
644 "in another translation unit"));
645 return false;
646 }
647 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
648 {
649 warn_odr (t1, t2, NULL, NULL, warn, warned,
650 G_("a type with different signedness is defined "
651 "in another translation unit"));
652 return false;
653 }
654
655 if (TREE_CODE (t1) == INTEGER_TYPE
656 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
657 {
658 /* char WRT uint_8? */
659 warn_odr (t1, t2, NULL, NULL, warn, warned,
660 G_("a different type is defined in another "
661 "translation unit"));
662 return false;
663 }
664
665 /* For canonical type comparisons we do not want to build SCCs
666 so we cannot compare pointed-to types. But we can, for now,
667 require the same pointed-to type kind and match what
668 useless_type_conversion_p would do. */
669 if (POINTER_TYPE_P (t1))
670 {
671 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
672 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
673 {
674 warn_odr (t1, t2, NULL, NULL, warn, warned,
675 G_("it is defined as a pointer in different address "
676 "space in another translation unit"));
677 return false;
678 }
679
680 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
681 {
682 warn_odr (t1, t2, NULL, NULL, warn, warned,
683 G_("it is defined as a pointer to different type "
684 "in another translation unit"));
685 if (warn && warned)
686 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
687 return false;
688 }
689 }
690
691 /* Tail-recurse to components. */
692 if ((TREE_CODE (t1) == VECTOR_TYPE || TREE_CODE (t1) == COMPLEX_TYPE)
693 && !odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
694 {
695 /* Probably specific enough. */
696 warn_odr (t1, t2, NULL, NULL, warn, warned,
697 G_("a different type is defined "
698 "in another translation unit"));
699 if (warn && warned)
700 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
701 return false;
702 }
703
704 gcc_assert (operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0));
705 gcc_assert (operand_equal_p (TYPE_SIZE_UNIT (t1),
706 TYPE_SIZE_UNIT (t2), 0));
707 gcc_assert (TYPE_MODE (t1) == TYPE_MODE (t2));
708
709 return true;
710 }
711
712 /* Do type-specific comparisons. */
713 switch (TREE_CODE (t1))
714 {
715 case ARRAY_TYPE:
716 {
717 /* Array types are the same if the element types are the same and
718 the number of elements are the same. */
719 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
720 {
721 warn_odr (t1, t2, NULL, NULL, warn, warned,
722 G_("a different type is defined in another "
723 "translation unit"));
724 if (warn && warned)
725 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
726 }
727 gcc_assert (TYPE_STRING_FLAG (t1) == TYPE_STRING_FLAG (t2));
728 gcc_assert (TYPE_NONALIASED_COMPONENT (t1)
729 == TYPE_NONALIASED_COMPONENT (t2));
730
731 tree i1 = TYPE_DOMAIN (t1);
732 tree i2 = TYPE_DOMAIN (t2);
733
734 /* For an incomplete external array, the type domain can be
735 NULL_TREE. Check this condition also. */
736 if (i1 == NULL_TREE || i2 == NULL_TREE)
737 return true;
738
739 tree min1 = TYPE_MIN_VALUE (i1);
740 tree min2 = TYPE_MIN_VALUE (i2);
741 tree max1 = TYPE_MAX_VALUE (i1);
742 tree max2 = TYPE_MAX_VALUE (i2);
743
744 /* In C++, minimums should be always 0. */
745 gcc_assert (min1 == min2);
746 if (!operand_equal_p (max1, max2, 0))
747 {
748 warn_odr (t1, t2, NULL, NULL, warn, warned,
749 G_("an array of different size is defined "
750 "in another translation unit"));
751 return false;
752 }
753 gcc_assert (operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0));
754 gcc_assert (operand_equal_p (TYPE_SIZE_UNIT (t1),
755 TYPE_SIZE_UNIT (t2), 0));
756 }
757 return true;
758
759 case METHOD_TYPE:
760 case FUNCTION_TYPE:
761 /* Function types are the same if the return type and arguments types
762 are the same. */
763 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
764 {
765 warn_odr (t1, t2, NULL, NULL, warn, warned,
766 G_("has different return value "
767 "in another translation unit"));
768 if (warn && warned)
769 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
770 return false;
771 }
772
773 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
774 return true;
775 else
776 {
777 tree parms1, parms2;
778
779 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
780 parms1 && parms2;
781 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
782 {
783 if (!odr_subtypes_equivalent_p
784 (TREE_VALUE (parms1), TREE_VALUE (parms2), visited))
785 {
786 warn_odr (t1, t2, NULL, NULL, warn, warned,
787 G_("has different parameters in another "
788 "translation unit"));
789 if (warn && warned)
790 warn_types_mismatch (TREE_VALUE (parms1),
791 TREE_VALUE (parms2));
792 return false;
793 }
794 }
795
796 if (parms1 || parms2)
797 {
798 warn_odr (t1, t2, NULL, NULL, warn, warned,
799 G_("has different parameters "
800 "in another translation unit"));
801 return false;
802 }
803
804 return true;
805 }
806
807 case RECORD_TYPE:
808 case UNION_TYPE:
809 case QUAL_UNION_TYPE:
810 {
811 tree f1, f2;
812
813 /* For aggregate types, all the fields must be the same. */
814 if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
815 {
816 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
817 f1 || f2;
818 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
819 {
820 /* Skip non-fields. */
821 while (f1 && TREE_CODE (f1) != FIELD_DECL)
822 f1 = TREE_CHAIN (f1);
823 while (f2 && TREE_CODE (f2) != FIELD_DECL)
824 f2 = TREE_CHAIN (f2);
825 if (!f1 || !f2)
826 break;
827 if (DECL_ARTIFICIAL (f1) != DECL_ARTIFICIAL (f2))
828 break;
829 if (DECL_NAME (f1) != DECL_NAME (f2)
830 && !DECL_ARTIFICIAL (f1))
831 {
832 warn_odr (t1, t2, f1, f2, warn, warned,
833 G_("a field with different name is defined "
834 "in another translation unit"));
835 return false;
836 }
837 if (!odr_subtypes_equivalent_p (TREE_TYPE (f1), TREE_TYPE (f2), visited))
838 {
839 /* Do not warn about artificial fields and just go into generic
840 field mismatch warning. */
841 if (DECL_ARTIFICIAL (f1))
842 break;
843
844 warn_odr (t1, t2, f1, f2, warn, warned,
845 G_("a field of same name but different type "
846 "is defined in another translation unit"));
847 if (warn && warned)
848 warn_types_mismatch (TREE_TYPE (f1), TREE_TYPE (f2));
849 return false;
850 }
851 if (!gimple_compare_field_offset (f1, f2))
852 {
853 /* Do not warn about artificial fields and just go into generic
854 field mismatch warning. */
855 if (DECL_ARTIFICIAL (f1))
856 break;
857 warn_odr (t1, t2, t1, t2, warn, warned,
858 G_("fields has different layout "
859 "in another translation unit"));
860 return false;
861 }
862 gcc_assert (DECL_NONADDRESSABLE_P (f1)
863 == DECL_NONADDRESSABLE_P (f2));
864 }
865
866 /* If one aggregate has more fields than the other, they
867 are not the same. */
868 if (f1 || f2)
869 {
870 warn_odr (t1, t2, NULL, NULL, warn, warned,
871 G_("a type with different number of fields "
872 "is defined in another translation unit"));
873 return false;
874 }
875 if ((TYPE_MAIN_VARIANT (t1) == t1 || TYPE_MAIN_VARIANT (t2) == t2)
876 && (TYPE_METHODS (TYPE_MAIN_VARIANT (t1))
877 != TYPE_METHODS (TYPE_MAIN_VARIANT (t2))))
878 {
879 for (f1 = TYPE_METHODS (TYPE_MAIN_VARIANT (t1)),
880 f2 = TYPE_METHODS (TYPE_MAIN_VARIANT (t2));
881 f1 && f2 ; f1 = DECL_CHAIN (f1), f2 = DECL_CHAIN (f2))
882 {
883 if (DECL_ASSEMBLER_NAME (f1) != DECL_ASSEMBLER_NAME (f2))
884 {
885 warn_odr (t1, t2, f1, f2, warn, warned,
886 G_("a different method of same type "
887 "is defined in another translation unit"));
888 return false;
889 }
890 if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
891 {
892 warn_odr (t1, t2, f1, f2, warn, warned,
893 G_("s definition that differs by virtual "
894 "keyword in another translation unit"));
895 return false;
896 }
897 if (DECL_VINDEX (f1) != DECL_VINDEX (f2))
898 {
899 warn_odr (t1, t2, f1, f2, warn, warned,
900 G_("virtual table layout differs in another "
901 "translation unit"));
902 return false;
903 }
904 if (odr_subtypes_equivalent_p (TREE_TYPE (f1), TREE_TYPE (f2), visited))
905 {
906 warn_odr (t1, t2, f1, f2, warn, warned,
907 G_("method with incompatible type is defined "
908 "in another translation unit"));
909 return false;
910 }
911 }
912 if (f1 || f2)
913 {
914 warn_odr (t1, t2, NULL, NULL, warn, warned,
915 G_("a type with different number of methods "
916 "is defined in another translation unit"));
917 return false;
918 }
919 }
920 gcc_assert (operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0));
921 gcc_assert (operand_equal_p (TYPE_SIZE_UNIT (t1),
922 TYPE_SIZE_UNIT (t2), 0));
923 }
924
925 return true;
926 }
927
928 default:
929 gcc_unreachable ();
930 }
931 }
932
933 /* TYPE is equivalent to VAL by ODR, but its tree representation differs
934 from VAL->type. This may happen in LTO where tree merging did not merge
935 all variants of the same type. It may or may not mean the ODR violation.
936 Add it to the list of duplicates and warn on some violations. */
937
938 static bool
939 add_type_duplicate (odr_type val, tree type)
940 {
941 bool build_bases = false;
942 if (!val->types_set)
943 val->types_set = pointer_set_create ();
944
945 /* Always prefer complete type to be the leader. */
946 if (!COMPLETE_TYPE_P (val->type)
947 && COMPLETE_TYPE_P (type))
948 {
949 tree tmp = type;
950
951 build_bases = true;
952 type = val->type;
953 val->type = tmp;
954 }
955
956 /* See if this duplicate is new. */
957 if (!pointer_set_insert (val->types_set, type))
958 {
959 bool merge = true;
960 bool base_mismatch = false;
961 unsigned int i,j;
962 bool warned = false;
963 pointer_set_t *visited = pointer_set_create ();
964
965 gcc_assert (in_lto_p);
966 vec_safe_push (val->types, type);
967
968 /* First we compare memory layout. */
969 if (!odr_types_equivalent_p (val->type, type, !flag_ltrans && !val->odr_violated,
970 &warned, visited))
971 {
972 merge = false;
973 odr_violation_reported = true;
974 val->odr_violated = true;
975 if (cgraph_dump_file)
976 {
977 fprintf (cgraph_dump_file, "ODR violation\n");
978
979 print_node (cgraph_dump_file, "", val->type, 0);
980 putc ('\n',cgraph_dump_file);
981 print_node (cgraph_dump_file, "", type, 0);
982 putc ('\n',cgraph_dump_file);
983 }
984 }
985 pointer_set_destroy (visited);
986
987 /* Next sanity check that bases are the same. If not, we will end
988 up producing wrong answers. */
989 if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
990 && TREE_CODE (val->type) == RECORD_TYPE
991 && TREE_CODE (type) == RECORD_TYPE
992 && TYPE_BINFO (val->type) && TYPE_BINFO (type))
993 {
994 for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
995 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type), i)))
996 {
997 odr_type base = get_odr_type
998 (BINFO_TYPE
999 (BINFO_BASE_BINFO (TYPE_BINFO (type),
1000 i)),
1001 true);
1002 if (val->bases.length () <= j || val->bases[j] != base)
1003 base_mismatch = true;
1004 j++;
1005 }
1006 if (base_mismatch)
1007 {
1008 merge = false;
1009 odr_violation_reported = true;
1010
1011 if (!warned && !val->odr_violated)
1012 warn_odr (type, val->type, NULL, NULL, !warned, &warned,
1013 "a type with the same name but different bases is "
1014 "defined in another translation unit");
1015 val->odr_violated = true;
1016 if (cgraph_dump_file)
1017 {
1018 fprintf (cgraph_dump_file, "ODR bse violation or merging bug?\n");
1019
1020 print_node (cgraph_dump_file, "", val->type, 0);
1021 putc ('\n',cgraph_dump_file);
1022 print_node (cgraph_dump_file, "", type, 0);
1023 putc ('\n',cgraph_dump_file);
1024 }
1025 }
1026 }
1027
1028 /* Regularize things a little. During LTO same types may come with
1029 different BINFOs. Either because their virtual table was
1030 not merged by tree merging and only later at decl merging or
1031 because one type comes with external vtable, while other
1032 with internal. We want to merge equivalent binfos to conserve
1033 memory and streaming overhead.
1034
1035 The external vtables are more harmful: they contain references
1036 to external declarations of methods that may be defined in the
1037 merged LTO unit. For this reason we absolutely need to remove
1038 them and replace by internal variants. Not doing so will lead
1039 to incomplete answers from possible_polymorphic_call_targets. */
1040 if (!flag_ltrans && merge
1041 && TREE_CODE (val->type) == RECORD_TYPE
1042 && TREE_CODE (type) == RECORD_TYPE
1043 && TYPE_BINFO (val->type) && TYPE_BINFO (type)
1044 && TYPE_MAIN_VARIANT (type) == type
1045 && TYPE_MAIN_VARIANT (val->type) == val->type
1046 && BINFO_VTABLE (TYPE_BINFO (val->type))
1047 && BINFO_VTABLE (TYPE_BINFO (type)))
1048 {
1049 tree master_binfo = TYPE_BINFO (val->type);
1050 tree v1 = BINFO_VTABLE (master_binfo);
1051 tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
1052
1053 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
1054 {
1055 gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
1056 && operand_equal_p (TREE_OPERAND (v1, 1),
1057 TREE_OPERAND (v2, 1), 0));
1058 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
1059 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
1060 }
1061 gcc_assert (DECL_ASSEMBLER_NAME (v1)
1062 == DECL_ASSEMBLER_NAME (v2));
1063
1064 if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
1065 {
1066 unsigned int i;
1067
1068 set_type_binfo (val->type, TYPE_BINFO (type));
1069 for (i = 0; i < val->types->length (); i++)
1070 {
1071 if (TYPE_BINFO ((*val->types)[i])
1072 == master_binfo)
1073 set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
1074 }
1075 BINFO_TYPE (TYPE_BINFO (type)) = val->type;
1076 }
1077 else
1078 set_type_binfo (type, master_binfo);
1079 }
1080 }
1081 return build_bases;
1082 }
1083
1084 /* Get ODR type hash entry for TYPE. If INSERT is true, create
1085 possibly new entry. */
1086
1087 odr_type
1088 get_odr_type (tree type, bool insert)
1089 {
1090 odr_type_d **slot;
1091 odr_type val;
1092 hashval_t hash;
1093 bool build_bases = false;
1094 bool insert_to_odr_array = false;
1095 int base_id = -1;
1096
1097 type = main_odr_variant (type);
1098
1099 hash = hash_type_name (type);
1100 slot
1101 = odr_hash->find_slot_with_hash (type, hash, insert ? INSERT : NO_INSERT);
1102 if (!slot)
1103 return NULL;
1104
1105 /* See if we already have entry for type. */
1106 if (*slot)
1107 {
1108 val = *slot;
1109
1110 /* With LTO we need to support multiple tree representation of
1111 the same ODR type. */
1112 if (val->type != type)
1113 build_bases = add_type_duplicate (val, type);
1114 }
1115 else
1116 {
1117 val = ggc_cleared_alloc<odr_type_d> ();
1118 val->type = type;
1119 val->bases = vNULL;
1120 val->derived_types = vNULL;
1121 val->anonymous_namespace = type_in_anonymous_namespace_p (type);
1122 build_bases = COMPLETE_TYPE_P (val->type);
1123 insert_to_odr_array = true;
1124 }
1125
1126 if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
1127 && type == TYPE_MAIN_VARIANT (type))
1128 {
1129 tree binfo = TYPE_BINFO (type);
1130 unsigned int i;
1131
1132 gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) = type);
1133
1134 val->all_derivations_known = type_all_derivations_known_p (type);
1135 *slot = val;
1136 for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
1137 /* For now record only polymorphic types. other are
1138 pointless for devirtualization and we can not precisely
1139 determine ODR equivalency of these during LTO. */
1140 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
1141 {
1142 odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
1143 i)),
1144 true);
1145 gcc_assert (TYPE_MAIN_VARIANT (base->type) == base->type);
1146 base->derived_types.safe_push (val);
1147 val->bases.safe_push (base);
1148 if (base->id > base_id)
1149 base_id = base->id;
1150 }
1151 }
1152 /* Ensure that type always appears after bases. */
1153 if (insert_to_odr_array)
1154 {
1155 if (odr_types_ptr)
1156 val->id = odr_types.length ();
1157 vec_safe_push (odr_types_ptr, val);
1158 }
1159 else if (base_id > val->id)
1160 {
1161 odr_types[val->id] = 0;
1162 /* Be sure we did not recorded any derived types; these may need
1163 renumbering too. */
1164 gcc_assert (val->derived_types.length() == 0);
1165 if (odr_types_ptr)
1166 val->id = odr_types.length ();
1167 vec_safe_push (odr_types_ptr, val);
1168 }
1169 return val;
1170 }
1171
/* Dump ODR type T and all its derived types.  INDENT specifies indentation for
   recursive printing.  */
1174
1175 static void
1176 dump_odr_type (FILE *f, odr_type t, int indent=0)
1177 {
1178 unsigned int i;
1179 fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
1180 print_generic_expr (f, t->type, TDF_SLIM);
1181 fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
1182 fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
1183 if (TYPE_NAME (t->type))
1184 {
1185 fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
1186 DECL_SOURCE_FILE (TYPE_NAME (t->type)),
1187 DECL_SOURCE_LINE (TYPE_NAME (t->type)));
1188 }
1189 if (t->bases.length ())
1190 {
1191 fprintf (f, "%*s base odr type ids: ", indent * 2, "");
1192 for (i = 0; i < t->bases.length (); i++)
1193 fprintf (f, " %i", t->bases[i]->id);
1194 fprintf (f, "\n");
1195 }
1196 if (t->derived_types.length ())
1197 {
1198 fprintf (f, "%*s derived types:\n", indent * 2, "");
1199 for (i = 0; i < t->derived_types.length (); i++)
1200 dump_odr_type (f, t->derived_types[i], indent + 1);
1201 }
1202 fprintf (f, "\n");
1203 }
1204
1205 /* Dump the type inheritance graph. */
1206
1207 static void
1208 dump_type_inheritance_graph (FILE *f)
1209 {
1210 unsigned int i;
1211 if (!odr_types_ptr)
1212 return;
1213 fprintf (f, "\n\nType inheritance graph:\n");
1214 for (i = 0; i < odr_types.length (); i++)
1215 {
1216 if (odr_types[i] && odr_types[i]->bases.length () == 0)
1217 dump_odr_type (f, odr_types[i]);
1218 }
1219 for (i = 0; i < odr_types.length (); i++)
1220 {
1221 if (odr_types[i] && odr_types[i]->types && odr_types[i]->types->length ())
1222 {
1223 unsigned int j;
1224 fprintf (f, "Duplicate tree types for odr type %i\n", i);
1225 print_node (f, "", odr_types[i]->type, 0);
1226 for (j = 0; j < odr_types[i]->types->length (); j++)
1227 {
1228 tree t;
1229 fprintf (f, "duplicate #%i\n", j);
1230 print_node (f, "", (*odr_types[i]->types)[j], 0);
1231 t = (*odr_types[i]->types)[j];
1232 while (TYPE_P (t) && TYPE_CONTEXT (t))
1233 {
1234 t = TYPE_CONTEXT (t);
1235 print_node (f, "", t, 0);
1236 }
1237 putc ('\n',f);
1238 }
1239 }
1240 }
1241 }
1242
1243 /* Given method type T, return type of class it belongs to.
1244 Lookup this pointer and get its type. */
1245
1246 tree
1247 method_class_type (const_tree t)
1248 {
1249 tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
1250 gcc_assert (TREE_CODE (t) == METHOD_TYPE);
1251
1252 return TREE_TYPE (first_parm_type);
1253 }
1254
1255 /* Initialize IPA devirt and build inheritance tree graph. */
1256
void
build_type_inheritance_graph (void)
{
  struct symtab_node *n;
  FILE *inheritance_dump_file;
  int flags;

  /* Idempotent: the hash doubles as the "already built" flag.  */
  if (odr_hash)
    return;
  timevar_push (TV_IPA_INHERITANCE);
  inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
  odr_hash = new odr_hash_type (23);

  /* We reconstruct the graph starting from types of all methods seen in
     the unit.  */
  FOR_EACH_SYMBOL (n)
    if (is_a <cgraph_node *> (n)
	&& DECL_VIRTUAL_P (n->decl)
	&& n->real_symbol_p ())
      get_odr_type (TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (n->decl))),
		    true);

    /* Look also for virtual tables of types that do not define any methods.

       We need it in a case where class B has virtual base of class A
       re-defining its virtual method and there is class C with no virtual
       methods with B as virtual base.

       Here we output B's virtual method in two variant - for non-virtual
       and virtual inheritance.  B's virtual table has non-virtual version,
       while C's has virtual.

       For this reason we need to know about C in order to include both
       variants of B.  More correctly, record_target_from_binfo should
       add both variants of the method when walking B, but we have no
       link in between them.

       We rely on fact that either the method is exported and thus we
       assume it is called externally or C is in anonymous namespace and
       thus we will see the vtable.  */

    /* NOTE: this `else if' continues the `if' inside FOR_EACH_SYMBOL above;
       each symbol is classified as either a virtual method or a vtable.  */
    else if (is_a <varpool_node *> (n)
	     && DECL_VIRTUAL_P (n->decl)
	     && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
	     && TYPE_BINFO (DECL_CONTEXT (n->decl))
	     && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
      get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true);
  if (inheritance_dump_file)
    {
      dump_type_inheritance_graph (inheritance_dump_file);
      dump_end (TDI_inheritance, inheritance_dump_file);
    }
  timevar_pop (TV_IPA_INHERITANCE);
}
1311
1312 /* Return true if N has reference from live virtual table
1313 (and thus can be a destination of polymorphic call).
1314 Be conservatively correct when callgraph is not built or
1315 if the method may be referred externally. */
1316
1317 static bool
1318 referenced_from_vtable_p (struct cgraph_node *node)
1319 {
1320 int i;
1321 struct ipa_ref *ref;
1322 bool found = false;
1323
1324 if (node->externally_visible
1325 || node->used_from_other_partition)
1326 return true;
1327
1328 /* Keep this test constant time.
1329 It is unlikely this can happen except for the case where speculative
1330 devirtualization introduced many speculative edges to this node.
1331 In this case the target is very likely alive anyway. */
1332 if (node->ref_list.referring.length () > 100)
1333 return true;
1334
1335 /* We need references built. */
1336 if (cgraph_state <= CGRAPH_STATE_CONSTRUCTION)
1337 return true;
1338
1339 for (i = 0; node->iterate_referring (i, ref); i++)
1340
1341 if ((ref->use == IPA_REF_ALIAS
1342 && referenced_from_vtable_p (dyn_cast<cgraph_node *> (ref->referring)))
1343 || (ref->use == IPA_REF_ADDR
1344 && TREE_CODE (ref->referring->decl) == VAR_DECL
1345 && DECL_VIRTUAL_P (ref->referring->decl)))
1346 {
1347 found = true;
1348 break;
1349 }
1350 return found;
1351 }
1352
1353 /* If TARGET has associated node, record it in the NODES array.
1354 CAN_REFER specify if program can refer to the target directly.
1355 if TARGET is unknown (NULL) or it can not be inserted (for example because
1356 its body was already removed and there is no way to refer to it), clear
1357 COMPLETEP. */
1358
static void
maybe_record_node (vec <cgraph_node *> &nodes,
		   tree target, pointer_set_t *inserted,
		   bool can_refer,
		   bool *completep)
{
  struct cgraph_node *target_node, *alias_target;
  enum availability avail;

  /* cxa_pure_virtual and __builtin_unreachable do not need to be added into
     list of targets; the runtime effect of calling them is undefined.
     Only "real" virtual methods should be accounted.  */
  if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE)
    return;

  if (!can_refer)
    {
      /* The only case when method of anonymous namespace becomes unreferable
	 is when we completely optimized it out.  */
      if (flag_ltrans
	  || !target
	  || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
	*completep = false;
      return;
    }

  if (!target)
    return;

  target_node = cgraph_node::get (target);

  /* Prefer alias target over aliases, so we do not get confused by
     fake duplicates.  */
  if (target_node)
    {
      alias_target = target_node->ultimate_alias_target (&avail);
      /* NOTE(review): the third conjunct re-queries availability and only
	 tests it for non-zero; presumably `avail >= AVAIL_AVAILABLE` alone
	 was intended — confirm before simplifying.  */
      if (target_node != alias_target
	  && avail >= AVAIL_AVAILABLE
	  && target_node->get_availability ())
	target_node = alias_target;
    }

  /* Method can only be called by polymorphic call if any
     of vtables referring to it are alive.

     While this holds for non-anonymous functions, too, there are
     cases where we want to keep them in the list; for example
     inline functions with -fno-weak are static, but we still
     may devirtualize them when instance comes from other unit.
     The same holds for LTO.

     Currently we ignore these functions in speculative devirtualization.
     ??? Maybe it would make sense to be more aggressive for LTO even
     eslewhere.  */
  if (!flag_ltrans
      && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
      && (!target_node
          || !referenced_from_vtable_p (target_node)))
    ;
  /* See if TARGET is useful function we can deal with.  */
  else if (target_node != NULL
	   && (TREE_PUBLIC (target)
	       || DECL_EXTERNAL (target)
	       || target_node->definition)
	   && target_node->real_symbol_p ())
    {
      gcc_assert (!target_node->global.inlined_to);
      gcc_assert (target_node->real_symbol_p ());
      /* INSERTED de-duplicates by decl; the cache set is used to flush
	 the query cache when one of the recorded nodes is removed.  */
      if (!pointer_set_insert (inserted, target_node->decl))
	{
	  pointer_set_insert (cached_polymorphic_call_targets,
			      target_node);
	  nodes.safe_push (target_node);
	}
    }
  /* A target we cannot use: the list is incomplete unless the type lives
     in an anonymous namespace of this unit (non-LTO).  */
  else if (completep
	   && (!type_in_anonymous_namespace_p
		 (DECL_CONTEXT (target))
	       || flag_ltrans))
    *completep = false;
}
1440
1441 /* See if BINFO's type match OUTER_TYPE. If so, lookup
1442 BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
1443 method in vtable and insert method to NODES array
1444 or BASES_TO_CONSIDER if this array is non-NULL.
1445 Otherwise recurse to base BINFOs.
   This matches what get_binfo_at_offset does, but with the offset
1447 being unknown.
1448
1449 TYPE_BINFOS is a stack of BINFOS of types with defined
1450 virtual table seen on way from class type to BINFO.
1451
1452 MATCHED_VTABLES tracks virtual tables we already did lookup
1453 for virtual function in. INSERTED tracks nodes we already
1454 inserted.
1455
1456 ANONYMOUS is true if BINFO is part of anonymous namespace.
1457
1458 Clear COMPLETEP when we hit unreferable target.
1459 */
1460
static void
record_target_from_binfo (vec <cgraph_node *> &nodes,
			  vec <tree> *bases_to_consider,
			  tree binfo,
			  tree otr_type,
			  vec <tree> &type_binfos,
			  HOST_WIDE_INT otr_token,
			  tree outer_type,
			  HOST_WIDE_INT offset,
			  pointer_set_t *inserted,
			  pointer_set_t *matched_vtables,
			  bool anonymous,
			  bool *completep)
{
  tree type = BINFO_TYPE (binfo);
  int i;
  tree base_binfo;


  /* Maintain the stack of BINFOs with virtual tables; it is popped on
     every exit path below (including the early `return' inside the match
     branch, which pops before returning).  */
  if (BINFO_VTABLE (binfo))
    type_binfos.safe_push (binfo);
  if (types_same_for_odr (type, outer_type))
    {
      int i;
      tree type_binfo = NULL;

      /* Lookup BINFO with virtual table.  For normal types it is always last
	 binfo on stack.  */
      for (i = type_binfos.length () - 1; i >= 0; i--)
	if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
	  {
	    type_binfo = type_binfos[i];
	    break;
	  }
      if (BINFO_VTABLE (binfo))
	type_binfos.pop ();
      /* If this is duplicated BINFO for base shared by virtual inheritance,
	 we may not have its associated vtable.  This is not a problem, since
	 we will walk it on the other path.  */
      if (!type_binfo)
	return;
      tree inner_binfo = get_binfo_at_offset (type_binfo,
					      offset, otr_type);
      /* Failure to find the sub-binfo can only legitimately happen when an
	 ODR violation was already diagnosed.  */
      if (!inner_binfo)
	{
	  gcc_assert (odr_violation_reported);
	  return;
	}
      /* For types in anonymous namespace first check if the respective vtable
	 is alive.  If not, we know the type can't be called.  */
      if (!flag_ltrans && anonymous)
	{
	  tree vtable = BINFO_VTABLE (inner_binfo);
	  varpool_node *vnode;

	  if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
	    vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
	  vnode = varpool_node::get (vtable);
	  if (!vnode || !vnode->definition)
	    return;
	}
      gcc_assert (inner_binfo);
      /* When collecting into BASES_TO_CONSIDER only peek at the set
	 (contains); when recording directly, insert so each vtable is
	 looked up exactly once.  */
      if (bases_to_consider
	  ? !pointer_set_contains (matched_vtables, BINFO_VTABLE (inner_binfo))
	  : !pointer_set_insert (matched_vtables, BINFO_VTABLE (inner_binfo)))
	{
	  bool can_refer;
	  tree target = gimple_get_virt_method_for_binfo (otr_token,
							  inner_binfo,
							  &can_refer);
	  if (!bases_to_consider)
	    maybe_record_node (nodes, target, inserted, can_refer, completep);
	  /* Destructors are never called via construction vtables.  */
	  else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
	    bases_to_consider->safe_push (target);
	}
      return;
    }

  /* Walk bases.  */
  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    /* Walking bases that have no virtual method is pointless exercise.  */
    if (polymorphic_type_binfo_p (base_binfo))
      record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
				type_binfos,
				otr_token, outer_type, offset, inserted,
				matched_vtables, anonymous, completep);
  if (BINFO_VTABLE (binfo))
    type_binfos.pop ();
}
1551
1552 /* Lookup virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
1553 of TYPE, insert them to NODES, recurse into derived nodes.
1554 INSERTED is used to avoid duplicate insertions of methods into NODES.
1555 MATCHED_VTABLES are used to avoid duplicate walking vtables.
1556 Clear COMPLETEP if unreferable target is found.
1557
   If CONSIDER_CONSTRUCTION is true, record to BASES_TO_CONSIDER
1559 all cases where BASE_SKIPPED is true (because the base is abstract
1560 class). */
1561
1562 static void
1563 possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
1564 pointer_set_t *inserted,
1565 pointer_set_t *matched_vtables,
1566 tree otr_type,
1567 odr_type type,
1568 HOST_WIDE_INT otr_token,
1569 tree outer_type,
1570 HOST_WIDE_INT offset,
1571 bool *completep,
1572 vec <tree> &bases_to_consider,
1573 bool consider_construction)
1574 {
1575 tree binfo = TYPE_BINFO (type->type);
1576 unsigned int i;
1577 vec <tree> type_binfos = vNULL;
1578 bool possibly_instantiated = type_possibly_instantiated_p (type->type);
1579
1580 /* We may need to consider types w/o instances because of possible derived
1581 types using their methods either directly or via construction vtables.
1582 We are safe to skip them when all derivations are known, since we will
1583 handle them later.
1584 This is done by recording them to BASES_TO_CONSIDER array. */
1585 if (possibly_instantiated || consider_construction)
1586 {
1587 record_target_from_binfo (nodes,
1588 (!possibly_instantiated
1589 && type_all_derivations_known_p (type->type))
1590 ? &bases_to_consider : NULL,
1591 binfo, otr_type, type_binfos, otr_token,
1592 outer_type, offset,
1593 inserted, matched_vtables,
1594 type->anonymous_namespace, completep);
1595 }
1596 type_binfos.release ();
1597 for (i = 0; i < type->derived_types.length (); i++)
1598 possible_polymorphic_call_targets_1 (nodes, inserted,
1599 matched_vtables,
1600 otr_type,
1601 type->derived_types[i],
1602 otr_token, outer_type, offset, completep,
1603 bases_to_consider, consider_construction);
1604 }
1605
1606 /* Cache of queries for polymorphic call targets.
1607
1608 Enumerating all call targets may get expensive when there are many
1609 polymorphic calls in the program, so we memoize all the previous
1610 queries and avoid duplicated work. */
1611
struct polymorphic_call_target_d
{
  /* Vtable slot of the call.  */
  HOST_WIDE_INT otr_token;
  /* Known outer object / speculation info of the call.  */
  ipa_polymorphic_call_context context;
  /* ODR type of the call (OTR_TYPE).  */
  odr_type type;
  /* Computed list of possible targets.  */
  vec <cgraph_node *> targets;
  /* Count of targets belonging to the speculative part of CONTEXT.  */
  int speculative_targets;
  /* True when TARGETS is known to be exhaustive.  */
  bool complete;
};
1621
1622 /* Polymorphic call target cache helpers. */
1623
/* Hash-table traits: keys and values are both polymorphic_call_target_d.  */
struct polymorphic_call_target_hasher
{
  typedef polymorphic_call_target_d value_type;
  typedef polymorphic_call_target_d compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};
1632
1633 /* Return the computed hashcode for ODR_QUERY. */
1634
inline hashval_t
polymorphic_call_target_hasher::hash (const value_type *odr_query)
{
  hashval_t hash;

  /* Mix the token with the ODR type id.  */
  hash = iterative_hash_host_wide_int
	  (odr_query->otr_token,
	   odr_query->type->id);
  /* Then the non-speculative part of the context.  */
  hash = iterative_hash_hashval_t (TYPE_UID (odr_query->context.outer_type),
				   hash);
  hash = iterative_hash_host_wide_int (odr_query->context.offset, hash);
  /* The speculative part is mixed in only when present, so queries without
     speculation hash the same regardless of stale speculative fields.  */
  if (odr_query->context.speculative_outer_type)
    {
      hash = iterative_hash_hashval_t
	      (TYPE_UID (odr_query->context.speculative_outer_type), hash);
      hash = iterative_hash_host_wide_int (odr_query->context.speculative_offset,
					   hash);
    }
  /* Finally fold the three boolean flags into distinct bits.  */
  return iterative_hash_hashval_t
	  (((int)odr_query->context.maybe_in_construction << 2)
	   | ((int)odr_query->context.speculative_maybe_derived_type << 1)
	   | (int)odr_query->context.maybe_derived_type, hash);
}
1658
1659 /* Compare cache entries T1 and T2. */
1660
1661 inline bool
1662 polymorphic_call_target_hasher::equal (const value_type *t1,
1663 const compare_type *t2)
1664 {
1665 return (t1->type == t2->type && t1->otr_token == t2->otr_token
1666 && t1->context.offset == t2->context.offset
1667 && t1->context.speculative_offset == t2->context.speculative_offset
1668 && t1->context.outer_type == t2->context.outer_type
1669 && t1->context.speculative_outer_type == t2->context.speculative_outer_type
1670 && t1->context.maybe_in_construction
1671 == t2->context.maybe_in_construction
1672 && t1->context.maybe_derived_type == t2->context.maybe_derived_type
1673 && (t1->context.speculative_maybe_derived_type
1674 == t2->context.speculative_maybe_derived_type));
1675 }
1676
1677 /* Remove entry in polymorphic call target cache hash. */
1678
inline void
polymorphic_call_target_hasher::remove (value_type *v)
{
  /* Release the embedded vec before freeing the entry itself.  */
  v->targets.release ();
  free (v);
}
1685
1686 /* Polymorphic call target query cache. */
1687
/* The cache itself; allocated lazily, flushed when a cached target node
   is removed (see devirt_node_removal_hook).  */
typedef hash_table<polymorphic_call_target_hasher>
   polymorphic_call_target_hash_type;
static polymorphic_call_target_hash_type *polymorphic_call_target_hash;
1691
1692 /* Destroy polymorphic call target query cache. */
1693
1694 static void
1695 free_polymorphic_call_targets_hash ()
1696 {
1697 if (cached_polymorphic_call_targets)
1698 {
1699 delete polymorphic_call_target_hash;
1700 polymorphic_call_target_hash = NULL;
1701 pointer_set_destroy (cached_polymorphic_call_targets);
1702 cached_polymorphic_call_targets = NULL;
1703 }
1704 }
1705
1706 /* When virtual function is removed, we may need to flush the cache. */
1707
1708 static void
1709 devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
1710 {
1711 if (cached_polymorphic_call_targets
1712 && pointer_set_contains (cached_polymorphic_call_targets, n))
1713 free_polymorphic_call_targets_hash ();
1714 }
1715
/* Return true when TYPE contains a polymorphic type and thus is interesting
1717 for devirtualization machinery. */
1718
1719 bool
1720 contains_polymorphic_type_p (const_tree type)
1721 {
1722 type = TYPE_MAIN_VARIANT (type);
1723
1724 if (RECORD_OR_UNION_TYPE_P (type))
1725 {
1726 if (TYPE_BINFO (type)
1727 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
1728 return true;
1729 for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1730 if (TREE_CODE (fld) == FIELD_DECL
1731 && !DECL_ARTIFICIAL (fld)
1732 && contains_polymorphic_type_p (TREE_TYPE (fld)))
1733 return true;
1734 return false;
1735 }
1736 if (TREE_CODE (type) == ARRAY_TYPE)
1737 return contains_polymorphic_type_p (TREE_TYPE (type));
1738 return false;
1739 }
1740
1741 /* CONTEXT->OUTER_TYPE is a type of memory object where object of EXPECTED_TYPE
1742 is contained at CONTEXT->OFFSET. Walk the memory representation of
1743 CONTEXT->OUTER_TYPE and find the outermost class type that match
1744 EXPECTED_TYPE or contain EXPECTED_TYPE as a base. Update CONTEXT
1745 to represent it.
1746
1747 For example when CONTEXT represents type
1748 class A
1749 {
1750 int a;
1751 class B b;
1752 }
1753 and we look for type at offset sizeof(int), we end up with B and offset 0.
1754 If the same is produced by multiple inheritance, we end up with A and offset
1755 sizeof(int).
1756
   If we cannot find the corresponding class, give up by setting
   CONTEXT->OUTER_TYPE to EXPECTED_TYPE and CONTEXT->OFFSET to 0.
   Return true when the lookup was successful.  */
1760
1761 static bool
1762 get_class_context (ipa_polymorphic_call_context *context,
1763 tree expected_type)
1764 {
1765 tree type = context->outer_type;
1766 HOST_WIDE_INT offset = context->offset;
1767 bool speculative = false;
1768 bool speculation_valid = false;
1769 bool valid = false;
1770
1771 if (!context->outer_type)
1772 {
1773 type = context->outer_type = expected_type;
1774 context->offset = offset = 0;
1775 }
1776 /* See if speculative type seem to be derrived from outer_type.
1777 Then speculation is valid only if it really is a derivate and derived types
1778 are allowed.
1779
1780 The test does not really look for derivate, but also accepts the case where
1781 outer_type is a field of speculative_outer_type. In this case eiter
1782 MAYBE_DERIVED_TYPE is false and we have full non-speculative information or
1783 the loop bellow will correctly update SPECULATIVE_OUTER_TYPE
1784 and SPECULATIVE_MAYBE_DERIVED_TYPE. */
1785 if (context->speculative_outer_type
1786 && context->speculative_offset >= context->offset
1787 && contains_type_p (context->speculative_outer_type,
1788 context->offset - context->speculative_offset,
1789 context->outer_type))
1790 speculation_valid = context->maybe_derived_type;
1791 else
1792 {
1793 context->speculative_outer_type = NULL;
1794 context->speculative_offset = 0;
1795 context->speculative_maybe_derived_type = false;
1796 }
1797
1798 /* Find the sub-object the constant actually refers to and mark whether it is
1799 an artificial one (as opposed to a user-defined one).
1800
1801 This loop is performed twice; first time for outer_type and second time
1802 for speculative_outer_type. The second iteration has SPECULATIVE set. */
1803 while (true)
1804 {
1805 HOST_WIDE_INT pos, size;
1806 tree fld;
1807
1808 /* On a match, just return what we found. */
1809 if (TREE_CODE (type) == TREE_CODE (expected_type)
1810 && (!in_lto_p
1811 || (TREE_CODE (type) == RECORD_TYPE
1812 && TYPE_BINFO (type)
1813 && polymorphic_type_binfo_p (TYPE_BINFO (type))))
1814 && types_same_for_odr (type, expected_type))
1815 {
1816 if (speculative)
1817 {
1818 gcc_assert (speculation_valid);
1819 gcc_assert (valid);
1820
1821 /* If we did not match the offset, just give up on speculation. */
1822 if (offset != 0
1823 || (types_same_for_odr (context->speculative_outer_type,
1824 context->outer_type)
1825 && (context->maybe_derived_type
1826 == context->speculative_maybe_derived_type)))
1827 {
1828 context->speculative_outer_type = NULL;
1829 context->speculative_offset = 0;
1830 }
1831 return true;
1832 }
1833 else
1834 {
1835 /* Type can not contain itself on an non-zero offset. In that case
1836 just give up. */
1837 if (offset != 0)
1838 {
1839 valid = false;
1840 goto give_up;
1841 }
1842 valid = true;
1843 /* If speculation is not valid or we determined type precisely,
1844 we are done. */
1845 if (!speculation_valid
1846 || !context->maybe_derived_type)
1847 {
1848 context->speculative_outer_type = NULL;
1849 context->speculative_offset = 0;
1850 return true;
1851 }
1852 /* Otherwise look into speculation now. */
1853 else
1854 {
1855 speculative = true;
1856 type = context->speculative_outer_type;
1857 offset = context->speculative_offset;
1858 continue;
1859 }
1860 }
1861 }
1862
1863 /* Walk fields and find corresponding on at OFFSET. */
1864 if (TREE_CODE (type) == RECORD_TYPE)
1865 {
1866 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1867 {
1868 if (TREE_CODE (fld) != FIELD_DECL)
1869 continue;
1870
1871 pos = int_bit_position (fld);
1872 size = tree_to_uhwi (DECL_SIZE (fld));
1873 if (pos <= offset && (pos + size) > offset)
1874 break;
1875 }
1876
1877 if (!fld)
1878 goto give_up;
1879
1880 type = TYPE_MAIN_VARIANT (TREE_TYPE (fld));
1881 offset -= pos;
1882 /* DECL_ARTIFICIAL represents a basetype. */
1883 if (!DECL_ARTIFICIAL (fld))
1884 {
1885 if (!speculative)
1886 {
1887 context->outer_type = type;
1888 context->offset = offset;
1889 /* As soon as we se an field containing the type,
1890 we know we are not looking for derivations. */
1891 context->maybe_derived_type = false;
1892 }
1893 else
1894 {
1895 context->speculative_outer_type = type;
1896 context->speculative_offset = offset;
1897 context->speculative_maybe_derived_type = false;
1898 }
1899 }
1900 }
1901 else if (TREE_CODE (type) == ARRAY_TYPE)
1902 {
1903 tree subtype = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1904
1905 /* Give up if we don't know array size. */
1906 if (!tree_fits_shwi_p (TYPE_SIZE (subtype))
1907 || !tree_to_shwi (TYPE_SIZE (subtype)) <= 0)
1908 goto give_up;
1909 offset = offset % tree_to_shwi (TYPE_SIZE (subtype));
1910 type = subtype;
1911 if (!speculative)
1912 {
1913 context->outer_type = type;
1914 context->offset = offset;
1915 context->maybe_derived_type = false;
1916 }
1917 else
1918 {
1919 context->speculative_outer_type = type;
1920 context->speculative_offset = offset;
1921 context->speculative_maybe_derived_type = false;
1922 }
1923 }
1924 /* Give up on anything else. */
1925 else
1926 goto give_up;
1927 }
1928
1929 /* If we failed to find subtype we look for, give up and fall back to the
1930 most generic query. */
1931 give_up:
1932 context->speculative_outer_type = NULL;
1933 context->speculative_offset = 0;
1934 context->speculative_maybe_derived_type = false;
1935 if (valid)
1936 return true;
1937 context->outer_type = expected_type;
1938 context->offset = 0;
1939 context->maybe_derived_type = true;
1940 context->maybe_in_construction = true;
1941 /* POD can be changed to an instance of a polymorphic type by
1942 placement new. Here we play safe and assume that any
1943 non-polymorphic type is POD. */
1944 if ((TREE_CODE (type) != RECORD_TYPE
1945 || !TYPE_BINFO (type)
1946 || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
1947 && (!TYPE_SIZE (type)
1948 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1949 || (offset + tree_to_uhwi (TYPE_SIZE (expected_type)) <=
1950 tree_to_uhwi (TYPE_SIZE (type)))))
1951 return true;
1952 return false;
1953 }
1954
/* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET.
   Wrapper around get_class_context that builds a fresh, non-speculative
   polymorphic call context and asks whether walking OUTER_TYPE's layout
   reaches OTR_TYPE at the given bit OFFSET.  */

static bool
contains_type_p (tree outer_type, HOST_WIDE_INT offset,
		 tree otr_type)
{
  /* NOTE(review): this is a positional aggregate initializer; the values
     0/NULL/false here must line up with the speculative_* fields and the
     maybe_* flags of ipa_polymorphic_call_context — confirm against the
     struct declaration before reordering members or values.  */
  ipa_polymorphic_call_context context = {offset, 0,
					  TYPE_MAIN_VARIANT (outer_type),
					  NULL, false, true, false};
  return get_class_context (&context, otr_type);
}
1966
1967 /* Lookup base of BINFO that has virtual table VTABLE with OFFSET. */
1968
1969 static tree
1970 subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
1971 tree vtable)
1972 {
1973 tree v = BINFO_VTABLE (binfo);
1974 int i;
1975 tree base_binfo;
1976 unsigned HOST_WIDE_INT this_offset;
1977
1978 if (v)
1979 {
1980 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
1981 gcc_unreachable ();
1982
1983 if (offset == this_offset
1984 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
1985 return binfo;
1986 }
1987
1988 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1989 if (polymorphic_type_binfo_p (base_binfo))
1990 {
1991 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
1992 if (base_binfo)
1993 return base_binfo;
1994 }
1995 return NULL;
1996 }
1997
1998 /* T is known constant value of virtual table pointer.
1999 Store virtual table to V and its offset to OFFSET.
2000 Return false if T does not look like virtual table reference. */
2001
2002 bool
2003 vtable_pointer_value_to_vtable (const_tree t, tree *v,
2004 unsigned HOST_WIDE_INT *offset)
2005 {
2006 /* We expect &MEM[(void *)&virtual_table + 16B].
2007 We obtain object's BINFO from the context of the virtual table.
2008 This one contains pointer to virtual table represented via
2009 POINTER_PLUS_EXPR. Verify that this pointer match to what
2010 we propagated through.
2011
2012 In the case of virtual inheritance, the virtual tables may
2013 be nested, i.e. the offset may be different from 16 and we may
2014 need to dive into the type representation. */
2015 if (TREE_CODE (t) == ADDR_EXPR
2016 && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
2017 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
2018 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
2019 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
2020 == VAR_DECL)
2021 && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
2022 (TREE_OPERAND (t, 0), 0), 0)))
2023 {
2024 *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
2025 *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
2026 return true;
2027 }
2028
2029 /* Alternative representation, used by C++ frontend is POINTER_PLUS_EXPR.
2030 We need to handle it when T comes from static variable initializer or
2031 BINFO. */
2032 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
2033 {
2034 *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
2035 t = TREE_OPERAND (t, 0);
2036 }
2037 else
2038 *offset = 0;
2039
2040 if (TREE_CODE (t) != ADDR_EXPR)
2041 return false;
2042 *v = TREE_OPERAND (t, 0);
2043 return true;
2044 }
2045
2046 /* T is known constant value of virtual table pointer. Return BINFO of the
2047 instance type. */
2048
2049 tree
2050 vtable_pointer_value_to_binfo (const_tree t)
2051 {
2052 tree vtable;
2053 unsigned HOST_WIDE_INT offset;
2054
2055 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
2056 return NULL_TREE;
2057
2058 /* FIXME: for stores of construction vtables we return NULL,
2059 because we do not have BINFO for those. Eventually we should fix
2060 our representation to allow this case to be handled, too.
2061 In the case we see store of BINFO we however may assume
2062 that standard folding will be ale to cope with it. */
2063 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
2064 offset, vtable);
2065 }
2066
2067 /* We know that the instance is stored in variable or parameter
2068 (not dynamically allocated) and we want to disprove the fact
2069 that it may be in construction at invocation of CALL.
2070
2071 For the variable to be in construction we actually need to
2072 be in constructor of corresponding global variable or
2073 the inline stack of CALL must contain the constructor.
2074 Check this condition. This check works safely only before
2075 IPA passes, because inline stacks may become out of date
2076 later. */
2077
2078 bool
2079 decl_maybe_in_construction_p (tree base, tree outer_type,
2080 gimple call, tree function)
2081 {
2082 outer_type = TYPE_MAIN_VARIANT (outer_type);
2083 gcc_assert (DECL_P (base));
2084
2085 /* After inlining the code unification optimizations may invalidate
2086 inline stacks. Also we need to give up on global variables after
2087 IPA, because addresses of these may have been propagated to their
2088 constructors. */
2089 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
2090 return true;
2091
2092 /* Pure functions can not do any changes on the dynamic type;
2093 that require writting to memory. */
2094 if (!auto_var_in_fn_p (base, function)
2095 && flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
2096 return false;
2097
2098 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
2099 block = BLOCK_SUPERCONTEXT (block))
2100 if (BLOCK_ABSTRACT_ORIGIN (block)
2101 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
2102 {
2103 tree fn = BLOCK_ABSTRACT_ORIGIN (block);
2104
2105 if (TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
2106 || (!DECL_CXX_CONSTRUCTOR_P (fn)
2107 || !DECL_CXX_DESTRUCTOR_P (fn)))
2108 {
2109 /* Watch for clones where we constant propagated the first
2110 argument (pointer to the instance). */
2111 fn = DECL_ABSTRACT_ORIGIN (fn);
2112 if (!fn
2113 || !is_global_var (base)
2114 || TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
2115 || (!DECL_CXX_CONSTRUCTOR_P (fn)
2116 || !DECL_CXX_DESTRUCTOR_P (fn)))
2117 continue;
2118 }
2119 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
2120 continue;
2121
2122 /* FIXME: this can go away once we have ODR types equivalency on
2123 LTO level. */
2124 if (in_lto_p && !polymorphic_type_binfo_p (TYPE_BINFO (outer_type)))
2125 return true;
2126 tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (fn)));
2127 if (types_same_for_odr (type, outer_type))
2128 return true;
2129 }
2130
2131 if (TREE_CODE (base) == VAR_DECL
2132 && is_global_var (base))
2133 {
2134 if (TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
2135 || (!DECL_CXX_CONSTRUCTOR_P (function)
2136 || !DECL_CXX_DESTRUCTOR_P (function)))
2137 {
2138 if (!DECL_ABSTRACT_ORIGIN (function))
2139 return false;
2140 /* Watch for clones where we constant propagated the first
2141 argument (pointer to the instance). */
2142 function = DECL_ABSTRACT_ORIGIN (function);
2143 if (!function
2144 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
2145 || (!DECL_CXX_CONSTRUCTOR_P (function)
2146 || !DECL_CXX_DESTRUCTOR_P (function)))
2147 return false;
2148 }
2149 /* FIXME: this can go away once we have ODR types equivalency on
2150 LTO level. */
2151 if (in_lto_p && !polymorphic_type_binfo_p (TYPE_BINFO (outer_type)))
2152 return true;
2153 tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (function)));
2154 if (types_same_for_odr (type, outer_type))
2155 return true;
2156 }
2157 return false;
2158 }
2159
2160 /* Proudce polymorphic call context for call method of instance
2161 that is located within BASE (that is assumed to be a decl) at OFFSET. */
2162
2163 static void
2164 get_polymorphic_call_info_for_decl (ipa_polymorphic_call_context *context,
2165 tree base, HOST_WIDE_INT offset)
2166 {
2167 gcc_assert (DECL_P (base));
2168
2169 context->outer_type = TYPE_MAIN_VARIANT (TREE_TYPE (base));
2170 context->offset = offset;
2171 context->speculative_outer_type = NULL;
2172 context->speculative_offset = 0;
2173 context->speculative_maybe_derived_type = true;
2174 /* Make very conservative assumption that all objects
2175 may be in construction.
2176 TODO: ipa-prop already contains code to tell better.
2177 merge it later. */
2178 context->maybe_in_construction = true;
2179 context->maybe_derived_type = false;
2180 }
2181
2182 /* CST is an invariant (address of decl), try to get meaningful
2183 polymorphic call context for polymorphic call of method
2184 if instance of OTR_TYPE that is located at OFFSET of this invariant.
2185 Return FALSE if nothing meaningful can be found. */
2186
2187 bool
2188 get_polymorphic_call_info_from_invariant (ipa_polymorphic_call_context *context,
2189 tree cst,
2190 tree otr_type,
2191 HOST_WIDE_INT offset)
2192 {
2193 HOST_WIDE_INT offset2, size, max_size;
2194 tree base;
2195
2196 if (TREE_CODE (cst) != ADDR_EXPR)
2197 return false;
2198
2199 cst = TREE_OPERAND (cst, 0);
2200 base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
2201 if (!DECL_P (base) || max_size == -1 || max_size != size)
2202 return false;
2203
2204 /* Only type inconsistent programs can have otr_type that is
2205 not part of outer type. */
2206 if (!contains_type_p (TREE_TYPE (base), offset, otr_type))
2207 return false;
2208
2209 get_polymorphic_call_info_for_decl (context, base, offset);
2210 return true;
2211 }
2212
2213 /* See if OP is SSA name initialized as a copy or by single assignment.
2214 If so, walk the SSA graph up. */
2215
2216 static tree
2217 walk_ssa_copies (tree op)
2218 {
2219 STRIP_NOPS (op);
2220 while (TREE_CODE (op) == SSA_NAME
2221 && !SSA_NAME_IS_DEFAULT_DEF (op)
2222 && SSA_NAME_DEF_STMT (op)
2223 && gimple_assign_single_p (SSA_NAME_DEF_STMT (op)))
2224 {
2225 if (gimple_assign_load_p (SSA_NAME_DEF_STMT (op)))
2226 return op;
2227 op = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op));
2228 STRIP_NOPS (op);
2229 }
2230 return op;
2231 }
2232
/* Given REF call in FNDECL, determine class of the polymorphic
   call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
   CALL is optional argument giving the actual statement (usually call) where
   the context is used.
   Return pointer to object described by the context.  */

tree
get_polymorphic_call_info (tree fndecl,
			   tree ref,
			   tree *otr_type,
			   HOST_WIDE_INT *otr_token,
			   ipa_polymorphic_call_context *context,
			   gimple call)
{
  tree base_pointer;
  *otr_type = obj_type_ref_class (ref);
  *otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));

  /* Set up basic info in case we find nothing interesting in the analysis.  */
  context->speculative_outer_type = NULL;
  context->speculative_offset = 0;
  context->speculative_maybe_derived_type = true;
  context->outer_type = TYPE_MAIN_VARIANT (*otr_type);
  context->offset = 0;
  base_pointer = OBJ_TYPE_REF_OBJECT (ref);
  context->maybe_derived_type = true;
  context->maybe_in_construction = true;

  /* Walk SSA for outer object.  Each iteration strips one layer of
     copies, address computations or pointer adjustments and accumulates
     the byte offset into CONTEXT->offset (in bits).  */
  do
    {
      base_pointer = walk_ssa_copies (base_pointer);
      if (TREE_CODE (base_pointer) == ADDR_EXPR)
	{
	  HOST_WIDE_INT size, max_size;
	  HOST_WIDE_INT offset2;
	  tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
					       &offset2, &size, &max_size);

	  /* If this is a varying address, punt.  */
	  if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
	      && max_size != -1
	      && max_size == size)
	    {
	      /* We found dereference of a pointer.  Type of the pointer
		 and MEM_REF is meaningless, but we can look further.  */
	      if (TREE_CODE (base) == MEM_REF)
		{
		  base_pointer = TREE_OPERAND (base, 0);
		  context->offset
		    += offset2 + mem_ref_offset (base).to_short_addr ()
		       * BITS_PER_UNIT;
		  /* Looking through a dereference invalidates any outer
		     type determined so far.  */
		  context->outer_type = NULL;
		}
	      /* We found base object.  In this case the outer_type
		 is known.  */
	      else if (DECL_P (base))
		{
		  gcc_assert (!POINTER_TYPE_P (TREE_TYPE (base)));

		  /* Only type inconsistent programs can have otr_type that is
		     not part of outer type.  */
		  if (!contains_type_p (TREE_TYPE (base),
					context->offset + offset2, *otr_type))
		    {
		      /* Use OTR_TOKEN = INT_MAX as a marker of probably type
			 inconsistent code sequences; we arrange the calls to
			 be builtin_unreachable later.  */
		      *otr_token = INT_MAX;
		      return base_pointer;
		    }
		  get_polymorphic_call_info_for_decl (context, base,
						      context->offset + offset2);
		  /* Try to disprove the conservative in-construction
		     assumption using the inline stack of CALL.  */
		  if (context->maybe_in_construction && call)
		    context->maybe_in_construction
		     = decl_maybe_in_construction_p (base,
						     context->outer_type,
						     call,
						     current_function_decl);
		  return base_pointer;
		}
	      else
		break;
	    }
	  else
	    break;
	}
      else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
	       && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
	{
	  /* NOTE(review): the guard checks tree_fits_uhwi_p but the value
	     is read with tree_to_shwi; offsets above SHWI max would be
	     misinterpreted — confirm the intended predicate.  */
	  context->offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
		    * BITS_PER_UNIT;
	  base_pointer = TREE_OPERAND (base_pointer, 0);
	}
      else
	break;
    }
  while (true);

  /* Try to determine type of the outer object.  */
  if (TREE_CODE (base_pointer) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
      && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
    {
      /* See if parameter is THIS pointer of a method.  */
      if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
	  && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
	{
	  context->outer_type
	     = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
	  gcc_assert (TREE_CODE (context->outer_type) == RECORD_TYPE);

	  /* Dynamic casting has possibly upcasted the type
	     in the hierarchy.  In this case outer type is less
	     informative than inner type and we should forget
	     about it.  */
	  if (!contains_type_p (context->outer_type, context->offset,
				*otr_type))
	    {
	      context->outer_type = NULL;
	      return base_pointer;
	    }

	  /* If the function is constructor or destructor, then
	     the type is possibly in construction, but we know
	     it is not derived type.  */
	  if (DECL_CXX_CONSTRUCTOR_P (fndecl)
	      || DECL_CXX_DESTRUCTOR_P (fndecl))
	    {
	      context->maybe_in_construction = true;
	      context->maybe_derived_type = false;
	    }
	  else
	    {
	      context->maybe_derived_type = true;
	      context->maybe_in_construction = false;
	    }
	  return base_pointer;
	}
      /* Non-PODs passed by value are really passed by invisible
	 reference.  In this case we also know the type of the
	 object.  */
      if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
	{
	  context->outer_type
	     = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
	  gcc_assert (!POINTER_TYPE_P (context->outer_type));
	  /* Only type inconsistent programs can have otr_type that is
	     not part of outer type.  */
	  if (!contains_type_p (context->outer_type, context->offset,
				*otr_type))
	    {
	      /* Use OTR_TOKEN = INT_MAX as a marker of probably type
		 inconsistent code sequences; we arrange the calls to be
		 builtin_unreachable later.  */
	      *otr_token = INT_MAX;
	      return base_pointer;
	    }
	  context->maybe_derived_type = false;
	  context->maybe_in_construction = false;
	  return base_pointer;
	}
    }

  tree base_type = TREE_TYPE (base_pointer);

  /* A default definition that is not a parameter means the pointer is
     uninitialized; the call is undefined.  */
  if (TREE_CODE (base_pointer) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
      && TREE_CODE (SSA_NAME_VAR (base_pointer)) != PARM_DECL)
    {
      /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
	 code sequences; we arrange the calls to be builtin_unreachable
	 later.  */
      *otr_token = INT_MAX;
      return base_pointer;
    }
  /* Prefer the type of the value the pointer was loaded/copied from.  */
  if (TREE_CODE (base_pointer) == SSA_NAME
      && SSA_NAME_DEF_STMT (base_pointer)
      && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
    base_type = TREE_TYPE (gimple_assign_rhs1
			    (SSA_NAME_DEF_STMT (base_pointer)));

  /* Record the static pointer type as a speculative outer type; it is a
     hint only, since the pointer may have been cast.  */
  if (POINTER_TYPE_P (base_type)
      && contains_type_p (TYPE_MAIN_VARIANT (TREE_TYPE (base_type)),
			  context->offset,
			  *otr_type))
    {
      context->speculative_outer_type = TYPE_MAIN_VARIANT
					 (TREE_TYPE (base_type));
      context->speculative_offset = context->offset;
      context->speculative_maybe_derived_type = true;
    }
  /* TODO: There are multiple ways to derive a type.  For instance
     if BASE_POINTER is passed to an constructor call prior our reference.
     We do not make this type of flow sensitive analysis yet.  */
  return base_pointer;
}
2429
/* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
   Lookup their respective virtual methods for OTR_TOKEN and OTR_TYPE
   and insert them to NODES.

   MATCHED_VTABLES and INSERTED is used to avoid duplicated work.
   COMPLETEP is cleared by maybe_record_node when a target can not
   be fully resolved.  */

static void
record_targets_from_bases (tree otr_type,
			   HOST_WIDE_INT otr_token,
			   tree outer_type,
			   HOST_WIDE_INT offset,
			   vec <cgraph_node *> &nodes,
			   pointer_set_t *inserted,
			   pointer_set_t *matched_vtables,
			   bool *completep)
{
  /* Each iteration steps one level down: from OUTER_TYPE into the base
     field that contains OFFSET, until OTR_TYPE itself is reached.  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree base_binfo;
      tree fld;

      /* Reached OTR_TYPE itself; no more bases to record.  */
      if (types_same_for_odr (outer_type, otr_type))
	return;

      /* Find the (artificial) field representing the base that covers
	 OFFSET.  */
      for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL)
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_to_shwi (DECL_SIZE (fld));
	  if (pos <= offset && (pos + size) > offset
	      /* Do not get confused by zero sized bases.  */
	      && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
	    break;
	}
      /* Within a class type we should always find corresponding fields.  */
      gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);

      /* Nonbasetypes should have been stripped by outer_class_type.  */
      gcc_assert (DECL_ARTIFICIAL (fld));

      outer_type = TREE_TYPE (fld);
      offset -= pos;

      base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
					offset, otr_type);
      if (!base_binfo)
	{
	  /* Missing BINFO can only happen for ODR-violating programs.  */
	  gcc_assert (odr_violation_reported);
	  return;
	}
      /* NOTE(review): this assert is unreachable when !base_binfo (we
	 returned just above); it is redundant and could be removed.  */
      gcc_assert (base_binfo);
      if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo)))
	{
	  bool can_refer;
	  tree target = gimple_get_virt_method_for_binfo (otr_token,
							  base_binfo,
							  &can_refer);
	  /* Destructors are never called through construction vtables,
	     so do not record them; anything else is a candidate.  */
	  if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
	    maybe_record_node (nodes, target, inserted, can_refer, completep);
	  pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo));
	}
    }
}
2496
2497 /* When virtual table is removed, we may need to flush the cache. */
2498
2499 static void
2500 devirt_variable_node_removal_hook (varpool_node *n,
2501 void *d ATTRIBUTE_UNUSED)
2502 {
2503 if (cached_polymorphic_call_targets
2504 && DECL_VIRTUAL_P (n->decl)
2505 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
2506 free_polymorphic_call_targets_hash ();
2507 }
2508
/* Return vector containing possible targets of polymorphic call of type
   OTR_TYPE calling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
   If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containing
   OTR_TYPE and include their virtual method.  This is useful for types
   possibly in construction or destruction where the virtual table may
   temporarily change to one of base types.  INCLUDE_DERIVER_TYPES make
   us to walk the inheritance graph for all derivations.

   OTR_TOKEN == INT_MAX is used to mark calls that are provably
   undefined and should be redirected to unreachable.

   If COMPLETEP is non-NULL, store true if the list is complete.
   CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
   in the target cache.  If user needs to visit every target list
   just once, it can memoize them.

   SPECULATIVE_TARGETSP receives the number of targets that are
   speculatively likely.  These include targets specified by the
   speculative part of polymorphic call context and also exclude all
   targets for classes in construction.

   Returned vector is placed into cache.  It is NOT caller's responsibility
   to free it.  The vector can be freed on cgraph_remove_node call if
   the particular node is a virtual function present in the cache.  */

vec <cgraph_node *>
possible_polymorphic_call_targets (tree otr_type,
				   HOST_WIDE_INT otr_token,
				   ipa_polymorphic_call_context context,
				   bool *completep,
				   void **cache_token,
				   int *speculative_targetsp)
{
  static struct cgraph_node_hook_list *node_removal_hook_holder;
  pointer_set_t *inserted;
  pointer_set_t *matched_vtables;
  vec <cgraph_node *> nodes = vNULL;
  vec <tree> bases_to_consider = vNULL;
  odr_type type, outer_type;
  polymorphic_call_target_d key;
  polymorphic_call_target_d **slot;
  unsigned int i;
  tree binfo, target;
  bool complete;
  bool can_refer;
  bool skipped = false;

  otr_type = TYPE_MAIN_VARIANT (otr_type);

  /* If ODR is not initialized, return empty incomplete list.  */
  if (!odr_hash)
    {
      if (completep)
	*completep = false;
      if (cache_token)
	*cache_token = NULL;
      if (speculative_targetsp)
	*speculative_targetsp = 0;
      return nodes;
    }

  /* If we hit type inconsistency, just return empty list of targets.  */
  if (otr_token == INT_MAX)
    {
      if (completep)
	*completep = true;
      if (cache_token)
	*cache_token = NULL;
      if (speculative_targetsp)
	*speculative_targetsp = 0;
      return nodes;
    }

  type = get_odr_type (otr_type, true);

  /* Recording type variants would waste results cache.  */
  gcc_assert (!context.outer_type
	      || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);

  /* Lookup the outer class type we want to walk.  */
  if ((context.outer_type || context.speculative_outer_type)
      && !get_class_context (&context, otr_type))
    {
      if (completep)
	*completep = false;
      if (cache_token)
	*cache_token = NULL;
      if (speculative_targetsp)
	*speculative_targetsp = 0;
      return nodes;
    }

  /* Check that get_class_context kept the main variant.  */
  gcc_assert (!context.outer_type
	      || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);

  /* We canonicalize our query, so we do not need extra hashtable entries.  */

  /* Without outer type, we have no use for offset.  Just do the
     basic search from inner type.  */
  if (!context.outer_type)
    {
      context.outer_type = otr_type;
      context.offset = 0;
    }
  /* We need to update our hierarchy if the type does not exist.  */
  outer_type = get_odr_type (context.outer_type, true);
  /* If the type is complete, there are no derivations.  */
  if (TYPE_FINAL_P (outer_type->type))
    context.maybe_derived_type = false;

  /* Initialize query cache.  */
  if (!cached_polymorphic_call_targets)
    {
      cached_polymorphic_call_targets = pointer_set_create ();
      polymorphic_call_target_hash
	= new polymorphic_call_target_hash_type (23);
      if (!node_removal_hook_holder)
	{
	  node_removal_hook_holder =
	    cgraph_add_node_removal_hook (&devirt_node_removal_hook, NULL);
	  varpool_add_node_removal_hook (&devirt_variable_node_removal_hook,
					 NULL);
	}
    }

  /* Lookup cached answer.  */
  key.type = type;
  key.otr_token = otr_token;
  key.context = context;
  slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
  if (cache_token)
    *cache_token = (void *)*slot;
  if (*slot)
    {
      if (completep)
	*completep = (*slot)->complete;
      if (speculative_targetsp)
	*speculative_targetsp = (*slot)->speculative_targets;
      return (*slot)->targets;
    }

  complete = true;

  /* Do actual search.  */
  timevar_push (TV_IPA_VIRTUAL_CALL);
  *slot = XCNEW (polymorphic_call_target_d);
  if (cache_token)
    *cache_token = (void *)*slot;
  (*slot)->type = type;
  (*slot)->otr_token = otr_token;
  (*slot)->context = context;
  (*slot)->speculative_targets = 0;

  inserted = pointer_set_create ();
  matched_vtables = pointer_set_create ();

  /* First record targets coming from the speculative part of the
     context; they are placed at the start of NODES and counted in
     speculative_targets.  */
  if (context.speculative_outer_type)
    {
      odr_type speculative_outer_type;
      speculative_outer_type = get_odr_type (context.speculative_outer_type, true);
      if (TYPE_FINAL_P (speculative_outer_type->type))
	context.speculative_maybe_derived_type = false;
      binfo = get_binfo_at_offset (TYPE_BINFO (speculative_outer_type->type),
				   context.speculative_offset, otr_type);
      if (binfo)
	target = gimple_get_virt_method_for_binfo (otr_token, binfo,
						   &can_refer);
      else
	target = NULL;
      /* NOTE(review): when BINFO is NULL, CAN_REFER stays uninitialized
	 here yet may be passed to maybe_record_node below (with TARGET
	 == NULL) — confirm maybe_record_node tolerates this or
	 initialize can_refer in the else branch.  */

      if (target)
	{
	  /* In the case we get complete method, we don't need
	     to walk derivations.  */
	  if (DECL_FINAL_P (target))
	    context.speculative_maybe_derived_type = false;
	}
      if (type_possibly_instantiated_p (speculative_outer_type->type))
	maybe_record_node (nodes, target, inserted, can_refer, &complete);
      if (binfo)
	pointer_set_insert (matched_vtables, BINFO_VTABLE (binfo));
      /* Next walk recursively all derived types.  */
      if (context.speculative_maybe_derived_type)
	{
	  /* For anonymous namespace types we can attempt to build full type.
	     All derivations must be in this unit (unless we see partial unit).  */
	  if (!type->all_derivations_known)
	    complete = false;
	  for (i = 0; i < speculative_outer_type->derived_types.length(); i++)
	    possible_polymorphic_call_targets_1 (nodes, inserted,
						 matched_vtables,
						 otr_type,
						 speculative_outer_type->derived_types[i],
						 otr_token, speculative_outer_type->type,
						 context.speculative_offset, &complete,
						 bases_to_consider,
						 false);
	}
      /* Finally walk bases, if asked to.  */
      (*slot)->speculative_targets = nodes.length();
    }

  /* First see virtual method of type itself.  */
  binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
			       context.offset, otr_type);
  if (binfo)
    target = gimple_get_virt_method_for_binfo (otr_token, binfo,
					       &can_refer);
  else
    {
      gcc_assert (odr_violation_reported);
      target = NULL;
    }

  /* Destructors are never called through construction virtual tables,
     because the type is always known.  */
  if (target && DECL_CXX_DESTRUCTOR_P (target))
    context.maybe_in_construction = false;

  if (target)
    {
      /* In the case we get complete method, we don't need
	 to walk derivations.  */
      if (DECL_FINAL_P (target))
	context.maybe_derived_type = false;
    }

  /* If OUTER_TYPE is abstract, we know we are not seeing its instance.  */
  if (type_possibly_instantiated_p (outer_type->type))
    maybe_record_node (nodes, target, inserted, can_refer, &complete);
  else
    {
      /* Remember we skipped OUTER_TYPE's own method; it may still be
	 recorded below when in-construction bases are considered.  */
      skipped = true;
      gcc_assert (in_lto_p || context.maybe_derived_type);
    }

  if (binfo)
    pointer_set_insert (matched_vtables, BINFO_VTABLE (binfo));

  /* Next walk recursively all derived types.  */
  if (context.maybe_derived_type)
    {
      /* For anonymous namespace types we can attempt to build full type.
	 All derivations must be in this unit (unless we see partial unit).  */
      if (!type->all_derivations_known)
	complete = false;
      for (i = 0; i < outer_type->derived_types.length(); i++)
	possible_polymorphic_call_targets_1 (nodes, inserted,
					     matched_vtables,
					     otr_type,
					     outer_type->derived_types[i],
					     otr_token, outer_type->type,
					     context.offset, &complete,
					     bases_to_consider,
					     context.maybe_in_construction);
    }

  /* If nothing was recorded speculatively, everything found so far
     counts as likely.  */
  if (!(*slot)->speculative_targets)
    (*slot)->speculative_targets = nodes.length();

  /* Destructors are never called through construction virtual tables,
     because the type is always known.  One of entries may be cxa_pure_virtual
     so look to at least two of them.  */
  if (context.maybe_in_construction)
    for (i = 0; i < MIN (nodes.length (), 2); i++)
      if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
	context.maybe_in_construction = false;
  if (context.maybe_in_construction)
    {
      if (type != outer_type
	  && (!skipped
	      || (context.maybe_derived_type
		  && !type_all_derivations_known_p (outer_type->type))))
	record_targets_from_bases (otr_type, otr_token, outer_type->type,
				   context.offset, nodes, inserted,
				   matched_vtables, &complete);
      if (skipped)
	maybe_record_node (nodes, target, inserted, can_refer, &complete);
      for (i = 0; i < bases_to_consider.length(); i++)
	maybe_record_node (nodes, bases_to_consider[i], inserted, can_refer, &complete);
    }
  bases_to_consider.release();

  /* Store the result into the cache slot and report to the caller.  */
  (*slot)->targets = nodes;
  (*slot)->complete = complete;
  if (completep)
    *completep = complete;
  if (speculative_targetsp)
    *speculative_targetsp = (*slot)->speculative_targets;

  pointer_set_destroy (inserted);
  pointer_set_destroy (matched_vtables);
  timevar_pop (TV_IPA_VIRTUAL_CALL);
  return nodes;
}
2806
2807 /* Dump all possible targets of a polymorphic call. */
2808
2809 void
2810 dump_possible_polymorphic_call_targets (FILE *f,
2811 tree otr_type,
2812 HOST_WIDE_INT otr_token,
2813 const ipa_polymorphic_call_context &ctx)
2814 {
2815 vec <cgraph_node *> targets;
2816 bool final;
2817 odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false);
2818 unsigned int i;
2819 int speculative;
2820
2821 if (!type)
2822 return;
2823 targets = possible_polymorphic_call_targets (otr_type, otr_token,
2824 ctx,
2825 &final, NULL, &speculative);
2826 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
2827 print_generic_expr (f, type->type, TDF_SLIM);
2828 fprintf (f, " token %i\n", (int)otr_token);
2829 if (ctx.outer_type || ctx.offset)
2830 {
2831 fprintf (f, " Contained in type:");
2832 print_generic_expr (f, ctx.outer_type, TDF_SLIM);
2833 fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n",
2834 ctx.offset);
2835 }
2836 if (ctx.speculative_outer_type)
2837 {
2838 fprintf (f, " Speculatively contained in type:");
2839 print_generic_expr (f, ctx.speculative_outer_type, TDF_SLIM);
2840 fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n",
2841 ctx.speculative_offset);
2842 }
2843
2844 fprintf (f, " %s%s%s%s\n ",
2845 final ? "This is a complete list." :
2846 "This is partial list; extra targets may be defined in other units.",
2847 ctx.maybe_in_construction ? " (base types included)" : "",
2848 ctx.maybe_derived_type ? " (derived types included)" : "",
2849 ctx.speculative_maybe_derived_type ? " (speculative derived types included)" : "");
2850 for (i = 0; i < targets.length (); i++)
2851 {
2852 char *name = NULL;
2853 if (i == (unsigned)speculative)
2854 fprintf (f, "\n Targets that are not likely:\n"
2855 " ");
2856 if (in_lto_p)
2857 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
2858 fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
2859 if (in_lto_p)
2860 free (name);
2861 if (!targets[i]->definition)
2862 fprintf (f, " (no definition%s)",
2863 DECL_DECLARED_INLINE_P (targets[i]->decl)
2864 ? " inline" : "");
2865 }
2866 fprintf (f, "\n\n");
2867 }
2868
2869
2870 /* Return true if N can be possibly target of a polymorphic call of
2871 OTR_TYPE/OTR_TOKEN. */
2872
2873 bool
2874 possible_polymorphic_call_target_p (tree otr_type,
2875 HOST_WIDE_INT otr_token,
2876 const ipa_polymorphic_call_context &ctx,
2877 struct cgraph_node *n)
2878 {
2879 vec <cgraph_node *> targets;
2880 unsigned int i;
2881 enum built_in_function fcode;
2882 bool final;
2883
2884 if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
2885 && ((fcode = DECL_FUNCTION_CODE (n->decl))
2886 == BUILT_IN_UNREACHABLE
2887 || fcode == BUILT_IN_TRAP))
2888 return true;
2889
2890 if (!odr_hash)
2891 return true;
2892 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
2893 for (i = 0; i < targets.length (); i++)
2894 if (n->semantically_equivalent_p (targets[i]))
2895 return true;
2896
2897 /* At a moment we allow middle end to dig out new external declarations
2898 as a targets of polymorphic calls. */
2899 if (!final && !n->definition)
2900 return true;
2901 return false;
2902 }
2903
2904
2905 /* After callgraph construction new external nodes may appear.
2906 Add them into the graph. */
2907
2908 void
2909 update_type_inheritance_graph (void)
2910 {
2911 struct cgraph_node *n;
2912
2913 if (!odr_hash)
2914 return;
2915 free_polymorphic_call_targets_hash ();
2916 timevar_push (TV_IPA_INHERITANCE);
2917 /* We reconstruct the graph starting from types of all methods seen in the
2918 the unit. */
2919 FOR_EACH_FUNCTION (n)
2920 if (DECL_VIRTUAL_P (n->decl)
2921 && !n->definition
2922 && n->real_symbol_p ())
2923 get_odr_type (method_class_type (TYPE_MAIN_VARIANT (TREE_TYPE (n->decl))),
2924 true);
2925 timevar_pop (TV_IPA_INHERITANCE);
2926 }
2927
2928
2929 /* Return true if N looks like likely target of a polymorphic call.
2930 Rule out cxa_pure_virtual, noreturns, function declared cold and
2931 other obvious cases. */
2932
2933 bool
2934 likely_target_p (struct cgraph_node *n)
2935 {
2936 int flags;
2937 /* cxa_pure_virtual and similar things are not likely. */
2938 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
2939 return false;
2940 flags = flags_from_decl_or_type (n->decl);
2941 if (flags & ECF_NORETURN)
2942 return false;
2943 if (lookup_attribute ("cold",
2944 DECL_ATTRIBUTES (n->decl)))
2945 return false;
2946 if (n->frequency < NODE_FREQUENCY_NORMAL)
2947 return false;
2948 /* If there are no virtual tables refering the target alive,
2949 the only way the target can be called is an instance comming from other
2950 compilation unit; speculative devirtualization is build around an
2951 assumption that won't happen. */
2952 if (!referenced_from_vtable_p (n))
2953 return false;
2954 return true;
2955 }
2956
2957 /* The ipa-devirt pass.
2958 When polymorphic call has only one likely target in the unit,
2959 turn it into speculative call. */
2960
2961 static unsigned int
2962 ipa_devirt (void)
2963 {
2964 struct cgraph_node *n;
2965 struct pointer_set_t *bad_call_targets = pointer_set_create ();
2966 struct cgraph_edge *e;
2967
2968 int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
2969 int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
2970 int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
2971
2972 FOR_EACH_DEFINED_FUNCTION (n)
2973 {
2974 bool update = false;
2975 if (dump_file && n->indirect_calls)
2976 fprintf (dump_file, "\n\nProcesing function %s/%i\n",
2977 n->name (), n->order);
2978 for (e = n->indirect_calls; e; e = e->next_callee)
2979 if (e->indirect_info->polymorphic)
2980 {
2981 struct cgraph_node *likely_target = NULL;
2982 void *cache_token;
2983 bool final;
2984 int speculative_targets;
2985 vec <cgraph_node *>targets
2986 = possible_polymorphic_call_targets
2987 (e, &final, &cache_token, &speculative_targets);
2988 unsigned int i;
2989
2990 if (dump_file)
2991 dump_possible_polymorphic_call_targets
2992 (dump_file, e);
2993
2994 npolymorphic++;
2995
2996 if (!cgraph_maybe_hot_edge_p (e))
2997 {
2998 if (dump_file)
2999 fprintf (dump_file, "Call is cold\n\n");
3000 ncold++;
3001 continue;
3002 }
3003 if (e->speculative)
3004 {
3005 if (dump_file)
3006 fprintf (dump_file, "Call is aready speculated\n\n");
3007 nspeculated++;
3008
3009 /* When dumping see if we agree with speculation. */
3010 if (!dump_file)
3011 continue;
3012 }
3013 if (pointer_set_contains (bad_call_targets,
3014 cache_token))
3015 {
3016 if (dump_file)
3017 fprintf (dump_file, "Target list is known to be useless\n\n");
3018 nmultiple++;
3019 continue;
3020 }
3021 for (i = 0; i < targets.length (); i++)
3022 if (likely_target_p (targets[i]))
3023 {
3024 if (likely_target)
3025 {
3026 if (i < (unsigned) speculative_targets)
3027 {
3028 likely_target = NULL;
3029 if (dump_file)
3030 fprintf (dump_file, "More than one likely target\n\n");
3031 nmultiple++;
3032 }
3033 break;
3034 }
3035 likely_target = targets[i];
3036 }
3037 if (!likely_target)
3038 {
3039 pointer_set_insert (bad_call_targets, cache_token);
3040 continue;
3041 }
3042 /* This is reached only when dumping; check if we agree or disagree
3043 with the speculation. */
3044 if (e->speculative)
3045 {
3046 struct cgraph_edge *e2;
3047 struct ipa_ref *ref;
3048 cgraph_speculative_call_info (e, e2, e, ref);
3049 if (e2->callee->ultimate_alias_target ()
3050 == likely_target->ultimate_alias_target ())
3051 {
3052 fprintf (dump_file, "We agree with speculation\n\n");
3053 nok++;
3054 }
3055 else
3056 {
3057 fprintf (dump_file, "We disagree with speculation\n\n");
3058 nwrong++;
3059 }
3060 continue;
3061 }
3062 if (!likely_target->definition)
3063 {
3064 if (dump_file)
3065 fprintf (dump_file, "Target is not an definition\n\n");
3066 nnotdefined++;
3067 continue;
3068 }
3069 /* Do not introduce new references to external symbols. While we
3070 can handle these just well, it is common for programs to
3071 incorrectly with headers defining methods they are linked
3072 with. */
3073 if (DECL_EXTERNAL (likely_target->decl))
3074 {
3075 if (dump_file)
3076 fprintf (dump_file, "Target is external\n\n");
3077 nexternal++;
3078 continue;
3079 }
3080 /* Don't use an implicitly-declared destructor (c++/58678). */
3081 struct cgraph_node *non_thunk_target
3082 = likely_target->function_symbol ();
3083 if (DECL_ARTIFICIAL (non_thunk_target->decl)
3084 && DECL_COMDAT (non_thunk_target->decl))
3085 {
3086 if (dump_file)
3087 fprintf (dump_file, "Target is artificial\n\n");
3088 nartificial++;
3089 continue;
3090 }
3091 if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
3092 && likely_target->can_be_discarded_p ())
3093 {
3094 if (dump_file)
3095 fprintf (dump_file, "Target is overwritable\n\n");
3096 noverwritable++;
3097 continue;
3098 }
3099 else if (dbg_cnt (devirt))
3100 {
3101 if (dump_enabled_p ())
3102 {
3103 location_t locus = gimple_location_safe (e->call_stmt);
3104 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
3105 "speculatively devirtualizing call in %s/%i to %s/%i\n",
3106 n->name (), n->order,
3107 likely_target->name (),
3108 likely_target->order);
3109 }
3110 if (!likely_target->can_be_discarded_p ())
3111 {
3112 cgraph_node *alias;
3113 alias = dyn_cast<cgraph_node *> (likely_target->noninterposable_alias ());
3114 if (alias)
3115 likely_target = alias;
3116 }
3117 nconverted++;
3118 update = true;
3119 cgraph_turn_edge_to_speculative
3120 (e, likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
3121 }
3122 }
3123 if (update)
3124 inline_update_overall_summary (n);
3125 }
3126 pointer_set_destroy (bad_call_targets);
3127
3128 if (dump_file)
3129 fprintf (dump_file,
3130 "%i polymorphic calls, %i devirtualized,"
3131 " %i speculatively devirtualized, %i cold\n"
3132 "%i have multiple targets, %i overwritable,"
3133 " %i already speculated (%i agree, %i disagree),"
3134 " %i external, %i not defined, %i artificial\n",
3135 npolymorphic, ndevirtualized, nconverted, ncold,
3136 nmultiple, noverwritable, nspeculated, nok, nwrong,
3137 nexternal, nnotdefined, nartificial);
3138 return ndevirtualized ? TODO_remove_functions : 0;
3139 }
3140
3141 namespace {
3142
/* Pass metadata for the speculative devirtualization IPA pass
   (pass_ipa_devirt below); dumps the symbol table when finished.  */
const pass_data pass_data_ipa_devirt =
{
  IPA_PASS, /* type */
  "devirt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_DEVIRT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_dump_symtab ), /* todo_flags_finish */
};
3155
3156 class pass_ipa_devirt : public ipa_opt_pass_d
3157 {
3158 public:
3159 pass_ipa_devirt (gcc::context *ctxt)
3160 : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
3161 NULL, /* generate_summary */
3162 NULL, /* write_summary */
3163 NULL, /* read_summary */
3164 NULL, /* write_optimization_summary */
3165 NULL, /* read_optimization_summary */
3166 NULL, /* stmt_fixup */
3167 0, /* function_transform_todo_flags_start */
3168 NULL, /* function_transform */
3169 NULL) /* variable_transform */
3170 {}
3171
3172 /* opt_pass methods: */
3173 virtual bool gate (function *)
3174 {
3175 return (flag_devirtualize
3176 && flag_devirtualize_speculatively
3177 && optimize);
3178 }
3179
3180 virtual unsigned int execute (function *) { return ipa_devirt (); }
3181
3182 }; // class pass_ipa_devirt
3183
3184 } // anon namespace
3185
3186 ipa_opt_pass_d *
3187 make_pass_ipa_devirt (gcc::context *ctxt)
3188 {
3189 return new pass_ipa_devirt (ctxt);
3190 }
3191
3192 #include "gt-ipa-devirt.h"