/* Basic IPA utilities for type inheritance graph construction and
   devirtualization.
   Copyright (C) 2013-2014 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Brief vocabulary:
     ODR = One Definition Rule
        In short, the ODR states that:
        1 In any translation unit, a template, type, function, or object can
          have no more than one definition.  Some of these can have any number
          of declarations.  A definition provides an instance.
        2 In the entire program, an object or non-inline function cannot have
          more than one definition; if an object or function is used, it must
          have exactly one definition.  You can declare an object or function
          that is never used, in which case you don't have to provide
          a definition.  In no event can there be more than one definition.
        3 Some things, like types, templates, and extern inline functions, can
          be defined in more than one translation unit.  For a given entity,
          each definition must be the same.  Non-extern objects and functions
          in different translation units are different entities, even if their
          names and types are the same.
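
        As an illustration (hypothetical code, not from GCC itself), the
        following two translation units give S two definitions that are
        not the same, violating the ODR:

          // tu1.C                        // tu2.C
          struct S { int a; };            struct S { long a; };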

   OTR = OBJ_TYPE_REF
     This is the Gimple representation of type information of a polymorphic call.
     It contains two parameters:
       otr_type is a type of class whose method is called.
       otr_token is the index into virtual table where address is taken.
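
     For example (illustrative only; the exact dump syntax varies between
     GCC versions), a call p->foo () where foo occupies vtable slot 1 of
     class A may appear in a gimple dump as

       OBJ_TYPE_REF (_3; p_2(D)->1) (p_2(D));

     giving otr_type A and otr_token 1.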

   BINFO
     This is the type inheritance information attached to each tree
     RECORD_TYPE by the C++ frontend.  It provides information about base
     types and virtual tables.

     BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
     BINFO also links to its type by BINFO_TYPE and to the virtual table by
     BINFO_VTABLE.

     Base types of a given type are enumerated by the BINFO_BASE_BINFO
     vector.  Members of this vector are not the BINFOs associated
     with the base types.  Rather they are new copies of BINFOs
     (base BINFOs).  Their virtual tables may differ from the
     virtual table of the base type.  Also BINFO_OFFSET specifies the
     offset of the base within the type.

     In the case of single inheritance, the virtual table is shared
     and BINFO_VTABLE of base BINFO is NULL.  In the case of multiple
     inheritance the individual virtual tables are pointed to by
     BINFO_VTABLE of base binfos (which differ from the BINFO_VTABLE of
     the binfo associated with the base type).

     BINFO lookup for a given base type and offset can be done by
     get_binfo_at_offset.  It returns the proper BINFO whose virtual table
     can be used for lookup of virtual methods associated with the
     base type.
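
     For instance (a hypothetical hierarchy):

       struct A  { virtual void f (); };
       struct B1 : A { };
       struct B2 : A { };
       struct C  : B1, B2 { };

     B1 shares its virtual table with its A subobject, so the base BINFO
     of A inside B1 has NULL BINFO_VTABLE.  In C the base BINFOs of B1
     and B2 carry distinct vtable pointers, and get_binfo_at_offset on
     C's TYPE_BINFO with the offset of the B2 subobject returns the
     BINFO whose vtable resolves virtual methods of that base.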

   token
     This is an index of a virtual method in the virtual table associated
     with the type defining it.  Token can be looked up from OBJ_TYPE_REF
     or from DECL_VINDEX of a given virtual table.

   polymorphic (indirect) call
     This is the callgraph representation of a virtual method call.  Every
     polymorphic call contains otr_type and otr_token taken from the
     original OBJ_TYPE_REF at callgraph construction time.

   What we do here:

   build_type_inheritance_graph triggers a construction of the type inheritance
   graph.

     We reconstruct it based on types of methods we see in the unit.
     This means that the graph is not complete.  Types with no methods are not
     inserted into the graph.  Also types without virtual methods are not
     represented at all, though it may be easy to add this.

     The inheritance graph is represented as follows:

       Vertices are structures odr_type.  Every odr_type may correspond
       to one or more tree type nodes that are equivalent by ODR rule.
       (multiple type nodes appear only with link-time optimization)

       Edges are represented by odr_type->base and odr_type->derived_types.
       At the moment we do not track offsets of types for multiple inheritance.
       Adding this is easy.

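       For the hypothetical A/B1/B2/C hierarchy sketched earlier, the
       odr_type vertices would link roughly as (purely illustrative):

         A->derived_types  = {B1, B2}      B1->bases = {A}
         B1->derived_types = {C}           C->bases  = {B1, B2}
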
   possible_polymorphic_call_targets returns, given the parameters found on
   an indirect polymorphic edge, all possible polymorphic call targets of
   the call.

   pass_ipa_devirt performs simple speculative devirtualization.
*/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "print-tree.h"
#include "calls.h"
#include "cgraph.h"
#include "expr.h"
#include "tree-pass.h"
#include "hash-set.h"
#include "target.h"
#include "hash-table.h"
#include "inchash.h"
#include "tree-pretty-print.h"
#include "ipa-utils.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "ipa-inline.h"
#include "diagnostic.h"
#include "tree-dfa.h"
#include "demangle.h"
#include "dbgcnt.h"
#include "stor-layout.h"
#include "intl.h"

static bool odr_types_equivalent_p (tree, tree, bool, bool *,
                                    hash_set<tree> *);

static bool odr_violation_reported = false;

/* Dummy polymorphic call context.  */

const ipa_polymorphic_call_context ipa_dummy_polymorphic_call_context
   = {0, 0, NULL, NULL, false, true, true};

/* Pointer set of all call targets appearing in the cache.  */
static hash_set<cgraph_node *> *cached_polymorphic_call_targets;

/* The node of type inheritance graph.  For each type unique in
   One Definition Rule (ODR) sense, we produce one node linking all
   main variants of types equivalent to it, bases and derived types.  */

struct GTY(()) odr_type_d
{
  /* Leader type.  */
  tree type;
  /* All bases; built only for main variants of types.  */
  vec<odr_type> GTY((skip)) bases;
  /* All derived types with virtual methods seen in unit;
     built only for main variants of types.  */
  vec<odr_type> GTY((skip)) derived_types;

  /* All equivalent types, if more than one.  */
  vec<tree, va_gc> *types;
  /* Set of all equivalent types, if NON-NULL.  */
  hash_set<tree> * GTY((skip)) types_set;

  /* Unique ID indexing the type in odr_types array.  */
  int id;
  /* Is it in anonymous namespace?  */
  bool anonymous_namespace;
  /* Do we know about all derivations of given type?  */
  bool all_derivations_known;
  /* Did we report ODR violation here?  */
  bool odr_violated;
};

static bool contains_type_p (tree, HOST_WIDE_INT, tree);


/* Return true if BINFO corresponds to a type with virtual methods.

   Every type has several BINFOs.  One is the BINFO associated with the
   type while others represent bases of derived types.  The BINFOs
   representing bases do not have BINFO_VTABLE pointer set in the case
   of single inheritance (because vtables are shared).  Look up the
   BINFO of the type and check the presence of its vtable.  */

static inline bool
polymorphic_type_binfo_p (tree binfo)
{
  /* See if BINFO's type has a virtual table associated with it.  */
  return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo)));
}

/* Return TRUE if all derived types of T are known and thus
   we may consider the walk of derived types complete.

   This is typically true only for final anonymous namespace types and types
   defined within functions (that may be COMDAT and thus shared across units,
   but with the same set of derived types).  */

static bool
type_all_derivations_known_p (tree t)
{
  if (TYPE_FINAL_P (t))
    return true;
  if (flag_ltrans)
    return false;
  if (type_in_anonymous_namespace_p (t))
    return true;
  return (decl_function_context (TYPE_NAME (t)) != NULL);
}

/* Return TRUE if type's constructors are all visible.  */

static bool
type_all_ctors_visible_p (tree t)
{
  return !flag_ltrans
         && cgraph_state >= CGRAPH_STATE_CONSTRUCTION
         /* We can not always use type_all_derivations_known_p.
            For function local types we must assume the case where
            the function is COMDAT and shared in between units.

            TODO: These cases are quite easy to get, but we need
            to keep track of C++ privatizing via -Wno-weak
            as well as the IPA privatizing.  */
         && type_in_anonymous_namespace_p (t);
}

/* Return TRUE if type may have instances.  */

static bool
type_possibly_instantiated_p (tree t)
{
  tree vtable;
  varpool_node *vnode;

  /* TODO: Add abstract types here.  */
  if (!type_all_ctors_visible_p (t))
    return true;

  vtable = BINFO_VTABLE (TYPE_BINFO (t));
  if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
    vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
  vnode = varpool_node::get (vtable);
  return vnode && vnode->definition;
}

/* One Definition Rule hashtable helpers.  */

struct odr_hasher
{
  typedef odr_type_d value_type;
  typedef union tree_node compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Return the type that was declared with T's name so that T is a
   qualified variant of it.  */

static inline tree
main_odr_variant (const_tree t)
{
  if (TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
    return TREE_TYPE (TYPE_NAME (t));
  /* Unnamed types and non-C++ produced types can be compared by variants.  */
  else
    return TYPE_MAIN_VARIANT (t);
}

/* Produce hash based on type name.  */

static hashval_t
hash_type_name (tree t)
{
  gcc_checking_assert (main_odr_variant (t) == t);

  /* If not in LTO, all main variants are unique, so we can do
     pointer hash.  */
  if (!in_lto_p)
    return htab_hash_pointer (t);

  /* Anonymous types are unique.  */
  if (type_in_anonymous_namespace_p (t))
    return htab_hash_pointer (t);

  /* For polymorphic types, we can simply hash the virtual table.  */
  if (TREE_CODE (t) == RECORD_TYPE
      && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
    {
      tree v = BINFO_VTABLE (TYPE_BINFO (t));
      hashval_t hash = 0;

      if (TREE_CODE (v) == POINTER_PLUS_EXPR)
        {
          hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
          v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
        }

      v = DECL_ASSEMBLER_NAME (v);
      hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
      return hash;
    }

  /* Rest is not implemented yet.  */
  gcc_unreachable ();
}

/* Return the computed hashcode for ODR_TYPE.  */

inline hashval_t
odr_hasher::hash (const value_type *odr_type)
{
  return hash_type_name (odr_type->type);
}

/* For languages with One Definition Rule, work out if
   types are the same based on their name.

   This is non-trivial for LTO where minor differences in
   the type representation may have prevented type merging
   from merging two copies of an otherwise equivalent type.

   Until we start streaming mangled type names, this function works
   only for polymorphic types.  */

bool
types_same_for_odr (const_tree type1, const_tree type2)
{
  gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));

  type1 = main_odr_variant (type1);
  type2 = main_odr_variant (type2);

  if (type1 == type2)
    return true;

  if (!in_lto_p)
    return false;

  /* Check for anonymous namespaces.  Those have !TREE_PUBLIC
     on the corresponding TYPE_STUB_DECL.  */
  if (type_in_anonymous_namespace_p (type1)
      || type_in_anonymous_namespace_p (type2))
    return false;

  /* See if types are obviously different (i.e. different codes
     or polymorphic wrt non-polymorphic).  This is not strictly correct
     for ODR violating programs, but we can't do better without streaming
     ODR names.  */
  if (TREE_CODE (type1) != TREE_CODE (type2))
    return false;
  if (TREE_CODE (type1) == RECORD_TYPE
      && (TYPE_BINFO (type1) == NULL_TREE) != (TYPE_BINFO (type2) == NULL_TREE))
    return false;
  if (TREE_CODE (type1) == RECORD_TYPE && TYPE_BINFO (type1)
      && (BINFO_VTABLE (TYPE_BINFO (type1)) == NULL_TREE)
         != (BINFO_VTABLE (TYPE_BINFO (type2)) == NULL_TREE))
    return false;

  /* At the moment we have no way to establish ODR equivalence at LTO
     other than comparing virtual table pointers of polymorphic types.
     Eventually we should start saving mangled names in TYPE_NAME.
     Then this condition will become non-trivial.  */

  if (TREE_CODE (type1) == RECORD_TYPE
      && TYPE_BINFO (type1) && TYPE_BINFO (type2)
      && BINFO_VTABLE (TYPE_BINFO (type1))
      && BINFO_VTABLE (TYPE_BINFO (type2)))
    {
      tree v1 = BINFO_VTABLE (TYPE_BINFO (type1));
      tree v2 = BINFO_VTABLE (TYPE_BINFO (type2));
      gcc_assert (TREE_CODE (v1) == POINTER_PLUS_EXPR
                  && TREE_CODE (v2) == POINTER_PLUS_EXPR);
      return (operand_equal_p (TREE_OPERAND (v1, 1),
                               TREE_OPERAND (v2, 1), 0)
              && DECL_ASSEMBLER_NAME
                   (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
                 == DECL_ASSEMBLER_NAME
                   (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
    }
  gcc_unreachable ();
}


/* Compare types T1 and T2 and return true if they are
   equivalent.  */

inline bool
odr_hasher::equal (const value_type *t1, const compare_type *ct2)
{
  tree t2 = const_cast <tree> (ct2);

  gcc_checking_assert (main_odr_variant (t2) == t2);
  if (t1->type == t2)
    return true;
  if (!in_lto_p)
    return false;
  return types_same_for_odr (t1->type, t2);
}

/* Free ODR type V.  */

inline void
odr_hasher::remove (value_type *v)
{
  v->bases.release ();
  v->derived_types.release ();
  if (v->types_set)
    delete v->types_set;
  ggc_free (v);
}

/* ODR type hash used to look up ODR types based on tree type node.  */

typedef hash_table<odr_hasher> odr_hash_type;
static odr_hash_type *odr_hash;

/* ODR types are also stored into ODR_TYPE vector to allow consistent
   walking.  Bases appear before derived types.  Vector is garbage collected
   so we won't end up visiting empty types.  */

static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
#define odr_types (*odr_types_ptr)

/* Set TYPE_BINFO of TYPE and its variants to BINFO.  */
void
set_type_binfo (tree type, tree binfo)
{
  for (; type; type = TYPE_NEXT_VARIANT (type))
    if (COMPLETE_TYPE_P (type))
      TYPE_BINFO (type) = binfo;
    else
      gcc_assert (!TYPE_BINFO (type));
}

/* Compare T1 and T2 based on name or structure.  */

static bool
odr_subtypes_equivalent_p (tree t1, tree t2, hash_set<tree> *visited)
{
  bool an1, an2;

  /* This can happen in incomplete types that should be handled earlier.  */
  gcc_assert (t1 && t2);

  t1 = main_odr_variant (t1);
  t2 = main_odr_variant (t2);
  if (t1 == t2)
    return true;
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;
  if ((TYPE_NAME (t1) == NULL_TREE) != (TYPE_NAME (t2) == NULL_TREE))
    return false;
  if (TYPE_NAME (t1) && DECL_NAME (TYPE_NAME (t1)) != DECL_NAME (TYPE_NAME (t2)))
    return false;

  /* Anonymous namespace types must match exactly.  */
  an1 = type_in_anonymous_namespace_p (t1);
  an2 = type_in_anonymous_namespace_p (t2);
  if (an1 != an2 || an1)
    return false;

  /* For types where we can not establish ODR equivalency, recurse and deeply
     compare.  */
  if (TREE_CODE (t1) != RECORD_TYPE
      || !TYPE_BINFO (t1) || !TYPE_BINFO (t2)
      || !polymorphic_type_binfo_p (TYPE_BINFO (t1))
      || !polymorphic_type_binfo_p (TYPE_BINFO (t2)))
    {
      /* This should really be a pair hash, but for the moment we do not need
         100% reliability and it would be better to compare all ODR types so
         recursion here is needed only for component types.  */
      if (visited->add (t1))
        return true;
      return odr_types_equivalent_p (t1, t2, false, NULL, visited);
    }
  return types_same_for_odr (t1, t2);
}

/* Output ODR violation warning about T1 and T2 with REASON.
   Display location of ST1 and ST2 if REASON speaks about field or
   method of the type.
   If WARN is false, do nothing.  Set WARNED if warning was indeed
   output.  */

void
warn_odr (tree t1, tree t2, tree st1, tree st2,
          bool warn, bool *warned, const char *reason)
{
  tree decl2 = TYPE_NAME (t2);

  if (!warn)
    return;
  if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (t1)), OPT_Wodr,
                   "type %qT violates one definition rule",
                   t1))
    return;
  if (!st1)
    ;
  else if (TREE_CODE (st1) == FIELD_DECL)
    {
      inform (DECL_SOURCE_LOCATION (decl2),
              "a different type is defined in another translation unit");
      inform (DECL_SOURCE_LOCATION (st1),
              "the first difference of corresponding definitions is field %qD",
              st1);
      decl2 = st2;
    }
  else if (TREE_CODE (st1) == FUNCTION_DECL)
    {
      inform (DECL_SOURCE_LOCATION (decl2),
              "a different type is defined in another translation unit");
      inform (DECL_SOURCE_LOCATION (st1),
              "the first difference of corresponding definitions is method %qD",
              st1);
      decl2 = st2;
    }
  else
    return;
  inform (DECL_SOURCE_LOCATION (decl2), reason);

  if (warned)
    *warned = true;
}

/* We already warned about ODR mismatch.  T1 and T2 ought to be equivalent
   because they are used in the same place in ODR matching types.
   They are not; inform the user.  */

void
warn_types_mismatch (tree t1, tree t2)
{
  if (!TYPE_NAME (t1) || !TYPE_NAME (t2))
    return;
  /* In Firefox it is a common bug to have same types but in
     different namespaces.  Be a bit more informative on
     this.  */
  if (TYPE_CONTEXT (t1) && TYPE_CONTEXT (t2)
      && (((TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL)
            != (TREE_CODE (TYPE_CONTEXT (t2)) == NAMESPACE_DECL))
          || (TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL
              && (DECL_NAME (TYPE_CONTEXT (t1)) !=
                  DECL_NAME (TYPE_CONTEXT (t2))))))
    inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
            "type %qT should match type %qT but is defined "
            "in a different namespace",
            t1, t2);
  else
    inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
            "type %qT should match type %qT",
            t1, t2);
  inform (DECL_SOURCE_LOCATION (TYPE_NAME (t2)),
          "the incompatible type is defined here");
}

/* Compare T1 and T2, report ODR violations if WARN is true and set
   WARNED to true if anything is reported.  Return true if types match.
   If true is returned, the types are also compatible in the sense of
   gimple_canonical_types_compatible_p.  */

static bool
odr_types_equivalent_p (tree t1, tree t2, bool warn, bool *warned,
                        hash_set<tree> *visited)
{
  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;
  gcc_assert (!type_in_anonymous_namespace_p (t1));
  gcc_assert (!type_in_anonymous_namespace_p (t2));

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
                G_("a different type is defined in another translation unit"));
      return false;
    }

  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
                G_("a type with different qualifiers is defined in another "
                   "translation unit"));
      return false;
    }

  if (comp_type_attributes (t1, t2) != 1)
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
                G_("a type with attributes "
                   "is defined in another translation unit"));
      return false;
    }

  if (TREE_CODE (t1) == ENUMERAL_TYPE)
    {
      tree v1, v2;
      for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
           v1 && v2 ; v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
        {
          if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
            {
              warn_odr (t1, t2, NULL, NULL, warn, warned,
                        G_("an enum with different value name"
                           " is defined in another translation unit"));
              return false;
            }
          if (TREE_VALUE (v1) != TREE_VALUE (v2)
              && !operand_equal_p (DECL_INITIAL (TREE_VALUE (v1)),
                                   DECL_INITIAL (TREE_VALUE (v2)), 0))
            {
              warn_odr (t1, t2, NULL, NULL, warn, warned,
                        G_("an enum with different values is defined"
                           " in another translation unit"));
              return false;
            }
        }
      if (v1 || v2)
        {
          warn_odr (t1, t2, NULL, NULL, warn, warned,
                    G_("an enum with mismatching number of values "
                       "is defined in another translation unit"));
          return false;
        }
    }

  /* Non-aggregate types can be handled cheaply.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
        {
          warn_odr (t1, t2, NULL, NULL, warn, warned,
                    G_("a type with different precision is defined "
                       "in another translation unit"));
          return false;
        }
      if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
        {
          warn_odr (t1, t2, NULL, NULL, warn, warned,
                    G_("a type with different signedness is defined "
                       "in another translation unit"));
          return false;
        }

      if (TREE_CODE (t1) == INTEGER_TYPE
          && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
        {
          /* char WRT uint_8?  */
          warn_odr (t1, t2, NULL, NULL, warn, warned,
                    G_("a different type is defined in another "
                       "translation unit"));
          return false;
        }

      /* For canonical type comparisons we do not want to build SCCs
         so we cannot compare pointed-to types.  But we can, for now,
         require the same pointed-to type kind and match what
         useless_type_conversion_p would do.  */
      if (POINTER_TYPE_P (t1))
        {
          if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
              != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
            {
              warn_odr (t1, t2, NULL, NULL, warn, warned,
                        G_("it is defined as a pointer in different address "
                           "space in another translation unit"));
              return false;
            }

          if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
                                          visited))
            {
              warn_odr (t1, t2, NULL, NULL, warn, warned,
                        G_("it is defined as a pointer to different type "
                           "in another translation unit"));
              if (warn && warned)
                warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
              return false;
            }
        }

      /* Tail-recurse to components.  */
      if ((TREE_CODE (t1) == VECTOR_TYPE || TREE_CODE (t1) == COMPLEX_TYPE)
          && !odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
                                         visited))
        {
          /* Probably specific enough.  */
          warn_odr (t1, t2, NULL, NULL, warn, warned,
                    G_("a different type is defined "
                       "in another translation unit"));
          if (warn && warned)
            warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
          return false;
        }

      gcc_assert (operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0));
      gcc_assert (operand_equal_p (TYPE_SIZE_UNIT (t1),
                                   TYPE_SIZE_UNIT (t2), 0));
      gcc_assert (TYPE_MODE (t1) == TYPE_MODE (t2));

      return true;
    }

  /* Do type-specific comparisons.  */
  switch (TREE_CODE (t1))
    {
    case ARRAY_TYPE:
      {
        /* Array types are the same if the element types are the same and
           the number of elements are the same.  */
        if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
                                        visited))
          {
            warn_odr (t1, t2, NULL, NULL, warn, warned,
                      G_("a different type is defined in another "
                         "translation unit"));
            if (warn && warned)
              warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
          }
        gcc_assert (TYPE_STRING_FLAG (t1) == TYPE_STRING_FLAG (t2));
        gcc_assert (TYPE_NONALIASED_COMPONENT (t1)
                    == TYPE_NONALIASED_COMPONENT (t2));

        tree i1 = TYPE_DOMAIN (t1);
        tree i2 = TYPE_DOMAIN (t2);

        /* For an incomplete external array, the type domain can be
           NULL_TREE.  Check this condition also.  */
        if (i1 == NULL_TREE || i2 == NULL_TREE)
          return true;

        tree min1 = TYPE_MIN_VALUE (i1);
        tree min2 = TYPE_MIN_VALUE (i2);
        tree max1 = TYPE_MAX_VALUE (i1);
        tree max2 = TYPE_MAX_VALUE (i2);

        /* In C++, minimums should always be 0.  */
        gcc_assert (min1 == min2);
        if (!operand_equal_p (max1, max2, 0))
          {
            warn_odr (t1, t2, NULL, NULL, warn, warned,
                      G_("an array of different size is defined "
                         "in another translation unit"));
            return false;
          }
        gcc_assert (operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0));
        gcc_assert (operand_equal_p (TYPE_SIZE_UNIT (t1),
                                     TYPE_SIZE_UNIT (t2), 0));
      }
      return true;

    case METHOD_TYPE:
    case FUNCTION_TYPE:
      /* Function types are the same if the return type and arguments types
         are the same.  */
      if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
                                      visited))
        {
          warn_odr (t1, t2, NULL, NULL, warn, warned,
                    G_("has different return value "
                       "in another translation unit"));
          if (warn && warned)
            warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
          return false;
        }

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
        return true;
      else
        {
          tree parms1, parms2;

          for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
               parms1 && parms2;
               parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
            {
              if (!odr_subtypes_equivalent_p
                     (TREE_VALUE (parms1), TREE_VALUE (parms2), visited))
                {
                  warn_odr (t1, t2, NULL, NULL, warn, warned,
                            G_("has different parameters in another "
                               "translation unit"));
                  if (warn && warned)
                    warn_types_mismatch (TREE_VALUE (parms1),
                                         TREE_VALUE (parms2));
                  return false;
                }
            }

          if (parms1 || parms2)
            {
              warn_odr (t1, t2, NULL, NULL, warn, warned,
                        G_("has different parameters "
                           "in another translation unit"));
              return false;
            }

          return true;
        }

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f1, f2;

        /* For aggregate types, all the fields must be the same.  */
        if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
          {
            for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
                 f1 || f2;
                 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
              {
                /* Skip non-fields.  */
                while (f1 && TREE_CODE (f1) != FIELD_DECL)
                  f1 = TREE_CHAIN (f1);
                while (f2 && TREE_CODE (f2) != FIELD_DECL)
                  f2 = TREE_CHAIN (f2);
                if (!f1 || !f2)
                  break;
                if (DECL_ARTIFICIAL (f1) != DECL_ARTIFICIAL (f2))
                  break;
                if (DECL_NAME (f1) != DECL_NAME (f2)
                    && !DECL_ARTIFICIAL (f1))
                  {
                    warn_odr (t1, t2, f1, f2, warn, warned,
                              G_("a field with different name is defined "
                                 "in another translation unit"));
                    return false;
                  }
                if (!odr_subtypes_equivalent_p (TREE_TYPE (f1),
                                                TREE_TYPE (f2), visited))
                  {
                    /* Do not warn about artificial fields and just go into
                       generic field mismatch warning.  */
                    if (DECL_ARTIFICIAL (f1))
                      break;

                    warn_odr (t1, t2, f1, f2, warn, warned,
                              G_("a field of same name but different type "
                                 "is defined in another translation unit"));
                    if (warn && warned)
                      warn_types_mismatch (TREE_TYPE (f1), TREE_TYPE (f2));
                    return false;
                  }
                if (!gimple_compare_field_offset (f1, f2))
                  {
                    /* Do not warn about artificial fields and just go into
                       generic field mismatch warning.  */
                    if (DECL_ARTIFICIAL (f1))
                      break;
                    warn_odr (t1, t2, t1, t2, warn, warned,
                              G_("fields have different layout "
                                 "in another translation unit"));
                    return false;
                  }
                gcc_assert (DECL_NONADDRESSABLE_P (f1)
                            == DECL_NONADDRESSABLE_P (f2));
              }

            /* If one aggregate has more fields than the other, they
               are not the same.  */
            if (f1 || f2)
              {
                warn_odr (t1, t2, NULL, NULL, warn, warned,
                          G_("a type with different number of fields "
                             "is defined in another translation unit"));
                return false;
              }
            if ((TYPE_MAIN_VARIANT (t1) == t1 || TYPE_MAIN_VARIANT (t2) == t2)
                && (TYPE_METHODS (TYPE_MAIN_VARIANT (t1))
                    != TYPE_METHODS (TYPE_MAIN_VARIANT (t2))))
              {
                for (f1 = TYPE_METHODS (TYPE_MAIN_VARIANT (t1)),
                     f2 = TYPE_METHODS (TYPE_MAIN_VARIANT (t2));
                     f1 && f2 ; f1 = DECL_CHAIN (f1), f2 = DECL_CHAIN (f2))
                  {
                    if (DECL_ASSEMBLER_NAME (f1) != DECL_ASSEMBLER_NAME (f2))
                      {
                        warn_odr (t1, t2, f1, f2, warn, warned,
                                  G_("a different method of same type "
                                     "is defined in another translation unit"));
                        return false;
                      }
                    if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
                      {
                        warn_odr (t1, t2, f1, f2, warn, warned,
                                  G_("a definition that differs by virtual "
                                     "keyword in another translation unit"));
                        return false;
                      }
                    if (DECL_VINDEX (f1) != DECL_VINDEX (f2))
                      {
                        warn_odr (t1, t2, f1, f2, warn, warned,
                                  G_("virtual table layout differs in another "
                                     "translation unit"));
                        return false;
                      }
                    if (!odr_subtypes_equivalent_p (TREE_TYPE (f1),
                                                    TREE_TYPE (f2), visited))
                      {
                        warn_odr (t1, t2, f1, f2, warn, warned,
                                  G_("method with incompatible type is "
                                     "defined in another translation unit"));
                        return false;
                      }
                  }
                if (f1 || f2)
                  {
                    warn_odr (t1, t2, NULL, NULL, warn, warned,
                              G_("a type with different number of methods "
                                 "is defined in another translation unit"));
                    return false;
                  }
              }
            gcc_assert (operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0));
            gcc_assert (operand_equal_p (TYPE_SIZE_UNIT (t1),
                                         TYPE_SIZE_UNIT (t2), 0));
          }

        return true;
      }

    default:
      gcc_unreachable ();
    }
}

/* TYPE is equivalent to VAL by ODR, but its tree representation differs
   from VAL->type.  This may happen in LTO where tree merging did not merge
   all variants of the same type.  It may or may not mean an ODR violation.
   Add it to the list of duplicates and warn on some violations.  */

static bool
add_type_duplicate (odr_type val, tree type)
{
  bool build_bases = false;
  if (!val->types_set)
    val->types_set = new hash_set<tree>;

  /* Always prefer complete type to be the leader.  */
  if (!COMPLETE_TYPE_P (val->type)
      && COMPLETE_TYPE_P (type))
    {
      tree tmp = type;

      build_bases = true;
      type = val->type;
      val->type = tmp;
    }

  /* See if this duplicate is new.  */
  if (!val->types_set->add (type))
    {
      bool merge = true;
      bool base_mismatch = false;
      unsigned int i, j;
      bool warned = false;
      hash_set<tree> visited;

      gcc_assert (in_lto_p);
      vec_safe_push (val->types, type);

      /* First we compare memory layout.  */
      if (!odr_types_equivalent_p (val->type, type,
                                   !flag_ltrans && !val->odr_violated,
                                   &warned, &visited))
        {
          merge = false;
          odr_violation_reported = true;
          val->odr_violated = true;
          if (cgraph_dump_file)
            {
              fprintf (cgraph_dump_file, "ODR violation\n");

              print_node (cgraph_dump_file, "", val->type, 0);
              putc ('\n', cgraph_dump_file);
              print_node (cgraph_dump_file, "", type, 0);
              putc ('\n', cgraph_dump_file);
            }
        }

      /* Next sanity check that bases are the same.  If not, we will end
         up producing wrong answers.  */
      if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
          && TREE_CODE (val->type) == RECORD_TYPE
          && TREE_CODE (type) == RECORD_TYPE
          && TYPE_BINFO (val->type) && TYPE_BINFO (type))
        {
          for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
            if (polymorphic_type_binfo_p (BINFO_BASE_BINFO
                                            (TYPE_BINFO (type), i)))
              {
                odr_type base = get_odr_type
                                  (BINFO_TYPE
                                     (BINFO_BASE_BINFO (TYPE_BINFO (type),
                                                        i)),
                                   true);
                if (val->bases.length () <= j || val->bases[j] != base)
                  base_mismatch = true;
                j++;
              }
          if (base_mismatch)
            {
              merge = false;
              odr_violation_reported = true;

              if (!warned && !val->odr_violated)
                warn_odr (type, val->type, NULL, NULL, !warned, &warned,
                          "a type with the same name but different bases is "
                          "defined in another translation unit");
              val->odr_violated = true;
              if (cgraph_dump_file)
                {
                  fprintf (cgraph_dump_file,
                           "ODR base violation or merging bug?\n");

                  print_node (cgraph_dump_file, "", val->type, 0);
                  putc ('\n', cgraph_dump_file);
                  print_node (cgraph_dump_file, "", type, 0);
                  putc ('\n', cgraph_dump_file);
                }
            }
        }

      /* Regularize things a little.  During LTO same types may come with
         different BINFOs.  Either because their virtual table was
         not merged by tree merging and only later at decl merging or
         because one type comes with external vtable, while other
         with internal.  We want to merge equivalent binfos to conserve
         memory and streaming overhead.

         The external vtables are more harmful: they contain references
         to external declarations of methods that may be defined in the
         merged LTO unit.  For this reason we absolutely need to remove
         them and replace by internal variants.  Not doing so will lead
         to incomplete answers from possible_polymorphic_call_targets.  */
      if (!flag_ltrans && merge
          && TREE_CODE (val->type) == RECORD_TYPE
          && TREE_CODE (type) == RECORD_TYPE
          && TYPE_BINFO (val->type) && TYPE_BINFO (type)
          && TYPE_MAIN_VARIANT (type) == type
          && TYPE_MAIN_VARIANT (val->type) == val->type
          && BINFO_VTABLE (TYPE_BINFO (val->type))
          && BINFO_VTABLE (TYPE_BINFO (type)))
        {
          tree master_binfo = TYPE_BINFO (val->type);
          tree v1 = BINFO_VTABLE (master_binfo);
          tree v2 = BINFO_VTABLE (TYPE_BINFO (type));

          if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
            {
              gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
                          && operand_equal_p (TREE_OPERAND (v1, 1),
                                              TREE_OPERAND (v2, 1), 0));
              v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
              v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
            }
          gcc_assert (DECL_ASSEMBLER_NAME (v1)
                      == DECL_ASSEMBLER_NAME (v2));

          if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
            {
              unsigned int i;

              set_type_binfo (val->type, TYPE_BINFO (type));
              for (i = 0; i < val->types->length (); i++)
                {
                  if (TYPE_BINFO ((*val->types)[i])
                      == master_binfo)
                    set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
                }
              BINFO_TYPE (TYPE_BINFO (type)) = val->type;
            }
          else
            set_type_binfo (type, master_binfo);
        }
    }
  return build_bases;
}

/* Get ODR type hash entry for TYPE.  If INSERT is true, create
   possibly new entry.  */

odr_type
get_odr_type (tree type, bool insert)
{
  odr_type_d **slot;
  odr_type val;
  hashval_t hash;
  bool build_bases = false;
  bool insert_to_odr_array = false;
  int base_id = -1;

  type = main_odr_variant (type);

  hash = hash_type_name (type);
  slot
    = odr_hash->find_slot_with_hash (type, hash, insert ? INSERT : NO_INSERT);
  if (!slot)
    return NULL;

  /* See if we already have entry for type.  */
  if (*slot)
    {
      val = *slot;

      /* With LTO we need to support multiple tree representation of
         the same ODR type.  */
      if (val->type != type)
        build_bases = add_type_duplicate (val, type);
    }
  else
    {
      val = ggc_cleared_alloc<odr_type_d> ();
      val->type = type;
      val->bases = vNULL;
      val->derived_types = vNULL;
      val->anonymous_namespace = type_in_anonymous_namespace_p (type);
      build_bases = COMPLETE_TYPE_P (val->type);
      insert_to_odr_array = true;
    }

  if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
      && type == TYPE_MAIN_VARIANT (type))
    {
      tree binfo = TYPE_BINFO (type);
      unsigned int i;

      gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) == type);

      val->all_derivations_known = type_all_derivations_known_p (type);
      *slot = val;
      for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
        /* For now record only polymorphic types.  Others are
           pointless for devirtualization and we can not precisely
           determine ODR equivalency of these during LTO.  */
        if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
          {
            odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
                                                                        i)),
                                          true);
            gcc_assert (TYPE_MAIN_VARIANT (base->type) == base->type);
            base->derived_types.safe_push (val);
            val->bases.safe_push (base);
            if (base->id > base_id)
              base_id = base->id;
          }
    }
  /* Ensure that type always appears after bases.  */
  if (insert_to_odr_array)
    {
      if (odr_types_ptr)
        val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  else if (base_id > val->id)
    {
      odr_types[val->id] = 0;
      /* Be sure we did not record any derived types; these may need
         renumbering too.  */
      gcc_assert (val->derived_types.length () == 0);
      if (odr_types_ptr)
        val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  return val;
}

/* Dump ODR type T and all its derived types.  INDENT specifies the
   indentation for recursive printing.  */

static void
dump_odr_type (FILE *f, odr_type t, int indent=0)
{
  unsigned int i;
  fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
  print_generic_expr (f, t->type, TDF_SLIM);
  fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
  fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
  if (TYPE_NAME (t->type))
    {
      fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
               DECL_SOURCE_FILE (TYPE_NAME (t->type)),
               DECL_SOURCE_LINE (TYPE_NAME (t->type)));
    }
  if (t->bases.length ())
    {
      fprintf (f, "%*s base odr type ids: ", indent * 2, "");
      for (i = 0; i < t->bases.length (); i++)
        fprintf (f, " %i", t->bases[i]->id);
      fprintf (f, "\n");
    }
  if (t->derived_types.length ())
    {
      fprintf (f, "%*s derived types:\n", indent * 2, "");
      for (i = 0; i < t->derived_types.length (); i++)
        dump_odr_type (f, t->derived_types[i], indent + 1);
    }
  fprintf (f, "\n");
}

/* Dump the type inheritance graph.  */

static void
dump_type_inheritance_graph (FILE *f)
{
  unsigned int i;
  if (!odr_types_ptr)
    return;
  fprintf (f, "\n\nType inheritance graph:\n");
  for (i = 0; i < odr_types.length (); i++)
    {
      if (odr_types[i] && odr_types[i]->bases.length () == 0)
        dump_odr_type (f, odr_types[i]);
    }
  for (i = 0; i < odr_types.length (); i++)
    {
      if (odr_types[i] && odr_types[i]->types
          && odr_types[i]->types->length ())
        {
          unsigned int j;
          fprintf (f, "Duplicate tree types for odr type %i\n", i);
          print_node (f, "", odr_types[i]->type, 0);
          for (j = 0; j < odr_types[i]->types->length (); j++)
            {
              tree t;
              fprintf (f, "duplicate #%i\n", j);
              print_node (f, "", (*odr_types[i]->types)[j], 0);
              t = (*odr_types[i]->types)[j];
              while (TYPE_P (t) && TYPE_CONTEXT (t))
                {
                  t = TYPE_CONTEXT (t);
                  print_node (f, "", t, 0);
                }
              putc ('\n', f);
            }
        }
    }
}

/* Given method type T, return the type of the class it belongs to.
   Look up the THIS pointer and get its type.  */

tree
method_class_type (const_tree t)
{
  tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
  gcc_assert (TREE_CODE (t) == METHOD_TYPE);

  return TREE_TYPE (first_parm_type);
}
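
/* As an illustrative example: for the METHOD_TYPE of a member function
   A::foo, the first parameter is the implicit this pointer of type A *,
   so method_class_type returns the class type A.  */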

/* Initialize IPA devirt and build the type inheritance graph.  */

void
build_type_inheritance_graph (void)
{
  struct symtab_node *n;
  FILE *inheritance_dump_file;
  int flags;

  if (odr_hash)
    return;
  timevar_push (TV_IPA_INHERITANCE);
  inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
  odr_hash = new odr_hash_type (23);

  /* We reconstruct the graph starting from types of all methods seen in
     the unit.  */
  FOR_EACH_SYMBOL (n)
    if (is_a <cgraph_node *> (n)
        && DECL_VIRTUAL_P (n->decl)
        && n->real_symbol_p ())
      get_odr_type (TYPE_MAIN_VARIANT (method_class_type
                                         (TREE_TYPE (n->decl))),
                    true);

    /* Look also for virtual tables of types that do not define any methods.

       We need it in a case where class B has virtual base of class A
       re-defining its virtual method and there is class C with no virtual
       methods with B as virtual base.

       Here we output B's virtual method in two variants, for non-virtual
       and virtual inheritance.  B's virtual table has the non-virtual
       version, while C's has the virtual one.

       For this reason we need to know about C in order to include both
       variants of B.  More correctly, record_target_from_binfo should
       add both variants of the method when walking B, but we have no
       link in between them.

       We rely on the fact that either the method is exported and thus we
       assume it is called externally or C is in anonymous namespace and
       thus we will see the vtable.  */

    else if (is_a <varpool_node *> (n)
             && DECL_VIRTUAL_P (n->decl)
             && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
             && TYPE_BINFO (DECL_CONTEXT (n->decl))
             && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
      get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true);
  if (inheritance_dump_file)
    {
      dump_type_inheritance_graph (inheritance_dump_file);
      dump_end (TDI_inheritance, inheritance_dump_file);
    }
  timevar_pop (TV_IPA_INHERITANCE);
}

/* Return true if N has reference from live virtual table
   (and thus can be a destination of polymorphic call).
   Be conservatively correct when callgraph is not built or
   if the method may be referred externally.  */

static bool
referenced_from_vtable_p (struct cgraph_node *node)
{
  int i;
  struct ipa_ref *ref;
  bool found = false;

  if (node->externally_visible
      || node->used_from_other_partition)
    return true;

  /* Keep this test constant time.
     It is unlikely this can happen except for the case where speculative
     devirtualization introduced many speculative edges to this node.
     In this case the target is very likely alive anyway.  */
  if (node->ref_list.referring.length () > 100)
    return true;

  /* We need references built.  */
  if (cgraph_state <= CGRAPH_STATE_CONSTRUCTION)
    return true;

  for (i = 0; node->iterate_referring (i, ref); i++)
    if ((ref->use == IPA_REF_ALIAS
         && referenced_from_vtable_p (dyn_cast<cgraph_node *>
                                        (ref->referring)))
        || (ref->use == IPA_REF_ADDR
            && TREE_CODE (ref->referring->decl) == VAR_DECL
            && DECL_VIRTUAL_P (ref->referring->decl)))
      {
        found = true;
        break;
      }
  return found;
}

/* If TARGET has associated node, record it in the NODES array.
   CAN_REFER specifies whether the program can refer to the target
   directly.  If TARGET is unknown (NULL) or it can not be inserted
   (for example because its body was already removed and there is no
   way to refer to it), clear COMPLETEP.  */

static void
maybe_record_node (vec <cgraph_node *> &nodes,
                   tree target, hash_set<tree> *inserted,
                   bool can_refer,
                   bool *completep)
{
  struct cgraph_node *target_node, *alias_target;
  enum availability avail;

  /* cxa_pure_virtual and __builtin_unreachable do not need to be added into
     list of targets; the runtime effect of calling them is undefined.
     Only "real" virtual methods should be accounted.  */
  if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE)
    return;

  if (!can_refer)
    {
      /* The only case when method of anonymous namespace becomes unreferable
         is when we completely optimized it out.  */
      if (flag_ltrans
          || !target
          || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
        *completep = false;
      return;
    }

  if (!target)
    return;

  target_node = cgraph_node::get (target);

  /* Prefer the alias target over aliases, so we do not get confused by
     fake duplicates.  */
  if (target_node)
    {
      alias_target = target_node->ultimate_alias_target (&avail);
      if (target_node != alias_target
          && avail >= AVAIL_AVAILABLE
          && target_node->get_availability ())
        target_node = alias_target;
    }

  /* Method can only be called by polymorphic call if any
     of vtables referring to it are alive.

     While this holds for non-anonymous functions, too, there are
     cases where we want to keep them in the list; for example
     inline functions with -fno-weak are static, but we still
     may devirtualize them when instance comes from other unit.
     The same holds for LTO.

     Currently we ignore these functions in speculative devirtualization.
     ??? Maybe it would make sense to be more aggressive for LTO even
     elsewhere.  */
  if (!flag_ltrans
      && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
      && (!target_node
          || !referenced_from_vtable_p (target_node)))
    ;
  /* See if TARGET is a useful function we can deal with.  */
  else if (target_node != NULL
           && (TREE_PUBLIC (target)
               || DECL_EXTERNAL (target)
               || target_node->definition)
           && target_node->real_symbol_p ())
    {
      gcc_assert (!target_node->global.inlined_to);
      gcc_assert (target_node->real_symbol_p ());
      if (!inserted->add (target))
        {
          cached_polymorphic_call_targets->add (target_node);
          nodes.safe_push (target_node);
        }
    }
  else if (completep
           && (!type_in_anonymous_namespace_p
                  (DECL_CONTEXT (target))
               || flag_ltrans))
    *completep = false;
}

/* See if BINFO's type matches OUTER_TYPE.  If so, look up
   BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
   the method in the vtable and insert the method to NODES array
   or BASES_TO_CONSIDER if this array is non-NULL.
   Otherwise recurse to base BINFOs.
   This matches what get_binfo_at_offset does, but with offset
   being unknown.

   TYPE_BINFOS is a stack of BINFOs of types with defined
   virtual table seen on way from class type to BINFO.

   MATCHED_VTABLES tracks virtual tables we already did lookup
   for virtual function in.  INSERTED tracks nodes we already
   inserted.

   ANONYMOUS is true if BINFO is part of anonymous namespace.

   Clear COMPLETEP when we hit unreferable target.
   */

static void
record_target_from_binfo (vec <cgraph_node *> &nodes,
                          vec <tree> *bases_to_consider,
                          tree binfo,
                          tree otr_type,
                          vec <tree> &type_binfos,
                          HOST_WIDE_INT otr_token,
                          tree outer_type,
                          HOST_WIDE_INT offset,
                          hash_set<tree> *inserted,
                          hash_set<tree> *matched_vtables,
                          bool anonymous,
                          bool *completep)
{
  tree type = BINFO_TYPE (binfo);
  int i;
  tree base_binfo;


  if (BINFO_VTABLE (binfo))
    type_binfos.safe_push (binfo);
  if (types_same_for_odr (type, outer_type))
    {
      int i;
      tree type_binfo = NULL;

      /* Look up BINFO with virtual table.  For normal types it is always
         the last binfo on stack.  */
      for (i = type_binfos.length () - 1; i >= 0; i--)
        if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
          {
            type_binfo = type_binfos[i];
            break;
          }
      if (BINFO_VTABLE (binfo))
        type_binfos.pop ();
      /* If this is a duplicated BINFO for base shared by virtual
         inheritance, we may not have its associated vtable.  This is not
         a problem, since we will walk it on the other path.  */
      if (!type_binfo)
        return;
      tree inner_binfo = get_binfo_at_offset (type_binfo,
                                              offset, otr_type);
      if (!inner_binfo)
        {
          gcc_assert (odr_violation_reported);
          return;
        }
      /* For types in anonymous namespace first check if the respective
         vtable is alive.  If not, we know the type can't be called.  */
      if (!flag_ltrans && anonymous)
        {
          tree vtable = BINFO_VTABLE (inner_binfo);
          varpool_node *vnode;

          if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
            vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
          vnode = varpool_node::get (vtable);
          if (!vnode || !vnode->definition)
            return;
        }
      gcc_assert (inner_binfo);
      if (bases_to_consider
          ? !matched_vtables->contains (BINFO_VTABLE (inner_binfo))
          : !matched_vtables->add (BINFO_VTABLE (inner_binfo)))
        {
          bool can_refer;
          tree target = gimple_get_virt_method_for_binfo (otr_token,
                                                          inner_binfo,
                                                          &can_refer);
          if (!bases_to_consider)
            maybe_record_node (nodes, target, inserted, can_refer, completep);
          /* Destructors are never called via construction vtables.  */
          else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
            bases_to_consider->safe_push (target);
        }
      return;
    }

  /* Walk bases.  */
  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    /* Walking bases that have no virtual method is pointless exercise.  */
    if (polymorphic_type_binfo_p (base_binfo))
      record_target_from_binfo (nodes, bases_to_consider, base_binfo,
                                otr_type, type_binfos,
                                otr_token, outer_type, offset, inserted,
                                matched_vtables, anonymous, completep);
  if (BINFO_VTABLE (binfo))
    type_binfos.pop ();
}

/* Look up virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
   of TYPE, insert them to NODES, recurse into derived nodes.
   INSERTED is used to avoid duplicate insertions of methods into NODES.
   MATCHED_VTABLES are used to avoid duplicate walking of vtables.
   Clear COMPLETEP if unreferable target is found.

   If CONSIDER_CONSTRUCTION is true, record to BASES_TO_CONSIDER
   all cases where BASE_SKIPPED is true (because the base is abstract
   class).  */

static void
possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
                                     hash_set<tree> *inserted,
                                     hash_set<tree> *matched_vtables,
                                     tree otr_type,
                                     odr_type type,
                                     HOST_WIDE_INT otr_token,
                                     tree outer_type,
                                     HOST_WIDE_INT offset,
                                     bool *completep,
                                     vec <tree> &bases_to_consider,
                                     bool consider_construction)
{
  tree binfo = TYPE_BINFO (type->type);
  unsigned int i;
  vec <tree> type_binfos = vNULL;
  bool possibly_instantiated = type_possibly_instantiated_p (type->type);

  /* We may need to consider types w/o instances because of possible derived
     types using their methods either directly or via construction vtables.
     We are safe to skip them when all derivations are known, since we will
     handle them later.
     This is done by recording them to BASES_TO_CONSIDER array.  */
  if (possibly_instantiated || consider_construction)
    {
      record_target_from_binfo (nodes,
                                (!possibly_instantiated
                                 && type_all_derivations_known_p (type->type))
                                ? &bases_to_consider : NULL,
                                binfo, otr_type, type_binfos, otr_token,
                                outer_type, offset,
                                inserted, matched_vtables,
                                type->anonymous_namespace, completep);
    }
  type_binfos.release ();
  for (i = 0; i < type->derived_types.length (); i++)
    possible_polymorphic_call_targets_1 (nodes, inserted,
                                         matched_vtables,
                                         otr_type,
                                         type->derived_types[i],
                                         otr_token, outer_type, offset,
                                         completep, bases_to_consider,
                                         consider_construction);
}

/* Cache of queries for polymorphic call targets.

   Enumerating all call targets may get expensive when there are many
   polymorphic calls in the program, so we memoize all the previous
   queries and avoid duplicated work.  */

struct polymorphic_call_target_d
{
  HOST_WIDE_INT otr_token;
  ipa_polymorphic_call_context context;
  odr_type type;
  vec <cgraph_node *> targets;
  int speculative_targets;
  bool complete;
};

/* Polymorphic call target cache helpers.  */

struct polymorphic_call_target_hasher
{
  typedef polymorphic_call_target_d value_type;
  typedef polymorphic_call_target_d compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Return the computed hashcode for ODR_QUERY.  */

inline hashval_t
polymorphic_call_target_hasher::hash (const value_type *odr_query)
{
  inchash::hash hstate (odr_query->otr_token);

  hstate.add_wide_int (odr_query->type->id);
  hstate.merge_hash (TYPE_UID (odr_query->context.outer_type));
  hstate.add_wide_int (odr_query->context.offset);

  if (odr_query->context.speculative_outer_type)
    {
      hstate.merge_hash (TYPE_UID (odr_query->context.speculative_outer_type));
      hstate.add_wide_int (odr_query->context.speculative_offset);
    }
  hstate.add_flag (odr_query->context.maybe_in_construction);
  hstate.add_flag (odr_query->context.maybe_derived_type);
  hstate.add_flag (odr_query->context.speculative_maybe_derived_type);
  hstate.commit_flag ();
  return hstate.end ();
}

/* Compare cache entries T1 and T2.  */

inline bool
polymorphic_call_target_hasher::equal (const value_type *t1,
                                       const compare_type *t2)
{
  return (t1->type == t2->type && t1->otr_token == t2->otr_token
          && t1->context.offset == t2->context.offset
          && t1->context.speculative_offset == t2->context.speculative_offset
          && t1->context.outer_type == t2->context.outer_type
          && t1->context.speculative_outer_type
             == t2->context.speculative_outer_type
          && t1->context.maybe_in_construction
             == t2->context.maybe_in_construction
          && t1->context.maybe_derived_type == t2->context.maybe_derived_type
          && (t1->context.speculative_maybe_derived_type
              == t2->context.speculative_maybe_derived_type));
}

/* Remove entry in polymorphic call target cache hash.  */

inline void
polymorphic_call_target_hasher::remove (value_type *v)
{
  v->targets.release ();
  free (v);
}

/* Polymorphic call target query cache.  */

typedef hash_table<polymorphic_call_target_hasher>
   polymorphic_call_target_hash_type;
static polymorphic_call_target_hash_type *polymorphic_call_target_hash;

/* Destroy polymorphic call target query cache.  */

static void
free_polymorphic_call_targets_hash ()
{
  if (cached_polymorphic_call_targets)
    {
      delete polymorphic_call_target_hash;
      polymorphic_call_target_hash = NULL;
      delete cached_polymorphic_call_targets;
      cached_polymorphic_call_targets = NULL;
    }
}

/* When virtual function is removed, we may need to flush the cache.  */

static void
devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
{
  if (cached_polymorphic_call_targets
      && cached_polymorphic_call_targets->contains (n))
    free_polymorphic_call_targets_hash ();
}

/* Return true when TYPE contains a polymorphic type and thus is interesting
   for devirtualization machinery.  */

bool
contains_polymorphic_type_p (const_tree type)
{
  type = TYPE_MAIN_VARIANT (type);

  if (RECORD_OR_UNION_TYPE_P (type))
    {
      if (TYPE_BINFO (type)
          && polymorphic_type_binfo_p (TYPE_BINFO (type)))
        return true;
      for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
        if (TREE_CODE (fld) == FIELD_DECL
            && !DECL_ARTIFICIAL (fld)
            && contains_polymorphic_type_p (TREE_TYPE (fld)))
          return true;
      return false;
    }
  if (TREE_CODE (type) == ARRAY_TYPE)
    return contains_polymorphic_type_p (TREE_TYPE (type));
  return false;
}
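
/* Illustrative examples for the predicate above: given
   "struct A { virtual ~A (); };", both "struct X { A a; }" and the
   array type "A[3]" contain a polymorphic type, while
   "struct Y { A *p; }" does not, since only fields and array elements
   are walked, never pointed-to types.  */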
1736
1737 /* CONTEXT->OUTER_TYPE is a type of memory object where object of EXPECTED_TYPE
1738 is contained at CONTEXT->OFFSET. Walk the memory representation of
1739 CONTEXT->OUTER_TYPE and find the outermost class type that matches
1740 EXPECTED_TYPE or contain EXPECTED_TYPE as a base. Update CONTEXT
1741 to represent it.
1742
1743 For example when CONTEXT represents type
1744 class A
1745 {
1746 int a;
1747 class B b;
1748 }
1749 and we look for type at offset sizeof(int), we end up with B and offset 0.
1750 If the same is produced by multiple inheritance, we end up with A and offset
1751 sizeof(int).
1752
1753 If we cannot find the corresponding class, give up by setting
1754 CONTEXT->OUTER_TYPE to EXPECTED_TYPE and CONTEXT->OFFSET to 0.
1755 Return true when the lookup was successful. */
1756
1757 static bool
1758 get_class_context (ipa_polymorphic_call_context *context,
1759 tree expected_type)
1760 {
1761 tree type = context->outer_type;
1762 HOST_WIDE_INT offset = context->offset;
1763 bool speculative = false;
1764 bool speculation_valid = false;
1765 bool valid = false;
1766
1767 if (!context->outer_type)
1768 {
1769 type = context->outer_type = expected_type;
1770 context->offset = offset = 0;
1771 }
1772 /* See if the speculative type seems to be derived from outer_type.
1773 Speculation is then valid only if it really is a derived type and
1774 derived types are allowed.
1775
1776 The test does not look only for derived types; it also accepts the case
1777 where outer_type is a field of speculative_outer_type. In that case either
1778 MAYBE_DERIVED_TYPE is false and we have full non-speculative information,
1779 or the loop below will correctly update SPECULATIVE_OUTER_TYPE
1780 and SPECULATIVE_MAYBE_DERIVED_TYPE. */
1781 if (context->speculative_outer_type
1782 && context->speculative_offset >= context->offset
1783 && contains_type_p (context->speculative_outer_type,
1784 context->offset - context->speculative_offset,
1785 context->outer_type))
1786 speculation_valid = context->maybe_derived_type;
1787 else
1788 {
1789 context->speculative_outer_type = NULL;
1790 context->speculative_offset = 0;
1791 context->speculative_maybe_derived_type = false;
1792 }
1793
1794 /* Find the sub-object the constant actually refers to and mark whether it is
1795 an artificial one (as opposed to a user-defined one).
1796
1797 This loop is performed twice; first time for outer_type and second time
1798 for speculative_outer_type. The second iteration has SPECULATIVE set. */
1799 while (true)
1800 {
1801 HOST_WIDE_INT pos, size;
1802 tree fld;
1803
1804 /* On a match, just return what we found. */
1805 if (TREE_CODE (type) == TREE_CODE (expected_type)
1806 && (!in_lto_p
1807 || (TREE_CODE (type) == RECORD_TYPE
1808 && TYPE_BINFO (type)
1809 && polymorphic_type_binfo_p (TYPE_BINFO (type))))
1810 && types_same_for_odr (type, expected_type))
1811 {
1812 if (speculative)
1813 {
1814 gcc_assert (speculation_valid);
1815 gcc_assert (valid);
1816
1817 /* Drop the speculation if the offset did not match, or if it adds nothing over the non-speculative result. */
1818 if (offset != 0
1819 || (types_same_for_odr (context->speculative_outer_type,
1820 context->outer_type)
1821 && (context->maybe_derived_type
1822 == context->speculative_maybe_derived_type)))
1823 {
1824 context->speculative_outer_type = NULL;
1825 context->speculative_offset = 0;
1826 }
1827 return true;
1828 }
1829 else
1830 {
1831 /* A type cannot contain itself at a non-zero offset. In that case
1832 just give up. */
1833 if (offset != 0)
1834 {
1835 valid = false;
1836 goto give_up;
1837 }
1838 valid = true;
1839 /* If speculation is not valid or we determined type precisely,
1840 we are done. */
1841 if (!speculation_valid
1842 || !context->maybe_derived_type)
1843 {
1844 context->speculative_outer_type = NULL;
1845 context->speculative_offset = 0;
1846 return true;
1847 }
1848 /* Otherwise look into speculation now. */
1849 else
1850 {
1851 speculative = true;
1852 type = context->speculative_outer_type;
1853 offset = context->speculative_offset;
1854 continue;
1855 }
1856 }
1857 }
1858
1859 /* Walk the fields and find the one corresponding to OFFSET. */
1860 if (TREE_CODE (type) == RECORD_TYPE)
1861 {
1862 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1863 {
1864 if (TREE_CODE (fld) != FIELD_DECL)
1865 continue;
1866
1867 pos = int_bit_position (fld);
1868 size = tree_to_uhwi (DECL_SIZE (fld));
1869 if (pos <= offset && (pos + size) > offset)
1870 break;
1871 }
1872
1873 if (!fld)
1874 goto give_up;
1875
1876 type = TYPE_MAIN_VARIANT (TREE_TYPE (fld));
1877 offset -= pos;
1878 /* DECL_ARTIFICIAL represents a basetype. */
1879 if (!DECL_ARTIFICIAL (fld))
1880 {
1881 if (!speculative)
1882 {
1883 context->outer_type = type;
1884 context->offset = offset;
1885 /* As soon as we see a field containing the type,
1886 we know we are not looking for derivations. */
1887 context->maybe_derived_type = false;
1888 }
1889 else
1890 {
1891 context->speculative_outer_type = type;
1892 context->speculative_offset = offset;
1893 context->speculative_maybe_derived_type = false;
1894 }
1895 }
1896 }
1897 else if (TREE_CODE (type) == ARRAY_TYPE)
1898 {
1899 tree subtype = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1900
1901 /* Give up if we don't know the array element size. */
1902 if (!tree_fits_shwi_p (TYPE_SIZE (subtype))
1903 || tree_to_shwi (TYPE_SIZE (subtype)) <= 0)
1904 goto give_up;
1905 offset = offset % tree_to_shwi (TYPE_SIZE (subtype));
1906 type = subtype;
1907 if (!speculative)
1908 {
1909 context->outer_type = type;
1910 context->offset = offset;
1911 context->maybe_derived_type = false;
1912 }
1913 else
1914 {
1915 context->speculative_outer_type = type;
1916 context->speculative_offset = offset;
1917 context->speculative_maybe_derived_type = false;
1918 }
1919 }
1920 /* Give up on anything else. */
1921 else
1922 goto give_up;
1923 }
1924
1925 /* If we failed to find the subtype we were looking for, give up and
1926 fall back to the most generic query. */
1927 give_up:
1928 context->speculative_outer_type = NULL;
1929 context->speculative_offset = 0;
1930 context->speculative_maybe_derived_type = false;
1931 if (valid)
1932 return true;
1933 context->outer_type = expected_type;
1934 context->offset = 0;
1935 context->maybe_derived_type = true;
1936 context->maybe_in_construction = true;
1937 /* A POD can be changed into an instance of a polymorphic type by
1938 placement new. Here we play it safe and assume that any
1939 non-polymorphic type is a POD. */
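/* An illustrative (hypothetical) example of the placement new mentioned
   above:

     char buf[sizeof (B)];		// BUF is declared as a plain POD array
     B *b = new (buf) B ();		// but now holds a polymorphic object B

   Because of this, a non-polymorphic declared type does not by itself
   disprove the presence of a polymorphic object.  */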
1940 if ((TREE_CODE (type) != RECORD_TYPE
1941 || !TYPE_BINFO (type)
1942 || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
1943 && (!TYPE_SIZE (type)
1944 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1945 || (offset + tree_to_uhwi (TYPE_SIZE (expected_type)) <=
1946 tree_to_uhwi (TYPE_SIZE (type)))))
1947 return true;
1948 return false;
1949 }
1950
1951 /* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET. */
1952
1953 static bool
1954 contains_type_p (tree outer_type, HOST_WIDE_INT offset,
1955 tree otr_type)
1956 {
1957 ipa_polymorphic_call_context context = {offset, 0,
1958 TYPE_MAIN_VARIANT (outer_type),
1959 NULL, false, true, false};
1960 return get_class_context (&context, otr_type);
1961 }
1962
1963 /* Look up the base of BINFO that has virtual table VTABLE at OFFSET. */
1964
1965 static tree
1966 subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
1967 tree vtable)
1968 {
1969 tree v = BINFO_VTABLE (binfo);
1970 int i;
1971 tree base_binfo;
1972 unsigned HOST_WIDE_INT this_offset;
1973
1974 if (v)
1975 {
1976 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
1977 gcc_unreachable ();
1978
1979 if (offset == this_offset
1980 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
1981 return binfo;
1982 }
1983
1984 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1985 if (polymorphic_type_binfo_p (base_binfo))
1986 {
1987 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
1988 if (base_binfo)
1989 return base_binfo;
1990 }
1991 return NULL;
1992 }
1993
1994 /* T is a known constant value of a virtual table pointer.
1995 Store the virtual table to *V and its offset to *OFFSET.
1996 Return false if T does not look like a virtual table reference. */
1997
1998 bool
1999 vtable_pointer_value_to_vtable (const_tree t, tree *v,
2000 unsigned HOST_WIDE_INT *offset)
2001 {
2002 /* We expect &MEM[(void *)&virtual_table + 16B].
2003 We obtain the object's BINFO from the context of the virtual table.
2004 This one contains a pointer to the virtual table represented via
2005 POINTER_PLUS_EXPR. Verify that this pointer matches what
2006 we propagated through.
2007
2008 In the case of virtual inheritance, the virtual tables may
2009 be nested, i.e. the offset may be different from 16 and we may
2010 need to dive into the type representation. */
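/* An illustrative example (assuming the common Itanium C++ ABI layout,
   where the vtable has a two-pointer header) of where this shape comes
   from; for

     struct A { virtual void f (); };	// vtable symbol _ZTV1A

   A's constructor stores &_ZTV1A + 16 into the object, which GIMPLE
   represents as &MEM[(void *)&_ZTV1A + 16B] -- the pattern matched
   below.  */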
2011 if (TREE_CODE (t) == ADDR_EXPR
2012 && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
2013 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
2014 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
2015 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
2016 == VAR_DECL)
2017 && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
2018 (TREE_OPERAND (t, 0), 0), 0)))
2019 {
2020 *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
2021 *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
2022 return true;
2023 }
2024
2025 /* An alternative representation, used by the C++ frontend, is
2026 POINTER_PLUS_EXPR. We need to handle it when T comes from a static
2027 variable initializer or a BINFO. */
2028 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
2029 {
2030 *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
2031 t = TREE_OPERAND (t, 0);
2032 }
2033 else
2034 *offset = 0;
2035
2036 if (TREE_CODE (t) != ADDR_EXPR)
2037 return false;
2038 *v = TREE_OPERAND (t, 0);
2039 return true;
2040 }
2041
2042 /* T is a known constant value of a virtual table pointer. Return the BINFO
2043 of the instance type. */
2044
2045 tree
2046 vtable_pointer_value_to_binfo (const_tree t)
2047 {
2048 tree vtable;
2049 unsigned HOST_WIDE_INT offset;
2050
2051 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
2052 return NULL_TREE;
2053
2054 /* FIXME: for stores of construction vtables we return NULL,
2055 because we do not have a BINFO for those. Eventually we should fix
2056 our representation to allow this case to be handled, too.
2057 In the case we see a store of a BINFO, however, we may assume
2058 that standard folding will be able to cope with it. */
2059 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
2060 offset, vtable);
2061 }
2062
2063 /* We know that the instance is stored in a variable or parameter
2064 (not dynamically allocated) and we want to disprove the fact
2065 that it may be in construction at the invocation of CALL.
2066
2067 For the variable to be in construction we actually need to
2068 be in the constructor of the corresponding global variable, or
2069 the inline stack of CALL must contain the constructor.
2070 Check this condition. This check works safely only before
2071 IPA passes, because inline stacks may become out of date
2072 later. */
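/* For example (illustrative only):

     struct A { virtual void f (); void g () { f (); } };
     struct B : A { virtual void f (); };
     B b;				// global; constructed at startup

   If g is inlined into A's constructor, the call to f made while the
   global B is being constructed dispatches to A::f, not B::f, so the
   dynamic type seen at the call differs from the declared type of B.  */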
2073
2074 bool
2075 decl_maybe_in_construction_p (tree base, tree outer_type,
2076 gimple call, tree function)
2077 {
2078 outer_type = TYPE_MAIN_VARIANT (outer_type);
2079 gcc_assert (DECL_P (base));
2080
2081 /* After inlining, code unification optimizations may invalidate
2082 inline stacks. Also we need to give up on global variables after
2083 IPA, because addresses of these may have been propagated to their
2084 constructors. */
2085 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
2086 return true;
2087
2088 /* Pure functions cannot change the dynamic type;
2089 that requires writing to memory. */
2090 if (!auto_var_in_fn_p (base, function)
2091 && flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
2092 return false;
2093
2094 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
2095 block = BLOCK_SUPERCONTEXT (block))
2096 if (BLOCK_ABSTRACT_ORIGIN (block)
2097 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
2098 {
2099 tree fn = BLOCK_ABSTRACT_ORIGIN (block);
2100
2101 if (TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
2102 || (!DECL_CXX_CONSTRUCTOR_P (fn)
2103 && !DECL_CXX_DESTRUCTOR_P (fn)))
2104 {
2105 /* Watch for clones where we constant propagated the first
2106 argument (pointer to the instance). */
2107 fn = DECL_ABSTRACT_ORIGIN (fn);
2108 if (!fn
2109 || !is_global_var (base)
2110 || TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
2111 || (!DECL_CXX_CONSTRUCTOR_P (fn)
2112 && !DECL_CXX_DESTRUCTOR_P (fn)))
2113 continue;
2114 }
2115 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
2116 continue;
2117
2118 /* FIXME: this can go away once we have ODR types equivalency on
2119 LTO level. */
2120 if (in_lto_p && !polymorphic_type_binfo_p (TYPE_BINFO (outer_type)))
2121 return true;
2122 tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (fn)));
2123 if (types_same_for_odr (type, outer_type))
2124 return true;
2125 }
2126
2127 if (TREE_CODE (base) == VAR_DECL
2128 && is_global_var (base))
2129 {
2130 if (TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
2131 || (!DECL_CXX_CONSTRUCTOR_P (function)
2132 && !DECL_CXX_DESTRUCTOR_P (function)))
2133 {
2134 if (!DECL_ABSTRACT_ORIGIN (function))
2135 return false;
2136 /* Watch for clones where we constant propagated the first
2137 argument (pointer to the instance). */
2138 function = DECL_ABSTRACT_ORIGIN (function);
2139 if (!function
2140 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
2141 || (!DECL_CXX_CONSTRUCTOR_P (function)
2142 && !DECL_CXX_DESTRUCTOR_P (function)))
2143 return false;
2144 }
2145 /* FIXME: this can go away once we have ODR types equivalency on
2146 LTO level. */
2147 if (in_lto_p && !polymorphic_type_binfo_p (TYPE_BINFO (outer_type)))
2148 return true;
2149 tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (function)));
2150 if (types_same_for_odr (type, outer_type))
2151 return true;
2152 }
2153 return false;
2154 }
2155
2156 /* Produce a polymorphic call context for a call of a method of an instance
2157 that is located within BASE (which is assumed to be a decl) at OFFSET. */
2158
2159 static void
2160 get_polymorphic_call_info_for_decl (ipa_polymorphic_call_context *context,
2161 tree base, HOST_WIDE_INT offset)
2162 {
2163 gcc_assert (DECL_P (base));
2164
2165 context->outer_type = TYPE_MAIN_VARIANT (TREE_TYPE (base));
2166 context->offset = offset;
2167 context->speculative_outer_type = NULL;
2168 context->speculative_offset = 0;
2169 context->speculative_maybe_derived_type = true;
2170 /* Make the very conservative assumption that all objects
2171 may be in construction.
2172 TODO: ipa-prop already contains code to tell this better;
2173 merge it later. */
2174 context->maybe_in_construction = true;
2175 context->maybe_derived_type = false;
2176 }
2177
2178 /* CST is an invariant (the address of a decl); try to get a meaningful
2179 polymorphic call context for a polymorphic call of a method
2180 of an instance of OTR_TYPE that is located at OFFSET within this invariant.
2181 Return FALSE if nothing meaningful can be found. */
2182
2183 bool
2184 get_polymorphic_call_info_from_invariant (ipa_polymorphic_call_context *context,
2185 tree cst,
2186 tree otr_type,
2187 HOST_WIDE_INT offset)
2188 {
2189 HOST_WIDE_INT offset2, size, max_size;
2190 tree base;
2191
2192 if (TREE_CODE (cst) != ADDR_EXPR)
2193 return false;
2194
2195 cst = TREE_OPERAND (cst, 0);
2196 base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
2197 if (!DECL_P (base) || max_size == -1 || max_size != size)
2198 return false;
2199
2200 /* Only type-inconsistent programs can have an otr_type that is
2201 not part of the outer type. */
2202 if (!contains_type_p (TREE_TYPE (base), offset, otr_type))
2203 return false;
2204
2205 get_polymorphic_call_info_for_decl (context, base, offset);
2206 return true;
2207 }
2208
2209 /* See if OP is an SSA name initialized as a copy or by a single assignment.
2210 If so, walk the SSA graph upwards. */
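/* Illustrative GIMPLE (hypothetical SSA names):

     b_2 = p_1(D);
     c_3 = b_2;
     ... OBJ_TYPE_REF (...; c_3->0) ...

   walk_ssa_copies (c_3) walks up through the copies and returns the
   default definition p_1(D); a load stops the walk, because the loaded
   value may change between the load and the use.  */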
2211
2212 static tree
2213 walk_ssa_copies (tree op)
2214 {
2215 STRIP_NOPS (op);
2216 while (TREE_CODE (op) == SSA_NAME
2217 && !SSA_NAME_IS_DEFAULT_DEF (op)
2218 && SSA_NAME_DEF_STMT (op)
2219 && gimple_assign_single_p (SSA_NAME_DEF_STMT (op)))
2220 {
2221 if (gimple_assign_load_p (SSA_NAME_DEF_STMT (op)))
2222 return op;
2223 op = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op));
2224 STRIP_NOPS (op);
2225 }
2226 return op;
2227 }
2228
2229 /* Given the call reference REF in FNDECL, determine the class of the
2230 polymorphic call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
2231 CALL is an optional argument giving the actual statement (usually a call)
2232 where the context is used.
2233 Return a pointer to the object described by the context. */
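/* A sketch of a typical input (illustrative only):

     void call (A *a) { a->f (); }	// f is A's first virtual method

   Here REF is an OBJ_TYPE_REF with otr_type A and otr_token 0, and the
   walk below finds that the base pointer is the PARM_DECL for A; since
   it is neither a THIS pointer nor passed by invisible reference, only
   a speculative outer type can be derived from the pointer type.  */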
2234
2235 tree
2236 get_polymorphic_call_info (tree fndecl,
2237 tree ref,
2238 tree *otr_type,
2239 HOST_WIDE_INT *otr_token,
2240 ipa_polymorphic_call_context *context,
2241 gimple call)
2242 {
2243 tree base_pointer;
2244 *otr_type = obj_type_ref_class (ref);
2245 *otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));
2246
2247 /* Set up basic info in case we find nothing interesting in the analysis. */
2248 context->speculative_outer_type = NULL;
2249 context->speculative_offset = 0;
2250 context->speculative_maybe_derived_type = true;
2251 context->outer_type = TYPE_MAIN_VARIANT (*otr_type);
2252 context->offset = 0;
2253 base_pointer = OBJ_TYPE_REF_OBJECT (ref);
2254 context->maybe_derived_type = true;
2255 context->maybe_in_construction = true;
2256
2257 /* Walk SSA for outer object. */
2258 do
2259 {
2260 base_pointer = walk_ssa_copies (base_pointer);
2261 if (TREE_CODE (base_pointer) == ADDR_EXPR)
2262 {
2263 HOST_WIDE_INT size, max_size;
2264 HOST_WIDE_INT offset2;
2265 tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
2266 &offset2, &size, &max_size);
2267
2268 /* If this is a varying address, punt. */
2269 if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
2270 && max_size != -1
2271 && max_size == size)
2272 {
2273 /* We found a dereference of a pointer. The type of the pointer
2274 and of the MEM_REF is meaningless, but we can look further. */
2275 if (TREE_CODE (base) == MEM_REF)
2276 {
2277 base_pointer = TREE_OPERAND (base, 0);
2278 context->offset
2279 += offset2 + mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
2280 context->outer_type = NULL;
2281 }
2282 /* We found the base object. In this case the outer_type
2283 is known. */
2284 else if (DECL_P (base))
2285 {
2286 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (base)));
2287
2288 /* Only type-inconsistent programs can have an otr_type that is
2289 not part of the outer type. */
2290 if (!contains_type_p (TREE_TYPE (base),
2291 context->offset + offset2, *otr_type))
2292 {
2293 /* Use OTR_TOKEN = INT_MAX as a marker of probably type-inconsistent
2294 code sequences; we arrange for the calls to be redirected to
2295 builtin_unreachable later. */
2296 *otr_token = INT_MAX;
2297 return base_pointer;
2298 }
2299 get_polymorphic_call_info_for_decl (context, base,
2300 context->offset + offset2);
2301 if (context->maybe_in_construction && call)
2302 context->maybe_in_construction
2303 = decl_maybe_in_construction_p (base,
2304 context->outer_type,
2305 call,
2306 current_function_decl);
2307 return base_pointer;
2308 }
2309 else
2310 break;
2311 }
2312 else
2313 break;
2314 }
2315 else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
2316 && tree_fits_shwi_p (TREE_OPERAND (base_pointer, 1)))
2317 {
2318 context->offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
2319 * BITS_PER_UNIT;
2320 base_pointer = TREE_OPERAND (base_pointer, 0);
2321 }
2322 else
2323 break;
2324 }
2325 while (true);
2326
2327 /* Try to determine type of the outer object. */
2328 if (TREE_CODE (base_pointer) == SSA_NAME
2329 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
2330 && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
2331 {
2332 /* See if the parameter is the THIS pointer of a method. */
2333 if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
2334 && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
2335 {
2336 context->outer_type
2337 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
2338 gcc_assert (TREE_CODE (context->outer_type) == RECORD_TYPE);
2339
2340 /* Dynamic casting may have upcast the type
2341 in the hierarchy. In this case the outer type is less
2342 informative than the inner type and we should forget
2343 about it. */
2344 if (!contains_type_p (context->outer_type, context->offset,
2345 *otr_type))
2346 {
2347 context->outer_type = NULL;
2348 return base_pointer;
2349 }
2350
2351 /* If the function is constructor or destructor, then
2352 the type is possibly in construction, but we know
2353 it is not derived type. */
2354 if (DECL_CXX_CONSTRUCTOR_P (fndecl)
2355 || DECL_CXX_DESTRUCTOR_P (fndecl))
2356 {
2357 context->maybe_in_construction = true;
2358 context->maybe_derived_type = false;
2359 }
2360 else
2361 {
2362 context->maybe_derived_type = true;
2363 context->maybe_in_construction = false;
2364 }
2365 return base_pointer;
2366 }
2367 /* Non-PODs passed by value are really passed by invisible
2368 reference. In this case we also know the type of the
2369 object. */
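/* For instance (illustrative), in

     void f (A a);			// A has virtual methods, so non-POD

   the C++ ABI passes A by invisible reference; BASE_POINTER is then
   the PARM_DECL with DECL_BY_REFERENCE set and points to a complete A,
   never to a derived type.  */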
2370 if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
2371 {
2372 context->outer_type
2373 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
2374 gcc_assert (!POINTER_TYPE_P (context->outer_type));
2375 /* Only type-inconsistent programs can have an otr_type that is
2376 not part of the outer type. */
2377 if (!contains_type_p (context->outer_type, context->offset,
2378 *otr_type))
2379 {
2380 /* Use OTR_TOKEN = INT_MAX as a marker of probably type-inconsistent
2381 code sequences; we arrange for the calls to be redirected to
2382 builtin_unreachable later. */
2383 *otr_token = INT_MAX;
2384 return base_pointer;
2385 }
2386 context->maybe_derived_type = false;
2387 context->maybe_in_construction = false;
2388 return base_pointer;
2389 }
2390 }
2391
2392 tree base_type = TREE_TYPE (base_pointer);
2393
2394 if (TREE_CODE (base_pointer) == SSA_NAME
2395 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
2396 && TREE_CODE (SSA_NAME_VAR (base_pointer)) != PARM_DECL)
2397 {
2398 /* Use OTR_TOKEN = INT_MAX as a marker of probably type-inconsistent
2399 code sequences; we arrange for the calls to be redirected to
2400 builtin_unreachable later. */
2401 *otr_token = INT_MAX;
2402 return base_pointer;
2403 }
2404 if (TREE_CODE (base_pointer) == SSA_NAME
2405 && SSA_NAME_DEF_STMT (base_pointer)
2406 && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
2407 base_type = TREE_TYPE (gimple_assign_rhs1
2408 (SSA_NAME_DEF_STMT (base_pointer)));
2409
2410 if (POINTER_TYPE_P (base_type)
2411 && contains_type_p (TYPE_MAIN_VARIANT (TREE_TYPE (base_type)),
2412 context->offset,
2413 *otr_type))
2414 {
2415 context->speculative_outer_type = TYPE_MAIN_VARIANT
2416 (TREE_TYPE (base_type));
2417 context->speculative_offset = context->offset;
2418 context->speculative_maybe_derived_type = true;
2419 }
2420 /* TODO: There are multiple ways to derive a type, for instance when
2421 BASE_POINTER is passed to a constructor call prior to our reference.
2422 We do not perform this kind of flow-sensitive analysis yet. */
2423 return base_pointer;
2424 }
2425
2426 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
2427 Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
2428 and insert them into NODES.
2429
2430 MATCHED_VTABLES and INSERTED are used to avoid duplicated work. */
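/* For example (illustrative only): given

     struct A { virtual void f (); };
     struct B : A { virtual void f (); };

   while a B instance is being constructed its vtable pointer temporarily
   points to A's virtual table, so a call at the corresponding offset may
   reach A::f as well as B::f; both must be recorded.  */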
2431
2432 static void
2433 record_targets_from_bases (tree otr_type,
2434 HOST_WIDE_INT otr_token,
2435 tree outer_type,
2436 HOST_WIDE_INT offset,
2437 vec <cgraph_node *> &nodes,
2438 hash_set<tree> *inserted,
2439 hash_set<tree> *matched_vtables,
2440 bool *completep)
2441 {
2442 while (true)
2443 {
2444 HOST_WIDE_INT pos, size;
2445 tree base_binfo;
2446 tree fld;
2447
2448 if (types_same_for_odr (outer_type, otr_type))
2449 return;
2450
2451 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
2452 {
2453 if (TREE_CODE (fld) != FIELD_DECL)
2454 continue;
2455
2456 pos = int_bit_position (fld);
2457 size = tree_to_shwi (DECL_SIZE (fld));
2458 if (pos <= offset && (pos + size) > offset
2459 /* Do not get confused by zero sized bases. */
2460 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
2461 break;
2462 }
2463 /* Within a class type we should always find a corresponding field. */
2464 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
2465
2466 /* Non-base types should have been stripped by outer_class_type. */
2467 gcc_assert (DECL_ARTIFICIAL (fld));
2468
2469 outer_type = TREE_TYPE (fld);
2470 offset -= pos;
2471
2472 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
2473 offset, otr_type);
2474 if (!base_binfo)
2475 {
2476 gcc_assert (odr_violation_reported);
2477 return;
2478 }
2479 gcc_assert (base_binfo);
2480 if (!matched_vtables->add (BINFO_VTABLE (base_binfo)))
2481 {
2482 bool can_refer;
2483 tree target = gimple_get_virt_method_for_binfo (otr_token,
2484 base_binfo,
2485 &can_refer);
2486 if (!target || !DECL_CXX_DESTRUCTOR_P (target))
2487 maybe_record_node (nodes, target, inserted, can_refer, completep);
2488 matched_vtables->add (BINFO_VTABLE (base_binfo));
2489 }
2490 }
2491 }
2492
2493 /* When a virtual table is removed, we may need to flush the cache. */
2494
2495 static void
2496 devirt_variable_node_removal_hook (varpool_node *n,
2497 void *d ATTRIBUTE_UNUSED)
2498 {
2499 if (cached_polymorphic_call_targets
2500 && DECL_VIRTUAL_P (n->decl)
2501 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
2502 free_polymorphic_call_targets_hash ();
2503 }
2504
2505 /* Return a vector containing possible targets of a polymorphic call of type
2506 OTR_TYPE calling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
2507 If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containing
2508 OTR_TYPE and include their virtual methods. This is useful for types
2509 possibly in construction or destruction where the virtual table may
2510 temporarily change to one of the base types. INCLUDE_DERIVED_TYPES makes
2511 us walk the inheritance graph for all derivations.
2512
2513 OTR_TOKEN == INT_MAX is used to mark calls that are provably
2514 undefined and should be redirected to unreachable.
2515
2516 If COMPLETEP is non-NULL, store true if the list is complete.
2517 CACHE_TOKEN (if non-NULL) will get stored to a unique ID of the entry
2518 in the target cache. If the user needs to visit every target list
2519 just once, it can memoize them.
2520
2521 SPECULATIVE_TARGETSP (if non-NULL) is set to the number of targets that
2522 are speculatively likely. These include targets specified by the
2523 speculative part of the polymorphic call context and also exclude all
2524 targets for classes in construction.
2525
2526 The returned vector is placed into the cache; it is NOT the caller's
2527 responsibility to free it. The vector can be freed on a cgraph_remove_node
2528 call if the particular node is a virtual function present in the cache. */
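/* A minimal usage sketch (illustrative only; CONSIDER is a hypothetical
   consumer):

     bool final;
     void *cache_token;
     int speculative;
     vec <cgraph_node *> targets
       = possible_polymorphic_call_targets (otr_type, otr_token, context,
					    &final, &cache_token,
					    &speculative);
     for (unsigned i = 0; i < targets.length (); i++)
       consider (targets[i], i < (unsigned) speculative, final);

   The vector must not be freed by the caller.  */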
2529
2530 vec <cgraph_node *>
2531 possible_polymorphic_call_targets (tree otr_type,
2532 HOST_WIDE_INT otr_token,
2533 ipa_polymorphic_call_context context,
2534 bool *completep,
2535 void **cache_token,
2536 int *speculative_targetsp)
2537 {
2538 static struct cgraph_node_hook_list *node_removal_hook_holder;
2539 vec <cgraph_node *> nodes = vNULL;
2540 vec <tree> bases_to_consider = vNULL;
2541 odr_type type, outer_type;
2542 polymorphic_call_target_d key;
2543 polymorphic_call_target_d **slot;
2544 unsigned int i;
2545 tree binfo, target;
2546 bool complete;
2547 bool can_refer;
2548 bool skipped = false;
2549
2550 otr_type = TYPE_MAIN_VARIANT (otr_type);
2551
2552 /* If ODR is not initialized, return empty incomplete list. */
2553 if (!odr_hash)
2554 {
2555 if (completep)
2556 *completep = false;
2557 if (cache_token)
2558 *cache_token = NULL;
2559 if (speculative_targetsp)
2560 *speculative_targetsp = 0;
2561 return nodes;
2562 }
2563
2564 /* If we hit type inconsistency, just return empty list of targets. */
2565 if (otr_token == INT_MAX)
2566 {
2567 if (completep)
2568 *completep = true;
2569 if (cache_token)
2570 *cache_token = NULL;
2571 if (speculative_targetsp)
2572 *speculative_targetsp = 0;
2573 return nodes;
2574 }
2575
2576 type = get_odr_type (otr_type, true);
2577
2578 /* Recording type variants would waste the results cache. */
2579 gcc_assert (!context.outer_type
2580 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
2581
2582 /* Lookup the outer class type we want to walk. */
2583 if ((context.outer_type || context.speculative_outer_type)
2584 && !get_class_context (&context, otr_type))
2585 {
2586 if (completep)
2587 *completep = false;
2588 if (cache_token)
2589 *cache_token = NULL;
2590 if (speculative_targetsp)
2591 *speculative_targetsp = 0;
2592 return nodes;
2593 }
2594
2595 /* Check that get_class_context kept the main variant. */
2596 gcc_assert (!context.outer_type
2597 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
2598
2599 /* We canonicalize our query, so we do not need extra hashtable entries. */
2600
2601 /* Without an outer type, we have no use for the offset. Just do the
2602 basic search from the inner type. */
2603 if (!context.outer_type)
2604 {
2605 context.outer_type = otr_type;
2606 context.offset = 0;
2607 }
2608 /* We need to update our hierarchy if the type does not exist. */
2609 outer_type = get_odr_type (context.outer_type, true);
2610 /* If the type is final, there are no derivations. */
2611 if (TYPE_FINAL_P (outer_type->type))
2612 context.maybe_derived_type = false;
2613
2614 /* Initialize query cache. */
2615 if (!cached_polymorphic_call_targets)
2616 {
2617 cached_polymorphic_call_targets = new hash_set<cgraph_node *>;
2618 polymorphic_call_target_hash
2619 = new polymorphic_call_target_hash_type (23);
2620 if (!node_removal_hook_holder)
2621 {
2622 node_removal_hook_holder =
2623 cgraph_add_node_removal_hook (&devirt_node_removal_hook, NULL);
2624 varpool_add_node_removal_hook (&devirt_variable_node_removal_hook,
2625 NULL);
2626 }
2627 }
2628
2629 /* Lookup cached answer. */
2630 key.type = type;
2631 key.otr_token = otr_token;
2632 key.context = context;
2633 slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
2634 if (cache_token)
2635 *cache_token = (void *)*slot;
2636 if (*slot)
2637 {
2638 if (completep)
2639 *completep = (*slot)->complete;
2640 if (speculative_targetsp)
2641 *speculative_targetsp = (*slot)->speculative_targets;
2642 return (*slot)->targets;
2643 }
2644
2645 complete = true;
2646
2647 /* Do actual search. */
2648 timevar_push (TV_IPA_VIRTUAL_CALL);
2649 *slot = XCNEW (polymorphic_call_target_d);
2650 if (cache_token)
2651 *cache_token = (void *)*slot;
2652 (*slot)->type = type;
2653 (*slot)->otr_token = otr_token;
2654 (*slot)->context = context;
2655 (*slot)->speculative_targets = 0;
2656
2657 hash_set<tree> inserted;
2658 hash_set<tree> matched_vtables;
2659
2660 if (context.speculative_outer_type)
2661 {
2662 odr_type speculative_outer_type;
2663 speculative_outer_type = get_odr_type (context.speculative_outer_type, true);
2664 if (TYPE_FINAL_P (speculative_outer_type->type))
2665 context.speculative_maybe_derived_type = false;
2666 binfo = get_binfo_at_offset (TYPE_BINFO (speculative_outer_type->type),
2667 context.speculative_offset, otr_type);
2668 if (binfo)
2669 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
2670 &can_refer);
2671 else
2672 target = NULL;
2673
2674 if (target)
2675 {
2676 /* If we found a final method, we don't need
2677 to walk derivations. */
2678 if (DECL_FINAL_P (target))
2679 context.speculative_maybe_derived_type = false;
2680 }
2681 if (type_possibly_instantiated_p (speculative_outer_type->type))
2682 maybe_record_node (nodes, target, &inserted, can_refer, &complete);
2683 if (binfo)
2684 matched_vtables.add (BINFO_VTABLE (binfo));
2685 /* Next, walk all derived types recursively. */
2686 if (context.speculative_maybe_derived_type)
2687 {
2688 /* For anonymous namespace types we can attempt to build the full type.
2689 All derivations must be in this unit (unless we see a partial unit). */
2690 if (!type->all_derivations_known)
2691 complete = false;
2692 for (i = 0; i < speculative_outer_type->derived_types.length(); i++)
2693 possible_polymorphic_call_targets_1 (nodes, &inserted,
2694 &matched_vtables,
2695 otr_type,
2696 speculative_outer_type->derived_types[i],
2697 otr_token, speculative_outer_type->type,
2698 context.speculative_offset, &complete,
2699 bases_to_consider,
2700 false);
2701 }
2702 /* Remember the number of speculatively likely targets found so far. */
2703 (*slot)->speculative_targets = nodes.length();
2704 }
2705
2706 /* First see the virtual method of the type itself. */
2707 binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
2708 context.offset, otr_type);
2709 if (binfo)
2710 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
2711 &can_refer);
2712 else
2713 {
2714 gcc_assert (odr_violation_reported);
2715 target = NULL;
2716 }
2717
2718 /* Destructors are never called through construction virtual tables,
2719 because the type is always known. */
2720 if (target && DECL_CXX_DESTRUCTOR_P (target))
2721 context.maybe_in_construction = false;
2722
2723 if (target)
2724 {
2725 /* If we found a final method, we don't need
2726 to walk derivations. */
2727 if (DECL_FINAL_P (target))
2728 context.maybe_derived_type = false;
2729 }
2730
2731 /* If OUTER_TYPE is abstract, we know we are not seeing its instance. */
2732 if (type_possibly_instantiated_p (outer_type->type))
2733 maybe_record_node (nodes, target, &inserted, can_refer, &complete);
2734 else
2735 {
2736 skipped = true;
2737 gcc_assert (in_lto_p || context.maybe_derived_type);
2738 }
2739
2740 if (binfo)
2741 matched_vtables.add (BINFO_VTABLE (binfo));
2742
2743 /* Next, walk all derived types recursively. */
2744 if (context.maybe_derived_type)
2745 {
2746 /* For anonymous namespace types we can attempt to build the full type.
2747 All derivations must be in this unit (unless we see a partial unit). */
2748 if (!type->all_derivations_known)
2749 complete = false;
2750 for (i = 0; i < outer_type->derived_types.length(); i++)
2751 possible_polymorphic_call_targets_1 (nodes, &inserted,
2752 &matched_vtables,
2753 otr_type,
2754 outer_type->derived_types[i],
2755 otr_token, outer_type->type,
2756 context.offset, &complete,
2757 bases_to_consider,
2758 context.maybe_in_construction);
2759 }
2760
2761 /* If no speculative targets were recorded, everything found so far counts as likely. */
2762 if (!(*slot)->speculative_targets)
2763 (*slot)->speculative_targets = nodes.length();
2764
2765 /* Destructors are never called through construction virtual tables,
2766 because the type is always known. One of the entries may be cxa_pure_virtual,
2767 so look at the first two of them. */
2768 if (context.maybe_in_construction)
2769 for (i = 0; i < MIN (nodes.length (), 2); i++)
2770 if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
2771 context.maybe_in_construction = false;
2772 if (context.maybe_in_construction)
2773 {
2774 if (type != outer_type
2775 && (!skipped
2776 || (context.maybe_derived_type
2777 && !type_all_derivations_known_p (outer_type->type))))
2778 record_targets_from_bases (otr_type, otr_token, outer_type->type,
2779 context.offset, nodes, &inserted,
2780 &matched_vtables, &complete);
2781 if (skipped)
2782 maybe_record_node (nodes, target, &inserted, can_refer, &complete);
2783 for (i = 0; i < bases_to_consider.length(); i++)
2784 maybe_record_node (nodes, bases_to_consider[i], &inserted, can_refer, &complete);
2785 }
2786 bases_to_consider.release();
2787
2788 (*slot)->targets = nodes;
2789 (*slot)->complete = complete;
2790 if (completep)
2791 *completep = complete;
2792 if (speculative_targetsp)
2793 *speculative_targetsp = (*slot)->speculative_targets;
2794
2795 timevar_pop (TV_IPA_VIRTUAL_CALL);
2796 return nodes;
2797 }
2798
2799 /* Dump all possible targets of a polymorphic call. */
2800
2801 void
2802 dump_possible_polymorphic_call_targets (FILE *f,
2803 tree otr_type,
2804 HOST_WIDE_INT otr_token,
2805 const ipa_polymorphic_call_context &ctx)
2806 {
2807 vec <cgraph_node *> targets;
2808 bool final;
2809 odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false);
2810 unsigned int i;
2811 int speculative;
2812
2813 if (!type)
2814 return;
2815 targets = possible_polymorphic_call_targets (otr_type, otr_token,
2816 ctx,
2817 &final, NULL, &speculative);
2818 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
2819 print_generic_expr (f, type->type, TDF_SLIM);
2820 fprintf (f, " token %i\n", (int)otr_token);
2821 if (ctx.outer_type || ctx.offset)
2822 {
2823 fprintf (f, " Contained in type:");
2824 print_generic_expr (f, ctx.outer_type, TDF_SLIM);
2825 fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n",
2826 ctx.offset);
2827 }
2828 if (ctx.speculative_outer_type)
2829 {
2830 fprintf (f, " Speculatively contained in type:");
2831 print_generic_expr (f, ctx.speculative_outer_type, TDF_SLIM);
2832 fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n",
2833 ctx.speculative_offset);
2834 }
2835
2836 fprintf (f, " %s%s%s%s\n ",
2837 final ? "This is a complete list." :
2838 "This is partial list; extra targets may be defined in other units.",
2839 ctx.maybe_in_construction ? " (base types included)" : "",
2840 ctx.maybe_derived_type ? " (derived types included)" : "",
2841 ctx.speculative_maybe_derived_type ? " (speculative derived types included)" : "");
2842 for (i = 0; i < targets.length (); i++)
2843 {
2844 char *name = NULL;
2845 if (i == (unsigned)speculative)
2846 fprintf (f, "\n Targets that are not likely:\n"
2847 " ");
2848 if (in_lto_p)
2849 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
2850 fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
2851 if (in_lto_p)
2852 free (name);
2853 if (!targets[i]->definition)
2854 fprintf (f, " (no definition%s)",
2855 DECL_DECLARED_INLINE_P (targets[i]->decl)
2856 ? " inline" : "");
2857 }
2858 fprintf (f, "\n\n");
2859 }
2860
2861
2862 /* Return true if N can possibly be a target of a polymorphic call of
2863 OTR_TYPE/OTR_TOKEN. */
2864
2865 bool
2866 possible_polymorphic_call_target_p (tree otr_type,
2867 HOST_WIDE_INT otr_token,
2868 const ipa_polymorphic_call_context &ctx,
2869 struct cgraph_node *n)
2870 {
2871 vec <cgraph_node *> targets;
2872 unsigned int i;
2873 enum built_in_function fcode;
2874 bool final;
2875
2876 if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
2877 && ((fcode = DECL_FUNCTION_CODE (n->decl))
2878 == BUILT_IN_UNREACHABLE
2879 || fcode == BUILT_IN_TRAP))
2880 return true;
2881
2882 if (!odr_hash)
2883 return true;
2884 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
2885 for (i = 0; i < targets.length (); i++)
2886 if (n->semantically_equivalent_p (targets[i]))
2887 return true;
2888
2889 /* At the moment we allow the middle end to dig out new external declarations
2890 as targets of polymorphic calls. */
2891 if (!final && !n->definition)
2892 return true;
2893 return false;
2894 }
2895
2896
2897 /* After callgraph construction new external nodes may appear.
2898 Add them into the graph. */
2899
2900 void
2901 update_type_inheritance_graph (void)
2902 {
2903 struct cgraph_node *n;
2904
2905 if (!odr_hash)
2906 return;
2907 free_polymorphic_call_targets_hash ();
2908 timevar_push (TV_IPA_INHERITANCE);
2909 /* We reconstruct the graph starting from the types of all methods seen
2910 in the unit. */
2911 FOR_EACH_FUNCTION (n)
2912 if (DECL_VIRTUAL_P (n->decl)
2913 && !n->definition
2914 && n->real_symbol_p ())
2915 get_odr_type (method_class_type (TYPE_MAIN_VARIANT (TREE_TYPE (n->decl))),
2916 true);
2917 timevar_pop (TV_IPA_INHERITANCE);
2918 }
2919
2920
2921 /* Return true if N looks like a likely target of a polymorphic call.
2922 Rule out cxa_pure_virtual, noreturns, functions declared cold and
2923 other obvious cases. */
2924
2925 bool
2926 likely_target_p (struct cgraph_node *n)
2927 {
2928 int flags;
2929 /* cxa_pure_virtual and similar things are not likely. */
2930 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
2931 return false;
2932 flags = flags_from_decl_or_type (n->decl);
2933 if (flags & ECF_NORETURN)
2934 return false;
2935 if (lookup_attribute ("cold",
2936 DECL_ATTRIBUTES (n->decl)))
2937 return false;
2938 if (n->frequency < NODE_FREQUENCY_NORMAL)
2939 return false;
2940 /* If there are no live virtual tables referring to the target,
2941 the only way the target can be called is via an instance coming from another
2942 compilation unit; speculative devirtualization is built around the
2943 assumption that this won't happen. */
2944 if (!referenced_from_vtable_p (n))
2945 return false;
2946 return true;
2947 }
2948
2949 /* The ipa-devirt pass.
2950 When a polymorphic call has only one likely target in the unit,
2951 turn it into a speculative call. */
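/* The intended effect, in pseudo source (illustrative only):

     obj->f ();				// polymorphic call

   becomes, when B::f is the single likely target,

     if (obj has B's vtable)		// speculation guard
       B::f (obj);			// direct call, now inlinable
     else
       obj->f ();			// original polymorphic call

   The actual guard is emitted by cgraph_turn_edge_to_speculative; this
   sketch only shows the shape of the transformation.  */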
2952
2953 static unsigned int
2954 ipa_devirt (void)
2955 {
2956 struct cgraph_node *n;
2957 hash_set<void *> bad_call_targets;
2958 struct cgraph_edge *e;
2959
2960 int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
2961 int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
2962 int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
2963
2964 FOR_EACH_DEFINED_FUNCTION (n)
2965 {
2966 bool update = false;
2967 if (dump_file && n->indirect_calls)
2968 fprintf (dump_file, "\n\nProcessing function %s/%i\n",
2969 n->name (), n->order);
2970 for (e = n->indirect_calls; e; e = e->next_callee)
2971 if (e->indirect_info->polymorphic)
2972 {
2973 struct cgraph_node *likely_target = NULL;
2974 void *cache_token;
2975 bool final;
2976 int speculative_targets;
2977 vec <cgraph_node *> targets
2978 = possible_polymorphic_call_targets
2979 (e, &final, &cache_token, &speculative_targets);
2980 unsigned int i;
2981
2982 if (dump_file)
2983 dump_possible_polymorphic_call_targets
2984 (dump_file, e);
2985
2986 npolymorphic++;
2987
2988 if (!cgraph_maybe_hot_edge_p (e))
2989 {
2990 if (dump_file)
2991 fprintf (dump_file, "Call is cold\n\n");
2992 ncold++;
2993 continue;
2994 }
2995 if (e->speculative)
2996 {
2997 if (dump_file)
2998 fprintf (dump_file, "Call is aready speculated\n\n");
2999 nspeculated++;
3000
3001 /* When dumping see if we agree with speculation. */
3002 if (!dump_file)
3003 continue;
3004 }
3005 if (bad_call_targets.contains (cache_token))
3006 {
3007 if (dump_file)
3008 fprintf (dump_file, "Target list is known to be useless\n\n");
3009 nmultiple++;
3010 continue;
3011 }
3012 for (i = 0; i < targets.length (); i++)
3013 if (likely_target_p (targets[i]))
3014 {
3015 if (likely_target)
3016 {
3017 if (i < (unsigned) speculative_targets)
3018 {
3019 likely_target = NULL;
3020 if (dump_file)
3021 fprintf (dump_file, "More than one likely target\n\n");
3022 nmultiple++;
3023 }
3024 break;
3025 }
3026 likely_target = targets[i];
3027 }
3028 if (!likely_target)
3029 {
3030 bad_call_targets.add (cache_token);
3031 continue;
3032 }
3033 /* This is reached only when dumping; check if we agree or disagree
3034 with the speculation. */
3035 if (e->speculative)
3036 {
3037 struct cgraph_edge *e2;
3038 struct ipa_ref *ref;
3039 cgraph_speculative_call_info (e, e2, e, ref);
3040 if (e2->callee->ultimate_alias_target ()
3041 == likely_target->ultimate_alias_target ())
3042 {
3043 fprintf (dump_file, "We agree with speculation\n\n");
3044 nok++;
3045 }
3046 else
3047 {
3048 fprintf (dump_file, "We disagree with speculation\n\n");
3049 nwrong++;
3050 }
3051 continue;
3052 }
3053 if (!likely_target->definition)
3054 {
3055 if (dump_file)
3056 fprintf (dump_file, "Target is not an definition\n\n");
3057 nnotdefined++;
3058 continue;
3059 }
3060 /* Do not introduce new references to external symbols. While we
3061 can handle these just fine, it is common for programs to be built
3062 incorrectly with headers defining methods they are not actually
3063 linked with. */
3064 if (DECL_EXTERNAL (likely_target->decl))
3065 {
3066 if (dump_file)
3067 fprintf (dump_file, "Target is external\n\n");
3068 nexternal++;
3069 continue;
3070 }
3071 /* Don't use an implicitly-declared destructor (c++/58678). */
3072 struct cgraph_node *non_thunk_target
3073 = likely_target->function_symbol ();
3074 if (DECL_ARTIFICIAL (non_thunk_target->decl)
3075 && DECL_COMDAT (non_thunk_target->decl))
3076 {
3077 if (dump_file)
3078 fprintf (dump_file, "Target is artificial\n\n");
3079 nartificial++;
3080 continue;
3081 }
3082 if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
3083 && likely_target->can_be_discarded_p ())
3084 {
3085 if (dump_file)
3086 fprintf (dump_file, "Target is overwritable\n\n");
3087 noverwritable++;
3088 continue;
3089 }
3090 else if (dbg_cnt (devirt))
3091 {
3092 if (dump_enabled_p ())
3093 {
3094 location_t locus = gimple_location_safe (e->call_stmt);
3095 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
3096 "speculatively devirtualizing call in %s/%i to %s/%i\n",
3097 n->name (), n->order,
3098 likely_target->name (),
3099 likely_target->order);
3100 }
3101 if (!likely_target->can_be_discarded_p ())
3102 {
3103 cgraph_node *alias;
3104 alias = dyn_cast<cgraph_node *> (likely_target->noninterposable_alias ());
3105 if (alias)
3106 likely_target = alias;
3107 }
3108 nconverted++;
3109 update = true;
3110 cgraph_turn_edge_to_speculative
3111 (e, likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
3112 }
3113 }
3114 if (update)
3115 inline_update_overall_summary (n);
3116 }
3117
3118 if (dump_file)
3119 fprintf (dump_file,
3120 "%i polymorphic calls, %i devirtualized,"
3121 " %i speculatively devirtualized, %i cold\n"
3122 "%i have multiple targets, %i overwritable,"
3123 " %i already speculated (%i agree, %i disagree),"
3124 " %i external, %i not defined, %i artificial\n",
3125 npolymorphic, ndevirtualized, nconverted, ncold,
3126 nmultiple, noverwritable, nspeculated, nok, nwrong,
3127 nexternal, nnotdefined, nartificial);
3128 return ndevirtualized ? TODO_remove_functions : 0;
3129 }
3130
3131 namespace {
3132
3133 const pass_data pass_data_ipa_devirt =
3134 {
3135 IPA_PASS, /* type */
3136 "devirt", /* name */
3137 OPTGROUP_NONE, /* optinfo_flags */
3138 TV_IPA_DEVIRT, /* tv_id */
3139 0, /* properties_required */
3140 0, /* properties_provided */
3141 0, /* properties_destroyed */
3142 0, /* todo_flags_start */
3143 ( TODO_dump_symtab ), /* todo_flags_finish */
3144 };
3145
3146 class pass_ipa_devirt : public ipa_opt_pass_d
3147 {
3148 public:
3149 pass_ipa_devirt (gcc::context *ctxt)
3150 : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
3151 NULL, /* generate_summary */
3152 NULL, /* write_summary */
3153 NULL, /* read_summary */
3154 NULL, /* write_optimization_summary */
3155 NULL, /* read_optimization_summary */
3156 NULL, /* stmt_fixup */
3157 0, /* function_transform_todo_flags_start */
3158 NULL, /* function_transform */
3159 NULL) /* variable_transform */
3160 {}
3161
3162 /* opt_pass methods: */
3163 virtual bool gate (function *)
3164 {
3165 return (flag_devirtualize
3166 && flag_devirtualize_speculatively
3167 && optimize);
3168 }
3169
3170 virtual unsigned int execute (function *) { return ipa_devirt (); }
3171
3172 }; // class pass_ipa_devirt
3173
3174 } // anon namespace
3175
3176 ipa_opt_pass_d *
3177 make_pass_ipa_devirt (gcc::context *ctxt)
3178 {
3179 return new pass_ipa_devirt (ctxt);
3180 }
3181
3182 #include "gt-ipa-devirt.h"