Add an abstract incremental hash data type
[gcc.git] / gcc / ipa-devirt.c
1 /* Basic IPA utilities for type inheritance graph construction and
2 devirtualization.
3 Copyright (C) 2013-2014 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* Brief vocabulary:
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
38
39 OTR = OBJ_TYPE_REF
40 This is the Gimple representation of type information of a polymorphic call.
41 It contains two parameters:
42 otr_type is a type of class whose method is called.
43 otr_token is the index into virtual table where address is taken.
44
45 BINFO
46 This is the type inheritance information attached to each tree
47 RECORD_TYPE by the C++ frontend. It provides information about base
48 types and virtual tables.
49
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
52 BINFO_VTABLE.
53
54 Base types of a given type are enumerated by BINFO_BASE_BINFO
55 vector. Members of this vectors are not BINFOs associated
56 with a base type. Rather they are new copies of BINFOs
57 (base BINFOs). Their virtual tables may differ from
58 virtual table of the base type. Also BINFO_OFFSET specifies
59 offset of the base within the type.
60
61 In the case of single inheritance, the virtual table is shared
62 and BINFO_VTABLE of base BINFO is NULL. In the case of multiple
63 inheritance the individual virtual tables are pointed to by
64 BINFO_VTABLE of base binfos (that differs of BINFO_VTABLE of
65 binfo associated to the base type).
66
67 BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
70 base type.
71
72 token
73 This is an index of virtual method in virtual table associated
74 to the type defining it. Token can be looked up from OBJ_TYPE_REF
75 or from DECL_VINDEX of a given virtual table.
76
77 polymorphic (indirect) call
78 This is the callgraph representation of a virtual method call. Every
79 polymorphic call contains otr_type and otr_token taken from
80 original OBJ_TYPE_REF at callgraph construction time.
81
82 What we do here:
83
84 build_type_inheritance_graph triggers a construction of the type inheritance
85 graph.
86
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
89 inserted into the graph. Also types without virtual methods are not
90 represented at all, though it may be easy to add this.
91
92 The inheritance graph is represented as follows:
93
94 Vertices are structures odr_type. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by ODR rule.
96 (the multiple type nodes appear only with linktime optimization)
97
98 Edges are represented by odr_type->base and odr_type->derived_types.
99 At the moment we do not track offsets of types for multiple inheritance.
100 Adding this is easy.
101
102 possible_polymorphic_call_targets returns, given an parameters found in
103 indirect polymorphic edge all possible polymorphic call targets of the call.
104
105 pass_ipa_devirt performs simple speculative devirtualization.
106 */
107
108 #include "config.h"
109 #include "system.h"
110 #include "coretypes.h"
111 #include "tm.h"
112 #include "tree.h"
113 #include "print-tree.h"
114 #include "calls.h"
115 #include "cgraph.h"
116 #include "expr.h"
117 #include "tree-pass.h"
118 #include "pointer-set.h"
119 #include "target.h"
120 #include "hash-table.h"
121 #include "inchash.h"
122 #include "tree-pretty-print.h"
123 #include "ipa-utils.h"
124 #include "tree-ssa-alias.h"
125 #include "internal-fn.h"
126 #include "gimple-fold.h"
127 #include "gimple-expr.h"
128 #include "gimple.h"
129 #include "ipa-inline.h"
130 #include "diagnostic.h"
131 #include "tree-dfa.h"
132 #include "demangle.h"
133 #include "dbgcnt.h"
134 #include "stor-layout.h"
135 #include "intl.h"
136
137 static bool odr_types_equivalent_p (tree, tree, bool, bool *, pointer_set_t *);
138
139 static bool odr_violation_reported = false;
140
141 /* Dummy polymorphic call context. */
142
143 const ipa_polymorphic_call_context ipa_dummy_polymorphic_call_context
144 = {0, NULL, false, true};
145
146 /* Pointer set of all call targets appearing in the cache. */
147 static pointer_set_t *cached_polymorphic_call_targets;
148
149 /* The node of type inheritance graph. For each type unique in
150 One Defintion Rule (ODR) sense, we produce one node linking all
151 main variants of types equivalent to it, bases and derived types. */
152
153 struct GTY(()) odr_type_d
154 {
155 /* leader type. */
156 tree type;
157 /* All bases; built only for main variants of types */
158 vec<odr_type> GTY((skip)) bases;
159 /* All derrived types with virtual methods seen in unit;
160 built only for main variants oftypes */
161 vec<odr_type> GTY((skip)) derived_types;
162
163 /* All equivalent types, if more than one. */
164 vec<tree, va_gc> *types;
165 /* Set of all equivalent types, if NON-NULL. */
166 pointer_set_t * GTY((skip)) types_set;
167
168 /* Unique ID indexing the type in odr_types array. */
169 int id;
170 /* Is it in anonymous namespace? */
171 bool anonymous_namespace;
172 /* Do we know about all derivations of given type? */
173 bool all_derivations_known;
174 /* Did we report ODR violation here? */
175 bool odr_violated;
176 };
177
178
179 /* Return true if BINFO corresponds to a type with virtual methods.
180
181 Every type has several BINFOs. One is the BINFO associated by the type
182 while other represents bases of derived types. The BINFOs representing
183 bases do not have BINFO_VTABLE pointer set when this is the single
184 inheritance (because vtables are shared). Look up the BINFO of type
185 and check presence of its vtable. */
186
187 static inline bool
188 polymorphic_type_binfo_p (tree binfo)
189 {
190 /* See if BINFO's type has an virtual table associtated with it. */
191 return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo)));
192 }
193
194 /* Return TRUE if all derived types of T are known and thus
195 we may consider the walk of derived type complete.
196
197 This is typically true only for final anonymous namespace types and types
198 defined within functions (that may be COMDAT and thus shared across units,
199 but with the same set of derived types). */
200
201 static bool
202 type_all_derivations_known_p (tree t)
203 {
204 if (TYPE_FINAL_P (t))
205 return true;
206 if (flag_ltrans)
207 return false;
208 if (type_in_anonymous_namespace_p (t))
209 return true;
210 return (decl_function_context (TYPE_NAME (t)) != NULL);
211 }
212
213 /* Return TURE if type's constructors are all visible. */
214
215 static bool
216 type_all_ctors_visible_p (tree t)
217 {
218 return !flag_ltrans
219 && cgraph_state >= CGRAPH_STATE_CONSTRUCTION
220 /* We can not always use type_all_derivations_known_p.
221 For function local types we must assume case where
222 the function is COMDAT and shared in between units.
223
224 TODO: These cases are quite easy to get, but we need
225 to keep track of C++ privatizing via -Wno-weak
226 as well as the IPA privatizing. */
227 && type_in_anonymous_namespace_p (t);
228 }
229
230 /* Return TRUE if type may have instance. */
231
232 static bool
233 type_possibly_instantiated_p (tree t)
234 {
235 tree vtable;
236 varpool_node *vnode;
237
238 /* TODO: Add abstract types here. */
239 if (!type_all_ctors_visible_p (t))
240 return true;
241
242 vtable = BINFO_VTABLE (TYPE_BINFO (t));
243 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
244 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
245 vnode = varpool_node::get (vtable);
246 return vnode && vnode->definition;
247 }
248
249 /* One Definition Rule hashtable helpers. */
250
251 struct odr_hasher
252 {
253 typedef odr_type_d value_type;
254 typedef union tree_node compare_type;
255 static inline hashval_t hash (const value_type *);
256 static inline bool equal (const value_type *, const compare_type *);
257 static inline void remove (value_type *);
258 };
259
260 /* Return type that was declared with T's name so that T is an
261 qualified variant of it. */
262
263 static inline tree
264 main_odr_variant (const_tree t)
265 {
266 if (TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
267 return TREE_TYPE (TYPE_NAME (t));
268 /* Unnamed types and non-C++ produced types can be compared by variants. */
269 else
270 return TYPE_MAIN_VARIANT (t);
271 }
272
273 /* Produce hash based on type name. */
274
275 static hashval_t
276 hash_type_name (tree t)
277 {
278 gcc_checking_assert (main_odr_variant (t) == t);
279
280 /* If not in LTO, all main variants are unique, so we can do
281 pointer hash. */
282 if (!in_lto_p)
283 return htab_hash_pointer (t);
284
285 /* Anonymous types are unique. */
286 if (type_in_anonymous_namespace_p (t))
287 return htab_hash_pointer (t);
288
289 /* For polymorphic types, we can simply hash the virtual table. */
290 if (TREE_CODE (t) == RECORD_TYPE
291 && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
292 {
293 tree v = BINFO_VTABLE (TYPE_BINFO (t));
294 hashval_t hash = 0;
295
296 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
297 {
298 hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
299 v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
300 }
301
302 v = DECL_ASSEMBLER_NAME (v);
303 hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
304 return hash;
305 }
306
307 /* Rest is not implemented yet. */
308 gcc_unreachable ();
309 }
310
311 /* Return the computed hashcode for ODR_TYPE. */
312
313 inline hashval_t
314 odr_hasher::hash (const value_type *odr_type)
315 {
316 return hash_type_name (odr_type->type);
317 }
318
319 /* For languages with One Definition Rule, work out if
320 types are the same based on their name.
321
322 This is non-trivial for LTO where minnor differences in
323 the type representation may have prevented type merging
324 to merge two copies of otherwise equivalent type.
325
326 Until we start streaming mangled type names, this function works
327 only for polymorphic types. */
328
329 bool
330 types_same_for_odr (const_tree type1, const_tree type2)
331 {
332 gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));
333
334 type1 = main_odr_variant (type1);
335 type2 = main_odr_variant (type2);
336
337 if (type1 == type2)
338 return true;
339
340 if (!in_lto_p)
341 return false;
342
343 /* Check for anonymous namespaces. Those have !TREE_PUBLIC
344 on the corresponding TYPE_STUB_DECL. */
345 if (type_in_anonymous_namespace_p (type1)
346 || type_in_anonymous_namespace_p (type2))
347 return false;
348
349 /* See if types are obvoiusly different (i.e. different codes
350 or polymorphis wrt non-polymorphic). This is not strictly correct
351 for ODR violating programs, but we can't do better without streaming
352 ODR names. */
353 if (TREE_CODE (type1) != TREE_CODE (type2))
354 return false;
355 if (TREE_CODE (type1) == RECORD_TYPE
356 && (TYPE_BINFO (type1) == NULL_TREE) != (TYPE_BINFO (type1) == NULL_TREE))
357 return false;
358 if (TREE_CODE (type1) == RECORD_TYPE && TYPE_BINFO (type1)
359 && (BINFO_VTABLE (TYPE_BINFO (type1)) == NULL_TREE)
360 != (BINFO_VTABLE (TYPE_BINFO (type2)) == NULL_TREE))
361 return false;
362
363 /* At the moment we have no way to establish ODR equivlaence at LTO
364 other than comparing virtual table pointrs of polymorphic types.
365 Eventually we should start saving mangled names in TYPE_NAME.
366 Then this condition will become non-trivial. */
367
368 if (TREE_CODE (type1) == RECORD_TYPE
369 && TYPE_BINFO (type1) && TYPE_BINFO (type2)
370 && BINFO_VTABLE (TYPE_BINFO (type1))
371 && BINFO_VTABLE (TYPE_BINFO (type2)))
372 {
373 tree v1 = BINFO_VTABLE (TYPE_BINFO (type1));
374 tree v2 = BINFO_VTABLE (TYPE_BINFO (type2));
375 gcc_assert (TREE_CODE (v1) == POINTER_PLUS_EXPR
376 && TREE_CODE (v2) == POINTER_PLUS_EXPR);
377 return (operand_equal_p (TREE_OPERAND (v1, 1),
378 TREE_OPERAND (v2, 1), 0)
379 && DECL_ASSEMBLER_NAME
380 (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
381 == DECL_ASSEMBLER_NAME
382 (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
383 }
384 gcc_unreachable ();
385 }
386
387
388 /* Compare types T1 and T2 and return true if they are
389 equivalent. */
390
391 inline bool
392 odr_hasher::equal (const value_type *t1, const compare_type *ct2)
393 {
394 tree t2 = const_cast <tree> (ct2);
395
396 gcc_checking_assert (main_odr_variant (t2) == t2);
397 if (t1->type == t2)
398 return true;
399 if (!in_lto_p)
400 return false;
401 return types_same_for_odr (t1->type, t2);
402 }
403
404 /* Free ODR type V. */
405
406 inline void
407 odr_hasher::remove (value_type *v)
408 {
409 v->bases.release ();
410 v->derived_types.release ();
411 if (v->types_set)
412 pointer_set_destroy (v->types_set);
413 ggc_free (v);
414 }
415
416 /* ODR type hash used to lookup ODR type based on tree type node. */
417
418 typedef hash_table<odr_hasher> odr_hash_type;
419 static odr_hash_type *odr_hash;
420
421 /* ODR types are also stored into ODR_TYPE vector to allow consistent
422 walking. Bases appear before derived types. Vector is garbage collected
423 so we won't end up visiting empty types. */
424
425 static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
426 #define odr_types (*odr_types_ptr)
427
428 /* Set TYPE_BINFO of TYPE and its variants to BINFO. */
429 void
430 set_type_binfo (tree type, tree binfo)
431 {
432 for (; type; type = TYPE_NEXT_VARIANT (type))
433 if (COMPLETE_TYPE_P (type))
434 TYPE_BINFO (type) = binfo;
435 else
436 gcc_assert (!TYPE_BINFO (type));
437 }
438
439 /* Compare T2 and T2 based on name or structure. */
440
441 static bool
442 odr_subtypes_equivalent_p (tree t1, tree t2, pointer_set_t *visited)
443 {
444 bool an1, an2;
445
446 /* This can happen in incomplete types that should be handled earlier. */
447 gcc_assert (t1 && t2);
448
449 t1 = main_odr_variant (t1);
450 t2 = main_odr_variant (t2);
451 if (t1 == t2)
452 return true;
453 if (TREE_CODE (t1) != TREE_CODE (t2))
454 return false;
455 if ((TYPE_NAME (t1) == NULL_TREE) != (TYPE_NAME (t2) == NULL_TREE))
456 return false;
457 if (TYPE_NAME (t1) && DECL_NAME (TYPE_NAME (t1)) != DECL_NAME (TYPE_NAME (t2)))
458 return false;
459
460 /* Anonymous namespace types must match exactly. */
461 an1 = type_in_anonymous_namespace_p (t1);
462 an2 = type_in_anonymous_namespace_p (t2);
463 if (an1 != an2 || an1)
464 return false;
465
466 /* For types where we can not establish ODR equivalency, recurse and deeply
467 compare. */
468 if (TREE_CODE (t1) != RECORD_TYPE
469 || !TYPE_BINFO (t1) || !TYPE_BINFO (t2)
470 || !polymorphic_type_binfo_p (TYPE_BINFO (t1))
471 || !polymorphic_type_binfo_p (TYPE_BINFO (t2)))
472 {
473 /* This should really be a pair hash, but for the moment we do not need
474 100% reliability and it would be better to compare all ODR types so
475 recursion here is needed only for component types. */
476 if (pointer_set_insert (visited, t1))
477 return true;
478 return odr_types_equivalent_p (t1, t2, false, NULL, visited);
479 }
480 return types_same_for_odr (t1, t2);
481 }
482
483 /* Output ODR violation warning about T1 and T2 with REASON.
484 Display location of ST1 and ST2 if REASON speaks about field or
485 method of the type.
486 If WARN is false, do nothing. Set WARNED if warning was indeed
487 output. */
488
489 void
490 warn_odr (tree t1, tree t2, tree st1, tree st2,
491 bool warn, bool *warned, const char *reason)
492 {
493 tree decl2 = TYPE_NAME (t2);
494
495 if (!warn)
496 return;
497 if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (t1)), OPT_Wodr,
498 "type %qT violates one definition rule",
499 t1))
500 return;
501 if (!st1)
502 ;
503 else if (TREE_CODE (st1) == FIELD_DECL)
504 {
505 inform (DECL_SOURCE_LOCATION (decl2),
506 "a different type is defined in another translation unit");
507 inform (DECL_SOURCE_LOCATION (st1),
508 "the first difference of corresponding definitions is field %qD",
509 st1);
510 decl2 = st2;
511 }
512 else if (TREE_CODE (st1) == FUNCTION_DECL)
513 {
514 inform (DECL_SOURCE_LOCATION (decl2),
515 "a different type is defined in another translation unit");
516 inform (DECL_SOURCE_LOCATION (st1),
517 "the first difference of corresponding definitions is method %qD",
518 st1);
519 decl2 = st2;
520 }
521 else
522 return;
523 inform (DECL_SOURCE_LOCATION (decl2), reason);
524
525 if (warned)
526 *warned = true;
527 }
528
529 /* We already warned about ODR mismatch. T1 and T2 ought to be equivalent
530 because they are used on same place in ODR matching types.
531 They are not; inform the user. */
532
533 void
534 warn_types_mismatch (tree t1, tree t2)
535 {
536 if (!TYPE_NAME (t1) || !TYPE_NAME (t2))
537 return;
538 /* In Firefox it is a common bug to have same types but in
539 different namespaces. Be a bit more informative on
540 this. */
541 if (TYPE_CONTEXT (t1) && TYPE_CONTEXT (t2)
542 && (((TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL)
543 != (TREE_CODE (TYPE_CONTEXT (t2)) == NAMESPACE_DECL))
544 || (TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL
545 && (DECL_NAME (TYPE_CONTEXT (t1)) !=
546 DECL_NAME (TYPE_CONTEXT (t2))))))
547 inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
548 "type %qT should match type %qT but is defined "
549 "in different namespace ",
550 t1, t2);
551 else
552 inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
553 "type %qT should match type %qT",
554 t1, t2);
555 inform (DECL_SOURCE_LOCATION (TYPE_NAME (t2)),
556 "the incompatible type is defined here");
557 }
558
559 /* Compare T1 and T2, report ODR violations if WARN is true and set
560 WARNED to true if anything is reported. Return true if types match.
561 If true is returned, the types are also compatible in the sense of
562 gimple_canonical_types_compatible_p. */
563
564 static bool
565 odr_types_equivalent_p (tree t1, tree t2, bool warn, bool *warned, pointer_set_t *visited)
566 {
567 /* Check first for the obvious case of pointer identity. */
568 if (t1 == t2)
569 return true;
570 gcc_assert (!type_in_anonymous_namespace_p (t1));
571 gcc_assert (!type_in_anonymous_namespace_p (t2));
572
573 /* Can't be the same type if the types don't have the same code. */
574 if (TREE_CODE (t1) != TREE_CODE (t2))
575 {
576 warn_odr (t1, t2, NULL, NULL, warn, warned,
577 G_("a different type is defined in another translation unit"));
578 return false;
579 }
580
581 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
582 {
583 warn_odr (t1, t2, NULL, NULL, warn, warned,
584 G_("a type with different qualifiers is defined in another "
585 "translation unit"));
586 return false;
587 }
588
589 if (comp_type_attributes (t1, t2) != 1)
590 {
591 warn_odr (t1, t2, NULL, NULL, warn, warned,
592 G_("a type with attributes "
593 "is defined in another translation unit"));
594 return false;
595 }
596
597 if (TREE_CODE (t1) == ENUMERAL_TYPE)
598 {
599 tree v1, v2;
600 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
601 v1 && v2 ; v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
602 {
603 if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
604 {
605 warn_odr (t1, t2, NULL, NULL, warn, warned,
606 G_("an enum with different value name"
607 " is defined in another translation unit"));
608 return false;
609 }
610 if (TREE_VALUE (v1) != TREE_VALUE (v2)
611 && !operand_equal_p (DECL_INITIAL (TREE_VALUE (v1)),
612 DECL_INITIAL (TREE_VALUE (v2)), 0))
613 {
614 warn_odr (t1, t2, NULL, NULL, warn, warned,
615 G_("an enum with different values is defined"
616 " in another translation unit"));
617 return false;
618 }
619 }
620 if (v1 || v2)
621 {
622 warn_odr (t1, t2, NULL, NULL, warn, warned,
623 G_("an enum with mismatching number of values "
624 "is defined in another translation unit"));
625 return false;
626 }
627 }
628
629 /* Non-aggregate types can be handled cheaply. */
630 if (INTEGRAL_TYPE_P (t1)
631 || SCALAR_FLOAT_TYPE_P (t1)
632 || FIXED_POINT_TYPE_P (t1)
633 || TREE_CODE (t1) == VECTOR_TYPE
634 || TREE_CODE (t1) == COMPLEX_TYPE
635 || TREE_CODE (t1) == OFFSET_TYPE
636 || POINTER_TYPE_P (t1))
637 {
638 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
639 {
640 warn_odr (t1, t2, NULL, NULL, warn, warned,
641 G_("a type with different precision is defined "
642 "in another translation unit"));
643 return false;
644 }
645 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
646 {
647 warn_odr (t1, t2, NULL, NULL, warn, warned,
648 G_("a type with different signedness is defined "
649 "in another translation unit"));
650 return false;
651 }
652
653 if (TREE_CODE (t1) == INTEGER_TYPE
654 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
655 {
656 /* char WRT uint_8? */
657 warn_odr (t1, t2, NULL, NULL, warn, warned,
658 G_("a different type is defined in another "
659 "translation unit"));
660 return false;
661 }
662
663 /* For canonical type comparisons we do not want to build SCCs
664 so we cannot compare pointed-to types. But we can, for now,
665 require the same pointed-to type kind and match what
666 useless_type_conversion_p would do. */
667 if (POINTER_TYPE_P (t1))
668 {
669 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
670 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
671 {
672 warn_odr (t1, t2, NULL, NULL, warn, warned,
673 G_("it is defined as a pointer in different address "
674 "space in another translation unit"));
675 return false;
676 }
677
678 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
679 {
680 warn_odr (t1, t2, NULL, NULL, warn, warned,
681 G_("it is defined as a pointer to different type "
682 "in another translation unit"));
683 if (warn && warned)
684 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
685 return false;
686 }
687 }
688
689 /* Tail-recurse to components. */
690 if ((TREE_CODE (t1) == VECTOR_TYPE || TREE_CODE (t1) == COMPLEX_TYPE)
691 && !odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
692 {
693 /* Probably specific enough. */
694 warn_odr (t1, t2, NULL, NULL, warn, warned,
695 G_("a different type is defined "
696 "in another translation unit"));
697 if (warn && warned)
698 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
699 return false;
700 }
701
702 gcc_assert (operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0));
703 gcc_assert (operand_equal_p (TYPE_SIZE_UNIT (t1),
704 TYPE_SIZE_UNIT (t2), 0));
705 gcc_assert (TYPE_MODE (t1) == TYPE_MODE (t2));
706
707 return true;
708 }
709
710 /* Do type-specific comparisons. */
711 switch (TREE_CODE (t1))
712 {
713 case ARRAY_TYPE:
714 {
715 /* Array types are the same if the element types are the same and
716 the number of elements are the same. */
717 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
718 {
719 warn_odr (t1, t2, NULL, NULL, warn, warned,
720 G_("a different type is defined in another "
721 "translation unit"));
722 if (warn && warned)
723 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
724 }
725 gcc_assert (TYPE_STRING_FLAG (t1) == TYPE_STRING_FLAG (t2));
726 gcc_assert (TYPE_NONALIASED_COMPONENT (t1)
727 == TYPE_NONALIASED_COMPONENT (t2));
728
729 tree i1 = TYPE_DOMAIN (t1);
730 tree i2 = TYPE_DOMAIN (t2);
731
732 /* For an incomplete external array, the type domain can be
733 NULL_TREE. Check this condition also. */
734 if (i1 == NULL_TREE || i2 == NULL_TREE)
735 return true;
736
737 tree min1 = TYPE_MIN_VALUE (i1);
738 tree min2 = TYPE_MIN_VALUE (i2);
739 tree max1 = TYPE_MAX_VALUE (i1);
740 tree max2 = TYPE_MAX_VALUE (i2);
741
742 /* In C++, minimums should be always 0. */
743 gcc_assert (min1 == min2);
744 if (!operand_equal_p (max1, max2, 0))
745 {
746 warn_odr (t1, t2, NULL, NULL, warn, warned,
747 G_("an array of different size is defined "
748 "in another translation unit"));
749 return false;
750 }
751 gcc_assert (operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0));
752 gcc_assert (operand_equal_p (TYPE_SIZE_UNIT (t1),
753 TYPE_SIZE_UNIT (t2), 0));
754 }
755 return true;
756
757 case METHOD_TYPE:
758 case FUNCTION_TYPE:
759 /* Function types are the same if the return type and arguments types
760 are the same. */
761 if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
762 {
763 warn_odr (t1, t2, NULL, NULL, warn, warned,
764 G_("has different return value "
765 "in another translation unit"));
766 if (warn && warned)
767 warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
768 return false;
769 }
770
771 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
772 return true;
773 else
774 {
775 tree parms1, parms2;
776
777 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
778 parms1 && parms2;
779 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
780 {
781 if (!odr_subtypes_equivalent_p
782 (TREE_VALUE (parms1), TREE_VALUE (parms2), visited))
783 {
784 warn_odr (t1, t2, NULL, NULL, warn, warned,
785 G_("has different parameters in another "
786 "translation unit"));
787 if (warn && warned)
788 warn_types_mismatch (TREE_VALUE (parms1),
789 TREE_VALUE (parms2));
790 return false;
791 }
792 }
793
794 if (parms1 || parms2)
795 {
796 warn_odr (t1, t2, NULL, NULL, warn, warned,
797 G_("has different parameters "
798 "in another translation unit"));
799 return false;
800 }
801
802 return true;
803 }
804
805 case RECORD_TYPE:
806 case UNION_TYPE:
807 case QUAL_UNION_TYPE:
808 {
809 tree f1, f2;
810
811 /* For aggregate types, all the fields must be the same. */
812 if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
813 {
814 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
815 f1 || f2;
816 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
817 {
818 /* Skip non-fields. */
819 while (f1 && TREE_CODE (f1) != FIELD_DECL)
820 f1 = TREE_CHAIN (f1);
821 while (f2 && TREE_CODE (f2) != FIELD_DECL)
822 f2 = TREE_CHAIN (f2);
823 if (!f1 || !f2)
824 break;
825 if (DECL_ARTIFICIAL (f1) != DECL_ARTIFICIAL (f2))
826 break;
827 if (DECL_NAME (f1) != DECL_NAME (f2)
828 && !DECL_ARTIFICIAL (f1))
829 {
830 warn_odr (t1, t2, f1, f2, warn, warned,
831 G_("a field with different name is defined "
832 "in another translation unit"));
833 return false;
834 }
835 if (!odr_subtypes_equivalent_p (TREE_TYPE (f1), TREE_TYPE (f2), visited))
836 {
837 /* Do not warn about artificial fields and just go into generic
838 field mismatch warning. */
839 if (DECL_ARTIFICIAL (f1))
840 break;
841
842 warn_odr (t1, t2, f1, f2, warn, warned,
843 G_("a field of same name but different type "
844 "is defined in another translation unit"));
845 if (warn && warned)
846 warn_types_mismatch (TREE_TYPE (f1), TREE_TYPE (f2));
847 return false;
848 }
849 if (!gimple_compare_field_offset (f1, f2))
850 {
851 /* Do not warn about artificial fields and just go into generic
852 field mismatch warning. */
853 if (DECL_ARTIFICIAL (f1))
854 break;
855 warn_odr (t1, t2, t1, t2, warn, warned,
856 G_("fields has different layout "
857 "in another translation unit"));
858 return false;
859 }
860 gcc_assert (DECL_NONADDRESSABLE_P (f1)
861 == DECL_NONADDRESSABLE_P (f2));
862 }
863
864 /* If one aggregate has more fields than the other, they
865 are not the same. */
866 if (f1 || f2)
867 {
868 warn_odr (t1, t2, NULL, NULL, warn, warned,
869 G_("a type with different number of fields "
870 "is defined in another translation unit"));
871 return false;
872 }
873 if ((TYPE_MAIN_VARIANT (t1) == t1 || TYPE_MAIN_VARIANT (t2) == t2)
874 && (TYPE_METHODS (TYPE_MAIN_VARIANT (t1))
875 != TYPE_METHODS (TYPE_MAIN_VARIANT (t2))))
876 {
877 for (f1 = TYPE_METHODS (TYPE_MAIN_VARIANT (t1)),
878 f2 = TYPE_METHODS (TYPE_MAIN_VARIANT (t2));
879 f1 && f2 ; f1 = DECL_CHAIN (f1), f2 = DECL_CHAIN (f2))
880 {
881 if (DECL_ASSEMBLER_NAME (f1) != DECL_ASSEMBLER_NAME (f2))
882 {
883 warn_odr (t1, t2, f1, f2, warn, warned,
884 G_("a different method of same type "
885 "is defined in another translation unit"));
886 return false;
887 }
888 if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
889 {
890 warn_odr (t1, t2, f1, f2, warn, warned,
891 G_("s definition that differs by virtual "
892 "keyword in another translation unit"));
893 return false;
894 }
895 if (DECL_VINDEX (f1) != DECL_VINDEX (f2))
896 {
897 warn_odr (t1, t2, f1, f2, warn, warned,
898 G_("virtual table layout differs in another "
899 "translation unit"));
900 return false;
901 }
902 if (odr_subtypes_equivalent_p (TREE_TYPE (f1), TREE_TYPE (f2), visited))
903 {
904 warn_odr (t1, t2, f1, f2, warn, warned,
905 G_("method with incompatible type is defined "
906 "in another translation unit"));
907 return false;
908 }
909 }
910 if (f1 || f2)
911 {
912 warn_odr (t1, t2, NULL, NULL, warn, warned,
913 G_("a type with different number of methods "
914 "is defined in another translation unit"));
915 return false;
916 }
917 }
918 gcc_assert (operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0));
919 gcc_assert (operand_equal_p (TYPE_SIZE_UNIT (t1),
920 TYPE_SIZE_UNIT (t2), 0));
921 }
922
923 return true;
924 }
925
926 default:
927 gcc_unreachable ();
928 }
929 }
930
931 /* TYPE is equivalent to VAL by ODR, but its tree representation differs
932 from VAL->type. This may happen in LTO where tree merging did not merge
933 all variants of the same type. It may or may not mean the ODR violation.
934 Add it to the list of duplicates and warn on some violations. */
935
936 static bool
937 add_type_duplicate (odr_type val, tree type)
938 {
939 bool build_bases = false;
940 if (!val->types_set)
941 val->types_set = pointer_set_create ();
942
943 /* Always prefer complete type to be the leader. */
944 if (!COMPLETE_TYPE_P (val->type)
945 && COMPLETE_TYPE_P (type))
946 {
947 tree tmp = type;
948
949 build_bases = true;
950 type = val->type;
951 val->type = tmp;
952 }
953
954 /* See if this duplicate is new. */
955 if (!pointer_set_insert (val->types_set, type))
956 {
957 bool merge = true;
958 bool base_mismatch = false;
959 unsigned int i,j;
960 bool warned = false;
961 pointer_set_t *visited = pointer_set_create ();
962
963 gcc_assert (in_lto_p);
964 vec_safe_push (val->types, type);
965
966 /* First we compare memory layout. */
967 if (!odr_types_equivalent_p (val->type, type, !flag_ltrans && !val->odr_violated,
968 &warned, visited))
969 {
970 merge = false;
971 odr_violation_reported = true;
972 val->odr_violated = true;
973 if (cgraph_dump_file)
974 {
975 fprintf (cgraph_dump_file, "ODR violation\n");
976
977 print_node (cgraph_dump_file, "", val->type, 0);
978 putc ('\n',cgraph_dump_file);
979 print_node (cgraph_dump_file, "", type, 0);
980 putc ('\n',cgraph_dump_file);
981 }
982 }
983 pointer_set_destroy (visited);
984
985 /* Next sanity check that bases are the same. If not, we will end
986 up producing wrong answers. */
987 if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
988 && TREE_CODE (val->type) == RECORD_TYPE
989 && TREE_CODE (type) == RECORD_TYPE
990 && TYPE_BINFO (val->type) && TYPE_BINFO (type))
991 {
992 for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
993 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type), i)))
994 {
995 odr_type base = get_odr_type
996 (BINFO_TYPE
997 (BINFO_BASE_BINFO (TYPE_BINFO (type),
998 i)),
999 true);
1000 if (val->bases.length () <= j || val->bases[j] != base)
1001 base_mismatch = true;
1002 j++;
1003 }
1004 if (base_mismatch)
1005 {
1006 merge = false;
1007 odr_violation_reported = true;
1008
1009 if (!warned && !val->odr_violated)
1010 warn_odr (type, val->type, NULL, NULL, !warned, &warned,
1011 "a type with the same name but different bases is "
1012 "defined in another translation unit");
1013 val->odr_violated = true;
1014 if (cgraph_dump_file)
1015 {
1016 fprintf (cgraph_dump_file, "ODR bse violation or merging bug?\n");
1017
1018 print_node (cgraph_dump_file, "", val->type, 0);
1019 putc ('\n',cgraph_dump_file);
1020 print_node (cgraph_dump_file, "", type, 0);
1021 putc ('\n',cgraph_dump_file);
1022 }
1023 }
1024 }
1025
1026 /* Regularize things a little. During LTO same types may come with
1027 different BINFOs. Either because their virtual table was
1028 not merged by tree merging and only later at decl merging or
1029 because one type comes with external vtable, while other
1030 with internal. We want to merge equivalent binfos to conserve
1031 memory and streaming overhead.
1032
1033 The external vtables are more harmful: they contain references
1034 to external declarations of methods that may be defined in the
1035 merged LTO unit. For this reason we absolutely need to remove
1036 them and replace by internal variants. Not doing so will lead
1037 to incomplete answers from possible_polymorphic_call_targets. */
1038 if (!flag_ltrans && merge
1039 && TREE_CODE (val->type) == RECORD_TYPE
1040 && TREE_CODE (type) == RECORD_TYPE
1041 && TYPE_BINFO (val->type) && TYPE_BINFO (type)
1042 && TYPE_MAIN_VARIANT (type) == type
1043 && TYPE_MAIN_VARIANT (val->type) == val->type
1044 && BINFO_VTABLE (TYPE_BINFO (val->type))
1045 && BINFO_VTABLE (TYPE_BINFO (type)))
1046 {
1047 tree master_binfo = TYPE_BINFO (val->type);
1048 tree v1 = BINFO_VTABLE (master_binfo);
1049 tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
1050
1051 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
1052 {
1053 gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
1054 && operand_equal_p (TREE_OPERAND (v1, 1),
1055 TREE_OPERAND (v2, 1), 0));
1056 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
1057 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
1058 }
1059 gcc_assert (DECL_ASSEMBLER_NAME (v1)
1060 == DECL_ASSEMBLER_NAME (v2));
1061
1062 if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
1063 {
1064 unsigned int i;
1065
1066 set_type_binfo (val->type, TYPE_BINFO (type));
1067 for (i = 0; i < val->types->length (); i++)
1068 {
1069 if (TYPE_BINFO ((*val->types)[i])
1070 == master_binfo)
1071 set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
1072 }
1073 BINFO_TYPE (TYPE_BINFO (type)) = val->type;
1074 }
1075 else
1076 set_type_binfo (type, master_binfo);
1077 }
1078 }
1079 return build_bases;
1080 }
1081
1082 /* Get ODR type hash entry for TYPE. If INSERT is true, create
1083 possibly new entry. */
1084
1085 odr_type
1086 get_odr_type (tree type, bool insert)
1087 {
1088 odr_type_d **slot;
1089 odr_type val;
1090 hashval_t hash;
1091 bool build_bases = false;
1092 bool insert_to_odr_array = false;
1093 int base_id = -1;
1094
1095 type = main_odr_variant (type);
1096
1097 hash = hash_type_name (type);
1098 slot
1099 = odr_hash->find_slot_with_hash (type, hash, insert ? INSERT : NO_INSERT);
1100 if (!slot)
1101 return NULL;
1102
1103 /* See if we already have entry for type. */
1104 if (*slot)
1105 {
1106 val = *slot;
1107
1108 /* With LTO we need to support multiple tree representation of
1109 the same ODR type. */
1110 if (val->type != type)
1111 build_bases = add_type_duplicate (val, type);
1112 }
1113 else
1114 {
1115 val = ggc_cleared_alloc<odr_type_d> ();
1116 val->type = type;
1117 val->bases = vNULL;
1118 val->derived_types = vNULL;
1119 val->anonymous_namespace = type_in_anonymous_namespace_p (type);
1120 build_bases = COMPLETE_TYPE_P (val->type);
1121 insert_to_odr_array = true;
1122 }
1123
1124 if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
1125 && type == TYPE_MAIN_VARIANT (type))
1126 {
1127 tree binfo = TYPE_BINFO (type);
1128 unsigned int i;
1129
1130 gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) = type);
1131
1132 val->all_derivations_known = type_all_derivations_known_p (type);
1133 *slot = val;
1134 for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
1135 /* For now record only polymorphic types. other are
1136 pointless for devirtualization and we can not precisely
1137 determine ODR equivalency of these during LTO. */
1138 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
1139 {
1140 odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
1141 i)),
1142 true);
1143 gcc_assert (TYPE_MAIN_VARIANT (base->type) == base->type);
1144 base->derived_types.safe_push (val);
1145 val->bases.safe_push (base);
1146 if (base->id > base_id)
1147 base_id = base->id;
1148 }
1149 }
1150 /* Ensure that type always appears after bases. */
1151 if (insert_to_odr_array)
1152 {
1153 if (odr_types_ptr)
1154 val->id = odr_types.length ();
1155 vec_safe_push (odr_types_ptr, val);
1156 }
1157 else if (base_id > val->id)
1158 {
1159 odr_types[val->id] = 0;
1160 /* Be sure we did not recorded any derived types; these may need
1161 renumbering too. */
1162 gcc_assert (val->derived_types.length() == 0);
1163 if (odr_types_ptr)
1164 val->id = odr_types.length ();
1165 vec_safe_push (odr_types_ptr, val);
1166 }
1167 return val;
1168 }
1169
1170 /* Dump ODR type T and all its derrived type. INDENT specify indentation for
1171 recusive printing. */
1172
1173 static void
1174 dump_odr_type (FILE *f, odr_type t, int indent=0)
1175 {
1176 unsigned int i;
1177 fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
1178 print_generic_expr (f, t->type, TDF_SLIM);
1179 fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
1180 fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
1181 if (TYPE_NAME (t->type))
1182 {
1183 fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
1184 DECL_SOURCE_FILE (TYPE_NAME (t->type)),
1185 DECL_SOURCE_LINE (TYPE_NAME (t->type)));
1186 }
1187 if (t->bases.length ())
1188 {
1189 fprintf (f, "%*s base odr type ids: ", indent * 2, "");
1190 for (i = 0; i < t->bases.length (); i++)
1191 fprintf (f, " %i", t->bases[i]->id);
1192 fprintf (f, "\n");
1193 }
1194 if (t->derived_types.length ())
1195 {
1196 fprintf (f, "%*s derived types:\n", indent * 2, "");
1197 for (i = 0; i < t->derived_types.length (); i++)
1198 dump_odr_type (f, t->derived_types[i], indent + 1);
1199 }
1200 fprintf (f, "\n");
1201 }
1202
1203 /* Dump the type inheritance graph. */
1204
1205 static void
1206 dump_type_inheritance_graph (FILE *f)
1207 {
1208 unsigned int i;
1209 if (!odr_types_ptr)
1210 return;
1211 fprintf (f, "\n\nType inheritance graph:\n");
1212 for (i = 0; i < odr_types.length (); i++)
1213 {
1214 if (odr_types[i] && odr_types[i]->bases.length () == 0)
1215 dump_odr_type (f, odr_types[i]);
1216 }
1217 for (i = 0; i < odr_types.length (); i++)
1218 {
1219 if (odr_types[i] && odr_types[i]->types && odr_types[i]->types->length ())
1220 {
1221 unsigned int j;
1222 fprintf (f, "Duplicate tree types for odr type %i\n", i);
1223 print_node (f, "", odr_types[i]->type, 0);
1224 for (j = 0; j < odr_types[i]->types->length (); j++)
1225 {
1226 tree t;
1227 fprintf (f, "duplicate #%i\n", j);
1228 print_node (f, "", (*odr_types[i]->types)[j], 0);
1229 t = (*odr_types[i]->types)[j];
1230 while (TYPE_P (t) && TYPE_CONTEXT (t))
1231 {
1232 t = TYPE_CONTEXT (t);
1233 print_node (f, "", t, 0);
1234 }
1235 putc ('\n',f);
1236 }
1237 }
1238 }
1239 }
1240
1241 /* Given method type T, return type of class it belongs to.
1242 Lookup this pointer and get its type. */
1243
1244 tree
1245 method_class_type (const_tree t)
1246 {
1247 tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
1248 gcc_assert (TREE_CODE (t) == METHOD_TYPE);
1249
1250 return TREE_TYPE (first_parm_type);
1251 }
1252
/* Initialize IPA devirt and build inheritance tree graph.
   Idempotent: does nothing when ODR_HASH already exists.  */

void
build_type_inheritance_graph (void)
{
  struct symtab_node *n;
  FILE *inheritance_dump_file;
  int flags;

  if (odr_hash)
    return;
  timevar_push (TV_IPA_INHERITANCE);
  inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
  odr_hash = new odr_hash_type (23);

  /* We reconstruct the graph starting from types of all methods seen in
     the unit.  */
  FOR_EACH_SYMBOL (n)
    if (is_a <cgraph_node *> (n)
	&& DECL_VIRTUAL_P (n->decl)
	&& n->real_symbol_p ())
      get_odr_type (TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (n->decl))),
		    true);

    /* Look also for virtual tables of types that do not define any methods.
 
       We need it in a case where class B has virtual base of class A
       re-defining its virtual method and there is class C with no virtual
       methods with B as virtual base.

       Here we output B's virtual method in two variant - for non-virtual
       and virtual inheritance.  B's virtual table has non-virtual version,
       while C's has virtual.

       For this reason we need to know about C in order to include both
       variants of B.  More correctly, record_target_from_binfo should
       add both variants of the method when walking B, but we have no
       link in between them.

       We rely on fact that either the method is exported and thus we
       assume it is called externally or C is in anonymous namespace and
       thus we will see the vtable.  */

    else if (is_a <varpool_node *> (n)
	     && DECL_VIRTUAL_P (n->decl)
	     && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
	     && TYPE_BINFO (DECL_CONTEXT (n->decl))
	     && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
      get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true);
  if (inheritance_dump_file)
    {
      dump_type_inheritance_graph (inheritance_dump_file);
      dump_end (TDI_inheritance, inheritance_dump_file);
    }
  timevar_pop (TV_IPA_INHERITANCE);
}
1309
1310 /* Return true if N has reference from live virtual table
1311 (and thus can be a destination of polymorphic call).
1312 Be conservatively correct when callgraph is not built or
1313 if the method may be referred externally. */
1314
1315 static bool
1316 referenced_from_vtable_p (struct cgraph_node *node)
1317 {
1318 int i;
1319 struct ipa_ref *ref;
1320 bool found = false;
1321
1322 if (node->externally_visible
1323 || node->used_from_other_partition)
1324 return true;
1325
1326 /* Keep this test constant time.
1327 It is unlikely this can happen except for the case where speculative
1328 devirtualization introduced many speculative edges to this node.
1329 In this case the target is very likely alive anyway. */
1330 if (node->ref_list.referring.length () > 100)
1331 return true;
1332
1333 /* We need references built. */
1334 if (cgraph_state <= CGRAPH_STATE_CONSTRUCTION)
1335 return true;
1336
1337 for (i = 0; node->iterate_referring (i, ref); i++)
1338
1339 if ((ref->use == IPA_REF_ALIAS
1340 && referenced_from_vtable_p (dyn_cast<cgraph_node *> (ref->referring)))
1341 || (ref->use == IPA_REF_ADDR
1342 && TREE_CODE (ref->referring->decl) == VAR_DECL
1343 && DECL_VIRTUAL_P (ref->referring->decl)))
1344 {
1345 found = true;
1346 break;
1347 }
1348 return found;
1349 }
1350
/* If TARGET has associated node, record it in the NODES array.
   CAN_REFER specifies if program can refer to the target directly.
   If TARGET is unknown (NULL) or it can not be inserted (for example because
   its body was already removed and there is no way to refer to it), clear
   COMPLETEP.  */

static void
maybe_record_node (vec <cgraph_node *> &nodes,
		   tree target, pointer_set_t *inserted,
		   bool can_refer,
		   bool *completep)
{
  struct cgraph_node *target_node, *alias_target;
  enum availability avail;

  /* cxa_pure_virtual and __builtin_unreachable do not need to be added into
     list of targets; the runtime effect of calling them is undefined.
     Only "real" virtual methods should be accounted.  */
  if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE)
    return;

  if (!can_refer)
    {
      /* The only case when method of anonymous namespace becomes unreferable
	 is when we completely optimized it out.  */
      if (flag_ltrans
	  || !target
	  || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
	*completep = false;
      return;
    }

  if (!target)
    return;

  target_node = cgraph_node::get (target);

  /* Prefer the ultimate alias target over aliases, so we do not get
     confused by fake duplicates.  */
  if (target_node)
    {
      alias_target = target_node->ultimate_alias_target (&avail);
      /* NOTE(review): the third conjunct tests get_availability () for
	 mere non-zero-ness, which looks suspicious given AVAIL is already
	 checked above — confirm this is intended.  */
      if (target_node != alias_target
	  && avail >= AVAIL_AVAILABLE
	  && target_node->get_availability ())
	target_node = alias_target;
    }

  /* Method can only be called by polymorphic call if any
     of vtables referring to it are alive.

     While this holds for non-anonymous functions, too, there are
     cases where we want to keep them in the list; for example
     inline functions with -fno-weak are static, but we still
     may devirtualize them when instance comes from other unit.
     The same holds for LTO.

     Currently we ignore these functions in speculative devirtualization.
     ??? Maybe it would make sense to be more aggressive for LTO even
     elsewhere.  */
  if (!flag_ltrans
      && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
      && (!target_node
          || !referenced_from_vtable_p (target_node)))
    ;
  /* See if TARGET is useful function we can deal with.  */
  else if (target_node != NULL
	   && (TREE_PUBLIC (target)
	       || DECL_EXTERNAL (target)
	       || target_node->definition)
	   && target_node->real_symbol_p ())
    {
      gcc_assert (!target_node->global.inlined_to);
      gcc_assert (target_node->real_symbol_p ());
      /* Record the node only once; INSERTED de-duplicates by decl.  */
      if (!pointer_set_insert (inserted, target_node->decl))
	{
	  pointer_set_insert (cached_polymorphic_call_targets,
			      target_node);
	  nodes.safe_push (target_node);
	}
    }
  /* Target is not usable; unless it is in anonymous namespace (and thus
     invisible to other units) the enumeration is incomplete.  */
  else if (completep
	   && (!type_in_anonymous_namespace_p
		 (DECL_CONTEXT (target))
	       || flag_ltrans))
    *completep = false;
}
1438
/* See if BINFO's type matches OUTER_TYPE.  If so, look up
   BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
   method in vtable and insert method to NODES array
   or BASES_TO_CONSIDER if this array is non-NULL.
   Otherwise recurse to base BINFOs.
   This matches what get_binfo_at_offset does, but with offset
   being unknown.

   TYPE_BINFOS is a stack of BINFOS of types with defined
   virtual table seen on way from class type to BINFO.

   MATCHED_VTABLES tracks virtual tables we already did lookup
   for virtual function in.  INSERTED tracks nodes we already
   inserted.

   ANONYMOUS is true if BINFO is part of anonymous namespace.

   Clear COMPLETEP when we hit unreferable target.
 */

static void
record_target_from_binfo (vec <cgraph_node *> &nodes,
			  vec <tree> *bases_to_consider,
			  tree binfo,
			  tree otr_type,
			  vec <tree> &type_binfos,
			  HOST_WIDE_INT otr_token,
			  tree outer_type,
			  HOST_WIDE_INT offset,
			  pointer_set_t *inserted,
			  pointer_set_t *matched_vtables,
			  bool anonymous,
			  bool *completep)
{
  tree type = BINFO_TYPE (binfo);
  int i;
  tree base_binfo;


  /* Maintain the stack of BINFOs carrying a vtable; popped again before
     every return path below and after the recursion.  */
  if (BINFO_VTABLE (binfo))
    type_binfos.safe_push (binfo);
  if (types_same_for_odr (type, outer_type))
    {
      int i;
      tree type_binfo = NULL;

      /* Look up BINFO with virtual table.  For normal types it is always last
         binfo on stack.  */
      for (i = type_binfos.length () - 1; i >= 0; i--)
	if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
	  {
	    type_binfo = type_binfos[i];
	    break;
	  }
      if (BINFO_VTABLE (binfo))
	type_binfos.pop ();
      /* If this is duplicated BINFO for base shared by virtual inheritance,
	 we may not have its associated vtable.  This is not a problem, since
	 we will walk it on the other path.  */
      if (!type_binfo)
	return;
      tree inner_binfo = get_binfo_at_offset (type_binfo,
					      offset, otr_type);
      if (!inner_binfo)
	{
	  /* Lookup can only fail when the program violated the ODR; that
	     must have been diagnosed already.  */
	  gcc_assert (odr_violation_reported);
	  return;
	}
      /* For types in anonymous namespace first check if the respective vtable
	 is alive.  If not, we know the type can't be called.  */
      if (!flag_ltrans && anonymous)
	{
	  tree vtable = BINFO_VTABLE (inner_binfo);
	  varpool_node *vnode;

	  if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
	    vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
	  vnode = varpool_node::get (vtable);
	  if (!vnode || !vnode->definition)
	    return;
	}
      gcc_assert (inner_binfo);
      /* Only do the (relatively expensive) vtable lookup once per vtable;
	 when collecting BASES_TO_CONSIDER the set is probed but not
	 updated, since the caller revisits these bases later.  */
      if (bases_to_consider
	  ? !pointer_set_contains (matched_vtables, BINFO_VTABLE (inner_binfo))
	  : !pointer_set_insert (matched_vtables, BINFO_VTABLE (inner_binfo)))
	{
	  bool can_refer;
	  tree target = gimple_get_virt_method_for_binfo (otr_token,
							  inner_binfo,
							  &can_refer);
	  if (!bases_to_consider)
	    maybe_record_node (nodes, target, inserted, can_refer, completep);
	  /* Destructors are never called via construction vtables.  */
	  else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
	    bases_to_consider->safe_push (target);
	}
      return;
    }

  /* Walk bases.  */
  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    /* Walking bases that have no virtual method is pointless exercise.  */
    if (polymorphic_type_binfo_p (base_binfo))
      record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
				type_binfos,
				otr_token, outer_type, offset, inserted,
				matched_vtables, anonymous, completep);
  if (BINFO_VTABLE (binfo))
    type_binfos.pop ();
}
1549
/* Lookup virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
   of TYPE, insert them to NODES, recurse into derived nodes.
   INSERTED is used to avoid duplicate insertions of methods into NODES.
   MATCHED_VTABLES are used to avoid duplicate walking vtables.
   Clear COMPLETEP if unreferable target is found.

   If CONSIDER_CONSTRUCTION is true, record to BASES_TO_CONSIDER
   all cases where BASE_SKIPPED is true (because the base is abstract
   class).  */

static void
possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
				     pointer_set_t *inserted,
				     pointer_set_t *matched_vtables,
				     tree otr_type,
				     odr_type type,
				     HOST_WIDE_INT otr_token,
				     tree outer_type,
				     HOST_WIDE_INT offset,
				     bool *completep,
				     vec <tree> &bases_to_consider,
				     bool consider_construction)
{
  tree binfo = TYPE_BINFO (type->type);
  unsigned int i;
  /* Fresh BINFO stack for record_target_from_binfo; released below.  */
  vec <tree> type_binfos = vNULL;
  bool possibly_instantiated = type_possibly_instantiated_p (type->type);

  /* We may need to consider types w/o instances because of possible derived
     types using their methods either directly or via construction vtables.
     We are safe to skip them when all derivations are known, since we will
     handle them later.
     This is done by recording them to BASES_TO_CONSIDER array.  */
  if (possibly_instantiated || consider_construction)
    {
      record_target_from_binfo (nodes,
				(!possibly_instantiated
				 && type_all_derivations_known_p (type->type))
				? &bases_to_consider : NULL,
				binfo, otr_type, type_binfos, otr_token,
				outer_type, offset,
				inserted, matched_vtables,
				type->anonymous_namespace, completep);
    }
  type_binfos.release ();
  /* Recurse into every type derived from TYPE.  */
  for (i = 0; i < type->derived_types.length (); i++)
    possible_polymorphic_call_targets_1 (nodes, inserted,
					 matched_vtables,
					 otr_type,
					 type->derived_types[i],
					 otr_token, outer_type, offset, completep,
					 bases_to_consider, consider_construction);
}
1603
/* Cache of queries for polymorphic call targets.

   Enumerating all call targets may get expensive when there are many
   polymorphic calls in the program, so we memoize all the previous
   queries and avoid duplicated work.  */

struct polymorphic_call_target_d
{
  /* Token identifying the called method within the vtable of TYPE
     (see the OTR description at the top of the file).  */
  HOST_WIDE_INT otr_token;
  /* Context of the call (outer type, offset, maybe_* flags).  */
  ipa_polymorphic_call_context context;
  /* ODR type of the class whose method is called.  */
  odr_type type;
  /* Enumerated possible targets of the call.  */
  vec <cgraph_node *> targets;
  /* Count of targets not coming from construction vtables — presumably;
     confirm against the code that fills this cache.  */
  int nonconstruction_targets;
  /* False when some possible target could not be enumerated
     (see maybe_record_node).  */
  bool complete;
};
1619
/* Polymorphic call target cache helpers.
   Hasher traits for the hash_table holding cache entries; entries are
   keyed by (type, otr_token, context) and freed on removal.  */

struct polymorphic_call_target_hasher
{
  typedef polymorphic_call_target_d value_type;
  typedef polymorphic_call_target_d compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};
1630
1631 /* Return the computed hashcode for ODR_QUERY. */
1632
1633 inline hashval_t
1634 polymorphic_call_target_hasher::hash (const value_type *odr_query)
1635 {
1636 hashval_t hash;
1637
1638 hash = iterative_hash_host_wide_int
1639 (odr_query->otr_token,
1640 odr_query->type->id);
1641 hash = iterative_hash_hashval_t (TYPE_UID (odr_query->context.outer_type),
1642 hash);
1643 hash = iterative_hash_host_wide_int (odr_query->context.offset, hash);
1644 return iterative_hash_hashval_t
1645 (((int)odr_query->context.maybe_in_construction << 1)
1646 | (int)odr_query->context.maybe_derived_type, hash);
1647 }
1648
1649 /* Compare cache entries T1 and T2. */
1650
1651 inline bool
1652 polymorphic_call_target_hasher::equal (const value_type *t1,
1653 const compare_type *t2)
1654 {
1655 return (t1->type == t2->type && t1->otr_token == t2->otr_token
1656 && t1->context.offset == t2->context.offset
1657 && t1->context.outer_type == t2->context.outer_type
1658 && t1->context.maybe_in_construction
1659 == t2->context.maybe_in_construction
1660 && t1->context.maybe_derived_type == t2->context.maybe_derived_type);
1661 }
1662
/* Remove entry in polymorphic call target cache hash.
   Releases the target vector and frees the malloced entry itself.  */

inline void
polymorphic_call_target_hasher::remove (value_type *v)
{
  v->targets.release ();
  free (v);
}
1671
1672 /* Polymorphic call target query cache. */
1673
1674 typedef hash_table<polymorphic_call_target_hasher>
1675 polymorphic_call_target_hash_type;
1676 static polymorphic_call_target_hash_type *polymorphic_call_target_hash;
1677
1678 /* Destroy polymorphic call target query cache. */
1679
1680 static void
1681 free_polymorphic_call_targets_hash ()
1682 {
1683 if (cached_polymorphic_call_targets)
1684 {
1685 delete polymorphic_call_target_hash;
1686 polymorphic_call_target_hash = NULL;
1687 pointer_set_destroy (cached_polymorphic_call_targets);
1688 cached_polymorphic_call_targets = NULL;
1689 }
1690 }
1691
1692 /* When virtual function is removed, we may need to flush the cache. */
1693
1694 static void
1695 devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
1696 {
1697 if (cached_polymorphic_call_targets
1698 && pointer_set_contains (cached_polymorphic_call_targets, n))
1699 free_polymorphic_call_targets_hash ();
1700 }
1701
1702 /* Return true when TYPE contains an polymorphic type and thus is interesting
1703 for devirtualization machinery. */
1704
1705 bool
1706 contains_polymorphic_type_p (const_tree type)
1707 {
1708 type = TYPE_MAIN_VARIANT (type);
1709
1710 if (RECORD_OR_UNION_TYPE_P (type))
1711 {
1712 if (TYPE_BINFO (type)
1713 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
1714 return true;
1715 for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1716 if (TREE_CODE (fld) == FIELD_DECL
1717 && !DECL_ARTIFICIAL (fld)
1718 && contains_polymorphic_type_p (TREE_TYPE (fld)))
1719 return true;
1720 return false;
1721 }
1722 if (TREE_CODE (type) == ARRAY_TYPE)
1723 return contains_polymorphic_type_p (TREE_TYPE (type));
1724 return false;
1725 }
1726
1727 /* CONTEXT->OUTER_TYPE is a type of memory object where object of EXPECTED_TYPE
1728 is contained at CONTEXT->OFFSET. Walk the memory representation of
1729 CONTEXT->OUTER_TYPE and find the outermost class type that match
1730 EXPECTED_TYPE or contain EXPECTED_TYPE as a base. Update CONTEXT
1731 to represent it.
1732
1733 For example when CONTEXT represents type
1734 class A
1735 {
1736 int a;
1737 class B b;
1738 }
1739 and we look for type at offset sizeof(int), we end up with B and offset 0.
1740 If the same is produced by multiple inheritance, we end up with A and offset
1741 sizeof(int).
1742
1743 If we can not find corresponding class, give up by setting
1744 CONTEXT->OUTER_TYPE to EXPECTED_TYPE and CONTEXT->OFFSET to NULL.
1745 Return true when lookup was sucesful. */
1746
1747 static bool
1748 get_class_context (ipa_polymorphic_call_context *context,
1749 tree expected_type)
1750 {
1751 tree type = context->outer_type;
1752 HOST_WIDE_INT offset = context->offset;
1753
1754 /* Find the sub-object the constant actually refers to and mark whether it is
1755 an artificial one (as opposed to a user-defined one). */
1756 while (true)
1757 {
1758 HOST_WIDE_INT pos, size;
1759 tree fld;
1760
1761 /* On a match, just return what we found. */
1762 if (TREE_CODE (type) == TREE_CODE (expected_type)
1763 && types_same_for_odr (type, expected_type))
1764 {
1765 /* Type can not contain itself on an non-zero offset. In that case
1766 just give up. */
1767 if (offset != 0)
1768 goto give_up;
1769 gcc_assert (offset == 0);
1770 return true;
1771 }
1772
1773 /* Walk fields and find corresponding on at OFFSET. */
1774 if (TREE_CODE (type) == RECORD_TYPE)
1775 {
1776 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1777 {
1778 if (TREE_CODE (fld) != FIELD_DECL)
1779 continue;
1780
1781 pos = int_bit_position (fld);
1782 size = tree_to_uhwi (DECL_SIZE (fld));
1783 if (pos <= offset && (pos + size) > offset)
1784 break;
1785 }
1786
1787 if (!fld)
1788 goto give_up;
1789
1790 type = TYPE_MAIN_VARIANT (TREE_TYPE (fld));
1791 offset -= pos;
1792 /* DECL_ARTIFICIAL represents a basetype. */
1793 if (!DECL_ARTIFICIAL (fld))
1794 {
1795 context->outer_type = type;
1796 context->offset = offset;
1797 /* As soon as we se an field containing the type,
1798 we know we are not looking for derivations. */
1799 context->maybe_derived_type = false;
1800 }
1801 }
1802 else if (TREE_CODE (type) == ARRAY_TYPE)
1803 {
1804 tree subtype = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1805
1806 /* Give up if we don't know array size. */
1807 if (!tree_fits_shwi_p (TYPE_SIZE (subtype))
1808 || !tree_to_shwi (TYPE_SIZE (subtype)) <= 0)
1809 goto give_up;
1810 offset = offset % tree_to_shwi (TYPE_SIZE (subtype));
1811 type = subtype;
1812 context->outer_type = type;
1813 context->offset = offset;
1814 context->maybe_derived_type = false;
1815 }
1816 /* Give up on anything else. */
1817 else
1818 goto give_up;
1819 }
1820
1821 /* If we failed to find subtype we look for, give up and fall back to the
1822 most generic query. */
1823 give_up:
1824 context->outer_type = expected_type;
1825 context->offset = 0;
1826 context->maybe_derived_type = true;
1827 context->maybe_in_construction = true;
1828 /* POD can be changed to an instance of a polymorphic type by
1829 placement new. Here we play safe and assume that any
1830 non-polymorphic type is POD. */
1831 if ((TREE_CODE (type) != RECORD_TYPE
1832 || !TYPE_BINFO (type)
1833 || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
1834 && (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1835 || (offset + tree_to_uhwi (TYPE_SIZE (expected_type)) <=
1836 tree_to_uhwi (TYPE_SIZE (type)))))
1837 return true;
1838 return false;
1839 }
1840
/* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET.
   Builds a throwaway context (offset, outer type, maybe_* flags set
   positionally) and delegates the walk to get_class_context.  */

static bool
contains_type_p (tree outer_type, HOST_WIDE_INT offset,
		 tree otr_type)
{
  ipa_polymorphic_call_context context = {offset,
					  TYPE_MAIN_VARIANT (outer_type),
					  false, true};
  return get_class_context (&context, otr_type);
}
1852
1853 /* Lookup base of BINFO that has virtual table VTABLE with OFFSET. */
1854
1855 static tree
1856 subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
1857 tree vtable)
1858 {
1859 tree v = BINFO_VTABLE (binfo);
1860 int i;
1861 tree base_binfo;
1862 unsigned HOST_WIDE_INT this_offset;
1863
1864 if (v)
1865 {
1866 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
1867 gcc_unreachable ();
1868
1869 if (offset == this_offset
1870 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
1871 return binfo;
1872 }
1873
1874 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1875 if (polymorphic_type_binfo_p (base_binfo))
1876 {
1877 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
1878 if (base_binfo)
1879 return base_binfo;
1880 }
1881 return NULL;
1882 }
1883
/* T is known constant value of virtual table pointer.
   Store virtual table to V and its offset to OFFSET.
   Return false if T does not look like virtual table reference.  */

bool
vtable_pointer_value_to_vtable (const_tree t, tree *v,
				unsigned HOST_WIDE_INT *offset)
{
  /* We expect &MEM[(void *)&virtual_table + 16B].
     We obtain object's BINFO from the context of the virtual table.
     This one contains pointer to virtual table represented via
     POINTER_PLUS_EXPR.  Verify that this pointer match to what
     we propagated through.

     In the case of virtual inheritance, the virtual tables may
     be nested, i.e. the offset may be different from 16 and we may
     need to dive into the type representation.  */
  if (TREE_CODE (t) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
	  == VAR_DECL)
      && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
					 (TREE_OPERAND (t, 0), 0), 0)))
    {
      /* Matched &MEM[&vtable + cst]: extract the decl and the offset.  */
      *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
      *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
      return true;
    }

  /* Alternative representation, used by C++ frontend is POINTER_PLUS_EXPR.
     We need to handle it when T comes from static variable initializer or
     BINFO.  */
  if (TREE_CODE (t) == POINTER_PLUS_EXPR)
    {
      *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
      t = TREE_OPERAND (t, 0);
    }
  else
    *offset = 0;

  /* What remains must be the address of the vtable decl itself.  */
  if (TREE_CODE (t) != ADDR_EXPR)
    return false;
  *v = TREE_OPERAND (t, 0);
  return true;
}
1931
1932 /* T is known constant value of virtual table pointer. Return BINFO of the
1933 instance type. */
1934
1935 tree
1936 vtable_pointer_value_to_binfo (const_tree t)
1937 {
1938 tree vtable;
1939 unsigned HOST_WIDE_INT offset;
1940
1941 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
1942 return NULL_TREE;
1943
1944 /* FIXME: for stores of construction vtables we return NULL,
1945 because we do not have BINFO for those. Eventually we should fix
1946 our representation to allow this case to be handled, too.
1947 In the case we see store of BINFO we however may assume
1948 that standard folding will be ale to cope with it. */
1949 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
1950 offset, vtable);
1951 }
1952
1953 /* We know that the instance is stored in variable or parameter
1954 (not dynamically allocated) and we want to disprove the fact
1955 that it may be in construction at invocation of CALL.
1956
1957 For the variable to be in construction we actually need to
1958 be in constructor of corresponding global variable or
1959 the inline stack of CALL must contain the constructor.
1960 Check this condition. This check works safely only before
1961 IPA passes, because inline stacks may become out of date
1962 later. */
1963
1964 bool
1965 decl_maybe_in_construction_p (tree base, tree outer_type,
1966 gimple call, tree function)
1967 {
1968 outer_type = TYPE_MAIN_VARIANT (outer_type);
1969 gcc_assert (DECL_P (base));
1970
1971 /* After inlining the code unification optimizations may invalidate
1972 inline stacks. Also we need to give up on global variables after
1973 IPA, because addresses of these may have been propagated to their
1974 constructors. */
1975 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
1976 return true;
1977
1978 /* Pure functions can not do any changes on the dynamic type;
1979 that require writting to memory. */
1980 if (!auto_var_in_fn_p (base, function)
1981 && flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
1982 return false;
1983
1984 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
1985 block = BLOCK_SUPERCONTEXT (block))
1986 if (BLOCK_ABSTRACT_ORIGIN (block)
1987 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
1988 {
1989 tree fn = BLOCK_ABSTRACT_ORIGIN (block);
1990
1991 if (TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
1992 || (!DECL_CXX_CONSTRUCTOR_P (fn)
1993 || !DECL_CXX_DESTRUCTOR_P (fn)))
1994 {
1995 /* Watch for clones where we constant propagated the first
1996 argument (pointer to the instance). */
1997 fn = DECL_ABSTRACT_ORIGIN (fn);
1998 if (!fn
1999 || !is_global_var (base)
2000 || TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
2001 || (!DECL_CXX_CONSTRUCTOR_P (fn)
2002 || !DECL_CXX_DESTRUCTOR_P (fn)))
2003 continue;
2004 }
2005 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
2006 continue;
2007
2008 /* FIXME: this can go away once we have ODR types equivalency on
2009 LTO level. */
2010 if (in_lto_p && !polymorphic_type_binfo_p (TYPE_BINFO (outer_type)))
2011 return true;
2012 tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (fn)));
2013 if (types_same_for_odr (type, outer_type))
2014 return true;
2015 }
2016
2017 if (TREE_CODE (base) == VAR_DECL
2018 && is_global_var (base))
2019 {
2020 if (TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
2021 || (!DECL_CXX_CONSTRUCTOR_P (function)
2022 || !DECL_CXX_DESTRUCTOR_P (function)))
2023 {
2024 if (!DECL_ABSTRACT_ORIGIN (function))
2025 return false;
2026 /* Watch for clones where we constant propagated the first
2027 argument (pointer to the instance). */
2028 function = DECL_ABSTRACT_ORIGIN (function);
2029 if (!function
2030 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
2031 || (!DECL_CXX_CONSTRUCTOR_P (function)
2032 || !DECL_CXX_DESTRUCTOR_P (function)))
2033 return false;
2034 }
2035 /* FIXME: this can go away once we have ODR types equivalency on
2036 LTO level. */
2037 if (in_lto_p && !polymorphic_type_binfo_p (TYPE_BINFO (outer_type)))
2038 return true;
2039 tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (function)));
2040 if (types_same_for_odr (type, outer_type))
2041 return true;
2042 }
2043 return false;
2044 }
2045
2046 /* Proudce polymorphic call context for call method of instance
2047 that is located within BASE (that is assumed to be a decl) at OFFSET. */
2048
2049 static void
2050 get_polymorphic_call_info_for_decl (ipa_polymorphic_call_context *context,
2051 tree base, HOST_WIDE_INT offset)
2052 {
2053 gcc_assert (DECL_P (base));
2054
2055 context->outer_type = TYPE_MAIN_VARIANT (TREE_TYPE (base));
2056 context->offset = offset;
2057 /* Make very conservative assumption that all objects
2058 may be in construction.
2059 TODO: ipa-prop already contains code to tell better.
2060 merge it later. */
2061 context->maybe_in_construction = true;
2062 context->maybe_derived_type = false;
2063 }
2064
2065 /* CST is an invariant (address of decl), try to get meaningful
2066 polymorphic call context for polymorphic call of method
2067 if instance of OTR_TYPE that is located at OFFSET of this invariant.
2068 Return FALSE if nothing meaningful can be found. */
2069
2070 bool
2071 get_polymorphic_call_info_from_invariant (ipa_polymorphic_call_context *context,
2072 tree cst,
2073 tree otr_type,
2074 HOST_WIDE_INT offset)
2075 {
2076 HOST_WIDE_INT offset2, size, max_size;
2077 tree base;
2078
2079 if (TREE_CODE (cst) != ADDR_EXPR)
2080 return false;
2081
2082 cst = TREE_OPERAND (cst, 0);
2083 base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
2084 if (!DECL_P (base) || max_size == -1 || max_size != size)
2085 return false;
2086
2087 /* Only type inconsistent programs can have otr_type that is
2088 not part of outer type. */
2089 if (!contains_type_p (TREE_TYPE (base), offset, otr_type))
2090 return false;
2091
2092 get_polymorphic_call_info_for_decl (context, base, offset);
2093 return true;
2094 }
2095
/* Given REF call in FNDECL, determine class of the polymorphic
   call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
   CALL is optional argument giving the actual statement (usually call) where
   the context is used.
   Return pointer to object described by the context.  */

tree
get_polymorphic_call_info (tree fndecl,
			   tree ref,
			   tree *otr_type,
			   HOST_WIDE_INT *otr_token,
			   ipa_polymorphic_call_context *context,
			   gimple call)
{
  tree base_pointer;
  *otr_type = obj_type_ref_class (ref);
  *otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));

  /* Set up basic info in case we find nothing interesting in the analysis.  */
  context->outer_type = TYPE_MAIN_VARIANT (*otr_type);
  context->offset = 0;
  base_pointer = OBJ_TYPE_REF_OBJECT (ref);
  context->maybe_derived_type = true;
  context->maybe_in_construction = true;

  /* Walk SSA for outer object.  Follow single-rhs assignments, address
     expressions and pointer arithmetic backwards, accumulating the bit
     offset into CONTEXT->offset as we go.  */
  do
    {
      if (TREE_CODE (base_pointer) == SSA_NAME
	  && !SSA_NAME_IS_DEFAULT_DEF (base_pointer)
	  && SSA_NAME_DEF_STMT (base_pointer)
	  && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
	{
	  /* Copy/conversion: step to the defining statement's RHS.  */
	  base_pointer = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (base_pointer));
	  STRIP_NOPS (base_pointer);
	}
      else if (TREE_CODE (base_pointer) == ADDR_EXPR)
	{
	  HOST_WIDE_INT size, max_size;
	  HOST_WIDE_INT offset2;
	  tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
					       &offset2, &size, &max_size);

	  /* If this is a varying address, punt.  */
	  if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
	      && max_size != -1
	      && max_size == size)
	    {
	      /* We found dereference of a pointer.  Type of the pointer
		 and MEM_REF is meaningless, but we can look further.  */
	      if (TREE_CODE (base) == MEM_REF)
		{
		  base_pointer = TREE_OPERAND (base, 0);
		  context->offset
		    += offset2 + mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
		  /* The type recorded so far described the MEM_REF; it is
		     no longer meaningful.  */
		  context->outer_type = NULL;
		}
	      /* We found base object.  In this case the outer_type
		 is known.  */
	      else if (DECL_P (base))
		{
		  gcc_assert (!POINTER_TYPE_P (TREE_TYPE (base)));

		  /* Only type inconsistent programs can have otr_type that is
		     not part of outer type.  */
		  if (!contains_type_p (TREE_TYPE (base),
					context->offset + offset2, *otr_type))
		    {
		      /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
			 code sequences; we arrange the calls to be builtin_unreachable
			 later.  */
		      *otr_token = INT_MAX;
		      return base_pointer;
		    }
		  get_polymorphic_call_info_for_decl (context, base,
						      context->offset + offset2);
		  /* Since the instance lives in a known decl, try to prove
		     it is not in construction at CALL.  */
		  if (context->maybe_in_construction && call)
		    context->maybe_in_construction
		     = decl_maybe_in_construction_p (base,
						     context->outer_type,
						     call,
						     current_function_decl);
		  return NULL;
		}
	      else
		break;
	    }
	  else
	    break;
	}
      else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
	       && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
	{
	  /* NOTE(review): the guard uses tree_fits_uhwi_p but the value is
	     read with tree_to_shwi; an offset fitting unsigned but not
	     signed HOST_WIDE_INT would misbehave — confirm whether
	     tree_fits_shwi_p is the intended guard.  */
	  context->offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
		      * BITS_PER_UNIT;
	  base_pointer = TREE_OPERAND (base_pointer, 0);
	}
      else
	break;
    }
  while (true);

  /* Try to determine type of the outer object.  */
  if (TREE_CODE (base_pointer) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
      && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
    {
      /* See if parameter is THIS pointer of a method.  */
      if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
	  && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
	{
	  context->outer_type
	     = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
	  gcc_assert (TREE_CODE (context->outer_type) == RECORD_TYPE);

	  /* Dynamic casting has possibly upcasted the type
	     in the hierarchy.  In this case outer type is less
	     informative than inner type and we should forget
	     about it.  */
	  if (!contains_type_p (context->outer_type, context->offset,
				*otr_type))
	    {
	      context->outer_type = NULL;
	      return base_pointer;
	    }

	  /* If the function is constructor or destructor, then
	     the type is possibly in construction, but we know
	     it is not derived type.  */
	  if (DECL_CXX_CONSTRUCTOR_P (fndecl)
	      || DECL_CXX_DESTRUCTOR_P (fndecl))
	    {
	      context->maybe_in_construction = true;
	      context->maybe_derived_type = false;
	    }
	  else
	    {
	      context->maybe_derived_type = true;
	      context->maybe_in_construction = false;
	    }
	  return base_pointer;
	}
      /* Non-PODs passed by value are really passed by invisible
	 reference.  In this case we also know the type of the
	 object.  */
      if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
	{
	  context->outer_type
	     = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
	  gcc_assert (!POINTER_TYPE_P (context->outer_type));
	  /* Only type inconsistent programs can have otr_type that is
	     not part of outer type.  */
	  if (!contains_type_p (context->outer_type, context->offset,
				*otr_type))
	    {
	      /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
		 code sequences; we arrange the calls to be builtin_unreachable
		 later.  */
	      *otr_token = INT_MAX;
	      return base_pointer;
	    }
	  /* By-reference aggregates can be neither derived nor in
	     construction here: the callee sees the exact object.  */
	  context->maybe_derived_type = false;
	  context->maybe_in_construction = false;
	  return base_pointer;
	}
    }
  /* TODO: There are multiple ways to derive a type.  For instance
     if BASE_POINTER is passed to a constructor call prior our reference.
     We do not make this type of flow sensitive analysis yet.  */
  return base_pointer;
}
2267
2268 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
2269 Lookup their respecitve virtual methods for OTR_TOKEN and OTR_TYPE
2270 and insert them to NODES.
2271
2272 MATCHED_VTABLES and INSERTED is used to avoid duplicated work. */
2273
2274 static void
2275 record_targets_from_bases (tree otr_type,
2276 HOST_WIDE_INT otr_token,
2277 tree outer_type,
2278 HOST_WIDE_INT offset,
2279 vec <cgraph_node *> &nodes,
2280 pointer_set_t *inserted,
2281 pointer_set_t *matched_vtables,
2282 bool *completep)
2283 {
2284 while (true)
2285 {
2286 HOST_WIDE_INT pos, size;
2287 tree base_binfo;
2288 tree fld;
2289
2290 if (types_same_for_odr (outer_type, otr_type))
2291 return;
2292
2293 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
2294 {
2295 if (TREE_CODE (fld) != FIELD_DECL)
2296 continue;
2297
2298 pos = int_bit_position (fld);
2299 size = tree_to_shwi (DECL_SIZE (fld));
2300 if (pos <= offset && (pos + size) > offset
2301 /* Do not get confused by zero sized bases. */
2302 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
2303 break;
2304 }
2305 /* Within a class type we should always find correcponding fields. */
2306 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
2307
2308 /* Nonbasetypes should have been stripped by outer_class_type. */
2309 gcc_assert (DECL_ARTIFICIAL (fld));
2310
2311 outer_type = TREE_TYPE (fld);
2312 offset -= pos;
2313
2314 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
2315 offset, otr_type);
2316 if (!base_binfo)
2317 {
2318 gcc_assert (odr_violation_reported);
2319 return;
2320 }
2321 gcc_assert (base_binfo);
2322 if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo)))
2323 {
2324 bool can_refer;
2325 tree target = gimple_get_virt_method_for_binfo (otr_token,
2326 base_binfo,
2327 &can_refer);
2328 if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
2329 maybe_record_node (nodes, target, inserted, can_refer, completep);
2330 pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo));
2331 }
2332 }
2333 }
2334
2335 /* When virtual table is removed, we may need to flush the cache. */
2336
2337 static void
2338 devirt_variable_node_removal_hook (varpool_node *n,
2339 void *d ATTRIBUTE_UNUSED)
2340 {
2341 if (cached_polymorphic_call_targets
2342 && DECL_VIRTUAL_P (n->decl)
2343 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
2344 free_polymorphic_call_targets_hash ();
2345 }
2346
/* Return vector containing possible targets of polymorphic call of type
   OTR_TYPE calling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
   If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containing
   OTR_TYPE and include their virtual method.  This is useful for types
   possibly in construction or destruction where the virtual table may
   temporarily change to one of base types.  INCLUDE_DERIVED_TYPES makes
   us to walk the inheritance graph for all derivations.

   OTR_TOKEN == INT_MAX is used to mark calls that are provably
   undefined and should be redirected to unreachable.

   If COMPLETEP is non-NULL, store true if the list is complete.
   CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
   in the target cache.  If user needs to visit every target list
   just once, it can memoize them.

   NONCONSTRUCTION_TARGETS specify number of targets with assumption that
   the type is not in the construction.  Those targets appear first in the
   vector returned.

   Returned vector is placed into cache.  It is NOT caller's responsibility
   to free it.  The vector can be freed on cgraph_remove_node call if
   the particular node is a virtual function present in the cache.  */

vec <cgraph_node *>
possible_polymorphic_call_targets (tree otr_type,
			           HOST_WIDE_INT otr_token,
				   ipa_polymorphic_call_context context,
			           bool *completep,
			           void **cache_token,
				   int *nonconstruction_targetsp)
{
  static struct cgraph_node_hook_list *node_removal_hook_holder;
  pointer_set_t *inserted;
  pointer_set_t *matched_vtables;
  vec <cgraph_node *> nodes = vNULL;
  vec <tree> bases_to_consider = vNULL;
  odr_type type, outer_type;
  polymorphic_call_target_d key;
  polymorphic_call_target_d **slot;
  unsigned int i;
  tree binfo, target;
  bool complete;
  bool can_refer;
  bool skipped = false;

  otr_type = TYPE_MAIN_VARIANT (otr_type);

  /* If ODR is not initialized, return empty incomplete list.  */
  if (!odr_hash)
    {
      if (completep)
	*completep = false;
      if (cache_token)
	*cache_token = NULL;
      if (nonconstruction_targetsp)
	*nonconstruction_targetsp = 0;
      return nodes;
    }

  /* If we hit type inconsistency, just return empty list of targets.
     Note that this list is considered complete: the call is undefined.  */
  if (otr_token == INT_MAX)
    {
      if (completep)
	*completep = true;
      if (cache_token)
	*cache_token = NULL;
      if (nonconstruction_targetsp)
	*nonconstruction_targetsp = 0;
      return nodes;
    }

  type = get_odr_type (otr_type, true);

  /* Recording type variants would waste results cache.  */
  gcc_assert (!context.outer_type
	      || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);

  /* Lookup the outer class type we want to walk.  */
  if (context.outer_type
      && !get_class_context (&context, otr_type))
    {
      if (completep)
	*completep = false;
      if (cache_token)
	*cache_token = NULL;
      if (nonconstruction_targetsp)
	*nonconstruction_targetsp = 0;
      return nodes;
    }

  /* Check that get_class_context kept the main variant.  */
  gcc_assert (!context.outer_type
	      || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);

  /* We canonicalize our query, so we do not need extra hashtable entries.  */

  /* Without outer type, we have no use for offset.  Just do the
     basic search from inner type.  */
  if (!context.outer_type)
    {
      context.outer_type = otr_type;
      context.offset = 0;
    }
  /* We need to update our hierarchy if the type does not exist.  */
  outer_type = get_odr_type (context.outer_type, true);
  /* If the type is complete, there are no derivations.  */
  if (TYPE_FINAL_P (outer_type->type))
    context.maybe_derived_type = false;

  /* Initialize query cache (and removal hooks invalidating it) lazily.  */
  if (!cached_polymorphic_call_targets)
    {
      cached_polymorphic_call_targets = pointer_set_create ();
      polymorphic_call_target_hash
	= new polymorphic_call_target_hash_type (23);
      if (!node_removal_hook_holder)
	{
	  node_removal_hook_holder =
	    cgraph_add_node_removal_hook (&devirt_node_removal_hook, NULL);
	  varpool_add_node_removal_hook (&devirt_variable_node_removal_hook,
					 NULL);
	}
    }

  /* Lookup cached answer.  */
  key.type = type;
  key.otr_token = otr_token;
  key.context = context;
  slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
  if (cache_token)
   *cache_token = (void *)*slot;
  if (*slot)
    {
      if (completep)
	*completep = (*slot)->complete;
      if (nonconstruction_targetsp)
	*nonconstruction_targetsp = (*slot)->nonconstruction_targets;
      return (*slot)->targets;
    }

  complete = true;

  /* Do actual search.  */
  timevar_push (TV_IPA_VIRTUAL_CALL);
  *slot = XCNEW (polymorphic_call_target_d);
  if (cache_token)
    *cache_token = (void *)*slot;
  (*slot)->type = type;
  (*slot)->otr_token = otr_token;
  (*slot)->context = context;

  inserted = pointer_set_create ();
  matched_vtables = pointer_set_create ();

  /* First see virtual method of type itself.  */
  binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
			       context.offset, otr_type);
  if (binfo)
    target = gimple_get_virt_method_for_binfo (otr_token, binfo,
					       &can_refer);
  else
    {
      /* Missing BINFOs happen only for ODR violating programs.  */
      gcc_assert (odr_violation_reported);
      target = NULL;
    }

  /* Destructors are never called through construction virtual tables,
     because the type is always known.  */
  if (target && DECL_CXX_DESTRUCTOR_P (target))
    context.maybe_in_construction = false;

  if (target)
    {
      /* In the case we get complete method, we don't need 
	 to walk derivations.  */
      if (DECL_FINAL_P (target))
	context.maybe_derived_type = false;
    }

  /* If OUTER_TYPE is abstract, we know we are not seeing its instance.  */
  if (type_possibly_instantiated_p (outer_type->type))
    maybe_record_node (nodes, target, inserted, can_refer, &complete);
  else
    {
      skipped = true;
      gcc_assert (in_lto_p || context.maybe_derived_type);
    }

  if (binfo)
    pointer_set_insert (matched_vtables, BINFO_VTABLE (binfo));

  /* Next walk recursively all derived types.  */
  if (context.maybe_derived_type)
    {
      /* For anonymous namespace types we can attempt to build full type.
	 All derivations must be in this unit (unless we see partial unit).  */
      if (!type->all_derivations_known)
	complete = false;
      for (i = 0; i < outer_type->derived_types.length(); i++)
	possible_polymorphic_call_targets_1 (nodes, inserted,
					     matched_vtables,
					     otr_type,
					     outer_type->derived_types[i],
					     otr_token, outer_type->type,
					     context.offset, &complete,
					     bases_to_consider,
					     context.maybe_in_construction);
    }

  /* Finally walk bases, if asked to.
     Record the boundary first: everything recorded so far does not
     depend on the construction assumption.  */
  (*slot)->nonconstruction_targets = nodes.length();

  /* Destructors are never called through construction virtual tables,
     because the type is always known.  One of entries may be cxa_pure_virtual
     so look to at least two of them.  */
  if (context.maybe_in_construction)
    for (i =0 ; i < MIN (nodes.length (), 2); i++)
      if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
	context.maybe_in_construction = false;
  if (context.maybe_in_construction)
    {
      if (type != outer_type
	  && (!skipped
	      || (context.maybe_derived_type
		  && !type_all_derivations_known_p (outer_type->type))))
	record_targets_from_bases (otr_type, otr_token, outer_type->type,
				   context.offset, nodes, inserted,
				   matched_vtables, &complete);
      if (skipped)
	maybe_record_node (nodes, target, inserted, can_refer, &complete);
      for (i = 0; i < bases_to_consider.length(); i++)
	maybe_record_node (nodes, bases_to_consider[i], inserted, can_refer, &complete);
    }
  bases_to_consider.release();

  /* Store the search result into the cache entry allocated above.  */
  (*slot)->targets = nodes;
  (*slot)->complete = complete;
  if (completep)
    *completep = complete;
  if (nonconstruction_targetsp)
    *nonconstruction_targetsp = (*slot)->nonconstruction_targets;

  pointer_set_destroy (inserted);
  pointer_set_destroy (matched_vtables);
  timevar_pop (TV_IPA_VIRTUAL_CALL);
  return nodes;
}
2595
2596 /* Dump all possible targets of a polymorphic call. */
2597
2598 void
2599 dump_possible_polymorphic_call_targets (FILE *f,
2600 tree otr_type,
2601 HOST_WIDE_INT otr_token,
2602 const ipa_polymorphic_call_context &ctx)
2603 {
2604 vec <cgraph_node *> targets;
2605 bool final;
2606 odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false);
2607 unsigned int i;
2608 int nonconstruction;
2609
2610 if (!type)
2611 return;
2612 targets = possible_polymorphic_call_targets (otr_type, otr_token,
2613 ctx,
2614 &final, NULL, &nonconstruction);
2615 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
2616 print_generic_expr (f, type->type, TDF_SLIM);
2617 fprintf (f, " token %i\n", (int)otr_token);
2618 if (ctx.outer_type || ctx.offset)
2619 {
2620 fprintf (f, " Contained in type:");
2621 print_generic_expr (f, ctx.outer_type, TDF_SLIM);
2622 fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n",
2623 ctx.offset);
2624 }
2625
2626 fprintf (f, " %s%s%s\n ",
2627 final ? "This is a complete list." :
2628 "This is partial list; extra targets may be defined in other units.",
2629 ctx.maybe_in_construction ? " (base types included)" : "",
2630 ctx.maybe_derived_type ? " (derived types included)" : "");
2631 for (i = 0; i < targets.length (); i++)
2632 {
2633 char *name = NULL;
2634 if (i == (unsigned)nonconstruction)
2635 fprintf (f, "\n If the type is in construction,"
2636 " then additional tarets are:\n"
2637 " ");
2638 if (in_lto_p)
2639 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
2640 fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
2641 if (in_lto_p)
2642 free (name);
2643 if (!targets[i]->definition)
2644 fprintf (f, " (no definition%s)",
2645 DECL_DECLARED_INLINE_P (targets[i]->decl)
2646 ? " inline" : "");
2647 }
2648 fprintf (f, "\n\n");
2649 }
2650
2651
2652 /* Return true if N can be possibly target of a polymorphic call of
2653 OTR_TYPE/OTR_TOKEN. */
2654
2655 bool
2656 possible_polymorphic_call_target_p (tree otr_type,
2657 HOST_WIDE_INT otr_token,
2658 const ipa_polymorphic_call_context &ctx,
2659 struct cgraph_node *n)
2660 {
2661 vec <cgraph_node *> targets;
2662 unsigned int i;
2663 enum built_in_function fcode;
2664 bool final;
2665
2666 if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
2667 && ((fcode = DECL_FUNCTION_CODE (n->decl))
2668 == BUILT_IN_UNREACHABLE
2669 || fcode == BUILT_IN_TRAP))
2670 return true;
2671
2672 if (!odr_hash)
2673 return true;
2674 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
2675 for (i = 0; i < targets.length (); i++)
2676 if (n->semantically_equivalent_p (targets[i]))
2677 return true;
2678
2679 /* At a moment we allow middle end to dig out new external declarations
2680 as a targets of polymorphic calls. */
2681 if (!final && !n->definition)
2682 return true;
2683 return false;
2684 }
2685
2686
2687 /* After callgraph construction new external nodes may appear.
2688 Add them into the graph. */
2689
2690 void
2691 update_type_inheritance_graph (void)
2692 {
2693 struct cgraph_node *n;
2694
2695 if (!odr_hash)
2696 return;
2697 free_polymorphic_call_targets_hash ();
2698 timevar_push (TV_IPA_INHERITANCE);
2699 /* We reconstruct the graph starting from types of all methods seen in the
2700 the unit. */
2701 FOR_EACH_FUNCTION (n)
2702 if (DECL_VIRTUAL_P (n->decl)
2703 && !n->definition
2704 && n->real_symbol_p ())
2705 get_odr_type (method_class_type (TYPE_MAIN_VARIANT (TREE_TYPE (n->decl))),
2706 true);
2707 timevar_pop (TV_IPA_INHERITANCE);
2708 }
2709
2710
2711 /* Return true if N looks like likely target of a polymorphic call.
2712 Rule out cxa_pure_virtual, noreturns, function declared cold and
2713 other obvious cases. */
2714
2715 bool
2716 likely_target_p (struct cgraph_node *n)
2717 {
2718 int flags;
2719 /* cxa_pure_virtual and similar things are not likely. */
2720 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
2721 return false;
2722 flags = flags_from_decl_or_type (n->decl);
2723 if (flags & ECF_NORETURN)
2724 return false;
2725 if (lookup_attribute ("cold",
2726 DECL_ATTRIBUTES (n->decl)))
2727 return false;
2728 if (n->frequency < NODE_FREQUENCY_NORMAL)
2729 return false;
2730 /* If there are no virtual tables refering the target alive,
2731 the only way the target can be called is an instance comming from other
2732 compilation unit; speculative devirtualization is build around an
2733 assumption that won't happen. */
2734 if (!referenced_from_vtable_p (n))
2735 return false;
2736 return true;
2737 }
2738
2739 /* The ipa-devirt pass.
2740 When polymorphic call has only one likely target in the unit,
2741 turn it into speculative call. */
2742
2743 static unsigned int
2744 ipa_devirt (void)
2745 {
2746 struct cgraph_node *n;
2747 struct pointer_set_t *bad_call_targets = pointer_set_create ();
2748 struct cgraph_edge *e;
2749
2750 int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
2751 int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
2752 int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
2753
2754 FOR_EACH_DEFINED_FUNCTION (n)
2755 {
2756 bool update = false;
2757 if (dump_file && n->indirect_calls)
2758 fprintf (dump_file, "\n\nProcesing function %s/%i\n",
2759 n->name (), n->order);
2760 for (e = n->indirect_calls; e; e = e->next_callee)
2761 if (e->indirect_info->polymorphic)
2762 {
2763 struct cgraph_node *likely_target = NULL;
2764 void *cache_token;
2765 bool final;
2766 int nonconstruction_targets;
2767 vec <cgraph_node *>targets
2768 = possible_polymorphic_call_targets
2769 (e, &final, &cache_token, &nonconstruction_targets);
2770 unsigned int i;
2771
2772 if (dump_file)
2773 dump_possible_polymorphic_call_targets
2774 (dump_file, e);
2775
2776 npolymorphic++;
2777
2778 if (!cgraph_maybe_hot_edge_p (e))
2779 {
2780 if (dump_file)
2781 fprintf (dump_file, "Call is cold\n\n");
2782 ncold++;
2783 continue;
2784 }
2785 if (e->speculative)
2786 {
2787 if (dump_file)
2788 fprintf (dump_file, "Call is aready speculated\n\n");
2789 nspeculated++;
2790
2791 /* When dumping see if we agree with speculation. */
2792 if (!dump_file)
2793 continue;
2794 }
2795 if (pointer_set_contains (bad_call_targets,
2796 cache_token))
2797 {
2798 if (dump_file)
2799 fprintf (dump_file, "Target list is known to be useless\n\n");
2800 nmultiple++;
2801 continue;
2802 }
2803 for (i = 0; i < targets.length (); i++)
2804 if (likely_target_p (targets[i]))
2805 {
2806 if (likely_target)
2807 {
2808 if (i < (unsigned) nonconstruction_targets)
2809 {
2810 likely_target = NULL;
2811 if (dump_file)
2812 fprintf (dump_file, "More than one likely target\n\n");
2813 nmultiple++;
2814 }
2815 break;
2816 }
2817 likely_target = targets[i];
2818 }
2819 if (!likely_target)
2820 {
2821 pointer_set_insert (bad_call_targets, cache_token);
2822 continue;
2823 }
2824 /* This is reached only when dumping; check if we agree or disagree
2825 with the speculation. */
2826 if (e->speculative)
2827 {
2828 struct cgraph_edge *e2;
2829 struct ipa_ref *ref;
2830 cgraph_speculative_call_info (e, e2, e, ref);
2831 if (e2->callee->ultimate_alias_target ()
2832 == likely_target->ultimate_alias_target ())
2833 {
2834 fprintf (dump_file, "We agree with speculation\n\n");
2835 nok++;
2836 }
2837 else
2838 {
2839 fprintf (dump_file, "We disagree with speculation\n\n");
2840 nwrong++;
2841 }
2842 continue;
2843 }
2844 if (!likely_target->definition)
2845 {
2846 if (dump_file)
2847 fprintf (dump_file, "Target is not an definition\n\n");
2848 nnotdefined++;
2849 continue;
2850 }
2851 /* Do not introduce new references to external symbols. While we
2852 can handle these just well, it is common for programs to
2853 incorrectly with headers defining methods they are linked
2854 with. */
2855 if (DECL_EXTERNAL (likely_target->decl))
2856 {
2857 if (dump_file)
2858 fprintf (dump_file, "Target is external\n\n");
2859 nexternal++;
2860 continue;
2861 }
2862 /* Don't use an implicitly-declared destructor (c++/58678). */
2863 struct cgraph_node *non_thunk_target
2864 = likely_target->function_symbol ();
2865 if (DECL_ARTIFICIAL (non_thunk_target->decl)
2866 && DECL_COMDAT (non_thunk_target->decl))
2867 {
2868 if (dump_file)
2869 fprintf (dump_file, "Target is artificial\n\n");
2870 nartificial++;
2871 continue;
2872 }
2873 if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
2874 && likely_target->can_be_discarded_p ())
2875 {
2876 if (dump_file)
2877 fprintf (dump_file, "Target is overwritable\n\n");
2878 noverwritable++;
2879 continue;
2880 }
2881 else if (dbg_cnt (devirt))
2882 {
2883 if (dump_enabled_p ())
2884 {
2885 location_t locus = gimple_location_safe (e->call_stmt);
2886 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
2887 "speculatively devirtualizing call in %s/%i to %s/%i\n",
2888 n->name (), n->order,
2889 likely_target->name (),
2890 likely_target->order);
2891 }
2892 if (!likely_target->can_be_discarded_p ())
2893 {
2894 cgraph_node *alias;
2895 alias = dyn_cast<cgraph_node *> (likely_target->noninterposable_alias ());
2896 if (alias)
2897 likely_target = alias;
2898 }
2899 nconverted++;
2900 update = true;
2901 cgraph_turn_edge_to_speculative
2902 (e, likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
2903 }
2904 }
2905 if (update)
2906 inline_update_overall_summary (n);
2907 }
2908 pointer_set_destroy (bad_call_targets);
2909
2910 if (dump_file)
2911 fprintf (dump_file,
2912 "%i polymorphic calls, %i devirtualized,"
2913 " %i speculatively devirtualized, %i cold\n"
2914 "%i have multiple targets, %i overwritable,"
2915 " %i already speculated (%i agree, %i disagree),"
2916 " %i external, %i not defined, %i artificial\n",
2917 npolymorphic, ndevirtualized, nconverted, ncold,
2918 nmultiple, noverwritable, nspeculated, nok, nwrong,
2919 nexternal, nnotdefined, nartificial);
2920 return ndevirtualized ? TODO_remove_functions : 0;
2921 }
2922
namespace {

/* Pass descriptor for the speculative devirtualization IPA pass.  */
const pass_data pass_data_ipa_devirt =
{
  IPA_PASS, /* type */
  "devirt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_DEVIRT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_dump_symtab ), /* todo_flags_finish */
};

/* IPA pass wrapper around ipa_devirt.  It performs no summary streaming
   or per-function transform; all work happens in execute.  */
class pass_ipa_devirt : public ipa_opt_pass_d
{
public:
  pass_ipa_devirt (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  /* Run only when speculative devirtualization is requested and we are
     optimizing.  */
  virtual bool gate (function *)
    {
      return (flag_devirtualize
	      && flag_devirtualize_speculatively
	      && optimize);
    }

  virtual unsigned int execute (function *) { return ipa_devirt (); }

}; // class pass_ipa_devirt

} // anon namespace
2967
2968 ipa_opt_pass_d *
2969 make_pass_ipa_devirt (gcc::context *ctxt)
2970 {
2971 return new pass_ipa_devirt (ctxt);
2972 }
2973
2974 #include "gt-ipa-devirt.h"