/* Basic IPA utilities for type inheritance graph construction and
   devirtualization.
   Copyright (C) 2013-2014 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Brief vocabulary:
     ODR = One Definition Rule
       In short, the ODR states that:
       1 In any translation unit, a template, type, function, or object can
         have no more than one definition.  Some of these can have any number
         of declarations.  A definition provides an instance.
       2 In the entire program, an object or non-inline function cannot have
         more than one definition; if an object or function is used, it must
         have exactly one definition.  You can declare an object or function
         that is never used, in which case you don't have to provide
         a definition.  In no event can there be more than one definition.
       3 Some things, like types, templates, and extern inline functions, can
         be defined in more than one translation unit.  For a given entity,
         each definition must be the same.  Non-extern objects and functions
         in different translation units are different entities, even if their
         names and types are the same.

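       As an illustration (a hypothetical example, not taken from GCC
       sources), these two translation units violate the rule because the
       definitions of A differ:

         // tu1.C
         struct A { virtual int f () { return 1; } };
         // tu2.C
         struct A { long pad; virtual int f () { return 2; } };

       With LTO both definitions meet in one program; add_type_duplicate
       below detects the layout mismatch and reports an ODR violation.
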
     OTR = OBJ_TYPE_REF
       This is the GIMPLE representation of type information of a polymorphic
       call.  It contains two parameters:
         otr_type is the type of class whose method is called.
         otr_token is the index into the virtual table where the address is
           taken.

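       For example (an illustrative sketch; the exact dump syntax varies),
       a call p->f () where f occupies the second function slot of A's
       vtable may appear in a GIMPLE dump roughly as

         OBJ_TYPE_REF(_3;(struct A)p_1->1) (p_1);

       with otr_type A and otr_token 1.
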
     BINFO
       This is the type inheritance information attached to each tree
       RECORD_TYPE by the C++ frontend.  It provides information about base
       types and virtual tables.

       BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
       BINFO also links to its type by BINFO_TYPE and to the virtual table by
       BINFO_VTABLE.

       Base types of a given type are enumerated by the BINFO_BASE_BINFO
       vector.  Members of these vectors are not the BINFOs associated
       with the base types.  Rather they are new copies of BINFOs
       (base BINFOs).  Their virtual tables may differ from the
       virtual table of the base type.  Also, BINFO_OFFSET specifies the
       offset of the base within the type.

       In the case of single inheritance, the virtual table is shared
       and BINFO_VTABLE of the base BINFO is NULL.  In the case of multiple
       inheritance the individual virtual tables are pointed to by
       BINFO_VTABLE of the base BINFOs (which may differ from BINFO_VTABLE
       of the BINFO associated with the base type).

       BINFO lookup for a given base type and offset can be done by
       get_binfo_at_offset.  It returns the proper BINFO whose virtual table
       can be used for lookup of virtual methods associated with the
       base type.

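       For instance (an illustrative sketch):

         struct A { virtual void f (); };
         struct B { virtual void g (); };
         struct C : A, B { };

       TYPE_BINFO (C) carries two base BINFOs.  The one for A, the primary
       base, shares C's virtual table, while the one for B typically has
       BINFO_OFFSET equal to sizeof (A) and its own BINFO_VTABLE pointing
       into C's secondary vtable for the B subobject.
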
     token
       This is the index of a virtual method in the virtual table associated
       with the type defining it.  The token can be looked up from an
       OBJ_TYPE_REF or from DECL_VINDEX of a given virtual method.

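       For instance (illustrative), if struct A declares virtual methods
       f and g in this order, with no virtual bases or virtual destructor,
       f usually gets token 0 and g token 1; DECL_VINDEX of g then holds 1
       and a call through g carries otr_token 1.
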
     polymorphic (indirect) call
       This is the callgraph representation of a virtual method call.  Every
       polymorphic call contains otr_type and otr_token taken from the
       original OBJ_TYPE_REF at callgraph construction time.

   What we do here:

   build_type_inheritance_graph triggers a construction of the type
   inheritance graph.

     We reconstruct it based on types of methods we see in the unit.
     This means that the graph is not complete.  Types with no methods are not
     inserted into the graph.  Also types without virtual methods are not
     represented at all, though it may be easy to add this.

     The inheritance graph is represented as follows:

       Vertices are structures odr_type.  Every odr_type may correspond
       to one or more tree type nodes that are equivalent by the ODR rule.
       (the multiple type nodes appear only with link-time optimization)

       Edges are represented by odr_type->bases and odr_type->derived_types.
       At the moment we do not track offsets of types for multiple
       inheritance.  Adding this is easy.

  possible_polymorphic_call_targets returns, given the parameters found on an
  indirect polymorphic edge, all possible polymorphic call targets of the
  call.

  pass_ipa_devirt performs simple speculative devirtualization.
*/
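
/* As an illustrative sketch (not code from this file) of what speculative
   devirtualization does: given

     struct A { virtual void f (); };
     struct B : A { void f (); };
     void call (A *a) { a->f (); }

   if B::f is the only likely target found by
   possible_polymorphic_call_targets, pass_ipa_devirt adds a speculative
   edge that may later be materialized roughly as

     fn = a->vptr[token];
     if (fn == B::f)   // speculation check
       B::f (a);       // direct call, may be inlined
     else
       fn (a);         // fallback indirect call  */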

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "print-tree.h"
#include "calls.h"
#include "cgraph.h"
#include "expr.h"
#include "tree-pass.h"
#include "pointer-set.h"
#include "target.h"
#include "hash-table.h"
#include "tree-pretty-print.h"
#include "ipa-utils.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "ipa-inline.h"
#include "diagnostic.h"
#include "tree-dfa.h"
#include "demangle.h"
#include "dbgcnt.h"

static bool odr_violation_reported = false;

/* Dummy polymorphic call context.  */

const ipa_polymorphic_call_context ipa_dummy_polymorphic_call_context
   = {0, NULL, false, true};

/* Pointer set of all call targets appearing in the cache.  */
static pointer_set_t *cached_polymorphic_call_targets;

/* A node of the type inheritance graph.  For each type unique in the
   One Definition Rule (ODR) sense, we produce one node linking all
   main variants of types equivalent to it, bases and derived types.  */

struct GTY(()) odr_type_d
{
  /* Leader type.  */
  tree type;
  /* All bases; built only for main variants of types.  */
  vec<odr_type> GTY((skip)) bases;
  /* All derived types with virtual methods seen in the unit;
     built only for main variants of types.  */
  vec<odr_type> GTY((skip)) derived_types;

  /* All equivalent types, if more than one.  */
  vec<tree, va_gc> *types;
  /* Set of all equivalent types, if NON-NULL.  */
  pointer_set_t * GTY((skip)) types_set;

  /* Unique ID indexing the type in odr_types array.  */
  int id;
  /* Is it in anonymous namespace?  */
  bool anonymous_namespace;
  /* Do we know about all derivations of given type?  */
  bool all_derivations_known;
  /* Did we report ODR violation here?  */
  bool odr_violated;
};

/* Return true if BINFO corresponds to a type with virtual methods.

   Every type has several BINFOs.  One is the BINFO associated with the
   type itself, while others represent bases of derived types.  The BINFOs
   representing bases do not have BINFO_VTABLE pointer set in the single
   inheritance case (because vtables are shared).  Look up the BINFO of
   the type and check presence of its vtable.  */

static inline bool
polymorphic_type_binfo_p (tree binfo)
{
  /* See if BINFO's type has a virtual table associated with it.  */
  return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo)));
}

/* Return TRUE if all derived types of T are known and thus
   we may consider the walk of derived types complete.

   This is typically true only for final types, anonymous namespace types,
   and types defined within functions (that may be COMDAT and thus shared
   across units, but with the same set of derived types).  */
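
/* For instance (an illustrative sketch):

     namespace { struct A { virtual void f (); }; } // all derivations known
     struct B final { virtual void f (); };         // final: no derivations

   For an exported, non-final class a derived type may still appear in
   another translation unit, so the walk cannot be considered complete.  */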

static bool
type_all_derivations_known_p (tree t)
{
  if (TYPE_FINAL_P (t))
    return true;
  if (flag_ltrans)
    return false;
  if (type_in_anonymous_namespace_p (t))
    return true;
  return (decl_function_context (TYPE_NAME (t)) != NULL);
}

/* Return TRUE if type's constructors are all visible.  */

static bool
type_all_ctors_visible_p (tree t)
{
  return !flag_ltrans
         && cgraph_state >= CGRAPH_STATE_CONSTRUCTION
         /* We can not always use type_all_derivations_known_p.
            For function-local types we must assume the case where
            the function is COMDAT and shared in between units.

            TODO: These cases are quite easy to get, but we need
            to keep track of C++ privatizing via -fno-weak
            as well as the IPA privatizing.  */
         && type_in_anonymous_namespace_p (t);
}

/* Return TRUE if the type may have an instance.  */

static bool
type_possibly_instantiated_p (tree t)
{
  tree vtable;
  varpool_node *vnode;

  /* TODO: Add abstract types here.  */
  if (!type_all_ctors_visible_p (t))
    return true;

  vtable = BINFO_VTABLE (TYPE_BINFO (t));
  if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
    vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
  vnode = varpool_get_node (vtable);
  return vnode && vnode->definition;
}

/* One Definition Rule hashtable helpers.  */

struct odr_hasher
{
  typedef odr_type_d value_type;
  typedef union tree_node compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Return the type that was declared with T's name so that T is a
   qualified variant of it.  */

static inline tree
main_odr_variant (const_tree t)
{
  if (TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
    return TREE_TYPE (TYPE_NAME (t));
  /* Unnamed types and non-C++ produced types can be compared by variants.  */
  else
    return TYPE_MAIN_VARIANT (t);
}

/* Produce hash based on type name.  */

static hashval_t
hash_type_name (tree t)
{
  gcc_checking_assert (main_odr_variant (t) == t);

  /* If not in LTO, all main variants are unique, so we can do
     pointer hash.  */
  if (!in_lto_p)
    return htab_hash_pointer (t);

  /* Anonymous types are unique.  */
  if (type_in_anonymous_namespace_p (t))
    return htab_hash_pointer (t);

  /* For polymorphic types, we can simply hash the virtual table.  */
  if (TREE_CODE (t) == RECORD_TYPE
      && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
    {
      tree v = BINFO_VTABLE (TYPE_BINFO (t));
      hashval_t hash = 0;

      if (TREE_CODE (v) == POINTER_PLUS_EXPR)
        {
          hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
          v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
        }

      v = DECL_ASSEMBLER_NAME (v);
      hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
      return hash;
    }

  /* Rest is not implemented yet.  */
  gcc_unreachable ();
}

/* Return the computed hashcode for ODR_TYPE.  */

inline hashval_t
odr_hasher::hash (const value_type *odr_type)
{
  return hash_type_name (odr_type->type);
}

/* For languages with One Definition Rule, work out if
   types are the same based on their name.

   This is non-trivial for LTO where minor differences in
   the type representation may have prevented type merging
   from merging two copies of an otherwise equivalent type.

   Until we start streaming mangled type names, this function works
   only for polymorphic types.  */
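
/* For example (an illustrative sketch): if tu1.C and tu2.C both define

     struct A { virtual void f (); };

   and tree merging failed to unify the two RECORD_TYPE nodes during LTO,
   both copies still refer to a virtual table with the assembler name
   _ZTV1A at the same offset, so the comparison below recognizes them as
   the same ODR type.  */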

bool
types_same_for_odr (const_tree type1, const_tree type2)
{
  gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));

  type1 = main_odr_variant (type1);
  type2 = main_odr_variant (type2);

  if (type1 == type2)
    return true;

  if (!in_lto_p)
    return false;

  /* Check for anonymous namespaces.  Those have !TREE_PUBLIC
     on the corresponding TYPE_STUB_DECL.  */
  if (type_in_anonymous_namespace_p (type1)
      || type_in_anonymous_namespace_p (type2))
    return false;

  /* See if types are obviously different (i.e. different codes
     or polymorphic wrt non-polymorphic).  This is not strictly correct
     for ODR violating programs, but we can't do better without streaming
     ODR names.  */
  if (TREE_CODE (type1) != TREE_CODE (type2))
    return false;
  if (TREE_CODE (type1) == RECORD_TYPE
      && (TYPE_BINFO (type1) == NULL_TREE) != (TYPE_BINFO (type2) == NULL_TREE))
    return false;
  if (TREE_CODE (type1) == RECORD_TYPE && TYPE_BINFO (type1)
      && (BINFO_VTABLE (TYPE_BINFO (type1)) == NULL_TREE)
         != (BINFO_VTABLE (TYPE_BINFO (type2)) == NULL_TREE))
    return false;

  /* At the moment we have no way to establish ODR equivalence at LTO
     other than comparing virtual table pointers of polymorphic types.
     Eventually we should start saving mangled names in TYPE_NAME.
     Then this condition will become non-trivial.  */

  if (TREE_CODE (type1) == RECORD_TYPE
      && TYPE_BINFO (type1) && TYPE_BINFO (type2)
      && BINFO_VTABLE (TYPE_BINFO (type1))
      && BINFO_VTABLE (TYPE_BINFO (type2)))
    {
      tree v1 = BINFO_VTABLE (TYPE_BINFO (type1));
      tree v2 = BINFO_VTABLE (TYPE_BINFO (type2));
      gcc_assert (TREE_CODE (v1) == POINTER_PLUS_EXPR
                  && TREE_CODE (v2) == POINTER_PLUS_EXPR);
      return (operand_equal_p (TREE_OPERAND (v1, 1),
                               TREE_OPERAND (v2, 1), 0)
              && DECL_ASSEMBLER_NAME
                   (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
                 == DECL_ASSEMBLER_NAME
                      (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
    }
  gcc_unreachable ();
}

/* Compare types T1 and T2 and return true if they are
   equivalent.  */

inline bool
odr_hasher::equal (const value_type *t1, const compare_type *ct2)
{
  tree t2 = const_cast <tree> (ct2);

  gcc_checking_assert (main_odr_variant (t2) == t2);
  if (t1->type == t2)
    return true;
  if (!in_lto_p)
    return false;
  return types_same_for_odr (t1->type, t2);
}

/* Free ODR type V.  */

inline void
odr_hasher::remove (value_type *v)
{
  v->bases.release ();
  v->derived_types.release ();
  if (v->types_set)
    pointer_set_destroy (v->types_set);
  ggc_free (v);
}

/* ODR type hash used to look up ODR type based on tree type node.  */

typedef hash_table<odr_hasher> odr_hash_type;
static odr_hash_type *odr_hash;

/* ODR types are also stored into ODR_TYPE vector to allow consistent
   walking.  Bases appear before derived types.  Vector is garbage collected
   so we won't end up visiting empty types.  */

static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
#define odr_types (*odr_types_ptr)

/* Set TYPE_BINFO of TYPE and its variants to BINFO.  */
void
set_type_binfo (tree type, tree binfo)
{
  for (; type; type = TYPE_NEXT_VARIANT (type))
    if (COMPLETE_TYPE_P (type))
      TYPE_BINFO (type) = binfo;
    else
      gcc_assert (!TYPE_BINFO (type));
}

/* TYPE is equivalent to VAL by ODR, but its tree representation differs
   from VAL->type.  This may happen in LTO where tree merging did not merge
   all variants of the same type.  It may or may not mean an ODR violation.
   Add it to the list of duplicates and warn on some violations.  */

static bool
add_type_duplicate (odr_type val, tree type)
{
  bool build_bases = false;
  if (!val->types_set)
    val->types_set = pointer_set_create ();

  /* Always prefer complete type to be the leader.  */
  if (!COMPLETE_TYPE_P (val->type)
      && COMPLETE_TYPE_P (type))
    {
      tree tmp = type;

      build_bases = true;
      type = val->type;
      val->type = tmp;
    }

  /* See if this duplicate is new.  */
  if (!pointer_set_insert (val->types_set, type))
    {
      bool merge = true;
      bool base_mismatch = false;
      bool warned = false;
      unsigned int i,j;

      gcc_assert (in_lto_p);
      vec_safe_push (val->types, type);

      /* First we compare memory layout.  */
      if (!types_compatible_p (val->type, type))
        {
          merge = false;
          odr_violation_reported = true;
          if (BINFO_VTABLE (TYPE_BINFO (val->type))
              && warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
                             "type %qD violates one definition rule",
                             type))
            {
              inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
                      "a type with the same name but different layout is "
                      "defined in another translation unit");
              warned = true;
            }
          val->odr_violated = true;
          if (cgraph_dump_file)
            {
              fprintf (cgraph_dump_file,
                       "ODR violation or merging or ODR type bug?\n");

              print_node (cgraph_dump_file, "", val->type, 0);
              putc ('\n',cgraph_dump_file);
              print_node (cgraph_dump_file, "", type, 0);
              putc ('\n',cgraph_dump_file);
            }
        }

      /* Next, sanity-check that the bases are the same.  If not, we will end
         up producing wrong answers.  */
      if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
          && TREE_CODE (val->type) == RECORD_TYPE
          && TREE_CODE (type) == RECORD_TYPE
          && TYPE_BINFO (val->type) && TYPE_BINFO (type))
        {
          for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
            if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type),
                                                            i)))
              {
                odr_type base = get_odr_type
                                  (BINFO_TYPE
                                     (BINFO_BASE_BINFO (TYPE_BINFO (type),
                                                        i)),
                                   true);
                if (val->bases.length () <= j || val->bases[j] != base)
                  base_mismatch = true;
                j++;
              }
          if (base_mismatch)
            {
              merge = false;
              odr_violation_reported = true;

              if (!warned
                  && warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
                                 "type %qD violates one definition rule",
                                 type))
                inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
                        "a type with the same name but different bases is "
                        "defined in another translation unit");
              val->odr_violated = true;
              if (cgraph_dump_file)
                {
                  fprintf (cgraph_dump_file,
                           "ODR base violation or merging bug?\n");

                  print_node (cgraph_dump_file, "", val->type, 0);
                  putc ('\n',cgraph_dump_file);
                  print_node (cgraph_dump_file, "", type, 0);
                  putc ('\n',cgraph_dump_file);
                }
            }
        }

      /* Regularize things a little.  During LTO the same types may come with
         different BINFOs: either because their virtual table was
         not merged by tree merging and only later at decl merging, or
         because one type comes with an external vtable, while the other
         with an internal one.  We want to merge equivalent binfos to
         conserve memory and streaming overhead.

         The external vtables are more harmful: they contain references
         to external declarations of methods that may be defined in the
         merged LTO unit.  For this reason we absolutely need to remove
         them and replace them by internal variants.  Not doing so will lead
         to incomplete answers from possible_polymorphic_call_targets.  */
      if (!flag_ltrans && merge
          && TREE_CODE (val->type) == RECORD_TYPE
          && TREE_CODE (type) == RECORD_TYPE
          && TYPE_BINFO (val->type) && TYPE_BINFO (type)
          && TYPE_MAIN_VARIANT (type) == type
          && TYPE_MAIN_VARIANT (val->type) == val->type
          && BINFO_VTABLE (TYPE_BINFO (val->type))
          && BINFO_VTABLE (TYPE_BINFO (type)))
        {
          tree master_binfo = TYPE_BINFO (val->type);
          tree v1 = BINFO_VTABLE (master_binfo);
          tree v2 = BINFO_VTABLE (TYPE_BINFO (type));

          if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
            {
              gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
                          && operand_equal_p (TREE_OPERAND (v1, 1),
                                              TREE_OPERAND (v2, 1), 0));
              v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
              v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
            }
          gcc_assert (DECL_ASSEMBLER_NAME (v1)
                      == DECL_ASSEMBLER_NAME (v2));

          if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
            {
              unsigned int i;

              set_type_binfo (val->type, TYPE_BINFO (type));
              for (i = 0; i < val->types->length (); i++)
                {
                  if (TYPE_BINFO ((*val->types)[i])
                      == master_binfo)
                    set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
                }
              BINFO_TYPE (TYPE_BINFO (type)) = val->type;
            }
          else
            set_type_binfo (type, master_binfo);
        }
    }
  return build_bases;
}

/* Get ODR type hash entry for TYPE.  If INSERT is true, create
   possibly new entry.  */

odr_type
get_odr_type (tree type, bool insert)
{
  odr_type_d **slot;
  odr_type val;
  hashval_t hash;
  bool build_bases = false;
  bool insert_to_odr_array = false;
  int base_id = -1;

  type = main_odr_variant (type);

  hash = hash_type_name (type);
  slot
     = odr_hash->find_slot_with_hash (type, hash, insert ? INSERT : NO_INSERT);
  if (!slot)
    return NULL;

  /* See if we already have entry for type.  */
  if (*slot)
    {
      val = *slot;

      /* With LTO we need to support multiple tree representations of
         the same ODR type.  */
      if (val->type != type)
        build_bases = add_type_duplicate (val, type);
    }
  else
    {
      val = ggc_cleared_alloc<odr_type_d> ();
      val->type = type;
      val->bases = vNULL;
      val->derived_types = vNULL;
      val->anonymous_namespace = type_in_anonymous_namespace_p (type);
      build_bases = COMPLETE_TYPE_P (val->type);
      insert_to_odr_array = true;
    }

  if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
      && type == TYPE_MAIN_VARIANT (type))
    {
      tree binfo = TYPE_BINFO (type);
      unsigned int i;

      gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) == type);

      val->all_derivations_known = type_all_derivations_known_p (type);
      *slot = val;
      for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
        /* For now record only polymorphic types.  Other types are
           pointless for devirtualization and we can not precisely
           determine ODR equivalency of these during LTO.  */
        if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
          {
            odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
                                                                        i)),
                                          true);
            gcc_assert (TYPE_MAIN_VARIANT (base->type) == base->type);
            base->derived_types.safe_push (val);
            val->bases.safe_push (base);
            if (base->id > base_id)
              base_id = base->id;
          }
    }
  /* Ensure that type always appears after bases.  */
  if (insert_to_odr_array)
    {
      if (odr_types_ptr)
        val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  else if (base_id > val->id)
    {
      odr_types[val->id] = 0;
      /* Be sure we did not record any derived types; these may need
         renumbering too.  */
      gcc_assert (val->derived_types.length () == 0);
      if (odr_types_ptr)
        val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  return val;
}

/* Dump ODR type T and all its derived types.  INDENT specifies indentation
   for recursive printing.  */

static void
dump_odr_type (FILE *f, odr_type t, int indent=0)
{
  unsigned int i;
  fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
  print_generic_expr (f, t->type, TDF_SLIM);
  fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
  fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
  if (TYPE_NAME (t->type))
    {
      fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
               DECL_SOURCE_FILE (TYPE_NAME (t->type)),
               DECL_SOURCE_LINE (TYPE_NAME (t->type)));
    }
  if (t->bases.length ())
    {
      fprintf (f, "%*s base odr type ids: ", indent * 2, "");
      for (i = 0; i < t->bases.length (); i++)
        fprintf (f, " %i", t->bases[i]->id);
      fprintf (f, "\n");
    }
  if (t->derived_types.length ())
    {
      fprintf (f, "%*s derived types:\n", indent * 2, "");
      for (i = 0; i < t->derived_types.length (); i++)
        dump_odr_type (f, t->derived_types[i], indent + 1);
    }
  fprintf (f, "\n");
}

/* Dump the type inheritance graph.  */

static void
dump_type_inheritance_graph (FILE *f)
{
  unsigned int i;
  if (!odr_types_ptr)
    return;
  fprintf (f, "\n\nType inheritance graph:\n");
  for (i = 0; i < odr_types.length (); i++)
    {
      if (odr_types[i] && odr_types[i]->bases.length () == 0)
        dump_odr_type (f, odr_types[i]);
    }
  for (i = 0; i < odr_types.length (); i++)
    {
      if (odr_types[i] && odr_types[i]->types && odr_types[i]->types->length ())
        {
          unsigned int j;
          fprintf (f, "Duplicate tree types for odr type %i\n", i);
          print_node (f, "", odr_types[i]->type, 0);
          for (j = 0; j < odr_types[i]->types->length (); j++)
            {
              tree t;
              fprintf (f, "duplicate #%i\n", j);
              print_node (f, "", (*odr_types[i]->types)[j], 0);
              t = (*odr_types[i]->types)[j];
              while (TYPE_P (t) && TYPE_CONTEXT (t))
                {
                  t = TYPE_CONTEXT (t);
                  print_node (f, "", t, 0);
                }
              putc ('\n',f);
            }
        }
    }
}

/* Given method type T, return the type of the class it belongs to.
   Look up the this pointer and get its type.  */

tree
method_class_type (const_tree t)
{
  tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
  gcc_assert (TREE_CODE (t) == METHOD_TYPE);

  return TREE_TYPE (first_parm_type);
}

/* Initialize IPA devirt and build inheritance tree graph.  */

void
build_type_inheritance_graph (void)
{
  struct symtab_node *n;
  FILE *inheritance_dump_file;
  int flags;

  if (odr_hash)
    return;
  timevar_push (TV_IPA_INHERITANCE);
  inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
  odr_hash = new odr_hash_type (23);

  /* We reconstruct the graph starting from types of all methods seen in
     the unit.  */
  FOR_EACH_SYMBOL (n)
    if (is_a <cgraph_node *> (n)
        && DECL_VIRTUAL_P (n->decl)
        && symtab_real_symbol_p (n))
      get_odr_type (TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (n->decl))),
                    true);

    /* Look also for virtual tables of types that do not define any methods.

       We need it in a case where class B has a virtual base of class A
       re-defining its virtual method and there is a class C with no virtual
       methods with B as a virtual base.

       Here we output B's virtual method in two variants - for non-virtual
       and virtual inheritance.  B's virtual table has the non-virtual
       version, while C's has the virtual one.

       For this reason we need to know about C in order to include both
       variants of B.  More correctly, record_target_from_binfo should
       add both variants of the method when walking B, but we have no
       link in between them.

       We rely on the fact that either the method is exported and thus we
       assume it is called externally, or C is in anonymous namespace and
       thus we will see the vtable.  A sketch of the hierarchy in question
       follows this comment.  */
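
    /* Concretely (an illustrative sketch, not code from this file):

         struct A { virtual void f (); };
         struct B : virtual A { void f (); };  // re-defines A::f
         struct C : virtual B { };             // no methods of its own

       B::f needs a variant for the non-virtual and the virtual
       inheritance layout; B's own vtable references the former, and only
       C's vtable references the latter, which is why C must be walked.  */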

    else if (is_a <varpool_node *> (n)
             && DECL_VIRTUAL_P (n->decl)
             && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
             && TYPE_BINFO (DECL_CONTEXT (n->decl))
             && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
      get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true);
  if (inheritance_dump_file)
    {
      dump_type_inheritance_graph (inheritance_dump_file);
      dump_end (TDI_inheritance, inheritance_dump_file);
    }
  timevar_pop (TV_IPA_INHERITANCE);
}

/* Return true if N has a reference from a live virtual table
   (and thus can be a destination of polymorphic call).
   Be conservatively correct when the callgraph is not built or
   if the method may be referred to externally.  */

static bool
referenced_from_vtable_p (struct cgraph_node *node)
{
  int i;
  struct ipa_ref *ref;
  bool found = false;

  if (node->externally_visible
      || node->used_from_other_partition)
    return true;

  /* Keep this test constant time.
     It is unlikely this can happen except for the case where speculative
     devirtualization introduced many speculative edges to this node.
     In this case the target is very likely alive anyway.  */
  if (node->ref_list.referring.length () > 100)
    return true;

  /* We need references built.  */
  if (cgraph_state <= CGRAPH_STATE_CONSTRUCTION)
    return true;

  for (i = 0; node->iterate_referring (i, ref); i++)
    if ((ref->use == IPA_REF_ALIAS
         && referenced_from_vtable_p (cgraph (ref->referring)))
        || (ref->use == IPA_REF_ADDR
            && TREE_CODE (ref->referring->decl) == VAR_DECL
            && DECL_VIRTUAL_P (ref->referring->decl)))
      {
        found = true;
        break;
      }
  return found;
}

/* If TARGET has associated node, record it in the NODES array.
   CAN_REFER specifies if the program can refer to the target directly.
   If TARGET is unknown (NULL) or it can not be inserted (for example because
   its body was already removed and there is no way to refer to it), clear
   COMPLETEP.  */

static void
maybe_record_node (vec <cgraph_node *> &nodes,
                   tree target, pointer_set_t *inserted,
                   bool can_refer,
                   bool *completep)
{
  struct cgraph_node *target_node, *alias_target;
  enum availability avail;

  /* cxa_pure_virtual and __builtin_unreachable do not need to be added into
     list of targets; the runtime effect of calling them is undefined.
     Only "real" virtual methods should be accounted.  */
  if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE)
    return;

  if (!can_refer)
    {
      /* The only case when a method of anonymous namespace becomes
         unreferable is when we completely optimized it out.  */
      if (flag_ltrans
          || !target
          || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
        *completep = false;
      return;
    }

  if (!target)
    return;

  target_node = cgraph_get_node (target);

  /* Prefer the alias target over aliases, so we do not get confused by
     fake duplicates.  */
  if (target_node)
    {
      alias_target = cgraph_function_or_thunk_node (target_node, &avail);
      if (target_node != alias_target
          && avail >= AVAIL_AVAILABLE
          && cgraph_function_body_availability (target_node))
        target_node = alias_target;
    }

  /* A method can only be called by a polymorphic call if any
     of the vtables referring to it are alive.

     While this holds for non-anonymous functions, too, there are
     cases where we want to keep them in the list; for example
     inline functions with -fno-weak are static, but we still
     may devirtualize them when the instance comes from other unit.
     The same holds for LTO.

     Currently we ignore these functions in speculative devirtualization.
     ??? Maybe it would make sense to be more aggressive for LTO even
     elsewhere.  */
  if (!flag_ltrans
      && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
      && (!target_node
          || !referenced_from_vtable_p (target_node)))
    ;
  /* See if TARGET is a useful function we can deal with.  */
  else if (target_node != NULL
           && (TREE_PUBLIC (target)
               || DECL_EXTERNAL (target)
               || target_node->definition)
           && symtab_real_symbol_p (target_node))
    {
      gcc_assert (!target_node->global.inlined_to);
      gcc_assert (symtab_real_symbol_p (target_node));
      if (!pointer_set_insert (inserted, target_node->decl))
        {
          pointer_set_insert (cached_polymorphic_call_targets,
                              target_node);
          nodes.safe_push (target_node);
        }
    }
  else if (completep
           && (!type_in_anonymous_namespace_p
                 (DECL_CONTEXT (target))
               || flag_ltrans))
    *completep = false;
}

/* See if BINFO's type matches OUTER_TYPE.  If so, look up the
   BINFO of the subtype of OTR_TYPE at OFFSET; in that BINFO find
   the method in the vtable and insert it into the NODES array,
   or into BASES_TO_CONSIDER if that array is non-NULL.
   Otherwise recurse to base BINFOs.
   This matches what get_binfo_at_offset does, but with the offset
   being unknown.

   TYPE_BINFOS is a stack of BINFOs of types with defined
   virtual table seen on the way from the class type to BINFO.

   MATCHED_VTABLES tracks virtual tables we already looked the
   virtual function up in.  INSERTED tracks nodes we already
   inserted.

   ANONYMOUS is true if BINFO is part of anonymous namespace.

   Clear COMPLETEP when we hit an unreferable target.  */

static void
record_target_from_binfo (vec <cgraph_node *> &nodes,
                          vec <tree> *bases_to_consider,
                          tree binfo,
                          tree otr_type,
                          vec <tree> &type_binfos,
                          HOST_WIDE_INT otr_token,
                          tree outer_type,
                          HOST_WIDE_INT offset,
                          pointer_set_t *inserted,
                          pointer_set_t *matched_vtables,
                          bool anonymous,
                          bool *completep)
{
  tree type = BINFO_TYPE (binfo);
  int i;
  tree base_binfo;

  if (BINFO_VTABLE (binfo))
    type_binfos.safe_push (binfo);
  if (types_same_for_odr (type, outer_type))
    {
      int i;
      tree type_binfo = NULL;

      /* Look up BINFO with virtual table.  For normal types it is always
         the last binfo on the stack.  */
      for (i = type_binfos.length () - 1; i >= 0; i--)
        if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
          {
            type_binfo = type_binfos[i];
            break;
          }
      if (BINFO_VTABLE (binfo))
        type_binfos.pop ();
      /* If this is a duplicated BINFO for a base shared by virtual
         inheritance, we may not have its associated vtable.  This is not
         a problem, since we will walk it on the other path.  */
      if (!type_binfo)
        return;
      tree inner_binfo = get_binfo_at_offset (type_binfo,
                                              offset, otr_type);
      if (!inner_binfo)
        {
          gcc_assert (odr_violation_reported);
          return;
        }
      /* For types in anonymous namespace first check if the respective vtable
         is alive.  If not, we know the type can't be called.  */
      if (!flag_ltrans && anonymous)
        {
          tree vtable = BINFO_VTABLE (inner_binfo);
          varpool_node *vnode;

          if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
            vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
          vnode = varpool_get_node (vtable);
          if (!vnode || !vnode->definition)
            return;
        }
      gcc_assert (inner_binfo);
      if (bases_to_consider
          ? !pointer_set_contains (matched_vtables, BINFO_VTABLE (inner_binfo))
          : !pointer_set_insert (matched_vtables, BINFO_VTABLE (inner_binfo)))
        {
          bool can_refer;
          tree target = gimple_get_virt_method_for_binfo (otr_token,
                                                          inner_binfo,
                                                          &can_refer);
          if (!bases_to_consider)
            maybe_record_node (nodes, target, inserted, can_refer, completep);
          /* Destructors are never called via construction vtables.  */
          else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
            bases_to_consider->safe_push (target);
        }
      return;
    }

  /* Walk bases.  */
  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    /* Walking bases that have no virtual method is a pointless exercise.  */
    if (polymorphic_type_binfo_p (base_binfo))
      record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
                                type_binfos,
                                otr_token, outer_type, offset, inserted,
                                matched_vtables, anonymous, completep);
  if (BINFO_VTABLE (binfo))
    type_binfos.pop ();
}

/* Look up virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
   of TYPE, insert them into NODES, recurse into derived nodes.
   INSERTED is used to avoid duplicate insertions of methods into NODES.
   MATCHED_VTABLES is used to avoid duplicate walking of vtables.
   Clear COMPLETEP if an unreferable target is found.

   If CONSIDER_CONSTRUCTION is true, record to BASES_TO_CONSIDER
   all cases where BASE_SKIPPED is true (because the base is
   an abstract class).  */

static void
possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
                                     pointer_set_t *inserted,
                                     pointer_set_t *matched_vtables,
                                     tree otr_type,
                                     odr_type type,
                                     HOST_WIDE_INT otr_token,
                                     tree outer_type,
                                     HOST_WIDE_INT offset,
                                     bool *completep,
                                     vec <tree> &bases_to_consider,
                                     bool consider_construction)
{
  tree binfo = TYPE_BINFO (type->type);
  unsigned int i;
  vec <tree> type_binfos = vNULL;
  bool possibly_instantiated = type_possibly_instantiated_p (type->type);

  /* We may need to consider types w/o instances because of possible derived
     types using their methods either directly or via construction vtables.
     We are safe to skip them when all derivations are known, since we will
     handle them later.
     This is done by recording them to the BASES_TO_CONSIDER array.  */
  if (possibly_instantiated || consider_construction)
    {
      record_target_from_binfo (nodes,
                                (!possibly_instantiated
                                 && type_all_derivations_known_p (type->type))
                                ? &bases_to_consider : NULL,
                                binfo, otr_type, type_binfos, otr_token,
                                outer_type, offset,
                                inserted, matched_vtables,
                                type->anonymous_namespace, completep);
    }
  type_binfos.release ();
  for (i = 0; i < type->derived_types.length (); i++)
    possible_polymorphic_call_targets_1 (nodes, inserted,
                                         matched_vtables,
                                         otr_type,
                                         type->derived_types[i],
                                         otr_token, outer_type, offset,
                                         completep, bases_to_consider,
                                         consider_construction);
}

/* Cache of queries for polymorphic call targets.

   Enumerating all call targets may get expensive when there are many
   polymorphic calls in the program, so we memoize all the previous
   queries and avoid duplicated work.  */

struct polymorphic_call_target_d
{
  HOST_WIDE_INT otr_token;
  ipa_polymorphic_call_context context;
  odr_type type;
  vec <cgraph_node *> targets;
  int nonconstruction_targets;
  bool complete;
};

/* Polymorphic call target cache helpers.  */

struct polymorphic_call_target_hasher
{
  typedef polymorphic_call_target_d value_type;
  typedef polymorphic_call_target_d compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Return the computed hashcode for ODR_QUERY.  */

inline hashval_t
polymorphic_call_target_hasher::hash (const value_type *odr_query)
{
  hashval_t hash;

  hash = iterative_hash_host_wide_int
          (odr_query->otr_token,
           odr_query->type->id);
  hash = iterative_hash_hashval_t (TYPE_UID (odr_query->context.outer_type),
                                   hash);
  hash = iterative_hash_host_wide_int (odr_query->context.offset, hash);
  return iterative_hash_hashval_t
          (((int)odr_query->context.maybe_in_construction << 1)
           | (int)odr_query->context.maybe_derived_type, hash);
}

/* Compare cache entries T1 and T2.  */

inline bool
polymorphic_call_target_hasher::equal (const value_type *t1,
                                       const compare_type *t2)
{
  return (t1->type == t2->type && t1->otr_token == t2->otr_token
          && t1->context.offset == t2->context.offset
          && t1->context.outer_type == t2->context.outer_type
          && t1->context.maybe_in_construction
             == t2->context.maybe_in_construction
          && t1->context.maybe_derived_type == t2->context.maybe_derived_type);
}

/* Remove entry in polymorphic call target cache hash.  */

inline void
polymorphic_call_target_hasher::remove (value_type *v)
{
  v->targets.release ();
  free (v);
}

/* Polymorphic call target query cache.  */

typedef hash_table<polymorphic_call_target_hasher>
   polymorphic_call_target_hash_type;
static polymorphic_call_target_hash_type *polymorphic_call_target_hash;

/* Destroy polymorphic call target query cache.  */

static void
free_polymorphic_call_targets_hash ()
{
  if (cached_polymorphic_call_targets)
    {
      delete polymorphic_call_target_hash;
      polymorphic_call_target_hash = NULL;
      pointer_set_destroy (cached_polymorphic_call_targets);
      cached_polymorphic_call_targets = NULL;
    }
}

/* When a virtual function is removed, we may need to flush the cache.  */

static void
devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
{
  if (cached_polymorphic_call_targets
      && pointer_set_contains (cached_polymorphic_call_targets, n))
    free_polymorphic_call_targets_hash ();
}

/* Return true when TYPE contains a polymorphic type and thus is interesting
   for devirtualization machinery.  */

bool
contains_polymorphic_type_p (const_tree type)
{
  type = TYPE_MAIN_VARIANT (type);

  if (RECORD_OR_UNION_TYPE_P (type))
    {
      if (TYPE_BINFO (type)
          && polymorphic_type_binfo_p (TYPE_BINFO (type)))
        return true;
      for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
        if (TREE_CODE (fld) == FIELD_DECL
            && !DECL_ARTIFICIAL (fld)
            && contains_polymorphic_type_p (TREE_TYPE (fld)))
          return true;
      return false;
    }
  if (TREE_CODE (type) == ARRAY_TYPE)
    return contains_polymorphic_type_p (TREE_TYPE (type));
  return false;
}

/* CONTEXT->OUTER_TYPE is a type of memory object where object of
   EXPECTED_TYPE is contained at CONTEXT->OFFSET.  Walk the memory
   representation of CONTEXT->OUTER_TYPE and find the outermost class type
   that matches EXPECTED_TYPE or contains EXPECTED_TYPE as a base.  Update
   CONTEXT to represent it.

   For example when CONTEXT represents type
   class A
     {
       int a;
       class B b;
     }
   and we look for the type at offset sizeof(int), we end up with B and
   offset 0.  If the same is produced by multiple inheritance, we end up
   with A and offset sizeof(int).

   If we can not find the corresponding class, give up by setting
   CONTEXT->OUTER_TYPE to EXPECTED_TYPE and CONTEXT->OFFSET to 0.
   Return true when the lookup was successful.  */

static bool
get_class_context (ipa_polymorphic_call_context *context,
                   tree expected_type)
{
  tree type = context->outer_type;
  HOST_WIDE_INT offset = context->offset;

  /* Find the sub-object the constant actually refers to and mark whether it
     is an artificial one (as opposed to a user-defined one).  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;

      /* On a match, just return what we found.  */
      if (TREE_CODE (type) == TREE_CODE (expected_type)
          && types_same_for_odr (type, expected_type))
        {
          /* A type can not contain itself on a non-zero offset.  In that
             case just give up.  */
          if (offset != 0)
            goto give_up;
          gcc_assert (offset == 0);
          return true;
        }

      /* Walk fields and find the corresponding one at OFFSET.  */
      if (TREE_CODE (type) == RECORD_TYPE)
        {
          for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
            {
              if (TREE_CODE (fld) != FIELD_DECL)
                continue;

              pos = int_bit_position (fld);
              size = tree_to_uhwi (DECL_SIZE (fld));
              if (pos <= offset && (pos + size) > offset)
                break;
            }

          if (!fld)
            goto give_up;

          type = TYPE_MAIN_VARIANT (TREE_TYPE (fld));
          offset -= pos;
          /* DECL_ARTIFICIAL represents a basetype.  */
          if (!DECL_ARTIFICIAL (fld))
            {
              context->outer_type = type;
              context->offset = offset;
              /* As soon as we see a field containing the type,
                 we know we are not looking for derivations.  */
              context->maybe_derived_type = false;
            }
        }
      else if (TREE_CODE (type) == ARRAY_TYPE)
        {
          tree subtype = TYPE_MAIN_VARIANT (TREE_TYPE (type));

          /* Give up if we don't know the array element size.  */
          if (!tree_fits_shwi_p (TYPE_SIZE (subtype))
              || tree_to_shwi (TYPE_SIZE (subtype)) <= 0)
            goto give_up;
          offset = offset % tree_to_shwi (TYPE_SIZE (subtype));
          type = subtype;
          context->outer_type = type;
          context->offset = offset;
          context->maybe_derived_type = false;
        }
      /* Give up on anything else.  */
      else
        goto give_up;
    }

  /* If we failed to find the subtype we look for, give up and fall back to
     the most generic query.  */
give_up:
  context->outer_type = expected_type;
  context->offset = 0;
  context->maybe_derived_type = true;
  context->maybe_in_construction = true;
  /* POD can be changed to an instance of a polymorphic type by
     placement new.  Here we play safe and assume that any
     non-polymorphic type is POD.  */
  if ((TREE_CODE (type) != RECORD_TYPE
       || !TYPE_BINFO (type)
       || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
      && (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
          || (offset + tree_to_uhwi (TYPE_SIZE (expected_type)) <=
              tree_to_uhwi (TYPE_SIZE (type)))))
    return true;
  return false;
}

/* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET.  */

static bool
contains_type_p (tree outer_type, HOST_WIDE_INT offset,
                 tree otr_type)
{
  ipa_polymorphic_call_context context = {offset,
                                          TYPE_MAIN_VARIANT (outer_type),
                                          false, true};
  return get_class_context (&context, otr_type);
}

/* Look up the base of BINFO that has virtual table VTABLE at OFFSET.  */

static tree
subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
                                tree vtable)
{
  tree v = BINFO_VTABLE (binfo);
  int i;
  tree base_binfo;
  unsigned HOST_WIDE_INT this_offset;

  if (v)
    {
      if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
        gcc_unreachable ();

      if (offset == this_offset
          && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
        return binfo;
    }

  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    if (polymorphic_type_binfo_p (base_binfo))
      {
        base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset,
                                                     vtable);
        if (base_binfo)
          return base_binfo;
      }
  return NULL;
}

/* T is a known constant value of a virtual table pointer.
   Store the virtual table to V and its offset to OFFSET.
   Return false if T does not look like a virtual table reference.  */

bool
vtable_pointer_value_to_vtable (const_tree t, tree *v,
                                unsigned HOST_WIDE_INT *offset)
{
  /* We expect &MEM[(void *)&virtual_table + 16B].
     We obtain object's BINFO from the context of the virtual table.
     This one contains pointer to virtual table represented via
     POINTER_PLUS_EXPR.  Verify that this pointer matches what
     we propagated through.

     In the case of virtual inheritance, the virtual tables may
     be nested, i.e. the offset may be different from 16 and we may
     need to dive into the type representation.  */
  if (TREE_CODE (t) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
          == VAR_DECL)
      && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
                                         (TREE_OPERAND (t, 0), 0), 0)))
    {
      *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
      *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
      return true;
    }

  /* An alternative representation, used by the C++ frontend, is
     POINTER_PLUS_EXPR.  We need to handle it when T comes from a static
     variable initializer or BINFO.  */
  if (TREE_CODE (t) == POINTER_PLUS_EXPR)
    {
      *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
      t = TREE_OPERAND (t, 0);
    }
  else
    *offset = 0;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;
  *v = TREE_OPERAND (t, 0);
  return true;
}

/* T is a known constant value of a virtual table pointer.  Return BINFO of
   the instance type.  */

tree
vtable_pointer_value_to_binfo (const_tree t)
{
  tree vtable;
  unsigned HOST_WIDE_INT offset;

  if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
    return NULL_TREE;

  /* FIXME: for stores of construction vtables we return NULL,
     because we do not have BINFO for those.  Eventually we should fix
     our representation to allow this case to be handled, too.
     If we see a store of a BINFO, however, we may assume
     that standard folding will be able to cope with it.  */
  return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
                                         offset, vtable);
}

/* We know that the instance is stored in a variable or parameter
   (not dynamically allocated) and we want to disprove the fact
   that it may be in construction at invocation of CALL.

   For the variable to be in construction we actually need to
   be in the constructor of the corresponding global variable, or
   the inline stack of CALL must contain the constructor.
   Check this condition.  This check works safely only before
   IPA passes, because inline stacks may become out of date
   later.  */
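
/* For instance (an illustrative sketch): in

     struct A { A () { f (); } virtual void f (); };
     static A a;

   the call to f () inlined from A's constructor dispatches on an object
   whose dynamic type is still A, even if types derived from A exist; the
   walk over the inline stack below is what detects such constructor (and
   destructor) contexts.  */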

bool
decl_maybe_in_construction_p (tree base, tree outer_type,
                              gimple call, tree function)
{
  outer_type = TYPE_MAIN_VARIANT (outer_type);
  gcc_assert (DECL_P (base));

  /* After inlining the code unification optimizations may invalidate
     inline stacks.  Also we need to give up on global variables after
     IPA, because addresses of these may have been propagated to their
     constructors.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;

  /* Pure functions can not do any changes on the dynamic type;
     that would require writing to memory.  */
  if (!auto_var_in_fn_p (base, function)
      && flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;

  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
       block = BLOCK_SUPERCONTEXT (block))
    if (BLOCK_ABSTRACT_ORIGIN (block)
        && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
      {
        tree fn = BLOCK_ABSTRACT_ORIGIN (block);

        if (TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
            || (!DECL_CXX_CONSTRUCTOR_P (fn)
                && !DECL_CXX_DESTRUCTOR_P (fn)))
          {
            /* Watch for clones where we constant propagated the first
               argument (pointer to the instance).  */
            fn = DECL_ABSTRACT_ORIGIN (fn);
            if (!fn
                || !is_global_var (base)
                || TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
                || (!DECL_CXX_CONSTRUCTOR_P (fn)
                    && !DECL_CXX_DESTRUCTOR_P (fn)))
              continue;
          }
        if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
          continue;

        /* FIXME: this can go away once we have ODR types equivalency on
           LTO level.  */
        if (in_lto_p && !polymorphic_type_binfo_p (TYPE_BINFO (outer_type)))
          return true;
        tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (fn)));
        if (types_same_for_odr (type, outer_type))
          return true;
      }

  if (TREE_CODE (base) == VAR_DECL
      && is_global_var (base))
    {
      if (TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
          || (!DECL_CXX_CONSTRUCTOR_P (function)
              && !DECL_CXX_DESTRUCTOR_P (function)))
        {
          if (!DECL_ABSTRACT_ORIGIN (function))
            return false;
          /* Watch for clones where we constant propagated the first
             argument (pointer to the instance).  */
          function = DECL_ABSTRACT_ORIGIN (function);
          if (!function
              || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
              || (!DECL_CXX_CONSTRUCTOR_P (function)
                  && !DECL_CXX_DESTRUCTOR_P (function)))
            return false;
        }
      /* FIXME: this can go away once we have ODR types equivalency on
         LTO level.  */
      if (in_lto_p && !polymorphic_type_binfo_p (TYPE_BINFO (outer_type)))
        return true;
      tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (function)));
      if (types_same_for_odr (type, outer_type))
        return true;
    }
  return false;
}

/* Produce polymorphic call context for a call to a method of an instance
   that is located within BASE (that is assumed to be a decl) at OFFSET.  */

static void
get_polymorphic_call_info_for_decl (ipa_polymorphic_call_context *context,
                                    tree base, HOST_WIDE_INT offset)
{
  gcc_assert (DECL_P (base));

  context->outer_type = TYPE_MAIN_VARIANT (TREE_TYPE (base));
  context->offset = offset;
  /* Make the very conservative assumption that all objects
     may be in construction.
     TODO: ipa-prop already contains code to tell better.
     Merge it later.  */
  context->maybe_in_construction = true;
  context->maybe_derived_type = false;
}

/* CST is an invariant (address of decl); try to get a meaningful
   polymorphic call context for a polymorphic call to a method
   of an instance of OTR_TYPE that is located at OFFSET of this invariant.
   Return FALSE if nothing meaningful can be found.  */

bool
get_polymorphic_call_info_from_invariant (ipa_polymorphic_call_context *context,
                                          tree cst,
                                          tree otr_type,
                                          HOST_WIDE_INT offset)
{
  HOST_WIDE_INT offset2, size, max_size;
  tree base;

  if (TREE_CODE (cst) != ADDR_EXPR)
    return false;

  cst = TREE_OPERAND (cst, 0);
  base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
  if (!DECL_P (base) || max_size == -1 || max_size != size)
    return false;

  /* Only type inconsistent programs can have otr_type that is
     not part of outer type.  */
  if (!contains_type_p (TREE_TYPE (base), offset, otr_type))
    return false;

  get_polymorphic_call_info_for_decl (context, base, offset);
  return true;
}
1609
1610 /* Given REF call in FNDECL, determine class of the polymorphic
1611 call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
1612 CALL is optional argument giving the actual statement (usually call) where
1613 the context is used.
1614 Return pointer to object described by the context */
1615
1616 tree
1617 get_polymorphic_call_info (tree fndecl,
1618 tree ref,
1619 tree *otr_type,
1620 HOST_WIDE_INT *otr_token,
1621 ipa_polymorphic_call_context *context,
1622 gimple call)
1623 {
1624 tree base_pointer;
1625 *otr_type = obj_type_ref_class (ref);
1626 *otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));
1627
1628 /* Set up basic info in case we find nothing interesting in the analysis. */
1629 context->outer_type = TYPE_MAIN_VARIANT (*otr_type);
1630 context->offset = 0;
1631 base_pointer = OBJ_TYPE_REF_OBJECT (ref);
1632 context->maybe_derived_type = true;
1633 context->maybe_in_construction = true;
1634
1635 /* Walk SSA for outer object. */
1636 do
1637 {
1638 if (TREE_CODE (base_pointer) == SSA_NAME
1639 && !SSA_NAME_IS_DEFAULT_DEF (base_pointer)
1640 && SSA_NAME_DEF_STMT (base_pointer)
1641 && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
1642 {
1643 base_pointer = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (base_pointer));
1644 STRIP_NOPS (base_pointer);
1645 }
1646 else if (TREE_CODE (base_pointer) == ADDR_EXPR)
1647 {
1648 HOST_WIDE_INT size, max_size;
1649 HOST_WIDE_INT offset2;
1650 tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
1651 &offset2, &size, &max_size);
1652
1653 /* If this is a varying address, punt. */
1654 if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
1655 && max_size != -1
1656 && max_size == size)
1657 {
1658 /* We found dereference of a pointer. Type of the pointer
1659 and MEM_REF is meaningless, but we can look futher. */
1660 if (TREE_CODE (base) == MEM_REF)
1661 {
1662 base_pointer = TREE_OPERAND (base, 0);
1663 context->offset
1664 += offset2 + mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
1665 context->outer_type = NULL;
1666 }
1667 /* We found base object. In this case the outer_type
1668 is known. */
1669 else if (DECL_P (base))
1670 {
1671 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (base)));
1672
1673 /* Only type inconsistent programs can have otr_type that is
1674 not part of outer type. */
1675 if (!contains_type_p (TREE_TYPE (base),
1676 context->offset + offset2, *otr_type))
1677 {
1678 /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
1679 code sequences; we arrange the calls to be builtin_unreachable
1680 later. */
1681 *otr_token = INT_MAX;
1682 return base_pointer;
1683 }
1684 get_polymorphic_call_info_for_decl (context, base,
1685 context->offset + offset2);
1686 if (context->maybe_in_construction && call)
1687 context->maybe_in_construction
1688 = decl_maybe_in_construction_p (base,
1689 context->outer_type,
1690 call,
1691 current_function_decl);
1692 return NULL;
1693 }
1694 else
1695 break;
1696 }
1697 else
1698 break;
1699 }
1700 else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
1701 && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
1702 {
1703 context->offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
1704 * BITS_PER_UNIT;
1705 base_pointer = TREE_OPERAND (base_pointer, 0);
1706 }
1707 else
1708 break;
1709 }
1710 while (true);
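
/* For instance (a hypothetical GIMPLE sequence; SSA names made up), for
   a call through p_2 where

	p_2 = &r_1->fld;	<- ADDR_EXPR over a MEM_REF base; the bit
				   offset of fld is added to context->offset
				   and the walk continues from r_1
	r_1 = q_0(D);		<- q_0 is a default definition, so the
				   loop stops there

   the code below may then recognize q_0 as the THIS parameter of a
   method or as a by-reference aggregate argument.  */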
1711
1712 /* Try to determine the type of the outer object. */
1713 if (TREE_CODE (base_pointer) == SSA_NAME
1714 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
1715 && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
1716 {
1717 /* See if the parameter is the THIS pointer of a method. */
1718 if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
1719 && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
1720 {
1721 context->outer_type
1722 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
1723 gcc_assert (TREE_CODE (context->outer_type) == RECORD_TYPE);
1724
1725 /* Dynamic casting has possibly upcasted the type
1726 in the hierarchy. In this case the outer type is less
1727 informative than the inner type and we should forget
1728 about it. */
1729 if (!contains_type_p (context->outer_type, context->offset,
1730 *otr_type))
1731 {
1732 context->outer_type = NULL;
1733 return base_pointer;
1734 }
1735
1736 /* If the function is a constructor or destructor, then
1737 the type is possibly in construction, but we know
1738 it is not a derived type. */
1739 if (DECL_CXX_CONSTRUCTOR_P (fndecl)
1740 || DECL_CXX_DESTRUCTOR_P (fndecl))
1741 {
1742 context->maybe_in_construction = true;
1743 context->maybe_derived_type = false;
1744 }
1745 else
1746 {
1747 context->maybe_derived_type = true;
1748 context->maybe_in_construction = false;
1749 }
1750 return base_pointer;
1751 }
1752 /* Non-PODs passed by value are really passed by invisible
1753 reference. In this case we also know the type of the
1754 object. */
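
/* For example (hypothetical):

	struct S { S (const S &); virtual void f (); };
	void g (S s) { s.f (); }

   S is not a POD because of the copy constructor, so s arrives by
   invisible reference and DECL_BY_REFERENCE is set on the parameter;
   the pointed-to type then gives the object type directly.  */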
1755 if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
1756 {
1757 context->outer_type
1758 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
1759 gcc_assert (!POINTER_TYPE_P (context->outer_type));
1760 /* Only type inconsistent programs can have otr_type that is
1761 not part of outer type. */
1762 if (!contains_type_p (context->outer_type, context->offset,
1763 *otr_type))
1764 {
1765 /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
1766 code sequences; we arrange the calls to be builtin_unreachable
1767 later. */
1768 *otr_token = INT_MAX;
1769 return base_pointer;
1770 }
1771 context->maybe_derived_type = false;
1772 context->maybe_in_construction = false;
1773 return base_pointer;
1774 }
1775 }
1776 /* TODO: There are multiple ways to derive a type. For instance
1777 BASE_POINTER may be passed to a constructor call prior to our reference.
1778 We do not do this kind of flow-sensitive analysis yet. */
1779 return base_pointer;
1780 }
1781
1782 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
1783 Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
1784 and insert them into NODES.
1785
1786 MATCHED_VTABLES and INSERTED are used to avoid duplicate work. */
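
/* A sketch of why this walk matters (hypothetical C++ source): in

	struct B { virtual void f (); B (); };
	struct D : B { virtual void f (); };

   while an object of type D is being constructed, B::B () runs with the
   vtable pointer temporarily set to B's (construction) virtual table,
   so a virtual call made during that window dispatches to B::f rather
   than D::f. Walking the bases records such targets.  */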
1787
1788 static void
1789 record_targets_from_bases (tree otr_type,
1790 HOST_WIDE_INT otr_token,
1791 tree outer_type,
1792 HOST_WIDE_INT offset,
1793 vec <cgraph_node *> &nodes,
1794 pointer_set_t *inserted,
1795 pointer_set_t *matched_vtables,
1796 bool *completep)
1797 {
1798 while (true)
1799 {
1800 HOST_WIDE_INT pos, size;
1801 tree base_binfo;
1802 tree fld;
1803
1804 if (types_same_for_odr (outer_type, otr_type))
1805 return;
1806
1807 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
1808 {
1809 if (TREE_CODE (fld) != FIELD_DECL)
1810 continue;
1811
1812 pos = int_bit_position (fld);
1813 size = tree_to_shwi (DECL_SIZE (fld));
1814 if (pos <= offset && (pos + size) > offset
1815 /* Do not get confused by zero sized bases. */
1816 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
1817 break;
1818 }
1819 /* Within a class type we should always find a corresponding field. */
1820 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
1821
1822 /* Non-base types should have been stripped by outer_class_type. */
1823 gcc_assert (DECL_ARTIFICIAL (fld));
1824
1825 outer_type = TREE_TYPE (fld);
1826 offset -= pos;
1827
1828 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
1829 offset, otr_type);
1830 if (!base_binfo)
1831 {
1832 gcc_assert (odr_violation_reported);
1833 return;
1834 }
1836 if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo)))
1837 {
1838 bool can_refer;
1839 tree target = gimple_get_virt_method_for_binfo (otr_token,
1840 base_binfo,
1841 &can_refer);
1842 if (!target || !DECL_CXX_DESTRUCTOR_P (target))
1843 maybe_record_node (nodes, target, inserted, can_refer, completep);
1844 pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo));
1845 }
1846 }
1847 }
1848
1849 /* When a virtual table is removed, we may need to flush the cache. */
1850
1851 static void
1852 devirt_variable_node_removal_hook (varpool_node *n,
1853 void *d ATTRIBUTE_UNUSED)
1854 {
1855 if (cached_polymorphic_call_targets
1856 && DECL_VIRTUAL_P (n->decl)
1857 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
1858 free_polymorphic_call_targets_hash ();
1859 }
1860
1861 /* Return a vector containing possible targets of a polymorphic call of type
1862 OTR_TYPE calling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
1863 If INCLUDE_BASES is true, also walk base types of OUTER_TYPE containing
1864 OTR_TYPE and include their virtual methods. This is useful for types
1865 possibly in construction or destruction where the virtual table may
1866 temporarily change to one of the base types. INCLUDE_DERIVED_TYPES makes
1867 us walk the inheritance graph for all derivations.
1868
1869 OTR_TOKEN == INT_MAX is used to mark calls that are provably
1870 undefined and should be redirected to unreachable.
1871
1872 If COMPLETEP is non-NULL, store true if the list is complete.
1873 CACHE_TOKEN (if non-NULL) will be set to a unique ID of the entry
1874 in the target cache. If the caller needs to visit every target list
1875 just once, it can use this to memoize them.
1876
1877 NONCONSTRUCTION_TARGETS specifies the number of targets under the assumption
1878 that the type is not in construction. Those targets appear first in the
1879 vector returned.
1880
1881 The returned vector is placed into the cache. It is NOT the caller's
1882 responsibility to free it. The vector can be freed on a cgraph_remove_node
1883 call if the particular node is a virtual function present in the cache. */
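
/* A minimal usage sketch (hypothetical caller; variable names made up):

	bool final;
	vec <cgraph_node *> targets
	  = possible_polymorphic_call_targets (otr_type, otr_token,
					       context, &final);
	if (final && targets.length () == 1)
	  ... the call has a single possible target and may be
	      devirtualized to targets[0] ...

   The returned vector belongs to the cache and must not be freed.  */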
1884
1885 vec <cgraph_node *>
1886 possible_polymorphic_call_targets (tree otr_type,
1887 HOST_WIDE_INT otr_token,
1888 ipa_polymorphic_call_context context,
1889 bool *completep,
1890 void **cache_token,
1891 int *nonconstruction_targetsp)
1892 {
1893 static struct cgraph_node_hook_list *node_removal_hook_holder;
1894 pointer_set_t *inserted;
1895 pointer_set_t *matched_vtables;
1896 vec <cgraph_node *> nodes = vNULL;
1897 vec <tree> bases_to_consider = vNULL;
1898 odr_type type, outer_type;
1899 polymorphic_call_target_d key;
1900 polymorphic_call_target_d **slot;
1901 unsigned int i;
1902 tree binfo, target;
1903 bool complete;
1904 bool can_refer;
1905 bool skipped = false;
1906
1907 otr_type = TYPE_MAIN_VARIANT (otr_type);
1908
1909 /* If ODR is not initialized, return an empty, incomplete list. */
1910 if (!odr_hash)
1911 {
1912 if (completep)
1913 *completep = false;
1914 if (cache_token)
1915 *cache_token = NULL;
1916 if (nonconstruction_targetsp)
1917 *nonconstruction_targetsp = 0;
1918 return nodes;
1919 }
1920
1921 /* If we hit type inconsistency, just return an empty list of targets. */
1922 if (otr_token == INT_MAX)
1923 {
1924 if (completep)
1925 *completep = true;
1926 if (cache_token)
1927 *cache_token = NULL;
1928 if (nonconstruction_targetsp)
1929 *nonconstruction_targetsp = 0;
1930 return nodes;
1931 }
1932
1933 type = get_odr_type (otr_type, true);
1934
1935 /* Recording type variants would waste the results cache. */
1936 gcc_assert (!context.outer_type
1937 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
1938
1939 /* Lookup the outer class type we want to walk. */
1940 if (context.outer_type
1941 && !get_class_context (&context, otr_type))
1942 {
1943 if (completep)
1944 *completep = false;
1945 if (cache_token)
1946 *cache_token = NULL;
1947 if (nonconstruction_targetsp)
1948 *nonconstruction_targetsp = 0;
1949 return nodes;
1950 }
1951
1952 /* Check that get_class_context kept the main variant. */
1953 gcc_assert (!context.outer_type
1954 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
1955
1956 /* We canonicalize our query, so we do not need extra hashtable entries. */
1957
1958 /* Without an outer type, we have no use for the offset. Just do the
1959 basic search from the inner type. */
1960 if (!context.outer_type)
1961 {
1962 context.outer_type = otr_type;
1963 context.offset = 0;
1964 }
1965 /* We need to update our hierarchy if the type does not yet exist in it. */
1966 outer_type = get_odr_type (context.outer_type, true);
1967 /* If the type is final, there are no derivations. */
1968 if (TYPE_FINAL_P (outer_type->type))
1969 context.maybe_derived_type = false;
1970
1971 /* Initialize query cache. */
1972 if (!cached_polymorphic_call_targets)
1973 {
1974 cached_polymorphic_call_targets = pointer_set_create ();
1975 polymorphic_call_target_hash
1976 = new polymorphic_call_target_hash_type (23);
1977 if (!node_removal_hook_holder)
1978 {
1979 node_removal_hook_holder =
1980 cgraph_add_node_removal_hook (&devirt_node_removal_hook, NULL);
1981 varpool_add_node_removal_hook (&devirt_variable_node_removal_hook,
1982 NULL);
1983 }
1984 }
1985
1986 /* Lookup cached answer. */
1987 key.type = type;
1988 key.otr_token = otr_token;
1989 key.context = context;
1990 slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
1991 if (cache_token)
1992 *cache_token = (void *)*slot;
1993 if (*slot)
1994 {
1995 if (completep)
1996 *completep = (*slot)->complete;
1997 if (nonconstruction_targetsp)
1998 *nonconstruction_targetsp = (*slot)->nonconstruction_targets;
1999 return (*slot)->targets;
2000 }
2001
2002 complete = true;
2003
2004 /* Do actual search. */
2005 timevar_push (TV_IPA_VIRTUAL_CALL);
2006 *slot = XCNEW (polymorphic_call_target_d);
2007 if (cache_token)
2008 *cache_token = (void *)*slot;
2009 (*slot)->type = type;
2010 (*slot)->otr_token = otr_token;
2011 (*slot)->context = context;
2012
2013 inserted = pointer_set_create ();
2014 matched_vtables = pointer_set_create ();
2015
2016 /* First see the virtual method of the type itself. */
2017 binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
2018 context.offset, otr_type);
2019 if (binfo)
2020 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
2021 &can_refer);
2022 else
2023 {
2024 gcc_assert (odr_violation_reported);
2025 target = NULL;
2026 }
2027
2028 /* Destructors are never called through construction virtual tables,
2029 because the type is always known. */
2030 if (target && DECL_CXX_DESTRUCTOR_P (target))
2031 context.maybe_in_construction = false;
2032
2033 if (target)
2034 {
2035 /* In case we get a final method, we don't need
2036 to walk derivations. */
2037 if (DECL_FINAL_P (target))
2038 context.maybe_derived_type = false;
2039 }
2040
2041 /* If OUTER_TYPE is abstract, we know we are not seeing an instance of it. */
2042 if (type_possibly_instantiated_p (outer_type->type))
2043 maybe_record_node (nodes, target, inserted, can_refer, &complete);
2044 else
2045 {
2046 skipped = true;
2047 gcc_assert (in_lto_p || context.maybe_derived_type);
2048 }
2049
2050 if (binfo)
2051 pointer_set_insert (matched_vtables, BINFO_VTABLE (binfo));
2052
2053 /* Next walk recursively all derived types. */
2054 if (context.maybe_derived_type)
2055 {
2056 /* For anonymous namespace types we can attempt to build the full type.
2057 All derivations must be in this unit (unless we see a partial unit). */
2058 if (!type->all_derivations_known)
2059 complete = false;
2060 for (i = 0; i < outer_type->derived_types.length(); i++)
2061 possible_polymorphic_call_targets_1 (nodes, inserted,
2062 matched_vtables,
2063 otr_type,
2064 outer_type->derived_types[i],
2065 otr_token, outer_type->type,
2066 context.offset, &complete,
2067 bases_to_consider,
2068 context.maybe_in_construction);
2069 }
2070
2071 /* Record the number of targets before bases are walked below. */
2072 (*slot)->nonconstruction_targets = nodes.length();
2073
2074 /* Destructors are never called through construction virtual tables,
2075 because the type is always known. One of the entries may be
2076 __cxa_pure_virtual, so look at the first two of them. */
2077 if (context.maybe_in_construction)
2078 for (i = 0; i < MIN (nodes.length (), 2); i++)
2079 if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
2080 context.maybe_in_construction = false;
2081 if (context.maybe_in_construction)
2082 {
2083 if (type != outer_type
2084 && (!skipped
2085 || (context.maybe_derived_type
2086 && !type_all_derivations_known_p (outer_type->type))))
2087 record_targets_from_bases (otr_type, otr_token, outer_type->type,
2088 context.offset, nodes, inserted,
2089 matched_vtables, &complete);
2090 if (skipped)
2091 maybe_record_node (nodes, target, inserted, can_refer, &complete);
2092 for (i = 0; i < bases_to_consider.length(); i++)
2093 maybe_record_node (nodes, bases_to_consider[i], inserted, can_refer, &complete);
2094 }
2095 bases_to_consider.release();
2096
2097 (*slot)->targets = nodes;
2098 (*slot)->complete = complete;
2099 if (completep)
2100 *completep = complete;
2101 if (nonconstruction_targetsp)
2102 *nonconstruction_targetsp = (*slot)->nonconstruction_targets;
2103
2104 pointer_set_destroy (inserted);
2105 pointer_set_destroy (matched_vtables);
2106 timevar_pop (TV_IPA_VIRTUAL_CALL);
2107 return nodes;
2108 }
2109
2110 /* Dump all possible targets of a polymorphic call. */
2111
2112 void
2113 dump_possible_polymorphic_call_targets (FILE *f,
2114 tree otr_type,
2115 HOST_WIDE_INT otr_token,
2116 const ipa_polymorphic_call_context &ctx)
2117 {
2118 vec <cgraph_node *> targets;
2119 bool final;
2120 odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false);
2121 unsigned int i;
2122 int nonconstruction;
2123
2124 if (!type)
2125 return;
2126 targets = possible_polymorphic_call_targets (otr_type, otr_token,
2127 ctx,
2128 &final, NULL, &nonconstruction);
2129 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
2130 print_generic_expr (f, type->type, TDF_SLIM);
2131 fprintf (f, " token %i\n", (int)otr_token);
2132 if (ctx.outer_type || ctx.offset)
2133 {
2134 fprintf (f, " Contained in type:");
2135 print_generic_expr (f, ctx.outer_type, TDF_SLIM);
2136 fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n",
2137 ctx.offset);
2138 }
2139
2140 fprintf (f, " %s%s%s\n ",
2141 final ? "This is a complete list." :
2142 "This is partial list; extra targets may be defined in other units.",
2143 ctx.maybe_in_construction ? " (base types included)" : "",
2144 ctx.maybe_derived_type ? " (derived types included)" : "");
2145 for (i = 0; i < targets.length (); i++)
2146 {
2147 char *name = NULL;
2148 if (i == (unsigned)nonconstruction)
2149 fprintf (f, "\n If the type is in construction,"
2150 " then additional tarets are:\n"
2151 " ");
2152 if (in_lto_p)
2153 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
2154 fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
2155 if (in_lto_p)
2156 free (name);
2157 if (!targets[i]->definition)
2158 fprintf (f, " (no definition%s)",
2159 DECL_DECLARED_INLINE_P (targets[i]->decl)
2160 ? " inline" : "");
2161 }
2162 fprintf (f, "\n\n");
2163 }
2164
2165
2166 /* Return true if N can possibly be a target of a polymorphic call of
2167 OTR_TYPE/OTR_TOKEN. */
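
/* A minimal usage sketch (hypothetical caller):

	if (!possible_polymorphic_call_target_p (otr_type, otr_token,
						 context, node))
	  ... NODE contradicts the type analysis; a speculated or
	      streamed-in target can be rejected ...  */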
2168
2169 bool
2170 possible_polymorphic_call_target_p (tree otr_type,
2171 HOST_WIDE_INT otr_token,
2172 const ipa_polymorphic_call_context &ctx,
2173 struct cgraph_node *n)
2174 {
2175 vec <cgraph_node *> targets;
2176 unsigned int i;
2177 enum built_in_function fcode;
2178 bool final;
2179
2180 if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
2181 && ((fcode = DECL_FUNCTION_CODE (n->decl))
2182 == BUILT_IN_UNREACHABLE
2183 || fcode == BUILT_IN_TRAP))
2184 return true;
2185
2186 if (!odr_hash)
2187 return true;
2188 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
2189 for (i = 0; i < targets.length (); i++)
2190 if (symtab_semantically_equivalent_p (n, targets[i]))
2191 return true;
2192
2193 /* At the moment we allow the middle end to dig out new external declarations
2194 as targets of polymorphic calls. */
2195 if (!final && !n->definition)
2196 return true;
2197 return false;
2198 }
2199
2200
2201 /* After callgraph construction new external nodes may appear.
2202 Add them into the graph. */
2203
2204 void
2205 update_type_inheritance_graph (void)
2206 {
2207 struct cgraph_node *n;
2208
2209 if (!odr_hash)
2210 return;
2211 free_polymorphic_call_targets_hash ();
2212 timevar_push (TV_IPA_INHERITANCE);
2213 /* We reconstruct the graph starting from types of all methods seen in
2214 the unit. */
2215 FOR_EACH_FUNCTION (n)
2216 if (DECL_VIRTUAL_P (n->decl)
2217 && !n->definition
2218 && symtab_real_symbol_p (n))
2219 get_odr_type (method_class_type (TYPE_MAIN_VARIANT (TREE_TYPE (n->decl))),
2220 true);
2221 timevar_pop (TV_IPA_INHERITANCE);
2222 }
2223
2224
2225 /* Return true if N looks like a likely target of a polymorphic call.
2226 Rule out __cxa_pure_virtual, noreturns, functions declared cold, and
2227 other obvious cases. */
2228
2229 bool
2230 likely_target_p (struct cgraph_node *n)
2231 {
2232 int flags;
2233 /* __cxa_pure_virtual and similar things are not likely. */
2234 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
2235 return false;
2236 flags = flags_from_decl_or_type (n->decl);
2237 if (flags & ECF_NORETURN)
2238 return false;
2239 if (lookup_attribute ("cold",
2240 DECL_ATTRIBUTES (n->decl)))
2241 return false;
2242 if (n->frequency < NODE_FREQUENCY_NORMAL)
2243 return false;
2244 /* If there are no live virtual tables referring to the target,
2245 the only way the target can be called is via an instance coming from
2246 another compilation unit; speculative devirtualization is built around
2247 the assumption that this won't happen. */
2248 if (!referenced_from_vtable_p (n))
2249 return false;
2250 return true;
2251 }
2252
2253 /* The ipa-devirt pass.
2254 When a polymorphic call has only one likely target in the unit,
2255 turn it into a speculative call. */
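
/* An illustrative sketch of the transformation (source-level pseudocode;
   the real rewrite happens on GIMPLE and the call graph): a hot call

	ptr->foo ();

   whose only likely target is B::foo becomes roughly

	if (loaded vtable slot == &B::foo)
	  B::foo (ptr);		// direct call; can be inlined
	else
	  ptr->foo ();		// fallback indirect call  */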
2256
2257 static unsigned int
2258 ipa_devirt (void)
2259 {
2260 struct cgraph_node *n;
2261 struct pointer_set_t *bad_call_targets = pointer_set_create ();
2262 struct cgraph_edge *e;
2263
2264 int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
2265 int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
2266 int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
2267
2268 FOR_EACH_DEFINED_FUNCTION (n)
2269 {
2270 bool update = false;
2271 if (dump_file && n->indirect_calls)
2272 fprintf (dump_file, "\n\nProcessing function %s/%i\n",
2273 n->name (), n->order);
2274 for (e = n->indirect_calls; e; e = e->next_callee)
2275 if (e->indirect_info->polymorphic)
2276 {
2277 struct cgraph_node *likely_target = NULL;
2278 void *cache_token;
2279 bool final;
2280 int nonconstruction_targets;
2281 vec <cgraph_node *> targets
2282 = possible_polymorphic_call_targets
2283 (e, &final, &cache_token, &nonconstruction_targets);
2284 unsigned int i;
2285
2286 if (dump_file)
2287 dump_possible_polymorphic_call_targets
2288 (dump_file, e);
2289
2290 npolymorphic++;
2291
2292 if (!cgraph_maybe_hot_edge_p (e))
2293 {
2294 if (dump_file)
2295 fprintf (dump_file, "Call is cold\n\n");
2296 ncold++;
2297 continue;
2298 }
2299 if (e->speculative)
2300 {
2301 if (dump_file)
2302 fprintf (dump_file, "Call is already speculated\n\n");
2303 nspeculated++;
2304
2305 /* When dumping see if we agree with speculation. */
2306 if (!dump_file)
2307 continue;
2308 }
2309 if (pointer_set_contains (bad_call_targets,
2310 cache_token))
2311 {
2312 if (dump_file)
2313 fprintf (dump_file, "Target list is known to be useless\n\n");
2314 nmultiple++;
2315 continue;
2316 }
2317 for (i = 0; i < targets.length (); i++)
2318 if (likely_target_p (targets[i]))
2319 {
2320 if (likely_target)
2321 {
2322 if (i < (unsigned) nonconstruction_targets)
2323 {
2324 likely_target = NULL;
2325 if (dump_file)
2326 fprintf (dump_file, "More than one likely target\n\n");
2327 nmultiple++;
2328 }
2329 break;
2330 }
2331 likely_target = targets[i];
2332 }
2333 if (!likely_target)
2334 {
2335 pointer_set_insert (bad_call_targets, cache_token);
2336 continue;
2337 }
2338 /* This is reached only when dumping; check if we agree or disagree
2339 with the speculation. */
2340 if (e->speculative)
2341 {
2342 struct cgraph_edge *e2;
2343 struct ipa_ref *ref;
2344 cgraph_speculative_call_info (e, e2, e, ref);
2345 if (cgraph_function_or_thunk_node (e2->callee, NULL)
2346 == cgraph_function_or_thunk_node (likely_target, NULL))
2347 {
2348 fprintf (dump_file, "We agree with speculation\n\n");
2349 nok++;
2350 }
2351 else
2352 {
2353 fprintf (dump_file, "We disagree with speculation\n\n");
2354 nwrong++;
2355 }
2356 continue;
2357 }
2358 if (!likely_target->definition)
2359 {
2360 if (dump_file)
2361 fprintf (dump_file, "Target is not a definition\n\n");
2362 nnotdefined++;
2363 continue;
2364 }
2365 /* Do not introduce new references to external symbols. While we
2366 can handle these just fine, it is common for programs to be built
2367 with headers declaring methods that are never provided by the objects
2368 they are linked with. */
2369 if (DECL_EXTERNAL (likely_target->decl))
2370 {
2371 if (dump_file)
2372 fprintf (dump_file, "Target is external\n\n");
2373 nexternal++;
2374 continue;
2375 }
2376 /* Don't use an implicitly-declared destructor (c++/58678). */
2377 struct cgraph_node *non_thunk_target
2378 = cgraph_function_node (likely_target);
2379 if (DECL_ARTIFICIAL (non_thunk_target->decl)
2380 && DECL_COMDAT (non_thunk_target->decl))
2381 {
2382 if (dump_file)
2383 fprintf (dump_file, "Target is artificial\n\n");
2384 nartificial++;
2385 continue;
2386 }
2387 if (cgraph_function_body_availability (likely_target)
2388 <= AVAIL_OVERWRITABLE
2389 && symtab_can_be_discarded (likely_target))
2390 {
2391 if (dump_file)
2392 fprintf (dump_file, "Target is overwritable\n\n");
2393 noverwritable++;
2394 continue;
2395 }
2396 else if (dbg_cnt (devirt))
2397 {
2398 if (dump_enabled_p ())
2399 {
2400 location_t locus = gimple_location_safe (e->call_stmt);
2401 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
2402 "speculatively devirtualizing call in %s/%i to %s/%i\n",
2403 n->name (), n->order,
2404 likely_target->name (),
2405 likely_target->order);
2406 }
2407 if (!symtab_can_be_discarded (likely_target))
2408 {
2409 cgraph_node *alias;
2410 alias = cgraph (symtab_nonoverwritable_alias
2411 (likely_target));
2412 if (alias)
2413 likely_target = alias;
2414 }
2415 nconverted++;
2416 update = true;
2417 cgraph_turn_edge_to_speculative
2418 (e, likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
2419 }
2420 }
2421 if (update)
2422 inline_update_overall_summary (n);
2423 }
2424 pointer_set_destroy (bad_call_targets);
2425
2426 if (dump_file)
2427 fprintf (dump_file,
2428 "%i polymorphic calls, %i devirtualized,"
2429 " %i speculatively devirtualized, %i cold\n"
2430 "%i have multiple targets, %i overwritable,"
2431 " %i already speculated (%i agree, %i disagree),"
2432 " %i external, %i not defined, %i artificial\n",
2433 npolymorphic, ndevirtualized, nconverted, ncold,
2434 nmultiple, noverwritable, nspeculated, nok, nwrong,
2435 nexternal, nnotdefined, nartificial);
2436 return ndevirtualized ? TODO_remove_functions : 0;
2437 }
2438
2439 namespace {
2440
2441 const pass_data pass_data_ipa_devirt =
2442 {
2443 IPA_PASS, /* type */
2444 "devirt", /* name */
2445 OPTGROUP_NONE, /* optinfo_flags */
2446 TV_IPA_DEVIRT, /* tv_id */
2447 0, /* properties_required */
2448 0, /* properties_provided */
2449 0, /* properties_destroyed */
2450 0, /* todo_flags_start */
2451 ( TODO_dump_symtab ), /* todo_flags_finish */
2452 };
2453
2454 class pass_ipa_devirt : public ipa_opt_pass_d
2455 {
2456 public:
2457 pass_ipa_devirt (gcc::context *ctxt)
2458 : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
2459 NULL, /* generate_summary */
2460 NULL, /* write_summary */
2461 NULL, /* read_summary */
2462 NULL, /* write_optimization_summary */
2463 NULL, /* read_optimization_summary */
2464 NULL, /* stmt_fixup */
2465 0, /* function_transform_todo_flags_start */
2466 NULL, /* function_transform */
2467 NULL) /* variable_transform */
2468 {}
2469
2470 /* opt_pass methods: */
2471 virtual bool gate (function *)
2472 {
2473 return (flag_devirtualize
2474 && flag_devirtualize_speculatively
2475 && optimize);
2476 }
2477
2478 virtual unsigned int execute (function *) { return ipa_devirt (); }
2479
2480 }; // class pass_ipa_devirt
2481
2482 } // anon namespace
2483
2484 ipa_opt_pass_d *
2485 make_pass_ipa_devirt (gcc::context *ctxt)
2486 {
2487 return new pass_ipa_devirt (ctxt);
2488 }
2489
2490 #include "gt-ipa-devirt.h"