Fix for devirtualization dump functions
1 /* Basic IPA utilities for type inheritance graph construction and
2 devirtualization.
3 Copyright (C) 2013-2014 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* Brief vocabulary:
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
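
       As a hypothetical illustration of point 3, the following pair of
       translation units violates the ODR, because the two definitions
       of the same type are not the same:

         // a.C
         struct S { virtual void f (); int x; };
         // b.C
         struct S { virtual void f (); long x, y; };

       With LTO we attempt to diagnose such mismatches (see
       add_type_duplicate below).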
38
39 OTR = OBJ_TYPE_REF
40 This is the GIMPLE representation of the type information of a polymorphic call.
41 It contains two parameters:
42 otr_type is the type of the class whose method is called.
43 otr_token is the index into the virtual table where the address is taken.
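
      For example, given the hypothetical source

        struct A { virtual void f (); virtual void g (); };
        void call (A *a) { a->g (); }

      the call to g is represented by an OBJ_TYPE_REF whose otr_type is A
      and whose otr_token is 1, because g occupies the second slot of A's
      virtual table.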
44
45 BINFO
46 This is the type inheritance information attached to each tree
47 RECORD_TYPE by the C++ frontend. It provides information about base
48 types and virtual tables.
49
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
52 BINFO_VTABLE.
53
54 Base types of a given type are enumerated by the BINFO_BASE_BINFO
55 vector. Members of this vector are not the BINFOs associated
56 with the base types. Rather, they are new copies of BINFOs
57 (base BINFOs). Their virtual tables may differ from the
58 virtual table of the base type. Also, BINFO_OFFSET specifies the
59 offset of the base within the type.
60
61 In the case of single inheritance, the virtual table is shared
62 and BINFO_VTABLE of the base BINFO is NULL. In the case of multiple
63 inheritance the individual virtual tables are pointed to by
64 BINFO_VTABLE of the base binfos (which differs from BINFO_VTABLE of
65 the binfo associated with the base type).
66
67 BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns the proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
70 base type.
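
      As a hypothetical illustration:

        struct A { virtual void f (); };
        struct B { virtual void g (); };
        struct C : A, B { virtual void g (); };

      TYPE_BINFO (C) enumerates two base BINFOs: a copy of A's BINFO at
      BINFO_OFFSET 0 and a copy of B's BINFO at a non-zero BINFO_OFFSET
      whose BINFO_VTABLE points to the B-in-C virtual table. Calling
      get_binfo_at_offset (TYPE_BINFO (C), offset-of-B-in-C, B) returns
      the latter copy, whose vtable contains C's override of g.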
71
72 token
73 This is the index of a virtual method in the virtual table associated
74 with the type defining it. The token can be looked up from OBJ_TYPE_REF
75 or from DECL_VINDEX of a given virtual method.
76
77 polymorphic (indirect) call
78 This is the callgraph representation of a virtual method call. Every
79 polymorphic call contains otr_type and otr_token taken from
80 original OBJ_TYPE_REF at callgraph construction time.
81
82 What we do here:
83
84 build_type_inheritance_graph triggers a construction of the type inheritance
85 graph.
86
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
89 inserted into the graph. Also types without virtual methods are not
90 represented at all, though it may be easy to add this.
91
92 The inheritance graph is represented as follows:
93
94 Vertices are structures odr_type. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by ODR rule.
96 (multiple type nodes appear only with link-time optimization)
97
98 Edges are represented by odr_type->bases and odr_type->derived_types.
99 At the moment we do not track offsets of types for multiple inheritance.
100 Adding this is easy.
101
102 possible_polymorphic_call_targets returns, given the parameters found in
103 an indirect polymorphic edge, all possible polymorphic call targets of the call.
104
105 pass_ipa_devirt performs simple speculative devirtualization.
106 */
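
/* As a hypothetical end-to-end example of the machinery described above:

     struct A { virtual int f (); };
     struct B final : A { virtual int f (); };
     int call (B *b) { return b->f (); }

   The call b->f () is an OBJ_TYPE_REF with otr_type B and otr_token 0.
   Because B is TYPE_FINAL_P, it has no derivations, so
   possible_polymorphic_call_targets can return a complete single-element
   list containing B::f, and the call can be devirtualized.  */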
107
108 #include "config.h"
109 #include "system.h"
110 #include "coretypes.h"
111 #include "tm.h"
112 #include "tree.h"
113 #include "print-tree.h"
114 #include "calls.h"
115 #include "cgraph.h"
116 #include "expr.h"
117 #include "tree-pass.h"
118 #include "pointer-set.h"
119 #include "target.h"
120 #include "hash-table.h"
121 #include "tree-pretty-print.h"
122 #include "ipa-utils.h"
123 #include "tree-ssa-alias.h"
124 #include "internal-fn.h"
125 #include "gimple-fold.h"
126 #include "gimple-expr.h"
127 #include "gimple.h"
128 #include "ipa-inline.h"
129 #include "diagnostic.h"
130 #include "tree-dfa.h"
131 #include "demangle.h"
132 #include "dbgcnt.h"
133
134 static bool odr_violation_reported = false;
135
136 /* Dummy polymorphic call context. */
137
138 const ipa_polymorphic_call_context ipa_dummy_polymorphic_call_context
139 = {0, NULL, false, true};
140
141 /* Pointer set of all call targets appearing in the cache. */
142 static pointer_set_t *cached_polymorphic_call_targets;
143
144 /* The node of type inheritance graph. For each type unique in
145 One Definition Rule (ODR) sense, we produce one node linking all
146 main variants of types equivalent to it, bases and derived types. */
147
148 struct GTY(()) odr_type_d
149 {
150 /* leader type. */
151 tree type;
152 /* All bases. */
153 vec<odr_type> GTY((skip)) bases;
154 /* All derived types with virtual methods seen in the unit. */
155 vec<odr_type> GTY((skip)) derived_types;
156
157 /* All equivalent types, if more than one. */
158 vec<tree, va_gc> *types;
159 /* Set of all equivalent types, if NON-NULL. */
160 pointer_set_t * GTY((skip)) types_set;
161
162 /* Unique ID indexing the type in odr_types array. */
163 int id;
164 /* Is it in anonymous namespace? */
165 bool anonymous_namespace;
166 /* Do we know about all derivations of given type? */
167 bool all_derivations_known;
168 };
169
170
171 /* Return true if BINFO corresponds to a type with virtual methods.
172
173 Every type has several BINFOs. One is the BINFO associated with the type
174 itself, while the others represent its bases within derived types. The
175 BINFOs representing bases do not have BINFO_VTABLE pointer set in the
176 single-inheritance case (because vtables are shared). Look up the BINFO
177 of the type and check the presence of its vtable. */
178
179 static inline bool
180 polymorphic_type_binfo_p (tree binfo)
181 {
182 /* See if BINFO's type has a virtual table associated with it. */
183 return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo)));
184 }
185
186 /* Return TRUE if all derived types of T are known and thus
187 we may consider the walk of derived types complete.
188
189 This is typically true only for final anonymous namespace types and types
190 defined within functions (that may be COMDAT and thus shared across units,
191 but with the same set of derived types). */
192
193 static bool
194 type_all_derivations_known_p (tree t)
195 {
196 if (TYPE_FINAL_P (t))
197 return true;
198 if (flag_ltrans)
199 return false;
200 if (type_in_anonymous_namespace_p (t))
201 return true;
202 return (decl_function_context (TYPE_NAME (t)) != NULL);
203 }
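
/* A hypothetical illustration of the cases accepted above:

     struct F final { virtual void f (); };           // TYPE_FINAL_P
     namespace { struct N { virtual void f (); }; }   // anonymous namespace
     void g () { struct L { virtual void f (); }; }   // function-local

   All derivations of such types must be visible to the compiler, so the
   walk of derived types may be considered complete; in LTRANS units only
   the TYPE_FINAL_P case qualifies.  */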
204
205 /* Return TRUE if the type's constructors are all visible. */
206
207 static bool
208 type_all_ctors_visible_p (tree t)
209 {
210 return !flag_ltrans
211 && cgraph_state >= CGRAPH_STATE_CONSTRUCTION
212 /* We can not always use type_all_derivations_known_p.
213 For function-local types we must assume the case where
214 the function is COMDAT and shared between units.
215
216 TODO: These cases are quite easy to get, but we need
217 to keep track of C++ privatizing via -fno-weak
218 as well as the IPA privatizing. */
219 && type_in_anonymous_namespace_p (t);
220 }
221
222 /* Return TRUE if the type may have instances. */
223
224 static bool
225 type_possibly_instantiated_p (tree t)
226 {
227 tree vtable;
228 varpool_node *vnode;
229
230 /* TODO: Add abstract types here. */
231 if (!type_all_ctors_visible_p (t))
232 return true;
233
234 vtable = BINFO_VTABLE (TYPE_BINFO (t));
235 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
236 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
237 vnode = varpool_get_node (vtable);
238 return vnode && vnode->definition;
239 }
240
241 /* One Definition Rule hashtable helpers. */
242
243 struct odr_hasher
244 {
245 typedef odr_type_d value_type;
246 typedef union tree_node compare_type;
247 static inline hashval_t hash (const value_type *);
248 static inline bool equal (const value_type *, const compare_type *);
249 static inline void remove (value_type *);
250 };
251
252 /* Produce hash based on type name. */
253
254 hashval_t
255 hash_type_name (tree t)
256 {
257 gcc_checking_assert (TYPE_MAIN_VARIANT (t) == t);
258
259 /* If not in LTO, all main variants are unique, so we can do
260 pointer hash. */
261 if (!in_lto_p)
262 return htab_hash_pointer (t);
263
264 /* Anonymous types are unique. */
265 if (type_in_anonymous_namespace_p (t))
266 return htab_hash_pointer (t);
267
268 /* For polymorphic types, we can simply hash the virtual table. */
269 if (TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
270 {
271 tree v = BINFO_VTABLE (TYPE_BINFO (t));
272 hashval_t hash = 0;
273
274 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
275 {
276 hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
277 v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
278 }
279
280 v = DECL_ASSEMBLER_NAME (v);
281 hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
282 return hash;
283 }
284
285 /* Rest is not implemented yet. */
286 gcc_unreachable ();
287 }
288
289 /* Return the computed hashcode for ODR_TYPE. */
290
291 inline hashval_t
292 odr_hasher::hash (const value_type *odr_type)
293 {
294 return hash_type_name (odr_type->type);
295 }
296
297 /* Compare types T1 and T2 and return true if they are
298 equivalent. */
299
300 inline bool
301 odr_hasher::equal (const value_type *t1, const compare_type *ct2)
302 {
303 tree t2 = const_cast <tree> (ct2);
304
305 gcc_checking_assert (TYPE_MAIN_VARIANT (ct2) == ct2);
306 if (t1->type == t2)
307 return true;
308 if (!in_lto_p)
309 return false;
310 return types_same_for_odr (t1->type, t2);
311 }
312
313 /* Free ODR type V. */
314
315 inline void
316 odr_hasher::remove (value_type *v)
317 {
318 v->bases.release ();
319 v->derived_types.release ();
320 if (v->types_set)
321 pointer_set_destroy (v->types_set);
322 ggc_free (v);
323 }
324
325 /* ODR type hash used to lookup ODR type based on tree type node. */
326
327 typedef hash_table<odr_hasher> odr_hash_type;
328 static odr_hash_type *odr_hash;
329
330 /* ODR types are also stored into the ODR_TYPE vector to allow consistent
331 walking. Bases appear before derived types. The vector is garbage collected
332 so we won't end up visiting empty types. */
333
334 static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
335 #define odr_types (*odr_types_ptr)
336
337 /* TYPE is equivalent to VAL by ODR, but its tree representation differs
338 from VAL->type. This may happen in LTO where tree merging did not merge
339 all variants of the same type. It may or may not indicate an ODR violation.
340 Add it to the list of duplicates and warn on some violations. */
341
342 static void
343 add_type_duplicate (odr_type val, tree type)
344 {
345 if (!val->types_set)
346 val->types_set = pointer_set_create ();
347
348 /* See if this duplicate is new. */
349 if (!pointer_set_insert (val->types_set, type))
350 {
351 bool merge = true;
352 bool base_mismatch = false;
353 gcc_assert (in_lto_p);
354 vec_safe_push (val->types, type);
355 unsigned int i,j;
356
357 /* First we compare memory layout. */
358 if (!types_compatible_p (val->type, type))
359 {
360 merge = false;
361 odr_violation_reported = true;
362 if (BINFO_VTABLE (TYPE_BINFO (val->type))
363 && warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
364 "type %qD violates one definition rule ",
365 type))
366 inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
367 "a type with the same name but different layout is "
368 "defined in another translation unit");
369 if (cgraph_dump_file)
370 {
371 fprintf (cgraph_dump_file, "ODR violation or merging or ODR type bug?\n");
372
373 print_node (cgraph_dump_file, "", val->type, 0);
374 putc ('\n',cgraph_dump_file);
375 print_node (cgraph_dump_file, "", type, 0);
376 putc ('\n',cgraph_dump_file);
377 }
378 }
379
380 /* Next sanity check that bases are the same. If not, we will end
381 up producing wrong answers. */
382 for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
383 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type), i)))
384 {
385 odr_type base = get_odr_type
386 (BINFO_TYPE
387 (BINFO_BASE_BINFO (TYPE_BINFO (type),
388 i)),
389 true);
390 if (val->bases.length () <= j || val->bases[j] != base)
391 base_mismatch = true;
392 j++;
393 }
394 if (base_mismatch)
395 {
396 merge = false;
397 odr_violation_reported = true;
398
399 if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
400 "type %qD violates one definition rule ",
401 type))
402 inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
403 "a type with the same name but different bases is "
404 "defined in another translation unit");
405 if (cgraph_dump_file)
406 {
407 fprintf (cgraph_dump_file, "ODR base violation or merging bug?\n");
408
409 print_node (cgraph_dump_file, "", val->type, 0);
410 putc ('\n',cgraph_dump_file);
411 print_node (cgraph_dump_file, "", type, 0);
412 putc ('\n',cgraph_dump_file);
413 }
414 }
415
416 /* Regularize things a little. During LTO the same types may come with
417 different BINFOs: either because their virtual table was
418 not merged by tree merging and only later at decl merging, or
419 because one type comes with an external vtable while the other
420 comes with an internal one. We want to merge equivalent binfos to
421 conserve memory and streaming overhead.
422
423 The external vtables are more harmful: they contain references
424 to external declarations of methods that may be defined in the
425 merged LTO unit. For this reason we absolutely need to remove
426 them and replace them by internal variants. Not doing so will lead
427 to incomplete answers from possible_polymorphic_call_targets. */
428 if (!flag_ltrans && merge)
429 {
430 tree master_binfo = TYPE_BINFO (val->type);
431 tree v1 = BINFO_VTABLE (master_binfo);
432 tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
433
434 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
435 {
436 gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
437 && operand_equal_p (TREE_OPERAND (v1, 1),
438 TREE_OPERAND (v2, 1), 0));
439 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
440 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
441 }
442 gcc_assert (DECL_ASSEMBLER_NAME (v1)
443 == DECL_ASSEMBLER_NAME (v2));
444
445 if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
446 {
447 unsigned int i;
448
449 TYPE_BINFO (val->type) = TYPE_BINFO (type);
450 for (i = 0; i < val->types->length (); i++)
451 {
452 if (TYPE_BINFO ((*val->types)[i])
453 == master_binfo)
454 TYPE_BINFO ((*val->types)[i]) = TYPE_BINFO (type);
455 }
456 }
457 else
458 TYPE_BINFO (type) = master_binfo;
459 }
460 }
461 }
462
463 /* Get the ODR type hash entry for TYPE. If INSERT is true, possibly
464 create a new entry. */
465
466 odr_type
467 get_odr_type (tree type, bool insert)
468 {
469 odr_type_d **slot;
470 odr_type val;
471 hashval_t hash;
472
473 type = TYPE_MAIN_VARIANT (type);
474 gcc_checking_assert (TYPE_MAIN_VARIANT (type) == type);
475 hash = hash_type_name (type);
476 slot
477 = odr_hash->find_slot_with_hash (type, hash, insert ? INSERT : NO_INSERT);
478 if (!slot)
479 return NULL;
480
481 /* See if we already have entry for type. */
482 if (*slot)
483 {
484 val = *slot;
485
486 /* With LTO we need to support multiple tree representations of
487 the same ODR type. */
488 if (val->type != type)
489 add_type_duplicate (val, type);
490 }
491 else
492 {
493 tree binfo = TYPE_BINFO (type);
494 unsigned int i;
495
496 val = ggc_cleared_alloc<odr_type_d> ();
497 val->type = type;
498 val->bases = vNULL;
499 val->derived_types = vNULL;
500 val->anonymous_namespace = type_in_anonymous_namespace_p (type);
501 val->all_derivations_known = type_all_derivations_known_p (type);
502 *slot = val;
503 for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
504 /* For now record only polymorphic types. Others are
505 pointless for devirtualization and we can not precisely
506 determine ODR equivalency of these during LTO. */
507 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
508 {
509 odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
510 i)),
511 true);
512 base->derived_types.safe_push (val);
513 val->bases.safe_push (base);
514 }
515 /* First record bases, then add into array so ids are increasing. */
516 if (odr_types_ptr)
517 val->id = odr_types.length ();
518 vec_safe_push (odr_types_ptr, val);
519 }
520 return val;
521 }
522
523 /* Dump ODR type T and all its derived types. INDENT specifies indentation for
524 recursive printing. */
525
526 static void
527 dump_odr_type (FILE *f, odr_type t, int indent=0)
528 {
529 unsigned int i;
530 fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
531 print_generic_expr (f, t->type, TDF_SLIM);
532 fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
533 fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
534 if (TYPE_NAME (t->type))
535 {
536 fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
537 DECL_SOURCE_FILE (TYPE_NAME (t->type)),
538 DECL_SOURCE_LINE (TYPE_NAME (t->type)));
539 }
540 if (t->bases.length ())
541 {
542 fprintf (f, "%*s base odr type ids: ", indent * 2, "");
543 for (i = 0; i < t->bases.length (); i++)
544 fprintf (f, " %i", t->bases[i]->id);
545 fprintf (f, "\n");
546 }
547 if (t->derived_types.length ())
548 {
549 fprintf (f, "%*s derived types:\n", indent * 2, "");
550 for (i = 0; i < t->derived_types.length (); i++)
551 dump_odr_type (f, t->derived_types[i], indent + 1);
552 }
553 fprintf (f, "\n");
554 }
555
556 /* Dump the type inheritance graph. */
557
558 static void
559 dump_type_inheritance_graph (FILE *f)
560 {
561 unsigned int i;
562 if (!odr_types_ptr)
563 return;
564 fprintf (f, "\n\nType inheritance graph:\n");
565 for (i = 0; i < odr_types.length (); i++)
566 {
567 if (odr_types[i]->bases.length () == 0)
568 dump_odr_type (f, odr_types[i]);
569 }
570 for (i = 0; i < odr_types.length (); i++)
571 {
572 if (odr_types[i]->types && odr_types[i]->types->length ())
573 {
574 unsigned int j;
575 fprintf (f, "Duplicate tree types for odr type %i\n", i);
576 print_node (f, "", odr_types[i]->type, 0);
577 for (j = 0; j < odr_types[i]->types->length (); j++)
578 {
579 tree t;
580 fprintf (f, "duplicate #%i\n", j);
581 print_node (f, "", (*odr_types[i]->types)[j], 0);
582 t = (*odr_types[i]->types)[j];
583 while (TYPE_P (t) && TYPE_CONTEXT (t))
584 {
585 t = TYPE_CONTEXT (t);
586 print_node (f, "", t, 0);
587 }
588 putc ('\n',f);
589 }
590 }
591 }
592 }
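
/* A hypothetical sketch of the dump produced by the two functions above
   for a unit defining struct A and struct B : A (spacing approximate):

     Type inheritance graph:
      type 0: struct A
       defined at: t.C:1
       derived types:
        type 1: struct B
         defined at: t.C:2
         base odr type ids:  0  */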
593
594 /* Given method type T, return the type of the class it belongs to.
595 Look up the this pointer and get its type. */
596
597 tree
598 method_class_type (tree t)
599 {
600 tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
601 gcc_assert (TREE_CODE (t) == METHOD_TYPE);
602
603 return TREE_TYPE (first_parm_type);
604 }
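
/* For example, for a hypothetical method void A::f (), TREE_TYPE of the
   FUNCTION_DECL is a METHOD_TYPE whose first argument type is `A *'
   (the this pointer); method_class_type of that METHOD_TYPE returns
   the RECORD_TYPE A.  */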
605
606 /* Initialize IPA devirt and build inheritance tree graph. */
607
608 void
609 build_type_inheritance_graph (void)
610 {
611 struct symtab_node *n;
612 FILE *inheritance_dump_file;
613 int flags;
614
615 if (odr_hash)
616 return;
617 timevar_push (TV_IPA_INHERITANCE);
618 inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
619 odr_hash = new odr_hash_type (23);
620
621 /* We reconstruct the graph starting from the types of all methods seen in
622 the unit. */
623 FOR_EACH_SYMBOL (n)
624 if (is_a <cgraph_node *> (n)
625 && DECL_VIRTUAL_P (n->decl)
626 && symtab_real_symbol_p (n))
627 get_odr_type (method_class_type (TREE_TYPE (n->decl)), true);
628
629 /* Look also for virtual tables of types that do not define any methods.
630
631 We need this in the case where class B has a virtual base A and
632 redefines one of A's virtual methods, and there is a class C with no
633 virtual methods that has B as a virtual base.
634
635 Here B's virtual method is output in two variants - for non-virtual
636 and for virtual inheritance. B's virtual table has the non-virtual
637 version, while C's has the virtual one.
638
639 For this reason we need to know about C in order to include both
640 variants of B. More correctly, record_target_from_binfo should
641 add both variants of the method when walking B, but we have no
642 link in between them.
643
644 We rely on the fact that either the method is exported, and thus we
645 assume it is called externally, or C is in an anonymous namespace, and
646 thus we will see the vtable. */
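
      /* A hypothetical sketch of the situation described above:

           struct A { virtual void f (); };
           struct B : virtual A { virtual void f (); };
           struct C : virtual B { };

         B::f is output both in a non-virtual-inheritance variant referenced
         from B's own virtual table and in a virtual-inheritance variant
         referenced from C's virtual table.  */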
647
648 else if (is_a <varpool_node *> (n)
649 && DECL_VIRTUAL_P (n->decl)
650 && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
651 && TYPE_BINFO (DECL_CONTEXT (n->decl))
652 && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
653 get_odr_type (DECL_CONTEXT (n->decl), true);
654 if (inheritance_dump_file)
655 {
656 dump_type_inheritance_graph (inheritance_dump_file);
657 dump_end (TDI_inheritance, inheritance_dump_file);
658 }
659 timevar_pop (TV_IPA_INHERITANCE);
660 }
661
662 /* Return true if N has a reference from a live virtual table
663 (and thus can be a destination of a polymorphic call).
664 Be conservatively correct when the callgraph is not built or
665 if the method may be referred to externally. */
666
667 static bool
668 referenced_from_vtable_p (struct cgraph_node *node)
669 {
670 int i;
671 struct ipa_ref *ref;
672 bool found = false;
673
674 if (node->externally_visible
675 || node->used_from_other_partition)
676 return true;
677
678 /* Keep this test constant time.
679 It is unlikely this can happen except for the case where speculative
680 devirtualization introduced many speculative edges to this node.
681 In this case the target is very likely alive anyway. */
682 if (node->ref_list.referring.length () > 100)
683 return true;
684
685 /* We need references built. */
686 if (cgraph_state <= CGRAPH_STATE_CONSTRUCTION)
687 return true;
688
689 for (i = 0; node->iterate_referring (i, ref); i++)
690
691 if ((ref->use == IPA_REF_ALIAS
692 && referenced_from_vtable_p (cgraph (ref->referring)))
693 || (ref->use == IPA_REF_ADDR
694 && TREE_CODE (ref->referring->decl) == VAR_DECL
695 && DECL_VIRTUAL_P (ref->referring->decl)))
696 {
697 found = true;
698 break;
699 }
700 return found;
701 }
702
703 /* If TARGET has an associated node, record it in the NODES array.
704 CAN_REFER specifies whether the program can refer to the target directly.
705 If TARGET is unknown (NULL) or it can not be inserted (for example because
706 its body was already removed and there is no way to refer to it), clear
707 COMPLETEP. */
708
709 static void
710 maybe_record_node (vec <cgraph_node *> &nodes,
711 tree target, pointer_set_t *inserted,
712 bool can_refer,
713 bool *completep)
714 {
715 struct cgraph_node *target_node;
716
717 /* cxa_pure_virtual and __builtin_unreachable do not need to be added into
718 the list of targets; the runtime effect of calling them is undefined.
719 Only "real" virtual methods should be accounted for. */
720 if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE)
721 return;
722
723 if (!can_refer)
724 {
725 /* The only case when a method of an anonymous namespace becomes unreferable
726 is when we completely optimized it out. */
727 if (flag_ltrans
728 || !target
729 || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
730 *completep = false;
731 return;
732 }
733
734 if (!target)
735 return;
736
737 target_node = cgraph_get_node (target);
738
739 /* A method can only be called by a polymorphic call if any
740 of the vtables referring to it are alive.
741
742 While this holds for non-anonymous functions, too, there are
743 cases where we want to keep them in the list; for example
744 inline functions with -fno-weak are static, but we still
745 may devirtualize them when instance comes from other unit.
746 The same holds for LTO.
747
748 Currently we ignore these functions in speculative devirtualization.
749 ??? Maybe it would make sense to be more aggressive for LTO even
750 elsewhere. */
751 if (!flag_ltrans
752 && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
753 && (!target_node
754 || !referenced_from_vtable_p (target_node)))
755 ;
756 /* See if TARGET is a useful function we can deal with. */
757 else if (target_node != NULL
758 && (TREE_PUBLIC (target)
759 || DECL_EXTERNAL (target)
760 || target_node->definition)
761 && symtab_real_symbol_p (target_node))
762 {
763 gcc_assert (!target_node->global.inlined_to);
764 gcc_assert (symtab_real_symbol_p (target_node));
765 if (!pointer_set_insert (inserted, target))
766 {
767 pointer_set_insert (cached_polymorphic_call_targets,
768 target_node);
769 nodes.safe_push (target_node);
770 }
771 }
772 else if (completep
773 && (!type_in_anonymous_namespace_p
774 (DECL_CONTEXT (target))
775 || flag_ltrans))
776 *completep = false;
777 }
778
779 /* See if BINFO's type matches OUTER_TYPE. If so, look up the
780 BINFO of the subtype of OTR_TYPE at OFFSET and in that BINFO find
781 the method in the vtable and insert it into the NODES array,
782 or into BASES_TO_CONSIDER if this array is non-NULL.
783 Otherwise recurse to base BINFOs.
784 This matches what get_binfo_at_offset does, but with the offset
785 being unknown.
786
787 TYPE_BINFOS is a stack of BINFOs of types with a defined
788 virtual table seen on the way from the class type to BINFO.
789
790 MATCHED_VTABLES tracks virtual tables in which we already looked
791 up the virtual function. INSERTED tracks nodes we already
792 inserted.
793
794 ANONYMOUS is true if BINFO is part of an anonymous namespace.
795
796 Clear COMPLETEP when we hit an unreferable target.
797 */
798
799 static void
800 record_target_from_binfo (vec <cgraph_node *> &nodes,
801 vec <tree> *bases_to_consider,
802 tree binfo,
803 tree otr_type,
804 vec <tree> &type_binfos,
805 HOST_WIDE_INT otr_token,
806 tree outer_type,
807 HOST_WIDE_INT offset,
808 pointer_set_t *inserted,
809 pointer_set_t *matched_vtables,
810 bool anonymous,
811 bool *completep)
812 {
813 tree type = BINFO_TYPE (binfo);
814 int i;
815 tree base_binfo;
816
817
818 if (BINFO_VTABLE (binfo))
819 type_binfos.safe_push (binfo);
820 if (types_same_for_odr (type, outer_type))
821 {
822 int i;
823 tree type_binfo = NULL;
824
825 /* Look up the BINFO with a virtual table. For normal types it is always
826 the last binfo on the stack. */
827 for (i = type_binfos.length () - 1; i >= 0; i--)
828 if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
829 {
830 type_binfo = type_binfos[i];
831 break;
832 }
833 if (BINFO_VTABLE (binfo))
834 type_binfos.pop ();
835 /* If this is a duplicated BINFO for a base shared by virtual inheritance,
836 we may not have its associated vtable. This is not a problem, since
837 we will walk it on the other path. */
838 if (!type_binfo)
839 return;
840 tree inner_binfo = get_binfo_at_offset (type_binfo,
841 offset, otr_type);
842 if (!inner_binfo)
843 {
844 gcc_assert (odr_violation_reported);
845 return;
846 }
847 /* For types in an anonymous namespace, first check if the respective vtable
848 is alive. If not, we know the type can't be called. */
849 if (!flag_ltrans && anonymous)
850 {
851 tree vtable = BINFO_VTABLE (inner_binfo);
852 varpool_node *vnode;
853
854 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
855 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
856 vnode = varpool_get_node (vtable);
857 if (!vnode || !vnode->definition)
858 return;
859 }
860 gcc_assert (inner_binfo);
861 if (bases_to_consider
862 ? !pointer_set_contains (matched_vtables, BINFO_VTABLE (inner_binfo))
863 : !pointer_set_insert (matched_vtables, BINFO_VTABLE (inner_binfo)))
864 {
865 bool can_refer;
866 tree target = gimple_get_virt_method_for_binfo (otr_token,
867 inner_binfo,
868 &can_refer);
869 if (!bases_to_consider)
870 maybe_record_node (nodes, target, inserted, can_refer, completep);
871 /* Destructors are never called via construction vtables. */
872 else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
873 bases_to_consider->safe_push (target);
874 }
875 return;
876 }
877
878 /* Walk bases. */
879 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
880 /* Walking bases that have no virtual method is a pointless exercise. */
881 if (polymorphic_type_binfo_p (base_binfo))
882 record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
883 type_binfos,
884 otr_token, outer_type, offset, inserted,
885 matched_vtables, anonymous, completep);
886 if (BINFO_VTABLE (binfo))
887 type_binfos.pop ();
888 }
889
890 /* Look up virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
891 of TYPE, insert them into NODES, and recurse into derived nodes.
892 INSERTED is used to avoid duplicate insertions of methods into NODES.
893 MATCHED_VTABLES is used to avoid walking vtables more than once.
894 Clear COMPLETEP if an unreferable target is found.
895
896 If CONSIDER_CONSTRUCTION is true, record to BASES_TO_CONSIDER
897 all cases where BASE_SKIPPED is true (because the base is an abstract
898 class). */
899
900 static void
901 possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
902 pointer_set_t *inserted,
903 pointer_set_t *matched_vtables,
904 tree otr_type,
905 odr_type type,
906 HOST_WIDE_INT otr_token,
907 tree outer_type,
908 HOST_WIDE_INT offset,
909 bool *completep,
910 vec <tree> &bases_to_consider,
911 bool consider_construction)
912 {
913 tree binfo = TYPE_BINFO (type->type);
914 unsigned int i;
915 vec <tree> type_binfos = vNULL;
916 bool possibly_instantiated = type_possibly_instantiated_p (type->type);
917
918 /* We may need to consider types w/o instances because of possible derived
919 types using their methods either directly or via construction vtables.
920 We are safe to skip them when all derivations are known, since we will
921 handle them later.
922 This is done by recording them to BASES_TO_CONSIDER array. */
923 if (possibly_instantiated || consider_construction)
924 {
925 record_target_from_binfo (nodes,
926 (!possibly_instantiated
927 && type_all_derivations_known_p (type->type))
928 ? &bases_to_consider : NULL,
929 binfo, otr_type, type_binfos, otr_token,
930 outer_type, offset,
931 inserted, matched_vtables,
932 type->anonymous_namespace, completep);
933 }
934 type_binfos.release ();
935 for (i = 0; i < type->derived_types.length (); i++)
936 possible_polymorphic_call_targets_1 (nodes, inserted,
937 matched_vtables,
938 otr_type,
939 type->derived_types[i],
940 otr_token, outer_type, offset, completep,
941 bases_to_consider, consider_construction);
942 }
943
944 /* Cache of queries for polymorphic call targets.
945
946 Enumerating all call targets may get expensive when there are many
947 polymorphic calls in the program, so we memoize all the previous
948 queries and avoid duplicated work. */
949
950 struct polymorphic_call_target_d
951 {
952 HOST_WIDE_INT otr_token;
953 ipa_polymorphic_call_context context;
954 odr_type type;
955 vec <cgraph_node *> targets;
956 int nonconstruction_targets;
957 bool complete;
958 };
959
960 /* Polymorphic call target cache helpers. */
961
962 struct polymorphic_call_target_hasher
963 {
964 typedef polymorphic_call_target_d value_type;
965 typedef polymorphic_call_target_d compare_type;
966 static inline hashval_t hash (const value_type *);
967 static inline bool equal (const value_type *, const compare_type *);
968 static inline void remove (value_type *);
969 };
970
971 /* Return the computed hashcode for ODR_QUERY. */
972
973 inline hashval_t
974 polymorphic_call_target_hasher::hash (const value_type *odr_query)
975 {
976 hashval_t hash;
977
978 hash = iterative_hash_host_wide_int
979 (odr_query->otr_token,
980 odr_query->type->id);
981 hash = iterative_hash_hashval_t (TYPE_UID (odr_query->context.outer_type),
982 hash);
983 hash = iterative_hash_host_wide_int (odr_query->context.offset, hash);
984 return iterative_hash_hashval_t
985 (((int)odr_query->context.maybe_in_construction << 1)
986 | (int)odr_query->context.maybe_derived_type, hash);
987 }
988
989 /* Compare cache entries T1 and T2. */
990
991 inline bool
992 polymorphic_call_target_hasher::equal (const value_type *t1,
993 const compare_type *t2)
994 {
995 return (t1->type == t2->type && t1->otr_token == t2->otr_token
996 && t1->context.offset == t2->context.offset
997 && t1->context.outer_type == t2->context.outer_type
998 && t1->context.maybe_in_construction
999 == t2->context.maybe_in_construction
1000 && t1->context.maybe_derived_type == t2->context.maybe_derived_type);
1001 }
1002
1003 /* Remove entry in polymorphic call target cache hash. */
1004
1005 inline void
1006 polymorphic_call_target_hasher::remove (value_type *v)
1007 {
1008 v->targets.release ();
1009 free (v);
1010 }
1011
1012 /* Polymorphic call target query cache. */
1013
1014 typedef hash_table<polymorphic_call_target_hasher>
1015 polymorphic_call_target_hash_type;
1016 static polymorphic_call_target_hash_type *polymorphic_call_target_hash;
1017
1018 /* Destroy polymorphic call target query cache. */
1019
1020 static void
1021 free_polymorphic_call_targets_hash ()
1022 {
1023 if (cached_polymorphic_call_targets)
1024 {
1025 delete polymorphic_call_target_hash;
1026 polymorphic_call_target_hash = NULL;
1027 pointer_set_destroy (cached_polymorphic_call_targets);
1028 cached_polymorphic_call_targets = NULL;
1029 }
1030 }
1031
1032 /* When a virtual function is removed, we may need to flush the cache. */
1033
1034 static void
1035 devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
1036 {
1037 if (cached_polymorphic_call_targets
1038 && pointer_set_contains (cached_polymorphic_call_targets, n))
1039 free_polymorphic_call_targets_hash ();
1040 }
1041
1042 /* CONTEXT->OUTER_TYPE is a type of memory object where object of EXPECTED_TYPE
1043 is contained at CONTEXT->OFFSET. Walk the memory representation of
1044 CONTEXT->OUTER_TYPE and find the outermost class type that matches
1045 EXPECTED_TYPE or contains EXPECTED_TYPE as a base. Update CONTEXT
1046 to represent it.
1047
1048 For example when CONTEXT represents type
1049 class A
1050 {
1051 int a;
1052 class B b;
1053 }
1054 and we look for type at offset sizeof(int), we end up with B and offset 0.
1055 If the same is produced by multiple inheritance, we end up with A and offset
1056 sizeof(int).
1057
1058 If we can not find the corresponding class, give up by setting
1059 CONTEXT->OUTER_TYPE to EXPECTED_TYPE and CONTEXT->OFFSET to 0.
1060 Return true when the lookup was successful. */
1061
1062 static bool
1063 get_class_context (ipa_polymorphic_call_context *context,
1064 tree expected_type)
1065 {
1066 tree type = context->outer_type;
1067 HOST_WIDE_INT offset = context->offset;
1068
1069 /* Find the sub-object the constant actually refers to and mark whether it is
1070 an artificial one (as opposed to a user-defined one). */
1071 while (true)
1072 {
1073 HOST_WIDE_INT pos, size;
1074 tree fld;
1075
1076 /* On a match, just return what we found. */
1077 if (TREE_CODE (type) == TREE_CODE (expected_type)
1078 && types_same_for_odr (type, expected_type))
1079 {
1080 /* A type can not contain itself at a non-zero offset. In that case
1081 just give up. */
1082 if (offset != 0)
1083 goto give_up;
1084 gcc_assert (offset == 0);
1085 return true;
1086 }
1087
1088 /* Walk fields and find the corresponding one at OFFSET. */
1089 if (TREE_CODE (type) == RECORD_TYPE)
1090 {
1091 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1092 {
1093 if (TREE_CODE (fld) != FIELD_DECL)
1094 continue;
1095
1096 pos = int_bit_position (fld);
1097 size = tree_to_uhwi (DECL_SIZE (fld));
1098 if (pos <= offset && (pos + size) > offset)
1099 break;
1100 }
1101
1102 if (!fld)
1103 goto give_up;
1104
1105 type = TREE_TYPE (fld);
1106 offset -= pos;
1107 /* DECL_ARTIFICIAL represents a basetype. */
1108 if (!DECL_ARTIFICIAL (fld))
1109 {
1110 context->outer_type = type;
1111 context->offset = offset;
1112 /* As soon as we see a field containing the type,
1113 we know we are not looking for derivations. */
1114 context->maybe_derived_type = false;
1115 }
1116 }
1117 else if (TREE_CODE (type) == ARRAY_TYPE)
1118 {
1119 tree subtype = TREE_TYPE (type);
1120
1121 /* Give up if we do not know the size of the array element. */
1122 if (!tree_fits_shwi_p (TYPE_SIZE (subtype))
1123 || tree_to_shwi (TYPE_SIZE (subtype)) <= 0)
1124 goto give_up;
1125 offset = offset % tree_to_shwi (TYPE_SIZE (subtype));
1126 type = subtype;
1127 context->outer_type = type;
1128 context->offset = offset;
1129 context->maybe_derived_type = false;
1130 }
1131 /* Give up on anything else. */
1132 else
1133 goto give_up;
1134 }
1135
1136 /* If we failed to find the subtype we are looking for, give up and
1137 fall back to the most generic query. */
1138 give_up:
1139 context->outer_type = expected_type;
1140 context->offset = 0;
1141 context->maybe_derived_type = true;
1142 context->maybe_in_construction = true;
1143 /* POD can be changed to an instance of a polymorphic type by
1144 placement new. Here we play safe and assume that any
1145 non-polymorphic type is POD. */
1146 if ((TREE_CODE (type) != RECORD_TYPE
1147 || !TYPE_BINFO (type)
1148 || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
1149 && (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1150 || (offset + tree_to_uhwi (TYPE_SIZE (expected_type)) <=
1151 tree_to_uhwi (TYPE_SIZE (type)))))
1152 return true;
1153 return false;
1154 }
1155
1156 /* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET. */
1157
1158 static bool
1159 contains_type_p (tree outer_type, HOST_WIDE_INT offset,
1160 tree otr_type)
1161 {
1162 ipa_polymorphic_call_context context = {offset, outer_type,
1163 false, true};
1164 return get_class_context (&context, otr_type);
1165 }
1166
1167 /* Look up the base of BINFO that has virtual table VTABLE at OFFSET. */
1168
1169 static tree
1170 subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
1171 tree vtable)
1172 {
1173 tree v = BINFO_VTABLE (binfo);
1174 int i;
1175 tree base_binfo;
1176 unsigned HOST_WIDE_INT this_offset;
1177
1178 if (v)
1179 {
1180 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
1181 gcc_unreachable ();
1182
1183 if (offset == this_offset
1184 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
1185 return binfo;
1186 }
1187
1188 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1189 if (polymorphic_type_binfo_p (base_binfo))
1190 {
1191 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
1192 if (base_binfo)
1193 return base_binfo;
1194 }
1195 return NULL;
1196 }
1197
1198 /* T is a known constant value of a virtual table pointer.
1199 Store the virtual table to *V and its offset to *OFFSET.
1200 Return false if T does not look like a virtual table reference. */
1201
1202 bool
1203 vtable_pointer_value_to_vtable (tree t, tree *v, unsigned HOST_WIDE_INT *offset)
1204 {
1205 /* We expect &MEM[(void *)&virtual_table + 16B].
1206 We obtain object's BINFO from the context of the virtual table.
1207 This one contains pointer to virtual table represented via
1208 POINTER_PLUS_EXPR. Verify that this pointer matches what
1209 we propagated through.
1210
1211 In the case of virtual inheritance, the virtual tables may
1212 be nested, i.e. the offset may be different from 16 and we may
1213 need to dive into the type representation. */
1214 if (TREE_CODE (t) == ADDR_EXPR
1215 && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
1216 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
1217 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
1218 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
1219 == VAR_DECL)
1220 && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
1221 (TREE_OPERAND (t, 0), 0), 0)))
1222 {
1223 *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
1224 *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
1225 return true;
1226 }
1227
1228 /* An alternative representation, used by the C++ frontend, is
1229 POINTER_PLUS_EXPR. We need to handle it when T comes from a static
1230 variable initializer or from a BINFO. */
1231 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1232 {
1233 *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
1234 t = TREE_OPERAND (t, 0);
1235 }
1236 else
1237 *offset = 0;
1238
1239 if (TREE_CODE (t) != ADDR_EXPR)
1240 return false;
1241 *v = TREE_OPERAND (t, 0);
1242 return true;
1243 }
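
/* For example, given the hypothetical constant &MEM[(void *)&_ZTV1A + 16B]
   (the first form handled above), we store the VAR_DECL of the vtable
   _ZTV1A to *V and 16 to *OFFSET; the POINTER_PLUS_EXPR form
   &_ZTV1A p+ 16 yields the same result.  */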
1244
1245 /* T is known constant value of virtual table pointer. Return BINFO of the
1246 instance type. */
1247
1248 tree
1249 vtable_pointer_value_to_binfo (tree t)
1250 {
1251 tree vtable;
1252 unsigned HOST_WIDE_INT offset;
1253
1254 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
1255 return NULL_TREE;
1256
1257 /* FIXME: for stores of construction vtables we return NULL,
1258 because we do not have the BINFO for those. Eventually we should fix
1259 our representation to allow this case to be handled, too.
1260 In the case we see a store of a BINFO we may however assume
1261 that standard folding will be able to cope with it. */
1262 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
1263 offset, vtable);
1264 }
1265
1266 /* Produce a polymorphic call context for a call to a method of an instance
1267 that is located within BASE (which is assumed to be a decl) at OFFSET. */
1268
1269 static void
1270 get_polymorphic_call_info_for_decl (ipa_polymorphic_call_context *context,
1271 tree base, HOST_WIDE_INT offset)
1272 {
1273 gcc_assert (DECL_P (base));
1274
1275 context->outer_type = TREE_TYPE (base);
1276 context->offset = offset;
1277 /* Make very conservative assumption that all objects
1278 may be in construction.
1279 TODO: ipa-prop already contains code to tell better.
1280 merge it later. */
1281 context->maybe_in_construction = true;
1282 context->maybe_derived_type = false;
1283 }
1284
1285 /* CST is an invariant (address of a decl); try to get a meaningful
1286 polymorphic call context for a polymorphic call of a method
1287 of an instance of OTR_TYPE that is located at OFFSET of this invariant.
1288 Return FALSE if nothing meaningful can be found. */
1289
1290 bool
1291 get_polymorphic_call_info_from_invariant (ipa_polymorphic_call_context *context,
1292 tree cst,
1293 tree otr_type,
1294 HOST_WIDE_INT offset)
1295 {
1296 HOST_WIDE_INT offset2, size, max_size;
1297 tree base;
1298
1299 if (TREE_CODE (cst) != ADDR_EXPR)
1300 return false;
1301
1302 cst = TREE_OPERAND (cst, 0);
1303 base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
1304 if (!DECL_P (base) || max_size == -1 || max_size != size)
1305 return false;
1306
1307 /* Only type inconsistent programs can have otr_type that is
1308 not part of outer type. */
1309 if (!contains_type_p (TREE_TYPE (base), offset, otr_type))
1310 return false;
1311
1312 get_polymorphic_call_info_for_decl (context, base, offset);
1313 return true;
1314 }
1315
1316 /* Given a REF call in FNDECL, determine the class of the polymorphic
1317 call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
1318 Return the pointer to the object described by the context. */
1319
1320 tree
1321 get_polymorphic_call_info (tree fndecl,
1322 tree ref,
1323 tree *otr_type,
1324 HOST_WIDE_INT *otr_token,
1325 ipa_polymorphic_call_context *context)
1326 {
1327 tree base_pointer;
1328 *otr_type = obj_type_ref_class (ref);
1329 *otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));
1330
1331 /* Set up basic info in case we find nothing interesting in the analysis. */
1332 context->outer_type = *otr_type;
1333 context->offset = 0;
1334 base_pointer = OBJ_TYPE_REF_OBJECT (ref);
1335 context->maybe_derived_type = true;
1336 context->maybe_in_construction = true;
1337
1338 /* Walk SSA for outer object. */
1339 do
1340 {
1341 if (TREE_CODE (base_pointer) == SSA_NAME
1342 && !SSA_NAME_IS_DEFAULT_DEF (base_pointer)
1343 && SSA_NAME_DEF_STMT (base_pointer)
1344 && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
1345 {
1346 base_pointer = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (base_pointer));
1347 STRIP_NOPS (base_pointer);
1348 }
1349 else if (TREE_CODE (base_pointer) == ADDR_EXPR)
1350 {
1351 HOST_WIDE_INT size, max_size;
1352 HOST_WIDE_INT offset2;
1353 tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
1354 &offset2, &size, &max_size);
1355
1356 /* If this is a varying address, punt. */
1357 if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
1358 && max_size != -1
1359 && max_size == size)
1360 {
1361 /* We found a dereference of a pointer. The type of the pointer
1362 and of the MEM_REF is meaningless, but we can look further. */
1363 if (TREE_CODE (base) == MEM_REF)
1364 {
1365 base_pointer = TREE_OPERAND (base, 0);
1366 context->offset
1367 += offset2 + mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
1368 context->outer_type = NULL;
1369 }
1370 /* We found base object. In this case the outer_type
1371 is known. */
1372 else if (DECL_P (base))
1373 {
1374 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (base)));
1375
1376 /* Only type inconsistent programs can have otr_type that is
1377 not part of outer type. */
1378 if (!contains_type_p (TREE_TYPE (base),
1379 context->offset + offset2, *otr_type))
1380 {
1381 /* Use OTR_TOKEN = INT_MAX as a marker of probably type-inconsistent
1382 code sequences; we arrange the calls to be builtin_unreachable
1383 later. */
1384 *otr_token = INT_MAX;
1385 return base_pointer;
1386 }
1387 get_polymorphic_call_info_for_decl (context, base,
1388 context->offset + offset2);
1389 return NULL;
1390 }
1391 else
1392 break;
1393 }
1394 else
1395 break;
1396 }
1397 else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
1398 && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
1399 {
1400 context->offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
1401 * BITS_PER_UNIT;
1402 base_pointer = TREE_OPERAND (base_pointer, 0);
1403 }
1404 else
1405 break;
1406 }
1407 while (true);
1408
1409 /* Try to determine type of the outer object. */
1410 if (TREE_CODE (base_pointer) == SSA_NAME
1411 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
1412 && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
1413 {
1414 /* See if parameter is THIS pointer of a method. */
1415 if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
1416 && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
1417 {
1418 context->outer_type = TREE_TYPE (TREE_TYPE (base_pointer));
1419 gcc_assert (TREE_CODE (context->outer_type) == RECORD_TYPE);
1420
1421 /* Dynamic casting has possibly upcasted the type
1422 in the hierarchy. In this case the outer type is less
1423 informative than the inner type and we should forget
1424 about it. */
1425 if (!contains_type_p (context->outer_type, context->offset,
1426 *otr_type))
1427 {
1428 context->outer_type = NULL;
1429 return base_pointer;
1430 }
1431
1432 /* If the function is a constructor or destructor, then
1433 the type is possibly in construction, but we know
1434 it is not a derived type. */
1435 if (DECL_CXX_CONSTRUCTOR_P (fndecl)
1436 || DECL_CXX_DESTRUCTOR_P (fndecl))
1437 {
1438 context->maybe_in_construction = true;
1439 context->maybe_derived_type = false;
1440 }
1441 else
1442 {
1443 context->maybe_derived_type = true;
1444 context->maybe_in_construction = false;
1445 }
1446 return base_pointer;
1447 }
1448 /* Non-PODs passed by value are really passed by invisible
1449 reference. In this case we also know the type of the
1450 object. */
1451 if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
1452 {
1453 context->outer_type = TREE_TYPE (TREE_TYPE (base_pointer));
1454 gcc_assert (!POINTER_TYPE_P (context->outer_type));
1455 /* Only type inconsistent programs can have otr_type that is
1456 not part of outer type. */
1457 if (!contains_type_p (context->outer_type, context->offset,
1458 *otr_type))
1459 {
1460 /* Use OTR_TOKEN = INT_MAX as a marker of probably type-inconsistent
1461 code sequences; we arrange the calls to be builtin_unreachable
1462 later. */
1463 *otr_token = INT_MAX;
1464 return base_pointer;
1465 }
1466 context->maybe_derived_type = false;
1467 context->maybe_in_construction = false;
1468 return base_pointer;
1469 }
1470 }
1471 /* TODO: There are multiple ways to derive a type. For instance,
1472 BASE_POINTER may be passed to a constructor call prior to our reference.
1473 We do not do this kind of flow-sensitive analysis yet. */
1474 return base_pointer;
1475 }
1476
1477 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
1478 Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
1479 and insert them into NODES.
1480
1481 MATCHED_VTABLES and INSERTED are used to avoid duplicated work. */
1482
1483 static void
1484 record_targets_from_bases (tree otr_type,
1485 HOST_WIDE_INT otr_token,
1486 tree outer_type,
1487 HOST_WIDE_INT offset,
1488 vec <cgraph_node *> &nodes,
1489 pointer_set_t *inserted,
1490 pointer_set_t *matched_vtables,
1491 bool *completep)
1492 {
1493 while (true)
1494 {
1495 HOST_WIDE_INT pos, size;
1496 tree base_binfo;
1497 tree fld;
1498
1499 if (types_same_for_odr (outer_type, otr_type))
1500 return;
1501
1502 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
1503 {
1504 if (TREE_CODE (fld) != FIELD_DECL)
1505 continue;
1506
1507 pos = int_bit_position (fld);
1508 size = tree_to_shwi (DECL_SIZE (fld));
1509 if (pos <= offset && (pos + size) > offset
1510 /* Do not get confused by zero sized bases. */
1511 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
1512 break;
1513 }
1514 /* Within a class type we should always find corresponding fields. */
1515 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
1516
1517 /* Nonbasetypes should have been stripped by outer_class_type. */
1518 gcc_assert (DECL_ARTIFICIAL (fld));
1519
1520 outer_type = TREE_TYPE (fld);
1521 offset -= pos;
1522
1523 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
1524 offset, otr_type);
1525 if (!base_binfo)
1526 {
1527 gcc_assert (odr_violation_reported);
1528 return;
1529 }
1530 gcc_assert (base_binfo);
1531 if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo)))
1532 {
1533 bool can_refer;
1534 tree target = gimple_get_virt_method_for_binfo (otr_token,
1535 base_binfo,
1536 &can_refer);
1537 if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
1538 maybe_record_node (nodes, target, inserted, can_refer, completep);
1539 pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo));
1540 }
1541 }
1542 }
1543
1544 /* When a virtual table is removed, we may need to flush the cache. */
1545
1546 static void
1547 devirt_variable_node_removal_hook (varpool_node *n,
1548 void *d ATTRIBUTE_UNUSED)
1549 {
1550 if (cached_polymorphic_call_targets
1551 && DECL_VIRTUAL_P (n->decl)
1552 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
1553 free_polymorphic_call_targets_hash ();
1554 }
1555
1556 /* Return a vector containing possible targets of a polymorphic call of type
1557 OTR_TYPE calling method OTR_TOKEN within a type of OTR_OUTER_TYPE and OFFSET.
1558 If INCLUDE_BASES is true, walk also the base types of OUTER_TYPES containing
1559 OTR_TYPE and include their virtual methods. This is useful for types
1560 possibly in construction or destruction where the virtual table may
1561 temporarily change to one of the base types. INCLUDE_DERIVED_TYPES makes
1562 us walk the inheritance graph for all derivations.
1563
1564 OTR_TOKEN == INT_MAX is used to mark calls that are provably
1565 undefined and should be redirected to unreachable.
1566
1567 If COMPLETEP is non-NULL, store true if the list is complete.
1568 CACHE_TOKEN (if non-NULL) will be set to a unique ID of the entry
1569 in the target cache. If the user needs to visit every target list
1570 just once, it can memoize them.
1571
1572 NONCONSTRUCTION_TARGETS specifies the number of targets under the assumption
1573 that the type is not in construction. Those targets appear first in the
1574 vector returned.
1575
1576 The returned vector is placed into the cache. It is NOT the caller's
1577 responsibility to free it. The vector can be freed on a cgraph_remove_node
1578 call if the particular node is a virtual function present in the cache. */
1579
1580 vec <cgraph_node *>
1581 possible_polymorphic_call_targets (tree otr_type,
1582 HOST_WIDE_INT otr_token,
1583 ipa_polymorphic_call_context context,
1584 bool *completep,
1585 void **cache_token,
1586 int *nonconstruction_targetsp)
1587 {
1588 static struct cgraph_node_hook_list *node_removal_hook_holder;
1589 pointer_set_t *inserted;
1590 pointer_set_t *matched_vtables;
1591 vec <cgraph_node *> nodes = vNULL;
1592 vec <tree> bases_to_consider = vNULL;
1593 odr_type type, outer_type;
1594 polymorphic_call_target_d key;
1595 polymorphic_call_target_d **slot;
1596 unsigned int i;
1597 tree binfo, target;
1598 bool complete;
1599 bool can_refer;
1600 bool skipped = false;
1601
1602 /* If ODR is not initialized, return empty incomplete list. */
1603 if (!odr_hash)
1604 {
1605 if (completep)
1606 *completep = false;
1607 if (cache_token)
1608 *cache_token = NULL;
1609 if (nonconstruction_targetsp)
1610 *nonconstruction_targetsp = 0;
1611 return nodes;
1612 }
1613
1614 /* If we hit type inconsistency, just return empty list of targets. */
1615 if (otr_token == INT_MAX)
1616 {
1617 if (completep)
1618 *completep = true;
1619 if (cache_token)
1620 *cache_token = NULL;
1621 if (nonconstruction_targetsp)
1622 *nonconstruction_targetsp = 0;
1623 return nodes;
1624 }
1625
1626 type = get_odr_type (otr_type, true);
1627
1628 /* Lookup the outer class type we want to walk. */
1629 if (context.outer_type
1630 && !get_class_context (&context, otr_type))
1631 {
1632 if (completep)
1633 *completep = false;
1634 if (cache_token)
1635 *cache_token = NULL;
1636 if (nonconstruction_targetsp)
1637 *nonconstruction_targetsp = 0;
1638 return nodes;
1639 }
1640
1641 /* We canonicalize our query, so we do not need extra hashtable entries. */
1642
1643 /* Without an outer type, we have no use for the offset. Just do the
1644 basic search from the inner type. */
1645 if (!context.outer_type)
1646 {
1647 context.outer_type = otr_type;
1648 context.offset = 0;
1649 }
1650 /* We need to update our hierarchy if the type does not exist. */
1651 outer_type = get_odr_type (context.outer_type, true);
1652 /* If the type is final, there are no derivations. */
1653 if (TYPE_FINAL_P (outer_type->type))
1654 context.maybe_derived_type = false;
1655
1656 /* Initialize query cache. */
1657 if (!cached_polymorphic_call_targets)
1658 {
1659 cached_polymorphic_call_targets = pointer_set_create ();
1660 polymorphic_call_target_hash
1661 = new polymorphic_call_target_hash_type (23);
1662 if (!node_removal_hook_holder)
1663 {
1664 node_removal_hook_holder =
1665 cgraph_add_node_removal_hook (&devirt_node_removal_hook, NULL);
1666 varpool_add_node_removal_hook (&devirt_variable_node_removal_hook,
1667 NULL);
1668 }
1669 }
1670
1671 /* Lookup cached answer. */
1672 key.type = type;
1673 key.otr_token = otr_token;
1674 key.context = context;
1675 slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
1676 if (cache_token)
1677 *cache_token = (void *)*slot;
1678 if (*slot)
1679 {
1680 if (completep)
1681 *completep = (*slot)->complete;
1682 if (nonconstruction_targetsp)
1683 *nonconstruction_targetsp = (*slot)->nonconstruction_targets;
1684 return (*slot)->targets;
1685 }
1686
1687 complete = true;
1688
1689 /* Do actual search. */
1690 timevar_push (TV_IPA_VIRTUAL_CALL);
1691 *slot = XCNEW (polymorphic_call_target_d);
1692 if (cache_token)
1693 *cache_token = (void *)*slot;
1694 (*slot)->type = type;
1695 (*slot)->otr_token = otr_token;
1696 (*slot)->context = context;
1697
1698 inserted = pointer_set_create ();
1699 matched_vtables = pointer_set_create ();
1700
1701 /* First see the virtual method of the type itself. */
1702 binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
1703 context.offset, otr_type);
1704 if (binfo)
1705 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
1706 &can_refer);
1707 else
1708 {
1709 gcc_assert (odr_violation_reported);
1710 target = NULL;
1711 }
1712
1713 /* Destructors are never called through construction virtual tables,
1714 because the type is always known. */
1715 if (target && DECL_CXX_DESTRUCTOR_P (target))
1716 context.maybe_in_construction = false;
1717
1718 if (target)
1719 {
1720 /* If the method is declared final, we don't need
1721 to walk derived types.  */
1722 if (DECL_FINAL_P (target))
1723 context.maybe_derived_type = false;
1724 }
1725
1726 /* If OUTER_TYPE is abstract, we know we are not seeing an instance of it.  */
1727 if (type_possibly_instantiated_p (outer_type->type))
1728 maybe_record_node (nodes, target, inserted, can_refer, &complete);
1729 else
1730 {
1731 skipped = true;
1732 gcc_assert (in_lto_p || context.maybe_derived_type);
1733 }
1734
1735 if (binfo)
pointer_set_insert (matched_vtables, BINFO_VTABLE (binfo));
1736
1737 /* Next walk recursively all derived types. */
1738 if (context.maybe_derived_type)
1739 {
1740 /* For types in anonymous namespace we can attempt to build the full type.
1741 All derivations must be within this unit (unless we see a partial unit).  */
1742 if (!type->all_derivations_known)
1743 complete = false;
1744 for (i = 0; i < outer_type->derived_types.length (); i++)
1745 possible_polymorphic_call_targets_1 (nodes, inserted,
1746 matched_vtables,
1747 otr_type,
1748 outer_type->derived_types[i],
1749 otr_token, outer_type->type,
1750 context.offset, &complete,
1751 bases_to_consider,
1752 context.maybe_in_construction);
1753 }
1754
1755 /* Finally walk bases, if asked to.  First remember how many targets were
found without considering construction; entries recorded past this
point are reachable only while the object is under construction.  */
1756 (*slot)->nonconstruction_targets = nodes.length ();
1757
1758 /* Destructors are never called through construction virtual tables,
1759 because the type is always known.  One of the entries may be
1760 cxa_pure_virtual, so check at least the first two of them.  */
1761 if (context.maybe_in_construction)
1762 for (i = 0; i < MIN (nodes.length (), 2); i++)
1763 if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
1764 context.maybe_in_construction = false;
1765 if (context.maybe_in_construction)
1766 {
1767 if (type != outer_type
1768 && (!skipped
1769 || (context.maybe_derived_type
1770 && !type_all_derivations_known_p (outer_type->type))))
1771 record_targets_from_bases (otr_type, otr_token, outer_type->type,
1772 context.offset, nodes, inserted,
1773 matched_vtables, &complete);
1774 if (skipped)
1775 maybe_record_node (nodes, target, inserted, can_refer, &complete);
1776 for (i = 0; i < bases_to_consider.length (); i++)
1777 maybe_record_node (nodes, bases_to_consider[i], inserted,
can_refer, &complete);
1778 }
1779 bases_to_consider.release ();
1780
1781 (*slot)->targets = nodes;
1782 (*slot)->complete = complete;
1783 if (completep)
1784 *completep = complete;
1785 if (nonconstruction_targetsp)
1786 *nonconstruction_targetsp = (*slot)->nonconstruction_targets;
1787
1788 pointer_set_destroy (inserted);
1789 pointer_set_destroy (matched_vtables);
1790 timevar_pop (TV_IPA_VIRTUAL_CALL);
1791 return nodes;
1792 }
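
/* A minimal usage sketch (a hypothetical caller, not part of this file):

     bool final;
     vec <cgraph_node *> targets
       = possible_polymorphic_call_targets (otr_type, otr_token,
					    context, &final);
     if (final && targets.length () == 1)
       devirtualize_to (targets[0]);   <- illustrative only

   The returned vector is owned by the target cache above; callers must
   not release it.  */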
1793
1794 /* Dump all possible targets of a polymorphic call. */
1795
1796 void
1797 dump_possible_polymorphic_call_targets (FILE *f,
1798 tree otr_type,
1799 HOST_WIDE_INT otr_token,
1800 const ipa_polymorphic_call_context &ctx)
1801 {
1802 vec <cgraph_node *> targets;
1803 bool final;
1804 odr_type type = get_odr_type (otr_type, false);
1805 unsigned int i;
1806 int nonconstruction;
1807
1808 if (!type)
1809 return;
1810 targets = possible_polymorphic_call_targets (otr_type, otr_token,
1811 ctx,
1812 &final, NULL, &nonconstruction);
1813 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
1814 print_generic_expr (f, type->type, TDF_SLIM);
1815 fprintf (f, " token %i\n", (int)otr_token);
1816 if (ctx.outer_type || ctx.offset)
1817 {
1818 fprintf (f, " Contained in type:");
1819 print_generic_expr (f, ctx.outer_type, TDF_SLIM);
1820 fprintf (f, " at offset " HOST_WIDE_INT_PRINT_DEC "\n",
1821 ctx.offset);
1822 }
1823
1824 fprintf (f, " %s%s%s\n ",
1825 final ? "This is a complete list." :
1826 "This is partial list; extra targets may be defined in other units.",
1827 ctx.maybe_in_construction ? " (base types included)" : "",
1828 ctx.maybe_derived_type ? " (derived types included)" : "");
1829 for (i = 0; i < targets.length (); i++)
1830 {
1831 char *name = NULL;
1832 if (i == (unsigned) nonconstruction)
1833 fprintf (f, "\n If the type is in construction,"
1834 " then additional targets are:\n"
1835 " ");
1836 if (in_lto_p)
1837 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
1838 fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
1839 if (in_lto_p)
1840 free (name);
1841 if (!targets[i]->definition)
1842 fprintf (f, " (no definition%s)",
1843 DECL_DECLARED_INLINE_P (targets[i]->decl)
1844 ? " inline" : "");
1845 }
1846 fprintf (f, "\n\n");
1847 }
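
/* For illustration only, the dump above produces output roughly of the
   form:

     Targets of polymorphic call of type 2:struct A token 1
       This is a complete list.
        virtual void A::foo()/34

   (the exact names, type ids and order numbers depend on the unit).  */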
1848
1849
1850 /* Return true if N can possibly be a target of a polymorphic call of
1851 OTR_TYPE/OTR_TOKEN.  */
1852
1853 bool
1854 possible_polymorphic_call_target_p (tree otr_type,
1855 HOST_WIDE_INT otr_token,
1856 const ipa_polymorphic_call_context &ctx,
1857 struct cgraph_node *n)
1858 {
1859 vec <cgraph_node *> targets;
1860 unsigned int i;
1861 enum built_in_function fcode;
1862 bool final;
1863
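/* Calls folded to __builtin_unreachable or __builtin_trap are known
   never to happen at run time, so any node is an acceptable target.  */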
1864 if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
1865 && ((fcode = DECL_FUNCTION_CODE (n->decl))
1866 == BUILT_IN_UNREACHABLE
1867 || fcode == BUILT_IN_TRAP))
1868 return true;
1869
1870 if (!odr_hash)
1871 return true;
1872 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
1873 for (i = 0; i < targets.length (); i++)
1874 if (symtab_semantically_equivalent_p (n, targets[i]))
1875 return true;
1876
1877 /* At the moment we allow the middle end to dig out new external
1878 declarations as possible targets of polymorphic calls.  */
1879 if (!final && !n->definition)
1880 return true;
1881 return false;
1882 }
1883
1884
1885 /* After callgraph construction new external nodes may appear.
1886 Add them into the graph. */
1887
1888 void
1889 update_type_inheritance_graph (void)
1890 {
1891 struct cgraph_node *n;
1892
1893 if (!odr_hash)
1894 return;
1895 free_polymorphic_call_targets_hash ();
1896 timevar_push (TV_IPA_INHERITANCE);
1897 /* We reconstruct the graph starting from the types of all methods seen
1898 in the unit.  */
1899 FOR_EACH_FUNCTION (n)
1900 if (DECL_VIRTUAL_P (n->decl)
1901 && !n->definition
1902 && symtab_real_symbol_p (n))
1903 get_odr_type (method_class_type (TREE_TYPE (n->decl)), true);
1904 timevar_pop (TV_IPA_INHERITANCE);
1905 }
1906
1907
1908 /* Return true if N looks like a likely target of a polymorphic call.
1909 Rule out cxa_pure_virtual, noreturns, functions declared cold, and
1910 other obvious cases.  */
1911
1912 bool
1913 likely_target_p (struct cgraph_node *n)
1914 {
1915 int flags;
1916 /* cxa_pure_virtual and similar things are not likely. */
1917 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
1918 return false;
1919 flags = flags_from_decl_or_type (n->decl);
1920 if (flags & ECF_NORETURN)
1921 return false;
1922 if (lookup_attribute ("cold",
1923 DECL_ATTRIBUTES (n->decl)))
1924 return false;
1925 if (n->frequency < NODE_FREQUENCY_NORMAL)
1926 return false;
1927 /* If there are no live virtual tables referring to the target,
1928 the only way the target can be called is through an instance coming
1929 from another compilation unit; speculative devirtualization is built
1930 around the assumption that this won't happen.  */
1931 if (!referenced_from_vtable_p (n))
1932 return false;
1933 return true;
1934 }
1935
1936 /* The ipa-devirt pass.
1937 When a polymorphic call has only one likely target in the unit,
1938 turn it into a speculative call.  */
1939
1940 static unsigned int
1941 ipa_devirt (void)
1942 {
1943 struct cgraph_node *n;
1944 struct pointer_set_t *bad_call_targets = pointer_set_create ();
1945 struct cgraph_edge *e;
1946
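/* Statistics printed to the dump file once all functions are processed.  */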
1947 int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
1948 int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
1949 int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
1950
1951 FOR_EACH_DEFINED_FUNCTION (n)
1952 {
1953 bool update = false;
1954 if (dump_file && n->indirect_calls)
1955 fprintf (dump_file, "\n\nProcessing function %s/%i\n",
1956 n->name (), n->order);
1957 for (e = n->indirect_calls; e; e = e->next_callee)
1958 if (e->indirect_info->polymorphic)
1959 {
1960 struct cgraph_node *likely_target = NULL;
1961 void *cache_token;
1962 bool final;
1963 int nonconstruction_targets;
1964 vec <cgraph_node *> targets
1965 = possible_polymorphic_call_targets
1966 (e, &final, &cache_token, &nonconstruction_targets);
1967 unsigned int i;
1968
1969 if (dump_file)
1970 dump_possible_polymorphic_call_targets
1971 (dump_file, e);
1972
1973 npolymorphic++;
1974
1975 if (!cgraph_maybe_hot_edge_p (e))
1976 {
1977 if (dump_file)
1978 fprintf (dump_file, "Call is cold\n\n");
1979 ncold++;
1980 continue;
1981 }
1982 if (e->speculative)
1983 {
1984 if (dump_file)
1985 fprintf (dump_file, "Call is already speculated\n\n");
1986 nspeculated++;
1987
1988 /* When dumping, continue so we can check whether we agree with the speculation.  */
1989 if (!dump_file)
1990 continue;
1991 }
1992 if (pointer_set_contains (bad_call_targets,
1993 cache_token))
1994 {
1995 if (dump_file)
1996 fprintf (dump_file, "Target list is known to be useless\n\n");
1997 nmultiple++;
1998 continue;
1999 }
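/* Pick the unique likely target, if there is exactly one.  A second
   likely target among the non-construction entries makes speculation
   pointless, so give up in that case.  */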
2000 for (i = 0; i < targets.length (); i++)
2001 if (likely_target_p (targets[i]))
2002 {
2003 if (likely_target)
2004 {
2005 if (i < (unsigned) nonconstruction_targets)
2006 {
2007 likely_target = NULL;
2008 if (dump_file)
2009 fprintf (dump_file, "More than one likely target\n\n");
2010 nmultiple++;
2011 }
2012 break;
2013 }
2014 likely_target = targets[i];
2015 }
2016 if (!likely_target)
2017 {
2018 pointer_set_insert (bad_call_targets, cache_token);
2019 continue;
2020 }
2021 /* This is reached only when dumping; check if we agree or disagree
2022 with the speculation. */
2023 if (e->speculative)
2024 {
2025 struct cgraph_edge *e2;
2026 struct ipa_ref *ref;
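/* Fetch the direct edge created by the earlier speculation so we can
   compare its callee with the target we would choose now.  */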
2027 cgraph_speculative_call_info (e, e2, e, ref);
2028 if (cgraph_function_or_thunk_node (e2->callee, NULL)
2029 == cgraph_function_or_thunk_node (likely_target, NULL))
2030 {
2031 fprintf (dump_file, "We agree with speculation\n\n");
2032 nok++;
2033 }
2034 else
2035 {
2036 fprintf (dump_file, "We disagree with speculation\n\n");
2037 nwrong++;
2038 }
2039 continue;
2040 }
2041 if (!likely_target->definition)
2042 {
2043 if (dump_file)
2044 fprintf (dump_file, "Target is not a definition\n\n");
2045 nnotdefined++;
2046 continue;
2047 }
2048 /* Do not introduce new references to external symbols.  While we
2049 could handle these just fine, it is common for programs to be
2050 built incorrectly, with headers declaring methods that are never
2051 actually linked in.  */
2052 if (DECL_EXTERNAL (likely_target->decl))
2053 {
2054 if (dump_file)
2055 fprintf (dump_file, "Target is external\n\n");
2056 nexternal++;
2057 continue;
2058 }
2059 /* Don't use an implicitly-declared destructor (c++/58678). */
2060 struct cgraph_node *non_thunk_target
2061 = cgraph_function_node (likely_target);
2062 if (DECL_ARTIFICIAL (non_thunk_target->decl)
2063 && DECL_COMDAT (non_thunk_target->decl))
2064 {
2065 if (dump_file)
2066 fprintf (dump_file, "Target is artificial\n\n");
2067 nartificial++;
2068 continue;
2069 }
2070 if (cgraph_function_body_availability (likely_target)
2071 <= AVAIL_OVERWRITABLE
2072 && symtab_can_be_discarded (likely_target))
2073 {
2074 if (dump_file)
2075 fprintf (dump_file, "Target is overwritable\n\n");
2076 noverwritable++;
2077 continue;
2078 }
2079 else if (dbg_cnt (devirt))
2080 {
2081 if (dump_enabled_p ())
2082 {
2083 location_t locus = gimple_location_safe (e->call_stmt);
2084 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
2085 "speculatively devirtualizing call in %s/%i to %s/%i\n",
2086 n->name (), n->order,
2087 likely_target->name (),
2088 likely_target->order);
2089 }
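/* The target may still be interposed at link time.  When it cannot be
   discarded anyway, prefer speculating to a non-overwritable alias of
   it, so the new direct call stays valid.  */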
2090 if (!symtab_can_be_discarded (likely_target))
2091 {
2092 cgraph_node *alias;
2093 alias = cgraph (symtab_nonoverwritable_alias
2094 (likely_target));
2095 if (alias)
2096 likely_target = alias;
2097 }
2098 nconverted++;
2099 update = true;
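/* Transfer 80% of the count and frequency to the new direct edge; the
   rest stays on the indirect edge as the fallback path.  The 8/10
   ratio is a heuristic guess at how often the speculation holds.  */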
2100 cgraph_turn_edge_to_speculative
2101 (e, likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
2102 }
2103 }
2104 if (update)
2105 inline_update_overall_summary (n);
2106 }
2107 pointer_set_destroy (bad_call_targets);
2108
2109 if (dump_file)
2110 fprintf (dump_file,
2111 "%i polymorphic calls, %i devirtualized,"
2112 " %i speculatively devirtualized, %i cold\n"
2113 "%i have multiple targets, %i overwritable,"
2114 " %i already speculated (%i agree, %i disagree),"
2115 " %i external, %i not defined, %i artificial\n",
2116 npolymorphic, ndevirtualized, nconverted, ncold,
2117 nmultiple, noverwritable, nspeculated, nok, nwrong,
2118 nexternal, nnotdefined, nartificial);
2119 return ndevirtualized ? TODO_remove_functions : 0;
2120 }
2121
2122 namespace {
2123
2124 const pass_data pass_data_ipa_devirt =
2125 {
2126 IPA_PASS, /* type */
2127 "devirt", /* name */
2128 OPTGROUP_NONE, /* optinfo_flags */
2129 true, /* has_execute */
2130 TV_IPA_DEVIRT, /* tv_id */
2131 0, /* properties_required */
2132 0, /* properties_provided */
2133 0, /* properties_destroyed */
2134 0, /* todo_flags_start */
2135 ( TODO_dump_symtab ), /* todo_flags_finish */
2136 };
2137
2138 class pass_ipa_devirt : public ipa_opt_pass_d
2139 {
2140 public:
2141 pass_ipa_devirt (gcc::context *ctxt)
2142 : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
2143 NULL, /* generate_summary */
2144 NULL, /* write_summary */
2145 NULL, /* read_summary */
2146 NULL, /* write_optimization_summary */
2147 NULL, /* read_optimization_summary */
2148 NULL, /* stmt_fixup */
2149 0, /* function_transform_todo_flags_start */
2150 NULL, /* function_transform */
2151 NULL) /* variable_transform */
2152 {}
2153
2154 /* opt_pass methods: */
2155 virtual bool gate (function *)
2156 {
2157 return (flag_devirtualize
2158 && flag_devirtualize_speculatively
2159 && optimize);
2160 }
2161
2162 virtual unsigned int execute (function *) { return ipa_devirt (); }
2163
2164 }; // class pass_ipa_devirt
2165
2166 } // anon namespace
2167
2168 ipa_opt_pass_d *
2169 make_pass_ipa_devirt (gcc::context *ctxt)
2170 {
2171 return new pass_ipa_devirt (ctxt);
2172 }
2173
2174 #include "gt-ipa-devirt.h"