/* Basic IPA utilities for type inheritance graph construction and
   devirtualization.
   Copyright (C) 2013-2014 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Brief vocabulary:
     ODR = One Definition Rule
	In short, the ODR states that:
	1 In any translation unit, a template, type, function, or object can
	  have no more than one definition.  Some of these can have any number
	  of declarations.  A definition provides an instance.
	2 In the entire program, an object or non-inline function cannot have
	  more than one definition; if an object or function is used, it must
	  have exactly one definition.  You can declare an object or function
	  that is never used, in which case you don't have to provide
	  a definition.  In no event can there be more than one definition.
	3 Some things, like types, templates, and extern inline functions, can
	  be defined in more than one translation unit.  For a given entity,
	  each definition must be the same.  Non-extern objects and functions
	  in different translation units are different entities, even if their
	  names and types are the same.

     OTR = OBJ_TYPE_REF
       This is the Gimple representation of type information of a polymorphic
       call.  It contains two parameters:
	 otr_type is the type of the class whose method is called.
	 otr_token is the index into the virtual table where the address is
	 taken.

     BINFO
       This is the type inheritance information attached to each tree
       RECORD_TYPE by the C++ frontend.  It provides information about base
       types and virtual tables.

       BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
       BINFO also links to its type by BINFO_TYPE and to the virtual table by
       BINFO_VTABLE.

       Base types of a given type are enumerated by the BINFO_BASE_BINFO
       vector.  Members of this vector are not BINFOs associated
       with a base type.  Rather they are new copies of BINFOs
       (base BINFOs).  Their virtual tables may differ from
       the virtual table of the base type.  Also BINFO_OFFSET specifies
       the offset of the base within the type.

       In the case of single inheritance, the virtual table is shared
       and BINFO_VTABLE of base BINFO is NULL.  In the case of multiple
       inheritance the individual virtual tables are pointed to by
       BINFO_VTABLE of base binfos (which differ from BINFO_VTABLE of
       the binfo associated with the base type).

       BINFO lookup for a given base type and offset can be done by
       get_binfo_at_offset.  It returns proper BINFO whose virtual table
       can be used for lookup of virtual methods associated with the
       base type.

     token
       This is an index of a virtual method in the virtual table associated
       with the type defining it.  Token can be looked up from OBJ_TYPE_REF
       or from DECL_VINDEX of a given virtual table.

     polymorphic (indirect) call
       This is the callgraph representation of a virtual method call.  Every
       polymorphic call contains otr_type and otr_token taken from the
       original OBJ_TYPE_REF at callgraph construction time.

   What we do here:

   build_type_inheritance_graph triggers a construction of the type
   inheritance graph.

     We reconstruct it based on types of methods we see in the unit.
     This means that the graph is not complete.  Types with no methods are not
     inserted into the graph.  Also types without virtual methods are not
     represented at all, though it may be easy to add this.

     The inheritance graph is represented as follows:

       Vertices are structures odr_type.  Every odr_type may correspond
       to one or more tree type nodes that are equivalent by ODR rule.
       (the multiple type nodes appear only with linktime optimization)

       Edges are represented by odr_type->base and odr_type->derived_types.
       At the moment we do not track offsets of types for multiple
       inheritance.  Adding this is easy.

  possible_polymorphic_call_targets returns, given the parameters found in
  an indirect polymorphic edge, all possible polymorphic call targets of
  the call.

  pass_ipa_devirt performs simple speculative devirtualization.
*/
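
/* A short illustration of the vocabulary above (an informal sketch that is
   not used by the code).  Given

     class A { public: virtual int f (); };
     class B : public A { public: virtual int f (); };

   the polymorphic call in

     int call (A *a) { return a->f (); }

   is represented by OBJ_TYPE_REF with otr_type A (the class whose method is
   called) and otr_token 0 (f occupies the first slot of A's virtual table).
   TYPE_BINFO (B) contains one BINFO_BASE_BINFO entry, a copy of A's BINFO;
   since this is single inheritance, the vtable is shared and BINFO_VTABLE
   of that base BINFO is NULL.  */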

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "print-tree.h"
#include "calls.h"
#include "cgraph.h"
#include "expr.h"
#include "tree-pass.h"
#include "pointer-set.h"
#include "target.h"
#include "hash-table.h"
#include "tree-pretty-print.h"
#include "ipa-utils.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "ipa-inline.h"
#include "diagnostic.h"
#include "tree-dfa.h"
#include "demangle.h"
#include "dbgcnt.h"

static bool odr_violation_reported = false;

/* Dummy polymorphic call context.  */

const ipa_polymorphic_call_context ipa_dummy_polymorphic_call_context
   = {0, NULL, false, true};

/* Pointer set of all call targets appearing in the cache.  */
static pointer_set_t *cached_polymorphic_call_targets;

/* The node of type inheritance graph.  For each type unique in
   One Definition Rule (ODR) sense, we produce one node linking all
   main variants of types equivalent to it, bases and derived types.  */

struct GTY(()) odr_type_d
{
  /* Leader type.  */
  tree type;
  /* All bases.  */
  vec<odr_type> GTY((skip)) bases;
  /* All derived types with virtual methods seen in unit.  */
  vec<odr_type> GTY((skip)) derived_types;

  /* All equivalent types, if more than one.  */
  vec<tree, va_gc> *types;
  /* Set of all equivalent types, if NON-NULL.  */
  pointer_set_t * GTY((skip)) types_set;

  /* Unique ID indexing the type in odr_types array.  */
  int id;
  /* Is it in anonymous namespace? */
  bool anonymous_namespace;
  /* Do we know about all derivations of given type? */
  bool all_derivations_known;
};

/* Return true if BINFO corresponds to a type with virtual methods.

   Every type has several BINFOs.  One is the BINFO associated with the
   type itself, while others represent bases of derived types.  The BINFOs
   representing bases do not have BINFO_VTABLE pointer set in the case of
   single inheritance (because vtables are shared).  Look up the BINFO of
   the type and check presence of its vtable.  */

static inline bool
polymorphic_type_binfo_p (tree binfo)
{
  /* See if BINFO's type has a virtual table associated with it.  */
  return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo)));
}

/* Return TRUE if all derived types of T are known and thus
   we may consider the walk of derived type complete.

   This is typically true only for final anonymous namespace types and types
   defined within functions (that may be COMDAT and thus shared across units,
   but with the same set of derived types).  */

static bool
type_all_derivations_known_p (tree t)
{
  if (TYPE_FINAL_P (t))
    return true;
  if (flag_ltrans)
    return false;
  if (type_in_anonymous_namespace_p (t))
    return true;
  return (decl_function_context (TYPE_NAME (t)) != NULL);
}
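
/* For instance (informally): for 'struct D final { virtual void f (); };',
   for a class defined in an anonymous namespace, or for a class local to a
   function, no new derived types can appear in other translation units, so
   the walk of derived types may be considered complete.  */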

/* Return TRUE if type's constructors are all visible.  */

static bool
type_all_ctors_visible_p (tree t)
{
  return !flag_ltrans
	 && cgraph_state >= CGRAPH_STATE_CONSTRUCTION
	 /* We cannot always use type_all_derivations_known_p.
	    For function local types we must assume the case where
	    the function is COMDAT and shared in between units.

	    TODO: These cases are quite easy to get, but we need
	    to keep track of C++ privatizing via -Wno-weak
	    as well as the IPA privatizing.  */
	 && type_in_anonymous_namespace_p (t);
}

/* Return TRUE if type may have instances.  */

static bool
type_possibly_instantiated_p (tree t)
{
  tree vtable;
  varpool_node *vnode;

  /* TODO: Add abstract types here.  */
  if (!type_all_ctors_visible_p (t))
    return true;

  vtable = BINFO_VTABLE (TYPE_BINFO (t));
  if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
    vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
  vnode = varpool_get_node (vtable);
  return vnode && vnode->definition;
}

/* One Definition Rule hashtable helpers.  */

struct odr_hasher
{
  typedef odr_type_d value_type;
  typedef union tree_node compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Produce hash based on type name.  */

hashval_t
hash_type_name (tree t)
{
  gcc_checking_assert (TYPE_MAIN_VARIANT (t) == t);

  /* If not in LTO, all main variants are unique, so we can do
     pointer hash.  */
  if (!in_lto_p)
    return htab_hash_pointer (t);

  /* Anonymous types are unique.  */
  if (type_in_anonymous_namespace_p (t))
    return htab_hash_pointer (t);

  /* For polymorphic types, we can simply hash the virtual table.  */
  if (TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
    {
      tree v = BINFO_VTABLE (TYPE_BINFO (t));
      hashval_t hash = 0;

      if (TREE_CODE (v) == POINTER_PLUS_EXPR)
	{
	  hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
	  v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
	}

      v = DECL_ASSEMBLER_NAME (v);
      hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
      return hash;
    }

  /* Rest is not implemented yet.  */
  gcc_unreachable ();
}
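
/* As an informal example: in LTO, a polymorphic type whose BINFO_VTABLE is
   the expression &_ZTV1A + 16 (a POINTER_PLUS_EXPR) hashes the constant
   offset 16 combined with the pointer hash of the interned assembler name
   _ZTV1A.  Two un-merged tree nodes of the same ODR type therefore hash
   equal, because their vtables carry the same assembler name.  */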

/* Return the computed hashcode for ODR_TYPE.  */

inline hashval_t
odr_hasher::hash (const value_type *odr_type)
{
  return hash_type_name (odr_type->type);
}

/* Compare types T1 and T2 and return true if they are
   equivalent.  */

inline bool
odr_hasher::equal (const value_type *t1, const compare_type *ct2)
{
  tree t2 = const_cast <tree> (ct2);

  gcc_checking_assert (TYPE_MAIN_VARIANT (ct2) == ct2);
  if (t1->type == t2)
    return true;
  if (!in_lto_p)
    return false;
  return types_same_for_odr (t1->type, t2);
}

/* Free ODR type V.  */

inline void
odr_hasher::remove (value_type *v)
{
  v->bases.release ();
  v->derived_types.release ();
  if (v->types_set)
    pointer_set_destroy (v->types_set);
  ggc_free (v);
}

/* ODR type hash used to look up ODR type based on tree type node.  */

typedef hash_table<odr_hasher> odr_hash_type;
static odr_hash_type *odr_hash;

/* ODR types are also stored into ODR_TYPE vector to allow consistent
   walking.  Bases appear before derived types.  Vector is garbage collected
   so we won't end up visiting empty types.  */

static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
#define odr_types (*odr_types_ptr)

/* Set TYPE_BINFO of TYPE and its variants to BINFO.  */
void
set_type_binfo (tree type, tree binfo)
{
  for (; type; type = TYPE_NEXT_VARIANT (type))
    if (COMPLETE_TYPE_P (type))
      TYPE_BINFO (type) = binfo;
    else
      gcc_assert (!TYPE_BINFO (type));
}
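
/* For example, if TYPE is the main variant of 'struct S' whose variant
   chain (TYPE_NEXT_VARIANT) contains 'const S' and 'volatile S', every
   complete variant receives BINFO, while incomplete variants are merely
   checked to carry no BINFO of their own.  */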

/* TYPE is equivalent to VAL by ODR, but its tree representation differs
   from VAL->type.  This may happen in LTO where tree merging did not merge
   all variants of the same type.  It may or may not mean an ODR violation.
   Add it to the list of duplicates and warn on some violations.  */

static void
add_type_duplicate (odr_type val, tree type)
{
  if (!val->types_set)
    val->types_set = pointer_set_create ();

  /* See if this duplicate is new.  */
  if (!pointer_set_insert (val->types_set, type))
    {
      bool merge = true;
      bool base_mismatch = false;
      gcc_assert (in_lto_p);
      vec_safe_push (val->types, type);
      unsigned int i,j;

      /* First we compare memory layout.  */
      if (!types_compatible_p (val->type, type))
	{
	  merge = false;
	  odr_violation_reported = true;
	  if (BINFO_VTABLE (TYPE_BINFO (val->type))
	      && warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
			     "type %qD violates one definition rule",
			     type))
	    inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
		    "a type with the same name but different layout is "
		    "defined in another translation unit");
	  if (cgraph_dump_file)
	    {
	      fprintf (cgraph_dump_file,
		       "ODR violation or merging or ODR type bug?\n");

	      print_node (cgraph_dump_file, "", val->type, 0);
	      putc ('\n',cgraph_dump_file);
	      print_node (cgraph_dump_file, "", type, 0);
	      putc ('\n',cgraph_dump_file);
	    }
	}

      /* Next sanity check that bases are the same.  If not, we will end
	 up producing wrong answers.  */
      for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
	if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type), i)))
	  {
	    odr_type base = get_odr_type
			       (BINFO_TYPE
				  (BINFO_BASE_BINFO (TYPE_BINFO (type),
						     i)),
				true);
	    if (val->bases.length () <= j || val->bases[j] != base)
	      base_mismatch = true;
	    j++;
	  }
      if (base_mismatch)
	{
	  merge = false;
	  odr_violation_reported = true;

	  if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
			  "type %qD violates one definition rule",
			  type))
	    inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
		    "a type with the same name but different bases is "
		    "defined in another translation unit");
	  if (cgraph_dump_file)
	    {
	      fprintf (cgraph_dump_file,
		       "ODR base violation or merging bug?\n");

	      print_node (cgraph_dump_file, "", val->type, 0);
	      putc ('\n',cgraph_dump_file);
	      print_node (cgraph_dump_file, "", type, 0);
	      putc ('\n',cgraph_dump_file);
	    }
	}

      /* Regularize things a little.  During LTO the same types may come with
	 different BINFOs: either because their virtual table was
	 not merged by tree merging and only later at decl merging, or
	 because one type comes with an external vtable while the other
	 comes with an internal one.  We want to merge equivalent binfos to
	 conserve memory and streaming overhead.

	 The external vtables are more harmful: they contain references
	 to external declarations of methods that may be defined in the
	 merged LTO unit.  For this reason we absolutely need to remove
	 them and replace by internal variants.  Not doing so will lead
	 to incomplete answers from possible_polymorphic_call_targets.  */
      if (!flag_ltrans && merge)
	{
	  tree master_binfo = TYPE_BINFO (val->type);
	  tree v1 = BINFO_VTABLE (master_binfo);
	  tree v2 = BINFO_VTABLE (TYPE_BINFO (type));

	  if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
	    {
	      gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
			  && operand_equal_p (TREE_OPERAND (v1, 1),
					      TREE_OPERAND (v2, 1), 0));
	      v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
	      v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
	    }
	  gcc_assert (DECL_ASSEMBLER_NAME (v1)
		      == DECL_ASSEMBLER_NAME (v2));

	  if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
	    {
	      unsigned int i;

	      set_type_binfo (val->type, TYPE_BINFO (type));
	      for (i = 0; i < val->types->length (); i++)
		{
		  if (TYPE_BINFO ((*val->types)[i])
		      == master_binfo)
		    set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
		}
	      BINFO_TYPE (TYPE_BINFO (type)) = val->type;
	    }
	  else
	    set_type_binfo (type, master_binfo);
	}
    }
}

/* Get ODR type hash entry for TYPE.  If INSERT is true, create
   possibly new entry.  */

odr_type
get_odr_type (tree type, bool insert)
{
  odr_type_d **slot;
  odr_type val;
  hashval_t hash;

  type = TYPE_MAIN_VARIANT (type);
  gcc_checking_assert (TYPE_MAIN_VARIANT (type) == type);
  hash = hash_type_name (type);
  slot
     = odr_hash->find_slot_with_hash (type, hash, insert ? INSERT : NO_INSERT);
  if (!slot)
    return NULL;

  /* See if we already have entry for type.  */
  if (*slot)
    {
      val = *slot;

      /* With LTO we need to support multiple tree representation of
	 the same ODR type.  */
      if (val->type != type)
	add_type_duplicate (val, type);
    }
  else
    {
      tree binfo = TYPE_BINFO (type);
      unsigned int i;

      val = ggc_cleared_alloc<odr_type_d> ();
      val->type = type;
      gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) == type);
      val->bases = vNULL;
      val->derived_types = vNULL;
      val->anonymous_namespace = type_in_anonymous_namespace_p (type);
      val->all_derivations_known = type_all_derivations_known_p (type);
      *slot = val;
      for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
	/* For now record only polymorphic types.  Others are
	   pointless for devirtualization and we cannot precisely
	   determine ODR equivalency of these during LTO.  */
	if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
	  {
	    odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
									i)),
					  true);
	    base->derived_types.safe_push (val);
	    val->bases.safe_push (base);
	  }
      /* First record bases, then add into array so ids are increasing.  */
      if (odr_types_ptr)
	val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  return val;
}

/* Dump ODR type T and all its derived types.  INDENT specifies indentation
   for recursive printing.  */

static void
dump_odr_type (FILE *f, odr_type t, int indent=0)
{
  unsigned int i;
  fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
  print_generic_expr (f, t->type, TDF_SLIM);
  fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
  fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
  if (TYPE_NAME (t->type))
    {
      fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
	       DECL_SOURCE_FILE (TYPE_NAME (t->type)),
	       DECL_SOURCE_LINE (TYPE_NAME (t->type)));
    }
  if (t->bases.length ())
    {
      fprintf (f, "%*s base odr type ids: ", indent * 2, "");
      for (i = 0; i < t->bases.length (); i++)
	fprintf (f, " %i", t->bases[i]->id);
      fprintf (f, "\n");
    }
  if (t->derived_types.length ())
    {
      fprintf (f, "%*s derived types:\n", indent * 2, "");
      for (i = 0; i < t->derived_types.length (); i++)
	dump_odr_type (f, t->derived_types[i], indent + 1);
    }
  fprintf (f, "\n");
}

/* Dump the type inheritance graph.  */

static void
dump_type_inheritance_graph (FILE *f)
{
  unsigned int i;
  if (!odr_types_ptr)
    return;
  fprintf (f, "\n\nType inheritance graph:\n");
  for (i = 0; i < odr_types.length (); i++)
    {
      if (odr_types[i]->bases.length () == 0)
	dump_odr_type (f, odr_types[i]);
    }
  for (i = 0; i < odr_types.length (); i++)
    {
      if (odr_types[i]->types && odr_types[i]->types->length ())
	{
	  unsigned int j;
	  fprintf (f, "Duplicate tree types for odr type %i\n", i);
	  print_node (f, "", odr_types[i]->type, 0);
	  for (j = 0; j < odr_types[i]->types->length (); j++)
	    {
	      tree t;
	      fprintf (f, "duplicate #%i\n", j);
	      print_node (f, "", (*odr_types[i]->types)[j], 0);
	      t = (*odr_types[i]->types)[j];
	      while (TYPE_P (t) && TYPE_CONTEXT (t))
		{
		  t = TYPE_CONTEXT (t);
		  print_node (f, "", t, 0);
		}
	      putc ('\n',f);
	    }
	}
    }
}

/* Given method type T, return type of class it belongs to.
   Look up the this pointer and get its type.  */

tree
method_class_type (tree t)
{
  tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
  gcc_assert (TREE_CODE (t) == METHOD_TYPE);

  return TREE_TYPE (first_parm_type);
}
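
/* For example (informally), for the method 'int A::f (int)' the METHOD_TYPE
   lists the implicit this pointer type 'A *' first in TYPE_ARG_TYPES, so
   method_class_type returns the RECORD_TYPE of A.  */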

/* Initialize IPA devirt and build inheritance tree graph.  */

void
build_type_inheritance_graph (void)
{
  struct symtab_node *n;
  FILE *inheritance_dump_file;
  int flags;

  if (odr_hash)
    return;
  timevar_push (TV_IPA_INHERITANCE);
  inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
  odr_hash = new odr_hash_type (23);

  /* We reconstruct the graph starting from types of all methods seen in the
     unit.  */
  FOR_EACH_SYMBOL (n)
    if (is_a <cgraph_node *> (n)
	&& DECL_VIRTUAL_P (n->decl)
	&& symtab_real_symbol_p (n))
      get_odr_type (method_class_type (TREE_TYPE (n->decl)), true);

    /* Look also for virtual tables of types that do not define any methods.

       We need it in a case where class B has virtual base of class A
       re-defining its virtual method and there is class C with no virtual
       methods with B as virtual base.

       Here we output B's virtual method in two variants - for non-virtual
       and virtual inheritance.  B's virtual table has non-virtual version,
       while C's has virtual.

       For this reason we need to know about C in order to include both
       variants of B.  More correctly, record_target_from_binfo should
       add both variants of the method when walking B, but we have no
       link in between them.

       We rely on the fact that either the method is exported and thus we
       assume it is called externally or C is in anonymous namespace and
       thus we will see the vtable.  */

    else if (is_a <varpool_node *> (n)
	     && DECL_VIRTUAL_P (n->decl)
	     && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
	     && TYPE_BINFO (DECL_CONTEXT (n->decl))
	     && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
      get_odr_type (DECL_CONTEXT (n->decl), true);
  if (inheritance_dump_file)
    {
      dump_type_inheritance_graph (inheritance_dump_file);
      dump_end (TDI_inheritance, inheritance_dump_file);
    }
  timevar_pop (TV_IPA_INHERITANCE);
}

/* Return true if N has reference from live virtual table
   (and thus can be a destination of polymorphic call).
   Be conservatively correct when callgraph is not built or
   if the method may be referred to externally.  */

static bool
referenced_from_vtable_p (struct cgraph_node *node)
{
  int i;
  struct ipa_ref *ref;
  bool found = false;

  if (node->externally_visible
      || node->used_from_other_partition)
    return true;

  /* Keep this test constant time.
     It is unlikely this can happen except for the case where speculative
     devirtualization introduced many speculative edges to this node.
     In this case the target is very likely alive anyway.  */
  if (node->ref_list.referring.length () > 100)
    return true;

  /* We need references built.  */
  if (cgraph_state <= CGRAPH_STATE_CONSTRUCTION)
    return true;

  for (i = 0; node->iterate_referring (i, ref); i++)
    if ((ref->use == IPA_REF_ALIAS
	 && referenced_from_vtable_p (cgraph (ref->referring)))
	|| (ref->use == IPA_REF_ADDR
	    && TREE_CODE (ref->referring->decl) == VAR_DECL
	    && DECL_VIRTUAL_P (ref->referring->decl)))
      {
	found = true;
	break;
      }
  return found;
}

/* If TARGET has associated node, record it in the NODES array.
   CAN_REFER specifies if program can refer to the target directly.
   If TARGET is unknown (NULL) or it cannot be inserted (for example because
   its body was already removed and there is no way to refer to it), clear
   COMPLETEP.  */

static void
maybe_record_node (vec <cgraph_node *> &nodes,
		   tree target, pointer_set_t *inserted,
		   bool can_refer,
		   bool *completep)
{
  struct cgraph_node *target_node;

  /* cxa_pure_virtual and __builtin_unreachable do not need to be added into
     list of targets; the runtime effect of calling them is undefined.
     Only "real" virtual methods should be accounted.  */
  if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE)
    return;

  if (!can_refer)
    {
      /* The only case when method of anonymous namespace becomes unreferable
	 is when we completely optimized it out.  */
      if (flag_ltrans
	  || !target
	  || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
	*completep = false;
      return;
    }

  if (!target)
    return;

  target_node = cgraph_get_node (target);

  /* Method can only be called by polymorphic call if any
     of vtables referring to it are alive.

     While this holds for non-anonymous functions, too, there are
     cases where we want to keep them in the list; for example
     inline functions with -fno-weak are static, but we still
     may devirtualize them when instance comes from other unit.
     The same holds for LTO.

     Currently we ignore these functions in speculative devirtualization.
     ??? Maybe it would make sense to be more aggressive for LTO even
     elsewhere.  */
  if (!flag_ltrans
      && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
      && (!target_node
	  || !referenced_from_vtable_p (target_node)))
    ;
  /* See if TARGET is useful function we can deal with.  */
  else if (target_node != NULL
	   && (TREE_PUBLIC (target)
	       || DECL_EXTERNAL (target)
	       || target_node->definition)
	   && symtab_real_symbol_p (target_node))
    {
      gcc_assert (!target_node->global.inlined_to);
      gcc_assert (symtab_real_symbol_p (target_node));
      if (!pointer_set_insert (inserted, target))
	{
	  pointer_set_insert (cached_polymorphic_call_targets,
			      target_node);
	  nodes.safe_push (target_node);
	}
    }
  else if (completep
	   && (!type_in_anonymous_namespace_p
		 (DECL_CONTEXT (target))
	       || flag_ltrans))
    *completep = false;
}

/* See if BINFO's type matches OUTER_TYPE.  If so, look up
   BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
   the method in vtable and insert the method to NODES array
   or BASES_TO_CONSIDER if this array is non-NULL.
   Otherwise recurse to base BINFOs.
   This matches what get_binfo_at_offset does, but with offset
   being unknown.

   TYPE_BINFOS is a stack of BINFOs of types with defined
   virtual table seen on way from class type to BINFO.

   MATCHED_VTABLES tracks virtual tables we already did lookup
   for virtual function in.  INSERTED tracks nodes we already
   inserted.

   ANONYMOUS is true if BINFO is part of anonymous namespace.

   Clear COMPLETEP when we hit unreferable target.
 */

static void
record_target_from_binfo (vec <cgraph_node *> &nodes,
			  vec <tree> *bases_to_consider,
			  tree binfo,
			  tree otr_type,
			  vec <tree> &type_binfos,
			  HOST_WIDE_INT otr_token,
			  tree outer_type,
			  HOST_WIDE_INT offset,
			  pointer_set_t *inserted,
			  pointer_set_t *matched_vtables,
			  bool anonymous,
			  bool *completep)
{
  tree type = BINFO_TYPE (binfo);
  int i;
  tree base_binfo;

  if (BINFO_VTABLE (binfo))
    type_binfos.safe_push (binfo);
  if (types_same_for_odr (type, outer_type))
    {
      int i;
      tree type_binfo = NULL;

      /* Look up BINFO with virtual table.  For normal types it is always last
	 binfo on stack.  */
      for (i = type_binfos.length () - 1; i >= 0; i--)
	if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
	  {
	    type_binfo = type_binfos[i];
	    break;
	  }
      if (BINFO_VTABLE (binfo))
	type_binfos.pop ();
      /* If this is duplicated BINFO for base shared by virtual inheritance,
	 we may not have its associated vtable.  This is not a problem, since
	 we will walk it on the other path.  */
      if (!type_binfo)
	return;
      tree inner_binfo = get_binfo_at_offset (type_binfo,
					      offset, otr_type);
      if (!inner_binfo)
	{
	  gcc_assert (odr_violation_reported);
	  return;
	}
      /* For types in anonymous namespace first check if the respective vtable
	 is alive.  If not, we know the type can't be called.  */
      if (!flag_ltrans && anonymous)
	{
	  tree vtable = BINFO_VTABLE (inner_binfo);
	  varpool_node *vnode;

	  if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
	    vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
	  vnode = varpool_get_node (vtable);
	  if (!vnode || !vnode->definition)
	    return;
	}
      gcc_assert (inner_binfo);
      if (bases_to_consider
	  ? !pointer_set_contains (matched_vtables, BINFO_VTABLE (inner_binfo))
	  : !pointer_set_insert (matched_vtables, BINFO_VTABLE (inner_binfo)))
	{
	  bool can_refer;
	  tree target = gimple_get_virt_method_for_binfo (otr_token,
							  inner_binfo,
							  &can_refer);
	  if (!bases_to_consider)
	    maybe_record_node (nodes, target, inserted, can_refer, completep);
	  /* Destructors are never called via construction vtables.  */
	  else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
	    bases_to_consider->safe_push (target);
	}
      return;
    }

  /* Walk bases.  */
  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    /* Walking bases that have no virtual method is pointless exercise.  */
    if (polymorphic_type_binfo_p (base_binfo))
      record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
				type_binfos,
				otr_token, outer_type, offset, inserted,
				matched_vtables, anonymous, completep);
  if (BINFO_VTABLE (binfo))
    type_binfos.pop ();
}

/* Look up virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
   of TYPE, insert them to NODES, recurse into derived nodes.
   INSERTED is used to avoid duplicate insertions of methods into NODES.
   MATCHED_VTABLES are used to avoid duplicate walking vtables.
   Clear COMPLETEP if unreferable target is found.

   If CONSIDER_CONSTRUCTION is true, record to BASES_TO_CONSIDER
   all cases where BASE_SKIPPED is true (because the base is abstract
   class).  */

static void
possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
				     pointer_set_t *inserted,
				     pointer_set_t *matched_vtables,
				     tree otr_type,
				     odr_type type,
				     HOST_WIDE_INT otr_token,
				     tree outer_type,
				     HOST_WIDE_INT offset,
				     bool *completep,
				     vec <tree> &bases_to_consider,
				     bool consider_construction)
{
  tree binfo = TYPE_BINFO (type->type);
  unsigned int i;
  vec <tree> type_binfos = vNULL;
  bool possibly_instantiated = type_possibly_instantiated_p (type->type);

  /* We may need to consider types w/o instances because of possible derived
     types using their methods either directly or via construction vtables.
     We are safe to skip them when all derivations are known, since we will
     handle them later.
     This is done by recording them to BASES_TO_CONSIDER array.  */
  if (possibly_instantiated || consider_construction)
    {
      record_target_from_binfo (nodes,
				(!possibly_instantiated
				 && type_all_derivations_known_p (type->type))
				? &bases_to_consider : NULL,
				binfo, otr_type, type_binfos, otr_token,
				outer_type, offset,
				inserted, matched_vtables,
				type->anonymous_namespace, completep);
    }
  type_binfos.release ();
  for (i = 0; i < type->derived_types.length (); i++)
    possible_polymorphic_call_targets_1 (nodes, inserted,
					 matched_vtables,
					 otr_type,
					 type->derived_types[i],
					 otr_token, outer_type, offset,
					 completep, bases_to_consider,
					 consider_construction);
}

/* Cache of queries for polymorphic call targets.

   Enumerating all call targets may get expensive when there are many
   polymorphic calls in the program, so we memoize all the previous
   queries and avoid duplicated work.  */

struct polymorphic_call_target_d
{
  HOST_WIDE_INT otr_token;
  ipa_polymorphic_call_context context;
  odr_type type;
  vec <cgraph_node *> targets;
  int nonconstruction_targets;
  bool complete;
};

/* Polymorphic call target cache helpers.  */

struct polymorphic_call_target_hasher
{
  typedef polymorphic_call_target_d value_type;
  typedef polymorphic_call_target_d compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Return the computed hashcode for ODR_QUERY.  */

inline hashval_t
polymorphic_call_target_hasher::hash (const value_type *odr_query)
{
  hashval_t hash;

  hash = iterative_hash_host_wide_int
	  (odr_query->otr_token,
	   odr_query->type->id);
  hash = iterative_hash_hashval_t (TYPE_UID (odr_query->context.outer_type),
				   hash);
  hash = iterative_hash_host_wide_int (odr_query->context.offset, hash);
  return iterative_hash_hashval_t
	    (((int)odr_query->context.maybe_in_construction << 1)
	     | (int)odr_query->context.maybe_derived_type, hash);
}

/* Compare cache entries T1 and T2.  */

inline bool
polymorphic_call_target_hasher::equal (const value_type *t1,
				       const compare_type *t2)
{
  return (t1->type == t2->type && t1->otr_token == t2->otr_token
	  && t1->context.offset == t2->context.offset
	  && t1->context.outer_type == t2->context.outer_type
	  && t1->context.maybe_in_construction
	     == t2->context.maybe_in_construction
	  && t1->context.maybe_derived_type == t2->context.maybe_derived_type);
}

/* Remove entry in polymorphic call target cache hash.  */

inline void
polymorphic_call_target_hasher::remove (value_type *v)
{
  v->targets.release ();
  free (v);
}

/* Polymorphic call target query cache.  */

typedef hash_table<polymorphic_call_target_hasher>
   polymorphic_call_target_hash_type;
static polymorphic_call_target_hash_type *polymorphic_call_target_hash;

/* Destroy polymorphic call target query cache.  */

static void
free_polymorphic_call_targets_hash ()
{
  if (cached_polymorphic_call_targets)
    {
      delete polymorphic_call_target_hash;
      polymorphic_call_target_hash = NULL;
      pointer_set_destroy (cached_polymorphic_call_targets);
      cached_polymorphic_call_targets = NULL;
    }
}

/* When virtual function is removed, we may need to flush the cache.  */

static void
devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
{
  if (cached_polymorphic_call_targets
      && pointer_set_contains (cached_polymorphic_call_targets, n))
    free_polymorphic_call_targets_hash ();
}

/* CONTEXT->OUTER_TYPE is a type of memory object where object of
   EXPECTED_TYPE is contained at CONTEXT->OFFSET.  Walk the memory
   representation of CONTEXT->OUTER_TYPE and find the outermost class type
   that matches EXPECTED_TYPE or contains EXPECTED_TYPE as a base.  Update
   CONTEXT to represent it.

   For example when CONTEXT represents type
   class A
     {
       int a;
       class B b;
     }
   and we look for type at offset sizeof(int), we end up with B and offset 0.
   If the same is produced by multiple inheritance, we end up with A and
   offset sizeof(int).

   If we cannot find the corresponding class, give up by setting
   CONTEXT->OUTER_TYPE to EXPECTED_TYPE and CONTEXT->OFFSET to 0.
   Return true when lookup was successful.  */

static bool
get_class_context (ipa_polymorphic_call_context *context,
		   tree expected_type)
{
  tree type = context->outer_type;
  HOST_WIDE_INT offset = context->offset;

  /* Find the sub-object the constant actually refers to and mark whether it
     is an artificial one (as opposed to a user-defined one).  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;

      /* On a match, just return what we found.  */
      if (TREE_CODE (type) == TREE_CODE (expected_type)
	  && types_same_for_odr (type, expected_type))
	{
	  /* Type cannot contain itself at a non-zero offset.  In that case
	     just give up.  */
	  if (offset != 0)
	    goto give_up;
	  gcc_assert (offset == 0);
	  return true;
	}

      /* Walk fields and find the corresponding one at OFFSET.  */
      if (TREE_CODE (type) == RECORD_TYPE)
	{
	  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	    {
	      if (TREE_CODE (fld) != FIELD_DECL)
		continue;

	      pos = int_bit_position (fld);
	      size = tree_to_uhwi (DECL_SIZE (fld));
	      if (pos <= offset && (pos + size) > offset)
		break;
	    }

	  if (!fld)
	    goto give_up;

	  type = TYPE_MAIN_VARIANT (TREE_TYPE (fld));
	  offset -= pos;
	  /* DECL_ARTIFICIAL represents a basetype.  */
	  if (!DECL_ARTIFICIAL (fld))
	    {
	      context->outer_type = type;
	      context->offset = offset;
	      /* As soon as we see a field containing the type,
		 we know we are not looking for derivations.  */
	      context->maybe_derived_type = false;
	    }
	}
      else if (TREE_CODE (type) == ARRAY_TYPE)
	{
	  tree subtype = TYPE_MAIN_VARIANT (TREE_TYPE (type));

	  /* Give up if we don't know array size.  */
	  if (!tree_fits_shwi_p (TYPE_SIZE (subtype))
	      || tree_to_shwi (TYPE_SIZE (subtype)) <= 0)
	    goto give_up;
	  offset = offset % tree_to_shwi (TYPE_SIZE (subtype));
	  type = subtype;
	  context->outer_type = type;
	  context->offset = offset;
	  context->maybe_derived_type = false;
	}
      /* Give up on anything else.  */
      else
	goto give_up;
    }

  /* If we failed to find subtype we look for, give up and fall back to the
     most generic query.  */
give_up:
  context->outer_type = expected_type;
  context->offset = 0;
  context->maybe_derived_type = true;
  context->maybe_in_construction = true;
  /* POD can be changed to an instance of a polymorphic type by
     placement new.  Here we play safe and assume that any
     non-polymorphic type is POD.  */
  if ((TREE_CODE (type) != RECORD_TYPE
       || !TYPE_BINFO (type)
       || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
      && (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
	  || (offset + tree_to_uhwi (TYPE_SIZE (expected_type)) <=
	      tree_to_uhwi (TYPE_SIZE (type)))))
    return true;
  return false;
}

/* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET.  */

static bool
contains_type_p (tree outer_type, HOST_WIDE_INT offset,
		 tree otr_type)
{
  ipa_polymorphic_call_context context = {offset,
					  TYPE_MAIN_VARIANT (outer_type),
					  false, true};
  return get_class_context (&context, otr_type);
}

/* Look up base of BINFO that has virtual table VTABLE with OFFSET.  */

static tree
subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
				tree vtable)
{
  tree v = BINFO_VTABLE (binfo);
  int i;
  tree base_binfo;
  unsigned HOST_WIDE_INT this_offset;

  if (v)
    {
      if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
	gcc_unreachable ();

      if (offset == this_offset
	  && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
	return binfo;
    }

  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    if (polymorphic_type_binfo_p (base_binfo))
      {
	base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset,
						     vtable);
	if (base_binfo)
	  return base_binfo;
      }
  return NULL;
}

/* T is known constant value of virtual table pointer.
   Store virtual table to V and its offset to OFFSET.
   Return false if T does not look like virtual table reference.  */

bool
vtable_pointer_value_to_vtable (tree t, tree *v,
				unsigned HOST_WIDE_INT *offset)
{
  /* We expect &MEM[(void *)&virtual_table + 16B].
     We obtain object's BINFO from the context of the virtual table.
     This one contains pointer to virtual table represented via
     POINTER_PLUS_EXPR.  Verify that this pointer matches what
     we propagated through.

     In the case of virtual inheritance, the virtual tables may
     be nested, i.e. the offset may be different from 16 and we may
     need to dive into the type representation.  */
  if (TREE_CODE (t) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
	  == VAR_DECL)
      && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
					 (TREE_OPERAND (t, 0), 0), 0)))
    {
      *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
      *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
      return true;
    }

  /* Alternative representation, used by the C++ frontend, is
     POINTER_PLUS_EXPR.  We need to handle it when T comes from a static
     variable initializer or BINFO.  */
  if (TREE_CODE (t) == POINTER_PLUS_EXPR)
    {
      *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
      t = TREE_OPERAND (t, 0);
    }
  else
    *offset = 0;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;
  *v = TREE_OPERAND (t, 0);
  return true;
}
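
/* To illustrate the two accepted shapes (informally): a vtable pointer that
   points 16 bytes into the vtable _ZTV1A may appear either as the folded
   reference &MEM[(void *)&_ZTV1A + 16B], matched by the first test above,
   or as the POINTER_PLUS_EXPR form used by the C++ frontend in BINFOs and
   static initializers; both decompose into the pair (_ZTV1A, 16).  */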

/* T is known constant value of virtual table pointer.  Return BINFO of the
   instance type.  */

tree
vtable_pointer_value_to_binfo (tree t)
{
  tree vtable;
  unsigned HOST_WIDE_INT offset;

  if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
    return NULL_TREE;

  /* FIXME: for stores of construction vtables we return NULL,
     because we do not have BINFO for those.  Eventually we should fix
     our representation to allow this case to be handled, too.
     In the case we see store of BINFO we however may assume
     that standard folding will be able to cope with it.  */
  return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
					 offset, vtable);
}

/* Produce polymorphic call context for call of method of instance
   that is located within BASE (that is assumed to be a decl) at OFFSET.  */

static void
get_polymorphic_call_info_for_decl (ipa_polymorphic_call_context *context,
				    tree base, HOST_WIDE_INT offset)
{
  gcc_assert (DECL_P (base));

  context->outer_type = TYPE_MAIN_VARIANT (TREE_TYPE (base));
  context->offset = offset;
  /* Make very conservative assumption that all objects
     may be in construction.
     TODO: ipa-prop already contains code to tell better.
     Merge it later.  */
  context->maybe_in_construction = true;
  context->maybe_derived_type = false;
}

/* CST is an invariant (address of decl), try to get meaningful
   polymorphic call context for polymorphic call of method
   of an instance of OTR_TYPE that is located at OFFSET of this invariant.
   Return FALSE if nothing meaningful can be found.  */

bool
get_polymorphic_call_info_from_invariant (ipa_polymorphic_call_context
					    *context,
					  tree cst,
					  tree otr_type,
					  HOST_WIDE_INT offset)
{
  HOST_WIDE_INT offset2, size, max_size;
  tree base;

  if (TREE_CODE (cst) != ADDR_EXPR)
    return false;

  cst = TREE_OPERAND (cst, 0);
  base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
  if (!DECL_P (base) || max_size == -1 || max_size != size)
    return false;

  /* Only type inconsistent programs can have otr_type that is
     not part of outer type.  */
  if (!contains_type_p (TREE_TYPE (base), offset, otr_type))
    return false;

  get_polymorphic_call_info_for_decl (context, base, offset);
  return true;
}

/* Given REF call in FNDECL, determine class of the polymorphic
   call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
   Return pointer to object described by the context.  */

tree
get_polymorphic_call_info (tree fndecl,
			   tree ref,
			   tree *otr_type,
			   HOST_WIDE_INT *otr_token,
			   ipa_polymorphic_call_context *context)
{
  tree base_pointer;
  *otr_type = obj_type_ref_class (ref);
  *otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));

  /* Set up basic info in case we find nothing interesting in the
     analysis.  */
  context->outer_type = TYPE_MAIN_VARIANT (*otr_type);
  context->offset = 0;
  base_pointer = OBJ_TYPE_REF_OBJECT (ref);
  context->maybe_derived_type = true;
  context->maybe_in_construction = true;

  /* Walk SSA for outer object.  */
  do
    {
      if (TREE_CODE (base_pointer) == SSA_NAME
	  && !SSA_NAME_IS_DEFAULT_DEF (base_pointer)
	  && SSA_NAME_DEF_STMT (base_pointer)
	  && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
	{
	  base_pointer = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (base_pointer));
	  STRIP_NOPS (base_pointer);
	}
      else if (TREE_CODE (base_pointer) == ADDR_EXPR)
	{
	  HOST_WIDE_INT size, max_size;
	  HOST_WIDE_INT offset2;
	  tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
					       &offset2, &size, &max_size);

	  /* If this is a varying address, punt.  */
	  if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
	      && max_size != -1
	      && max_size == size)
	    {
	      /* We found dereference of a pointer.  Type of the pointer
		 and MEM_REF is meaningless, but we can look further.  */
	      if (TREE_CODE (base) == MEM_REF)
		{
		  base_pointer = TREE_OPERAND (base, 0);
		  context->offset
		    += offset2 + mem_ref_offset (base).to_short_addr ()
		       * BITS_PER_UNIT;
		  context->outer_type = NULL;
		}
	      /* We found base object.  In this case the outer_type
		 is known.  */
	      else if (DECL_P (base))
		{
		  gcc_assert (!POINTER_TYPE_P (TREE_TYPE (base)));

		  /* Only type inconsistent programs can have otr_type that is
		     not part of outer type.  */
		  if (!contains_type_p (TREE_TYPE (base),
					context->offset + offset2, *otr_type))
		    {
		      /* Use OTR_TOKEN = INT_MAX as a marker of probably type
			 inconsistent code sequences; we arrange the calls to
			 be builtin_unreachable later.  */
		      *otr_token = INT_MAX;
		      return base_pointer;
		    }
		  get_polymorphic_call_info_for_decl (context, base,
						      context->offset
						      + offset2);
		  return NULL;
		}
	      else
		break;
	    }
	  else
	    break;
	}
      else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
	       && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
	{
	  context->offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
			     * BITS_PER_UNIT;
	  base_pointer = TREE_OPERAND (base_pointer, 0);
	}
      else
	break;
    }
  while (true);

  /* Try to determine type of the outer object.  */
  if (TREE_CODE (base_pointer) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
      && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
    {
      /* See if parameter is THIS pointer of a method.  */
      if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
	  && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
	{
	  context->outer_type
	    = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
	  gcc_assert (TREE_CODE (context->outer_type) == RECORD_TYPE);

	  /* Dynamic casting has possibly upcasted the type
	     in the hierarchy.  In this case outer type is less
	     informative than inner type and we should forget
	     about it.  */
	  if (!contains_type_p (context->outer_type, context->offset,
				*otr_type))
	    {
	      context->outer_type = NULL;
	      return base_pointer;
	    }

	  /* If the function is constructor or destructor, then
	     the type is possibly in construction, but we know
	     it is not derived type.  */
	  if (DECL_CXX_CONSTRUCTOR_P (fndecl)
	      || DECL_CXX_DESTRUCTOR_P (fndecl))
	    {
	      context->maybe_in_construction = true;
	      context->maybe_derived_type = false;
	    }
	  else
	    {
	      context->maybe_derived_type = true;
	      context->maybe_in_construction = false;
	    }
	  return base_pointer;
	}
      /* Non-PODs passed by value are really passed by invisible
	 reference.  In this case we also know the type of the
	 object.  */
      if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
	{
	  context->outer_type
	    = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
	  gcc_assert (!POINTER_TYPE_P (context->outer_type));
	  /* Only type inconsistent programs can have otr_type that is
	     not part of outer type.  */
	  if (!contains_type_p (context->outer_type, context->offset,
				*otr_type))
	    {
	      /* Use OTR_TOKEN = INT_MAX as a marker of probably type
		 inconsistent code sequences; we arrange the calls to be
		 builtin_unreachable later.  */
	      *otr_token = INT_MAX;
	      return base_pointer;
	    }
	  context->maybe_derived_type = false;
	  context->maybe_in_construction = false;
	  return base_pointer;
	}
    }
  /* TODO: There are multiple ways to derive a type.  For instance
     if BASE_POINTER is passed to a constructor call prior to our reference.
     We do not make this type of flow sensitive analysis yet.  */
  return base_pointer;
}

/* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
   Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
   and insert them to NODES.

   MATCHED_VTABLES and INSERTED are used to avoid duplicated work.  */

static void
record_targets_from_bases (tree otr_type,
			   HOST_WIDE_INT otr_token,
			   tree outer_type,
			   HOST_WIDE_INT offset,
			   vec <cgraph_node *> &nodes,
			   pointer_set_t *inserted,
			   pointer_set_t *matched_vtables,
			   bool *completep)
{
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree base_binfo;
      tree fld;

      if (types_same_for_odr (outer_type, otr_type))
	return;

      for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL)
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_to_shwi (DECL_SIZE (fld));
	  if (pos <= offset && (pos + size) > offset
	      /* Do not get confused by zero sized bases.  */
	      && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
	    break;
	}
      /* Within a class type we should always find corresponding fields.  */
      gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);

      /* Nonbasetypes should have been stripped by outer_class_type.  */
      gcc_assert (DECL_ARTIFICIAL (fld));

      outer_type = TREE_TYPE (fld);
      offset -= pos;

      base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
					offset, otr_type);
      if (!base_binfo)
	{
	  gcc_assert (odr_violation_reported);
	  return;
	}
      gcc_assert (base_binfo);
      if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo)))
	{
	  bool can_refer;
	  tree target = gimple_get_virt_method_for_binfo (otr_token,
							  base_binfo,
							  &can_refer);
	  if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
	    maybe_record_node (nodes, target, inserted, can_refer, completep);
	  pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo));
	}
    }
}

/* When virtual table is removed, we may need to flush the cache.  */

static void
devirt_variable_node_removal_hook (varpool_node *n,
				   void *d ATTRIBUTE_UNUSED)
{
  if (cached_polymorphic_call_targets
      && DECL_VIRTUAL_P (n->decl)
      && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
    free_polymorphic_call_targets_hash ();
}

/* Return vector containing possible targets of polymorphic call of type
   OTR_TYPE calling method OTR_TOKEN within type of OTR_OUTER_TYPE and
   OFFSET.  If INCLUDE_BASES is true, walk also base types of OUTER_TYPES
   containing OTR_TYPE and include their virtual methods.  This is useful
   for types possibly in construction or destruction where the virtual
   table may temporarily change to one of base types.  INCLUDE_DERIVED_TYPES
   makes us walk the inheritance graph for all derivations.

   OTR_TOKEN == INT_MAX is used to mark calls that are provably
   undefined and should be redirected to unreachable.

   If COMPLETEP is non-NULL, store true if the list is complete.
   CACHE_TOKEN (if non-NULL) will get stored to a unique ID of entry
   in the target cache.  If user needs to visit every target list
   just once, it can memoize them.

   NONCONSTRUCTION_TARGETS specifies the number of targets with the
   assumption that the type is not in construction.  Those targets appear
   first in the vector returned.

   Returned vector is placed into cache.  It is NOT caller's responsibility
   to free it.  The vector can be freed on cgraph_remove_node call if
   the particular node is a virtual function present in the cache.  */
1595
1596 vec <cgraph_node *>
1597 possible_polymorphic_call_targets (tree otr_type,
1598 HOST_WIDE_INT otr_token,
1599 ipa_polymorphic_call_context context,
1600 bool *completep,
1601 void **cache_token,
1602 int *nonconstruction_targetsp)
1603 {
1604 static struct cgraph_node_hook_list *node_removal_hook_holder;
1605 pointer_set_t *inserted;
1606 pointer_set_t *matched_vtables;
1607 vec <cgraph_node *> nodes = vNULL;
1608 vec <tree> bases_to_consider = vNULL;
1609 odr_type type, outer_type;
1610 polymorphic_call_target_d key;
1611 polymorphic_call_target_d **slot;
1612 unsigned int i;
1613 tree binfo, target;
1614 bool complete;
1615 bool can_refer;
1616 bool skipped = false;
1617
1618 otr_type = TYPE_MAIN_VARIANT (otr_type);
1619
1620 /* If ODR is not initialized, return empty incomplete list. */
1621 if (!odr_hash)
1622 {
1623 if (completep)
1624 *completep = false;
1625 if (cache_token)
1626 *cache_token = NULL;
1627 if (nonconstruction_targetsp)
1628 *nonconstruction_targetsp = 0;
1629 return nodes;
1630 }
1631
1632 /* If we hit type inconsistency, just return empty list of targets. */
1633 if (otr_token == INT_MAX)
1634 {
1635 if (completep)
1636 *completep = true;
1637 if (cache_token)
1638 *cache_token = NULL;
1639 if (nonconstruction_targetsp)
1640 *nonconstruction_targetsp = 0;
1641 return nodes;
1642 }
1643
1644 type = get_odr_type (otr_type, true);
1645
1646 /* Recording type variants would wast results cache. */
1647 gcc_assert (!context.outer_type
1648 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
1649
1650 /* Lookup the outer class type we want to walk. */
1651 if (context.outer_type
1652 && !get_class_context (&context, otr_type))
1653 {
1654 if (completep)
1655 *completep = false;
1656 if (cache_token)
1657 *cache_token = NULL;
1658 if (nonconstruction_targetsp)
1659 *nonconstruction_targetsp = 0;
1660 return nodes;
1661 }
1662
1663 /* Check that get_class_context kept the main variant. */
1664 gcc_assert (!context.outer_type
1665 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
1666
1667 /* We canonicalize our query, so we do not need extra hashtable entries. */
1668
1669 /* Without outer type, we have no use for offset. Just do the
1670 basic search from innter type */
1671 if (!context.outer_type)
1672 {
1673 context.outer_type = otr_type;
1674 context.offset = 0;
1675 }
1676 /* We need to update our hiearchy if the type does not exist. */
1677 outer_type = get_odr_type (context.outer_type, true);
1678 /* If the type is complete, there are no derivations. */
1679 if (TYPE_FINAL_P (outer_type->type))
1680 context.maybe_derived_type = false;
1681
1682 /* Initialize query cache. */
1683 if (!cached_polymorphic_call_targets)
1684 {
1685 cached_polymorphic_call_targets = pointer_set_create ();
1686 polymorphic_call_target_hash
1687 = new polymorphic_call_target_hash_type (23);
1688 if (!node_removal_hook_holder)
1689 {
1690 node_removal_hook_holder =
1691 cgraph_add_node_removal_hook (&devirt_node_removal_hook, NULL);
1692 varpool_add_node_removal_hook (&devirt_variable_node_removal_hook,
1693 NULL);
1694 }
1695 }
1696
1697 /* Lookup cached answer. */
1698 key.type = type;
1699 key.otr_token = otr_token;
1700 key.context = context;
1701 slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
1702 if (cache_token)
1703 *cache_token = (void *)*slot;
1704 if (*slot)
1705 {
1706 if (completep)
1707 *completep = (*slot)->complete;
1708 if (nonconstruction_targetsp)
1709 *nonconstruction_targetsp = (*slot)->nonconstruction_targets;
1710 return (*slot)->targets;
1711 }
1712
1713 complete = true;
1714
1715 /* Do actual search. */
1716 timevar_push (TV_IPA_VIRTUAL_CALL);
1717 *slot = XCNEW (polymorphic_call_target_d);
1718 if (cache_token)
1719 *cache_token = (void *)*slot;
1720 (*slot)->type = type;
1721 (*slot)->otr_token = otr_token;
1722 (*slot)->context = context;
1723
1724 inserted = pointer_set_create ();
1725 matched_vtables = pointer_set_create ();
1726
1727 /* First see the virtual method of the type itself. */
1728 binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
1729 context.offset, otr_type);
1730 if (binfo)
1731 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
1732 &can_refer);
1733 else
1734 {
1735 gcc_assert (odr_violation_reported);
1736 target = NULL;
1737 }
1738
1739 /* Destructors are never called through construction virtual tables,
1740 because the type is always known. */
1741 if (target && DECL_CXX_DESTRUCTOR_P (target))
1742 context.maybe_in_construction = false;
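/* Illustrative C++ example: while the A sub-object of a B is being
   constructed, virtual calls dispatch through A's construction vtable:

     struct A { A () { f (); }  // calls A::f even when building a B
                virtual ~A () {}
                virtual void f () {} };
     struct B : A { void f () {} };

   A destructor, by contrast, is dispatched virtually only on a fully
   constructed object, so a destructor target implies the call cannot
   go through a construction vtable. */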
1743
1744 if (target)
1745 {
1746 /* If the method is declared final, no derived type can override it,
1747 so we don't need to walk derivations. */
1748 if (DECL_FINAL_P (target))
1749 context.maybe_derived_type = false;
1750 }
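/* For example, given

     struct A { virtual int f (); };
     struct B : A { int f () final; };

   once B::f is the target, no type derived from B may override it, so
   the set of derived types no longer matters. */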
1751
1752 /* If OUTER_TYPE is abstract, we know we are not seeing an instance of it. */
1753 if (type_possibly_instantiated_p (outer_type->type))
1754 maybe_record_node (nodes, target, inserted, can_refer, &complete);
1755 else
1756 {
1757 skipped = true;
1758 gcc_assert (in_lto_p || context.maybe_derived_type);
1759 }
1760
1761 /* BINFO may be NULL when an ODR violation was reported above; guard
the vtable insertion accordingly. */
if (binfo)
pointer_set_insert (matched_vtables, BINFO_VTABLE (binfo));
1762
1763 /* Next walk recursively all derived types. */
1764 if (context.maybe_derived_type)
1765 {
1766 /* For anonymous namespace types we can attempt to build the full list of
1767 derived types: all derivations must be in this unit (unless we see a partial unit). */
1768 if (!type->all_derivations_known)
1769 complete = false;
1770 for (i = 0; i < outer_type->derived_types.length(); i++)
1771 possible_polymorphic_call_targets_1 (nodes, inserted,
1772 matched_vtables,
1773 otr_type,
1774 outer_type->derived_types[i],
1775 otr_token, outer_type->type,
1776 context.offset, &complete,
1777 bases_to_consider,
1778 context.maybe_in_construction);
1779 }
1780
1781 /* Record the number of targets found before construction-only targets are added; then walk bases, if asked to. */
1782 (*slot)->nonconstruction_targets = nodes.length();
1783
1784 /* Destructors are never called through construction virtual tables,
1785 because the type is always known. One of the entries may be
1786 cxa_pure_virtual, so look at at least two of them. */
1787 if (context.maybe_in_construction)
1788 for (i = 0; i < MIN (nodes.length (), 2); i++)
1789 if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
1790 context.maybe_in_construction = false;
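/* E.g. for an abstract base the first entry recorded may be the
   cxa_pure_virtual stub rather than a real destructor, which is why
   the loop above inspects up to two entries. */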
1791 if (context.maybe_in_construction)
1792 {
1793 if (type != outer_type
1794 && (!skipped
1795 || (context.maybe_derived_type
1796 && !type_all_derivations_known_p (outer_type->type))))
1797 record_targets_from_bases (otr_type, otr_token, outer_type->type,
1798 context.offset, nodes, inserted,
1799 matched_vtables, &complete);
1800 if (skipped)
1801 maybe_record_node (nodes, target, inserted, can_refer, &complete);
1802 for (i = 0; i < bases_to_consider.length(); i++)
1803 maybe_record_node (nodes, bases_to_consider[i], inserted, can_refer, &complete);
1804 }
1805 bases_to_consider.release();
1806
1807 (*slot)->targets = nodes;
1808 (*slot)->complete = complete;
1809 if (completep)
1810 *completep = complete;
1811 if (nonconstruction_targetsp)
1812 *nonconstruction_targetsp = (*slot)->nonconstruction_targets;
1813
1814 pointer_set_destroy (inserted);
1815 pointer_set_destroy (matched_vtables);
1816 timevar_pop (TV_IPA_VIRTUAL_CALL);
1817 return nodes;
1818 }
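/* A minimal usage sketch (hypothetical caller; consider_target is an
   illustrative placeholder, not a GCC function):

     bool final;
     void *cache_token;
     int nonconst;
     vec <cgraph_node *> t
       = possible_polymorphic_call_targets (otr_type, otr_token, ctx,
					    &final, &cache_token, &nonconst);
     for (unsigned i = 0; i < t.length (); i++)
       consider_target (t[i]);

   The returned vector belongs to the internal cache and must not be
   released by the caller. */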
1819
1820 /* Dump all possible targets of a polymorphic call. */
1821
1822 void
1823 dump_possible_polymorphic_call_targets (FILE *f,
1824 tree otr_type,
1825 HOST_WIDE_INT otr_token,
1826 const ipa_polymorphic_call_context &ctx)
1827 {
1828 vec <cgraph_node *> targets;
1829 bool final;
1830 odr_type type = get_odr_type (otr_type, false);
1831 unsigned int i;
1832 int nonconstruction;
1833
1834 if (!type)
1835 return;
1836 targets = possible_polymorphic_call_targets (otr_type, otr_token,
1837 ctx,
1838 &final, NULL, &nonconstruction);
1839 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
1840 print_generic_expr (f, type->type, TDF_SLIM);
1841 fprintf (f, " token %i\n", (int)otr_token);
1842 if (ctx.outer_type || ctx.offset)
1843 {
1844 fprintf (f, " Contained in type:");
1845 print_generic_expr (f, ctx.outer_type, TDF_SLIM);
1846 fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n",
1847 ctx.offset);
1848 }
1849
1850 fprintf (f, " %s%s%s\n ",
1851 final ? "This is a complete list." :
1852 "This is partial list; extra targets may be defined in other units.",
1853 ctx.maybe_in_construction ? " (base types included)" : "",
1854 ctx.maybe_derived_type ? " (derived types included)" : "");
1855 for (i = 0; i < targets.length (); i++)
1856 {
1857 char *name = NULL;
1858 if (i == (unsigned)nonconstruction)
1859 fprintf (f, "\n If the type is in construction,"
1860 " then additional tarets are:\n"
1861 " ");
1862 if (in_lto_p)
1863 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
1864 fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
1865 if (in_lto_p)
1866 free (name);
1867 if (!targets[i]->definition)
1868 fprintf (f, " (no definition%s)",
1869 DECL_DECLARED_INLINE_P (targets[i]->decl)
1870 ? " inline" : "");
1871 }
1872 fprintf (f, "\n\n");
1873 }
1874
1875
1876 /* Return true if N can possibly be a target of a polymorphic call of
1877 OTR_TYPE/OTR_TOKEN. */
1878
1879 bool
1880 possible_polymorphic_call_target_p (tree otr_type,
1881 HOST_WIDE_INT otr_token,
1882 const ipa_polymorphic_call_context &ctx,
1883 struct cgraph_node *n)
1884 {
1885 vec <cgraph_node *> targets;
1886 unsigned int i;
1887 enum built_in_function fcode;
1888 bool final;
1889
1890 if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
1891 && ((fcode = DECL_FUNCTION_CODE (n->decl))
1892 == BUILT_IN_UNREACHABLE
1893 || fcode == BUILT_IN_TRAP))
1894 return true;
1895
1896 if (!odr_hash)
1897 return true;
1898 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
1899 for (i = 0; i < targets.length (); i++)
1900 if (symtab_semantically_equivalent_p (n, targets[i]))
1901 return true;
1902
1903 /* At the moment we allow the middle end to dig out new external
1904 declarations as targets of polymorphic calls. */
1905 if (!final && !n->definition)
1906 return true;
1907 return false;
1908 }
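/* Usage sketch (hypothetical): a caller that has already resolved a
   polymorphic call to CALLEE can verify the resolution with

     gcc_assert (possible_polymorphic_call_target_p (otr_type, otr_token,
						     ctx, callee));

   relying on the conservative "return true" paths above whenever the
   ODR hash has not been built. */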
1909
1910
1911 /* After callgraph construction, new external nodes may appear.
1912 Add them to the type inheritance graph. */
1913
1914 void
1915 update_type_inheritance_graph (void)
1916 {
1917 struct cgraph_node *n;
1918
1919 if (!odr_hash)
1920 return;
1921 free_polymorphic_call_targets_hash ();
1922 timevar_push (TV_IPA_INHERITANCE);
1923 /* We reconstruct the graph starting from the types of all methods seen
1924 in the unit. */
1925 FOR_EACH_FUNCTION (n)
1926 if (DECL_VIRTUAL_P (n->decl)
1927 && !n->definition
1928 && symtab_real_symbol_p (n))
1929 get_odr_type (method_class_type (TREE_TYPE (n->decl)), true);
1930 timevar_pop (TV_IPA_INHERITANCE);
1931 }
1932
1933
1934 /* Return true if N looks like a likely target of a polymorphic call.
1935 Rule out cxa_pure_virtual, noreturn functions, functions declared cold,
1936 and other obvious cases. */
1937
1938 bool
1939 likely_target_p (struct cgraph_node *n)
1940 {
1941 int flags;
1942 /* cxa_pure_virtual and similar things are not likely. */
1943 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
1944 return false;
1945 flags = flags_from_decl_or_type (n->decl);
1946 if (flags & ECF_NORETURN)
1947 return false;
1948 if (lookup_attribute ("cold",
1949 DECL_ATTRIBUTES (n->decl)))
1950 return false;
1951 if (n->frequency < NODE_FREQUENCY_NORMAL)
1952 return false;
1953 /* If there is no live virtual table referring to the target, the only
1954 way the target can be called is via an instance coming from another
1955 compilation unit; speculative devirtualization is built around the
1956 assumption that this won't happen. */
1957 if (!referenced_from_vtable_p (n))
1958 return false;
1959 return true;
1960 }
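/* Illustrative C++ example of targets that are ruled out:

     struct A
     {
       virtual void die () __attribute__ ((noreturn));
       virtual void log () __attribute__ ((cold));
     };

   Neither A::die nor A::log passes likely_target_p, since speculating
   on noreturn or cold targets is unlikely to pay off. */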
1961
1962 /* The ipa-devirt pass.
1963 When polymorphic call has only one likely target in the unit,
1964 turn it into speculative call. */
1965
1966 static unsigned int
1967 ipa_devirt (void)
1968 {
1969 struct cgraph_node *n;
1970 struct pointer_set_t *bad_call_targets = pointer_set_create ();
1971 struct cgraph_edge *e;
1972
1973 int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
1974 int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
1975 int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
1976
1977 FOR_EACH_DEFINED_FUNCTION (n)
1978 {
1979 bool update = false;
1980 if (dump_file && n->indirect_calls)
1981 fprintf (dump_file, "\n\nProcessing function %s/%i\n",
1982 n->name (), n->order);
1983 for (e = n->indirect_calls; e; e = e->next_callee)
1984 if (e->indirect_info->polymorphic)
1985 {
1986 struct cgraph_node *likely_target = NULL;
1987 void *cache_token;
1988 bool final;
1989 int nonconstruction_targets;
1990 vec <cgraph_node *>targets
1991 = possible_polymorphic_call_targets
1992 (e, &final, &cache_token, &nonconstruction_targets);
1993 unsigned int i;
1994
1995 if (dump_file)
1996 dump_possible_polymorphic_call_targets
1997 (dump_file, e);
1998
1999 npolymorphic++;
2000
2001 if (!cgraph_maybe_hot_edge_p (e))
2002 {
2003 if (dump_file)
2004 fprintf (dump_file, "Call is cold\n\n");
2005 ncold++;
2006 continue;
2007 }
2008 if (e->speculative)
2009 {
2010 if (dump_file)
2011 fprintf (dump_file, "Call is already speculated\n\n");
2012 nspeculated++;
2013
2014 /* When dumping, see whether we agree with the speculation. */
2015 if (!dump_file)
2016 continue;
2017 }
2018 if (pointer_set_contains (bad_call_targets,
2019 cache_token))
2020 {
2021 if (dump_file)
2022 fprintf (dump_file, "Target list is known to be useless\n\n");
2023 nmultiple++;
2024 continue;
2025 }
2026 for (i = 0; i < targets.length (); i++)
2027 if (likely_target_p (targets[i]))
2028 {
2029 if (likely_target)
2030 {
2031 if (i < (unsigned) nonconstruction_targets)
2032 {
2033 likely_target = NULL;
2034 if (dump_file)
2035 fprintf (dump_file, "More than one likely target\n\n");
2036 nmultiple++;
2037 }
2038 break;
2039 }
2040 likely_target = targets[i];
2041 }
2042 if (!likely_target)
2043 {
2044 pointer_set_insert (bad_call_targets, cache_token);
2045 continue;
2046 }
2047 /* This is reached only when dumping; check if we agree or disagree
2048 with the speculation. */
2049 if (e->speculative)
2050 {
2051 struct cgraph_edge *e2;
2052 struct ipa_ref *ref;
2053 cgraph_speculative_call_info (e, e2, e, ref);
2054 if (cgraph_function_or_thunk_node (e2->callee, NULL)
2055 == cgraph_function_or_thunk_node (likely_target, NULL))
2056 {
2057 fprintf (dump_file, "We agree with speculation\n\n");
2058 nok++;
2059 }
2060 else
2061 {
2062 fprintf (dump_file, "We disagree with speculation\n\n");
2063 nwrong++;
2064 }
2065 continue;
2066 }
2067 if (!likely_target->definition)
2068 {
2069 if (dump_file)
2070 fprintf (dump_file, "Target is not a definition\n\n");
2071 nnotdefined++;
2072 continue;
2073 }
2074 /* Do not introduce new references to external symbols. While we
2075 could handle these just fine, it is common for programs to be
2076 compiled against headers declaring methods they are never actually
2077 linked with. */
2078 if (DECL_EXTERNAL (likely_target->decl))
2079 {
2080 if (dump_file)
2081 fprintf (dump_file, "Target is external\n\n");
2082 nexternal++;
2083 continue;
2084 }
2085 /* Don't use an implicitly-declared destructor (c++/58678). */
2086 struct cgraph_node *non_thunk_target
2087 = cgraph_function_node (likely_target);
2088 if (DECL_ARTIFICIAL (non_thunk_target->decl)
2089 && DECL_COMDAT (non_thunk_target->decl))
2090 {
2091 if (dump_file)
2092 fprintf (dump_file, "Target is artificial\n\n");
2093 nartificial++;
2094 continue;
2095 }
2096 if (cgraph_function_body_availability (likely_target)
2097 <= AVAIL_OVERWRITABLE
2098 && symtab_can_be_discarded (likely_target))
2099 {
2100 if (dump_file)
2101 fprintf (dump_file, "Target is overwritable\n\n");
2102 noverwritable++;
2103 continue;
2104 }
2105 else if (dbg_cnt (devirt))
2106 {
2107 if (dump_enabled_p ())
2108 {
2109 location_t locus = gimple_location_safe (e->call_stmt);
2110 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
2111 "speculatively devirtualizing call in %s/%i to %s/%i\n",
2112 n->name (), n->order,
2113 likely_target->name (),
2114 likely_target->order);
2115 }
2116 if (!symtab_can_be_discarded (likely_target))
2117 {
2118 cgraph_node *alias;
2119 alias = cgraph (symtab_nonoverwritable_alias
2120 (likely_target));
2121 if (alias)
2122 likely_target = alias;
2123 }
2124 nconverted++;
2125 update = true;
2126 cgraph_turn_edge_to_speculative
2127 (e, likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
2128 }
2129 }
2130 if (update)
2131 inline_update_overall_summary (n);
2132 }
2133 pointer_set_destroy (bad_call_targets);
2134
2135 if (dump_file)
2136 fprintf (dump_file,
2137 "%i polymorphic calls, %i devirtualized,"
2138 " %i speculatively devirtualized, %i cold\n"
2139 "%i have multiple targets, %i overwritable,"
2140 " %i already speculated (%i agree, %i disagree),"
2141 " %i external, %i not defined, %i artificial\n",
2142 npolymorphic, ndevirtualized, nconverted, ncold,
2143 nmultiple, noverwritable, nspeculated, nok, nwrong,
2144 nexternal, nnotdefined, nartificial);
2145 return ndevirtualized ? TODO_remove_functions : 0;
2146 }
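/* At the source level the conversion performed above corresponds to
   rewriting (illustrative pseudo-code; the actual comparison is emitted
   later, when the speculative edge is expanded):

     ptr->f ();                         // hot polymorphic call

   into

     if (actual_target (ptr) == &B::f)  // pseudo-code for the vptr test
       B::f (ptr);                      // direct call, now inlinable
     else
       ptr->f ();                       // conservative fallback

   with 8/10 of the original count and frequency attributed to the
   direct branch, as passed to cgraph_turn_edge_to_speculative above. */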
2147
2148 namespace {
2149
2150 const pass_data pass_data_ipa_devirt =
2151 {
2152 IPA_PASS, /* type */
2153 "devirt", /* name */
2154 OPTGROUP_NONE, /* optinfo_flags */
2155 true, /* has_execute */
2156 TV_IPA_DEVIRT, /* tv_id */
2157 0, /* properties_required */
2158 0, /* properties_provided */
2159 0, /* properties_destroyed */
2160 0, /* todo_flags_start */
2161 ( TODO_dump_symtab ), /* todo_flags_finish */
2162 };
2163
2164 class pass_ipa_devirt : public ipa_opt_pass_d
2165 {
2166 public:
2167 pass_ipa_devirt (gcc::context *ctxt)
2168 : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
2169 NULL, /* generate_summary */
2170 NULL, /* write_summary */
2171 NULL, /* read_summary */
2172 NULL, /* write_optimization_summary */
2173 NULL, /* read_optimization_summary */
2174 NULL, /* stmt_fixup */
2175 0, /* function_transform_todo_flags_start */
2176 NULL, /* function_transform */
2177 NULL) /* variable_transform */
2178 {}
2179
2180 /* opt_pass methods: */
2181 virtual bool gate (function *)
2182 {
2183 return (flag_devirtualize
2184 && flag_devirtualize_speculatively
2185 && optimize);
2186 }
2187
2188 virtual unsigned int execute (function *) { return ipa_devirt (); }
2189
2190 }; // class pass_ipa_devirt
2191
2192 } // anon namespace
2193
2194 ipa_opt_pass_d *
2195 make_pass_ipa_devirt (gcc::context *ctxt)
2196 {
2197 return new pass_ipa_devirt (ctxt);
2198 }
2199
2200 #include "gt-ipa-devirt.h"