ipa-devirt.c (referenced_from_vtable_p): New predicate.
[gcc.git] / gcc / ipa-devirt.c
1 /* Basic IPA utilities for type inheritance graph construction and
2 devirtualization.
3 Copyright (C) 2013-2014 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* Brief vocabulary:
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
38
39 OTR = OBJ_TYPE_REF
40 This is the Gimple representation of type information of a polymorphic call.
41 It contains two parameters:
42 otr_type is a type of class whose method is called.
43 otr_token is the index into virtual table where address is taken.
44
45 BINFO
46 This is the type inheritance information attached to each tree
47 RECORD_TYPE by the C++ frontend. It provides information about base
48 types and virtual tables.
49
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
52 BINFO_VTABLE.
53
54 Base types of a given type are enumerated by BINFO_BASE_BINFO
55 vector. Members of these vectors are not the BINFOs associated
56 with a base type. Rather they are new copies of BINFOs
57 (base BINFOs). Their virtual tables may differ from
58 virtual table of the base type. Also BINFO_OFFSET specifies
59 offset of the base within the type.
60
61 In the case of single inheritance, the virtual table is shared
62 and BINFO_VTABLE of base BINFO is NULL. In the case of multiple
63 inheritance the individual virtual tables are pointed to by
64 BINFO_VTABLE of base binfos (which differ from BINFO_VTABLE of
65 the binfo associated with the base type).
66
67 BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
70 base type.
71
72 token
73 This is an index of virtual method in virtual table associated
74 to the type defining it. Token can be looked up from OBJ_TYPE_REF
75 or from DECL_VINDEX of a given virtual table.
76
77 polymorphic (indirect) call
78 This is the callgraph representation of a virtual method call. Every
79 polymorphic call contains otr_type and otr_token taken from
80 original OBJ_TYPE_REF at callgraph construction time.
81
82 What we do here:
83
84 build_type_inheritance_graph triggers a construction of the type inheritance
85 graph.
86
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
89 inserted into the graph. Also types without virtual methods are not
90 represented at all, though it may be easy to add this.
91
92 The inheritance graph is represented as follows:
93
94 Vertices are structures odr_type. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by ODR rule.
96 (the multiple type nodes appear only with linktime optimization)
97
98 Edges are represented by odr_type->base and odr_type->derived_types.
99 At the moment we do not track offsets of types for multiple inheritance.
100 Adding this is easy.
101
102 possible_polymorphic_call_targets returns, given the parameters found in an
103 indirect polymorphic edge, all possible polymorphic call targets of the call.
104
105 pass_ipa_devirt performs simple speculative devirtualization.
106 */
107
108 #include "config.h"
109 #include "system.h"
110 #include "coretypes.h"
111 #include "tm.h"
112 #include "tree.h"
113 #include "print-tree.h"
114 #include "calls.h"
115 #include "cgraph.h"
116 #include "expr.h"
117 #include "tree-pass.h"
118 #include "pointer-set.h"
119 #include "target.h"
120 #include "hash-table.h"
121 #include "tree-pretty-print.h"
122 #include "ipa-utils.h"
123 #include "tree-ssa-alias.h"
124 #include "internal-fn.h"
125 #include "gimple-fold.h"
126 #include "gimple-expr.h"
127 #include "gimple.h"
128 #include "ipa-inline.h"
129 #include "diagnostic.h"
130 #include "tree-dfa.h"
131 #include "demangle.h"
132
133 static bool odr_violation_reported = false;
134
135 /* Dummy polymorphic call context. */
136
137 const ipa_polymorphic_call_context ipa_dummy_polymorphic_call_context
138 = {0, NULL, false, true};
139
140 /* Pointer set of all call targets appearing in the cache. */
141 static pointer_set_t *cached_polymorphic_call_targets;
142
143 /* The node of type inheritance graph. For each type unique in
144 One Defintion Rule (ODR) sense, we produce one node linking all
145 main variants of types equivalent to it, bases and derived types. */
146
147 struct GTY(()) odr_type_d
148 {
149 /* leader type. */
150 tree type;
151 /* All bases. */
152 vec<odr_type> GTY((skip)) bases;
153 /* All derrived types with virtual methods seen in unit. */
154 vec<odr_type> GTY((skip)) derived_types;
155
156 /* All equivalent types, if more than one. */
157 vec<tree, va_gc> *types;
158 /* Set of all equivalent types, if NON-NULL. */
159 pointer_set_t * GTY((skip)) types_set;
160
161 /* Unique ID indexing the type in odr_types array. */
162 int id;
163 /* Is it in anonymous namespace? */
164 bool anonymous_namespace;
165 };
166
167
168 /* Return true if BINFO corresponds to a type with virtual methods.
169
170 Every type has several BINFOs. One is the BINFO associated by the type
171 while other represents bases of derived types. The BINFOs representing
172 bases do not have BINFO_VTABLE pointer set when this is the single
173 inheritance (because vtables are shared). Look up the BINFO of type
174 and check presence of its vtable. */
175
176 static inline bool
177 polymorphic_type_binfo_p (tree binfo)
178 {
179 /* See if BINFO's type has an virtual table associtated with it. */
180 return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo)));
181 }
182
183 /* One Definition Rule hashtable helpers. */
184
185 struct odr_hasher
186 {
187 typedef odr_type_d value_type;
188 typedef union tree_node compare_type;
189 static inline hashval_t hash (const value_type *);
190 static inline bool equal (const value_type *, const compare_type *);
191 static inline void remove (value_type *);
192 };
193
194 /* Produce hash based on type name. */
195
196 hashval_t
197 hash_type_name (tree t)
198 {
199 gcc_checking_assert (TYPE_MAIN_VARIANT (t) == t);
200
201 /* If not in LTO, all main variants are unique, so we can do
202 pointer hash. */
203 if (!in_lto_p)
204 return htab_hash_pointer (t);
205
206 /* Anonymous types are unique. */
207 if (type_in_anonymous_namespace_p (t))
208 return htab_hash_pointer (t);
209
210 /* For polymorphic types, we can simply hash the virtual table. */
211 if (TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
212 {
213 tree v = BINFO_VTABLE (TYPE_BINFO (t));
214 hashval_t hash = 0;
215
216 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
217 {
218 hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
219 v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
220 }
221
222 v = DECL_ASSEMBLER_NAME (v);
223 hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
224 return hash;
225 }
226
227 /* Rest is not implemented yet. */
228 gcc_unreachable ();
229 }
230
231 /* Return the computed hashcode for ODR_TYPE. */
232
233 inline hashval_t
234 odr_hasher::hash (const value_type *odr_type)
235 {
236 return hash_type_name (odr_type->type);
237 }
238
239 /* Compare types T1 and T2 and return true if they are
240 equivalent. */
241
242 inline bool
243 odr_hasher::equal (const value_type *t1, const compare_type *ct2)
244 {
245 tree t2 = const_cast <tree> (ct2);
246
247 gcc_checking_assert (TYPE_MAIN_VARIANT (ct2) == ct2);
248 if (t1->type == t2)
249 return true;
250 if (!in_lto_p)
251 return false;
252 return types_same_for_odr (t1->type, t2);
253 }
254
255 /* Free ODR type V. */
256
257 inline void
258 odr_hasher::remove (value_type *v)
259 {
260 v->bases.release ();
261 v->derived_types.release ();
262 if (v->types_set)
263 pointer_set_destroy (v->types_set);
264 ggc_free (v);
265 }
266
267 /* ODR type hash used to lookup ODR type based on tree type node. */
268
269 typedef hash_table <odr_hasher> odr_hash_type;
270 static odr_hash_type odr_hash;
271
272 /* ODR types are also stored into ODR_TYPE vector to allow consistent
273 walking. Bases appear before derived types. Vector is garbage collected
274 so we won't end up visiting empty types. */
275
276 static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
277 #define odr_types (*odr_types_ptr)
278
279 /* TYPE is equivalent to VAL by ODR, but its tree representation differs
280 from VAL->type. This may happen in LTO where tree merging did not merge
281 all variants of the same type. It may or may not mean the ODR violation.
282 Add it to the list of duplicates and warn on some violations. */
283
284 static void
285 add_type_duplicate (odr_type val, tree type)
286 {
287 if (!val->types_set)
288 val->types_set = pointer_set_create ();
289
290 /* See if this duplicate is new. */
291 if (!pointer_set_insert (val->types_set, type))
292 {
293 bool merge = true;
294 bool base_mismatch = false;
295 gcc_assert (in_lto_p);
296 vec_safe_push (val->types, type);
297 unsigned int i,j;
298
299 /* First we compare memory layout. */
300 if (!types_compatible_p (val->type, type))
301 {
302 merge = false;
303 odr_violation_reported = true;
304 if (BINFO_VTABLE (TYPE_BINFO (val->type))
305 && warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
306 "type %qD violates one definition rule ",
307 type))
308 inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
309 "a type with the same name but different layout is "
310 "defined in another translation unit");
311 if (cgraph_dump_file)
312 {
313 fprintf (cgraph_dump_file, "ODR violation or merging or ODR type bug?\n");
314
315 print_node (cgraph_dump_file, "", val->type, 0);
316 putc ('\n',cgraph_dump_file);
317 print_node (cgraph_dump_file, "", type, 0);
318 putc ('\n',cgraph_dump_file);
319 }
320 }
321
322 /* Next sanity check that bases are the same. If not, we will end
323 up producing wrong answers. */
324 for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
325 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type), i)))
326 {
327 odr_type base = get_odr_type
328 (BINFO_TYPE
329 (BINFO_BASE_BINFO (TYPE_BINFO (type),
330 i)),
331 true);
332 if (val->bases.length () <= j || val->bases[j] != base)
333 base_mismatch = true;
334 j++;
335 }
336 if (base_mismatch)
337 {
338 merge = false;
339 odr_violation_reported = true;
340
341 if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
342 "type %qD violates one definition rule ",
343 type))
344 inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
345 "a type with the same name but different bases is "
346 "defined in another translation unit");
347 if (cgraph_dump_file)
348 {
349 fprintf (cgraph_dump_file, "ODR bse violation or merging bug?\n");
350
351 print_node (cgraph_dump_file, "", val->type, 0);
352 putc ('\n',cgraph_dump_file);
353 print_node (cgraph_dump_file, "", type, 0);
354 putc ('\n',cgraph_dump_file);
355 }
356 }
357
358 /* Regularize things a little. During LTO same types may come with
359 different BINFOs. Either because their virtual table was
360 not merged by tree merging and only later at decl merging or
361 because one type comes with external vtable, while other
362 with internal. We want to merge equivalent binfos to conserve
363 memory and streaming overhead.
364
365 The external vtables are more harmful: they contain references
366 to external declarations of methods that may be defined in the
367 merged LTO unit. For this reason we absolutely need to remove
368 them and replace by internal variants. Not doing so will lead
369 to incomplete answers from possible_polymorphic_call_targets. */
370 if (!flag_ltrans && merge)
371 {
372 tree master_binfo = TYPE_BINFO (val->type);
373 tree v1 = BINFO_VTABLE (master_binfo);
374 tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
375
376 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
377 {
378 gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
379 && operand_equal_p (TREE_OPERAND (v1, 1),
380 TREE_OPERAND (v2, 1), 0));
381 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
382 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
383 }
384 gcc_assert (DECL_ASSEMBLER_NAME (v1)
385 == DECL_ASSEMBLER_NAME (v2));
386
387 if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
388 {
389 unsigned int i;
390
391 TYPE_BINFO (val->type) = TYPE_BINFO (type);
392 for (i = 0; i < val->types->length (); i++)
393 {
394 if (TYPE_BINFO ((*val->types)[i])
395 == master_binfo)
396 TYPE_BINFO ((*val->types)[i]) = TYPE_BINFO (type);
397 }
398 }
399 else
400 TYPE_BINFO (type) = master_binfo;
401 }
402 }
403 }
404
405 /* Get ODR type hash entry for TYPE. If INSERT is true, create
406 possibly new entry. */
407
408 odr_type
409 get_odr_type (tree type, bool insert)
410 {
411 odr_type_d **slot;
412 odr_type val;
413 hashval_t hash;
414
415 type = TYPE_MAIN_VARIANT (type);
416 gcc_checking_assert (TYPE_MAIN_VARIANT (type) == type);
417 hash = hash_type_name (type);
418 slot = odr_hash.find_slot_with_hash (type, hash, insert ? INSERT : NO_INSERT);
419 if (!slot)
420 return NULL;
421
422 /* See if we already have entry for type. */
423 if (*slot)
424 {
425 val = *slot;
426
427 /* With LTO we need to support multiple tree representation of
428 the same ODR type. */
429 if (val->type != type)
430 add_type_duplicate (val, type);
431 }
432 else
433 {
434 tree binfo = TYPE_BINFO (type);
435 unsigned int i;
436
437 val = ggc_alloc_cleared_odr_type_d ();
438 val->type = type;
439 val->bases = vNULL;
440 val->derived_types = vNULL;
441 val->anonymous_namespace = type_in_anonymous_namespace_p (type);
442 *slot = val;
443 for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
444 /* For now record only polymorphic types. other are
445 pointless for devirtualization and we can not precisely
446 determine ODR equivalency of these during LTO. */
447 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
448 {
449 odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
450 i)),
451 true);
452 base->derived_types.safe_push (val);
453 val->bases.safe_push (base);
454 }
455 /* First record bases, then add into array so ids are increasing. */
456 if (odr_types_ptr)
457 val->id = odr_types.length ();
458 vec_safe_push (odr_types_ptr, val);
459 }
460 return val;
461 }
462
463 /* Dump ODR type T and all its derrived type. INDENT specify indentation for
464 recusive printing. */
465
466 static void
467 dump_odr_type (FILE *f, odr_type t, int indent=0)
468 {
469 unsigned int i;
470 fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
471 print_generic_expr (f, t->type, TDF_SLIM);
472 fprintf (f, "%s\n", t->anonymous_namespace ? " (anonymous namespace)":"");
473 if (TYPE_NAME (t->type))
474 {
475 fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
476 DECL_SOURCE_FILE (TYPE_NAME (t->type)),
477 DECL_SOURCE_LINE (TYPE_NAME (t->type)));
478 }
479 if (t->bases.length ())
480 {
481 fprintf (f, "%*s base odr type ids: ", indent * 2, "");
482 for (i = 0; i < t->bases.length (); i++)
483 fprintf (f, " %i", t->bases[i]->id);
484 fprintf (f, "\n");
485 }
486 if (t->derived_types.length ())
487 {
488 fprintf (f, "%*s derived types:\n", indent * 2, "");
489 for (i = 0; i < t->derived_types.length (); i++)
490 dump_odr_type (f, t->derived_types[i], indent + 1);
491 }
492 fprintf (f, "\n");
493 }
494
495 /* Dump the type inheritance graph. */
496
497 static void
498 dump_type_inheritance_graph (FILE *f)
499 {
500 unsigned int i;
501 if (!odr_types_ptr)
502 return;
503 fprintf (f, "\n\nType inheritance graph:\n");
504 for (i = 0; i < odr_types.length (); i++)
505 {
506 if (odr_types[i]->bases.length () == 0)
507 dump_odr_type (f, odr_types[i]);
508 }
509 for (i = 0; i < odr_types.length (); i++)
510 {
511 if (odr_types[i]->types && odr_types[i]->types->length ())
512 {
513 unsigned int j;
514 fprintf (f, "Duplicate tree types for odr type %i\n", i);
515 print_node (f, "", odr_types[i]->type, 0);
516 for (j = 0; j < odr_types[i]->types->length (); j++)
517 {
518 tree t;
519 fprintf (f, "duplicate #%i\n", j);
520 print_node (f, "", (*odr_types[i]->types)[j], 0);
521 t = (*odr_types[i]->types)[j];
522 while (TYPE_P (t) && TYPE_CONTEXT (t))
523 {
524 t = TYPE_CONTEXT (t);
525 print_node (f, "", t, 0);
526 }
527 putc ('\n',f);
528 }
529 }
530 }
531 }
532
533 /* Given method type T, return type of class it belongs to.
534 Lookup this pointer and get its type. */
535
536 tree
537 method_class_type (tree t)
538 {
539 tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
540 gcc_assert (TREE_CODE (t) == METHOD_TYPE);
541
542 return TREE_TYPE (first_parm_type);
543 }
544
545 /* Initialize IPA devirt and build inheritance tree graph. */
546
547 void
548 build_type_inheritance_graph (void)
549 {
550 struct symtab_node *n;
551 FILE *inheritance_dump_file;
552 int flags;
553
554 if (odr_hash.is_created ())
555 return;
556 timevar_push (TV_IPA_INHERITANCE);
557 inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
558 odr_hash.create (23);
559
560 /* We reconstruct the graph starting of types of all methods seen in the
561 the unit. */
562 FOR_EACH_SYMBOL (n)
563 if (is_a <cgraph_node> (n)
564 && DECL_VIRTUAL_P (n->decl)
565 && symtab_real_symbol_p (n))
566 get_odr_type (method_class_type (TREE_TYPE (n->decl)), true);
567
568 /* Look also for virtual tables of types that do not define any methods.
569
570 We need it in a case where class B has virtual base of class A
571 re-defining its virtual method and there is class C with no virtual
572 methods with B as virtual base.
573
574 Here we output B's virtual method in two variant - for non-virtual
575 and virtual inheritance. B's virtual table has non-virtual version,
576 while C's has virtual.
577
578 For this reason we need to know about C in order to include both
579 variants of B. More correctly, record_target_from_binfo should
580 add both variants of the method when walking B, but we have no
581 link in between them.
582
583 We rely on fact that either the method is exported and thus we
584 assume it is called externally or C is in anonymous namespace and
585 thus we will see the vtable. */
586
587 else if (is_a <varpool_node> (n)
588 && DECL_VIRTUAL_P (n->decl)
589 && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
590 && TYPE_BINFO (DECL_CONTEXT (n->decl))
591 && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
592 get_odr_type (DECL_CONTEXT (n->decl), true);
593 if (inheritance_dump_file)
594 {
595 dump_type_inheritance_graph (inheritance_dump_file);
596 dump_end (TDI_inheritance, inheritance_dump_file);
597 }
598 timevar_pop (TV_IPA_INHERITANCE);
599 }
600
601 /* Return true if N has reference from live virtual table
602 (and thus can be a destination of polymorphic call).
603 Be conservatively correct when callgraph is not built or
604 if the method may be referred externally. */
605
606 static bool
607 referenced_from_vtable_p (struct cgraph_node *node)
608 {
609 int i;
610 struct ipa_ref *ref;
611 bool found = false;
612
613 if (node->externally_visible
614 || node->used_from_other_partition)
615 return true;
616
617 /* Keep this test constant time.
618 It is unlikely this can happen except for the case where speculative
619 devirtualization introduced many speculative edges to this node.
620 In this case the target is very likely alive anyway. */
621 if (node->ref_list.referring.length () > 100)
622 return true;
623
624 /* We need references built. */
625 if (cgraph_state <= CGRAPH_STATE_CONSTRUCTION)
626 return true;
627
628 for (i = 0; ipa_ref_list_referring_iterate (&node->ref_list,
629 i, ref); i++)
630
631 if ((ref->use == IPA_REF_ALIAS
632 && referenced_from_vtable_p (cgraph (ref->referring)))
633 || (ref->use == IPA_REF_ADDR
634 && TREE_CODE (ref->referring->decl) == VAR_DECL
635 && DECL_VIRTUAL_P (ref->referring->decl)))
636 {
637 found = true;
638 break;
639 }
640 return found;
641 }
642
643 /* If TARGET has associated node, record it in the NODES array.
644 CAN_REFER specify if program can refer to the target directly.
645 if TARGET is unknown (NULL) or it can not be inserted (for example because
646 its body was already removed and there is no way to refer to it), clear
647 COMPLETEP. */
648
649 static void
650 maybe_record_node (vec <cgraph_node *> &nodes,
651 tree target, pointer_set_t *inserted,
652 bool can_refer,
653 bool *completep)
654 {
655 struct cgraph_node *target_node;
656
657 /* cxa_pure_virtual and __builtin_unreachable do not need to be added into
658 list of targets; the runtime effect of calling them is undefined.
659 Only "real" virtual methods should be accounted. */
660 if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE)
661 return;
662
663 if (!can_refer)
664 {
665 /* The only case when method of anonymous namespace becomes unreferable
666 is when we completely optimized it out. */
667 if (flag_ltrans
668 || !target
669 || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
670 *completep = false;
671 return;
672 }
673
674 if (!target)
675 return;
676
677 target_node = cgraph_get_node (target);
678
679 /* Method can only be called by polymorphic call if any
680 of vtables refering to it are alive.
681
682 While this holds for non-anonymous functions, too, there are
683 cases where we want to keep them in the list; for example
684 inline functions with -fno-weak are static, but we still
685 may devirtualize them when instance comes from other unit.
686 The same holds for LTO.
687
688 Currently we ignore these functions in speculative devirtualization.
689 ??? Maybe it would make sense to be more aggressive for LTO even
690 eslewhere. */
691 if (!flag_ltrans
692 && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
693 && (!target_node
694 || !referenced_from_vtable_p (target_node)))
695 ;
696 /* See if TARGET is useful function we can deal with. */
697 else if (target_node != NULL
698 && (TREE_PUBLIC (target)
699 || DECL_EXTERNAL (target)
700 || target_node->definition)
701 && symtab_real_symbol_p (target_node))
702 {
703 gcc_assert (!target_node->global.inlined_to);
704 gcc_assert (symtab_real_symbol_p (target_node));
705 if (!pointer_set_insert (inserted, target))
706 {
707 pointer_set_insert (cached_polymorphic_call_targets,
708 target_node);
709 nodes.safe_push (target_node);
710 }
711 }
712 else if (completep
713 && !type_in_anonymous_namespace_p
714 (method_class_type (TREE_TYPE (target))))
715 *completep = false;
716 }
717
718 /* See if BINFO's type match OUTER_TYPE. If so, lookup
719 BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
720 method in vtable and insert method to NODES array.
721 Otherwise recurse to base BINFOs.
722 This match what get_binfo_at_offset does, but with offset
723 being unknown.
724
725 TYPE_BINFOS is a stack of BINFOS of types with defined
726 virtual table seen on way from class type to BINFO.
727
728 MATCHED_VTABLES tracks virtual tables we already did lookup
729 for virtual function in. INSERTED tracks nodes we already
730 inserted.
731
732 ANONYMOUS is true if BINFO is part of anonymous namespace.
733
734 Clear COMPLETEP when we hit unreferable target.
735 */
736
737 static void
738 record_target_from_binfo (vec <cgraph_node *> &nodes,
739 tree binfo,
740 tree otr_type,
741 vec <tree> &type_binfos,
742 HOST_WIDE_INT otr_token,
743 tree outer_type,
744 HOST_WIDE_INT offset,
745 pointer_set_t *inserted,
746 pointer_set_t *matched_vtables,
747 bool anonymous,
748 bool *completep)
749 {
750 tree type = BINFO_TYPE (binfo);
751 int i;
752 tree base_binfo;
753
754
755 if (BINFO_VTABLE (binfo))
756 type_binfos.safe_push (binfo);
757 if (types_same_for_odr (type, outer_type))
758 {
759 int i;
760 tree type_binfo = NULL;
761
762 /* Lookup BINFO with virtual table. For normal types it is always last
763 binfo on stack. */
764 for (i = type_binfos.length () - 1; i >= 0; i--)
765 if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
766 {
767 type_binfo = type_binfos[i];
768 break;
769 }
770 if (BINFO_VTABLE (binfo))
771 type_binfos.pop ();
772 /* If this is duplicated BINFO for base shared by virtual inheritance,
773 we may not have its associated vtable. This is not a problem, since
774 we will walk it on the other path. */
775 if (!type_binfo)
776 return;
777 tree inner_binfo = get_binfo_at_offset (type_binfo,
778 offset, otr_type);
779 if (!inner_binfo)
780 {
781 gcc_assert (odr_violation_reported);
782 return;
783 }
784 /* For types in anonymous namespace first check if the respective vtable
785 is alive. If not, we know the type can't be called. */
786 if (!flag_ltrans && anonymous)
787 {
788 tree vtable = BINFO_VTABLE (inner_binfo);
789 varpool_node *vnode;
790
791 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
792 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
793 vnode = varpool_get_node (vtable);
794 if (!vnode || !vnode->definition)
795 return;
796 }
797 gcc_assert (inner_binfo);
798 if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (inner_binfo)))
799 {
800 bool can_refer;
801 tree target = gimple_get_virt_method_for_binfo (otr_token,
802 inner_binfo,
803 &can_refer);
804 maybe_record_node (nodes, target, inserted, can_refer, completep);
805 }
806 return;
807 }
808
809 /* Walk bases. */
810 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
811 /* Walking bases that have no virtual method is pointless excercise. */
812 if (polymorphic_type_binfo_p (base_binfo))
813 record_target_from_binfo (nodes, base_binfo, otr_type,
814 type_binfos,
815 otr_token, outer_type, offset, inserted,
816 matched_vtables, anonymous, completep);
817 if (BINFO_VTABLE (binfo))
818 type_binfos.pop ();
819 }
820
821 /* Lookup virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
822 of TYPE, insert them to NODES, recurse into derived nodes.
823 INSERTED is used to avoid duplicate insertions of methods into NODES.
824 MATCHED_VTABLES are used to avoid duplicate walking vtables.
825 Clear COMPLETEP if unreferable target is found. */
826
827 static void
828 possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
829 pointer_set_t *inserted,
830 pointer_set_t *matched_vtables,
831 tree otr_type,
832 odr_type type,
833 HOST_WIDE_INT otr_token,
834 tree outer_type,
835 HOST_WIDE_INT offset,
836 bool *completep)
837 {
838 tree binfo = TYPE_BINFO (type->type);
839 unsigned int i;
840 vec <tree> type_binfos = vNULL;
841
842 record_target_from_binfo (nodes, binfo, otr_type, type_binfos, otr_token,
843 outer_type, offset,
844 inserted, matched_vtables,
845 type->anonymous_namespace, completep);
846 type_binfos.release ();
847 for (i = 0; i < type->derived_types.length (); i++)
848 possible_polymorphic_call_targets_1 (nodes, inserted,
849 matched_vtables,
850 otr_type,
851 type->derived_types[i],
852 otr_token, outer_type, offset, completep);
853 }
854
855 /* Cache of queries for polymorphic call targets.
856
857 Enumerating all call targets may get expensive when there are many
858 polymorphic calls in the program, so we memoize all the previous
859 queries and avoid duplicated work. */
860
861 struct polymorphic_call_target_d
862 {
863 HOST_WIDE_INT otr_token;
864 ipa_polymorphic_call_context context;
865 odr_type type;
866 vec <cgraph_node *> targets;
867 int nonconstruction_targets;
868 bool complete;
869 };
870
871 /* Polymorphic call target cache helpers. */
872
873 struct polymorphic_call_target_hasher
874 {
875 typedef polymorphic_call_target_d value_type;
876 typedef polymorphic_call_target_d compare_type;
877 static inline hashval_t hash (const value_type *);
878 static inline bool equal (const value_type *, const compare_type *);
879 static inline void remove (value_type *);
880 };
881
882 /* Return the computed hashcode for ODR_QUERY. */
883
884 inline hashval_t
885 polymorphic_call_target_hasher::hash (const value_type *odr_query)
886 {
887 hashval_t hash;
888
889 hash = iterative_hash_host_wide_int
890 (odr_query->otr_token,
891 odr_query->type->id);
892 hash = iterative_hash_hashval_t (TYPE_UID (odr_query->context.outer_type),
893 hash);
894 hash = iterative_hash_host_wide_int (odr_query->context.offset, hash);
895 return iterative_hash_hashval_t
896 (((int)odr_query->context.maybe_in_construction << 1)
897 | (int)odr_query->context.maybe_derived_type, hash);
898 }
899
900 /* Compare cache entries T1 and T2. */
901
902 inline bool
903 polymorphic_call_target_hasher::equal (const value_type *t1,
904 const compare_type *t2)
905 {
906 return (t1->type == t2->type && t1->otr_token == t2->otr_token
907 && t1->context.offset == t2->context.offset
908 && t1->context.outer_type == t2->context.outer_type
909 && t1->context.maybe_in_construction
910 == t2->context.maybe_in_construction
911 && t1->context.maybe_derived_type == t2->context.maybe_derived_type);
912 }
913
914 /* Remove entry in polymorphic call target cache hash. */
915
916 inline void
917 polymorphic_call_target_hasher::remove (value_type *v)
918 {
919 v->targets.release ();
920 free (v);
921 }
922
923 /* Polymorphic call target query cache. */
924
925 typedef hash_table <polymorphic_call_target_hasher>
926 polymorphic_call_target_hash_type;
927 static polymorphic_call_target_hash_type polymorphic_call_target_hash;
928
929 /* Destroy polymorphic call target query cache. */
930
931 static void
932 free_polymorphic_call_targets_hash ()
933 {
934 if (cached_polymorphic_call_targets)
935 {
936 polymorphic_call_target_hash.dispose ();
937 pointer_set_destroy (cached_polymorphic_call_targets);
938 cached_polymorphic_call_targets = NULL;
939 }
940 }
941
942 /* When virtual function is removed, we may need to flush the cache. */
943
944 static void
945 devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
946 {
947 if (cached_polymorphic_call_targets
948 && pointer_set_contains (cached_polymorphic_call_targets, n))
949 free_polymorphic_call_targets_hash ();
950 }
951
952 /* CONTEXT->OUTER_TYPE is a type of memory object where object of EXPECTED_TYPE
953 is contained at CONTEXT->OFFSET. Walk the memory representation of
954 CONTEXT->OUTER_TYPE and find the outermost class type that match
955 EXPECTED_TYPE or contain EXPECTED_TYPE as a base. Update CONTEXT
956 to represent it.
957
958 For example when CONTEXT represents type
959 class A
960 {
961 int a;
962 class B b;
963 }
964 and we look for type at offset sizeof(int), we end up with B and offset 0.
965 If the same is produced by multiple inheritance, we end up with A and offset
966 sizeof(int).
967
968 If we can not find corresponding class, give up by setting
969 CONTEXT->OUTER_TYPE to EXPECTED_TYPE and CONTEXT->OFFSET to NULL.
970 Return true when lookup was sucesful. */
971
972 static bool
973 get_class_context (ipa_polymorphic_call_context *context,
974 tree expected_type)
975 {
976 tree type = context->outer_type;
977 HOST_WIDE_INT offset = context->offset;
978
979 /* Find the sub-object the constant actually refers to and mark whether it is
980 an artificial one (as opposed to a user-defined one). */
981 while (true)
982 {
983 HOST_WIDE_INT pos, size;
984 tree fld;
985
986 /* On a match, just return what we found. */
987 if (TREE_CODE (type) == TREE_CODE (expected_type)
988 && types_same_for_odr (type, expected_type))
989 {
990 /* Type can not contain itself on an non-zero offset. In that case
991 just give up. */
992 if (offset != 0)
993 goto give_up;
994 gcc_assert (offset == 0);
995 return true;
996 }
997
998 /* Walk fields and find corresponding on at OFFSET. */
999 if (TREE_CODE (type) == RECORD_TYPE)
1000 {
1001 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1002 {
1003 if (TREE_CODE (fld) != FIELD_DECL)
1004 continue;
1005
1006 pos = int_bit_position (fld);
1007 size = tree_to_uhwi (DECL_SIZE (fld));
1008 if (pos <= offset && (pos + size) > offset)
1009 break;
1010 }
1011
1012 if (!fld)
1013 goto give_up;
1014
1015 type = TREE_TYPE (fld);
1016 offset -= pos;
1017 /* DECL_ARTIFICIAL represents a basetype. */
1018 if (!DECL_ARTIFICIAL (fld))
1019 {
1020 context->outer_type = type;
1021 context->offset = offset;
1022 /* As soon as we se an field containing the type,
1023 we know we are not looking for derivations. */
1024 context->maybe_derived_type = false;
1025 }
1026 }
1027 else if (TREE_CODE (type) == ARRAY_TYPE)
1028 {
1029 tree subtype = TREE_TYPE (type);
1030
1031 /* Give up if we don't know array size. */
1032 if (!tree_fits_shwi_p (TYPE_SIZE (subtype))
1033 || !tree_to_shwi (TYPE_SIZE (subtype)) <= 0)
1034 goto give_up;
1035 offset = offset % tree_to_shwi (TYPE_SIZE (subtype));
1036 type = subtype;
1037 context->outer_type = type;
1038 context->offset = offset;
1039 context->maybe_derived_type = false;
1040 }
1041 /* Give up on anything else. */
1042 else
1043 goto give_up;
1044 }
1045
1046 /* If we failed to find subtype we look for, give up and fall back to the
1047 most generic query. */
1048 give_up:
1049 context->outer_type = expected_type;
1050 context->offset = 0;
1051 context->maybe_derived_type = true;
1052 return false;
1053 }
1054
1055 /* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET. */
1056
1057 static bool
1058 contains_type_p (tree outer_type, HOST_WIDE_INT offset,
1059 tree otr_type)
1060 {
1061 ipa_polymorphic_call_context context = {offset, outer_type,
1062 false, true};
1063 return get_class_context (&context, otr_type);
1064 }
1065
1066 /* Lookup base of BINFO that has virtual table VTABLE with OFFSET. */
1067
1068 static tree
1069 subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
1070 tree vtable)
1071 {
1072 tree v = BINFO_VTABLE (binfo);
1073 int i;
1074 tree base_binfo;
1075 unsigned HOST_WIDE_INT this_offset;
1076
1077 if (v)
1078 {
1079 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
1080 gcc_unreachable ();
1081
1082 if (offset == this_offset
1083 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
1084 return binfo;
1085 }
1086
1087 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1088 if (polymorphic_type_binfo_p (base_binfo))
1089 {
1090 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
1091 if (base_binfo)
1092 return base_binfo;
1093 }
1094 return NULL;
1095 }
1096
/* T is known constant value of virtual table pointer.
   Store virtual table to V and its offset to OFFSET.
   Return false if T does not look like virtual table reference.  */

bool
vtable_pointer_value_to_vtable (tree t, tree *v, unsigned HOST_WIDE_INT *offset)
{
  /* We expect &MEM[(void *)&virtual_table + 16B].
     We obtain object's BINFO from the context of the virtual table.
     This one contains pointer to virtual table represented via
     POINTER_PLUS_EXPR.  Verify that this pointer match to what
     we propagated through.

     In the case of virtual inheritance, the virtual tables may
     be nested, i.e. the offset may be different from 16 and we may
     need to dive into the type representation.  */
  if (TREE_CODE (t) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
	  == VAR_DECL)
      && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
				       (TREE_OPERAND (t, 0), 0), 0)))
    {
      /* The pattern matched: the MEM_REF base is the address of the
	 vtable VAR_DECL and the MEM_REF offset is the byte offset into
	 that vtable.  */
      *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
      *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
      return true;
    }

  /* Alternative representation, used by C++ frontend is POINTER_PLUS_EXPR.
     We need to handle it when T comes from static variable initializer or
     BINFO.  */
  if (TREE_CODE (t) == POINTER_PLUS_EXPR)
    {
      *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
      t = TREE_OPERAND (t, 0);
    }
  else
    *offset = 0;

  /* What remains must be the address of the vtable decl itself.  */
  if (TREE_CODE (t) != ADDR_EXPR)
    return false;
  *v = TREE_OPERAND (t, 0);
  return true;
}
1143
1144 /* T is known constant value of virtual table pointer. Return BINFO of the
1145 instance type. */
1146
1147 tree
1148 vtable_pointer_value_to_binfo (tree t)
1149 {
1150 tree vtable;
1151 unsigned HOST_WIDE_INT offset;
1152
1153 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
1154 return NULL_TREE;
1155
1156 /* FIXME: for stores of construction vtables we return NULL,
1157 because we do not have BINFO for those. Eventually we should fix
1158 our representation to allow this case to be handled, too.
1159 In the case we see store of BINFO we however may assume
1160 that standard folding will be ale to cope with it. */
1161 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
1162 offset, vtable);
1163 }
1164
1165 /* Proudce polymorphic call context for call method of instance
1166 that is located within BASE (that is assumed to be a decl) at OFFSET. */
1167
1168 static void
1169 get_polymorphic_call_info_for_decl (ipa_polymorphic_call_context *context,
1170 tree base, HOST_WIDE_INT offset)
1171 {
1172 gcc_assert (DECL_P (base));
1173
1174 context->outer_type = TREE_TYPE (base);
1175 context->offset = offset;
1176 /* Make very conservative assumption that all objects
1177 may be in construction.
1178 TODO: ipa-prop already contains code to tell better.
1179 merge it later. */
1180 context->maybe_in_construction = true;
1181 context->maybe_derived_type = false;
1182 }
1183
1184 /* CST is an invariant (address of decl), try to get meaningful
1185 polymorphic call context for polymorphic call of method
1186 if instance of OTR_TYPE that is located at OFFSET of this invariant.
1187 Return FALSE if nothing meaningful can be found. */
1188
1189 bool
1190 get_polymorphic_call_info_from_invariant (ipa_polymorphic_call_context *context,
1191 tree cst,
1192 tree otr_type,
1193 HOST_WIDE_INT offset)
1194 {
1195 HOST_WIDE_INT offset2, size, max_size;
1196 tree base;
1197
1198 if (TREE_CODE (cst) != ADDR_EXPR)
1199 return false;
1200
1201 cst = TREE_OPERAND (cst, 0);
1202 base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
1203 if (!DECL_P (base) || max_size == -1 || max_size != size)
1204 return false;
1205
1206 /* Only type inconsistent programs can have otr_type that is
1207 not part of outer type. */
1208 if (!contains_type_p (TREE_TYPE (base), offset, otr_type))
1209 return false;
1210
1211 get_polymorphic_call_info_for_decl (context, base, offset);
1212 return true;
1213 }
1214
/* Given REF call in FNDECL, determine class of the polymorphic
   call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
   Return pointer to object described by the context, or NULL when
   the outer object was fully resolved to a declaration.  */

tree
get_polymorphic_call_info (tree fndecl,
			   tree ref,
			   tree *otr_type,
			   HOST_WIDE_INT *otr_token,
			   ipa_polymorphic_call_context *context)
{
  tree base_pointer;
  *otr_type = obj_type_ref_class (ref);
  *otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));

  /* Set up basic info in case we find nothing interesting in the analysis. */
  context->outer_type = *otr_type;
  context->offset = 0;
  base_pointer = OBJ_TYPE_REF_OBJECT (ref);
  context->maybe_derived_type = true;
  context->maybe_in_construction = false;

  /* Walk SSA for outer object.  Each iteration either strips one level
     of indirection/offsetting off BASE_POINTER or terminates the walk.  */
  do
    {
      /* Follow simple SSA copies backwards to the defining value.  */
      if (TREE_CODE (base_pointer) == SSA_NAME
	  && !SSA_NAME_IS_DEFAULT_DEF (base_pointer)
	  && SSA_NAME_DEF_STMT (base_pointer)
	  && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
	{
	  base_pointer = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (base_pointer));
	  STRIP_NOPS (base_pointer);
	}
      else if (TREE_CODE (base_pointer) == ADDR_EXPR)
	{
	  HOST_WIDE_INT size, max_size;
	  HOST_WIDE_INT offset2;
	  tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
					       &offset2, &size, &max_size);

	  /* If this is a varying address, punt.  */
	  if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
	      && max_size != -1
	      && max_size == size)
	    {
	      /* We found dereference of a pointer.  Type of the pointer
		 and MEM_REF is meaningless, but we can look further.  */
	      if (TREE_CODE (base) == MEM_REF)
		{
		  base_pointer = TREE_OPERAND (base, 0);
		  context->offset
		    += offset2 + mem_ref_offset (base).low * BITS_PER_UNIT;
		  /* The type accumulated so far described the MEM_REF,
		     not the object; forget it.  */
		  context->outer_type = NULL;
		}
	      /* We found base object.  In this case the outer_type
		 is known.  */
	      else if (DECL_P (base))
		{
		  gcc_assert (!POINTER_TYPE_P (TREE_TYPE (base)));

		  /* Only type inconsistent programs can have otr_type that is
		     not part of outer type.  */
		  if (!contains_type_p (TREE_TYPE (base),
					context->offset + offset2, *otr_type))
		    {
		      /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
			 code sequences; we arrange the calls to be builtin_unreachable
			 later.  */
		      *otr_token = INT_MAX;
		      return base_pointer;
		    }
		  get_polymorphic_call_info_for_decl (context, base,
						      context->offset + offset2);
		  return NULL;
		}
	      else
		break;
	    }
	  else
	    break;
	}
      /* Accumulate constant pointer adjustments into the offset.
	 NOTE(review): the guard checks tree_fits_uhwi_p but the value is
	 read with tree_to_shwi — confirm offsets exceeding the signed
	 HOST_WIDE_INT range can not reach here.  */
      else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
	       && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
	{
	  context->offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
			     * BITS_PER_UNIT;
	  base_pointer = TREE_OPERAND (base_pointer, 0);
	}
      else
	break;
    }
  while (true);

  /* Try to determine type of the outer object.  */
  if (TREE_CODE (base_pointer) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
      && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
    {
      /* See if parameter is THIS pointer of a method.  */
      if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
	  && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
	{
	  context->outer_type = TREE_TYPE (TREE_TYPE (base_pointer));
	  gcc_assert (TREE_CODE (context->outer_type) == RECORD_TYPE);

	  /* Dynamic casting has possibly upcasted the type
	     in the hiearchy.  In this case outer type is less
	     informative than inner type and we should forget
	     about it.  */
	  if (!contains_type_p (context->outer_type, context->offset,
				*otr_type))
	    {
	      context->outer_type = NULL;
	      return base_pointer;
	    }

	  /* If the function is constructor or destructor, then
	     the type is possibly in construction, but we know
	     it is not derived type.  */
	  if (DECL_CXX_CONSTRUCTOR_P (fndecl)
	      || DECL_CXX_DESTRUCTOR_P (fndecl))
	    {
	      context->maybe_in_construction = true;
	      context->maybe_derived_type = false;
	    }
	  else
	    {
	      context->maybe_derived_type = true;
	      context->maybe_in_construction = false;
	    }
	  return base_pointer;
	}
      /* Non-PODs passed by value are really passed by invisible
	 reference.  In this case we also know the type of the
	 object.  */
      if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
	{
	  context->outer_type = TREE_TYPE (TREE_TYPE (base_pointer));
	  gcc_assert (!POINTER_TYPE_P (context->outer_type));
	  /* Only type inconsistent programs can have otr_type that is
	     not part of outer type.  */
	  if (!contains_type_p (context->outer_type, context->offset,
				*otr_type))
	    {
	      /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
		 code sequences; we arrange the calls to be builtin_unreachable
		 later.  */
	      *otr_token = INT_MAX;
	      return base_pointer;
	    }
	  context->maybe_derived_type = false;
	  context->maybe_in_construction = false;
	  return base_pointer;
	}
    }
  /* TODO: There are multiple ways to derive a type.  For instance
     if BASE_POINTER is passed to an constructor call prior our refernece.
     We do not make this type of flow sensitive analysis yet.  */
  return base_pointer;
}
1375
1376 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
1377 Lookup their respecitve virtual methods for OTR_TOKEN and OTR_TYPE
1378 and insert them to NODES.
1379
1380 MATCHED_VTABLES and INSERTED is used to avoid duplicated work. */
1381
1382 static void
1383 record_targets_from_bases (tree otr_type,
1384 HOST_WIDE_INT otr_token,
1385 tree outer_type,
1386 HOST_WIDE_INT offset,
1387 vec <cgraph_node *> &nodes,
1388 pointer_set_t *inserted,
1389 pointer_set_t *matched_vtables,
1390 bool *completep)
1391 {
1392 while (true)
1393 {
1394 HOST_WIDE_INT pos, size;
1395 tree base_binfo;
1396 tree fld;
1397
1398 if (types_same_for_odr (outer_type, otr_type))
1399 return;
1400
1401 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
1402 {
1403 if (TREE_CODE (fld) != FIELD_DECL)
1404 continue;
1405
1406 pos = int_bit_position (fld);
1407 size = tree_to_shwi (DECL_SIZE (fld));
1408 if (pos <= offset && (pos + size) > offset
1409 /* Do not get confused by zero sized bases. */
1410 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
1411 break;
1412 }
1413 /* Within a class type we should always find correcponding fields. */
1414 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
1415
1416 /* Nonbasetypes should have been stripped by outer_class_type. */
1417 gcc_assert (DECL_ARTIFICIAL (fld));
1418
1419 outer_type = TREE_TYPE (fld);
1420 offset -= pos;
1421
1422 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
1423 offset, otr_type);
1424 if (!base_binfo)
1425 {
1426 gcc_assert (odr_violation_reported);
1427 return;
1428 }
1429 gcc_assert (base_binfo);
1430 if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo)))
1431 {
1432 bool can_refer;
1433 tree target = gimple_get_virt_method_for_binfo (otr_token,
1434 base_binfo,
1435 &can_refer);
1436 maybe_record_node (nodes, target, inserted, can_refer, completep);
1437 pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo));
1438 }
1439 }
1440 }
1441
1442 /* When virtual table is removed, we may need to flush the cache. */
1443
1444 static void
1445 devirt_variable_node_removal_hook (varpool_node *n,
1446 void *d ATTRIBUTE_UNUSED)
1447 {
1448 if (cached_polymorphic_call_targets
1449 && DECL_VIRTUAL_P (n->decl)
1450 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
1451 free_polymorphic_call_targets_hash ();
1452 }
1453
1454 /* Return vector containing possible targets of polymorphic call of type
1455 OTR_TYPE caling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
1456 If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containig
1457 OTR_TYPE and include their virtual method. This is useful for types
1458 possibly in construction or destruction where the virtual table may
1459 temporarily change to one of base types. INCLUDE_DERIVER_TYPES make
1460 us to walk the inheritance graph for all derivations.
1461
1462 OTR_TOKEN == INT_MAX is used to mark calls that are provably
1463 undefined and should be redirected to unreachable.
1464
1465 If COMPLETEP is non-NULL, store true if the list is complete.
1466 CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
1467 in the target cache. If user needs to visit every target list
1468 just once, it can memoize them.
1469
1470 NONCONSTRUCTION_TARGETS specify number of targets with asumption that
1471 the type is not in the construction. Those targets appear first in the
1472 vector returned.
1473
1474 Returned vector is placed into cache. It is NOT caller's responsibility
1475 to free it. The vector can be freed on cgraph_remove_node call if
1476 the particular node is a virtual function present in the cache. */
1477
1478 vec <cgraph_node *>
1479 possible_polymorphic_call_targets (tree otr_type,
1480 HOST_WIDE_INT otr_token,
1481 ipa_polymorphic_call_context context,
1482 bool *completep,
1483 void **cache_token,
1484 int *nonconstruction_targetsp)
1485 {
1486 static struct cgraph_node_hook_list *node_removal_hook_holder;
1487 pointer_set_t *inserted;
1488 pointer_set_t *matched_vtables;
1489 vec <cgraph_node *> nodes = vNULL;
1490 odr_type type, outer_type;
1491 polymorphic_call_target_d key;
1492 polymorphic_call_target_d **slot;
1493 unsigned int i;
1494 tree binfo, target;
1495 bool complete;
1496 bool can_refer;
1497
1498 /* If ODR is not initialized, return empty incomplete list. */
1499 if (!odr_hash.is_created ())
1500 {
1501 if (completep)
1502 *completep = false;
1503 if (nonconstruction_targetsp)
1504 *nonconstruction_targetsp = 0;
1505 return nodes;
1506 }
1507
1508 /* If we hit type inconsistency, just return empty list of targets. */
1509 if (otr_token == INT_MAX)
1510 {
1511 if (completep)
1512 *completep = true;
1513 if (nonconstruction_targetsp)
1514 *nonconstruction_targetsp = 0;
1515 return nodes;
1516 }
1517
1518 type = get_odr_type (otr_type, true);
1519
1520 /* Lookup the outer class type we want to walk. */
1521 if (context.outer_type
1522 && !get_class_context (&context, otr_type))
1523 {
1524 if (completep)
1525 *completep = false;
1526 if (nonconstruction_targetsp)
1527 *nonconstruction_targetsp = 0;
1528 return nodes;
1529 }
1530
1531 /* We canonicalize our query, so we do not need extra hashtable entries. */
1532
1533 /* Without outer type, we have no use for offset. Just do the
1534 basic search from innter type */
1535 if (!context.outer_type)
1536 {
1537 context.outer_type = otr_type;
1538 context.offset = 0;
1539 }
1540 /* We need to update our hiearchy if the type does not exist. */
1541 outer_type = get_odr_type (context.outer_type, true);
1542 /* If outer and inner type match, there are no bases to see. */
1543 if (type == outer_type)
1544 context.maybe_in_construction = false;
1545 /* If the type is complete, there are no derivations. */
1546 if (TYPE_FINAL_P (outer_type->type))
1547 context.maybe_derived_type = false;
1548
1549 /* Initialize query cache. */
1550 if (!cached_polymorphic_call_targets)
1551 {
1552 cached_polymorphic_call_targets = pointer_set_create ();
1553 polymorphic_call_target_hash.create (23);
1554 if (!node_removal_hook_holder)
1555 {
1556 node_removal_hook_holder =
1557 cgraph_add_node_removal_hook (&devirt_node_removal_hook, NULL);
1558 varpool_add_node_removal_hook (&devirt_variable_node_removal_hook,
1559 NULL);
1560 }
1561 }
1562
1563 /* Lookup cached answer. */
1564 key.type = type;
1565 key.otr_token = otr_token;
1566 key.context = context;
1567 slot = polymorphic_call_target_hash.find_slot (&key, INSERT);
1568 if (cache_token)
1569 *cache_token = (void *)*slot;
1570 if (*slot)
1571 {
1572 if (completep)
1573 *completep = (*slot)->complete;
1574 if (nonconstruction_targetsp)
1575 *nonconstruction_targetsp = (*slot)->nonconstruction_targets;
1576 return (*slot)->targets;
1577 }
1578
1579 complete = true;
1580
1581 /* Do actual search. */
1582 timevar_push (TV_IPA_VIRTUAL_CALL);
1583 *slot = XCNEW (polymorphic_call_target_d);
1584 if (cache_token)
1585 *cache_token = (void *)*slot;
1586 (*slot)->type = type;
1587 (*slot)->otr_token = otr_token;
1588 (*slot)->context = context;
1589
1590 inserted = pointer_set_create ();
1591 matched_vtables = pointer_set_create ();
1592
1593 /* First see virtual method of type itself. */
1594 binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
1595 context.offset, otr_type);
1596 if (binfo)
1597 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
1598 &can_refer);
1599 else
1600 {
1601 gcc_assert (odr_violation_reported);
1602 target = NULL;
1603 }
1604
1605 maybe_record_node (nodes, target, inserted, can_refer, &complete);
1606
1607 if (target)
1608 {
1609 /* In the case we get complete method, we don't need
1610 to walk derivations. */
1611 if (DECL_FINAL_P (target))
1612 context.maybe_derived_type = false;
1613 }
1614 else
1615 gcc_assert (!complete);
1616
1617 pointer_set_insert (matched_vtables, BINFO_VTABLE (binfo));
1618
1619 /* Next walk recursively all derived types. */
1620 if (context.maybe_derived_type)
1621 {
1622 /* For anonymous namespace types we can attempt to build full type.
1623 All derivations must be in this unit (unless we see partial unit). */
1624 if (!type->anonymous_namespace || flag_ltrans)
1625 complete = false;
1626 for (i = 0; i < outer_type->derived_types.length(); i++)
1627 possible_polymorphic_call_targets_1 (nodes, inserted,
1628 matched_vtables,
1629 otr_type,
1630 outer_type->derived_types[i],
1631 otr_token, outer_type->type,
1632 context.offset, &complete);
1633 }
1634
1635 /* Finally walk bases, if asked to. */
1636 (*slot)->nonconstruction_targets = nodes.length();
1637 if (context.maybe_in_construction)
1638 record_targets_from_bases (otr_type, otr_token, outer_type->type,
1639 context.offset, nodes, inserted,
1640 matched_vtables, &complete);
1641
1642 (*slot)->targets = nodes;
1643 (*slot)->complete = complete;
1644 if (completep)
1645 *completep = complete;
1646 if (nonconstruction_targetsp)
1647 *nonconstruction_targetsp = (*slot)->nonconstruction_targets;
1648
1649 pointer_set_destroy (inserted);
1650 pointer_set_destroy (matched_vtables);
1651 timevar_pop (TV_IPA_VIRTUAL_CALL);
1652 return nodes;
1653 }
1654
1655 /* Dump all possible targets of a polymorphic call. */
1656
1657 void
1658 dump_possible_polymorphic_call_targets (FILE *f,
1659 tree otr_type,
1660 HOST_WIDE_INT otr_token,
1661 const ipa_polymorphic_call_context &ctx)
1662 {
1663 vec <cgraph_node *> targets;
1664 bool final;
1665 odr_type type = get_odr_type (otr_type, false);
1666 unsigned int i;
1667 int nonconstruction;
1668
1669 if (!type)
1670 return;
1671 targets = possible_polymorphic_call_targets (otr_type, otr_token,
1672 ctx,
1673 &final, NULL, &nonconstruction);
1674 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
1675 print_generic_expr (f, type->type, TDF_SLIM);
1676 fprintf (f, " token %i\n", (int)otr_token);
1677 if (ctx.outer_type || ctx.offset)
1678 {
1679 fprintf (f, " Contained in type:");
1680 print_generic_expr (f, ctx.outer_type, TDF_SLIM);
1681 fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n",
1682 ctx.offset);
1683 }
1684
1685 fprintf (f, " %s%s%s\n ",
1686 final ? "This is a complete list." :
1687 "This is partial list; extra targets may be defined in other units.",
1688 ctx.maybe_in_construction ? " (base types included)" : "",
1689 ctx.maybe_derived_type ? " (derived types included)" : "");
1690 for (i = 0; i < targets.length (); i++)
1691 {
1692 char *name = NULL;
1693 if (i == (unsigned)nonconstruction)
1694 fprintf (f, "\n If the type is in construction,"
1695 " then additional tarets are:\n"
1696 " ");
1697 if (in_lto_p)
1698 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
1699 fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
1700 if (in_lto_p)
1701 free (name);
1702 if (!targets[i]->definition)
1703 fprintf (f, " (no definition%s)",
1704 DECL_DECLARED_INLINE_P (targets[i]->decl)
1705 ? " inline" : "");
1706 }
1707 fprintf (f, "\n\n");
1708 }
1709
1710
/* Return true if N can be possibly target of a polymorphic call of
   OTR_TYPE/OTR_TOKEN.  */

bool
possible_polymorphic_call_target_p (tree otr_type,
				    HOST_WIDE_INT otr_token,
				    const ipa_polymorphic_call_context &ctx,
				    struct cgraph_node *n)
{
  vec <cgraph_node *> targets;
  unsigned int i;
  enum built_in_function fcode;
  bool final;

  /* builtin_unreachable and builtin_trap stand for "no valid target"
     and are always acceptable.
     NOTE(review): DECL_FUNCTION_CODE is only meaningful for built-in
     declarations; confirm a plain FUNCTION_TYPE decl can not carry a
     stale code matching these builtins.  */
  if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
      && ((fcode = DECL_FUNCTION_CODE (n->decl))
	  == BUILT_IN_UNREACHABLE
	  || fcode == BUILT_IN_TRAP))
    return true;

  /* Without ODR info we can not rule anything out.  */
  if (!odr_hash.is_created ())
    return true;
  targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
  for (i = 0; i < targets.length (); i++)
    if (symtab_semantically_equivalent_p (n, targets[i]))
      return true;

  /* At a moment we allow middle end to dig out new external declarations
     as a targets of polymorphic calls.  */
  if (!final && !n->definition)
    return true;
  return false;
}
1744
1745
1746 /* After callgraph construction new external nodes may appear.
1747 Add them into the graph. */
1748
1749 void
1750 update_type_inheritance_graph (void)
1751 {
1752 struct cgraph_node *n;
1753
1754 if (!odr_hash.is_created ())
1755 return;
1756 free_polymorphic_call_targets_hash ();
1757 timevar_push (TV_IPA_INHERITANCE);
1758 /* We reconstruct the graph starting from types of all methods seen in the
1759 the unit. */
1760 FOR_EACH_FUNCTION (n)
1761 if (DECL_VIRTUAL_P (n->decl)
1762 && !n->definition
1763 && symtab_real_symbol_p (n))
1764 get_odr_type (method_class_type (TREE_TYPE (n->decl)), true);
1765 timevar_pop (TV_IPA_INHERITANCE);
1766 }
1767
1768
1769 /* Return true if N looks like likely target of a polymorphic call.
1770 Rule out cxa_pure_virtual, noreturns, function declared cold and
1771 other obvious cases. */
1772
1773 bool
1774 likely_target_p (struct cgraph_node *n)
1775 {
1776 int flags;
1777 /* cxa_pure_virtual and similar things are not likely. */
1778 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
1779 return false;
1780 flags = flags_from_decl_or_type (n->decl);
1781 if (flags & ECF_NORETURN)
1782 return false;
1783 if (lookup_attribute ("cold",
1784 DECL_ATTRIBUTES (n->decl)))
1785 return false;
1786 if (n->frequency < NODE_FREQUENCY_NORMAL)
1787 return false;
1788 /* If there are no virtual tables refering the target alive,
1789 the only way the target can be called is an instance comming from other
1790 compilation unit; speculative devirtualization is build around an
1791 assumption that won't happen. */
1792 if (!referenced_from_vtable_p (n))
1793 return false;
1794 return true;
1795 }
1796
/* The ipa-devirt pass.
   When polymorphic call has only one likely target in the unit,
   turn it into speculative call.  */

static unsigned int
ipa_devirt (void)
{
  struct cgraph_node *n;
  struct pointer_set_t *bad_call_targets = pointer_set_create ();
  struct cgraph_edge *e;

  /* Statistics reported to the dump file at the end of the pass.  */
  int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
  int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
  int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;

  FOR_EACH_DEFINED_FUNCTION (n)
    {
      bool update = false;
      if (dump_file && n->indirect_calls)
	fprintf (dump_file, "\n\nProcesing function %s/%i\n",
		 n->name (), n->order);
      for (e = n->indirect_calls; e; e = e->next_callee)
	if (e->indirect_info->polymorphic)
	  {
	    struct cgraph_node *likely_target = NULL;
	    void *cache_token;
	    bool final;
	    int nonconstruction_targets;
	    vec <cgraph_node *>targets
	       = possible_polymorphic_call_targets
		    (e, &final, &cache_token, &nonconstruction_targets);
	    unsigned int i;

	    if (dump_file)
	      dump_possible_polymorphic_call_targets
		(dump_file, e);

	    npolymorphic++;

	    /* Only speculate on calls expected to be hot.  */
	    if (!cgraph_maybe_hot_edge_p (e))
	      {
		if (dump_file)
		  fprintf (dump_file, "Call is cold\n\n");
		ncold++;
		continue;
	      }
	    if (e->speculative)
	      {
		if (dump_file)
		  fprintf (dump_file, "Call is aready speculated\n\n");
		nspeculated++;

		/* When dumping see if we agree with speculation.  */
		if (!dump_file)
		  continue;
	      }
	    /* CACHE_TOKEN identifies the (shared) target list; skip
	       lists we already rejected.  */
	    if (pointer_set_contains (bad_call_targets,
				      cache_token))
	      {
		if (dump_file)
		  fprintf (dump_file, "Target list is known to be useless\n\n");
		nmultiple++;
		continue;
	      }
	    /* Look for a single likely target.  A second likely target
	       among the non-construction targets disqualifies the call;
	       a second one among construction-only targets does not.  */
	    for (i = 0; i < targets.length (); i++)
	      if (likely_target_p (targets[i]))
		{
		  if (likely_target)
		    {
		      if (i < (unsigned) nonconstruction_targets)
			{
			  likely_target = NULL;
			  if (dump_file)
			    fprintf (dump_file, "More than one likely target\n\n");
			  nmultiple++;
			}
		      break;
		    }
		  likely_target = targets[i];
		}
	    if (!likely_target)
	      {
		pointer_set_insert (bad_call_targets, cache_token);
	        continue;
	      }
	    /* This is reached only when dumping; check if we agree or disagree
 	       with the speculation.  */
	    if (e->speculative)
	      {
		struct cgraph_edge *e2;
		struct ipa_ref *ref;
		cgraph_speculative_call_info (e, e2, e, ref);
		if (cgraph_function_or_thunk_node (e2->callee, NULL)
		    == cgraph_function_or_thunk_node (likely_target, NULL))
		  {
		    fprintf (dump_file, "We agree with speculation\n\n");
		    nok++;
		  }
		else
		  {
		    fprintf (dump_file, "We disagree with speculation\n\n");
		    nwrong++;
		  }
		continue;
	      }
	    if (!likely_target->definition)
	      {
		if (dump_file)
		  fprintf (dump_file, "Target is not an definition\n\n");
		nnotdefined++;
		continue;
	      }
	    /* Do not introduce new references to external symbols.  While we
	       can handle these just well, it is common for programs to
	       incorrectly with headers defining methods they are linked
	       with.  */
	    if (DECL_EXTERNAL (likely_target->decl))
	      {
		if (dump_file)
		  fprintf (dump_file, "Target is external\n\n");
		nexternal++;
		continue;
	      }
	    /* Don't use an implicitly-declared destructor (c++/58678).  */
	    struct cgraph_node *non_thunk_target
	      = cgraph_function_node (likely_target);
	    if (DECL_ARTIFICIAL (non_thunk_target->decl)
		&& DECL_COMDAT (non_thunk_target->decl))
	      {
		if (dump_file)
		  fprintf (dump_file, "Target is artificial\n\n");
		nartificial++;
		continue;
	      }
	    if (cgraph_function_body_availability (likely_target)
		  <= AVAIL_OVERWRITABLE
		&& symtab_can_be_discarded (likely_target))
	      {
		if (dump_file)
		  fprintf (dump_file, "Target is overwritable\n\n");
		noverwritable++;
		continue;
	      }
	    else
	      {
		if (dump_file)
		  fprintf (dump_file,
			   "Speculatively devirtualizing call in %s/%i to %s/%i\n\n",
			   n->name (), n->order,
			   likely_target->name (),
			   likely_target->order);
		/* NOTE(review): for targets that can not be discarded,
		   reference a non-overwritable alias instead —
		   presumably to keep the new direct reference stable;
		   confirm against symtab_nonoverwritable_alias docs.  */
		if (!symtab_can_be_discarded (likely_target))
		  {
		    cgraph_node *alias;
		    alias = cgraph (symtab_nonoverwritable_alias
				     (likely_target));
		    if (alias)
		      likely_target = alias;
		  }
		nconverted++;
		update = true;
		/* Speculate with 80% of the edge's count/frequency.  */
		cgraph_turn_edge_to_speculative
		  (e, likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
	      }
	  }
      /* Inline summaries must be recomputed for functions whose calls
	 were rewritten.  */
      if (update)
	inline_update_overall_summary (n);
    }
  pointer_set_destroy (bad_call_targets);

  if (dump_file)
    fprintf (dump_file,
	     "%i polymorphic calls, %i devirtualized,"
	     " %i speculatively devirtualized, %i cold\n"
	     "%i have multiple targets, %i overwritable,"
	     " %i already speculated (%i agree, %i disagree),"
	     " %i external, %i not defined, %i artificial\n",
	     npolymorphic, ndevirtualized, nconverted, ncold,
	     nmultiple, noverwritable, nspeculated, nok, nwrong,
	     nexternal, nnotdefined, nartificial);
  return ndevirtualized ? TODO_remove_functions : 0;
}
1979
1980 /* Gate for speculative IPA devirtualization optimization. */
1981
1982 static bool
1983 gate_ipa_devirt (void)
1984 {
1985 return (flag_devirtualize
1986 && flag_devirtualize_speculatively
1987 && optimize);
1988 }
1989
namespace {

/* Pass metadata for the speculative IPA devirtualization pass.  */

const pass_data pass_data_ipa_devirt =
{
  IPA_PASS, /* type */
  "devirt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_IPA_DEVIRT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_dump_symtab ), /* todo_flags_finish */
};

/* Pass wrapper.  No LTO summaries or per-function transforms are
   needed; all work happens in execute via ipa_devirt.  */

class pass_ipa_devirt : public ipa_opt_pass_d
{
public:
  pass_ipa_devirt (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  bool gate () { return gate_ipa_devirt (); }
  unsigned int execute () { return ipa_devirt (); }

}; // class pass_ipa_devirt

} // anon namespace
2030
/* Create a new instance of the speculative IPA devirtualization pass
   for pass manager context CTXT.  */

ipa_opt_pass_d *
make_pass_ipa_devirt (gcc::context *ctxt)
{
  return new pass_ipa_devirt (ctxt);
}
2036
2037 #include "gt-ipa-devirt.h"