1 /* Basic IPA utilities for type inheritance graph construction and
2 devirtualization.
3 Copyright (C) 2013-2014 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* Brief vocabulary:
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
38
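     As an illustrative example (hypothetical code, not from any
     testcase), the following two translation units violate point 3,
     because the two definitions of S differ:

       // tu1.C
       struct S { virtual int f () { return 1; } };
       // tu2.C
       struct S { virtual int f () { return 2; } long pad; };

     When both copies survive into a single LTO link,
     add_type_duplicate below detects the layout mismatch and warns.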
39 OTR = OBJ_TYPE_REF
40 This is the GIMPLE representation of the type information of a polymorphic call.
41 It contains two parameters:
42 otr_type is the type of the class whose method is called.
43 otr_token is the index into the virtual table where the address is taken.
44
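     For example (an illustrative sketch; the exact dump syntax varies
     between releases), a C++ call p->f (), where p is of type A *,
     appears in the GIMPLE dumps roughly as

       _2 = p_1->_vptr.A;
       _3 = *_2;
       OBJ_TYPE_REF (_3; (struct A) p_1->0) (p_1);

     with otr_type being A and otr_token being 0, the index of f in
     A's virtual table.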
45 BINFO
46 This is the type inheritance information attached to each tree
47 RECORD_TYPE by the C++ frontend. It provides information about base
48 types and virtual tables.
49
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
52 BINFO_VTABLE.
53
54 Base types of a given type are enumerated by the BINFO_BASE_BINFO
55 vector. Members of this vector are not the BINFOs associated
56 with the base types. Rather they are new copies of those BINFOs
57 (base BINFOs). Their virtual tables may differ from the
58 virtual table of the base type. Also BINFO_OFFSET specifies the
59 offset of the base within the type.
60
61 In the case of single inheritance, the virtual table is shared
62 and BINFO_VTABLE of the base BINFO is NULL. In the case of multiple
63 inheritance the individual virtual tables are pointed to by
64 BINFO_VTABLE of the base binfos (which differs from BINFO_VTABLE of
65 the binfo associated with the base type).
66
67 A BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns the proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
70 base type.
71
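     Illustrative example (hypothetical code):

       struct A { virtual void f (); };
       struct B { virtual void g (); };
       struct C : A, B { };

     TYPE_BINFO (C) has two base BINFOs. The copy for A is C's primary
     base and shares C's virtual table, while the copy for B has its
     own BINFO_VTABLE and a non-zero BINFO_OFFSET, and
     get_binfo_at_offset (TYPE_BINFO (C), offset-of-B, type-of-B)
     returns that copy.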
72 token
73 This is the index of a virtual method in the virtual table associated
74 with the type defining it. The token can be looked up from OBJ_TYPE_REF
75 or from DECL_VINDEX of a given virtual method.
76
77 polymorphic (indirect) call
78 This is the callgraph representation of a virtual method call. Every
79 polymorphic call contains otr_type and otr_token taken from the
80 original OBJ_TYPE_REF at callgraph construction time.
81
82 What we do here:
83
84 build_type_inheritance_graph triggers a construction of the type inheritance
85 graph.
86
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
89 inserted into the graph. Also types without virtual methods are not
90 represented at all, though it may be easy to add this.
91
92 The inheritance graph is represented as follows:
93
94 Vertices are structures odr_type. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by ODR rule.
96 (the multiple type nodes appear only with link-time optimization)
97
98 Edges are represented by odr_type->bases and odr_type->derived_types.
99 At the moment we do not track offsets of types for multiple inheritance.
100 Adding this is easy.
101
102 possible_polymorphic_call_targets returns, given the parameters found in
103 an indirect polymorphic edge, all possible polymorphic call targets of the call.
104
105 pass_ipa_devirt performs simple speculative devirtualization.
106 */
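/* An illustrative sketch of the intended use of this interface
   (hypothetical client code; the declarations live in ipa-utils.h):

     build_type_inheritance_graph ();
     ...
     bool final;
     vec <cgraph_node *> targets
       = possible_polymorphic_call_targets (otr_type, otr_token,
					    context, &final);
     if (final && targets.length () == 1)
       ;  // the call may be devirtualized to targets[0]->decl
*/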
107
108 #include "config.h"
109 #include "system.h"
110 #include "coretypes.h"
111 #include "tm.h"
112 #include "tree.h"
113 #include "print-tree.h"
114 #include "calls.h"
115 #include "cgraph.h"
116 #include "expr.h"
117 #include "tree-pass.h"
118 #include "pointer-set.h"
119 #include "target.h"
120 #include "hash-table.h"
121 #include "tree-pretty-print.h"
122 #include "ipa-utils.h"
123 #include "tree-ssa-alias.h"
124 #include "internal-fn.h"
125 #include "gimple-fold.h"
126 #include "gimple-expr.h"
127 #include "gimple.h"
128 #include "ipa-inline.h"
129 #include "diagnostic.h"
130 #include "tree-dfa.h"
131 #include "demangle.h"
132 #include "dbgcnt.h"
133
134 static bool odr_violation_reported = false;
135
136 /* Dummy polymorphic call context. */
137
138 const ipa_polymorphic_call_context ipa_dummy_polymorphic_call_context
139 = {0, NULL, false, true};
140
141 /* Pointer set of all call targets appearing in the cache. */
142 static pointer_set_t *cached_polymorphic_call_targets;
143
144 /* The node of the type inheritance graph. For each type unique in
145 the One Definition Rule (ODR) sense, we produce one node linking all
146 main variants of types equivalent to it, bases and derived types. */
147
148 struct GTY(()) odr_type_d
149 {
150 /* Leader type. */
151 tree type;
152 /* All bases; built only for main variants of types. */
153 vec<odr_type> GTY((skip)) bases;
154 /* All derived types with virtual methods seen in the unit;
155 built only for main variants of types. */
156 vec<odr_type> GTY((skip)) derived_types;
157
158 /* All equivalent types, if more than one. */
159 vec<tree, va_gc> *types;
160 /* Set of all equivalent types, if NON-NULL. */
161 pointer_set_t * GTY((skip)) types_set;
162
163 /* Unique ID indexing the type in odr_types array. */
164 int id;
165 /* Is it in anonymous namespace? */
166 bool anonymous_namespace;
167 /* Do we know about all derivations of given type? */
168 bool all_derivations_known;
169 /* Did we report ODR violation here? */
170 bool odr_violated;
171 };
172
173
174 /* Return true if BINFO corresponds to a type with virtual methods.
175
176 Every type has several BINFOs. One is the BINFO associated with the type
177 itself, while the others represent bases of derived types. The BINFOs
178 representing bases do not have the BINFO_VTABLE pointer set in the single
179 inheritance case (because vtables are shared). Look up the BINFO of the type
180 and check the presence of its vtable. */
181
182 static inline bool
183 polymorphic_type_binfo_p (tree binfo)
184 {
185 /* See if BINFO's type has a virtual table associated with it. */
186 return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo)));
187 }
188
189 /* Return TRUE if all derived types of T are known and thus
190 we may consider the walk of derived type complete.
191
192 This is typically true only for final anonymous namespace types and types
193 defined within functions (that may be COMDAT and thus shared across units,
194 but with the same set of derived types). */
195
196 static bool
197 type_all_derivations_known_p (tree t)
198 {
199 if (TYPE_FINAL_P (t))
200 return true;
201 if (flag_ltrans)
202 return false;
203 if (type_in_anonymous_namespace_p (t))
204 return true;
205 return (decl_function_context (TYPE_NAME (t)) != NULL);
206 }
207
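/* Example (illustrative): all derivations of

     struct F final { virtual void f (); };

   or of any class in an anonymous namespace are known, while an
   exported non-final class may gain new derived types in other
   translation units. */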
208 /* Return TRUE if the type's constructors are all visible. */
209
210 static bool
211 type_all_ctors_visible_p (tree t)
212 {
213 return !flag_ltrans
214 && cgraph_state >= CGRAPH_STATE_CONSTRUCTION
215 /* We can not always use type_all_derivations_known_p.
216 For function local types we must assume the case where
217 the function is COMDAT and shared in between units.
218
219 TODO: These cases are quite easy to get, but we need
220 to keep track of C++ privatizing via -Wno-weak
221 as well as the IPA privatizing. */
222 && type_in_anonymous_namespace_p (t);
223 }
224
225 /* Return TRUE if the type may have instances. */
226
227 static bool
228 type_possibly_instantiated_p (tree t)
229 {
230 tree vtable;
231 varpool_node *vnode;
232
233 /* TODO: Add abstract types here. */
234 if (!type_all_ctors_visible_p (t))
235 return true;
236
237 vtable = BINFO_VTABLE (TYPE_BINFO (t));
238 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
239 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
240 vnode = varpool_get_node (vtable);
241 return vnode && vnode->definition;
242 }
243
244 /* One Definition Rule hashtable helpers. */
245
246 struct odr_hasher
247 {
248 typedef odr_type_d value_type;
249 typedef union tree_node compare_type;
250 static inline hashval_t hash (const value_type *);
251 static inline bool equal (const value_type *, const compare_type *);
252 static inline void remove (value_type *);
253 };
254
255 /* Return the type that was declared with T's name so that T is a
256 qualified variant of it. */
257
258 static inline tree
259 main_odr_variant (const_tree t)
260 {
261 if (TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
262 return TREE_TYPE (TYPE_NAME (t));
263 /* Unnamed types and non-C++ produced types can be compared by variants. */
264 else
265 return TYPE_MAIN_VARIANT (t);
266 }
267
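/* Example (illustrative): for a qualified variant such as `const A'
   or `volatile A', this returns the type A as it was declared; for
   unnamed types it falls back to TYPE_MAIN_VARIANT. */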
268 /* Produce hash based on type name. */
269
270 static hashval_t
271 hash_type_name (tree t)
272 {
273 gcc_checking_assert (main_odr_variant (t) == t);
274
275 /* If not in LTO, all main variants are unique, so we can do
276 pointer hash. */
277 if (!in_lto_p)
278 return htab_hash_pointer (t);
279
280 /* Anonymous types are unique. */
281 if (type_in_anonymous_namespace_p (t))
282 return htab_hash_pointer (t);
283
284 /* For polymorphic types, we can simply hash the virtual table. */
285 if (TREE_CODE (t) == RECORD_TYPE
286 && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
287 {
288 tree v = BINFO_VTABLE (TYPE_BINFO (t));
289 hashval_t hash = 0;
290
291 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
292 {
293 hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
294 v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
295 }
296
297 v = DECL_ASSEMBLER_NAME (v);
298 hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
299 return hash;
300 }
301
302 /* Rest is not implemented yet. */
303 gcc_unreachable ();
304 }
305
306 /* Return the computed hashcode for ODR_TYPE. */
307
308 inline hashval_t
309 odr_hasher::hash (const value_type *odr_type)
310 {
311 return hash_type_name (odr_type->type);
312 }
313
314 /* For languages with One Definition Rule, work out if
315 types are the same based on their name.
316
317 This is non-trivial for LTO where minor differences in
318 the type representation may have prevented type merging
319 from merging two copies of an otherwise equivalent type.
320
321 Until we start streaming mangled type names, this function works
322 only for polymorphic types. */
323
324 bool
325 types_same_for_odr (const_tree type1, const_tree type2)
326 {
327 gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));
328
329 type1 = main_odr_variant (type1);
330 type2 = main_odr_variant (type2);
331
332 if (type1 == type2)
333 return true;
334
335 if (!in_lto_p)
336 return false;
337
338 /* Check for anonymous namespaces. Those have !TREE_PUBLIC
339 on the corresponding TYPE_STUB_DECL. */
340 if (type_in_anonymous_namespace_p (type1)
341 || type_in_anonymous_namespace_p (type2))
342 return false;
343
344 /* At the moment we have no way to establish ODR equivalence at LTO
345 other than comparing virtual table pointers of polymorphic types.
346 Eventually we should start saving mangled names in TYPE_NAME.
347 Then this condition will become non-trivial. */
348
349 if (TREE_CODE (type1) == RECORD_TYPE
350 && TYPE_BINFO (type1) && TYPE_BINFO (type2)
351 && BINFO_VTABLE (TYPE_BINFO (type1))
352 && BINFO_VTABLE (TYPE_BINFO (type2)))
353 {
354 tree v1 = BINFO_VTABLE (TYPE_BINFO (type1));
355 tree v2 = BINFO_VTABLE (TYPE_BINFO (type2));
356 gcc_assert (TREE_CODE (v1) == POINTER_PLUS_EXPR
357 && TREE_CODE (v2) == POINTER_PLUS_EXPR);
358 return (operand_equal_p (TREE_OPERAND (v1, 1),
359 TREE_OPERAND (v2, 1), 0)
360 && DECL_ASSEMBLER_NAME
361 (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
362 == DECL_ASSEMBLER_NAME
363 (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
364 }
365 gcc_unreachable ();
366 }
367
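/* Example (illustrative): under LTO, two unmerged tree copies of a
   polymorphic `struct A' compare equal here when both of their
   BINFO_VTABLEs point to the vtable symbol _ZTV1A at the same offset,
   since the test above reduces to DECL_ASSEMBLER_NAME identity plus
   equal offsets. */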
368
369 /* Compare types T1 and T2 and return true if they are
370 equivalent. */
371
372 inline bool
373 odr_hasher::equal (const value_type *t1, const compare_type *ct2)
374 {
375 tree t2 = const_cast <tree> (ct2);
376
377 gcc_checking_assert (main_odr_variant (t2) == t2);
378 if (t1->type == t2)
379 return true;
380 if (!in_lto_p)
381 return false;
382 return types_same_for_odr (t1->type, t2);
383 }
384
385 /* Free ODR type V. */
386
387 inline void
388 odr_hasher::remove (value_type *v)
389 {
390 v->bases.release ();
391 v->derived_types.release ();
392 if (v->types_set)
393 pointer_set_destroy (v->types_set);
394 ggc_free (v);
395 }
396
397 /* ODR type hash used to look up an ODR type based on a tree type node. */
398
399 typedef hash_table<odr_hasher> odr_hash_type;
400 static odr_hash_type *odr_hash;
401
402 /* ODR types are also stored into ODR_TYPE vector to allow consistent
403 walking. Bases appear before derived types. Vector is garbage collected
404 so we won't end up visiting empty types. */
405
406 static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
407 #define odr_types (*odr_types_ptr)
408
409 /* Set TYPE_BINFO of TYPE and its variants to BINFO. */
410 void
411 set_type_binfo (tree type, tree binfo)
412 {
413 for (; type; type = TYPE_NEXT_VARIANT (type))
414 if (COMPLETE_TYPE_P (type))
415 TYPE_BINFO (type) = binfo;
416 else
417 gcc_assert (!TYPE_BINFO (type));
418 }
419
420 /* TYPE is equivalent to VAL by ODR, but its tree representation differs
421 from VAL->type. This may happen in LTO where tree merging did not merge
422 all variants of the same type. It may or may not mean an ODR violation.
423 Add it to the list of duplicates and warn on some violations. */
424
425 static bool
426 add_type_duplicate (odr_type val, tree type)
427 {
428 bool build_bases = false;
429 if (!val->types_set)
430 val->types_set = pointer_set_create ();
431
432 /* Always prefer complete type to be the leader. */
433 if (!COMPLETE_TYPE_P (val->type)
434 && COMPLETE_TYPE_P (type))
435 {
436 tree tmp = type;
437
438 build_bases = true;
439 type = val->type;
440 val->type = tmp;
441 }
442
443 /* See if this duplicate is new. */
444 if (!pointer_set_insert (val->types_set, type))
445 {
446 bool merge = true;
447 bool base_mismatch = false;
448 bool warned = false;
449 unsigned int i,j;
450
451 gcc_assert (in_lto_p);
452 vec_safe_push (val->types, type);
453
454 /* First we compare memory layout. */
455 if (!types_compatible_p (val->type, type))
456 {
457 merge = false;
458 odr_violation_reported = true;
459 if (BINFO_VTABLE (TYPE_BINFO (val->type))
460 && warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
461 "type %qD violates one definition rule ",
462 type))
463 {
464 inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
465 "a type with the same name but different layout is "
466 "defined in another translation unit");
467 warned = true;
468 }
469 val->odr_violated = true;
470 if (cgraph_dump_file)
471 {
472 fprintf (cgraph_dump_file, "ODR violation, merging bug, or ODR type bug?\n");
473
474 print_node (cgraph_dump_file, "", val->type, 0);
475 putc ('\n',cgraph_dump_file);
476 print_node (cgraph_dump_file, "", type, 0);
477 putc ('\n',cgraph_dump_file);
478 }
479 }
480
481 /* Next, sanity check that the bases are the same. If not, we will end
482 up producing wrong answers. */
483 if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
484 && TREE_CODE (val->type) == RECORD_TYPE
485 && TREE_CODE (type) == RECORD_TYPE
486 && TYPE_BINFO (val->type) && TYPE_BINFO (type))
487 {
488 for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
489 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type), i)))
490 {
491 odr_type base = get_odr_type
492 (BINFO_TYPE
493 (BINFO_BASE_BINFO (TYPE_BINFO (type),
494 i)),
495 true);
496 if (val->bases.length () <= j || val->bases[j] != base)
497 base_mismatch = true;
498 j++;
499 }
500 if (base_mismatch)
501 {
502 merge = false;
503 odr_violation_reported = true;
504
505 if (!warned
506 && warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
507 "type %qD violates one definition rule ",
508 type))
509 inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
510 "a type with the same name but different bases is "
511 "defined in another translation unit");
512 val->odr_violated = true;
513 if (cgraph_dump_file)
514 {
515 fprintf (cgraph_dump_file, "ODR base violation or merging bug?\n");
516
517 print_node (cgraph_dump_file, "", val->type, 0);
518 putc ('\n',cgraph_dump_file);
519 print_node (cgraph_dump_file, "", type, 0);
520 putc ('\n',cgraph_dump_file);
521 }
522 }
523 }
524
525 /* Regularize things a little. During LTO the same types may come with
526 different BINFOs. Either because their virtual table was
527 not merged by tree merging and only later at decl merging, or
528 because one type comes with an external vtable, while the other
529 comes with an internal one. We want to merge equivalent binfos to conserve
530 memory and streaming overhead.
531
532 The external vtables are more harmful: they contain references
533 to external declarations of methods that may be defined in the
534 merged LTO unit. For this reason we absolutely need to remove
535 them and replace them by internal variants. Not doing so will lead
536 to incomplete answers from possible_polymorphic_call_targets. */
537 if (!flag_ltrans && merge
538 && TREE_CODE (val->type) == RECORD_TYPE
539 && TREE_CODE (type) == RECORD_TYPE
540 && TYPE_BINFO (val->type) && TYPE_BINFO (type)
541 && TYPE_MAIN_VARIANT (type) == type
542 && TYPE_MAIN_VARIANT (val->type) == val->type
543 && BINFO_VTABLE (TYPE_BINFO (val->type))
544 && BINFO_VTABLE (TYPE_BINFO (type)))
545 {
546 tree master_binfo = TYPE_BINFO (val->type);
547 tree v1 = BINFO_VTABLE (master_binfo);
548 tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
549
550 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
551 {
552 gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
553 && operand_equal_p (TREE_OPERAND (v1, 1),
554 TREE_OPERAND (v2, 1), 0));
555 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
556 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
557 }
558 gcc_assert (DECL_ASSEMBLER_NAME (v1)
559 == DECL_ASSEMBLER_NAME (v2));
560
561 if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
562 {
563 unsigned int i;
564
565 set_type_binfo (val->type, TYPE_BINFO (type));
566 for (i = 0; i < val->types->length (); i++)
567 {
568 if (TYPE_BINFO ((*val->types)[i])
569 == master_binfo)
570 set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
571 }
572 BINFO_TYPE (TYPE_BINFO (type)) = val->type;
573 }
574 else
575 set_type_binfo (type, master_binfo);
576 }
577 }
578 return build_bases;
579 }
580
581 /* Get ODR type hash entry for TYPE. If INSERT is true, create
582 possibly new entry. */
583
584 odr_type
585 get_odr_type (tree type, bool insert)
586 {
587 odr_type_d **slot;
588 odr_type val;
589 hashval_t hash;
590 bool build_bases = false;
591 bool insert_to_odr_array = false;
592 int base_id = -1;
593
594 type = main_odr_variant (type);
595
596 hash = hash_type_name (type);
597 slot
598 = odr_hash->find_slot_with_hash (type, hash, insert ? INSERT : NO_INSERT);
599 if (!slot)
600 return NULL;
601
602 /* See if we already have entry for type. */
603 if (*slot)
604 {
605 val = *slot;
606
607 /* With LTO we need to support multiple tree representation of
608 the same ODR type. */
609 if (val->type != type)
610 build_bases = add_type_duplicate (val, type);
611 }
612 else
613 {
614
615 val = ggc_cleared_alloc<odr_type_d> ();
616 val->type = type;
617 val->bases = vNULL;
618 val->derived_types = vNULL;
619 val->anonymous_namespace = type_in_anonymous_namespace_p (type);
620 build_bases = COMPLETE_TYPE_P (val->type);
621 insert_to_odr_array = true;
622 }
623
624 if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
625 && type == TYPE_MAIN_VARIANT (type))
626 {
627 tree binfo = TYPE_BINFO (type);
628 unsigned int i;
629
630 gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) == type);
631
632 val->all_derivations_known = type_all_derivations_known_p (type);
633 *slot = val;
634 for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
635 /* For now record only polymorphic types. Others are
636 pointless for devirtualization and we can not precisely
637 determine ODR equivalency of these during LTO. */
638 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
639 {
640 odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
641 i)),
642 true);
643 gcc_assert (TYPE_MAIN_VARIANT (base->type) == base->type);
644 base->derived_types.safe_push (val);
645 val->bases.safe_push (base);
646 if (base->id > base_id)
647 base_id = base->id;
648 }
649 }
650 /* Ensure that type always appears after bases. */
651 if (insert_to_odr_array)
652 {
653 if (odr_types_ptr)
654 val->id = odr_types.length ();
655 vec_safe_push (odr_types_ptr, val);
656 }
657 else if (base_id > val->id)
658 {
659 odr_types[val->id] = 0;
660 /* Be sure we did not record any derived types; these may need
661 renumbering too. */
662 gcc_assert (val->derived_types.length() == 0);
663 if (odr_types_ptr)
664 val->id = odr_types.length ();
665 vec_safe_push (odr_types_ptr, val);
666 }
667 return val;
668 }
669
670 /* Dump ODR type T and all its derived types. INDENT specifies indentation
671 for recursive printing. */
672
673 static void
674 dump_odr_type (FILE *f, odr_type t, int indent=0)
675 {
676 unsigned int i;
677 fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
678 print_generic_expr (f, t->type, TDF_SLIM);
679 fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
680 fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
681 if (TYPE_NAME (t->type))
682 {
683 fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
684 DECL_SOURCE_FILE (TYPE_NAME (t->type)),
685 DECL_SOURCE_LINE (TYPE_NAME (t->type)));
686 }
687 if (t->bases.length ())
688 {
689 fprintf (f, "%*s base odr type ids: ", indent * 2, "");
690 for (i = 0; i < t->bases.length (); i++)
691 fprintf (f, " %i", t->bases[i]->id);
692 fprintf (f, "\n");
693 }
694 if (t->derived_types.length ())
695 {
696 fprintf (f, "%*s derived types:\n", indent * 2, "");
697 for (i = 0; i < t->derived_types.length (); i++)
698 dump_odr_type (f, t->derived_types[i], indent + 1);
699 }
700 fprintf (f, "\n");
701 }
702
703 /* Dump the type inheritance graph. */
704
705 static void
706 dump_type_inheritance_graph (FILE *f)
707 {
708 unsigned int i;
709 if (!odr_types_ptr)
710 return;
711 fprintf (f, "\n\nType inheritance graph:\n");
712 for (i = 0; i < odr_types.length (); i++)
713 {
714 if (odr_types[i] && odr_types[i]->bases.length () == 0)
715 dump_odr_type (f, odr_types[i]);
716 }
717 for (i = 0; i < odr_types.length (); i++)
718 {
719 if (odr_types[i] && odr_types[i]->types && odr_types[i]->types->length ())
720 {
721 unsigned int j;
722 fprintf (f, "Duplicate tree types for odr type %i\n", i);
723 print_node (f, "", odr_types[i]->type, 0);
724 for (j = 0; j < odr_types[i]->types->length (); j++)
725 {
726 tree t;
727 fprintf (f, "duplicate #%i\n", j);
728 print_node (f, "", (*odr_types[i]->types)[j], 0);
729 t = (*odr_types[i]->types)[j];
730 while (TYPE_P (t) && TYPE_CONTEXT (t))
731 {
732 t = TYPE_CONTEXT (t);
733 print_node (f, "", t, 0);
734 }
735 putc ('\n',f);
736 }
737 }
738 }
739 }
740
741 /* Given method type T, return the type of the class it belongs to.
742 Look up the `this' pointer and get its type. */
743
744 tree
745 method_class_type (const_tree t)
746 {
747 tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
748 gcc_assert (TREE_CODE (t) == METHOD_TYPE);
749
750 return TREE_TYPE (first_parm_type);
751 }
752
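/* Example (illustrative): for `void A::f ()', TREE_TYPE (f_decl) is a
   METHOD_TYPE whose first argument type is `A *', so
   method_class_type (TREE_TYPE (f_decl)) returns the RECORD_TYPE A. */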
753 /* Initialize IPA devirt and build the type inheritance graph. */
754
755 void
756 build_type_inheritance_graph (void)
757 {
758 struct symtab_node *n;
759 FILE *inheritance_dump_file;
760 int flags;
761
762 if (odr_hash)
763 return;
764 timevar_push (TV_IPA_INHERITANCE);
765 inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
766 odr_hash = new odr_hash_type (23);
767
768 /* We reconstruct the graph starting from the types of all methods seen
769 in the unit. */
770 FOR_EACH_SYMBOL (n)
771 if (is_a <cgraph_node *> (n)
772 && DECL_VIRTUAL_P (n->decl)
773 && symtab_real_symbol_p (n))
774 get_odr_type (TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (n->decl))),
775 true);
776
777 /* Look also for virtual tables of types that do not define any methods.
778
779 We need it in the case where class B has a virtual base of class A
780 re-defining its virtual method and there is a class C with no virtual
781 methods with B as a virtual base.
782
783 Here we output B's virtual method in two variants - for non-virtual
784 and virtual inheritance. B's virtual table has the non-virtual version,
785 while C's has the virtual one.
786
787 For this reason we need to know about C in order to include both
788 variants of B. More correctly, record_target_from_binfo should
789 add both variants of the method when walking B, but we have no
790 link in between them.
791
792 We rely on the fact that either the method is exported and thus we
793 assume it is called externally, or C is in an anonymous namespace and
794 thus we will see the vtable. */
795
796 else if (is_a <varpool_node *> (n)
797 && DECL_VIRTUAL_P (n->decl)
798 && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
799 && TYPE_BINFO (DECL_CONTEXT (n->decl))
800 && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
801 get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true);
802 if (inheritance_dump_file)
803 {
804 dump_type_inheritance_graph (inheritance_dump_file);
805 dump_end (TDI_inheritance, inheritance_dump_file);
806 }
807 timevar_pop (TV_IPA_INHERITANCE);
808 }
809
810 /* Return true if N has a reference from a live virtual table
811 (and thus can be a destination of a polymorphic call).
812 Be conservatively correct when the callgraph is not built or
813 if the method may be referred to externally. */
814
815 static bool
816 referenced_from_vtable_p (struct cgraph_node *node)
817 {
818 int i;
819 struct ipa_ref *ref;
820 bool found = false;
821
822 if (node->externally_visible
823 || node->used_from_other_partition)
824 return true;
825
826 /* Keep this test constant time.
827 It is unlikely this can happen except for the case where speculative
828 devirtualization introduced many speculative edges to this node.
829 In this case the target is very likely alive anyway. */
830 if (node->ref_list.referring.length () > 100)
831 return true;
832
833 /* We need references built. */
834 if (cgraph_state <= CGRAPH_STATE_CONSTRUCTION)
835 return true;
836
837 for (i = 0; node->iterate_referring (i, ref); i++)
838
839 if ((ref->use == IPA_REF_ALIAS
840 && referenced_from_vtable_p (cgraph (ref->referring)))
841 || (ref->use == IPA_REF_ADDR
842 && TREE_CODE (ref->referring->decl) == VAR_DECL
843 && DECL_VIRTUAL_P (ref->referring->decl)))
844 {
845 found = true;
846 break;
847 }
848 return found;
849 }
850
851 /* If TARGET has an associated node, record it in the NODES array.
852 CAN_REFER specifies whether the program can refer to the target directly.
853 If TARGET is unknown (NULL) or it can not be inserted (for example because
854 its body was already removed and there is no way to refer to it), clear
855 COMPLETEP. */
856
857 static void
858 maybe_record_node (vec <cgraph_node *> &nodes,
859 tree target, pointer_set_t *inserted,
860 bool can_refer,
861 bool *completep)
862 {
863 struct cgraph_node *target_node, *alias_target;
864 enum availability avail;
865
866 /* cxa_pure_virtual and __builtin_unreachable do not need to be added into
867 the list of targets; the runtime effect of calling them is undefined.
868 Only "real" virtual methods should be accounted for. */
869 if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE)
870 return;
871
872 if (!can_refer)
873 {
874 /* The only case when a method of an anonymous namespace becomes unreferable
875 is when we completely optimized it out. */
876 if (flag_ltrans
877 || !target
878 || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
879 *completep = false;
880 return;
881 }
882
883 if (!target)
884 return;
885
886 target_node = cgraph_get_node (target);
887
888 /* Prefer the alias target over aliases, so we do not get confused by
889 fake duplicates. */
890 if (target_node)
891 {
892 alias_target = cgraph_function_or_thunk_node (target_node, &avail);
893 if (target_node != alias_target
894 && avail >= AVAIL_AVAILABLE
895 && cgraph_function_body_availability (target_node))
896 target_node = alias_target;
897 }
898
899 /* A method can only be called by a polymorphic call if any
900 of the vtables referring to it are alive.
901
902 While this holds for non-anonymous functions, too, there are
903 cases where we want to keep them in the list; for example
904 inline functions with -fno-weak are static, but we still
905 may devirtualize them when instance comes from other unit.
906 The same holds for LTO.
907
908 Currently we ignore these functions in speculative devirtualization.
909 ??? Maybe it would make sense to be more aggressive for LTO even
910 elsewhere. */
911 if (!flag_ltrans
912 && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
913 && (!target_node
914 || !referenced_from_vtable_p (target_node)))
915 ;
916 /* See if TARGET is a useful function we can deal with. */
917 else if (target_node != NULL
918 && (TREE_PUBLIC (target)
919 || DECL_EXTERNAL (target)
920 || target_node->definition)
921 && symtab_real_symbol_p (target_node))
922 {
923 gcc_assert (!target_node->global.inlined_to);
924 gcc_assert (symtab_real_symbol_p (target_node));
925 if (!pointer_set_insert (inserted, target_node->decl))
926 {
927 pointer_set_insert (cached_polymorphic_call_targets,
928 target_node);
929 nodes.safe_push (target_node);
930 }
931 }
932 else if (completep
933 && (!type_in_anonymous_namespace_p
934 (DECL_CONTEXT (target))
935 || flag_ltrans))
936 *completep = false;
937 }
938
939 /* See if BINFO's type matches OUTER_TYPE. If so, look up the
940 BINFO of the subtype of OTR_TYPE at OFFSET and in that BINFO find
941 the method in the vtable and insert the method into the NODES array,
942 or into BASES_TO_CONSIDER if this array is non-NULL.
943 Otherwise recurse to base BINFOs.
944 This matches what get_binfo_at_offset does, but with the offset
945 being unknown.
946
947 TYPE_BINFOS is a stack of BINFOs of types with defined
948 virtual tables seen on the way from the class type to BINFO.
949
950 MATCHED_VTABLES tracks virtual tables in which we already looked
951 up the virtual function. INSERTED tracks nodes we already
952 inserted.
953
954 ANONYMOUS is true if BINFO is part of an anonymous namespace.
955
956 Clear COMPLETEP when we hit an unreferable target.
957 */
958
959 static void
960 record_target_from_binfo (vec <cgraph_node *> &nodes,
961 vec <tree> *bases_to_consider,
962 tree binfo,
963 tree otr_type,
964 vec <tree> &type_binfos,
965 HOST_WIDE_INT otr_token,
966 tree outer_type,
967 HOST_WIDE_INT offset,
968 pointer_set_t *inserted,
969 pointer_set_t *matched_vtables,
970 bool anonymous,
971 bool *completep)
972 {
973 tree type = BINFO_TYPE (binfo);
974 int i;
975 tree base_binfo;
976
977
978 if (BINFO_VTABLE (binfo))
979 type_binfos.safe_push (binfo);
980 if (types_same_for_odr (type, outer_type))
981 {
982 int i;
983 tree type_binfo = NULL;
984
985 /* Look up the BINFO with virtual table. For normal types it is always
986 the last binfo on the stack. */
987 for (i = type_binfos.length () - 1; i >= 0; i--)
988 if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
989 {
990 type_binfo = type_binfos[i];
991 break;
992 }
993 if (BINFO_VTABLE (binfo))
994 type_binfos.pop ();
995 /* If this is a duplicated BINFO for a base shared by virtual inheritance,
996 we may not have its associated vtable. This is not a problem, since
997 we will walk it on the other path. */
998 if (!type_binfo)
999 return;
1000 tree inner_binfo = get_binfo_at_offset (type_binfo,
1001 offset, otr_type);
1002 if (!inner_binfo)
1003 {
1004 gcc_assert (odr_violation_reported);
1005 return;
1006 }
1007 /* For types in an anonymous namespace, first check if the respective vtable
1008 is alive. If not, we know the type can't be called. */
1009 if (!flag_ltrans && anonymous)
1010 {
1011 tree vtable = BINFO_VTABLE (inner_binfo);
1012 varpool_node *vnode;
1013
1014 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
1015 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
1016 vnode = varpool_get_node (vtable);
1017 if (!vnode || !vnode->definition)
1018 return;
1019 }
1020 gcc_assert (inner_binfo);
1021 if (bases_to_consider
1022 ? !pointer_set_contains (matched_vtables, BINFO_VTABLE (inner_binfo))
1023 : !pointer_set_insert (matched_vtables, BINFO_VTABLE (inner_binfo)))
1024 {
1025 bool can_refer;
1026 tree target = gimple_get_virt_method_for_binfo (otr_token,
1027 inner_binfo,
1028 &can_refer);
1029 if (!bases_to_consider)
1030 maybe_record_node (nodes, target, inserted, can_refer, completep);
1031 /* Destructors are never called via construction vtables. */
1032 else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
1033 bases_to_consider->safe_push (target);
1034 }
1035 return;
1036 }
1037
1038 /* Walk bases. */
1039 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1040 /* Walking bases that have no virtual method is a pointless exercise. */
1041 if (polymorphic_type_binfo_p (base_binfo))
1042 record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
1043 type_binfos,
1044 otr_token, outer_type, offset, inserted,
1045 matched_vtables, anonymous, completep);
1046 if (BINFO_VTABLE (binfo))
1047 type_binfos.pop ();
1048 }
1049
1050 /* Lookup virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
1051 of TYPE, insert them to NODES, recurse into derived nodes.
1052 INSERTED is used to avoid duplicate insertions of methods into NODES.
1053 MATCHED_VTABLES is used to avoid duplicate walking of vtables.
1054 Clear COMPLETEP if an unreferable target is found.
1055
1056 If CONSIDER_CONSTRUCTION is true, record to BASES_TO_CONSIDER
1057 all cases where BASE_SKIPPED is true (because the base is an abstract
1058 class). */
1059
1060 static void
1061 possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
1062 pointer_set_t *inserted,
1063 pointer_set_t *matched_vtables,
1064 tree otr_type,
1065 odr_type type,
1066 HOST_WIDE_INT otr_token,
1067 tree outer_type,
1068 HOST_WIDE_INT offset,
1069 bool *completep,
1070 vec <tree> &bases_to_consider,
1071 bool consider_construction)
1072 {
1073 tree binfo = TYPE_BINFO (type->type);
1074 unsigned int i;
1075 vec <tree> type_binfos = vNULL;
1076 bool possibly_instantiated = type_possibly_instantiated_p (type->type);
1077
1078 /* We may need to consider types w/o instances because of possible derived
1079 types using their methods either directly or via construction vtables.
1080 We are safe to skip them when all derivations are known, since we will
1081 handle them later.
1082 This is done by recording them to BASES_TO_CONSIDER array. */
1083 if (possibly_instantiated || consider_construction)
1084 {
1085 record_target_from_binfo (nodes,
1086 (!possibly_instantiated
1087 && type_all_derivations_known_p (type->type))
1088 ? &bases_to_consider : NULL,
1089 binfo, otr_type, type_binfos, otr_token,
1090 outer_type, offset,
1091 inserted, matched_vtables,
1092 type->anonymous_namespace, completep);
1093 }
1094 type_binfos.release ();
1095 for (i = 0; i < type->derived_types.length (); i++)
1096 possible_polymorphic_call_targets_1 (nodes, inserted,
1097 matched_vtables,
1098 otr_type,
1099 type->derived_types[i],
1100 otr_token, outer_type, offset, completep,
1101 bases_to_consider, consider_construction);
1102 }
1103
1104 /* Cache of queries for polymorphic call targets.
1105
1106 Enumerating all call targets may get expensive when there are many
1107 polymorphic calls in the program, so we memoize all the previous
1108 queries and avoid duplicated work. */
1109
1110 struct polymorphic_call_target_d
1111 {
1112 HOST_WIDE_INT otr_token;
1113 ipa_polymorphic_call_context context;
1114 odr_type type;
1115 vec <cgraph_node *> targets;
1116 int nonconstruction_targets;
1117 bool complete;
1118 };
1119
1120 /* Polymorphic call target cache helpers. */
1121
1122 struct polymorphic_call_target_hasher
1123 {
1124 typedef polymorphic_call_target_d value_type;
1125 typedef polymorphic_call_target_d compare_type;
1126 static inline hashval_t hash (const value_type *);
1127 static inline bool equal (const value_type *, const compare_type *);
1128 static inline void remove (value_type *);
1129 };
1130
1131 /* Return the computed hashcode for ODR_QUERY. */
1132
1133 inline hashval_t
1134 polymorphic_call_target_hasher::hash (const value_type *odr_query)
1135 {
1136 hashval_t hash;
1137
1138 hash = iterative_hash_host_wide_int
1139 (odr_query->otr_token,
1140 odr_query->type->id);
1141 hash = iterative_hash_hashval_t (TYPE_UID (odr_query->context.outer_type),
1142 hash);
1143 hash = iterative_hash_host_wide_int (odr_query->context.offset, hash);
1144 return iterative_hash_hashval_t
1145 (((int)odr_query->context.maybe_in_construction << 1)
1146 | (int)odr_query->context.maybe_derived_type, hash);
1147 }
1148
1149 /* Compare cache entries T1 and T2. */
1150
1151 inline bool
1152 polymorphic_call_target_hasher::equal (const value_type *t1,
1153 const compare_type *t2)
1154 {
1155 return (t1->type == t2->type && t1->otr_token == t2->otr_token
1156 && t1->context.offset == t2->context.offset
1157 && t1->context.outer_type == t2->context.outer_type
1158 && t1->context.maybe_in_construction
1159 == t2->context.maybe_in_construction
1160 && t1->context.maybe_derived_type == t2->context.maybe_derived_type);
1161 }
1162
1163 /* Remove an entry from the polymorphic call target cache hash. */
1164
1165 inline void
1166 polymorphic_call_target_hasher::remove (value_type *v)
1167 {
1168 v->targets.release ();
1169 free (v);
1170 }
1171
1172 /* Polymorphic call target query cache. */
1173
1174 typedef hash_table<polymorphic_call_target_hasher>
1175 polymorphic_call_target_hash_type;
1176 static polymorphic_call_target_hash_type *polymorphic_call_target_hash;
1177
1178 /* Destroy polymorphic call target query cache. */
1179
1180 static void
1181 free_polymorphic_call_targets_hash ()
1182 {
1183 if (cached_polymorphic_call_targets)
1184 {
1185 delete polymorphic_call_target_hash;
1186 polymorphic_call_target_hash = NULL;
1187 pointer_set_destroy (cached_polymorphic_call_targets);
1188 cached_polymorphic_call_targets = NULL;
1189 }
1190 }
1191
1192 /* When a virtual function is removed, we may need to flush the cache. */
1193
1194 static void
1195 devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
1196 {
1197 if (cached_polymorphic_call_targets
1198 && pointer_set_contains (cached_polymorphic_call_targets, n))
1199 free_polymorphic_call_targets_hash ();
1200 }
1201
1202 /* Return true when TYPE contains a polymorphic type and thus is interesting
1203 for the devirtualization machinery. */
1204
1205 bool
1206 contains_polymorphic_type_p (const_tree type)
1207 {
1208 type = TYPE_MAIN_VARIANT (type);
1209
1210 if (RECORD_OR_UNION_TYPE_P (type))
1211 {
1212 if (TYPE_BINFO (type)
1213 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
1214 return true;
1215 for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1216 if (TREE_CODE (fld) == FIELD_DECL
1217 && !DECL_ARTIFICIAL (fld)
1218 && contains_polymorphic_type_p (TREE_TYPE (fld)))
1219 return true;
1220 return false;
1221 }
1222 if (TREE_CODE (type) == ARRAY_TYPE)
1223 return contains_polymorphic_type_p (TREE_TYPE (type));
1224 return false;
1225 }
1226
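/* Example (illustrative): given a polymorphic class A,

     struct X { int i; A a[4]; };

   contains a polymorphic type, found via the FIELD_DECL walk combined
   with the ARRAY_TYPE recursion above. */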
1227 /* CONTEXT->OUTER_TYPE is a type of memory object where object of EXPECTED_TYPE
1228 is contained at CONTEXT->OFFSET. Walk the memory representation of
1229 CONTEXT->OUTER_TYPE and find the outermost class type that matches
1230 EXPECTED_TYPE or contains EXPECTED_TYPE as a base. Update CONTEXT
1231 to represent it.
1232
1233 For example when CONTEXT represents type
1234 class A
1235 {
1236 int a;
1237 class B b;
1238 }
1239 and we look for type at offset sizeof(int), we end up with B and offset 0.
1240 If the same is produced by multiple inheritance, we end up with A and offset
1241 sizeof(int).
1242
1243 If we can not find the corresponding class, give up by setting
1244 CONTEXT->OUTER_TYPE to EXPECTED_TYPE and CONTEXT->OFFSET to NULL.
1245 Return true when the lookup was successful. */
1246
1247 static bool
1248 get_class_context (ipa_polymorphic_call_context *context,
1249 tree expected_type)
1250 {
1251 tree type = context->outer_type;
1252 HOST_WIDE_INT offset = context->offset;
1253
1254 /* Find the sub-object the constant actually refers to and mark whether it is
1255 an artificial one (as opposed to a user-defined one). */
1256 while (true)
1257 {
1258 HOST_WIDE_INT pos, size;
1259 tree fld;
1260
1261 /* On a match, just return what we found. */
1262 if (TREE_CODE (type) == TREE_CODE (expected_type)
1263 && types_same_for_odr (type, expected_type))
1264 {
1265 /* A type can not contain itself at a non-zero offset. In that case
1266 just give up. */
1267 if (offset != 0)
1268 goto give_up;
1269 gcc_assert (offset == 0);
1270 return true;
1271 }
1272
1273 /* Walk fields and find the corresponding one at OFFSET. */
1274 if (TREE_CODE (type) == RECORD_TYPE)
1275 {
1276 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1277 {
1278 if (TREE_CODE (fld) != FIELD_DECL)
1279 continue;
1280
1281 pos = int_bit_position (fld);
1282 size = tree_to_uhwi (DECL_SIZE (fld));
1283 if (pos <= offset && (pos + size) > offset)
1284 break;
1285 }
1286
1287 if (!fld)
1288 goto give_up;
1289
1290 type = TYPE_MAIN_VARIANT (TREE_TYPE (fld));
1291 offset -= pos;
1292 /* DECL_ARTIFICIAL represents a basetype. */
1293 if (!DECL_ARTIFICIAL (fld))
1294 {
1295 context->outer_type = type;
1296 context->offset = offset;
1297 /* As soon as we see a field containing the type,
1298 we know we are not looking for derivations. */
1299 context->maybe_derived_type = false;
1300 }
1301 }
1302 else if (TREE_CODE (type) == ARRAY_TYPE)
1303 {
1304 tree subtype = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1305
1306 /* Give up if we don't know array size. */
1307 if (!tree_fits_shwi_p (TYPE_SIZE (subtype))
1308 || tree_to_shwi (TYPE_SIZE (subtype)) <= 0)
1309 goto give_up;
1310 offset = offset % tree_to_shwi (TYPE_SIZE (subtype));
1311 type = subtype;
1312 context->outer_type = type;
1313 context->offset = offset;
1314 context->maybe_derived_type = false;
1315 }
1316 /* Give up on anything else. */
1317 else
1318 goto give_up;
1319 }
1320
1321 /* If we failed to find subtype we look for, give up and fall back to the
1322 most generic query. */
1323 give_up:
1324 context->outer_type = expected_type;
1325 context->offset = 0;
1326 context->maybe_derived_type = true;
1327 context->maybe_in_construction = true;
1328 /* POD can be changed to an instance of a polymorphic type by
1329 placement new. Here we play safe and assume that any
1330 non-polymorphic type is POD. */
1331 if ((TREE_CODE (type) != RECORD_TYPE
1332 || !TYPE_BINFO (type)
1333 || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
1334 && (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1335 || (offset + tree_to_uhwi (TYPE_SIZE (expected_type)) <=
1336 tree_to_uhwi (TYPE_SIZE (type)))))
1337 return true;
1338 return false;
1339 }
1340
1341 /* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET. */
1342
1343 static bool
1344 contains_type_p (tree outer_type, HOST_WIDE_INT offset,
1345 tree otr_type)
1346 {
1347 ipa_polymorphic_call_context context = {offset,
1348 TYPE_MAIN_VARIANT (outer_type),
1349 false, true};
1350 return get_class_context (&context, otr_type);
1351 }
1352
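/* Example (illustrative): with the classes A and B from the comment
   before get_class_context, contains_type_p (A, sizeof (int) *
   BITS_PER_UNIT, B) is true; note that OFFSET is measured in bits. */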
1353 /* Lookup base of BINFO that has virtual table VTABLE with OFFSET. */
1354
1355 static tree
1356 subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
1357 tree vtable)
1358 {
1359 tree v = BINFO_VTABLE (binfo);
1360 int i;
1361 tree base_binfo;
1362 unsigned HOST_WIDE_INT this_offset;
1363
1364 if (v)
1365 {
1366 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
1367 gcc_unreachable ();
1368
1369 if (offset == this_offset
1370 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
1371 return binfo;
1372 }
1373
1374 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1375 if (polymorphic_type_binfo_p (base_binfo))
1376 {
1377 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
1378 if (base_binfo)
1379 return base_binfo;
1380 }
1381 return NULL;
1382 }
1383
1384 /* T is a known constant value of a virtual table pointer.
1385 Store the virtual table to V and its offset to OFFSET.
1386 Return false if T does not look like a virtual table reference. */
1387
1388 bool
1389 vtable_pointer_value_to_vtable (const_tree t, tree *v,
1390 unsigned HOST_WIDE_INT *offset)
1391 {
1392 /* We expect &MEM[(void *)&virtual_table + 16B].
1393 We obtain the object's BINFO from the context of the virtual table.
1394 This one contains a pointer to the virtual table represented via
1395 POINTER_PLUS_EXPR. Verify that this pointer matches what
1396 we propagated through.
1397
1398 In the case of virtual inheritance, the virtual tables may
1399 be nested, i.e. the offset may be different from 16 and we may
1400 need to dive into the type representation. */
1401 if (TREE_CODE (t) == ADDR_EXPR
1402 && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
1403 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
1404 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
1405 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
1406 == VAR_DECL)
1407 && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
1408 (TREE_OPERAND (t, 0), 0), 0)))
1409 {
1410 *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
1411 *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
1412 return true;
1413 }
1414
1415 /* An alternative representation, used by the C++ frontend, is POINTER_PLUS_EXPR.
1416 We need to handle it when T comes from a static variable initializer or
1417 a BINFO. */
1418 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1419 {
1420 *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
1421 t = TREE_OPERAND (t, 0);
1422 }
1423 else
1424 *offset = 0;
1425
1426 if (TREE_CODE (t) != ADDR_EXPR)
1427 return false;
1428 *v = TREE_OPERAND (t, 0);
1429 return true;
1430 }
1431
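/* Example (illustrative): for T of the form

     &MEM[(void *)&_ZTV1A + 16B]

   vtable_pointer_value_to_vtable stores the VAR_DECL of _ZTV1A to *V
   and 16 to *OFFSET; the POINTER_PLUS_EXPR form `&_ZTV1A p+ 16'
   yields the same result. */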
1432 /* T is a known constant value of a virtual table pointer. Return the BINFO
1433 of the instance type. */
1434
1435 tree
1436 vtable_pointer_value_to_binfo (const_tree t)
1437 {
1438 tree vtable;
1439 unsigned HOST_WIDE_INT offset;
1440
1441 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
1442 return NULL_TREE;
1443
1444 /* FIXME: for stores of construction vtables we return NULL,
1445 because we do not have BINFO for those. Eventually we should fix
1446 our representation to allow this case to be handled, too.
1447 In the case we see a store of a BINFO, we may however assume
1448 that standard folding will be able to cope with it. */
1449 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
1450 offset, vtable);
1451 }
1452
1453 /* We know that the instance is stored in a variable or parameter
1454 (not dynamically allocated) and we want to disprove the fact
1455 that it may be in construction at invocation of CALL.
1456
1457 For the variable to be in construction we actually need to
1458 be in the constructor of the corresponding global variable or
1459 the inline stack of CALL must contain the constructor.
1460 Check this condition. This check works safely only before
1461 IPA passes, because inline stacks may become out of date
1462 later. */
1463
1464 bool
1465 decl_maybe_in_construction_p (tree base, tree outer_type,
1466 gimple call, tree function)
1467 {
1468 outer_type = TYPE_MAIN_VARIANT (outer_type);
1469 gcc_assert (DECL_P (base));
1470
1471 /* After inlining the code unification optimizations may invalidate
1472 inline stacks. Also we need to give up on global variables after
1473 IPA, because addresses of these may have been propagated to their
1474 constructors. */
1475 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
1476 return true;
1477
1478 /* Pure functions can not do any changes on the dynamic type;
1479 that requires writing to memory. */
1480 if (!auto_var_in_fn_p (base, function)
1481 && flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
1482 return false;
1483
1484 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
1485 block = BLOCK_SUPERCONTEXT (block))
1486 if (BLOCK_ABSTRACT_ORIGIN (block)
1487 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
1488 {
1489 tree fn = BLOCK_ABSTRACT_ORIGIN (block);
1490
1491 if (TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
1492 || (!DECL_CXX_CONSTRUCTOR_P (fn)
1493 && !DECL_CXX_DESTRUCTOR_P (fn)))
1494 {
1495 /* Watch for clones where we constant propagated the first
1496 argument (pointer to the instance). */
1497 fn = DECL_ABSTRACT_ORIGIN (fn);
1498 if (!fn
1499 || !is_global_var (base)
1500 || TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
1501 || (!DECL_CXX_CONSTRUCTOR_P (fn)
1502 && !DECL_CXX_DESTRUCTOR_P (fn)))
1503 continue;
1504 }
1505 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
1506 continue;
1507
1508 /* FIXME: this can go away once we have ODR types equivalency on
1509 LTO level. */
1510 if (in_lto_p && !polymorphic_type_binfo_p (TYPE_BINFO (outer_type)))
1511 return true;
1512 tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (fn)));
1513 if (types_same_for_odr (type, outer_type))
1514 return true;
1515 }
1516
1517 if (TREE_CODE (base) == VAR_DECL
1518 && is_global_var (base))
1519 {
1520 if (TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
1521 || (!DECL_CXX_CONSTRUCTOR_P (function)
1522 && !DECL_CXX_DESTRUCTOR_P (function)))
1523 {
1524 if (!DECL_ABSTRACT_ORIGIN (function))
1525 return false;
1526 /* Watch for clones where we constant propagated the first
1527 argument (pointer to the instance). */
1528 function = DECL_ABSTRACT_ORIGIN (function);
1529 if (!function
1530 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
1531 || (!DECL_CXX_CONSTRUCTOR_P (function)
1532 || !DECL_CXX_DESTRUCTOR_P (function)))
1533 return false;
1534 }
1535 /* FIXME: this can go away once we have ODR types equivalency on
1536 LTO level. */
1537 if (in_lto_p && !polymorphic_type_binfo_p (TYPE_BINFO (outer_type)))
1538 return true;
1539 tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (function)));
1540 if (types_same_for_odr (type, outer_type))
1541 return true;
1542 }
1543 return false;
1544 }
1545
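/* Example (illustrative): for

     static A a;
     int main () { a.f (); }

   the call statement is not in A's constructor nor inlined from it,
   so decl_maybe_in_construction_p returns false (when used before
   inlining, as described above) and the dynamic type of `a' is known
   exactly. */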
1546 /* Produce a polymorphic call context for a call to a method of an instance
1547 that is located within BASE (that is assumed to be a decl) at OFFSET. */
1548
1549 static void
1550 get_polymorphic_call_info_for_decl (ipa_polymorphic_call_context *context,
1551 tree base, HOST_WIDE_INT offset)
1552 {
1553 gcc_assert (DECL_P (base));
1554
1555 context->outer_type = TYPE_MAIN_VARIANT (TREE_TYPE (base));
1556 context->offset = offset;
1557 /* Make the very conservative assumption that all objects
1558 may be in construction.
1559 TODO: ipa-prop already contains code to tell better;
1560 merge it later. */
1561 context->maybe_in_construction = true;
1562 context->maybe_derived_type = false;
1563 }
1564
1565 /* CST is an invariant (address of decl); try to get a meaningful
1566 polymorphic call context for a polymorphic call of a method
1567 of an instance of OTR_TYPE that is located at OFFSET of this invariant.
1568 Return FALSE if nothing meaningful can be found. */
1569
1570 bool
1571 get_polymorphic_call_info_from_invariant (ipa_polymorphic_call_context *context,
1572 tree cst,
1573 tree otr_type,
1574 HOST_WIDE_INT offset)
1575 {
1576 HOST_WIDE_INT offset2, size, max_size;
1577 tree base;
1578
1579 if (TREE_CODE (cst) != ADDR_EXPR)
1580 return false;
1581
1582 cst = TREE_OPERAND (cst, 0);
1583 base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
1584 if (!DECL_P (base) || max_size == -1 || max_size != size)
1585 return false;
1586
1587 /* Only type inconsistent programs can have otr_type that is
1588 not part of outer type. */
1589 if (!contains_type_p (TREE_TYPE (base), offset, otr_type))
1590 return false;
1591
1592 get_polymorphic_call_info_for_decl (context, base, offset);
1593 return true;
1594 }
1595
1596 /* Given the REF call in FNDECL, determine the class of the polymorphic
1597 call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
1598 CALL is an optional argument giving the actual statement (usually a call)
1599 where the context is used.
1600 Return a pointer to the object described by the context. */
1601
1602 tree
1603 get_polymorphic_call_info (tree fndecl,
1604 tree ref,
1605 tree *otr_type,
1606 HOST_WIDE_INT *otr_token,
1607 ipa_polymorphic_call_context *context,
1608 gimple call)
1609 {
1610 tree base_pointer;
1611 *otr_type = obj_type_ref_class (ref);
1612 *otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));
1613
1614 /* Set up basic info in case we find nothing interesting in the analysis. */
1615 context->outer_type = TYPE_MAIN_VARIANT (*otr_type);
1616 context->offset = 0;
1617 base_pointer = OBJ_TYPE_REF_OBJECT (ref);
1618 context->maybe_derived_type = true;
1619 context->maybe_in_construction = true;
1620
1621 /* Walk SSA for outer object. */
1622 do
1623 {
1624 if (TREE_CODE (base_pointer) == SSA_NAME
1625 && !SSA_NAME_IS_DEFAULT_DEF (base_pointer)
1626 && SSA_NAME_DEF_STMT (base_pointer)
1627 && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
1628 {
1629 base_pointer = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (base_pointer));
1630 STRIP_NOPS (base_pointer);
1631 }
1632 else if (TREE_CODE (base_pointer) == ADDR_EXPR)
1633 {
1634 HOST_WIDE_INT size, max_size;
1635 HOST_WIDE_INT offset2;
1636 tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
1637 &offset2, &size, &max_size);
1638
1639 /* If this is a varying address, punt. */
1640 if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
1641 && max_size != -1
1642 && max_size == size)
1643 {
1644 /* We found a dereference of a pointer. The type of the pointer
1645 and MEM_REF is meaningless, but we can look further. */
1646 if (TREE_CODE (base) == MEM_REF)
1647 {
1648 base_pointer = TREE_OPERAND (base, 0);
1649 context->offset
1650 += offset2 + mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
1651 context->outer_type = NULL;
1652 }
1653 /* We found the base object. In this case the outer_type
1654 is known. */
1655 else if (DECL_P (base))
1656 {
1657 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (base)));
1658
1659 /* Only type inconsistent programs can have otr_type that is
1660 not part of outer type. */
1661 if (!contains_type_p (TREE_TYPE (base),
1662 context->offset + offset2, *otr_type))
1663 {
1664 /* Use OTR_TOKEN = INT_MAX as a marker of probably type inconsistent
1665 code sequences; we arrange the calls to be builtin_unreachable
1666 later. */
1667 *otr_token = INT_MAX;
1668 return base_pointer;
1669 }
1670 get_polymorphic_call_info_for_decl (context, base,
1671 context->offset + offset2);
1672 if (context->maybe_in_construction && call)
1673 context->maybe_in_construction
1674 = decl_maybe_in_construction_p (base,
1675 context->outer_type,
1676 call,
1677 current_function_decl);
1678 return NULL;
1679 }
1680 else
1681 break;
1682 }
1683 else
1684 break;
1685 }
1686 else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
1687 && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
1688 {
1689 context->offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
1690 * BITS_PER_UNIT;
1691 base_pointer = TREE_OPERAND (base_pointer, 0);
1692 }
1693 else
1694 break;
1695 }
1696 while (true);
1697
1698 /* Try to determine type of the outer object. */
1699 if (TREE_CODE (base_pointer) == SSA_NAME
1700 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
1701 && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
1702 {
1703 /* See if the parameter is the THIS pointer of a method. */
1704 if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
1705 && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
1706 {
1707 context->outer_type
1708 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
1709 gcc_assert (TREE_CODE (context->outer_type) == RECORD_TYPE);
1710
1711	  /* Dynamic casting has possibly upcast the type
1712	     in the hierarchy.  In this case the outer type is less
1713	     informative than the inner type and we should forget
1714	     about it.  */
1715 if (!contains_type_p (context->outer_type, context->offset,
1716 *otr_type))
1717 {
1718 context->outer_type = NULL;
1719 return base_pointer;
1720 }
1721
1722	  /* If the function is a constructor or destructor, then
1723	     the type is possibly in construction, but we know
1724	     it is not a derived type.  */
1725 if (DECL_CXX_CONSTRUCTOR_P (fndecl)
1726 || DECL_CXX_DESTRUCTOR_P (fndecl))
1727 {
1728 context->maybe_in_construction = true;
1729 context->maybe_derived_type = false;
1730 }
1731 else
1732 {
1733 context->maybe_derived_type = true;
1734 context->maybe_in_construction = false;
1735 }
1736 return base_pointer;
1737 }
1738 /* Non-PODs passed by value are really passed by invisible
1739 reference. In this case we also know the type of the
1740 object. */
1741 if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
1742 {
1743 context->outer_type
1744 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
1745 gcc_assert (!POINTER_TYPE_P (context->outer_type));
1746	  /* Only type-inconsistent programs can have an otr_type that is
1747	     not part of the outer type.  */
1748 if (!contains_type_p (context->outer_type, context->offset,
1749 *otr_type))
1750 {
1751	      /* Use OTR_TOKEN = INT_MAX as a marker of probably type-inconsistent
1752		 code sequences; we arrange for the calls to be redirected to
1753		 builtin_unreachable later.  */
1754 *otr_token = INT_MAX;
1755 return base_pointer;
1756 }
1757 context->maybe_derived_type = false;
1758 context->maybe_in_construction = false;
1759 return base_pointer;
1760 }
1761 }
1762   /* TODO: There are multiple ways to derive a type.  For instance,
1763      BASE_POINTER may be passed to a constructor call prior to our reference.
1764      We do not perform this kind of flow-sensitive analysis yet.  */
1765 return base_pointer;
1766 }
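
/* Illustrative sketch only (hypothetical helper; not used anywhere in GCC):
   how the analysis above is typically driven for a single polymorphic call
   statement CALL appearing in function FNDECL.  */

static bool ATTRIBUTE_UNUSED
example_polymorphic_call_info (tree fndecl, gimple call)
{
  tree fn = gimple_call_fn (call);
  tree otr_type;
  HOST_WIDE_INT otr_token;
  ipa_polymorphic_call_context context;

  if (!fn || TREE_CODE (fn) != OBJ_TYPE_REF)
    return false;
  get_polymorphic_call_info (fndecl, fn, &otr_type, &otr_token,
			     &context, call);
  /* OTR_TOKEN == INT_MAX marks a provably type-inconsistent call.  */
  return otr_token != INT_MAX;
}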
1767
1768 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
1769    Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
1770    and insert them into NODES.
1771
1772    MATCHED_VTABLES and INSERTED are used to avoid duplicated work.  */
1773
1774 static void
1775 record_targets_from_bases (tree otr_type,
1776 HOST_WIDE_INT otr_token,
1777 tree outer_type,
1778 HOST_WIDE_INT offset,
1779 vec <cgraph_node *> &nodes,
1780 pointer_set_t *inserted,
1781 pointer_set_t *matched_vtables,
1782 bool *completep)
1783 {
1784 while (true)
1785 {
1786 HOST_WIDE_INT pos, size;
1787 tree base_binfo;
1788 tree fld;
1789
1790 if (types_same_for_odr (outer_type, otr_type))
1791 return;
1792
1793 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
1794 {
1795 if (TREE_CODE (fld) != FIELD_DECL)
1796 continue;
1797
1798 pos = int_bit_position (fld);
1799 size = tree_to_shwi (DECL_SIZE (fld));
1800 if (pos <= offset && (pos + size) > offset
1801 /* Do not get confused by zero sized bases. */
1802 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
1803 break;
1804 }
1805       /* Within a class type we should always find the corresponding field.  */
1806 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
1807
1808       /* Non-base types should have been stripped by outer_class_type.  */
1809 gcc_assert (DECL_ARTIFICIAL (fld));
1810
1811 outer_type = TREE_TYPE (fld);
1812 offset -= pos;
1813
1814 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
1815 offset, otr_type);
1816 if (!base_binfo)
1817 {
1818 gcc_assert (odr_violation_reported);
1819 return;
1820 }
1821
1822 if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo)))
1823 {
1824 bool can_refer;
1825 tree target = gimple_get_virt_method_for_binfo (otr_token,
1826 base_binfo,
1827 &can_refer);
1828	  if (!target || !DECL_CXX_DESTRUCTOR_P (target))
1829 maybe_record_node (nodes, target, inserted, can_refer, completep);
1830 pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo));
1831 }
1832 }
1833 }
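
/* For illustration (hypothetical C++ source, not GCC code): given

     struct A { virtual int f (); };
     struct B : A { virtual int f (); };

   a virtual call dispatched while a B instance is being constructed or
   destructed may reach A::f, because the object's vtable pointer
   temporarily refers to A's virtual table during that window.  Recording
   targets from bases accounts for this whenever the context has
   maybe_in_construction set.  */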
1834
1835 /* When a virtual table is removed, we may need to flush the cache.  */
1836
1837 static void
1838 devirt_variable_node_removal_hook (varpool_node *n,
1839 void *d ATTRIBUTE_UNUSED)
1840 {
1841 if (cached_polymorphic_call_targets
1842 && DECL_VIRTUAL_P (n->decl)
1843 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
1844 free_polymorphic_call_targets_hash ();
1845 }
1846
1847 /* Return a vector containing the possible targets of a polymorphic call
1848    of type OTR_TYPE calling method OTR_TOKEN within type of OTR_OUTER_TYPE
1849    and OFFSET.  If INCLUDE_BASES is true, walk also the base types of
1850    OUTER_TYPES containing OTR_TYPE and include their virtual methods; this
1851    is useful for types possibly in construction or destruction where the
1852    virtual table may temporarily change to one of the base types.
1853    INCLUDE_DERIVED_TYPES makes us walk the inheritance graph for all derivations.
1854
1855 OTR_TOKEN == INT_MAX is used to mark calls that are provably
1856 undefined and should be redirected to unreachable.
1857
1858 If COMPLETEP is non-NULL, store true if the list is complete.
1859    CACHE_TOKEN (if non-NULL) will be set to a unique ID of the entry
1860    in the target cache.  If the user needs to visit every target list
1861    just once, it can memoize the tokens.
1862
1863    NONCONSTRUCTION_TARGETS specifies the number of targets with the
1864    assumption that the type is not in construction.  Those targets
1865    appear first in the vector returned.
1866
1867    The returned vector is placed into the cache.  It is NOT the caller's
1868    responsibility to free it.  The vector can be freed on a cgraph_remove_node
1869    call if the particular node is a virtual function present in the cache.  */
1870
1871 vec <cgraph_node *>
1872 possible_polymorphic_call_targets (tree otr_type,
1873 HOST_WIDE_INT otr_token,
1874 ipa_polymorphic_call_context context,
1875 bool *completep,
1876 void **cache_token,
1877 int *nonconstruction_targetsp)
1878 {
1879 static struct cgraph_node_hook_list *node_removal_hook_holder;
1880 pointer_set_t *inserted;
1881 pointer_set_t *matched_vtables;
1882 vec <cgraph_node *> nodes = vNULL;
1883 vec <tree> bases_to_consider = vNULL;
1884 odr_type type, outer_type;
1885 polymorphic_call_target_d key;
1886 polymorphic_call_target_d **slot;
1887 unsigned int i;
1888 tree binfo, target;
1889 bool complete;
1890 bool can_refer;
1891 bool skipped = false;
1892
1893 otr_type = TYPE_MAIN_VARIANT (otr_type);
1894
1895   /* If ODR is not initialized, return an empty, incomplete list.  */
1896 if (!odr_hash)
1897 {
1898 if (completep)
1899 *completep = false;
1900 if (cache_token)
1901 *cache_token = NULL;
1902 if (nonconstruction_targetsp)
1903 *nonconstruction_targetsp = 0;
1904 return nodes;
1905 }
1906
1907   /* If we hit a type inconsistency, just return an empty list of targets.  */
1908 if (otr_token == INT_MAX)
1909 {
1910 if (completep)
1911 *completep = true;
1912 if (cache_token)
1913 *cache_token = NULL;
1914 if (nonconstruction_targetsp)
1915 *nonconstruction_targetsp = 0;
1916 return nodes;
1917 }
1918
1919 type = get_odr_type (otr_type, true);
1920
1921   /* Recording type variants would waste the results cache.  */
1922 gcc_assert (!context.outer_type
1923 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
1924
1925   /* Look up the outer class type we want to walk.  */
1926 if (context.outer_type
1927 && !get_class_context (&context, otr_type))
1928 {
1929 if (completep)
1930 *completep = false;
1931 if (cache_token)
1932 *cache_token = NULL;
1933 if (nonconstruction_targetsp)
1934 *nonconstruction_targetsp = 0;
1935 return nodes;
1936 }
1937
1938 /* Check that get_class_context kept the main variant. */
1939 gcc_assert (!context.outer_type
1940 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
1941
1942 /* We canonicalize our query, so we do not need extra hashtable entries. */
1943
1944   /* Without an outer type, we have no use for the offset.  Just do the
1945      basic search from the inner type.  */
1946 if (!context.outer_type)
1947 {
1948 context.outer_type = otr_type;
1949 context.offset = 0;
1950 }
1951   /* We need to update our hierarchy if the type does not exist.  */
1952 outer_type = get_odr_type (context.outer_type, true);
1953   /* If the type is final, there are no derivations.  */
1954 if (TYPE_FINAL_P (outer_type->type))
1955 context.maybe_derived_type = false;
1956
1957 /* Initialize query cache. */
1958 if (!cached_polymorphic_call_targets)
1959 {
1960 cached_polymorphic_call_targets = pointer_set_create ();
1961 polymorphic_call_target_hash
1962 = new polymorphic_call_target_hash_type (23);
1963 if (!node_removal_hook_holder)
1964 {
1965 node_removal_hook_holder =
1966 cgraph_add_node_removal_hook (&devirt_node_removal_hook, NULL);
1967 varpool_add_node_removal_hook (&devirt_variable_node_removal_hook,
1968 NULL);
1969 }
1970 }
1971
1972   /* Look up the cached answer.  */
1973 key.type = type;
1974 key.otr_token = otr_token;
1975 key.context = context;
1976 slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
1977 if (cache_token)
1978 *cache_token = (void *)*slot;
1979 if (*slot)
1980 {
1981 if (completep)
1982 *completep = (*slot)->complete;
1983 if (nonconstruction_targetsp)
1984 *nonconstruction_targetsp = (*slot)->nonconstruction_targets;
1985 return (*slot)->targets;
1986 }
1987
1988 complete = true;
1989
1990 /* Do actual search. */
1991 timevar_push (TV_IPA_VIRTUAL_CALL);
1992 *slot = XCNEW (polymorphic_call_target_d);
1993 if (cache_token)
1994 *cache_token = (void *)*slot;
1995 (*slot)->type = type;
1996 (*slot)->otr_token = otr_token;
1997 (*slot)->context = context;
1998
1999 inserted = pointer_set_create ();
2000 matched_vtables = pointer_set_create ();
2001
2002   /* First see the virtual method of the type itself.  */
2003 binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
2004 context.offset, otr_type);
2005 if (binfo)
2006 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
2007 &can_refer);
2008 else
2009 {
2010 gcc_assert (odr_violation_reported);
2011 target = NULL;
2012 }
2013
2014 /* Destructors are never called through construction virtual tables,
2015 because the type is always known. */
2016 if (target && DECL_CXX_DESTRUCTOR_P (target))
2017 context.maybe_in_construction = false;
2018
2019 if (target)
2020 {
2021      /* If the method is final (DECL_FINAL_P), we do not need
2022	 to walk derivations.  */
2023 if (DECL_FINAL_P (target))
2024 context.maybe_derived_type = false;
2025 }
2026
2027 /* If OUTER_TYPE is abstract, we know we are not seeing its instance. */
2028 if (type_possibly_instantiated_p (outer_type->type))
2029 maybe_record_node (nodes, target, inserted, can_refer, &complete);
2030 else
2031 {
2032 skipped = true;
2033 gcc_assert (in_lto_p || context.maybe_derived_type);
2034 }
2035
2036 if (binfo)
2037 pointer_set_insert (matched_vtables, BINFO_VTABLE (binfo));
2038
2039   /* Next, recursively walk all derived types.  */
2040 if (context.maybe_derived_type)
2041 {
2042      /* For anonymous namespace types we can attempt to build the full type.
2043	 All derivations must be in this unit (unless we see a partial unit).  */
2044 if (!type->all_derivations_known)
2045 complete = false;
2046 for (i = 0; i < outer_type->derived_types.length(); i++)
2047 possible_polymorphic_call_targets_1 (nodes, inserted,
2048 matched_vtables,
2049 otr_type,
2050 outer_type->derived_types[i],
2051 otr_token, outer_type->type,
2052 context.offset, &complete,
2053 bases_to_consider,
2054 context.maybe_in_construction);
2055 }
2056
2057 /* Finally walk bases, if asked to. */
2058 (*slot)->nonconstruction_targets = nodes.length();
2059
2060 /* Destructors are never called through construction virtual tables,
2061      because the type is always known.  One of the entries may be
2062      cxa_pure_virtual, so look at the first two of them.  */
2063 if (context.maybe_in_construction)
2064     for (i = 0; i < MIN (nodes.length (), 2); i++)
2065 if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
2066 context.maybe_in_construction = false;
2067 if (context.maybe_in_construction)
2068 {
2069 if (type != outer_type
2070 && (!skipped
2071 || (context.maybe_derived_type
2072 && !type_all_derivations_known_p (outer_type->type))))
2073 record_targets_from_bases (otr_type, otr_token, outer_type->type,
2074 context.offset, nodes, inserted,
2075 matched_vtables, &complete);
2076 if (skipped)
2077 maybe_record_node (nodes, target, inserted, can_refer, &complete);
2078 for (i = 0; i < bases_to_consider.length(); i++)
2079 maybe_record_node (nodes, bases_to_consider[i], inserted, can_refer, &complete);
2080 }
2081 bases_to_consider.release();
2082
2083 (*slot)->targets = nodes;
2084 (*slot)->complete = complete;
2085 if (completep)
2086 *completep = complete;
2087 if (nonconstruction_targetsp)
2088 *nonconstruction_targetsp = (*slot)->nonconstruction_targets;
2089
2090 pointer_set_destroy (inserted);
2091 pointer_set_destroy (matched_vtables);
2092 timevar_pop (TV_IPA_VIRTUAL_CALL);
2093 return nodes;
2094 }
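
/* Illustrative sketch only (hypothetical helper; not part of GCC):
   CACHE_TOKEN identifies the cached target list, so a walker that wants to
   process each distinct list once can memoize the tokens, much like
   ipa_devirt below does with its BAD_CALL_TARGETS set.  */

static int ATTRIBUTE_UNUSED
example_count_defined_targets (tree otr_type, HOST_WIDE_INT otr_token,
			       ipa_polymorphic_call_context context,
			       pointer_set_t *visited)
{
  bool final;
  void *cache_token;
  int ndefined = 0;
  vec <cgraph_node *> targets
    = possible_polymorphic_call_targets (otr_type, otr_token, context,
					 &final, &cache_token, NULL);

  /* pointer_set_insert returns nonzero if the token was already present,
     i.e. this exact target list has been processed before.  */
  if (pointer_set_insert (visited, cache_token))
    return 0;
  for (unsigned int i = 0; i < targets.length (); i++)
    if (targets[i]->definition)
      ndefined++;
  return ndefined;
}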
2095
2096 /* Dump all possible targets of a polymorphic call. */
2097
2098 void
2099 dump_possible_polymorphic_call_targets (FILE *f,
2100 tree otr_type,
2101 HOST_WIDE_INT otr_token,
2102 const ipa_polymorphic_call_context &ctx)
2103 {
2104 vec <cgraph_node *> targets;
2105 bool final;
2106 odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false);
2107 unsigned int i;
2108 int nonconstruction;
2109
2110 if (!type)
2111 return;
2112 targets = possible_polymorphic_call_targets (otr_type, otr_token,
2113 ctx,
2114 &final, NULL, &nonconstruction);
2115 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
2116 print_generic_expr (f, type->type, TDF_SLIM);
2117 fprintf (f, " token %i\n", (int)otr_token);
2118 if (ctx.outer_type || ctx.offset)
2119 {
2120 fprintf (f, " Contained in type:");
2121 print_generic_expr (f, ctx.outer_type, TDF_SLIM);
2122 fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n",
2123 ctx.offset);
2124 }
2125
2126 fprintf (f, " %s%s%s\n ",
2127 final ? "This is a complete list." :
2128 "This is partial list; extra targets may be defined in other units.",
2129 ctx.maybe_in_construction ? " (base types included)" : "",
2130 ctx.maybe_derived_type ? " (derived types included)" : "");
2131 for (i = 0; i < targets.length (); i++)
2132 {
2133 char *name = NULL;
2134 if (i == (unsigned)nonconstruction)
2135 fprintf (f, "\n If the type is in construction,"
2136 " then additional tarets are:\n"
2137 " ");
2138 if (in_lto_p)
2139 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
2140 fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
2141 if (in_lto_p)
2142 free (name);
2143 if (!targets[i]->definition)
2144 fprintf (f, " (no definition%s)",
2145 DECL_DECLARED_INLINE_P (targets[i]->decl)
2146 ? " inline" : "");
2147 }
2148 fprintf (f, "\n\n");
2149 }
2150
2151
2152 /* Return true if N can possibly be a target of a polymorphic call of
2153    OTR_TYPE/OTR_TOKEN.  */
2154
2155 bool
2156 possible_polymorphic_call_target_p (tree otr_type,
2157 HOST_WIDE_INT otr_token,
2158 const ipa_polymorphic_call_context &ctx,
2159 struct cgraph_node *n)
2160 {
2161 vec <cgraph_node *> targets;
2162 unsigned int i;
2163 enum built_in_function fcode;
2164 bool final;
2165
2166 if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
2167 && ((fcode = DECL_FUNCTION_CODE (n->decl))
2168 == BUILT_IN_UNREACHABLE
2169 || fcode == BUILT_IN_TRAP))
2170 return true;
2171
2172 if (!odr_hash)
2173 return true;
2174 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
2175 for (i = 0; i < targets.length (); i++)
2176 if (symtab_semantically_equivalent_p (n, targets[i]))
2177 return true;
2178
2179   /* At the moment we allow the middle end to dig out new external
2180      declarations as targets of polymorphic calls.  */
2181 if (!final && !n->definition)
2182 return true;
2183 return false;
2184 }
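
/* Illustrative sketch only: a consistency check built on the predicate
   above might look like

     if (!possible_polymorphic_call_target_p (otr_type, otr_token, ctx, node))
       internal_error ("devirtualization to an impossible target");

   where NODE is the cgraph node some earlier analysis selected as the
   direct call target.  */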
2185
2186
2187 /* After callgraph construction new external nodes may appear.
2188 Add them into the graph. */
2189
2190 void
2191 update_type_inheritance_graph (void)
2192 {
2193 struct cgraph_node *n;
2194
2195 if (!odr_hash)
2196 return;
2197 free_polymorphic_call_targets_hash ();
2198 timevar_push (TV_IPA_INHERITANCE);
2199   /* We reconstruct the graph starting from the types of all methods seen
2200      in the unit.  */
2201 FOR_EACH_FUNCTION (n)
2202 if (DECL_VIRTUAL_P (n->decl)
2203 && !n->definition
2204 && symtab_real_symbol_p (n))
2205 get_odr_type (method_class_type (TYPE_MAIN_VARIANT (TREE_TYPE (n->decl))),
2206 true);
2207 timevar_pop (TV_IPA_INHERITANCE);
2208 }
2209
2210
2211 /* Return true if N looks like a likely target of a polymorphic call.
2212    Rule out cxa_pure_virtual, noreturns, functions declared cold, and
2213    other obvious cases.  */
2214
2215 bool
2216 likely_target_p (struct cgraph_node *n)
2217 {
2218 int flags;
2219 /* cxa_pure_virtual and similar things are not likely. */
2220 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
2221 return false;
2222 flags = flags_from_decl_or_type (n->decl);
2223 if (flags & ECF_NORETURN)
2224 return false;
2225 if (lookup_attribute ("cold",
2226 DECL_ATTRIBUTES (n->decl)))
2227 return false;
2228 if (n->frequency < NODE_FREQUENCY_NORMAL)
2229 return false;
2230   /* If there are no live virtual tables referring to the target, the only
2231      way the target can be called is via an instance coming from another
2232      compilation unit; speculative devirtualization is built around the
2233      assumption that this won't happen.  */
2234 if (!referenced_from_vtable_p (n))
2235 return false;
2236 return true;
2237 }
2238
2239 /* The ipa-devirt pass.
2240    When a polymorphic call has only one likely target in the unit,
2241    turn it into a speculative call.  */
2242
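/* For illustration, the transformation is conceptually (hypothetical
   source, not actual GIMPLE):

     obj->f ();			   // polymorphic (indirect) call

   becomes

     if (obj->vptr[token] == B::f) // compare against the single likely target
       B::f (obj);		   // direct call; later inlinable
     else
       obj->f ();		   // keep the indirect call as a fallback

   where B::f is the one likely target found.  cgraph_turn_edge_to_speculative
   below creates the direct edge with 8/10 of the original count and
   frequency.  */
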
2243 static unsigned int
2244 ipa_devirt (void)
2245 {
2246 struct cgraph_node *n;
2247 struct pointer_set_t *bad_call_targets = pointer_set_create ();
2248 struct cgraph_edge *e;
2249
2250 int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
2251 int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
2252 int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
2253
2254 FOR_EACH_DEFINED_FUNCTION (n)
2255 {
2256 bool update = false;
2257 if (dump_file && n->indirect_calls)
2258	fprintf (dump_file, "\n\nProcessing function %s/%i\n",
2259 n->name (), n->order);
2260 for (e = n->indirect_calls; e; e = e->next_callee)
2261 if (e->indirect_info->polymorphic)
2262 {
2263 struct cgraph_node *likely_target = NULL;
2264 void *cache_token;
2265 bool final;
2266 int nonconstruction_targets;
2267	    vec <cgraph_node *> targets
2268 = possible_polymorphic_call_targets
2269 (e, &final, &cache_token, &nonconstruction_targets);
2270 unsigned int i;
2271
2272 if (dump_file)
2273 dump_possible_polymorphic_call_targets
2274 (dump_file, e);
2275
2276 npolymorphic++;
2277
2278 if (!cgraph_maybe_hot_edge_p (e))
2279 {
2280 if (dump_file)
2281 fprintf (dump_file, "Call is cold\n\n");
2282 ncold++;
2283 continue;
2284 }
2285 if (e->speculative)
2286 {
2287 if (dump_file)
2288		fprintf (dump_file, "Call is already speculated\n\n");
2289 nspeculated++;
2290
2291 /* When dumping see if we agree with speculation. */
2292 if (!dump_file)
2293 continue;
2294 }
2295 if (pointer_set_contains (bad_call_targets,
2296 cache_token))
2297 {
2298 if (dump_file)
2299 fprintf (dump_file, "Target list is known to be useless\n\n");
2300 nmultiple++;
2301 continue;
2302 }
2303 for (i = 0; i < targets.length (); i++)
2304 if (likely_target_p (targets[i]))
2305 {
2306 if (likely_target)
2307 {
2308 if (i < (unsigned) nonconstruction_targets)
2309 {
2310 likely_target = NULL;
2311 if (dump_file)
2312 fprintf (dump_file, "More than one likely target\n\n");
2313 nmultiple++;
2314 }
2315 break;
2316 }
2317 likely_target = targets[i];
2318 }
2319 if (!likely_target)
2320 {
2321 pointer_set_insert (bad_call_targets, cache_token);
2322 continue;
2323 }
2324 /* This is reached only when dumping; check if we agree or disagree
2325 with the speculation. */
2326 if (e->speculative)
2327 {
2328 struct cgraph_edge *e2;
2329 struct ipa_ref *ref;
2330 cgraph_speculative_call_info (e, e2, e, ref);
2331 if (cgraph_function_or_thunk_node (e2->callee, NULL)
2332 == cgraph_function_or_thunk_node (likely_target, NULL))
2333 {
2334 fprintf (dump_file, "We agree with speculation\n\n");
2335 nok++;
2336 }
2337 else
2338 {
2339 fprintf (dump_file, "We disagree with speculation\n\n");
2340 nwrong++;
2341 }
2342 continue;
2343 }
2344 if (!likely_target->definition)
2345 {
2346 if (dump_file)
2347		fprintf (dump_file, "Target is not a definition\n\n");
2348 nnotdefined++;
2349 continue;
2350 }
2351	  /* Do not introduce new references to external symbols.  While we
2352	     could handle these just as well, programs are commonly linked
2353	     incorrectly, with headers defining methods they are not actually
2354	     linked with.  */
2355 if (DECL_EXTERNAL (likely_target->decl))
2356 {
2357 if (dump_file)
2358 fprintf (dump_file, "Target is external\n\n");
2359 nexternal++;
2360 continue;
2361 }
2362 /* Don't use an implicitly-declared destructor (c++/58678). */
2363 struct cgraph_node *non_thunk_target
2364 = cgraph_function_node (likely_target);
2365 if (DECL_ARTIFICIAL (non_thunk_target->decl)
2366 && DECL_COMDAT (non_thunk_target->decl))
2367 {
2368 if (dump_file)
2369 fprintf (dump_file, "Target is artificial\n\n");
2370 nartificial++;
2371 continue;
2372 }
2373 if (cgraph_function_body_availability (likely_target)
2374 <= AVAIL_OVERWRITABLE
2375 && symtab_can_be_discarded (likely_target))
2376 {
2377 if (dump_file)
2378 fprintf (dump_file, "Target is overwritable\n\n");
2379 noverwritable++;
2380 continue;
2381 }
2382 else if (dbg_cnt (devirt))
2383 {
2384 if (dump_enabled_p ())
2385 {
2386 location_t locus = gimple_location_safe (e->call_stmt);
2387 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
2388 "speculatively devirtualizing call in %s/%i to %s/%i\n",
2389 n->name (), n->order,
2390 likely_target->name (),
2391 likely_target->order);
2392 }
2393 if (!symtab_can_be_discarded (likely_target))
2394 {
2395 cgraph_node *alias;
2396 alias = cgraph (symtab_nonoverwritable_alias
2397 (likely_target));
2398 if (alias)
2399 likely_target = alias;
2400 }
2401 nconverted++;
2402 update = true;
2403 cgraph_turn_edge_to_speculative
2404 (e, likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
2405 }
2406 }
2407 if (update)
2408 inline_update_overall_summary (n);
2409 }
2410 pointer_set_destroy (bad_call_targets);
2411
2412 if (dump_file)
2413 fprintf (dump_file,
2414 "%i polymorphic calls, %i devirtualized,"
2415 " %i speculatively devirtualized, %i cold\n"
2416 "%i have multiple targets, %i overwritable,"
2417 " %i already speculated (%i agree, %i disagree),"
2418 " %i external, %i not defined, %i artificial\n",
2419 npolymorphic, ndevirtualized, nconverted, ncold,
2420 nmultiple, noverwritable, nspeculated, nok, nwrong,
2421 nexternal, nnotdefined, nartificial);
2422 return ndevirtualized ? TODO_remove_functions : 0;
2423 }
2424
2425 namespace {
2426
2427 const pass_data pass_data_ipa_devirt =
2428 {
2429 IPA_PASS, /* type */
2430 "devirt", /* name */
2431 OPTGROUP_NONE, /* optinfo_flags */
2432 TV_IPA_DEVIRT, /* tv_id */
2433 0, /* properties_required */
2434 0, /* properties_provided */
2435 0, /* properties_destroyed */
2436 0, /* todo_flags_start */
2437 ( TODO_dump_symtab ), /* todo_flags_finish */
2438 };
2439
2440 class pass_ipa_devirt : public ipa_opt_pass_d
2441 {
2442 public:
2443 pass_ipa_devirt (gcc::context *ctxt)
2444 : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
2445 NULL, /* generate_summary */
2446 NULL, /* write_summary */
2447 NULL, /* read_summary */
2448 NULL, /* write_optimization_summary */
2449 NULL, /* read_optimization_summary */
2450 NULL, /* stmt_fixup */
2451 0, /* function_transform_todo_flags_start */
2452 NULL, /* function_transform */
2453 NULL) /* variable_transform */
2454 {}
2455
2456 /* opt_pass methods: */
2457 virtual bool gate (function *)
2458 {
2459 return (flag_devirtualize
2460 && flag_devirtualize_speculatively
2461 && optimize);
2462 }
2463
2464 virtual unsigned int execute (function *) { return ipa_devirt (); }
2465
2466 }; // class pass_ipa_devirt
2467
2468 } // anon namespace
2469
2470 ipa_opt_pass_d *
2471 make_pass_ipa_devirt (gcc::context *ctxt)
2472 {
2473 return new pass_ipa_devirt (ctxt);
2474 }
2475
2476 #include "gt-ipa-devirt.h"