1 /* Basic IPA utilities for type inheritance graph construction and
2 devirtualization.
3 Copyright (C) 2013-2014 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* Brief vocabulary:
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
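
     As an illustration (a made-up example), a minimal violation of
     rule 3 across two translation units:

       // tu1.cc
       struct S { int a;  virtual int f (); };
       // tu2.cc
       struct S { long a; virtual int f (); };  // different layout

     Linking both into one program is invalid; add_type_duplicate
     below detects and warns about exactly this situation.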
38
39 OTR = OBJ_TYPE_REF
40 This is the Gimple representation of type information of a polymorphic call.
41 It contains two parameters:
42 otr_type is the type of the class whose method is called.
43 otr_token is the index into the virtual table where the address is taken.
44
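     For example (an illustrative sketch), given

       struct A { virtual int f (); };
       int call (A *a) { return a->f (); }

     the call is printed in GIMPLE dumps roughly as
     OBJ_TYPE_REF(_2;a_1->0) (a_1): otr_type is A and otr_token is 0,
     since f occupies the first slot of A's virtual table.
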
45 BINFO
46 This is the type inheritance information attached to each tree
47 RECORD_TYPE by the C++ frontend. It provides information about base
48 types and virtual tables.
49
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
52 BINFO_VTABLE.
53
54 Base types of a given type are enumerated by BINFO_BASE_BINFO
55 vector. Members of this vector are not the BINFOs associated
56 with the base types. Rather they are new copies of those BINFOs
57 (base BINFOs). Their virtual tables may differ from the
58 virtual table of the base type. Also, BINFO_OFFSET specifies the
59 offset of the base within the type.
60
61 In the case of single inheritance, the virtual table is shared
62 and BINFO_VTABLE of the base BINFO is NULL. In the case of multiple
63 inheritance, the individual virtual tables are pointed to by
64 BINFO_VTABLE of the base binfos (which differs from BINFO_VTABLE of
65 the binfo associated with the base type).
66
67 BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns the proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
70 base type.
71
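     A small made-up example of the layouts described above:

       struct A { virtual void f (); };
       struct B { virtual void g (); };
       struct C : A, B { };

     TYPE_BINFO (C) has two base binfos.  The one for A shares C's
     primary virtual table (so its BINFO_VTABLE is NULL), while the
     one for B has a non-zero BINFO_OFFSET and its own BINFO_VTABLE
     pointing to C's secondary virtual table.
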
72 token
73 This is the index of a virtual method in the virtual table associated
74 with the type defining it. The token can be looked up from OBJ_TYPE_REF
75 or from DECL_VINDEX of a given virtual method.
76
77 polymorphic (indirect) call
78 This is the callgraph representation of a virtual method call. Every
79 polymorphic call contains otr_type and otr_token taken from
80 original OBJ_TYPE_REF at callgraph construction time.
81
82 What we do here:
83
84 build_type_inheritance_graph triggers a construction of the type inheritance
85 graph.
86
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
89 inserted into the graph. Also, types without virtual methods are not
90 represented at all, though it would be easy to add them.
91
92 The inheritance graph is represented as follows:
93
94 Vertices are structures odr_type. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by the ODR rule.
96 (The multiple type nodes appear only with link-time optimization.)
97
98 Edges are represented by odr_type->base and odr_type->derived_types.
99 At the moment we do not track offsets of types for multiple inheritance.
100 Adding this is easy.
101
102 possible_polymorphic_call_targets returns, given the parameters found in
103 an indirect polymorphic edge, all possible polymorphic call targets of the call.
104
105 pass_ipa_devirt performs simple speculative devirtualization.
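
     Schematically (an illustrative sketch, not the actual IL), a call
     a->f () whose only plausible target in the unit is A::f can be
     rewritten into a speculative direct call

       if (a->vptr == &vtable_of_A)   // hypothetical pseudo-check
         A::f (a);
       else
         a->f ();

     which later passes may inline and further simplify.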
106 */
107
108 #include "config.h"
109 #include "system.h"
110 #include "coretypes.h"
111 #include "tm.h"
112 #include "tree.h"
113 #include "print-tree.h"
114 #include "calls.h"
115 #include "cgraph.h"
116 #include "expr.h"
117 #include "tree-pass.h"
118 #include "pointer-set.h"
119 #include "target.h"
120 #include "hash-table.h"
121 #include "tree-pretty-print.h"
122 #include "ipa-utils.h"
123 #include "tree-ssa-alias.h"
124 #include "internal-fn.h"
125 #include "gimple-fold.h"
126 #include "gimple-expr.h"
127 #include "gimple.h"
128 #include "ipa-inline.h"
129 #include "diagnostic.h"
130 #include "tree-dfa.h"
131 #include "demangle.h"
132 #include "dbgcnt.h"
133
134 static bool odr_violation_reported = false;
135
136 /* Dummy polymorphic call context. */
137
138 const ipa_polymorphic_call_context ipa_dummy_polymorphic_call_context
139 = {0, NULL, false, true};
140
141 /* Pointer set of all call targets appearing in the cache. */
142 static pointer_set_t *cached_polymorphic_call_targets;
143
144 /* The node of the type inheritance graph. For each type unique in
145 the One Definition Rule (ODR) sense, we produce one node linking all
146 main variants of types equivalent to it, bases and derived types. */
147
148 struct GTY(()) odr_type_d
149 {
150 /* Leader type. */
151 tree type;
152 /* All bases; built only for main variants of types */
153 vec<odr_type> GTY((skip)) bases;
154 /* All derived types with virtual methods seen in the unit;
155 built only for main variants of types. */
156 vec<odr_type> GTY((skip)) derived_types;
157
158 /* All equivalent types, if more than one. */
159 vec<tree, va_gc> *types;
160 /* Set of all equivalent types, if NON-NULL. */
161 pointer_set_t * GTY((skip)) types_set;
162
163 /* Unique ID indexing the type in odr_types array. */
164 int id;
165 /* Is it in anonymous namespace? */
166 bool anonymous_namespace;
167 /* Do we know about all derivations of given type? */
168 bool all_derivations_known;
169 /* Did we report ODR violation here? */
170 bool odr_violated;
171 };
172
173
174 /* Return true if BINFO corresponds to a type with virtual methods.
175
176 Every type has several BINFOs. One is the BINFO associated with the type
177 itself, while the others represent its occurrences as a base of derived
178 types. The BINFOs representing bases do not have BINFO_VTABLE pointer set
179 in the single inheritance case (because vtables are shared). Look up the
180 BINFO of the type and check the presence of its vtable. */
181
182 static inline bool
183 polymorphic_type_binfo_p (tree binfo)
184 {
185 /* See if BINFO's type has a virtual table associated with it. */
186 return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo)));
187 }
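
/* For instance (made-up types), TYPE_BINFO of
   "struct A { virtual void f (); }" has BINFO_VTABLE set, so A is
   polymorphic here, while a plain "struct P { int x; }" has no
   virtual table and is not.  */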
188
189 /* Return TRUE if all derived types of T are known and thus
190 we may consider the walk of derived types complete.
191
192 This is typically true only for final types, anonymous namespace types and types
193 defined within functions (that may be COMDAT and thus shared across units,
194 but with the same set of derived types). */
195
196 static bool
197 type_all_derivations_known_p (tree t)
198 {
199 if (TYPE_FINAL_P (t))
200 return true;
201 if (flag_ltrans)
202 return false;
203 if (type_in_anonymous_namespace_p (t))
204 return true;
205 return (decl_function_context (TYPE_NAME (t)) != NULL);
206 }
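
/* For example (made-up types), all derivations are known for

     struct F final { virtual void f (); };          // TYPE_FINAL_P
     namespace { struct A { virtual void f (); }; }  // anonymous

   but not for an exported, non-final class, which another translation
   unit may still derive from.  */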
207
208 /* Return TRUE if type's constructors are all visible. */
209
210 static bool
211 type_all_ctors_visible_p (tree t)
212 {
213 return !flag_ltrans
214 && cgraph_state >= CGRAPH_STATE_CONSTRUCTION
215 /* We can not always use type_all_derivations_known_p.
216 For function-local types we must assume the case where
217 the function is COMDAT and shared between units.
218
219 TODO: These cases are quite easy to get, but we need
220 to keep track of C++ privatizing via -fno-weak
221 as well as the IPA privatizing. */
222 && type_in_anonymous_namespace_p (t);
223 }
224
225 /* Return TRUE if type may have instances. */
226
227 static bool
228 type_possibly_instantiated_p (tree t)
229 {
230 tree vtable;
231 varpool_node *vnode;
232
233 /* TODO: Add abstract types here. */
234 if (!type_all_ctors_visible_p (t))
235 return true;
236
237 vtable = BINFO_VTABLE (TYPE_BINFO (t));
238 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
239 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
240 vnode = varpool_get_node (vtable);
241 return vnode && vnode->definition;
242 }
243
244 /* One Definition Rule hashtable helpers. */
245
246 struct odr_hasher
247 {
248 typedef odr_type_d value_type;
249 typedef union tree_node compare_type;
250 static inline hashval_t hash (const value_type *);
251 static inline bool equal (const value_type *, const compare_type *);
252 static inline void remove (value_type *);
253 };
254
255 /* Return the type that was declared with T's name so that T is a
256 qualified variant of it. */
257
258 static inline tree
259 main_odr_variant (const_tree t)
260 {
261 if (TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
262 return TREE_TYPE (TYPE_NAME (t));
263 /* Unnamed types and non-C++ produced types can be compared by variants. */
264 else
265 return TYPE_MAIN_VARIANT (t);
266 }
267
268 /* Produce hash based on type name. */
269
270 static hashval_t
271 hash_type_name (tree t)
272 {
273 gcc_checking_assert (main_odr_variant (t) == t);
274
275 /* If not in LTO, all main variants are unique, so we can do
276 pointer hash. */
277 if (!in_lto_p)
278 return htab_hash_pointer (t);
279
280 /* Anonymous types are unique. */
281 if (type_in_anonymous_namespace_p (t))
282 return htab_hash_pointer (t);
283
284 /* For polymorphic types, we can simply hash the virtual table. */
285 if (TREE_CODE (t) == RECORD_TYPE
286 && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
287 {
288 tree v = BINFO_VTABLE (TYPE_BINFO (t));
289 hashval_t hash = 0;
290
291 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
292 {
293 hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
294 v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
295 }
296
297 v = DECL_ASSEMBLER_NAME (v);
298 hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
299 return hash;
300 }
301
302 /* Rest is not implemented yet. */
303 gcc_unreachable ();
304 }
305
306 /* Return the computed hashcode for ODR_TYPE. */
307
308 inline hashval_t
309 odr_hasher::hash (const value_type *odr_type)
310 {
311 return hash_type_name (odr_type->type);
312 }
313
314 /* For languages with One Definition Rule, work out if
315 types are the same based on their name.
316
317 This is non-trivial for LTO where minor differences in
318 the type representation may have prevented type merging
319 from merging two copies of an otherwise equivalent type.
320
321 Until we start streaming mangled type names, this function works
322 only for polymorphic types. */
323
324 bool
325 types_same_for_odr (const_tree type1, const_tree type2)
326 {
327 gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));
328
329 type1 = main_odr_variant (type1);
330 type2 = main_odr_variant (type2);
331
332 if (type1 == type2)
333 return true;
334
335 if (!in_lto_p)
336 return false;
337
338 /* Check for anonymous namespaces. Those have !TREE_PUBLIC
339 on the corresponding TYPE_STUB_DECL. */
340 if (type_in_anonymous_namespace_p (type1)
341 || type_in_anonymous_namespace_p (type2))
342 return false;
343
344 /* At the moment we have no way to establish ODR equivalence at LTO
345 other than comparing virtual table pointers of polymorphic types.
346 Eventually we should start saving mangled names in TYPE_NAME.
347 Then this condition will become non-trivial. */
348
349 if (TREE_CODE (type1) == RECORD_TYPE
350 && TYPE_BINFO (type1) && TYPE_BINFO (type2)
351 && BINFO_VTABLE (TYPE_BINFO (type1))
352 && BINFO_VTABLE (TYPE_BINFO (type2)))
353 {
354 tree v1 = BINFO_VTABLE (TYPE_BINFO (type1));
355 tree v2 = BINFO_VTABLE (TYPE_BINFO (type2));
356 gcc_assert (TREE_CODE (v1) == POINTER_PLUS_EXPR
357 && TREE_CODE (v2) == POINTER_PLUS_EXPR);
358 return (operand_equal_p (TREE_OPERAND (v1, 1),
359 TREE_OPERAND (v2, 1), 0)
360 && DECL_ASSEMBLER_NAME
361 (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
362 == DECL_ASSEMBLER_NAME
363 (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
364 }
365 gcc_unreachable ();
366 }
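
/* As an illustration, two copies of "struct A" streamed in from
   different LTO units compare equal above when both BINFO_VTABLEs are
   &_ZTV1A + 16 (same assembler name, same offset), even if tree
   merging failed to unify the RECORD_TYPE nodes themselves
   (hypothetical class A; the offset is target-dependent).  */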
367
368
369 /* Compare types T1 and T2 and return true if they are
370 equivalent. */
371
372 inline bool
373 odr_hasher::equal (const value_type *t1, const compare_type *ct2)
374 {
375 tree t2 = const_cast <tree> (ct2);
376
377 gcc_checking_assert (main_odr_variant (t2) == t2);
378 if (t1->type == t2)
379 return true;
380 if (!in_lto_p)
381 return false;
382 return types_same_for_odr (t1->type, t2);
383 }
384
385 /* Free ODR type V. */
386
387 inline void
388 odr_hasher::remove (value_type *v)
389 {
390 v->bases.release ();
391 v->derived_types.release ();
392 if (v->types_set)
393 pointer_set_destroy (v->types_set);
394 ggc_free (v);
395 }
396
397 /* ODR type hash used to lookup ODR type based on tree type node. */
398
399 typedef hash_table<odr_hasher> odr_hash_type;
400 static odr_hash_type *odr_hash;
401
402 /* ODR types are also stored in the ODR_TYPE vector to allow consistent
403 walking. Bases appear before derived types. The vector is garbage
404 collected so we won't end up visiting empty types. */
405
406 static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
407 #define odr_types (*odr_types_ptr)
408
409 /* Set TYPE_BINFO of TYPE and its variants to BINFO. */
410 void
411 set_type_binfo (tree type, tree binfo)
412 {
413 for (; type; type = TYPE_NEXT_VARIANT (type))
414 if (COMPLETE_TYPE_P (type))
415 TYPE_BINFO (type) = binfo;
416 else
417 gcc_assert (!TYPE_BINFO (type));
418 }
419
420 /* TYPE is equivalent to VAL by ODR, but its tree representation differs
421 from VAL->type. This may happen in LTO where tree merging did not merge
422 all variants of the same type. It may or may not mean an ODR violation.
423 Add it to the list of duplicates and warn on some violations. */
424
425 static bool
426 add_type_duplicate (odr_type val, tree type)
427 {
428 bool build_bases = false;
429 if (!val->types_set)
430 val->types_set = pointer_set_create ();
431
432 /* Always prefer complete type to be the leader. */
433 if (!COMPLETE_TYPE_P (val->type)
434 && COMPLETE_TYPE_P (type))
435 {
436 tree tmp = type;
437
438 build_bases = true;
439 type = val->type;
440 val->type = tmp;
441 }
442
443 /* See if this duplicate is new. */
444 if (!pointer_set_insert (val->types_set, type))
445 {
446 bool merge = true;
447 bool base_mismatch = false;
448 bool warned = false;
449 unsigned int i,j;
450
451 gcc_assert (in_lto_p);
452 vec_safe_push (val->types, type);
453
454 /* First we compare memory layout. */
455 if (!types_compatible_p (val->type, type))
456 {
457 merge = false;
458 odr_violation_reported = true;
459 if (BINFO_VTABLE (TYPE_BINFO (val->type))
460 && warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
461 "type %qD violates one definition rule ",
462 type))
463 {
464 inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
465 "a type with the same name but different layout is "
466 "defined in another translation unit");
467 warned = true;
468 }
469 val->odr_violated = true;
470 if (cgraph_dump_file)
471 {
472 fprintf (cgraph_dump_file, "ODR violation or merging or ODR type bug?\n");
473
474 print_node (cgraph_dump_file, "", val->type, 0);
475 putc ('\n',cgraph_dump_file);
476 print_node (cgraph_dump_file, "", type, 0);
477 putc ('\n',cgraph_dump_file);
478 }
479 }
480
481 /* Next sanity check that bases are the same. If not, we will end
482 up producing wrong answers. */
483 if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
484 && TREE_CODE (val->type) == RECORD_TYPE
485 && TREE_CODE (type) == RECORD_TYPE
486 && TYPE_BINFO (val->type) && TYPE_BINFO (type))
487 {
488 for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
489 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type), i)))
490 {
491 odr_type base = get_odr_type
492 (BINFO_TYPE
493 (BINFO_BASE_BINFO (TYPE_BINFO (type),
494 i)),
495 true);
496 if (val->bases.length () <= j || val->bases[j] != base)
497 base_mismatch = true;
498 j++;
499 }
500 if (base_mismatch)
501 {
502 merge = false;
503 odr_violation_reported = true;
504
505 if (!warned
506 && warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
507 "type %qD violates one definition rule ",
508 type))
509 inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
510 "a type with the same name but different bases is "
511 "defined in another translation unit");
512 val->odr_violated = true;
513 if (cgraph_dump_file)
514 {
515 fprintf (cgraph_dump_file, "ODR bse violation or merging bug?\n");
516
517 print_node (cgraph_dump_file, "", val->type, 0);
518 putc ('\n',cgraph_dump_file);
519 print_node (cgraph_dump_file, "", type, 0);
520 putc ('\n',cgraph_dump_file);
521 }
522 }
523 }
524
525 /* Regularize things a little. During LTO the same types may come with
526 different BINFOs: either because their virtual table was
527 not merged by tree merging and only later at decl merging, or
528 because one type comes with an external vtable while the other
529 comes with an internal one. We want to merge equivalent binfos to
530 conserve memory and streaming overhead.
531
532 The external vtables are more harmful: they contain references
533 to external declarations of methods that may be defined in the
534 merged LTO unit. For this reason we absolutely need to remove
535 them and replace them with internal variants. Not doing so will lead
536 to incomplete answers from possible_polymorphic_call_targets. */
537 if (!flag_ltrans && merge
538 && TREE_CODE (val->type) == RECORD_TYPE
539 && TREE_CODE (type) == RECORD_TYPE
540 && TYPE_BINFO (val->type) && TYPE_BINFO (type)
541 && TYPE_MAIN_VARIANT (type) == type
542 && TYPE_MAIN_VARIANT (val->type) == val->type
543 && BINFO_VTABLE (TYPE_BINFO (val->type))
544 && BINFO_VTABLE (TYPE_BINFO (type)))
545 {
546 tree master_binfo = TYPE_BINFO (val->type);
547 tree v1 = BINFO_VTABLE (master_binfo);
548 tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
549
550 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
551 {
552 gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
553 && operand_equal_p (TREE_OPERAND (v1, 1),
554 TREE_OPERAND (v2, 1), 0));
555 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
556 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
557 }
558 gcc_assert (DECL_ASSEMBLER_NAME (v1)
559 == DECL_ASSEMBLER_NAME (v2));
560
561 if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
562 {
563 unsigned int i;
564
565 set_type_binfo (val->type, TYPE_BINFO (type));
566 for (i = 0; i < val->types->length (); i++)
567 {
568 if (TYPE_BINFO ((*val->types)[i])
569 == master_binfo)
570 set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
571 }
572 BINFO_TYPE (TYPE_BINFO (type)) = val->type;
573 }
574 else
575 set_type_binfo (type, master_binfo);
576 }
577 }
578 return build_bases;
579 }
580
581 /* Get ODR type hash entry for TYPE. If INSERT is true, create
582 possibly new entry. */
583
584 odr_type
585 get_odr_type (tree type, bool insert)
586 {
587 odr_type_d **slot;
588 odr_type val;
589 hashval_t hash;
590 bool build_bases = false;
591 bool insert_to_odr_array = false;
592 int base_id = -1;
593
594 type = main_odr_variant (type);
595
596 hash = hash_type_name (type);
597 slot
598 = odr_hash->find_slot_with_hash (type, hash, insert ? INSERT : NO_INSERT);
599 if (!slot)
600 return NULL;
601
602 /* See if we already have entry for type. */
603 if (*slot)
604 {
605 val = *slot;
606
607 /* With LTO we need to support multiple tree representation of
608 the same ODR type. */
609 if (val->type != type)
610 build_bases = add_type_duplicate (val, type);
611 }
612 else
613 {
614
615 val = ggc_cleared_alloc<odr_type_d> ();
616 val->type = type;
617 val->bases = vNULL;
618 val->derived_types = vNULL;
619 val->anonymous_namespace = type_in_anonymous_namespace_p (type);
620 build_bases = COMPLETE_TYPE_P (val->type);
621 insert_to_odr_array = true;
622 }
623
624 if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
625 && type == TYPE_MAIN_VARIANT (type))
626 {
627 tree binfo = TYPE_BINFO (type);
628 unsigned int i;
629
630 gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) == type);
631
632 val->all_derivations_known = type_all_derivations_known_p (type);
633 *slot = val;
634 for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
635 /* For now record only polymorphic types. Others are
636 pointless for devirtualization and we can not precisely
637 determine ODR equivalence of these during LTO. */
638 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
639 {
640 odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
641 i)),
642 true);
643 gcc_assert (TYPE_MAIN_VARIANT (base->type) == base->type);
644 base->derived_types.safe_push (val);
645 val->bases.safe_push (base);
646 if (base->id > base_id)
647 base_id = base->id;
648 }
649 }
650 /* Ensure that type always appears after bases. */
651 if (insert_to_odr_array)
652 {
653 if (odr_types_ptr)
654 val->id = odr_types.length ();
655 vec_safe_push (odr_types_ptr, val);
656 }
657 else if (base_id > val->id)
658 {
659 odr_types[val->id] = 0;
660 /* Be sure we did not record any derived types; these may need
661 renumbering too. */
662 gcc_assert (val->derived_types.length() == 0);
663 if (odr_types_ptr)
664 val->id = odr_types.length ();
665 vec_safe_push (odr_types_ptr, val);
666 }
667 return val;
668 }
669
670 /* Dump ODR type T and all its derived types. INDENT specifies indentation for
671 recursive printing. */
672
673 static void
674 dump_odr_type (FILE *f, odr_type t, int indent=0)
675 {
676 unsigned int i;
677 fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
678 print_generic_expr (f, t->type, TDF_SLIM);
679 fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
680 fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
681 if (TYPE_NAME (t->type))
682 {
683 fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
684 DECL_SOURCE_FILE (TYPE_NAME (t->type)),
685 DECL_SOURCE_LINE (TYPE_NAME (t->type)));
686 }
687 if (t->bases.length ())
688 {
689 fprintf (f, "%*s base odr type ids: ", indent * 2, "");
690 for (i = 0; i < t->bases.length (); i++)
691 fprintf (f, " %i", t->bases[i]->id);
692 fprintf (f, "\n");
693 }
694 if (t->derived_types.length ())
695 {
696 fprintf (f, "%*s derived types:\n", indent * 2, "");
697 for (i = 0; i < t->derived_types.length (); i++)
698 dump_odr_type (f, t->derived_types[i], indent + 1);
699 }
700 fprintf (f, "\n");
701 }
702
703 /* Dump the type inheritance graph. */
704
705 static void
706 dump_type_inheritance_graph (FILE *f)
707 {
708 unsigned int i;
709 if (!odr_types_ptr)
710 return;
711 fprintf (f, "\n\nType inheritance graph:\n");
712 for (i = 0; i < odr_types.length (); i++)
713 {
714 if (odr_types[i] && odr_types[i]->bases.length () == 0)
715 dump_odr_type (f, odr_types[i]);
716 }
717 for (i = 0; i < odr_types.length (); i++)
718 {
719 if (odr_types[i] && odr_types[i]->types && odr_types[i]->types->length ())
720 {
721 unsigned int j;
722 fprintf (f, "Duplicate tree types for odr type %i\n", i);
723 print_node (f, "", odr_types[i]->type, 0);
724 for (j = 0; j < odr_types[i]->types->length (); j++)
725 {
726 tree t;
727 fprintf (f, "duplicate #%i\n", j);
728 print_node (f, "", (*odr_types[i]->types)[j], 0);
729 t = (*odr_types[i]->types)[j];
730 while (TYPE_P (t) && TYPE_CONTEXT (t))
731 {
732 t = TYPE_CONTEXT (t);
733 print_node (f, "", t, 0);
734 }
735 putc ('\n',f);
736 }
737 }
738 }
739 }
740
741 /* Given method type T, return the type of the class it belongs to.
742 Look up the this pointer and get its type. */
743
744 tree
745 method_class_type (const_tree t)
746 {
747 gcc_assert (TREE_CODE (t) == METHOD_TYPE);
748 tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
749
750 return TREE_TYPE (first_parm_type);
751 }
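
/* For example, for a method "int A::f (int)" the METHOD_TYPE argument
   list is (A *, int), so the function above returns the RECORD_TYPE
   for A (an illustrative note with a made-up method).  */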
752
753 /* Initialize IPA devirt and build inheritance tree graph. */
754
755 void
756 build_type_inheritance_graph (void)
757 {
758 struct symtab_node *n;
759 FILE *inheritance_dump_file;
760 int flags;
761
762 if (odr_hash)
763 return;
764 timevar_push (TV_IPA_INHERITANCE);
765 inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
766 odr_hash = new odr_hash_type (23);
767
768 /* We reconstruct the graph starting from the types of all methods seen in
769 the unit. */
770 FOR_EACH_SYMBOL (n)
771 if (is_a <cgraph_node *> (n)
772 && DECL_VIRTUAL_P (n->decl)
773 && symtab_real_symbol_p (n))
774 get_odr_type (TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (n->decl))),
775 true);
776
777 /* Look also for virtual tables of types that do not define any methods.
778
779 We need it in the case where class B has a virtual base of class A
780 re-defining its virtual method and there is a class C with no virtual
781 methods with B as a virtual base.
782
783 Here we output B's virtual method in two variants - for non-virtual
784 and virtual inheritance. B's virtual table has the non-virtual version,
785 while C's has the virtual one.
786
787 For this reason we need to know about C in order to include both
788 variants of B. More correctly, record_target_from_binfo should
789 add both variants of the method when walking B, but we have no
790 link between them.
791
792 We rely on the fact that either the method is exported and thus we
793 assume it is called externally, or C is in an anonymous namespace and
794 thus we will see its vtable. */
795
796 else if (is_a <varpool_node *> (n)
797 && DECL_VIRTUAL_P (n->decl)
798 && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
799 && TYPE_BINFO (DECL_CONTEXT (n->decl))
800 && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
801 get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true);
802 if (inheritance_dump_file)
803 {
804 dump_type_inheritance_graph (inheritance_dump_file);
805 dump_end (TDI_inheritance, inheritance_dump_file);
806 }
807 timevar_pop (TV_IPA_INHERITANCE);
808 }
809
810 /* Return true if NODE has a reference from a live virtual table
811 (and thus can be a destination of a polymorphic call).
812 Be conservatively correct when the callgraph is not built or
813 if the method may be referred to externally. */
814
815 static bool
816 referenced_from_vtable_p (struct cgraph_node *node)
817 {
818 int i;
819 struct ipa_ref *ref;
820 bool found = false;
821
822 if (node->externally_visible
823 || node->used_from_other_partition)
824 return true;
825
826 /* Keep this test constant time.
827 It is unlikely this can happen except for the case where speculative
828 devirtualization introduced many speculative edges to this node.
829 In this case the target is very likely alive anyway. */
830 if (node->ref_list.referring.length () > 100)
831 return true;
832
833 /* We need references built. */
834 if (cgraph_state <= CGRAPH_STATE_CONSTRUCTION)
835 return true;
836
837 for (i = 0; node->iterate_referring (i, ref); i++)
838
839 if ((ref->use == IPA_REF_ALIAS
840 && referenced_from_vtable_p (cgraph (ref->referring)))
841 || (ref->use == IPA_REF_ADDR
842 && TREE_CODE (ref->referring->decl) == VAR_DECL
843 && DECL_VIRTUAL_P (ref->referring->decl)))
844 {
845 found = true;
846 break;
847 }
848 return found;
849 }
850
851 /* If TARGET has an associated node, record it in the NODES array.
852 CAN_REFER specifies whether the program can refer to the target directly.
853 If TARGET is unknown (NULL) or it can not be inserted (for example because
854 its body was already removed and there is no way to refer to it), clear
855 COMPLETEP. */
856
857 static void
858 maybe_record_node (vec <cgraph_node *> &nodes,
859 tree target, pointer_set_t *inserted,
860 bool can_refer,
861 bool *completep)
862 {
863 struct cgraph_node *target_node;
864
865 /* cxa_pure_virtual and __builtin_unreachable do not need to be added into
866 the list of targets; the runtime effect of calling them is undefined.
867 Only "real" virtual methods should be accounted for. */
868 if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE)
869 return;
870
871 if (!can_refer)
872 {
873 /* The only case when a method of an anonymous namespace becomes unreferable
874 is when we completely optimized it out. */
875 if (flag_ltrans
876 || !target
877 || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
878 *completep = false;
879 return;
880 }
881
882 if (!target)
883 return;
884
885 target_node = cgraph_get_node (target);
886
887 /* A method can only be called by a polymorphic call if any
888 of the vtables referring to it are alive.
889
890 While this holds for non-anonymous functions, too, there are
891 cases where we want to keep them in the list; for example
892 inline functions with -fno-weak are static, but we still
893 may devirtualize them when instance comes from other unit.
894 The same holds for LTO.
895
896 Currently we ignore these functions in speculative devirtualization.
897 ??? Maybe it would make sense to be more aggressive for LTO even
898 elsewhere. */
899 if (!flag_ltrans
900 && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
901 && (!target_node
902 || !referenced_from_vtable_p (target_node)))
903 ;
904 /* See if TARGET is a useful function we can deal with. */
905 else if (target_node != NULL
906 && (TREE_PUBLIC (target)
907 || DECL_EXTERNAL (target)
908 || target_node->definition)
909 && symtab_real_symbol_p (target_node))
910 {
911 gcc_assert (!target_node->global.inlined_to);
912 gcc_assert (symtab_real_symbol_p (target_node));
913 if (!pointer_set_insert (inserted, target))
914 {
915 pointer_set_insert (cached_polymorphic_call_targets,
916 target_node);
917 nodes.safe_push (target_node);
918 }
919 }
920 else if (completep
921 && (!type_in_anonymous_namespace_p
922 (DECL_CONTEXT (target))
923 || flag_ltrans))
924 *completep = false;
925 }
926
927 /* See if BINFO's type matches OUTER_TYPE. If so, look up the
928 BINFO of the subtype of OTR_TYPE at OFFSET and in that BINFO find
929 the method in the vtable and insert the method into the NODES array,
930 or into BASES_TO_CONSIDER if that array is non-NULL.
931 Otherwise recurse to base BINFOs.
932 This matches what get_binfo_at_offset does, but with the offset
933 being unknown.
934
935 TYPE_BINFOS is a stack of BINFOs of types with defined
936 virtual tables seen on the way from the class type to BINFO.
937
938 MATCHED_VTABLES tracks virtual tables we already did a lookup
939 in for the virtual function. INSERTED tracks nodes we already
940 inserted.
941
942 ANONYMOUS is true if BINFO is part of an anonymous namespace.
943
944 Clear COMPLETEP when we hit an unreferable target.
945 */
946
947 static void
948 record_target_from_binfo (vec <cgraph_node *> &nodes,
949 vec <tree> *bases_to_consider,
950 tree binfo,
951 tree otr_type,
952 vec <tree> &type_binfos,
953 HOST_WIDE_INT otr_token,
954 tree outer_type,
955 HOST_WIDE_INT offset,
956 pointer_set_t *inserted,
957 pointer_set_t *matched_vtables,
958 bool anonymous,
959 bool *completep)
960 {
961 tree type = BINFO_TYPE (binfo);
962 int i;
963 tree base_binfo;
964
965
966 if (BINFO_VTABLE (binfo))
967 type_binfos.safe_push (binfo);
968 if (types_same_for_odr (type, outer_type))
969 {
970 int i;
971 tree type_binfo = NULL;
972
973 /* Look up the BINFO with virtual table. For normal types it is always
974 the last binfo on the stack. */
975 for (i = type_binfos.length () - 1; i >= 0; i--)
976 if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
977 {
978 type_binfo = type_binfos[i];
979 break;
980 }
981 if (BINFO_VTABLE (binfo))
982 type_binfos.pop ();
983 /* If this is a duplicated BINFO for a base shared by virtual inheritance,
984 we may not have its associated vtable. This is not a problem, since
985 we will walk it on the other path. */
986 if (!type_binfo)
987 return;
988 tree inner_binfo = get_binfo_at_offset (type_binfo,
989 offset, otr_type);
990 if (!inner_binfo)
991 {
992 gcc_assert (odr_violation_reported);
993 return;
994 }
995 /* For types in an anonymous namespace, first check if the respective vtable
996 is alive. If not, we know methods of the type can not be called. */
997 if (!flag_ltrans && anonymous)
998 {
999 tree vtable = BINFO_VTABLE (inner_binfo);
1000 varpool_node *vnode;
1001
1002 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
1003 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
1004 vnode = varpool_get_node (vtable);
1005 if (!vnode || !vnode->definition)
1006 return;
1007 }
1008 gcc_assert (inner_binfo);
1009 if (bases_to_consider
1010 ? !pointer_set_contains (matched_vtables, BINFO_VTABLE (inner_binfo))
1011 : !pointer_set_insert (matched_vtables, BINFO_VTABLE (inner_binfo)))
1012 {
1013 bool can_refer;
1014 tree target = gimple_get_virt_method_for_binfo (otr_token,
1015 inner_binfo,
1016 &can_refer);
1017 if (!bases_to_consider)
1018 maybe_record_node (nodes, target, inserted, can_refer, completep);
1019 /* Destructors are never called via construction vtables. */
1020 else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
1021 bases_to_consider->safe_push (target);
1022 }
1023 return;
1024 }
1025
1026 /* Walk bases. */
1027 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1028 /* Walking bases that have no virtual methods is a pointless exercise. */
1029 if (polymorphic_type_binfo_p (base_binfo))
1030 record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
1031 type_binfos,
1032 otr_token, outer_type, offset, inserted,
1033 matched_vtables, anonymous, completep);
1034 if (BINFO_VTABLE (binfo))
1035 type_binfos.pop ();
1036 }
1037
1038 /* Look up virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
1039 of TYPE, insert them into NODES, and recurse into derived nodes.
1040 INSERTED is used to avoid duplicate insertions of methods into NODES.
1041 MATCHED_VTABLES is used to avoid duplicate walking of vtables.
1042 Clear COMPLETEP if an unreferable target is found.
1043
1044 If CONSIDER_CONSTRUCTION is true, record to BASES_TO_CONSIDER
1045 all cases where BASE_SKIPPED is true (because the base is an abstract
1046 class). */
1047
1048 static void
1049 possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
1050 pointer_set_t *inserted,
1051 pointer_set_t *matched_vtables,
1052 tree otr_type,
1053 odr_type type,
1054 HOST_WIDE_INT otr_token,
1055 tree outer_type,
1056 HOST_WIDE_INT offset,
1057 bool *completep,
1058 vec <tree> &bases_to_consider,
1059 bool consider_construction)
1060 {
1061 tree binfo = TYPE_BINFO (type->type);
1062 unsigned int i;
1063 vec <tree> type_binfos = vNULL;
1064 bool possibly_instantiated = type_possibly_instantiated_p (type->type);
1065
1066 /* We may need to consider types w/o instances because of possible derived
1067 types using their methods either directly or via construction vtables.
1068 We are safe to skip them when all derivations are known, since we will
1069 handle them later.
1070 This is done by recording them to the BASES_TO_CONSIDER array. */
1071 if (possibly_instantiated || consider_construction)
1072 {
1073 record_target_from_binfo (nodes,
1074 (!possibly_instantiated
1075 && type_all_derivations_known_p (type->type))
1076 ? &bases_to_consider : NULL,
1077 binfo, otr_type, type_binfos, otr_token,
1078 outer_type, offset,
1079 inserted, matched_vtables,
1080 type->anonymous_namespace, completep);
1081 }
1082 type_binfos.release ();
1083 for (i = 0; i < type->derived_types.length (); i++)
1084 possible_polymorphic_call_targets_1 (nodes, inserted,
1085 matched_vtables,
1086 otr_type,
1087 type->derived_types[i],
1088 otr_token, outer_type, offset, completep,
1089 bases_to_consider, consider_construction);
1090 }
1091
1092 /* Cache of queries for polymorphic call targets.
1093
1094 Enumerating all call targets may get expensive when there are many
1095 polymorphic calls in the program, so we memoize all the previous
1096 queries and avoid duplicated work. */
1097
1098 struct polymorphic_call_target_d
1099 {
1100 HOST_WIDE_INT otr_token;
1101 ipa_polymorphic_call_context context;
1102 odr_type type;
1103 vec <cgraph_node *> targets;
1104 int nonconstruction_targets;
1105 bool complete;
1106 };
1107
1108 /* Polymorphic call target cache helpers. */
1109
1110 struct polymorphic_call_target_hasher
1111 {
1112 typedef polymorphic_call_target_d value_type;
1113 typedef polymorphic_call_target_d compare_type;
1114 static inline hashval_t hash (const value_type *);
1115 static inline bool equal (const value_type *, const compare_type *);
1116 static inline void remove (value_type *);
1117 };
1118
1119 /* Return the computed hashcode for ODR_QUERY. */
1120
1121 inline hashval_t
1122 polymorphic_call_target_hasher::hash (const value_type *odr_query)
1123 {
1124 hashval_t hash;
1125
1126 hash = iterative_hash_host_wide_int
1127 (odr_query->otr_token,
1128 odr_query->type->id);
1129 hash = iterative_hash_hashval_t (TYPE_UID (odr_query->context.outer_type),
1130 hash);
1131 hash = iterative_hash_host_wide_int (odr_query->context.offset, hash);
1132 return iterative_hash_hashval_t
1133 (((int)odr_query->context.maybe_in_construction << 1)
1134 | (int)odr_query->context.maybe_derived_type, hash);
1135 }
1136
1137 /* Compare cache entries T1 and T2. */
1138
1139 inline bool
1140 polymorphic_call_target_hasher::equal (const value_type *t1,
1141 const compare_type *t2)
1142 {
1143 return (t1->type == t2->type && t1->otr_token == t2->otr_token
1144 && t1->context.offset == t2->context.offset
1145 && t1->context.outer_type == t2->context.outer_type
1146 && t1->context.maybe_in_construction
1147 == t2->context.maybe_in_construction
1148 && t1->context.maybe_derived_type == t2->context.maybe_derived_type);
1149 }
1150
1151 /* Remove entry in polymorphic call target cache hash. */
1152
1153 inline void
1154 polymorphic_call_target_hasher::remove (value_type *v)
1155 {
1156 v->targets.release ();
1157 free (v);
1158 }
1159
1160 /* Polymorphic call target query cache. */
1161
1162 typedef hash_table<polymorphic_call_target_hasher>
1163 polymorphic_call_target_hash_type;
1164 static polymorphic_call_target_hash_type *polymorphic_call_target_hash;
1165
1166 /* Destroy polymorphic call target query cache. */
1167
1168 static void
1169 free_polymorphic_call_targets_hash ()
1170 {
1171 if (cached_polymorphic_call_targets)
1172 {
1173 delete polymorphic_call_target_hash;
1174 polymorphic_call_target_hash = NULL;
1175 pointer_set_destroy (cached_polymorphic_call_targets);
1176 cached_polymorphic_call_targets = NULL;
1177 }
1178 }
1179
1180 /* When a virtual function is removed, we may need to flush the cache. */
1181
1182 static void
1183 devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
1184 {
1185 if (cached_polymorphic_call_targets
1186 && pointer_set_contains (cached_polymorphic_call_targets, n))
1187 free_polymorphic_call_targets_hash ();
1188 }
1189
1190 /* Return true when TYPE contains a polymorphic type and thus is interesting
1191 for the devirtualization machinery. */
1192
1193 bool
1194 contains_polymorphic_type_p (const_tree type)
1195 {
1196 type = TYPE_MAIN_VARIANT (type);
1197
1198 if (RECORD_OR_UNION_TYPE_P (type))
1199 {
1200 if (TYPE_BINFO (type)
1201 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
1202 return true;
1203 for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1204 if (TREE_CODE (fld) == FIELD_DECL
1205 && !DECL_ARTIFICIAL (fld)
1206 && contains_polymorphic_type_p (TREE_TYPE (fld)))
1207 return true;
1208 return false;
1209 }
1210 if (TREE_CODE (type) == ARRAY_TYPE)
1211 return contains_polymorphic_type_p (TREE_TYPE (type));
1212 return false;
1213 }
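
/* For example (made-up types), contains_polymorphic_type_p is true for

     struct A { virtual void f (); };
     struct B { int x; A a; };   // field of a polymorphic type
     typedef A arr_t[4];         // array of a polymorphic type

   but false for an aggregate containing only scalars.  */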
1214
1215 /* CONTEXT->OUTER_TYPE is the type of a memory object in which an object of
1216 EXPECTED_TYPE is contained at CONTEXT->OFFSET. Walk the memory
1217 representation of CONTEXT->OUTER_TYPE and find the outermost class type
1218 that matches EXPECTED_TYPE or contains EXPECTED_TYPE as a base. Update
1219 CONTEXT to represent it.
1220
1221 For example when CONTEXT represents type
1222 class A
1223 {
1224 int a;
1225 class B b;
1226 }
1227 and we look for type at offset sizeof(int), we end up with B and offset 0.
1228 If the same is produced by multiple inheritance, we end up with A and offset
1229 sizeof(int).
1230
1231 If we can not find the corresponding class, give up by setting
1232 CONTEXT->OUTER_TYPE to EXPECTED_TYPE and CONTEXT->OFFSET to 0.
1233 Return true when the lookup was successful. */
1234
1235 static bool
1236 get_class_context (ipa_polymorphic_call_context *context,
1237 tree expected_type)
1238 {
1239 tree type = context->outer_type;
1240 HOST_WIDE_INT offset = context->offset;
1241
1242 /* Find the sub-object the constant actually refers to and mark whether it is
1243 an artificial one (as opposed to a user-defined one). */
1244 while (true)
1245 {
1246 HOST_WIDE_INT pos, size;
1247 tree fld;
1248
1249 /* On a match, just return what we found. */
1250 if (TREE_CODE (type) == TREE_CODE (expected_type)
1251 && types_same_for_odr (type, expected_type))
1252 {
1253 /* A type can not contain itself at a non-zero offset. In that case
1254 just give up. */
1255 if (offset != 0)
1256 goto give_up;
1257 gcc_assert (offset == 0);
1258 return true;
1259 }
1260
1261 /* Walk the fields and find the corresponding one at OFFSET. */
1262 if (TREE_CODE (type) == RECORD_TYPE)
1263 {
1264 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1265 {
1266 if (TREE_CODE (fld) != FIELD_DECL)
1267 continue;
1268
1269 pos = int_bit_position (fld);
1270 size = tree_to_uhwi (DECL_SIZE (fld));
1271 if (pos <= offset && (pos + size) > offset)
1272 break;
1273 }
1274
1275 if (!fld)
1276 goto give_up;
1277
1278 type = TYPE_MAIN_VARIANT (TREE_TYPE (fld));
1279 offset -= pos;
1280 /* DECL_ARTIFICIAL represents a basetype. */
1281 if (!DECL_ARTIFICIAL (fld))
1282 {
1283 context->outer_type = type;
1284 context->offset = offset;
1285 /* As soon as we see a field containing the type,
1286 we know we are not looking for derivations. */
1287 context->maybe_derived_type = false;
1288 }
1289 }
1290 else if (TREE_CODE (type) == ARRAY_TYPE)
1291 {
1292 tree subtype = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1293
1294 /* Give up if we don't know array size. */
1295 if (!tree_fits_shwi_p (TYPE_SIZE (subtype))
1296 || tree_to_shwi (TYPE_SIZE (subtype)) <= 0)
1297 goto give_up;
1298 offset = offset % tree_to_shwi (TYPE_SIZE (subtype));
1299 type = subtype;
1300 context->outer_type = type;
1301 context->offset = offset;
1302 context->maybe_derived_type = false;
1303 }
1304 /* Give up on anything else. */
1305 else
1306 goto give_up;
1307 }
1308
1309 /* If we failed to find subtype we look for, give up and fall back to the
1310 most generic query. */
1311 give_up:
1312 context->outer_type = expected_type;
1313 context->offset = 0;
1314 context->maybe_derived_type = true;
1315 context->maybe_in_construction = true;
1316 /* POD can be changed to an instance of a polymorphic type by
1317 placement new. Here we play safe and assume that any
1318 non-polymorphic type is POD. */
1319 if ((TREE_CODE (type) != RECORD_TYPE
1320 || !TYPE_BINFO (type)
1321 || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
1322 && (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1323 || (offset + tree_to_uhwi (TYPE_SIZE (expected_type)) <=
1324 tree_to_uhwi (TYPE_SIZE (type)))))
1325 return true;
1326 return false;
1327 }
1328
1329 /* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET. */
1330
1331 static bool
1332 contains_type_p (tree outer_type, HOST_WIDE_INT offset,
1333 tree otr_type)
1334 {
1335 ipa_polymorphic_call_context context = {offset,
1336 TYPE_MAIN_VARIANT (outer_type),
1337 false, true};
1338 return get_class_context (&context, otr_type);
1339 }
1340
1341 /* Lookup base of BINFO that has virtual table VTABLE with OFFSET. */
1342
1343 static tree
1344 subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
1345 tree vtable)
1346 {
1347 tree v = BINFO_VTABLE (binfo);
1348 int i;
1349 tree base_binfo;
1350 unsigned HOST_WIDE_INT this_offset;
1351
1352 if (v)
1353 {
1354 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
1355 gcc_unreachable ();
1356
1357 if (offset == this_offset
1358 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
1359 return binfo;
1360 }
1361
1362 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1363 if (polymorphic_type_binfo_p (base_binfo))
1364 {
1365 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
1366 if (base_binfo)
1367 return base_binfo;
1368 }
1369 return NULL;
1370 }
1371
1372 /* T is a known constant value of a virtual table pointer.
1373 Store the virtual table to V and its offset to OFFSET.
1374 Return false if T does not look like a virtual table reference. */
1375
1376 bool
1377 vtable_pointer_value_to_vtable (const_tree t, tree *v,
1378 unsigned HOST_WIDE_INT *offset)
1379 {
1380 /* We expect &MEM[(void *)&virtual_table + 16B].
1381 We obtain the object's BINFO from the context of the virtual table.
1382 This one contains a pointer to the virtual table represented via
1383 POINTER_PLUS_EXPR. Verify that this pointer matches what
1384 we propagated through.
1385
1386 In the case of virtual inheritance, the virtual tables may
1387 be nested, i.e. the offset may be different from 16 and we may
1388 need to dive into the type representation. */
1389 if (TREE_CODE (t) == ADDR_EXPR
1390 && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
1391 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
1392 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
1393 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
1394 == VAR_DECL)
1395 && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
1396 (TREE_OPERAND (t, 0), 0), 0)))
1397 {
1398 *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
1399 *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
1400 return true;
1401 }
1402
1403 /* An alternative representation, used by the C++ frontend, is
1404 POINTER_PLUS_EXPR. We need to handle it when T comes from a static
1405 variable initializer or a BINFO. */
1406 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1407 {
1408 *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
1409 t = TREE_OPERAND (t, 0);
1410 }
1411 else
1412 *offset = 0;
1413
1414 if (TREE_CODE (t) != ADDR_EXPR)
1415 return false;
1416 *v = TREE_OPERAND (t, 0);
1417 return true;
1418 }
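
/* For example, both of the accepted forms
     &MEM[(void *)&_ZTV1A + 16B]   (ADDR_EXPR of a MEM_REF)
     &_ZTV1A p+ 16                 (POINTER_PLUS_EXPR)
   decompose into *v = _ZTV1A and *offset = 16 (an illustration using
   the mangled vtable name of a hypothetical class A).  */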
1419
1420 /* T is a known constant value of a virtual table pointer. Return the BINFO
1421 of the instance type. */
1422
1423 tree
1424 vtable_pointer_value_to_binfo (const_tree t)
1425 {
1426 tree vtable;
1427 unsigned HOST_WIDE_INT offset;
1428
1429 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
1430 return NULL_TREE;
1431
1432 /* FIXME: for stores of construction vtables we return NULL,
1433 because we do not have BINFO for those. Eventually we should fix
1434 our representation to allow this case to be handled, too.
1435 In the case we see a store of a BINFO, however, we may assume
1436 that standard folding will be able to cope with it. */
1437 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
1438 offset, vtable);
1439 }
1440
1441 /* Produce a polymorphic call context for a call to a method of an instance
1442 that is located within BASE (which is assumed to be a decl) at OFFSET. */
1443
1444 static void
1445 get_polymorphic_call_info_for_decl (ipa_polymorphic_call_context *context,
1446 tree base, HOST_WIDE_INT offset)
1447 {
1448 gcc_assert (DECL_P (base));
1449
1450 context->outer_type = TYPE_MAIN_VARIANT (TREE_TYPE (base));
1451 context->offset = offset;
1452 /* Make the very conservative assumption that all objects
1453 may be in construction.
1454 TODO: ipa-prop already contains code to tell this better;
1455 merge it later. */
1456 context->maybe_in_construction = true;
1457 context->maybe_derived_type = false;
1458 }
1459
1460 /* CST is an invariant (address of a decl); try to get a meaningful
1461 polymorphic call context for a polymorphic call of a method
1462 of an instance of OTR_TYPE that is located at OFFSET of this invariant.
1463 Return FALSE if nothing meaningful can be found. */
1464
1465 bool
1466 get_polymorphic_call_info_from_invariant (ipa_polymorphic_call_context *context,
1467 tree cst,
1468 tree otr_type,
1469 HOST_WIDE_INT offset)
1470 {
1471 HOST_WIDE_INT offset2, size, max_size;
1472 tree base;
1473
1474 if (TREE_CODE (cst) != ADDR_EXPR)
1475 return false;
1476
1477 cst = TREE_OPERAND (cst, 0);
1478 base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
1479 if (!DECL_P (base) || max_size == -1 || max_size != size)
1480 return false;
1481
1482 /* Only type inconsistent programs can have otr_type that is
1483 not part of outer type. */
1484 if (!contains_type_p (TREE_TYPE (base), offset, otr_type))
1485 return false;
1486
1487 get_polymorphic_call_info_for_decl (context, base, offset);
1488 return true;
1489 }
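
/* For example, for a call ((A *)&b_obj)->f () on a hypothetical global
   "B b_obj" with B derived from A, CST is &b_obj and the resulting
   context gets outer_type B, offset 0, and maybe_derived_type false.  */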
1490
1491 /* Given a call REF in FNDECL, determine the class of the polymorphic
1492 call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
1493 Return a pointer to the object described by the context. */
1494
1495 tree
1496 get_polymorphic_call_info (tree fndecl,
1497 tree ref,
1498 tree *otr_type,
1499 HOST_WIDE_INT *otr_token,
1500 ipa_polymorphic_call_context *context)
1501 {
1502 tree base_pointer;
1503 *otr_type = obj_type_ref_class (ref);
1504 *otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));
1505
1506 /* Set up basic info in case we find nothing interesting in the analysis. */
1507 context->outer_type = TYPE_MAIN_VARIANT (*otr_type);
1508 context->offset = 0;
1509 base_pointer = OBJ_TYPE_REF_OBJECT (ref);
1510 context->maybe_derived_type = true;
1511 context->maybe_in_construction = true;
1512
1513 /* Walk SSA for outer object. */
1514 do
1515 {
1516 if (TREE_CODE (base_pointer) == SSA_NAME
1517 && !SSA_NAME_IS_DEFAULT_DEF (base_pointer)
1518 && SSA_NAME_DEF_STMT (base_pointer)
1519 && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
1520 {
1521 base_pointer = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (base_pointer));
1522 STRIP_NOPS (base_pointer);
1523 }
1524 else if (TREE_CODE (base_pointer) == ADDR_EXPR)
1525 {
1526 HOST_WIDE_INT size, max_size;
1527 HOST_WIDE_INT offset2;
1528 tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
1529 &offset2, &size, &max_size);
1530
1531 /* If this is a varying address, punt. */
1532 if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
1533 && max_size != -1
1534 && max_size == size)
1535 {
1536 /* We found a dereference of a pointer. The type of the pointer
1537 and of the MEM_REF is meaningless, but we can look further. */
1538 if (TREE_CODE (base) == MEM_REF)
1539 {
1540 base_pointer = TREE_OPERAND (base, 0);
1541 context->offset
1542 += offset2 + mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
1543 context->outer_type = NULL;
1544 }
1545 /* We found base object. In this case the outer_type
1546 is known. */
1547 else if (DECL_P (base))
1548 {
1549 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (base)));
1550
1551 /* Only type inconsistent programs can have otr_type that is
1552 not part of outer type. */
1553 if (!contains_type_p (TREE_TYPE (base),
1554 context->offset + offset2, *otr_type))
1555 {
1556 /* Use OTR_TOKEN = INT_MAX as a marker of probably type-inconsistent
1557 code sequences; we arrange for the calls to be redirected to
1558 builtin_unreachable later. */
1559 *otr_token = INT_MAX;
1560 return base_pointer;
1561 }
1562 get_polymorphic_call_info_for_decl (context, base,
1563 context->offset + offset2);
1564 return NULL;
1565 }
1566 else
1567 break;
1568 }
1569 else
1570 break;
1571 }
1572 else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
1573 && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
1574 {
1575 context->offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
1576 * BITS_PER_UNIT;
1577 base_pointer = TREE_OPERAND (base_pointer, 0);
1578 }
1579 else
1580 break;
1581 }
1582 while (true);
1583
1584 /* Try to determine type of the outer object. */
1585 if (TREE_CODE (base_pointer) == SSA_NAME
1586 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
1587 && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
1588 {
1589 /* See if parameter is THIS pointer of a method. */
1590 if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
1591 && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
1592 {
1593 context->outer_type
1594 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
1595 gcc_assert (TREE_CODE (context->outer_type) == RECORD_TYPE);
1596
1597 /* Dynamic casting has possibly upcasted the type
1598 in the hierarchy. In this case the outer type is less
1599 informative than the inner type and we should forget
1600 about it. */
1601 if (!contains_type_p (context->outer_type, context->offset,
1602 *otr_type))
1603 {
1604 context->outer_type = NULL;
1605 return base_pointer;
1606 }
1607
1608 /* If the function is a constructor or destructor, then
1609 the type is possibly in construction, but we know
1610 it is not a derived type. */
1611 if (DECL_CXX_CONSTRUCTOR_P (fndecl)
1612 || DECL_CXX_DESTRUCTOR_P (fndecl))
1613 {
1614 context->maybe_in_construction = true;
1615 context->maybe_derived_type = false;
1616 }
1617 else
1618 {
1619 context->maybe_derived_type = true;
1620 context->maybe_in_construction = false;
1621 }
1622 return base_pointer;
1623 }
1624 /* Non-PODs passed by value are really passed by invisible
1625 reference. In this case we also know the type of the
1626 object. */
1627 if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
1628 {
1629 context->outer_type
1630 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
1631 gcc_assert (!POINTER_TYPE_P (context->outer_type));
1632 /* Only type inconsistent programs can have otr_type that is
1633 not part of outer type. */
1634 if (!contains_type_p (context->outer_type, context->offset,
1635 *otr_type))
1636 {
1637 /* Use OTR_TOKEN = INT_MAX as a marker of probably type-inconsistent
1638 code sequences; we arrange for the calls to be redirected to
1639 builtin_unreachable later. */
1640 *otr_token = INT_MAX;
1641 return base_pointer;
1642 }
1643 context->maybe_derived_type = false;
1644 context->maybe_in_construction = false;
1645 return base_pointer;
1646 }
1647 }
1648 /* TODO: There are multiple ways to derive a type. For instance
1649 if BASE_POINTER is passed to a constructor call prior to our reference.
1650 We do not perform this kind of flow-sensitive analysis yet. */
1651 return base_pointer;
1652 }
1653
1654 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
1655 Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
1656 and insert them into NODES.
1657
1658 MATCHED_VTABLES and INSERTED are used to avoid duplicated work. */
1659
1660 static void
1661 record_targets_from_bases (tree otr_type,
1662 HOST_WIDE_INT otr_token,
1663 tree outer_type,
1664 HOST_WIDE_INT offset,
1665 vec <cgraph_node *> &nodes,
1666 pointer_set_t *inserted,
1667 pointer_set_t *matched_vtables,
1668 bool *completep)
1669 {
1670 while (true)
1671 {
1672 HOST_WIDE_INT pos, size;
1673 tree base_binfo;
1674 tree fld;
1675
1676 if (types_same_for_odr (outer_type, otr_type))
1677 return;
1678
1679 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
1680 {
1681 if (TREE_CODE (fld) != FIELD_DECL)
1682 continue;
1683
1684 pos = int_bit_position (fld);
1685 size = tree_to_shwi (DECL_SIZE (fld));
1686 if (pos <= offset && (pos + size) > offset
1687 /* Do not get confused by zero sized bases. */
1688 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
1689 break;
1690 }
1691 /* Within a class type we should always find a corresponding field. */
1692 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
1693
1694 /* Non-base types should have been stripped by outer_class_type. */
1695 gcc_assert (DECL_ARTIFICIAL (fld));
1696
1697 outer_type = TREE_TYPE (fld);
1698 offset -= pos;
1699
1700 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
1701 offset, otr_type);
1702 if (!base_binfo)
1703 {
1704 gcc_assert (odr_violation_reported);
1705 return;
1706 }
1707 gcc_assert (base_binfo);
1708 if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo)))
1709 {
1710 bool can_refer;
1711 tree target = gimple_get_virt_method_for_binfo (otr_token,
1712 base_binfo,
1713 &can_refer);
1714 if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
1715 maybe_record_node (nodes, target, inserted, can_refer, completep);
1716 pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo));
1717 }
1718 }
1719 }
1720
1721 /* When a virtual table is removed, we may need to flush the cache. */
1722
1723 static void
1724 devirt_variable_node_removal_hook (varpool_node *n,
1725 void *d ATTRIBUTE_UNUSED)
1726 {
1727 if (cached_polymorphic_call_targets
1728 && DECL_VIRTUAL_P (n->decl)
1729 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
1730 free_polymorphic_call_targets_hash ();
1731 }
1732
1733 /* Return a vector containing possible targets of a polymorphic call of type
1734 OTR_TYPE calling method OTR_TOKEN within type OTR_OUTER_TYPE at OFFSET.
1735 If INCLUDE_BASES is true, walk also base types of OUTER_TYPE containing
1736 OTR_TYPE and include their virtual methods. This is useful for types
1737 possibly in construction or destruction where the virtual table may
1738 temporarily change to one of the base types. INCLUDE_DERIVED_TYPES makes
1739 us walk the inheritance graph for all derivations.
1740
1741 OTR_TOKEN == INT_MAX is used to mark calls that are provably
1742 undefined and should be redirected to unreachable.
1743
1744 If COMPLETEP is non-NULL, store true if the list is complete.
1745 CACHE_TOKEN (if non-NULL) will get stored to a unique ID of the entry
1746 in the target cache. If the user needs to visit every target list
1747 just once, it can memoize them.
1748
1749 NONCONSTRUCTION_TARGETS specifies the number of targets under the
1750 assumption that the type is not in construction. Those targets appear
1751 first in the vector returned.
1752
1753 The returned vector is placed into the cache. It is NOT the caller's
1754 responsibility to free it. The vector can be freed on a cgraph_remove_node
1755 call if the particular node is a virtual function present in the cache. */
1756
1757 vec <cgraph_node *>
1758 possible_polymorphic_call_targets (tree otr_type,
1759 HOST_WIDE_INT otr_token,
1760 ipa_polymorphic_call_context context,
1761 bool *completep,
1762 void **cache_token,
1763 int *nonconstruction_targetsp)
1764 {
1765 static struct cgraph_node_hook_list *node_removal_hook_holder;
1766 pointer_set_t *inserted;
1767 pointer_set_t *matched_vtables;
1768 vec <cgraph_node *> nodes = vNULL;
1769 vec <tree> bases_to_consider = vNULL;
1770 odr_type type, outer_type;
1771 polymorphic_call_target_d key;
1772 polymorphic_call_target_d **slot;
1773 unsigned int i;
1774 tree binfo, target;
1775 bool complete;
1776 bool can_refer;
1777 bool skipped = false;
1778
1779 otr_type = TYPE_MAIN_VARIANT (otr_type);
1780
1781 /* If ODR is not initialized, return an empty, incomplete list. */
1782 if (!odr_hash)
1783 {
1784 if (completep)
1785 *completep = false;
1786 if (cache_token)
1787 *cache_token = NULL;
1788 if (nonconstruction_targetsp)
1789 *nonconstruction_targetsp = 0;
1790 return nodes;
1791 }
1792
1793 /* If we hit a type inconsistency, just return an empty list of targets. */
1794 if (otr_token == INT_MAX)
1795 {
1796 if (completep)
1797 *completep = true;
1798 if (cache_token)
1799 *cache_token = NULL;
1800 if (nonconstruction_targetsp)
1801 *nonconstruction_targetsp = 0;
1802 return nodes;
1803 }
1804
1805 type = get_odr_type (otr_type, true);
1806
1807 /* Recording type variants would waste the results cache. */
1808 gcc_assert (!context.outer_type
1809 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
1810
1811 /* Lookup the outer class type we want to walk. */
1812 if (context.outer_type
1813 && !get_class_context (&context, otr_type))
1814 {
1815 if (completep)
1816 *completep = false;
1817 if (cache_token)
1818 *cache_token = NULL;
1819 if (nonconstruction_targetsp)
1820 *nonconstruction_targetsp = 0;
1821 return nodes;
1822 }
1823
1824 /* Check that get_class_context kept the main variant. */
1825 gcc_assert (!context.outer_type
1826 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
1827
1828 /* We canonicalize our query, so we do not need extra hashtable entries. */
1829
1830 /* Without an outer type, we have no use for the offset. Just do the
1831 basic search from the inner type. */
1832 if (!context.outer_type)
1833 {
1834 context.outer_type = otr_type;
1835 context.offset = 0;
1836 }
1837 /* We need to update our hierarchy if the type does not exist. */
1838 outer_type = get_odr_type (context.outer_type, true);
1839 /* If the type is final, there are no derivations. */
1840 if (TYPE_FINAL_P (outer_type->type))
1841 context.maybe_derived_type = false;
1842
1843 /* Initialize query cache. */
1844 if (!cached_polymorphic_call_targets)
1845 {
1846 cached_polymorphic_call_targets = pointer_set_create ();
1847 polymorphic_call_target_hash
1848 = new polymorphic_call_target_hash_type (23);
1849 if (!node_removal_hook_holder)
1850 {
1851 node_removal_hook_holder =
1852 cgraph_add_node_removal_hook (&devirt_node_removal_hook, NULL);
1853 varpool_add_node_removal_hook (&devirt_variable_node_removal_hook,
1854 NULL);
1855 }
1856 }
1857
1858 /* Lookup cached answer. */
1859 key.type = type;
1860 key.otr_token = otr_token;
1861 key.context = context;
1862 slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
1863 if (cache_token)
1864 *cache_token = (void *)*slot;
1865 if (*slot)
1866 {
1867 if (completep)
1868 *completep = (*slot)->complete;
1869 if (nonconstruction_targetsp)
1870 *nonconstruction_targetsp = (*slot)->nonconstruction_targets;
1871 return (*slot)->targets;
1872 }
1873
1874 complete = true;
1875
1876 /* Do actual search. */
1877 timevar_push (TV_IPA_VIRTUAL_CALL);
1878 *slot = XCNEW (polymorphic_call_target_d);
1879 if (cache_token)
1880 *cache_token = (void *)*slot;
1881 (*slot)->type = type;
1882 (*slot)->otr_token = otr_token;
1883 (*slot)->context = context;
1884
1885 inserted = pointer_set_create ();
1886 matched_vtables = pointer_set_create ();
1887
1888 /* First look at the virtual method of the type itself. */
1889 binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
1890 context.offset, otr_type);
1891 if (binfo)
1892 target = gimple_get_virt_method_for_binfo (otr_token, binfo,
1893 &can_refer);
1894 else
1895 {
1896 gcc_assert (odr_violation_reported);
1897 target = NULL;
1898 }
1899
1900 /* Destructors are never called through construction virtual tables,
1901 because the type is always known. */
1902 if (target && DECL_CXX_DESTRUCTOR_P (target))
1903 context.maybe_in_construction = false;
1904
1905 if (target)
1906 {
1907 /* If we got a final method, we don't need
1908 to walk derivations. */
1909 if (DECL_FINAL_P (target))
1910 context.maybe_derived_type = false;
1911 }
1912
1913 /* If OUTER_TYPE is abstract, we know we are not seeing its instance. */
1914 if (type_possibly_instantiated_p (outer_type->type))
1915 maybe_record_node (nodes, target, inserted, can_refer, &complete);
1916 else
1917 {
1918 skipped = true;
1919 gcc_assert (in_lto_p || context.maybe_derived_type);
1920 }
1921
1922 if (binfo)
1923 pointer_set_insert (matched_vtables, BINFO_VTABLE (binfo));
1924
1925 /* Next walk recursively all derived types. */
1926 if (context.maybe_derived_type)
1927 {
1928 /* For anonymous namespace types we can attempt to build the full type.
1929 All derivations must be within this unit (unless we see a partial unit). */
1930 if (!type->all_derivations_known)
1931 complete = false;
1932 for (i = 0; i < outer_type->derived_types.length(); i++)
1933 possible_polymorphic_call_targets_1 (nodes, inserted,
1934 matched_vtables,
1935 otr_type,
1936 outer_type->derived_types[i],
1937 otr_token, outer_type->type,
1938 context.offset, &complete,
1939 bases_to_consider,
1940 context.maybe_in_construction);
1941 }
1942
1943 /* Finally walk bases, if asked to. */
1944 (*slot)->nonconstruction_targets = nodes.length();
1945
1946 /* Destructors are never called through construction virtual tables,
1947 because the type is always known. One of the entries may be
1948 __cxa_pure_virtual, so look at the first two of them. */
1949 if (context.maybe_in_construction)
1950 for (i = 0; i < MIN (nodes.length (), 2); i++)
1951 if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
1952 context.maybe_in_construction = false;
1953 if (context.maybe_in_construction)
1954 {
1955 if (type != outer_type
1956 && (!skipped
1957 || (context.maybe_derived_type
1958 && !type_all_derivations_known_p (outer_type->type))))
1959 record_targets_from_bases (otr_type, otr_token, outer_type->type,
1960 context.offset, nodes, inserted,
1961 matched_vtables, &complete);
1962 if (skipped)
1963 maybe_record_node (nodes, target, inserted, can_refer, &complete);
1964 for (i = 0; i < bases_to_consider.length(); i++)
1965 maybe_record_node (nodes, bases_to_consider[i], inserted, can_refer, &complete);
1966 }
1967 bases_to_consider.release();
1968
1969 (*slot)->targets = nodes;
1970 (*slot)->complete = complete;
1971 if (completep)
1972 *completep = complete;
1973 if (nonconstruction_targetsp)
1974 *nonconstruction_targetsp = (*slot)->nonconstruction_targets;
1975
1976 pointer_set_destroy (inserted);
1977 pointer_set_destroy (matched_vtables);
1978 timevar_pop (TV_IPA_VIRTUAL_CALL);
1979 return nodes;
1980 }
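
/* Usage sketch (simplified, mirroring the call in
   possible_polymorphic_call_target_p below): a client asks for the target
   list and may devirtualize only when the list is complete and has a
   single entry:

     bool final;
     vec <cgraph_node *> targets
       = possible_polymorphic_call_targets (otr_type, otr_token,
					    context, &final);
     if (final && targets.length () == 1)
       ;  // safe to redirect the call to targets[0]->decl
*/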
1981
1982 /* Dump all possible targets of a polymorphic call. */
1983
1984 void
1985 dump_possible_polymorphic_call_targets (FILE *f,
1986 tree otr_type,
1987 HOST_WIDE_INT otr_token,
1988 const ipa_polymorphic_call_context &ctx)
1989 {
1990 vec <cgraph_node *> targets;
1991 bool final;
1992 odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false);
1993 unsigned int i;
1994 int nonconstruction;
1995
1996 if (!type)
1997 return;
1998 targets = possible_polymorphic_call_targets (otr_type, otr_token,
1999 ctx,
2000 &final, NULL, &nonconstruction);
2001 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
2002 print_generic_expr (f, type->type, TDF_SLIM);
2003 fprintf (f, " token %i\n", (int)otr_token);
2004 if (ctx.outer_type || ctx.offset)
2005 {
2006 fprintf (f, " Contained in type:");
2007 print_generic_expr (f, ctx.outer_type, TDF_SLIM);
2008 fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n",
2009 ctx.offset);
2010 }
2011
2012 fprintf (f, " %s%s%s\n ",
2013 final ? "This is a complete list." :
2014 "This is partial list; extra targets may be defined in other units.",
2015 ctx.maybe_in_construction ? " (base types included)" : "",
2016 ctx.maybe_derived_type ? " (derived types included)" : "");
2017 for (i = 0; i < targets.length (); i++)
2018 {
2019 char *name = NULL;
2020 if (i == (unsigned)nonconstruction)
2021 fprintf (f, "\n If the type is in construction,"
2022 " then additional tarets are:\n"
2023 " ");
2024 if (in_lto_p)
2025 name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
2026 fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
2027 if (in_lto_p)
2028 free (name);
2029 if (!targets[i]->definition)
2030 fprintf (f, " (no definition%s)",
2031 DECL_DECLARED_INLINE_P (targets[i]->decl)
2032 ? " inline" : "");
2033 }
2034 fprintf (f, "\n\n");
2035 }
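
/* The dump produced above has roughly the following shape (the type id,
   token, names and orders below are made up for illustration):

     Targets of polymorphic call of type 5:struct A token 2
       This is a complete list. (derived types included)
       virtual int A::foo()/12 virtual int D::foo()/15
*/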
2036
2037
2038 /* Return true if N can be possibly target of a polymorphic call of
2039 OTR_TYPE/OTR_TOKEN. */
2040
2041 bool
2042 possible_polymorphic_call_target_p (tree otr_type,
2043 HOST_WIDE_INT otr_token,
2044 const ipa_polymorphic_call_context &ctx,
2045 struct cgraph_node *n)
2046 {
2047 vec <cgraph_node *> targets;
2048 unsigned int i;
2049 enum built_in_function fcode;
2050 bool final;
2051
2052 if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
2053 && ((fcode = DECL_FUNCTION_CODE (n->decl))
2054 == BUILT_IN_UNREACHABLE
2055 || fcode == BUILT_IN_TRAP))
2056 return true;
2057
2058 if (!odr_hash)
2059 return true;
2060 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
2061 for (i = 0; i < targets.length (); i++)
2062 if (symtab_semantically_equivalent_p (n, targets[i]))
2063 return true;
2064
2065 /* At the moment we allow the middle end to dig out new external
2066 declarations as targets of polymorphic calls. */
2067 if (!final && !n->definition)
2068 return true;
2069 return false;
2070 }
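
/* Sketch of intended use (illustrative): a consistency check that a
   direct call introduced by devirtualization is still among the possible
   targets of the original polymorphic call:

     gcc_assert (possible_polymorphic_call_target_p
		   (otr_type, otr_token, ctx, callee_node));
*/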
2071
2072
2073 /* After callgraph construction new external nodes may appear.
2074 Add them into the graph. */
2075
2076 void
2077 update_type_inheritance_graph (void)
2078 {
2079 struct cgraph_node *n;
2080
2081 if (!odr_hash)
2082 return;
2083 free_polymorphic_call_targets_hash ();
2084 timevar_push (TV_IPA_INHERITANCE);
2085 /* We reconstruct the graph starting from types of all methods seen in the
2086 unit. */
2087 FOR_EACH_FUNCTION (n)
2088 if (DECL_VIRTUAL_P (n->decl)
2089 && !n->definition
2090 && symtab_real_symbol_p (n))
2091 get_odr_type (method_class_type (TYPE_MAIN_VARIANT (TREE_TYPE (n->decl))),
2092 true);
2093 timevar_pop (TV_IPA_INHERITANCE);
2094 }
2095
2096
2097 /* Return true if N looks like a likely target of a polymorphic call.
2098 Rule out cxa_pure_virtual, noreturns, functions declared cold and
2099 other obvious cases. */
2100
2101 bool
2102 likely_target_p (struct cgraph_node *n)
2103 {
2104 int flags;
2105 /* cxa_pure_virtual and similar things are not likely. */
2106 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
2107 return false;
2108 flags = flags_from_decl_or_type (n->decl);
2109 if (flags & ECF_NORETURN)
2110 return false;
2111 if (lookup_attribute ("cold",
2112 DECL_ATTRIBUTES (n->decl)))
2113 return false;
2114 if (n->frequency < NODE_FREQUENCY_NORMAL)
2115 return false;
2116 /* If there are no live virtual tables referring to the target,
2117 the only way the target can be called is from an instance coming from
2118 another compilation unit; speculative devirtualization is built around
2119 the assumption that this won't happen. */
2120 if (!referenced_from_vtable_p (n))
2121 return false;
2122 return true;
2123 }
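
/* Example (illustrative): targets that likely_target_p rules out. A pure
   virtual slot is filled by __cxa_pure_virtual (whose type is not
   METHOD_TYPE, so the first test above rejects it), and an override that
   only throws carries ECF_NORETURN:

     struct B
     {
       virtual int f () = 0;            // slot holds __cxa_pure_virtual
     };

     struct D : B
     {
       [[noreturn]] int f () override   // ECF_NORETURN: rejected
       { throw 0; }
     };
*/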
2124
2125 /* The ipa-devirt pass.
2126 When a polymorphic call has only one likely target in the unit,
2127 turn it into a speculative call. */
2128
2129 static unsigned int
2130 ipa_devirt (void)
2131 {
2132 struct cgraph_node *n;
2133 struct pointer_set_t *bad_call_targets = pointer_set_create ();
2134 struct cgraph_edge *e;
2135
2136 int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
2137 int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
2138 int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;
2139
2140 FOR_EACH_DEFINED_FUNCTION (n)
2141 {
2142 bool update = false;
2143 if (dump_file && n->indirect_calls)
2144 fprintf (dump_file, "\n\nProcessing function %s/%i\n",
2145 n->name (), n->order);
2146 for (e = n->indirect_calls; e; e = e->next_callee)
2147 if (e->indirect_info->polymorphic)
2148 {
2149 struct cgraph_node *likely_target = NULL;
2150 void *cache_token;
2151 bool final;
2152 int nonconstruction_targets;
2153 vec <cgraph_node *>targets
2154 = possible_polymorphic_call_targets
2155 (e, &final, &cache_token, &nonconstruction_targets);
2156 unsigned int i;
2157
2158 if (dump_file)
2159 dump_possible_polymorphic_call_targets
2160 (dump_file, e);
2161
2162 npolymorphic++;
2163
2164 if (!cgraph_maybe_hot_edge_p (e))
2165 {
2166 if (dump_file)
2167 fprintf (dump_file, "Call is cold\n\n");
2168 ncold++;
2169 continue;
2170 }
2171 if (e->speculative)
2172 {
2173 if (dump_file)
2174 fprintf (dump_file, "Call is already speculated\n\n");
2175 nspeculated++;
2176
2177 /* When dumping see if we agree with speculation. */
2178 if (!dump_file)
2179 continue;
2180 }
2181 if (pointer_set_contains (bad_call_targets,
2182 cache_token))
2183 {
2184 if (dump_file)
2185 fprintf (dump_file, "Target list is known to be useless\n\n");
2186 nmultiple++;
2187 continue;
2188 }
2189 for (i = 0; i < targets.length (); i++)
2190 if (likely_target_p (targets[i]))
2191 {
2192 if (likely_target)
2193 {
2194 if (i < (unsigned) nonconstruction_targets)
2195 {
2196 likely_target = NULL;
2197 if (dump_file)
2198 fprintf (dump_file, "More than one likely target\n\n");
2199 nmultiple++;
2200 }
2201 break;
2202 }
2203 likely_target = targets[i];
2204 }
2205 if (!likely_target)
2206 {
2207 pointer_set_insert (bad_call_targets, cache_token);
2208 continue;
2209 }
2210 /* This is reached only when dumping; check if we agree or disagree
2211 with the speculation. */
2212 if (e->speculative)
2213 {
2214 struct cgraph_edge *e2;
2215 struct ipa_ref *ref;
2216 cgraph_speculative_call_info (e, e2, e, ref);
2217 if (cgraph_function_or_thunk_node (e2->callee, NULL)
2218 == cgraph_function_or_thunk_node (likely_target, NULL))
2219 {
2220 fprintf (dump_file, "We agree with speculation\n\n");
2221 nok++;
2222 }
2223 else
2224 {
2225 fprintf (dump_file, "We disagree with speculation\n\n");
2226 nwrong++;
2227 }
2228 continue;
2229 }
2230 if (!likely_target->definition)
2231 {
2232 if (dump_file)
2233 fprintf (dump_file, "Target is not a definition\n\n");
2234 nnotdefined++;
2235 continue;
2236 }
2237 /* Do not introduce new references to external symbols. While we
2238 could handle these just fine, it is common for programs to be
2239 built incorrectly, with headers defining methods they are never
2240 linked with. */
2241 if (DECL_EXTERNAL (likely_target->decl))
2242 {
2243 if (dump_file)
2244 fprintf (dump_file, "Target is external\n\n");
2245 nexternal++;
2246 continue;
2247 }
2248 /* Don't use an implicitly-declared destructor (c++/58678). */
2249 struct cgraph_node *non_thunk_target
2250 = cgraph_function_node (likely_target);
2251 if (DECL_ARTIFICIAL (non_thunk_target->decl)
2252 && DECL_COMDAT (non_thunk_target->decl))
2253 {
2254 if (dump_file)
2255 fprintf (dump_file, "Target is artificial\n\n");
2256 nartificial++;
2257 continue;
2258 }
2259 if (cgraph_function_body_availability (likely_target)
2260 <= AVAIL_OVERWRITABLE
2261 && symtab_can_be_discarded (likely_target))
2262 {
2263 if (dump_file)
2264 fprintf (dump_file, "Target is overwritable\n\n");
2265 noverwritable++;
2266 continue;
2267 }
2268 else if (dbg_cnt (devirt))
2269 {
2270 if (dump_enabled_p ())
2271 {
2272 location_t locus = gimple_location_safe (e->call_stmt);
2273 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
2274 "speculatively devirtualizing call in %s/%i to %s/%i\n",
2275 n->name (), n->order,
2276 likely_target->name (),
2277 likely_target->order);
2278 }
2279 if (!symtab_can_be_discarded (likely_target))
2280 {
2281 cgraph_node *alias;
2282 alias = cgraph (symtab_nonoverwritable_alias
2283 (likely_target));
2284 if (alias)
2285 likely_target = alias;
2286 }
2287 nconverted++;
2288 update = true;
2289 cgraph_turn_edge_to_speculative
2290 (e, likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
2291 }
2292 }
2293 if (update)
2294 inline_update_overall_summary (n);
2295 }
2296 pointer_set_destroy (bad_call_targets);
2297
2298 if (dump_file)
2299 fprintf (dump_file,
2300 "%i polymorphic calls, %i devirtualized,"
2301 " %i speculatively devirtualized, %i cold\n"
2302 "%i have multiple targets, %i overwritable,"
2303 " %i already speculated (%i agree, %i disagree),"
2304 " %i external, %i not defined, %i artificial\n",
2305 npolymorphic, ndevirtualized, nconverted, ncold,
2306 nmultiple, noverwritable, nspeculated, nok, nwrong,
2307 nexternal, nnotdefined, nartificial);
2308 return ndevirtualized ? TODO_remove_functions : 0;
2309 }
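
/* The speculative edge created by cgraph_turn_edge_to_speculative above
   corresponds roughly to the following source-level rewrite (pseudocode
   sketch; B::f stands for the single likely target):

     obj->f ();			// polymorphic call

   becomes

     if (vtable slot of obj for f == &B::f)
       B::f (obj);		// direct call, can be inlined
     else
       obj->f ();		// fallback indirect call

   where the direct edge receives 8/10 of the original count and
   frequency, matching the scaling above. */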
2310
2311 namespace {
2312
2313 const pass_data pass_data_ipa_devirt =
2314 {
2315 IPA_PASS, /* type */
2316 "devirt", /* name */
2317 OPTGROUP_NONE, /* optinfo_flags */
2318 true, /* has_execute */
2319 TV_IPA_DEVIRT, /* tv_id */
2320 0, /* properties_required */
2321 0, /* properties_provided */
2322 0, /* properties_destroyed */
2323 0, /* todo_flags_start */
2324 ( TODO_dump_symtab ), /* todo_flags_finish */
2325 };
2326
2327 class pass_ipa_devirt : public ipa_opt_pass_d
2328 {
2329 public:
2330 pass_ipa_devirt (gcc::context *ctxt)
2331 : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
2332 NULL, /* generate_summary */
2333 NULL, /* write_summary */
2334 NULL, /* read_summary */
2335 NULL, /* write_optimization_summary */
2336 NULL, /* read_optimization_summary */
2337 NULL, /* stmt_fixup */
2338 0, /* function_transform_todo_flags_start */
2339 NULL, /* function_transform */
2340 NULL) /* variable_transform */
2341 {}
2342
2343 /* opt_pass methods: */
2344 virtual bool gate (function *)
2345 {
2346 return (flag_devirtualize
2347 && flag_devirtualize_speculatively
2348 && optimize);
2349 }
2350
2351 virtual unsigned int execute (function *) { return ipa_devirt (); }
2352
2353 }; // class pass_ipa_devirt
2354
2355 } // anon namespace
2356
2357 ipa_opt_pass_d *
2358 make_pass_ipa_devirt (gcc::context *ctxt)
2359 {
2360 return new pass_ipa_devirt (ctxt);
2361 }
2362
2363 #include "gt-ipa-devirt.h"