[gcc.git] / gcc / ipa-devirt.c
/* Basic IPA utilities for type inheritance graph construction and
   devirtualization.
   Copyright (C) 2013-2014 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Brief vocabulary:
     ODR = One Definition Rule
        In short, the ODR states that:
        1 In any translation unit, a template, type, function, or object can
          have no more than one definition.  Some of these can have any number
          of declarations.  A definition provides an instance.
        2 In the entire program, an object or non-inline function cannot have
          more than one definition; if an object or function is used, it must
          have exactly one definition.  You can declare an object or function
          that is never used, in which case you don't have to provide
          a definition.  In no event can there be more than one definition.
        3 Some things, like types, templates, and extern inline functions, can
          be defined in more than one translation unit.  For a given entity,
          each definition must be the same.  Non-extern objects and functions
          in different translation units are different entities, even if their
          names and types are the same.

     OTR = OBJ_TYPE_REF
       This is the Gimple representation of type information of a polymorphic call.
       It contains two parameters:
         otr_type is a type of class whose method is called.
         otr_token is the index into virtual table where address is taken.

     BINFO
       This is the type inheritance information attached to each tree
       RECORD_TYPE by the C++ frontend.  It provides information about base
       types and virtual tables.

       BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
       BINFO also links to its type by BINFO_TYPE and to the virtual table by
       BINFO_VTABLE.

       Base types of a given type are enumerated by BINFO_BASE_BINFO
       vector.  Members of this vector are not BINFOs associated
       with a base type.  Rather they are new copies of BINFOs
       (base BINFOs).  Their virtual tables may differ from
       virtual table of the base type.  Also BINFO_OFFSET specifies
       offset of the base within the type.

       In the case of single inheritance, the virtual table is shared
       and BINFO_VTABLE of base BINFO is NULL.  In the case of multiple
       inheritance the individual virtual tables are pointed to by
       BINFO_VTABLE of base binfos (which differ from BINFO_VTABLE of the
       binfo associated with the base type).

       BINFO lookup for a given base type and offset can be done by
       get_binfo_at_offset.  It returns proper BINFO whose virtual table
       can be used for lookup of virtual methods associated with the
       base type.

     token
       This is an index of virtual method in virtual table associated
       with the type defining it.  Token can be looked up from OBJ_TYPE_REF
       or from DECL_VINDEX of a given virtual table.

     polymorphic (indirect) call
       This is the callgraph representation of a virtual method call.  Every
       polymorphic call contains otr_type and otr_token taken from the
       original OBJ_TYPE_REF at callgraph construction time.

   What we do here:

   build_type_inheritance_graph triggers a construction of the type inheritance
   graph.

     We reconstruct it based on types of methods we see in the unit.
     This means that the graph is not complete.  Types with no methods are not
     inserted into the graph.  Also types without virtual methods are not
     represented at all, though it may be easy to add this.

     The inheritance graph is represented as follows:

       Vertices are structures odr_type.  Every odr_type may correspond
       to one or more tree type nodes that are equivalent by ODR rule.
       (the multiple type nodes appear only with linktime optimization)

       Edges are represented by odr_type->base and odr_type->derived_types.
       At the moment we do not track offsets of types for multiple inheritance.
       Adding this is easy.

  possible_polymorphic_call_targets returns, given the parameters found on an
    indirect polymorphic edge, all possible polymorphic call targets of the call.

  pass_ipa_devirt performs simple speculative devirtualization.
*/
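
/* For orientation, a minimal sketch of the constructs above (hypothetical
   classes, not part of GCC itself):

     struct A { virtual int f (); };        // otr_type for calls through A*
     struct B : A { int f (); };            // derived type with own vtable slot

     int call (A *a) { return a->f (); }    // becomes OBJ_TYPE_REF in Gimple
                                            // with otr_token 0 (first method)

   The inheritance graph built below contains odr_type nodes for A and B
   connected by an edge, and possible_polymorphic_call_targets for the call
   in call() returns both A::f and B::f.  */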

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "print-tree.h"
#include "calls.h"
#include "cgraph.h"
#include "expr.h"
#include "tree-pass.h"
#include "hash-set.h"
#include "target.h"
#include "hash-table.h"
#include "inchash.h"
#include "tree-pretty-print.h"
#include "ipa-utils.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "ipa-inline.h"
#include "diagnostic.h"
#include "tree-dfa.h"
#include "demangle.h"
#include "dbgcnt.h"
#include "gimple-pretty-print.h"
#include "stor-layout.h"
#include "intl.h"
#include "hash-map.h"

static bool odr_types_equivalent_p (tree, tree, bool, bool *,
                                    hash_set<tree> *);

static bool odr_violation_reported = false;


/* Pointer set of all call targets appearing in the cache.  */
static hash_set<cgraph_node *> *cached_polymorphic_call_targets;

/* The node of type inheritance graph.  For each type unique in
   One Definition Rule (ODR) sense, we produce one node linking all
   main variants of types equivalent to it, bases and derived types.  */

struct GTY(()) odr_type_d
{
  /* Leader type.  */
  tree type;
  /* All bases; built only for main variants of types.  */
  vec<odr_type> GTY((skip)) bases;
  /* All derived types with virtual methods seen in unit;
     built only for main variants of types.  */
  vec<odr_type> GTY((skip)) derived_types;

  /* All equivalent types, if more than one.  */
  vec<tree, va_gc> *types;
  /* Set of all equivalent types, if NON-NULL.  */
  hash_set<tree> * GTY((skip)) types_set;

  /* Unique ID indexing the type in odr_types array.  */
  int id;
  /* Is it in anonymous namespace?  */
  bool anonymous_namespace;
  /* Do we know about all derivations of given type?  */
  bool all_derivations_known;
  /* Did we report ODR violation here?  */
  bool odr_violated;
};

static bool contains_type_p (tree, HOST_WIDE_INT, tree);


/* Return true if BINFO corresponds to a type with virtual methods.

   Every type has several BINFOs.  One is the BINFO associated with the
   type itself, while others represent bases of derived types.  The BINFOs
   representing bases do not have BINFO_VTABLE pointer set in the single
   inheritance case (because vtables are shared).  Look up the BINFO of
   the type and check the presence of its vtable.  */

static inline bool
polymorphic_type_binfo_p (tree binfo)
{
  /* See if BINFO's type has a virtual table associated with it.  */
  return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo)));
}

/* Return TRUE if all derived types of T are known and thus
   we may consider the walk of derived types complete.

   This is typically true only for final anonymous namespace types and types
   defined within functions (that may be COMDAT and thus shared across units,
   but with the same set of derived types).  */

static bool
type_all_derivations_known_p (tree t)
{
  if (TYPE_FINAL_P (t))
    return true;
  if (flag_ltrans)
    return false;
  if (type_in_anonymous_namespace_p (t))
    return true;
  return (decl_function_context (TYPE_NAME (t)) != NULL);
}
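
/* For instance (hypothetical code), derivations are fully known for

     struct F final : A { };              // TYPE_FINAL_P
     namespace { struct N : A { }; }      // anonymous namespace, TU-local

   but not for any exported class, since another translation unit may
   derive from it.  */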

/* Return TRUE if type's constructors are all visible.  */

static bool
type_all_ctors_visible_p (tree t)
{
  return !flag_ltrans
         && symtab->state >= CONSTRUCTION
         /* We cannot always use type_all_derivations_known_p.
            For function local types we must assume the case where
            the function is COMDAT and shared between units.

            TODO: These cases are quite easy to get, but we need
            to keep track of C++ privatizing via -Wno-weak
            as well as the IPA privatizing.  */
         && type_in_anonymous_namespace_p (t);
}

/* Return TRUE if type may have instances.  */

static bool
type_possibly_instantiated_p (tree t)
{
  tree vtable;
  varpool_node *vnode;

  /* TODO: Add abstract types here.  */
  if (!type_all_ctors_visible_p (t))
    return true;

  vtable = BINFO_VTABLE (TYPE_BINFO (t));
  if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
    vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
  vnode = varpool_node::get (vtable);
  return vnode && vnode->definition;
}
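
/* Note the unwrapping idiom above: BINFO_VTABLE is typically a
   POINTER_PLUS_EXPR of the shape (hypothetical mangled name and offset)

     &_ZTV1A + 16   // ADDR_EXPR of the vtable VAR_DECL plus the offset
                    // to the address point past the offset/RTTI slots

   so two TREE_OPERAND hops recover the underlying VAR_DECL.  The same
   pattern recurs throughout this file.  */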

/* One Definition Rule hashtable helpers.  */

struct odr_hasher
{
  typedef odr_type_d value_type;
  typedef union tree_node compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Return the type that was declared with T's name so that T is a
   qualified variant of it.  */

static inline tree
main_odr_variant (const_tree t)
{
  if (TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
    return TREE_TYPE (TYPE_NAME (t));
  /* Unnamed types and non-C++ produced types can be compared by variants.  */
  else
    return TYPE_MAIN_VARIANT (t);
}

/* Produce hash based on type name.  */

static hashval_t
hash_type_name (tree t)
{
  gcc_checking_assert (main_odr_variant (t) == t);

  /* If not in LTO, all main variants are unique, so we can do
     pointer hash.  */
  if (!in_lto_p)
    return htab_hash_pointer (t);

  /* Anonymous types are unique.  */
  if (type_in_anonymous_namespace_p (t))
    return htab_hash_pointer (t);

  /* ODR types have name specified.  */
  if (TYPE_NAME (t)
      && DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t)))
    return IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (TYPE_NAME (t)));

  /* For polymorphic types that were compiled with -fno-lto-odr-type-merging
     we can simply hash the virtual table.  */
  if (TREE_CODE (t) == RECORD_TYPE
      && TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
    {
      tree v = BINFO_VTABLE (TYPE_BINFO (t));
      hashval_t hash = 0;

      if (TREE_CODE (v) == POINTER_PLUS_EXPR)
        {
          hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
          v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
        }

      v = DECL_ASSEMBLER_NAME (v);
      hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
      return hash;
    }

  /* Builtin types may appear as main variants of ODR types and are unique.
     Sanity check we do not get anything that looks non-builtin.  */
  gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
                       || TREE_CODE (t) == VOID_TYPE
                       || TREE_CODE (t) == COMPLEX_TYPE
                       || TREE_CODE (t) == REAL_TYPE
                       || TREE_CODE (t) == POINTER_TYPE);
  return htab_hash_pointer (t);
}

/* Return the computed hashcode for ODR_TYPE.  */

inline hashval_t
odr_hasher::hash (const value_type *odr_type)
{
  return hash_type_name (odr_type->type);
}

/* For languages with One Definition Rule, work out if
   types are the same based on their name.

   This is non-trivial for LTO where minor differences in
   the type representation may have prevented type merging
   from merging two copies of an otherwise equivalent type.

   Until we start streaming mangled type names, this function works
   only for polymorphic types.  */

bool
types_same_for_odr (const_tree type1, const_tree type2)
{
  gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));

  type1 = main_odr_variant (type1);
  type2 = main_odr_variant (type2);

  if (type1 == type2)
    return true;

  if (!in_lto_p)
    return false;

  /* Check for anonymous namespaces.  Those have !TREE_PUBLIC
     on the corresponding TYPE_STUB_DECL.  */
  if (type_in_anonymous_namespace_p (type1)
      || type_in_anonymous_namespace_p (type2))
    return false;


  /* ODR name of the type is set in DECL_ASSEMBLER_NAME of its TYPE_NAME.

     Ideally we should never meet types without ODR names here.  It can
     however happen in two cases:

     1) for builtin types that are not streamed but rebuilt in lto/lto-lang.c
        Here testing for equivalence is safe, since their MAIN_VARIANTs are
        unique.
     2) for units streamed with -fno-lto-odr-type-merging.  Here we can't
        establish precise ODR equivalency, but for correctness we care only
        about equivalency on complete polymorphic types.  For these we can
        compare assembler names of their virtual tables.  */
  if ((!TYPE_NAME (type1) || !DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type1)))
      || (!TYPE_NAME (type2) || !DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type2))))
    {
      /* See if types are obviously different (i.e. different codes
         or polymorphic wrt non-polymorphic).  This is not strictly correct
         for ODR violating programs, but we can't do better without streaming
         ODR names.  */
      if (TREE_CODE (type1) != TREE_CODE (type2))
        return false;
      if (TREE_CODE (type1) == RECORD_TYPE
          && (TYPE_BINFO (type1) == NULL_TREE) != (TYPE_BINFO (type2) == NULL_TREE))
        return false;
      if (TREE_CODE (type1) == RECORD_TYPE && TYPE_BINFO (type1)
          && (BINFO_VTABLE (TYPE_BINFO (type1)) == NULL_TREE)
             != (BINFO_VTABLE (TYPE_BINFO (type2)) == NULL_TREE))
        return false;

      /* At the moment we have no way to establish ODR equivalence at LTO
         other than comparing virtual table pointers of polymorphic types.
         Eventually we should start saving mangled names in TYPE_NAME.
         Then this condition will become non-trivial.  */

      if (TREE_CODE (type1) == RECORD_TYPE
          && TYPE_BINFO (type1) && TYPE_BINFO (type2)
          && BINFO_VTABLE (TYPE_BINFO (type1))
          && BINFO_VTABLE (TYPE_BINFO (type2)))
        {
          tree v1 = BINFO_VTABLE (TYPE_BINFO (type1));
          tree v2 = BINFO_VTABLE (TYPE_BINFO (type2));
          gcc_assert (TREE_CODE (v1) == POINTER_PLUS_EXPR
                      && TREE_CODE (v2) == POINTER_PLUS_EXPR);
          return (operand_equal_p (TREE_OPERAND (v1, 1),
                                   TREE_OPERAND (v2, 1), 0)
                  && DECL_ASSEMBLER_NAME
                         (TREE_OPERAND (TREE_OPERAND (v1, 0), 0))
                     == DECL_ASSEMBLER_NAME
                         (TREE_OPERAND (TREE_OPERAND (v2, 0), 0)));
        }
      gcc_unreachable ();
    }
  return (DECL_ASSEMBLER_NAME (TYPE_NAME (type1))
          == DECL_ASSEMBLER_NAME (TYPE_NAME (type2)));
}
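
/* Example (hypothetical): with -fno-lto-odr-type-merging two units may
   stream distinct RECORD_TYPE trees for

     struct A { virtual void f (); };

   Neither tree carries an ODR name, but both vtable references reduce to
   the same assembler name (_ZTV1A) at the same offset, so the fallback
   above still recognizes them as the same ODR type.  */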


/* Compare types T1 and T2 and return true if they are
   equivalent.  */

inline bool
odr_hasher::equal (const value_type *t1, const compare_type *ct2)
{
  tree t2 = const_cast <tree> (ct2);

  gcc_checking_assert (main_odr_variant (t2) == t2);
  if (t1->type == t2)
    return true;
  if (!in_lto_p)
    return false;
  return types_same_for_odr (t1->type, t2);
}

/* Free ODR type V.  */

inline void
odr_hasher::remove (value_type *v)
{
  v->bases.release ();
  v->derived_types.release ();
  if (v->types_set)
    delete v->types_set;
  ggc_free (v);
}

/* ODR type hash used to look up ODR type based on tree type node.  */

typedef hash_table<odr_hasher> odr_hash_type;
static odr_hash_type *odr_hash;

/* ODR types are also stored into ODR_TYPE vector to allow consistent
   walking.  Bases appear before derived types.  Vector is garbage collected
   so we won't end up visiting empty types.  */

static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
#define odr_types (*odr_types_ptr)

/* Set TYPE_BINFO of TYPE and its variants to BINFO.  */
void
set_type_binfo (tree type, tree binfo)
{
  for (; type; type = TYPE_NEXT_VARIANT (type))
    if (COMPLETE_TYPE_P (type))
      TYPE_BINFO (type) = binfo;
    else
      gcc_assert (!TYPE_BINFO (type));
}

/* Compare T1 and T2 based on name or structure.  */

static bool
odr_subtypes_equivalent_p (tree t1, tree t2, hash_set<tree> *visited)
{
  bool an1, an2;

  /* This can happen in incomplete types that should be handled earlier.  */
  gcc_assert (t1 && t2);

  t1 = main_odr_variant (t1);
  t2 = main_odr_variant (t2);
  if (t1 == t2)
    return true;

  /* Anonymous namespace types must match exactly.  */
  an1 = type_in_anonymous_namespace_p (t1);
  an2 = type_in_anonymous_namespace_p (t2);
  if (an1 != an2 || an1)
    return false;

  /* For types where we cannot establish ODR equivalency (either by ODR names
     or by virtual tables), recurse and deeply compare.  */
  if ((!odr_type_p (t1) || !odr_type_p (t2))
      && (TREE_CODE (t1) != RECORD_TYPE || TREE_CODE (t2) != RECORD_TYPE
          || !TYPE_BINFO (t1) || !TYPE_BINFO (t2)
          || !polymorphic_type_binfo_p (TYPE_BINFO (t1))
          || !polymorphic_type_binfo_p (TYPE_BINFO (t2))))
    {
      if (TREE_CODE (t1) != TREE_CODE (t2))
        return false;
      if ((TYPE_NAME (t1) == NULL_TREE) != (TYPE_NAME (t2) == NULL_TREE))
        return false;
      if (TYPE_NAME (t1) && DECL_NAME (TYPE_NAME (t1)) != DECL_NAME (TYPE_NAME (t2)))
        return false;
      /* This should really be a pair hash, but for the moment we do not need
         100% reliability and it would be better to compare all ODR types so
         recursion here is needed only for component types.  */
      if (visited->add (t1))
        return true;
      return odr_types_equivalent_p (t1, t2, false, NULL, visited);
    }

  return types_same_for_odr (t1, t2);
}

/* Compare two virtual tables, PREVAILING and VTABLE, and output ODR
   violation warnings.  */

void
compare_virtual_tables (varpool_node *prevailing, varpool_node *vtable)
{
  int n1, n2;
  if (DECL_VIRTUAL_P (prevailing->decl) != DECL_VIRTUAL_P (vtable->decl))
    {
      odr_violation_reported = true;
      if (DECL_VIRTUAL_P (prevailing->decl))
        {
          varpool_node *tmp = prevailing;
          prevailing = vtable;
          vtable = tmp;
        }
      if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (vtable->decl))),
                      OPT_Wodr,
                      "virtual table of type %qD violates one definition rule",
                      DECL_CONTEXT (vtable->decl)))
        inform (DECL_SOURCE_LOCATION (prevailing->decl),
                "variable of same assembler name as the virtual table is "
                "defined in another translation unit");
      return;
    }
  if (!prevailing->definition || !vtable->definition)
    return;
  for (n1 = 0, n2 = 0; true; n1++, n2++)
    {
      struct ipa_ref *ref1, *ref2;
      bool end1, end2;
      end1 = !prevailing->iterate_reference (n1, ref1);
      end2 = !vtable->iterate_reference (n2, ref2);
      if (end1 && end2)
        return;
      if (!end1 && !end2
          && DECL_ASSEMBLER_NAME (ref1->referred->decl)
             != DECL_ASSEMBLER_NAME (ref2->referred->decl)
          && !n2
          && !DECL_VIRTUAL_P (ref2->referred->decl)
          && DECL_VIRTUAL_P (ref1->referred->decl))
        {
          if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (vtable->decl))), 0,
                          "virtual table of type %qD contains RTTI information",
                          DECL_CONTEXT (vtable->decl)))
            {
              inform (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
                      "but is prevailed by one without RTTI from another "
                      "translation unit");
              inform (DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
                      "RTTI will not work on this type");
            }
          n2++;
          end2 = !vtable->iterate_reference (n2, ref2);
        }
      if (!end1 && !end2
          && DECL_ASSEMBLER_NAME (ref1->referred->decl)
             != DECL_ASSEMBLER_NAME (ref2->referred->decl)
          && !n1
          && !DECL_VIRTUAL_P (ref1->referred->decl)
          && DECL_VIRTUAL_P (ref2->referred->decl))
        {
          n1++;
          end1 = !prevailing->iterate_reference (n1, ref1);
        }
      if (end1 || end2)
        {
          if (end1)
            {
              varpool_node *tmp = prevailing;
              prevailing = vtable;
              vtable = tmp;
              ref1 = ref2;
            }
          if (warning_at (DECL_SOURCE_LOCATION
                            (TYPE_NAME (DECL_CONTEXT (vtable->decl))), 0,
                          "virtual table of type %qD violates "
                          "one definition rule",
                          DECL_CONTEXT (vtable->decl)))
            {
              inform (DECL_SOURCE_LOCATION
                        (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
                      "the conflicting type defined in another translation "
                      "unit");
              inform (DECL_SOURCE_LOCATION
                        (TYPE_NAME (DECL_CONTEXT (ref1->referring->decl))),
                      "contains additional virtual method %qD",
                      ref1->referred->decl);
            }
          return;
        }
      if (DECL_ASSEMBLER_NAME (ref1->referred->decl)
          != DECL_ASSEMBLER_NAME (ref2->referred->decl))
        {
          if (warning_at (DECL_SOURCE_LOCATION
                            (TYPE_NAME (DECL_CONTEXT (vtable->decl))), 0,
                          "virtual table of type %qD violates "
                          "one definition rule",
                          DECL_CONTEXT (vtable->decl)))
            {
              inform (DECL_SOURCE_LOCATION
                        (TYPE_NAME (DECL_CONTEXT (prevailing->decl))),
                      "the conflicting type defined in another translation "
                      "unit");
              inform (DECL_SOURCE_LOCATION (ref1->referred->decl),
                      "virtual method %qD", ref1->referred->decl);
              inform (DECL_SOURCE_LOCATION (ref2->referred->decl),
                      "ought to match virtual method %qD but does not",
                      ref2->referred->decl);
              return;
            }
        }
    }
}
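
/* A typical way to trip the RTTI mismatch handled above (hypothetical
   setup): one unit compiled with -frtti and another with -fno-rtti, both
   defining

     struct A { virtual void f (); };

   The -frtti vtable references the typeinfo _ZTI1A (its first streamed
   reference, hence the !n2 check) while the -fno-rtti vtable does not; if
   the RTTI-less copy prevails at link time, dynamic_cast and typeid on A
   silently stop working.  */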

/* Output ODR violation warning about T1 and T2 with REASON.
   Display location of ST1 and ST2 if REASON speaks about field or
   method of the type.
   If WARN is false, do nothing.  Set WARNED if warning was indeed
   output.  */

void
warn_odr (tree t1, tree t2, tree st1, tree st2,
          bool warn, bool *warned, const char *reason)
{
  tree decl2 = TYPE_NAME (t2);

  if (!warn)
    return;
  if (!warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (t1)), OPT_Wodr,
                   "type %qT violates one definition rule",
                   t1))
    return;
  if (!st1)
    ;
  else if (TREE_CODE (st1) == FIELD_DECL)
    {
      inform (DECL_SOURCE_LOCATION (decl2),
              "a different type is defined in another translation unit");
      inform (DECL_SOURCE_LOCATION (st1),
              "the first difference of corresponding definitions is field %qD",
              st1);
      decl2 = st2;
    }
  else if (TREE_CODE (st1) == FUNCTION_DECL)
    {
      inform (DECL_SOURCE_LOCATION (decl2),
              "a different type is defined in another translation unit");
      inform (DECL_SOURCE_LOCATION (st1),
              "the first difference of corresponding definitions is method %qD",
              st1);
      decl2 = st2;
    }
  else
    return;
  inform (DECL_SOURCE_LOCATION (decl2), reason);

  if (warned)
    *warned = true;
}

/* We already warned about ODR mismatch.  T1 and T2 ought to be equivalent
   because they are used in the same place in ODR-matching types.
   They are not; inform the user.  */

void
warn_types_mismatch (tree t1, tree t2)
{
  if (!TYPE_NAME (t1) || !TYPE_NAME (t2))
    return;
  /* In Firefox it is a common bug to have the same types but in
     different namespaces.  Be a bit more informative on
     this.  */
  if (TYPE_CONTEXT (t1) && TYPE_CONTEXT (t2)
      && (((TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL)
            != (TREE_CODE (TYPE_CONTEXT (t2)) == NAMESPACE_DECL))
          || (TREE_CODE (TYPE_CONTEXT (t1)) == NAMESPACE_DECL
              && (DECL_NAME (TYPE_CONTEXT (t1)) !=
                  DECL_NAME (TYPE_CONTEXT (t2))))))
    inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
            "type %qT should match type %qT but is defined "
            "in a different namespace",
            t1, t2);
  else
    inform (DECL_SOURCE_LOCATION (TYPE_NAME (t1)),
            "type %qT should match type %qT",
            t1, t2);
  inform (DECL_SOURCE_LOCATION (TYPE_NAME (t2)),
          "the incompatible type is defined here");
}
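
/* A sketch of the namespace case (hypothetical):

     // tu1.C                                  // tu2.C
     namespace a { struct S { int x; }; }      namespace b { struct S { long x; }; }
     struct T { a::S s; };                     struct T { b::S s; };

   Both definitions of T claim the same ODR name, but the type of field s
   lives in a different namespace, so the more specific note above is
   emitted.  */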

/* Compare T1 and T2, report ODR violations if WARN is true and set
   WARNED to true if anything is reported.  Return true if types match.
   If true is returned, the types are also compatible in the sense of
   gimple_canonical_types_compatible_p.  */

static bool
odr_types_equivalent_p (tree t1, tree t2, bool warn, bool *warned, hash_set<tree> *visited)
{
  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;
  gcc_assert (!type_in_anonymous_namespace_p (t1));
  gcc_assert (!type_in_anonymous_namespace_p (t2));

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
                G_("a different type is defined in another translation unit"));
      return false;
    }

  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
                G_("a type with different qualifiers is defined in another "
                   "translation unit"));
      return false;
    }

  if (comp_type_attributes (t1, t2) != 1)
    {
      warn_odr (t1, t2, NULL, NULL, warn, warned,
                G_("a type with different attributes "
                   "is defined in another translation unit"));
      return false;
    }

  if (TREE_CODE (t1) == ENUMERAL_TYPE)
    {
      tree v1, v2;
      for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
           v1 && v2 ; v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
        {
          if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
            {
              warn_odr (t1, t2, NULL, NULL, warn, warned,
                        G_("an enum with different value name"
                           " is defined in another translation unit"));
              return false;
            }
          if (TREE_VALUE (v1) != TREE_VALUE (v2)
              && !operand_equal_p (DECL_INITIAL (TREE_VALUE (v1)),
                                   DECL_INITIAL (TREE_VALUE (v2)), 0))
            {
              warn_odr (t1, t2, NULL, NULL, warn, warned,
                        G_("an enum with different values is defined"
                           " in another translation unit"));
              return false;
            }
        }
      if (v1 || v2)
        {
          warn_odr (t1, t2, NULL, NULL, warn, warned,
                    G_("an enum with mismatching number of values "
                       "is defined in another translation unit"));
          return false;
        }
    }

  /* Non-aggregate types can be handled cheaply.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
        {
          warn_odr (t1, t2, NULL, NULL, warn, warned,
                    G_("a type with different precision is defined "
                       "in another translation unit"));
          return false;
        }
      if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
        {
          warn_odr (t1, t2, NULL, NULL, warn, warned,
                    G_("a type with different signedness is defined "
                       "in another translation unit"));
          return false;
        }

      if (TREE_CODE (t1) == INTEGER_TYPE
          && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
        {
          /* char wrt. uint8_t?  */
          warn_odr (t1, t2, NULL, NULL, warn, warned,
                    G_("a different type is defined in another "
                       "translation unit"));
          return false;
        }

      /* For canonical type comparisons we do not want to build SCCs
         so we cannot compare pointed-to types.  But we can, for now,
         require the same pointed-to type kind and match what
         useless_type_conversion_p would do.  */
      if (POINTER_TYPE_P (t1))
        {
          if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
              != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
            {
              warn_odr (t1, t2, NULL, NULL, warn, warned,
                        G_("it is defined as a pointer in different address "
                           "space in another translation unit"));
              return false;
            }

          if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
            {
              warn_odr (t1, t2, NULL, NULL, warn, warned,
                        G_("it is defined as a pointer to different type "
                           "in another translation unit"));
              if (warn && warned)
                warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
              return false;
            }
        }

      /* Tail-recurse to components.  */
      if ((TREE_CODE (t1) == VECTOR_TYPE || TREE_CODE (t1) == COMPLEX_TYPE)
          && !odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
        {
          /* Probably specific enough.  */
          warn_odr (t1, t2, NULL, NULL, warn, warned,
                    G_("a different type is defined "
                       "in another translation unit"));
          if (warn && warned)
            warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
          return false;
        }

      gcc_assert (operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0));
      gcc_assert (operand_equal_p (TYPE_SIZE_UNIT (t1),
                                   TYPE_SIZE_UNIT (t2), 0));
      gcc_assert (TYPE_MODE (t1) == TYPE_MODE (t2));

      return true;
    }

  /* Do type-specific comparisons.  */
  switch (TREE_CODE (t1))
    {
    case ARRAY_TYPE:
      {
        /* Array types are the same if the element types are the same and
           the number of elements are the same.  */
        if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
          {
            warn_odr (t1, t2, NULL, NULL, warn, warned,
                      G_("a different type is defined in another "
                         "translation unit"));
            if (warn && warned)
              warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
          }
        gcc_assert (TYPE_STRING_FLAG (t1) == TYPE_STRING_FLAG (t2));
        gcc_assert (TYPE_NONALIASED_COMPONENT (t1)
                    == TYPE_NONALIASED_COMPONENT (t2));

        tree i1 = TYPE_DOMAIN (t1);
        tree i2 = TYPE_DOMAIN (t2);

        /* For an incomplete external array, the type domain can be
           NULL_TREE.  Check this condition also.  */
        if (i1 == NULL_TREE || i2 == NULL_TREE)
          return true;

        tree min1 = TYPE_MIN_VALUE (i1);
        tree min2 = TYPE_MIN_VALUE (i2);
        tree max1 = TYPE_MAX_VALUE (i1);
        tree max2 = TYPE_MAX_VALUE (i2);

        /* In C++, minimums should always be 0.  */
        gcc_assert (min1 == min2);
        if (!operand_equal_p (max1, max2, 0))
          {
            warn_odr (t1, t2, NULL, NULL, warn, warned,
                      G_("an array of different size is defined "
                         "in another translation unit"));
            return false;
          }
        gcc_assert (operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0));
        gcc_assert (operand_equal_p (TYPE_SIZE_UNIT (t1),
                                     TYPE_SIZE_UNIT (t2), 0));
      }
    return true;

    case METHOD_TYPE:
    case FUNCTION_TYPE:
      /* Function types are the same if the return type and argument types
         are the same.  */
      if (!odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2), visited))
        {
          warn_odr (t1, t2, NULL, NULL, warn, warned,
                    G_("has different return value "
                       "in another translation unit"));
          if (warn && warned)
            warn_types_mismatch (TREE_TYPE (t1), TREE_TYPE (t2));
          return false;
        }

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
        return true;
      else
        {
          tree parms1, parms2;

          for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
               parms1 && parms2;
               parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
            {
              if (!odr_subtypes_equivalent_p
                     (TREE_VALUE (parms1), TREE_VALUE (parms2), visited))
                {
                  warn_odr (t1, t2, NULL, NULL, warn, warned,
                            G_("has different parameters in another "
                               "translation unit"));
                  if (warn && warned)
                    warn_types_mismatch (TREE_VALUE (parms1),
                                         TREE_VALUE (parms2));
                  return false;
                }
            }

          if (parms1 || parms2)
            {
              warn_odr (t1, t2, NULL, NULL, warn, warned,
                        G_("has different parameters "
                           "in another translation unit"));
              return false;
            }

          return true;
        }

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f1, f2;

        /* For aggregate types, all the fields must be the same.  */
        if (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2))
          {
            for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
                 f1 || f2;
                 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
              {
                /* Skip non-fields.  */
                while (f1 && TREE_CODE (f1) != FIELD_DECL)
                  f1 = TREE_CHAIN (f1);
                while (f2 && TREE_CODE (f2) != FIELD_DECL)
                  f2 = TREE_CHAIN (f2);
                if (!f1 || !f2)
                  break;
                if (DECL_ARTIFICIAL (f1) != DECL_ARTIFICIAL (f2))
                  break;
                if (DECL_NAME (f1) != DECL_NAME (f2)
                    && !DECL_ARTIFICIAL (f1))
                  {
                    warn_odr (t1, t2, f1, f2, warn, warned,
                              G_("a field with different name is defined "
                                 "in another translation unit"));
                    return false;
                  }
                if (!odr_subtypes_equivalent_p (TREE_TYPE (f1), TREE_TYPE (f2), visited))
                  {
                    /* Do not warn about artificial fields and just go into
                       generic field mismatch warning.  */
                    if (DECL_ARTIFICIAL (f1))
                      break;

                    warn_odr (t1, t2, f1, f2, warn, warned,
                              G_("a field of same name but different type "
                                 "is defined in another translation unit"));
                    if (warn && warned)
                      warn_types_mismatch (TREE_TYPE (f1), TREE_TYPE (f2));
                    return false;
                  }
                if (!gimple_compare_field_offset (f1, f2))
                  {
                    /* Do not warn about artificial fields and just go into
                       generic field mismatch warning.  */
                    if (DECL_ARTIFICIAL (f1))
                      break;
                    warn_odr (t1, t2, t1, t2, warn, warned,
                              G_("a field has different layout "
                                 "in another translation unit"));
                    return false;
                  }
                gcc_assert (DECL_NONADDRESSABLE_P (f1)
                            == DECL_NONADDRESSABLE_P (f2));
              }

            /* If one aggregate has more fields than the other, they
               are not the same.  */
            if (f1 || f2)
              {
                warn_odr (t1, t2, NULL, NULL, warn, warned,
                          G_("a type with different number of fields "
                             "is defined in another translation unit"));
                return false;
              }
            if ((TYPE_MAIN_VARIANT (t1) == t1 || TYPE_MAIN_VARIANT (t2) == t2)
                && (TYPE_METHODS (TYPE_MAIN_VARIANT (t1))
                    != TYPE_METHODS (TYPE_MAIN_VARIANT (t2))))
              {
                for (f1 = TYPE_METHODS (TYPE_MAIN_VARIANT (t1)),
                     f2 = TYPE_METHODS (TYPE_MAIN_VARIANT (t2));
                     f1 && f2 ; f1 = DECL_CHAIN (f1), f2 = DECL_CHAIN (f2))
                  {
                    if (DECL_ASSEMBLER_NAME (f1) != DECL_ASSEMBLER_NAME (f2))
                      {
                        warn_odr (t1, t2, f1, f2, warn, warned,
                                  G_("a different method of same type "
                                     "is defined in another translation unit"));
                        return false;
                      }
                    if (DECL_VIRTUAL_P (f1) != DECL_VIRTUAL_P (f2))
                      {
                        warn_odr (t1, t2, f1, f2, warn, warned,
                                  G_("a definition that differs by virtual "
                                     "keyword in another translation unit"));
                        return false;
                      }
                    if (DECL_VINDEX (f1) != DECL_VINDEX (f2))
                      {
                        warn_odr (t1, t2, f1, f2, warn, warned,
                                  G_("virtual table layout differs in another "
                                     "translation unit"));
                        return false;
                      }
                    if (!odr_subtypes_equivalent_p (TREE_TYPE (f1), TREE_TYPE (f2), visited))
                      {
                        warn_odr (t1, t2, f1, f2, warn, warned,
                                  G_("method with incompatible type is defined "
                                     "in another translation unit"));
                        return false;
                      }
                  }
                if (f1 || f2)
                  {
                    warn_odr (t1, t2, NULL, NULL, warn, warned,
                              G_("a type with different number of methods "
                                 "is defined in another translation unit"));
                    return false;
                  }
              }
            gcc_assert (operand_equal_p (TYPE_SIZE (t1), TYPE_SIZE (t2), 0));
            gcc_assert (operand_equal_p (TYPE_SIZE_UNIT (t1),
                                         TYPE_SIZE_UNIT (t2), 0));
          }

        return true;
      }

    default:
      gcc_unreachable ();
    }
}
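
/* A minimal -Wodr trigger that the function above diagnoses (hypothetical):

     // tu1.C: struct S { int a; };
     // tu2.C: struct S { long a; };

   Under -flto both trees reach the same odr_type node; the field walk
   reports "a field of same name but different type" and warn_types_mismatch
   points at the two definitions of the field type.  */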

/* TYPE is equivalent to VAL by ODR, but its tree representation differs
   from VAL->type.  This may happen in LTO where tree merging did not merge
   all variants of the same type.  It may or may not mean an ODR violation.
   Add it to the list of duplicates and warn on some violations.  */

static bool
add_type_duplicate (odr_type val, tree type)
{
  bool build_bases = false;
  if (!val->types_set)
    val->types_set = new hash_set<tree>;

  /* Always prefer complete type to be the leader.  */
  if (!COMPLETE_TYPE_P (val->type)
      && COMPLETE_TYPE_P (type))
    {
      tree tmp = type;

      build_bases = true;
      type = val->type;
      val->type = tmp;
    }

  /* See if this duplicate is new.  */
  if (!val->types_set->add (type))
    {
      bool merge = true;
      bool base_mismatch = false;
      unsigned int i,j;
      bool warned = false;
      hash_set<tree> visited;

      gcc_assert (in_lto_p);
      vec_safe_push (val->types, type);

      /* First we compare memory layout.  */
      if (!odr_types_equivalent_p (val->type, type, !flag_ltrans && !val->odr_violated,
                                   &warned, &visited))
        {
          merge = false;
          odr_violation_reported = true;
          val->odr_violated = true;
          if (symtab->dump_file)
            {
              fprintf (symtab->dump_file, "ODR violation\n");

              print_node (symtab->dump_file, "", val->type, 0);
              putc ('\n',symtab->dump_file);
              print_node (symtab->dump_file, "", type, 0);
              putc ('\n',symtab->dump_file);
            }
        }

      /* Next sanity check that bases are the same.  If not, we will end
         up producing wrong answers.  */
      if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
          && TREE_CODE (val->type) == RECORD_TYPE
          && TREE_CODE (type) == RECORD_TYPE
          && TYPE_BINFO (val->type) && TYPE_BINFO (type))
        {
          for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
            if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type), i)))
              {
                odr_type base = get_odr_type
                                   (BINFO_TYPE
                                      (BINFO_BASE_BINFO (TYPE_BINFO (type),
                                                         i)),
                                    true);
                if (val->bases.length () <= j || val->bases[j] != base)
                  base_mismatch = true;
                j++;
              }
          if (base_mismatch)
            {
              merge = false;
              odr_violation_reported = true;

              if (!warned && !val->odr_violated)
                warn_odr (type, val->type, NULL, NULL, !warned, &warned,
                          "a type with the same name but different bases is "
                          "defined in another translation unit");
              val->odr_violated = true;
              if (symtab->dump_file)
                {
                  fprintf (symtab->dump_file, "ODR base violation or merging bug?\n");

                  print_node (symtab->dump_file, "", val->type, 0);
                  putc ('\n',symtab->dump_file);
                  print_node (symtab->dump_file, "", type, 0);
                  putc ('\n',symtab->dump_file);
                }
            }
        }

      /* Regularize things a little.  During LTO same types may come with
         different BINFOs.  Either because their virtual table was
         not merged by tree merging and only later at decl merging, or
         because one type comes with an external vtable while the other
         comes with an internal one.  We want to merge equivalent binfos
         to conserve memory and streaming overhead.

         The external vtables are more harmful: they contain references
         to external declarations of methods that may be defined in the
         merged LTO unit.  For this reason we absolutely need to remove
         them and replace by internal variants.  Not doing so will lead
         to incomplete answers from possible_polymorphic_call_targets.

         FIXME: disabled for now; because ODR types are now built during
         streaming in, the variants do not need to be linked to the type,
         yet.  We need to do the merging in a cleanup pass to be implemented
         soon.  */
      if (!flag_ltrans && merge
          && 0
          && TREE_CODE (val->type) == RECORD_TYPE
          && TREE_CODE (type) == RECORD_TYPE
          && TYPE_BINFO (val->type) && TYPE_BINFO (type)
          && TYPE_MAIN_VARIANT (type) == type
          && TYPE_MAIN_VARIANT (val->type) == val->type
          && BINFO_VTABLE (TYPE_BINFO (val->type))
          && BINFO_VTABLE (TYPE_BINFO (type)))
        {
          tree master_binfo = TYPE_BINFO (val->type);
          tree v1 = BINFO_VTABLE (master_binfo);
          tree v2 = BINFO_VTABLE (TYPE_BINFO (type));

          if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
            {
              gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
                          && operand_equal_p (TREE_OPERAND (v1, 1),
                                              TREE_OPERAND (v2, 1), 0));
              v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
              v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
            }
          gcc_assert (DECL_ASSEMBLER_NAME (v1)
                      == DECL_ASSEMBLER_NAME (v2));

          if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
            {
              unsigned int i;

              set_type_binfo (val->type, TYPE_BINFO (type));
              for (i = 0; i < val->types->length (); i++)
                {
                  if (TYPE_BINFO ((*val->types)[i])
                      == master_binfo)
                    set_type_binfo ((*val->types)[i], TYPE_BINFO (type));
                }
              BINFO_TYPE (TYPE_BINFO (type)) = val->type;
            }
          else
            set_type_binfo (type, master_binfo);
        }
    }
  return build_bases;
}

/* Get ODR type hash entry for TYPE.  If INSERT is true, create
   possibly new entry.  */

odr_type
get_odr_type (tree type, bool insert)
{
  odr_type_d **slot;
  odr_type val;
  hashval_t hash;
  bool build_bases = false;
  bool insert_to_odr_array = false;
  int base_id = -1;

  type = main_odr_variant (type);

  hash = hash_type_name (type);
  slot
     = odr_hash->find_slot_with_hash (type, hash, insert ? INSERT : NO_INSERT);
  if (!slot)
    return NULL;

  /* See if we already have entry for type.  */
  if (*slot)
    {
      val = *slot;

      /* With LTO we need to support multiple tree representation of
         the same ODR type.  */
      if (val->type != type)
        build_bases = add_type_duplicate (val, type);
    }
  else
    {
      val = ggc_cleared_alloc<odr_type_d> ();
      val->type = type;
      val->bases = vNULL;
      val->derived_types = vNULL;
      val->anonymous_namespace = type_in_anonymous_namespace_p (type);
      build_bases = COMPLETE_TYPE_P (val->type);
      insert_to_odr_array = true;
    }

  if (build_bases && TREE_CODE (type) == RECORD_TYPE && TYPE_BINFO (type)
      && type == TYPE_MAIN_VARIANT (type))
    {
      tree binfo = TYPE_BINFO (type);
      unsigned int i;

      gcc_assert (BINFO_TYPE (TYPE_BINFO (val->type)) == type);

      val->all_derivations_known = type_all_derivations_known_p (type);
      *slot = val;
      for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
        /* For now record only polymorphic types.  Other types are
           pointless for devirtualization and we cannot precisely
           determine ODR equivalency of these during LTO.  */
        if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
          {
            odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
                                                                        i)),
                                          true);
            gcc_assert (TYPE_MAIN_VARIANT (base->type) == base->type);
            base->derived_types.safe_push (val);
            val->bases.safe_push (base);
            if (base->id > base_id)
              base_id = base->id;
          }
    }
  /* Ensure that type always appears after bases.  */
  if (insert_to_odr_array)
    {
      if (odr_types_ptr)
        val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  else if (base_id > val->id)
    {
      odr_types[val->id] = 0;
      /* Be sure we did not record any derived types; these may need
         renumbering too.  */
      gcc_assert (val->derived_types.length() == 0);
      if (odr_types_ptr)
        val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  return val;
}

/* Add TYPE to the ODR type hash.  */

void
register_odr_type (tree type)
{
  if (!odr_hash)
    odr_hash = new odr_hash_type (23);
  /* Arrange things to be nicer and insert main variants first.  */
  if (odr_type_p (TYPE_MAIN_VARIANT (type)))
    get_odr_type (TYPE_MAIN_VARIANT (type), true);
  if (TYPE_MAIN_VARIANT (type) != type)
    get_odr_type (type, true);
}

/* Dump ODR type T and all its derived types.  INDENT specifies the
   indentation for recursive printing.  */

static void
dump_odr_type (FILE *f, odr_type t, int indent=0)
{
  unsigned int i;
  fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
  print_generic_expr (f, t->type, TDF_SLIM);
  fprintf (f, "%s", t->anonymous_namespace ? " (anonymous namespace)":"");
  fprintf (f, "%s\n", t->all_derivations_known ? " (derivations known)":"");
  if (TYPE_NAME (t->type))
    {
      fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
               DECL_SOURCE_FILE (TYPE_NAME (t->type)),
               DECL_SOURCE_LINE (TYPE_NAME (t->type)));
    }
  if (t->bases.length ())
    {
      fprintf (f, "%*s base odr type ids: ", indent * 2, "");
      for (i = 0; i < t->bases.length (); i++)
        fprintf (f, " %i", t->bases[i]->id);
      fprintf (f, "\n");
    }
  if (t->derived_types.length ())
    {
      fprintf (f, "%*s derived types:\n", indent * 2, "");
      for (i = 0; i < t->derived_types.length (); i++)
        dump_odr_type (f, t->derived_types[i], indent + 1);
    }
  fprintf (f, "\n");
}

/* Dump the type inheritance graph.  */

static void
dump_type_inheritance_graph (FILE *f)
{
  unsigned int i;
  if (!odr_types_ptr)
    return;
  fprintf (f, "\n\nType inheritance graph:\n");
  for (i = 0; i < odr_types.length (); i++)
    {
      if (odr_types[i] && odr_types[i]->bases.length () == 0)
        dump_odr_type (f, odr_types[i]);
    }
  for (i = 0; i < odr_types.length (); i++)
    {
      if (odr_types[i] && odr_types[i]->types && odr_types[i]->types->length ())
        {
          unsigned int j;
          fprintf (f, "Duplicate tree types for odr type %i\n", i);
          print_node (f, "", odr_types[i]->type, 0);
          for (j = 0; j < odr_types[i]->types->length (); j++)
            {
              tree t;
              fprintf (f, "duplicate #%i\n", j);
              print_node (f, "", (*odr_types[i]->types)[j], 0);
              t = (*odr_types[i]->types)[j];
              while (TYPE_P (t) && TYPE_CONTEXT (t))
                {
                  t = TYPE_CONTEXT (t);
                  print_node (f, "", t, 0);
                }
              putc ('\n',f);
            }
        }
    }
}

/* Given method type T, return type of class it belongs to.
   Look up the type of the implicit this pointer.  */

tree
method_class_type (const_tree t)
{
  tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
  gcc_assert (TREE_CODE (t) == METHOD_TYPE);

  return TREE_TYPE (first_parm_type);
}
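
/* E.g. for the METHOD_TYPE of a hypothetical int A::f (int), the first
   "argument" is the implicit this pointer of type A *, so the function
   above returns the RECORD_TYPE of A.  */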

/* Initialize IPA devirt and build inheritance tree graph.  */

void
build_type_inheritance_graph (void)
{
  struct symtab_node *n;
  FILE *inheritance_dump_file;
  int flags;

  if (odr_hash)
    return;
  timevar_push (TV_IPA_INHERITANCE);
  inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
  odr_hash = new odr_hash_type (23);

  /* We reconstruct the graph starting from types of all methods seen in
     the unit.  */
  FOR_EACH_SYMBOL (n)
    if (is_a <cgraph_node *> (n)
        && DECL_VIRTUAL_P (n->decl)
        && n->real_symbol_p ())
      get_odr_type (TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (n->decl))),
                    true);

    /* Look also for virtual tables of types that do not define any methods.

       We need it in a case where class B has virtual base of class A
       re-defining its virtual method and there is class C with no virtual
       methods with B as virtual base.

       Here we output B's virtual method in two variants - for non-virtual
       and virtual inheritance.  B's virtual table has the non-virtual
       version, while C's has the virtual one.

       For this reason we need to know about C in order to include both
       variants of B.  More correctly, record_target_from_binfo should
       add both variants of the method when walking B, but we have no
       link in between them.

       We rely on the fact that either the method is exported and thus we
       assume it is called externally, or C is in anonymous namespace and
       thus we will see the vtable.  A sketch of the hierarchy is given
       below.  */
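    /* A concrete shape of that situation (hypothetical classes):

         struct A { virtual void f (); };
         struct B : virtual A { void f (); };   // defines the method
         struct C : virtual B { };              // no methods of its own

       B::f is emitted in a non-virtual and a virtual-inheritance variant;
       B's own vtable uses the former, C's vtable the latter.  Scanning C's
       vtable (a DECL_VIRTUAL_P variable matched below) is what lets us
       record the second variant.  */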
1462
1463 else if (is_a <varpool_node *> (n)
1464 && DECL_VIRTUAL_P (n->decl)
1465 && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
1466 && TYPE_BINFO (DECL_CONTEXT (n->decl))
1467 && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
1468 get_odr_type (TYPE_MAIN_VARIANT (DECL_CONTEXT (n->decl)), true);
1469 if (inheritance_dump_file)
1470 {
1471 dump_type_inheritance_graph (inheritance_dump_file);
1472 dump_end (TDI_inheritance, inheritance_dump_file);
1473 }
1474 timevar_pop (TV_IPA_INHERITANCE);
1475 }

/* Return true if N has reference from live virtual table
   (and thus can be a destination of polymorphic call).
   Be conservatively correct when callgraph is not built or
   if the method may be referred to externally.  */

static bool
referenced_from_vtable_p (struct cgraph_node *node)
{
  int i;
  struct ipa_ref *ref;
  bool found = false;

  if (node->externally_visible
      || DECL_EXTERNAL (node->decl)
      || node->used_from_other_partition)
    return true;

  /* Keep this test constant time.
     It is unlikely this can happen except for the case where speculative
     devirtualization introduced many speculative edges to this node.
     In this case the target is very likely alive anyway.  */
  if (node->ref_list.referring.length () > 100)
    return true;

  /* We need references built.  */
  if (symtab->state <= CONSTRUCTION)
    return true;

  for (i = 0; node->iterate_referring (i, ref); i++)
    if ((ref->use == IPA_REF_ALIAS
         && referenced_from_vtable_p (dyn_cast<cgraph_node *> (ref->referring)))
        || (ref->use == IPA_REF_ADDR
            && TREE_CODE (ref->referring->decl) == VAR_DECL
            && DECL_VIRTUAL_P (ref->referring->decl)))
      {
        found = true;
        break;
      }
  return found;
}

/* If TARGET has associated node, record it in the NODES array.
   CAN_REFER specifies if the program can refer to the target directly.
   If TARGET is unknown (NULL) or it cannot be inserted (for example because
   its body was already removed and there is no way to refer to it), clear
   COMPLETEP.  */

static void
maybe_record_node (vec <cgraph_node *> &nodes,
                   tree target, hash_set<tree> *inserted,
                   bool can_refer,
                   bool *completep)
{
  struct cgraph_node *target_node, *alias_target;
  enum availability avail;

  /* cxa_pure_virtual and __builtin_unreachable do not need to be added into
     list of targets; the runtime effect of calling them is undefined.
     Only "real" virtual methods should be accounted.  */
  if (target && TREE_CODE (TREE_TYPE (target)) != METHOD_TYPE)
    return;

  if (!can_refer)
    {
      /* The only case when method of anonymous namespace becomes unreferable
         is when we completely optimized it out.  */
      if (flag_ltrans
          || !target
          || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
        *completep = false;
      return;
    }

  if (!target)
    return;

  target_node = cgraph_node::get (target);

  /* Prefer alias target over aliases, so we do not get confused by
     fake duplicates.  */
  if (target_node)
    {
      alias_target = target_node->ultimate_alias_target (&avail);
      if (target_node != alias_target
          && avail >= AVAIL_AVAILABLE
          && target_node->get_availability ())
        target_node = alias_target;
    }

  /* Method can only be called by polymorphic call if any
     of vtables referring to it are alive.

     While this holds for non-anonymous functions, too, there are
     cases where we want to keep them in the list; for example
     inline functions with -fno-weak are static, but we still
     may devirtualize them when the instance comes from another unit.
     The same holds for LTO.

     Currently we ignore these functions in speculative devirtualization.
     ??? Maybe it would make sense to be more aggressive for LTO even
     elsewhere.  */
  if (!flag_ltrans
      && type_in_anonymous_namespace_p (DECL_CONTEXT (target))
      && (!target_node
          || !referenced_from_vtable_p (target_node)))
    ;
  /* See if TARGET is a useful function we can deal with.  */
  else if (target_node != NULL
           && (TREE_PUBLIC (target)
               || DECL_EXTERNAL (target)
               || target_node->definition)
           && target_node->real_symbol_p ())
    {
      gcc_assert (!target_node->global.inlined_to);
      gcc_assert (target_node->real_symbol_p ());
      if (!inserted->add (target))
        {
          cached_polymorphic_call_targets->add (target_node);
          nodes.safe_push (target_node);
        }
    }
  else if (completep
           && (!type_in_anonymous_namespace_p
                 (DECL_CONTEXT (target))
               || flag_ltrans))
    *completep = false;
}

/* See if BINFO's type matches OUTER_TYPE.  If so, look up
   BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
   method in vtable and insert method to NODES array
   or BASES_TO_CONSIDER if this array is non-NULL.
   Otherwise recurse to base BINFOs.
   This matches what get_binfo_at_offset does, but with offset
   being unknown.

   TYPE_BINFOS is a stack of BINFOs of types with defined
   virtual table seen on the way from class type to BINFO.

   MATCHED_VTABLES tracks virtual tables we already did lookup
   for virtual function in.  INSERTED tracks nodes we already
   inserted.

   ANONYMOUS is true if BINFO is part of anonymous namespace.

   Clear COMPLETEP when we hit an unreferable target.  */

static void
record_target_from_binfo (vec <cgraph_node *> &nodes,
                          vec <tree> *bases_to_consider,
                          tree binfo,
                          tree otr_type,
                          vec <tree> &type_binfos,
                          HOST_WIDE_INT otr_token,
                          tree outer_type,
                          HOST_WIDE_INT offset,
                          hash_set<tree> *inserted,
                          hash_set<tree> *matched_vtables,
                          bool anonymous,
                          bool *completep)
{
  tree type = BINFO_TYPE (binfo);
  int i;
  tree base_binfo;


  if (BINFO_VTABLE (binfo))
    type_binfos.safe_push (binfo);
  if (types_same_for_odr (type, outer_type))
    {
      int i;
      tree type_binfo = NULL;

      /* Look up BINFO with virtual table.  For normal types it is always
         the last binfo on the stack.  */
      for (i = type_binfos.length () - 1; i >= 0; i--)
        if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
          {
            type_binfo = type_binfos[i];
            break;
          }
      if (BINFO_VTABLE (binfo))
        type_binfos.pop ();
      /* If this is duplicated BINFO for base shared by virtual inheritance,
         we may not have its associated vtable.  This is not a problem, since
         we will walk it on the other path.  */
      if (!type_binfo)
        return;
      tree inner_binfo = get_binfo_at_offset (type_binfo,
                                              offset, otr_type);
      if (!inner_binfo)
        {
          gcc_assert (odr_violation_reported);
          return;
        }
      /* For types in anonymous namespace first check if the respective vtable
         is alive.  If not, we know methods of the type can't be called.  */
      if (!flag_ltrans && anonymous)
        {
          tree vtable = BINFO_VTABLE (inner_binfo);
          varpool_node *vnode;

          if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
            vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
          vnode = varpool_node::get (vtable);
          if (!vnode || !vnode->definition)
            return;
        }
      gcc_assert (inner_binfo);
      if (bases_to_consider
          ? !matched_vtables->contains (BINFO_VTABLE (inner_binfo))
          : !matched_vtables->add (BINFO_VTABLE (inner_binfo)))
        {
          bool can_refer;
          tree target = gimple_get_virt_method_for_binfo (otr_token,
                                                          inner_binfo,
                                                          &can_refer);
          if (!bases_to_consider)
            maybe_record_node (nodes, target, inserted, can_refer, completep);
          /* Destructors are never called via construction vtables.  */
          else if (!target || !DECL_CXX_DESTRUCTOR_P (target))
            bases_to_consider->safe_push (target);
        }
      return;
    }

  /* Walk bases.  */
  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    /* Walking bases that have no virtual method is a pointless exercise.  */
    if (polymorphic_type_binfo_p (base_binfo))
      record_target_from_binfo (nodes, bases_to_consider, base_binfo, otr_type,
                                type_binfos,
                                otr_token, outer_type, offset, inserted,
                                matched_vtables, anonymous, completep);
  if (BINFO_VTABLE (binfo))
    type_binfos.pop ();
}
1716
1717 /* Lookup virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
1718 of TYPE, insert them to NODES, recurse into derived nodes.
1719 INSERTED is used to avoid duplicate insertions of methods into NODES.
1720 MATCHED_VTABLES are used to avoid duplicate walking vtables.
1721 Clear COMPLETEP if unreferable target is found.
1722
1723 If CONSIDER_CONSTURCTION is true, record to BASES_TO_CONSDIER
1724 all cases where BASE_SKIPPED is true (because the base is abstract
1725 class). */
1726
1727 static void
1728 possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
1729 hash_set<tree> *inserted,
1730 hash_set<tree> *matched_vtables,
1731 tree otr_type,
1732 odr_type type,
1733 HOST_WIDE_INT otr_token,
1734 tree outer_type,
1735 HOST_WIDE_INT offset,
1736 bool *completep,
1737 vec <tree> &bases_to_consider,
1738 bool consider_construction)
1739 {
1740 tree binfo = TYPE_BINFO (type->type);
1741 unsigned int i;
1742 vec <tree> type_binfos = vNULL;
1743 bool possibly_instantiated = type_possibly_instantiated_p (type->type);
1744
  /* We may need to consider types w/o instances because of possible
     derived types using their methods either directly or via
     construction vtables.  It is safe to skip them when all derivations
     are known, since we will handle them later.
     This is done by recording them in the BASES_TO_CONSIDER array.  */
1750 if (possibly_instantiated || consider_construction)
1751 {
1752 record_target_from_binfo (nodes,
1753 (!possibly_instantiated
1754 && type_all_derivations_known_p (type->type))
1755 ? &bases_to_consider : NULL,
1756 binfo, otr_type, type_binfos, otr_token,
1757 outer_type, offset,
1758 inserted, matched_vtables,
1759 type->anonymous_namespace, completep);
1760 }
1761 type_binfos.release ();
1762 for (i = 0; i < type->derived_types.length (); i++)
1763 possible_polymorphic_call_targets_1 (nodes, inserted,
1764 matched_vtables,
1765 otr_type,
1766 type->derived_types[i],
1767 otr_token, outer_type, offset, completep,
1768 bases_to_consider, consider_construction);
1769 }
1770
1771 /* Cache of queries for polymorphic call targets.
1772
1773 Enumerating all call targets may get expensive when there are many
1774 polymorphic calls in the program, so we memoize all the previous
1775 queries and avoid duplicated work. */
1776
1777 struct polymorphic_call_target_d
1778 {
1779 HOST_WIDE_INT otr_token;
1780 ipa_polymorphic_call_context context;
1781 odr_type type;
1782 vec <cgraph_node *> targets;
1783 int speculative_targets;
1784 bool complete;
1785 int type_warning;
1786 tree decl_warning;
1787 };
1788
1789 /* Polymorphic call target cache helpers. */
1790
1791 struct polymorphic_call_target_hasher
1792 {
1793 typedef polymorphic_call_target_d value_type;
1794 typedef polymorphic_call_target_d compare_type;
1795 static inline hashval_t hash (const value_type *);
1796 static inline bool equal (const value_type *, const compare_type *);
1797 static inline void remove (value_type *);
1798 };
1799
1800 /* Return the computed hashcode for ODR_QUERY. */
1801
1802 inline hashval_t
1803 polymorphic_call_target_hasher::hash (const value_type *odr_query)
1804 {
1805 inchash::hash hstate (odr_query->otr_token);
1806
1807 hstate.add_wide_int (odr_query->type->id);
1808 hstate.merge_hash (TYPE_UID (odr_query->context.outer_type));
1809 hstate.add_wide_int (odr_query->context.offset);
1810
1811 if (odr_query->context.speculative_outer_type)
1812 {
1813 hstate.merge_hash (TYPE_UID (odr_query->context.speculative_outer_type));
1814 hstate.add_wide_int (odr_query->context.speculative_offset);
1815 }
1816 hstate.add_flag (odr_query->context.maybe_in_construction);
1817 hstate.add_flag (odr_query->context.maybe_derived_type);
1818 hstate.add_flag (odr_query->context.speculative_maybe_derived_type);
1819 hstate.commit_flag ();
1820 return hstate.end ();
1821 }
1822
1823 /* Compare cache entries T1 and T2. */
1824
1825 inline bool
1826 polymorphic_call_target_hasher::equal (const value_type *t1,
1827 const compare_type *t2)
1828 {
1829 return (t1->type == t2->type && t1->otr_token == t2->otr_token
1830 && t1->context.offset == t2->context.offset
1831 && t1->context.speculative_offset == t2->context.speculative_offset
1832 && t1->context.outer_type == t2->context.outer_type
1833 && t1->context.speculative_outer_type == t2->context.speculative_outer_type
1834 && t1->context.maybe_in_construction
1835 == t2->context.maybe_in_construction
1836 && t1->context.maybe_derived_type == t2->context.maybe_derived_type
1837 && (t1->context.speculative_maybe_derived_type
1838 == t2->context.speculative_maybe_derived_type));
1839 }
1840
1841 /* Remove entry in polymorphic call target cache hash. */
1842
1843 inline void
1844 polymorphic_call_target_hasher::remove (value_type *v)
1845 {
1846 v->targets.release ();
1847 free (v);
1848 }
1849
1850 /* Polymorphic call target query cache. */
1851
1852 typedef hash_table<polymorphic_call_target_hasher>
1853 polymorphic_call_target_hash_type;
1854 static polymorphic_call_target_hash_type *polymorphic_call_target_hash;
1855
1856 /* Destroy polymorphic call target query cache. */
1857
1858 static void
1859 free_polymorphic_call_targets_hash ()
1860 {
1861 if (cached_polymorphic_call_targets)
1862 {
1863 delete polymorphic_call_target_hash;
1864 polymorphic_call_target_hash = NULL;
1865 delete cached_polymorphic_call_targets;
1866 cached_polymorphic_call_targets = NULL;
1867 }
1868 }
1869
/* When a virtual function is removed, we may need to flush the cache.  */
1871
1872 static void
1873 devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
1874 {
1875 if (cached_polymorphic_call_targets
1876 && cached_polymorphic_call_targets->contains (n))
1877 free_polymorphic_call_targets_hash ();
1878 }
1879
/* Return true when TYPE contains a polymorphic type and thus is
   interesting for the devirtualization machinery.  */
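
/* For instance (hypothetical types, as a sketch only):

     struct P { virtual ~P (); };   // polymorphic itself
     struct Q { int x; P p; };      // contains a polymorphic field
     typedef Q R[4];                // array of such records

   P, Q and R all make contains_polymorphic_type_p return true.  */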
1882
1883 bool
1884 contains_polymorphic_type_p (const_tree type)
1885 {
1886 type = TYPE_MAIN_VARIANT (type);
1887
1888 if (RECORD_OR_UNION_TYPE_P (type))
1889 {
1890 if (TYPE_BINFO (type)
1891 && polymorphic_type_binfo_p (TYPE_BINFO (type)))
1892 return true;
1893 for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1894 if (TREE_CODE (fld) == FIELD_DECL
1895 && !DECL_ARTIFICIAL (fld)
1896 && contains_polymorphic_type_p (TREE_TYPE (fld)))
1897 return true;
1898 return false;
1899 }
1900 if (TREE_CODE (type) == ARRAY_TYPE)
1901 return contains_polymorphic_type_p (TREE_TYPE (type));
1902 return false;
1903 }
1904
/* THIS->OUTER_TYPE is the type of the memory object where an object of
   EXPECTED_TYPE is contained at THIS->OFFSET.  Walk the memory
   representation of THIS->OUTER_TYPE and find the outermost class type
   that matches EXPECTED_TYPE or contains EXPECTED_TYPE as a base.
   Update THIS to represent it.

   For example, when THIS represents the type
   class A
     {
       int a;
       class B b;
     }
   and we look for the type at offset sizeof(int), we end up with B and
   offset 0.  If the same is produced by multiple inheritance, we end up
   with A and offset sizeof(int).

   If we cannot find the corresponding class, give up by setting
   THIS->OUTER_TYPE to EXPECTED_TYPE and THIS->OFFSET to 0.
   Return true when the lookup was successful.  */
1924
1925 bool
1926 ipa_polymorphic_call_context::restrict_to_inner_class (tree expected_type)
1927 {
1928 tree type = outer_type;
1929 HOST_WIDE_INT cur_offset = offset;
1930 bool speculative = false;
1931 bool speculation_valid = false;
1932 bool valid = false;
1933
1934 if (!outer_type)
1935 {
1936 type = outer_type = expected_type;
1937 offset = cur_offset = 0;
1938 }
1939
1940 if (speculative_outer_type == outer_type
1941 && (!maybe_derived_type
1942 || speculative_maybe_derived_type))
1943 {
1944 speculative_outer_type = NULL;
1945 speculative_offset = 0;
1946 speculative_maybe_derived_type = false;
1947 }
1948
  /* See if the speculative type seems to be derived from outer_type.
     Then speculation is valid only if it really is a derived type and
     derived types are allowed.

     The test does not look only for a derived type; it also accepts the
     case where outer_type is a field of speculative_outer_type.  In
     this case either MAYBE_DERIVED_TYPE is false and we have full
     non-speculative information, or the loop below will correctly
     update SPECULATIVE_OUTER_TYPE and
     SPECULATIVE_MAYBE_DERIVED_TYPE.  */
1958 if (speculative_outer_type
1959 && speculative_offset >= offset
1960 && contains_type_p (speculative_outer_type,
1961 offset - speculative_offset,
1962 outer_type))
1963 speculation_valid = maybe_derived_type;
1964 else
1965 clear_speculation ();
1966
1967 /* Find the sub-object the constant actually refers to and mark whether it is
1968 an artificial one (as opposed to a user-defined one).
1969
1970 This loop is performed twice; first time for outer_type and second time
1971 for speculative_outer_type. The second iteration has SPECULATIVE set. */
1972 while (true)
1973 {
1974 HOST_WIDE_INT pos, size;
1975 tree fld;
1976
1977 /* On a match, just return what we found. */
1978 if (TREE_CODE (type) == TREE_CODE (expected_type)
1979 && (!in_lto_p
1980 || (TREE_CODE (type) == RECORD_TYPE
1981 && TYPE_BINFO (type)
1982 && polymorphic_type_binfo_p (TYPE_BINFO (type))))
1983 && types_same_for_odr (type, expected_type))
1984 {
1985 if (speculative)
1986 {
1987 gcc_assert (speculation_valid);
1988 gcc_assert (valid);
1989
	      /* If we did not match the offset, or if the speculation
		 adds nothing over the non-speculative result, just give
		 up on it.  */
1991 if (cur_offset != 0
1992 || (types_same_for_odr (speculative_outer_type,
1993 outer_type)
1994 && (maybe_derived_type
1995 == speculative_maybe_derived_type)))
1996 clear_speculation ();
1997 return true;
1998 }
1999 else
2000 {
	      /* A type cannot contain itself at a non-zero offset.  In
		 that case just give up.  */
2003 if (cur_offset != 0)
2004 {
2005 valid = false;
2006 goto give_up;
2007 }
2008 valid = true;
2009 /* If speculation is not valid or we determined type precisely,
2010 we are done. */
2011 if (!speculation_valid
2012 || !maybe_derived_type)
2013 {
2014 clear_speculation ();
2015 return true;
2016 }
2017 /* Otherwise look into speculation now. */
2018 else
2019 {
2020 speculative = true;
2021 type = speculative_outer_type;
2022 cur_offset = speculative_offset;
2023 continue;
2024 }
2025 }
2026 }
2027
      /* Walk the fields and find the one corresponding to OFFSET.  */
2029 if (TREE_CODE (type) == RECORD_TYPE)
2030 {
2031 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
2032 {
2033 if (TREE_CODE (fld) != FIELD_DECL)
2034 continue;
2035
2036 pos = int_bit_position (fld);
2037 size = tree_to_uhwi (DECL_SIZE (fld));
2038 if (pos <= cur_offset && (pos + size) > cur_offset)
2039 break;
2040 }
2041
2042 if (!fld)
2043 goto give_up;
2044
2045 type = TYPE_MAIN_VARIANT (TREE_TYPE (fld));
2046 cur_offset -= pos;
2047 /* DECL_ARTIFICIAL represents a basetype. */
2048 if (!DECL_ARTIFICIAL (fld))
2049 {
2050 if (!speculative)
2051 {
2052 outer_type = type;
2053 offset = cur_offset;
	      /* As soon as we see a field containing the type,
		 we know we are not looking for derivations.  */
2056 maybe_derived_type = false;
2057 }
2058 else
2059 {
2060 speculative_outer_type = type;
2061 speculative_offset = cur_offset;
2062 speculative_maybe_derived_type = false;
2063 }
2064 }
2065 }
2066 else if (TREE_CODE (type) == ARRAY_TYPE)
2067 {
2068 tree subtype = TYPE_MAIN_VARIANT (TREE_TYPE (type));
2069
	  /* Give up if we don't know the size of the array element.  */
2071 if (!TYPE_SIZE (subtype)
2072 || !tree_fits_shwi_p (TYPE_SIZE (subtype))
2073 || tree_to_shwi (TYPE_SIZE (subtype)) <= 0
2074 || !contains_polymorphic_type_p (subtype))
2075 goto give_up;
2076 cur_offset = cur_offset % tree_to_shwi (TYPE_SIZE (subtype));
2077 type = subtype;
2078 if (!speculative)
2079 {
2080 outer_type = type;
2081 offset = cur_offset;
2082 maybe_derived_type = false;
2083 }
2084 else
2085 {
2086 speculative_outer_type = type;
2087 speculative_offset = cur_offset;
2088 speculative_maybe_derived_type = false;
2089 }
2090 }
2091 /* Give up on anything else. */
2092 else
2093 goto give_up;
2094 }
2095
  /* If we failed to find the subtype we were looking for, give up and
     fall back to the most generic query.  */
2098 give_up:
2099 clear_speculation ();
2100 if (valid)
2101 return true;
2102 outer_type = expected_type;
2103 offset = 0;
2104 maybe_derived_type = true;
2105 maybe_in_construction = true;
  /* A POD can be changed into an instance of a polymorphic type by
     placement new.  Here we play it safe and assume that any
     non-polymorphic type is POD.  */
2109 if ((TREE_CODE (type) != RECORD_TYPE
2110 || !TYPE_BINFO (type)
2111 || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
2112 && (!TYPE_SIZE (type)
2113 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
2114 || (cur_offset + tree_to_uhwi (TYPE_SIZE (expected_type)) <=
2115 tree_to_uhwi (TYPE_SIZE (type)))))
2116 return true;
2117 return false;
2118 }
2119
2120 /* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET. */
2121
2122 static bool
2123 contains_type_p (tree outer_type, HOST_WIDE_INT offset,
2124 tree otr_type)
2125 {
2126 ipa_polymorphic_call_context context;
2127 context.offset = offset;
2128 context.outer_type = TYPE_MAIN_VARIANT (outer_type);
2129 return context.restrict_to_inner_class (otr_type);
2130 }
2131
/* Look up the base of BINFO that has virtual table VTABLE at OFFSET.  */
2133
2134 static tree
2135 subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
2136 tree vtable)
2137 {
2138 tree v = BINFO_VTABLE (binfo);
2139 int i;
2140 tree base_binfo;
2141 unsigned HOST_WIDE_INT this_offset;
2142
2143 if (v)
2144 {
2145 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
2146 gcc_unreachable ();
2147
2148 if (offset == this_offset
2149 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
2150 return binfo;
2151 }
2152
2153 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
2154 if (polymorphic_type_binfo_p (base_binfo))
2155 {
2156 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
2157 if (base_binfo)
2158 return base_binfo;
2159 }
2160 return NULL;
2161 }
2162
/* T is a known constant value of a virtual table pointer.
   Store the virtual table to V and its offset to OFFSET.
   Return false if T does not look like a virtual table reference.  */
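
/* For example (a sketch, assuming the Itanium C++ ABI on a 64-bit
   target, with a hypothetical class A): given the value

     &_ZTV1A + 16

   this returns *V == _ZTV1A and *OFFSET == 16, where the 16 bytes skip
   the offset-to-top and RTTI slots that precede the virtual function
   pointers in the vtable.  */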
2166
2167 bool
2168 vtable_pointer_value_to_vtable (const_tree t, tree *v,
2169 unsigned HOST_WIDE_INT *offset)
2170 {
  /* We expect &MEM[(void *)&virtual_table + 16B].
     We obtain the object's BINFO from the context of the virtual table.
     This one contains a pointer to the virtual table represented via
     POINTER_PLUS_EXPR.  Verify that this pointer matches what
     we propagated through.

     In the case of virtual inheritance, the virtual tables may
     be nested, i.e. the offset may be different from 16 and we may
     need to dive into the type representation.  */
2180 if (TREE_CODE (t) == ADDR_EXPR
2181 && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
2182 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
2183 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
2184 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
2185 == VAR_DECL)
2186 && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
2187 (TREE_OPERAND (t, 0), 0), 0)))
2188 {
2189 *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
2190 *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
2191 return true;
2192 }
2193
  /* An alternative representation, used by the C++ frontend, is
     POINTER_PLUS_EXPR.  We need to handle it when T comes from a static
     variable initializer or a BINFO.  */
2197 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
2198 {
2199 *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
2200 t = TREE_OPERAND (t, 0);
2201 }
2202 else
2203 *offset = 0;
2204
2205 if (TREE_CODE (t) != ADDR_EXPR)
2206 return false;
2207 *v = TREE_OPERAND (t, 0);
2208 return true;
2209 }
2210
/* T is a known constant value of a virtual table pointer.  Return the
   BINFO of the instance type.  */
2213
2214 tree
2215 vtable_pointer_value_to_binfo (const_tree t)
2216 {
2217 tree vtable;
2218 unsigned HOST_WIDE_INT offset;
2219
2220 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
2221 return NULL_TREE;
2222
  /* FIXME: for stores of construction vtables we return NULL,
     because we do not have a BINFO for those.  Eventually we should fix
     our representation to allow this case to be handled, too.
     In the case we see a store of a BINFO, however, we may assume
     that standard folding will be able to cope with it.  */
2228 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
2229 offset, vtable);
2230 }
2231
/* We know that the instance is stored in a variable or parameter
   (not dynamically allocated) and we want to disprove the fact
   that it may be in construction at the invocation of CALL.

   For the variable to be in construction we actually need to
   be in the constructor of the corresponding global variable or
   the inline stack of CALL must contain the constructor.
   Check this condition.  This check works safely only before
   IPA passes, because inline stacks may become out of date
   later.  */
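
/* An illustrative sketch (hypothetical code): for a global

     struct A { A (); virtual void f (); };
     A a;

   a virtual call on `a' made from within A::A () -- or from a function
   inlined into it -- may still be in construction, while the same call
   made from an unrelated function such as main () cannot be.  */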
2242
2243 bool
2244 decl_maybe_in_construction_p (tree base, tree outer_type,
2245 gimple call, tree function)
2246 {
2247 outer_type = TYPE_MAIN_VARIANT (outer_type);
2248 gcc_assert (DECL_P (base));
2249
2250 /* After inlining the code unification optimizations may invalidate
2251 inline stacks. Also we need to give up on global variables after
2252 IPA, because addresses of these may have been propagated to their
2253 constructors. */
2254 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
2255 return true;
2256
  /* Pure functions cannot change the dynamic type;
     that requires writing to memory.  */
2259 if (!auto_var_in_fn_p (base, function)
2260 && flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
2261 return false;
2262
2263 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
2264 block = BLOCK_SUPERCONTEXT (block))
2265 if (BLOCK_ABSTRACT_ORIGIN (block)
2266 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
2267 {
2268 tree fn = BLOCK_ABSTRACT_ORIGIN (block);
2269
2270 if (TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
2271 || (!DECL_CXX_CONSTRUCTOR_P (fn)
2272 && !DECL_CXX_DESTRUCTOR_P (fn)))
2273 {
2274 /* Watch for clones where we constant propagated the first
2275 argument (pointer to the instance). */
2276 fn = DECL_ABSTRACT_ORIGIN (fn);
2277 if (!fn
2278 || !is_global_var (base)
2279 || TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
2280 || (!DECL_CXX_CONSTRUCTOR_P (fn)
2281 && !DECL_CXX_DESTRUCTOR_P (fn)))
2282 continue;
2283 }
2284 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
2285 continue;
2286
2287 /* FIXME: this can go away once we have ODR types equivalency on
2288 LTO level. */
2289 if (in_lto_p && !polymorphic_type_binfo_p (TYPE_BINFO (outer_type)))
2290 return true;
2291 tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (fn)));
2292 if (types_same_for_odr (type, outer_type))
2293 return true;
2294 }
2295
2296 if (TREE_CODE (base) == VAR_DECL
2297 && is_global_var (base))
2298 {
2299 if (TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
2300 || (!DECL_CXX_CONSTRUCTOR_P (function)
2301 && !DECL_CXX_DESTRUCTOR_P (function)))
2302 {
2303 if (!DECL_ABSTRACT_ORIGIN (function))
2304 return false;
2305 /* Watch for clones where we constant propagated the first
2306 argument (pointer to the instance). */
2307 function = DECL_ABSTRACT_ORIGIN (function);
2308 if (!function
2309 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
2310 || (!DECL_CXX_CONSTRUCTOR_P (function)
2311 && !DECL_CXX_DESTRUCTOR_P (function)))
2312 return false;
2313 }
2314 /* FIXME: this can go away once we have ODR types equivalency on
2315 LTO level. */
2316 if (in_lto_p && !polymorphic_type_binfo_p (TYPE_BINFO (outer_type)))
2317 return true;
2318 tree type = TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (function)));
2319 if (types_same_for_odr (type, outer_type))
2320 return true;
2321 }
2322 return false;
2323 }
2324
/* Produce a polymorphic call context for a call of a method of an
   instance that is located within BASE (which is assumed to be a decl)
   at OFFSET.  */
2327
2328 static void
2329 get_polymorphic_call_info_for_decl (ipa_polymorphic_call_context *context,
2330 tree base, HOST_WIDE_INT offset)
2331 {
2332 gcc_assert (DECL_P (base));
2333
2334 context->outer_type = TYPE_MAIN_VARIANT (TREE_TYPE (base));
2335 context->offset = offset;
2336 context->speculative_outer_type = NULL;
2337 context->speculative_offset = 0;
2338 context->speculative_maybe_derived_type = true;
2339 /* Make very conservative assumption that all objects
2340 may be in construction.
2341 TODO: ipa-prop already contains code to tell better.
2342 merge it later. */
2343 context->maybe_in_construction = true;
2344 context->maybe_derived_type = false;
2345 }
2346
/* CST is an invariant (address of a decl); try to get a meaningful
   polymorphic call context for a polymorphic call of a method
   of an instance of OTR_TYPE that is located at OFFSET of this
   invariant.  Return false if nothing meaningful can be found.  */
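
/* For instance (a sketch with a hypothetical static object):

     static A a;        // A is polymorphic
     ... (&a)->f ();    // CST is &a

   The resulting context has outer_type A, offset 0 and
   maybe_derived_type false: a statically allocated A cannot have a
   derived dynamic type (modulo construction).  */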
2351
2352 bool
2353 get_polymorphic_call_info_from_invariant (ipa_polymorphic_call_context *context,
2354 tree cst,
2355 tree otr_type,
2356 HOST_WIDE_INT offset)
2357 {
2358 HOST_WIDE_INT offset2, size, max_size;
2359 tree base;
2360
2361 if (TREE_CODE (cst) != ADDR_EXPR)
2362 return false;
2363
2364 cst = TREE_OPERAND (cst, 0);
2365 base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
2366 if (!DECL_P (base) || max_size == -1 || max_size != size)
2367 return false;
2368
  /* Only type-inconsistent programs can have an otr_type that is
     not part of the outer type.  */
2371 if (!contains_type_p (TREE_TYPE (base), offset, otr_type))
2372 return false;
2373
2374 get_polymorphic_call_info_for_decl (context, base, offset);
2375 return true;
2376 }
2377
/* See if OP is an SSA name initialized as a copy or by a single
   assignment.  If so, walk the SSA graph upwards.  */
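
/* A minimal sketch of the intent (hypothetical SSA names):

     _1 = p_2(D);   // copy: walked through
     _3 = _1;       // copy: walked through

   walk_ssa_copies (_3) yields p_2(D).  A load such as _1 = MEM[...]
   terminates the walk and returns the loaded SSA name itself, since we
   do not look through memory.  */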
2380
2381 static tree
2382 walk_ssa_copies (tree op)
2383 {
2384 STRIP_NOPS (op);
2385 while (TREE_CODE (op) == SSA_NAME
2386 && !SSA_NAME_IS_DEFAULT_DEF (op)
2387 && SSA_NAME_DEF_STMT (op)
2388 && gimple_assign_single_p (SSA_NAME_DEF_STMT (op)))
2389 {
2390 if (gimple_assign_load_p (SSA_NAME_DEF_STMT (op)))
2391 return op;
2392 op = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op));
2393 STRIP_NOPS (op);
2394 }
2395 return op;
2396 }
2397
/* Given the call REF in FNDECL, determine the class of the polymorphic
   call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
   CALL is an optional argument giving the actual statement (usually a
   call) where the context is used.
   Return a pointer to the object described by the context, or a
   declaration if we found the instance to be stored in static
   storage.  */
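
/* E.g. for a hypothetical method call

     struct A { virtual void f (); };
     void g (A *a) { a->f (); }

   OTR_TYPE is A and OTR_TOKEN is 0 (assuming f occupies the first
   vtable slot); the context records that the object may really be of a
   type derived from A.  */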
2404
2405 tree
2406 get_polymorphic_call_info (tree fndecl,
2407 tree ref,
2408 tree *otr_type,
2409 HOST_WIDE_INT *otr_token,
2410 ipa_polymorphic_call_context *context,
2411 gimple call)
2412 {
2413 tree base_pointer;
2414 *otr_type = obj_type_ref_class (ref);
2415 *otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));
2416
2417 /* Set up basic info in case we find nothing interesting in the analysis. */
2418 context->speculative_outer_type = NULL;
2419 context->speculative_offset = 0;
2420 context->speculative_maybe_derived_type = true;
2421 context->outer_type = TYPE_MAIN_VARIANT (*otr_type);
2422 context->offset = 0;
2423 base_pointer = OBJ_TYPE_REF_OBJECT (ref);
2424 context->maybe_derived_type = true;
2425 context->maybe_in_construction = true;
2426
2427 /* Walk SSA for outer object. */
2428 do
2429 {
2430 base_pointer = walk_ssa_copies (base_pointer);
2431 if (TREE_CODE (base_pointer) == ADDR_EXPR)
2432 {
2433 HOST_WIDE_INT size, max_size;
2434 HOST_WIDE_INT offset2;
2435 tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
2436 &offset2, &size, &max_size);
2437
2438 /* If this is a varying address, punt. */
2439 if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
2440 && max_size != -1
2441 && max_size == size)
2442 {
	      /* We found a dereference of a pointer.  The type of the
		 pointer and of the MEM_REF is meaningless, but we can
		 look further.  */
2445 if (TREE_CODE (base) == MEM_REF)
2446 {
2447 base_pointer = TREE_OPERAND (base, 0);
2448 context->offset
2449 += offset2 + mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
2450 context->outer_type = NULL;
2451 }
2452 /* We found base object. In this case the outer_type
2453 is known. */
2454 else if (DECL_P (base))
2455 {
2456 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (base)));
2457
	      /* Only type-inconsistent programs can have an otr_type
		 that is not part of the outer type.  */
2460 if (!contains_type_p (TREE_TYPE (base),
2461 context->offset + offset2, *otr_type))
2462 {
		  /* Use OTR_TOKEN = INT_MAX as a marker of probably
		     type-inconsistent code sequences; we arrange for
		     such calls to be redirected to builtin_unreachable
		     later.  */
2466 *otr_token = INT_MAX;
2467 return base_pointer;
2468 }
2469 get_polymorphic_call_info_for_decl (context, base,
2470 context->offset + offset2);
2471 if (context->maybe_in_construction && call)
2472 context->maybe_in_construction
2473 = decl_maybe_in_construction_p (base,
2474 context->outer_type,
2475 call,
2476 fndecl);
2477 return base;
2478 }
2479 else
2480 break;
2481 }
2482 else
2483 break;
2484 }
2485 else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
2486 && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
2487 {
2488 context->offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
2489 * BITS_PER_UNIT;
2490 base_pointer = TREE_OPERAND (base_pointer, 0);
2491 }
2492 else
2493 break;
2494 }
2495 while (true);
2496
2497 /* Try to determine type of the outer object. */
2498 if (TREE_CODE (base_pointer) == SSA_NAME
2499 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
2500 && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
2501 {
      /* See if the parameter is the THIS pointer of a method.  */
2503 if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
2504 && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
2505 {
2506 context->outer_type
2507 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
2508 gcc_assert (TREE_CODE (context->outer_type) == RECORD_TYPE);
2509
	  /* Dynamic casting has possibly upcast the type
	     in the hierarchy.  In this case the outer type is less
	     informative than the inner type and we should forget
	     about it.  */
2514 if (!contains_type_p (context->outer_type, context->offset,
2515 *otr_type))
2516 {
2517 context->outer_type = NULL;
2518 return base_pointer;
2519 }
2520
	  /* If the function is a constructor or destructor, then
	     the type is possibly in construction, but we know
	     it is not a derived type.  */
2524 if (DECL_CXX_CONSTRUCTOR_P (fndecl)
2525 || DECL_CXX_DESTRUCTOR_P (fndecl))
2526 {
2527 context->maybe_in_construction = true;
2528 context->maybe_derived_type = false;
2529 }
2530 else
2531 {
2532 context->maybe_derived_type = true;
2533 context->maybe_in_construction = false;
2534 }
2535 return base_pointer;
2536 }
2537 /* Non-PODs passed by value are really passed by invisible
2538 reference. In this case we also know the type of the
2539 object. */
2540 if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
2541 {
2542 context->outer_type
2543 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
2544 gcc_assert (!POINTER_TYPE_P (context->outer_type));
	  /* Only type-inconsistent programs can have an otr_type that
	     is not part of the outer type.  */
2547 if (!contains_type_p (context->outer_type, context->offset,
2548 *otr_type))
2549 {
	      /* Use OTR_TOKEN = INT_MAX as a marker of probably
		 type-inconsistent code sequences; we arrange for such
		 calls to be redirected to builtin_unreachable later.  */
2553 *otr_token = INT_MAX;
2554 return base_pointer;
2555 }
2556 context->maybe_derived_type = false;
2557 context->maybe_in_construction = false;
2558 return base_pointer;
2559 }
2560 }
2561
2562 tree base_type = TREE_TYPE (base_pointer);
2563
2564 if (TREE_CODE (base_pointer) == SSA_NAME
2565 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
2566 && TREE_CODE (SSA_NAME_VAR (base_pointer)) != PARM_DECL)
2567 {
      /* Use OTR_TOKEN = INT_MAX as a marker of probably type-inconsistent
	 code sequences; we arrange for such calls to be redirected to
	 builtin_unreachable later.  */
2571 *otr_token = INT_MAX;
2572 return base_pointer;
2573 }
2574 if (TREE_CODE (base_pointer) == SSA_NAME
2575 && SSA_NAME_DEF_STMT (base_pointer)
2576 && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
2577 base_type = TREE_TYPE (gimple_assign_rhs1
2578 (SSA_NAME_DEF_STMT (base_pointer)));
2579
2580 if (POINTER_TYPE_P (base_type)
2581 && contains_type_p (TYPE_MAIN_VARIANT (TREE_TYPE (base_type)),
2582 context->offset,
2583 *otr_type))
2584 {
2585 context->speculative_outer_type = TYPE_MAIN_VARIANT
2586 (TREE_TYPE (base_type));
2587 context->speculative_offset = context->offset;
2588 context->speculative_maybe_derived_type = true;
2589 }
  /* TODO: There are multiple ways to derive a type.  For instance,
     BASE_POINTER may be passed to a constructor call prior to our
     reference.  We do not do this kind of flow-sensitive analysis
     yet.  */
2593 return base_pointer;
2594 }
2595
2596 /* Structure to be passed in between detect_type_change and
2597 check_stmt_for_type_change. */
2598
2599 struct type_change_info
2600 {
  /* Offset into the object at which the virtual method table pointer
     we are looking for resides.  */
2603 HOST_WIDE_INT offset;
2604 /* The declaration or SSA_NAME pointer of the base that we are checking for
2605 type change. */
2606 tree instance;
2607 /* The reference to virtual table pointer used. */
2608 tree vtbl_ptr_ref;
2609 tree otr_type;
2610 /* If we actually can tell the type that the object has changed to, it is
2611 stored in this field. Otherwise it remains NULL_TREE. */
2612 tree known_current_type;
2613 HOST_WIDE_INT known_current_offset;
2614
2615 /* Set to true if dynamic type change has been detected. */
2616 bool type_maybe_changed;
2617 /* Set to true if multiple types have been encountered. known_current_type
2618 must be disregarded in that case. */
2619 bool multiple_types_encountered;
2620 /* Set to true if we possibly missed some dynamic type changes and we should
2621 consider the set to be speculative. */
2622 bool speculative;
2623 bool seen_unanalyzed_store;
2624 };
2625
/* Return true if STMT is not a call and can modify a virtual method
   table pointer.  We take advantage of the fact that vtable stores must
   appear within constructors and destructors.  */
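
/* The stores we are after look, in GIMPLE, like the hypothetical

     this_1->_vptr.A = &_ZTV1A + 16;

   i.e. an assignment whose LHS is a COMPONENT_REF of a DECL_VIRTUAL_P
   field, appearing (possibly after inlining) within the body of a
   constructor or destructor.  */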
2629
2630 bool
2631 noncall_stmt_may_be_vtbl_ptr_store (gimple stmt)
2632 {
2633 if (is_gimple_assign (stmt))
2634 {
2635 tree lhs = gimple_assign_lhs (stmt);
2636
2637 if (gimple_clobber_p (stmt))
2638 return false;
2639 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
2640 {
2641 if (flag_strict_aliasing
2642 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
2643 return false;
2644
2645 if (TREE_CODE (lhs) == COMPONENT_REF
2646 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
2647 return false;
2648 /* In the future we might want to use get_base_ref_and_offset to find
2649 if there is a field corresponding to the offset and if so, proceed
2650 almost like if it was a component ref. */
2651 }
2652 }
2653
2654 /* Code unification may mess with inline stacks. */
2655 if (cfun->after_inlining)
2656 return true;
2657
2658 /* Walk the inline stack and watch out for ctors/dtors.
2659 TODO: Maybe we can require the store to appear in toplevel
2660 block of CTOR/DTOR. */
2661 for (tree block = gimple_block (stmt); block && TREE_CODE (block) == BLOCK;
2662 block = BLOCK_SUPERCONTEXT (block))
2663 if (BLOCK_ABSTRACT_ORIGIN (block)
2664 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
2665 {
2666 tree fn = BLOCK_ABSTRACT_ORIGIN (block);
2667
2668 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
2669 return false;
2670 return (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
2671 && (DECL_CXX_CONSTRUCTOR_P (fn)
2672 || DECL_CXX_DESTRUCTOR_P (fn)));
2673 }
2674 return (TREE_CODE (TREE_TYPE (current_function_decl)) == METHOD_TYPE
2675 && (DECL_CXX_CONSTRUCTOR_P (current_function_decl)
2676 || DECL_CXX_DESTRUCTOR_P (current_function_decl)));
2677 }
2678
/* If STMT can be proved to be an assignment to the virtual method table
   pointer of the instance described by TCI and the type associated with
   the new table can be identified, return the type and store its offset
   to TYPE_OFFSET.  Otherwise return NULL_TREE.  */
2682
2683 static tree
2684 extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci,
2685 HOST_WIDE_INT *type_offset)
2686 {
2687 HOST_WIDE_INT offset, size, max_size;
2688 tree lhs, rhs, base;
2689
2690 if (!gimple_assign_single_p (stmt))
2691 return NULL_TREE;
2692
2693 lhs = gimple_assign_lhs (stmt);
2694 rhs = gimple_assign_rhs1 (stmt);
2695 if (TREE_CODE (lhs) != COMPONENT_REF
2696 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
2697 {
2698 if (dump_file)
2699 fprintf (dump_file, " LHS is not virtual table.\n");
2700 return NULL_TREE;
2701 }
2702
2703 if (tci->vtbl_ptr_ref && operand_equal_p (lhs, tci->vtbl_ptr_ref, 0))
2704 ;
2705 else
2706 {
2707 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
2708 if (offset != tci->offset
2709 || size != POINTER_SIZE
2710 || max_size != POINTER_SIZE)
2711 {
2712 if (dump_file)
2713 fprintf (dump_file, " wrong offset %i!=%i or size %i\n",
2714 (int)offset, (int)tci->offset, (int)size);
2715 return NULL_TREE;
2716 }
2717 if (DECL_P (tci->instance))
2718 {
2719 if (base != tci->instance)
2720 {
2721 if (dump_file)
2722 {
2723 fprintf (dump_file, " base:");
2724 print_generic_expr (dump_file, base, TDF_SLIM);
2725 fprintf (dump_file, " does not match instance:");
2726 print_generic_expr (dump_file, tci->instance, TDF_SLIM);
2727 fprintf (dump_file, "\n");
2728 }
2729 return NULL_TREE;
2730 }
2731 }
2732 else if (TREE_CODE (base) == MEM_REF)
2733 {
2734 if (!operand_equal_p (tci->instance, TREE_OPERAND (base, 0), 0)
2735 || !integer_zerop (TREE_OPERAND (base, 1)))
2736 {
2737 if (dump_file)
2738 {
2739 fprintf (dump_file, " base mem ref:");
2740 print_generic_expr (dump_file, base, TDF_SLIM);
2741 fprintf (dump_file, " has nonzero offset or does not match instance:");
2742 print_generic_expr (dump_file, tci->instance, TDF_SLIM);
2743 fprintf (dump_file, "\n");
2744 }
2745 return NULL_TREE;
2746 }
2747 }
2748 else if (!operand_equal_p (tci->instance, base, 0)
2749 || tci->offset)
2750 {
2751 if (dump_file)
2752 {
2753 fprintf (dump_file, " base:");
2754 print_generic_expr (dump_file, base, TDF_SLIM);
2755 fprintf (dump_file, " does not match instance:");
2756 print_generic_expr (dump_file, tci->instance, TDF_SLIM);
2757 fprintf (dump_file, " with offset %i\n", (int)tci->offset);
2758 }
2759 return NULL_TREE;
2760 }
2761 }
2762
2763 tree vtable;
2764 unsigned HOST_WIDE_INT offset2;
2765
2766 if (!vtable_pointer_value_to_vtable (rhs, &vtable, &offset2))
2767 {
2768 if (dump_file)
2769 fprintf (dump_file, " Failed to lookup binfo\n");
2770 return NULL;
2771 }
2772
2773 tree binfo = subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
2774 offset2, vtable);
2775 if (!binfo)
2776 {
2777 if (dump_file)
2778 fprintf (dump_file, " Construction vtable used\n");
      /* FIXME: We should support construction contexts.  */
2780 return NULL;
2781 }
2782
2783 *type_offset = tree_to_shwi (BINFO_OFFSET (binfo)) * BITS_PER_UNIT;
2784 return DECL_CONTEXT (vtable);
2785 }
2786
/* Record a dynamic type change of TCI to TYPE at OFFSET.  */
2788
2789 void
2790 record_known_type (struct type_change_info *tci, tree type, HOST_WIDE_INT offset)
2791 {
2792 if (dump_file)
2793 {
2794 if (type)
2795 {
2796 fprintf (dump_file, " Recording type: ");
2797 print_generic_expr (dump_file, type, TDF_SLIM);
2798 fprintf (dump_file, " at offset %i\n", (int)offset);
2799 }
2800 else
2801 fprintf (dump_file, " Recording unknown type\n");
2802 }
2803
  /* If we found a constructor of a type that is not polymorphic or
     that may contain the type in question as a field (not as a base),
     restrict to the inner class first to make the type matching below
     happier.  */
2808 if (type
2809 && (offset
2810 || (TREE_CODE (type) != RECORD_TYPE
2811 || !polymorphic_type_binfo_p (TYPE_BINFO (type)))))
2812 {
2813 ipa_polymorphic_call_context context;
2814
2815 context.offset = offset;
2816 context.outer_type = type;
2817 context.maybe_in_construction = false;
2818 context.maybe_derived_type = false;
      /* If we failed to find the inner type, we know that the call
	 would be undefined for the type produced here.  */
2821 if (!context.restrict_to_inner_class (tci->otr_type))
2822 {
2823 if (dump_file)
2824 fprintf (dump_file, " Ignoring; does not contain otr_type\n");
2825 return;
2826 }
      /* Watch for the case where we reached a POD type; anticipate
	 placement new.  */
2829 if (!context.maybe_derived_type)
2830 {
2831 type = context.outer_type;
2832 offset = context.offset;
2833 }
2834 }
2835 if (tci->type_maybe_changed
2836 && (!types_same_for_odr (type, tci->known_current_type)
2837 || offset != tci->known_current_offset))
2838 tci->multiple_types_encountered = true;
2839 tci->known_current_type = TYPE_MAIN_VARIANT (type);
2840 tci->known_current_offset = offset;
2841 tci->type_maybe_changed = true;
2842 }
2843
2844 /* Callback of walk_aliased_vdefs and a helper function for
2845 detect_type_change to check whether a particular statement may modify
2846 the virtual table pointer, and if possible also determine the new type of
2847 the (sub-)object. It stores its result into DATA, which points to a
2848 type_change_info structure. */
2849
2850 static bool
2851 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
2852 {
2853 gimple stmt = SSA_NAME_DEF_STMT (vdef);
2854 struct type_change_info *tci = (struct type_change_info *) data;
2855 tree fn;
2856
2857 /* If we already gave up, just terminate the rest of walk. */
2858 if (tci->multiple_types_encountered)
2859 return true;
2860
2861 if (is_gimple_call (stmt))
2862 {
2863 if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
2864 return false;
2865
2866 /* Check for a constructor call. */
2867 if ((fn = gimple_call_fndecl (stmt)) != NULL_TREE
2868 && DECL_CXX_CONSTRUCTOR_P (fn)
2869 && TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
2870 && gimple_call_num_args (stmt))
2871 {
2872 tree op = walk_ssa_copies (gimple_call_arg (stmt, 0));
2873 tree type = method_class_type (TREE_TYPE (fn));
2874 HOST_WIDE_INT offset = 0, size, max_size;
2875
2876 if (dump_file)
2877 {
2878 fprintf (dump_file, " Checking constructor call: ");
2879 print_gimple_stmt (dump_file, stmt, 0, 0);
2880 }
2881
	  /* See if the THIS parameter seems to be the instance
	     pointer.  */
2883 if (TREE_CODE (op) == ADDR_EXPR)
2884 {
2885 op = get_ref_base_and_extent (TREE_OPERAND (op, 0),
2886 &offset, &size, &max_size);
2887 if (size != max_size || max_size == -1)
2888 {
2889 tci->speculative = true;
2890 return false;
2891 }
2892 if (op && TREE_CODE (op) == MEM_REF)
2893 {
2894 if (!tree_fits_shwi_p (TREE_OPERAND (op, 1)))
2895 {
2896 tci->speculative = true;
2897 return false;
2898 }
2899 offset += tree_to_shwi (TREE_OPERAND (op, 1))
2900 * BITS_PER_UNIT;
2901 op = TREE_OPERAND (op, 0);
2902 }
2903 else if (DECL_P (op))
2904 ;
2905 else
2906 {
2907 tci->speculative = true;
2908 return false;
2909 }
2910 op = walk_ssa_copies (op);
2911 }
2912 if (operand_equal_p (op, tci->instance, 0)
2913 && TYPE_SIZE (type)
2914 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2915 && tree_fits_shwi_p (TYPE_SIZE (type))
2916 && tree_to_shwi (TYPE_SIZE (type)) + offset > tci->offset)
2917 {
2918 record_known_type (tci, type, tci->offset - offset);
2919 return true;
2920 }
2921 }
      /* Calls may possibly change the dynamic type by placement new.
	 Assume it will not happen, but make the result speculative
	 only.  */
2924 if (dump_file)
2925 {
2926 fprintf (dump_file, " Function call may change dynamic type:");
2927 print_gimple_stmt (dump_file, stmt, 0, 0);
2928 }
2929 tci->speculative = true;
2930 return false;
2931 }
2932 /* Check for inlined virtual table store. */
2933 else if (noncall_stmt_may_be_vtbl_ptr_store (stmt))
2934 {
2935 tree type;
2936 HOST_WIDE_INT offset = 0;
2937 if (dump_file)
2938 {
2939 fprintf (dump_file, " Checking vtbl store: ");
2940 print_gimple_stmt (dump_file, stmt, 0, 0);
2941 }
2942
2943 type = extr_type_from_vtbl_ptr_store (stmt, tci, &offset);
2944 gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
2945 if (!type)
2946 {
2947 if (dump_file)
2948 fprintf (dump_file, " Unanalyzed store may change type.\n");
2949 tci->seen_unanalyzed_store = true;
2950 tci->speculative = true;
2951 }
2952 else
2953 record_known_type (tci, type, offset);
2954 return true;
2955 }
2956 else
2957 return false;
2958 }
2959
/* THIS is a polymorphic call context obtained from get_polymorphic_context.
   OTR_OBJECT is a pointer to the instance returned by OBJ_TYPE_REF_OBJECT.
   INSTANCE is a pointer to the outer instance as returned by
   get_polymorphic_context.  To avoid creation of temporary expressions,
   INSTANCE may also be a declaration if get_polymorphic_context found
   the value to be in static storage.

   If the type of the instance is not fully determined
   (either OUTER_TYPE is unknown or MAYBE_IN_CONSTRUCTION/INCLUDE_DERIVED_TYPES
   is set), try to walk memory writes and find the actual construction of the
   instance.

   We do not include this analysis in the context analysis itself, because
   it needs memory SSA to be fully built and the walk may be expensive.
   So it is not suitable for use within fold_stmt and similar uses.  */
2975
2976 bool
2977 ipa_polymorphic_call_context::get_dynamic_type (tree instance,
2978 tree otr_object,
2979 tree otr_type,
2980 gimple call)
2981 {
2982 struct type_change_info tci;
2983 ao_ref ao;
2984 bool function_entry_reached = false;
2985 tree instance_ref = NULL;
2986 gimple stmt = call;
2987 /* Remember OFFSET before it is modified by restrict_to_inner_class.
2988 This is because we do not update INSTANCE when walking inwards. */
2989 HOST_WIDE_INT instance_offset = offset;
2990
2991 otr_type = TYPE_MAIN_VARIANT (otr_type);
2992
  /* Walk into the inner type.  This may clear maybe_derived_type and
     save us from useless work.  It also makes later comparisons with
     the static type easier.  */
2996 if (outer_type)
2997 {
2998 if (!restrict_to_inner_class (otr_type))
2999 return false;
3000 }
3001
3002 if (!maybe_in_construction && !maybe_derived_type)
3003 return false;
3004
  /* We need to obtain a reference to the virtual table pointer.  It is
     better to look it up in the code than to build our own.  This
     requires a bit of pattern matching, but we end up verifying that
     what we found is correct.

     What we pattern match is:

       tmp = instance->_vptr.A;   // vtbl ptr load
       tmp2 = tmp[otr_token];     // vtable lookup
       OBJ_TYPE_REF(tmp2;instance->0) (instance);

     We want to start the alias oracle walk from the vtbl pointer load,
     but we may not be able to identify it, for example, when PRE moved
     the load around.  */
3019
3020 if (gimple_code (call) == GIMPLE_CALL)
3021 {
3022 tree ref = gimple_call_fn (call);
3023 HOST_WIDE_INT offset2, size, max_size;
3024
3025 if (TREE_CODE (ref) == OBJ_TYPE_REF)
3026 {
3027 ref = OBJ_TYPE_REF_EXPR (ref);
3028 ref = walk_ssa_copies (ref);
3029
3030 /* Check if definition looks like vtable lookup. */
3031 if (TREE_CODE (ref) == SSA_NAME
3032 && !SSA_NAME_IS_DEFAULT_DEF (ref)
3033 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref))
3034 && TREE_CODE (gimple_assign_rhs1
3035 (SSA_NAME_DEF_STMT (ref))) == MEM_REF)
3036 {
3037 ref = get_base_address
3038 (TREE_OPERAND (gimple_assign_rhs1
3039 (SSA_NAME_DEF_STMT (ref)), 0));
3040 ref = walk_ssa_copies (ref);
3041 /* Find base address of the lookup and see if it looks like
3042 vptr load. */
3043 if (TREE_CODE (ref) == SSA_NAME
3044 && !SSA_NAME_IS_DEFAULT_DEF (ref)
3045 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref)))
3046 {
3047 tree ref_exp = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (ref));
3048 tree base_ref = get_ref_base_and_extent
3049 (ref_exp, &offset2, &size, &max_size);
3050
3051 /* Finally verify that what we found looks like read from OTR_OBJECT
3052 or from INSTANCE with offset OFFSET. */
3053 if (base_ref
3054 && ((TREE_CODE (base_ref) == MEM_REF
3055 && ((offset2 == instance_offset
3056 && TREE_OPERAND (base_ref, 0) == instance)
3057 || (!offset2 && TREE_OPERAND (base_ref, 0) == otr_object)))
3058 || (DECL_P (instance) && base_ref == instance
3059 && offset2 == instance_offset)))
3060 {
3061 stmt = SSA_NAME_DEF_STMT (ref);
3062 instance_ref = ref_exp;
3063 }
3064 }
3065 }
3066 }
3067 }
3068
  /* If we failed to look up the reference in the code, build our
     own.  */
3070 if (!instance_ref)
3071 {
3072 /* If the statement in question does not use memory, we can't tell
3073 anything. */
3074 if (!gimple_vuse (stmt))
3075 return false;
3076 ao_ref_init_from_ptr_and_size (&ao, otr_object, NULL);
3077 }
3078 else
3079 /* Otherwise use the real reference. */
3080 ao_ref_init (&ao, instance_ref);
3081
3082 /* We look for vtbl pointer read. */
3083 ao.size = POINTER_SIZE;
3084 ao.max_size = ao.size;
3085 ao.ref_alias_set
3086 = get_deref_alias_set (TREE_TYPE (BINFO_VTABLE (TYPE_BINFO (otr_type))));
3087
3088 if (dump_file)
3089 {
3090 fprintf (dump_file, "Determining dynamic type for call: ");
3091 print_gimple_stmt (dump_file, call, 0, 0);
3092 fprintf (dump_file, " Starting walk at: ");
3093 print_gimple_stmt (dump_file, stmt, 0, 0);
3094 fprintf (dump_file, " instance pointer: ");
3095 print_generic_expr (dump_file, otr_object, TDF_SLIM);
3096 fprintf (dump_file, " Outer instance pointer: ");
3097 print_generic_expr (dump_file, instance, TDF_SLIM);
3098 fprintf (dump_file, " offset: %i (bits)", (int)offset);
3099 fprintf (dump_file, " vtbl reference: ");
3100 print_generic_expr (dump_file, instance_ref, TDF_SLIM);
3101 fprintf (dump_file, "\n");
3102 }
3103
3104 tci.offset = offset;
3105 tci.instance = instance;
3106 tci.vtbl_ptr_ref = instance_ref;
3107 gcc_assert (TREE_CODE (instance) != MEM_REF);
3108 tci.known_current_type = NULL_TREE;
3109 tci.known_current_offset = 0;
3110 tci.otr_type = otr_type;
3111 tci.type_maybe_changed = false;
3112 tci.multiple_types_encountered = false;
3113 tci.speculative = false;
3114 tci.seen_unanalyzed_store = false;
3115
3116 walk_aliased_vdefs (&ao, gimple_vuse (stmt), check_stmt_for_type_change,
3117 &tci, NULL, &function_entry_reached);
3118
  /* If we did not find any type-changing statements, we may still drop
     the maybe_in_construction flag if the context already has an outer
     type.
3121
3122 Here we make special assumptions about both constructors and
3123 destructors which are all the functions that are allowed to alter the
3124 VMT pointers. It assumes that destructors begin with assignment into
3125 all VMT pointers and that constructors essentially look in the
3126 following way:
3127
3128 1) The very first thing they do is that they call constructors of
3129 ancestor sub-objects that have them.
3130
     2) Then the VMT pointers of this type and all its ancestors are set
	to new values corresponding to the type of the constructor.
3133
     3) Only afterwards is other stuff run, such as the constructors of
	member sub-objects and the code written by the user.  Only this
	may include calling virtual functions, directly or indirectly.
3137
     4) Placement new cannot be used to change the type of a non-POD
	statically allocated variable.
3140
3141 There is no way to call a constructor of an ancestor sub-object in any
3142 other way.
3143
3144 This means that we do not have to care whether constructors get the
3145 correct type information because they will always change it (in fact,
3146 if we define the type to be given by the VMT pointer, it is undefined).
3147
3148 The most important fact to derive from the above is that if, for some
3149 statement in the section 3, we try to detect whether the dynamic type
3150 has changed, we can safely ignore all calls as we examine the function
3151 body backwards until we reach statements in section 2 because these
3152 calls cannot be ancestor constructors or destructors (if the input is
3153 not bogus) and so do not change the dynamic type (this holds true only
3154 for automatically allocated objects but at the moment we devirtualize
3155 only these). We then must detect that statements in section 2 change
3156 the dynamic type and can try to derive the new type. That is enough
3157 and we can stop, we will never see the calls into constructors of
3158 sub-objects in this code.
3159
     Therefore, if the static outer type was found (outer_type),
     we can safely ignore tci.speculative (which is set on calls) and
     give up only if there was a dynamic type store that may affect the
     given variable (seen_unanalyzed_store).  */
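
  /* Schematically (a hypothetical hierarchy, for illustration only):

       struct B { virtual void f (); };
       struct D : B
       {
	 D ()	 // (1) B::B () runs first
	 {
	   // (2) the VMT pointer now holds &_ZTV1D + 16
	   f (); // (3) user code; dispatches to D::f
	 }
       };
  */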
3164
3165 if (!tci.type_maybe_changed
3166 || (outer_type
3167 && !tci.seen_unanalyzed_store
3168 && !tci.multiple_types_encountered
3169 && offset == tci.offset
3170 && types_same_for_odr (tci.known_current_type,
3171 outer_type)))
3172 {
3173 if (!outer_type || tci.seen_unanalyzed_store)
3174 return false;
3175 if (maybe_in_construction)
3176 maybe_in_construction = false;
3177 if (dump_file)
3178 fprintf (dump_file, " No dynamic type change found.\n");
3179 return true;
3180 }
3181
3182 if (tci.known_current_type
3183 && !function_entry_reached
3184 && !tci.multiple_types_encountered)
3185 {
3186 if (!tci.speculative)
3187 {
3188 outer_type = TYPE_MAIN_VARIANT (tci.known_current_type);
3189 offset = tci.known_current_offset;
3190 maybe_in_construction = false;
3191 maybe_derived_type = false;
3192 if (dump_file)
3193 fprintf (dump_file, " Determined dynamic type.\n");
3194 }
3195 else if (!speculative_outer_type
3196 || speculative_maybe_derived_type)
3197 {
3198 speculative_outer_type = TYPE_MAIN_VARIANT (tci.known_current_type);
3199 speculative_offset = tci.known_current_offset;
3200 speculative_maybe_derived_type = false;
3201 if (dump_file)
3202 fprintf (dump_file, " Determined speculative dynamic type.\n");
3203 }
3204 }
3205 else if (dump_file)
3206 {
      fprintf (dump_file, "  Found multiple types%s%s\n",
	       function_entry_reached ? " (function entry reached)" : "",
	       tci.multiple_types_encountered
	       ? " (multiple types encountered)" : "");
3210 }
3211
3212 return true;
3213 }
3214
/* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
   Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
   and insert them into NODES.

   MATCHED_VTABLES and INSERTED are used to avoid duplicated work.  */
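
/* For a hypothetical hierarchy

     struct B { virtual void f (); };
     struct D : B { virtual void f (); };

   with OUTER_TYPE == D, OTR_TYPE == B and OFFSET == 0, this also
   records B::f in addition to D::f: during construction or destruction
   of a D, the vtable may temporarily be the one of B.  */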
3220
3221 static void
3222 record_targets_from_bases (tree otr_type,
3223 HOST_WIDE_INT otr_token,
3224 tree outer_type,
3225 HOST_WIDE_INT offset,
3226 vec <cgraph_node *> &nodes,
3227 hash_set<tree> *inserted,
3228 hash_set<tree> *matched_vtables,
3229 bool *completep)
3230 {
3231 while (true)
3232 {
3233 HOST_WIDE_INT pos, size;
3234 tree base_binfo;
3235 tree fld;
3236
3237 if (types_same_for_odr (outer_type, otr_type))
3238 return;
3239
3240 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
3241 {
3242 if (TREE_CODE (fld) != FIELD_DECL)
3243 continue;
3244
3245 pos = int_bit_position (fld);
3246 size = tree_to_shwi (DECL_SIZE (fld));
3247 if (pos <= offset && (pos + size) > offset
3248 /* Do not get confused by zero sized bases. */
3249 && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
3250 break;
3251 }
      /* Within a class type we should always find a corresponding
	 field.  */
3253 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
3254
      /* Non-base types should have been stripped by outer_class_type.  */
3256 gcc_assert (DECL_ARTIFICIAL (fld));
3257
3258 outer_type = TREE_TYPE (fld);
3259 offset -= pos;
3260
3261 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
3262 offset, otr_type);
3263 if (!base_binfo)
3264 {
3265 gcc_assert (odr_violation_reported);
3266 return;
3267 }
3268 gcc_assert (base_binfo);
3269 if (!matched_vtables->add (BINFO_VTABLE (base_binfo)))
3270 {
3271 bool can_refer;
3272 tree target = gimple_get_virt_method_for_binfo (otr_token,
3273 base_binfo,
3274 &can_refer);
3275 if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
3276 maybe_record_node (nodes, target, inserted, can_refer, completep);
3277 matched_vtables->add (BINFO_VTABLE (base_binfo));
3278 }
3279 }
3280 }
3281
/* When a virtual table is removed, we may need to flush the cache.  */
3283
3284 static void
3285 devirt_variable_node_removal_hook (varpool_node *n,
3286 void *d ATTRIBUTE_UNUSED)
3287 {
3288 if (cached_polymorphic_call_targets
3289 && DECL_VIRTUAL_P (n->decl)
3290 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
3291 free_polymorphic_call_targets_hash ();
3292 }
3293
/* Records how many calls would benefit from a given type being final.  */
3295
3296 struct odr_type_warn_count
3297 {
3298 tree type;
3299 int count;
3300 gcov_type dyn_count;
3301 };
3302
/* Records how many calls would benefit from a given method being final.  */
3304
3305 struct decl_warn_count
3306 {
3307 tree decl;
3308 int count;
3309 gcov_type dyn_count;
3310 };
3311
3312 /* Information about type and decl warnings. */
3313
3314 struct final_warning_record
3315 {
3316 gcov_type dyn_count;
3317 vec<odr_type_warn_count> type_warnings;
3318 hash_map<tree, decl_warn_count> decl_warnings;
3319 };
3320 struct final_warning_record *final_warning_records;
3321
/* Return a vector containing the possible targets of a polymorphic call
   of type OTR_TYPE calling a method with OTR_TOKEN within a type of
   OTR_OUTER_TYPE and OFFSET.  If INCLUDE_BASES is true, also walk base
   types of OUTER_TYPES containing OTR_TYPE and include their virtual
   methods.  This is useful for types possibly in construction or
   destruction where the virtual table may temporarily change to one of
   the base types.  INCLUDE_DERIVED_TYPES makes us walk the inheritance
   graph for all derivations.

   OTR_TOKEN == INT_MAX is used to mark calls that are provably
   undefined and should be redirected to unreachable.

   If COMPLETEP is non-NULL, store true if the list is complete.
   CACHE_TOKEN (if non-NULL) will get stored to a unique ID of the entry
   in the target cache.  If the user needs to visit every target list
   just once, it can memoize them.

   SPECULATIVE_TARGETSP (if non-NULL) receives the number of targets
   that are speculatively likely.  These include targets specified by
   the speculative part of the polymorphic call context and exclude all
   targets for classes in construction.

   The returned vector is placed into the cache.  It is NOT the caller's
   responsibility to free it.  The vector can be freed on a
   cgraph_remove_node call if the particular node is a virtual function
   present in the cache.  */
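
/* A minimal caller-side sketch (hypothetical variables; see the
   contract described above):

     bool final;
     vec <cgraph_node *> targets
       = possible_polymorphic_call_targets (otr_type, otr_token,
					    context, &final, NULL, NULL);
     if (final && targets.length () == 1)
       ;  // the call may be devirtualized to targets[0]->decl

   The vector lives in the cache and must not be freed by the caller.  */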
3346
3347 vec <cgraph_node *>
3348 possible_polymorphic_call_targets (tree otr_type,
3349 HOST_WIDE_INT otr_token,
3350 ipa_polymorphic_call_context context,
3351 bool *completep,
3352 void **cache_token,
3353 int *speculative_targetsp)
3354 {
3355 static struct cgraph_node_hook_list *node_removal_hook_holder;
3356 vec <cgraph_node *> nodes = vNULL;
3357 vec <tree> bases_to_consider = vNULL;
3358 odr_type type, outer_type;
3359 polymorphic_call_target_d key;
3360 polymorphic_call_target_d **slot;
3361 unsigned int i;
3362 tree binfo, target;
3363 bool complete;
3364 bool can_refer;
3365 bool skipped = false;
3366
3367 otr_type = TYPE_MAIN_VARIANT (otr_type);
3368
  /* If the ODR type hash is not initialized, return an empty,
     incomplete list.  */
3370 if (!odr_hash)
3371 {
3372 if (completep)
3373 *completep = false;
3374 if (cache_token)
3375 *cache_token = NULL;
3376 if (speculative_targetsp)
3377 *speculative_targetsp = 0;
3378 return nodes;
3379 }
3380
  /* If we hit a type inconsistency, just return an empty list of
     targets.  */
3382 if (otr_token == INT_MAX)
3383 {
3384 if (completep)
3385 *completep = true;
3386 if (cache_token)
3387 *cache_token = NULL;
3388 if (speculative_targetsp)
3389 *speculative_targetsp = 0;
3390 return nodes;
3391 }
3392
  /* Do not bother to compute speculative info when the user does not
     ask for it.  */
3394 if (!speculative_targetsp || !context.speculative_outer_type)
3395 context.clear_speculation ();
3396
3397 type = get_odr_type (otr_type, true);
3398
  /* Recording type variants would waste the results cache.  */
3400 gcc_assert (!context.outer_type
3401 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
3402
3403 /* Lookup the outer class type we want to walk. */
3404 if ((context.outer_type || context.speculative_outer_type)
3405 && !context.restrict_to_inner_class (otr_type))
3406 {
3407 if (completep)
3408 *completep = false;
3409 if (cache_token)
3410 *cache_token = NULL;
3411 if (speculative_targetsp)
3412 *speculative_targetsp = 0;
3413 return nodes;
3414 }
3415
3416 /* Check that restrict_to_inner_class kept the main variant. */
3417 gcc_assert (!context.outer_type
3418 || TYPE_MAIN_VARIANT (context.outer_type) == context.outer_type);
3419
3420 /* We canonicalize our query, so we do not need extra hashtable entries. */
3421
  /* Without an outer type, we have no use for the offset.  Just do the
     basic search from the inner type.  */
3424 if (!context.outer_type)
3425 {
3426 context.outer_type = otr_type;
3427 context.offset = 0;
3428 }
3429 /* We need to update our hiearchy if the type does not exist. */
3430 outer_type = get_odr_type (context.outer_type, true);
3431 /* If the type is complete, there are no derivations. */
3432 if (TYPE_FINAL_P (outer_type->type))
3433 context.maybe_derived_type = false;
3434
  /* Initialize the query cache.  */
  if (!cached_polymorphic_call_targets)
    {
      cached_polymorphic_call_targets = new hash_set<cgraph_node *>;
      polymorphic_call_target_hash
        = new polymorphic_call_target_hash_type (23);
      if (!node_removal_hook_holder)
        {
          node_removal_hook_holder =
            symtab->add_cgraph_removal_hook (&devirt_node_removal_hook, NULL);
          symtab->add_varpool_removal_hook (&devirt_variable_node_removal_hook,
                                            NULL);
        }
    }

  /* Look up the cached answer.  */
  key.type = type;
  key.otr_token = otr_token;
  key.context = context;
  slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
  if (cache_token)
    *cache_token = (void *) *slot;
  if (*slot)
    {
      if (completep)
        *completep = (*slot)->complete;
      if (speculative_targetsp)
        *speculative_targetsp = (*slot)->speculative_targets;
      if ((*slot)->type_warning && final_warning_records)
        {
          final_warning_records->type_warnings[(*slot)->type_warning - 1].count++;
          final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
            += final_warning_records->dyn_count;
        }
      if ((*slot)->decl_warning && final_warning_records)
        {
          struct decl_warn_count *c =
            final_warning_records->decl_warnings.get ((*slot)->decl_warning);
          c->count++;
          c->dyn_count += final_warning_records->dyn_count;
        }
      return (*slot)->targets;
    }

  complete = true;

  /* Do the actual search.  */
  timevar_push (TV_IPA_VIRTUAL_CALL);
  *slot = XCNEW (polymorphic_call_target_d);
  if (cache_token)
    *cache_token = (void *) *slot;
  (*slot)->type = type;
  (*slot)->otr_token = otr_token;
  (*slot)->context = context;
  (*slot)->speculative_targets = 0;

  hash_set<tree> inserted;
  hash_set<tree> matched_vtables;
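  /* INSERTED tracks the target methods already recorded in NODES, so a
     method reachable along several paths is added only once;
     MATCHED_VTABLES tracks the virtual tables already walked, so a vtable
     shared by several types is not processed twice.  */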

  /* First insert targets we speculatively identified as likely.  */
  if (context.speculative_outer_type)
    {
      odr_type speculative_outer_type;
      bool speculation_complete = true;

      /* First insert the target from the type itself and check whether it
         may have derived types.  */
      speculative_outer_type = get_odr_type (context.speculative_outer_type, true);
      if (TYPE_FINAL_P (speculative_outer_type->type))
        context.speculative_maybe_derived_type = false;
      binfo = get_binfo_at_offset (TYPE_BINFO (speculative_outer_type->type),
                                   context.speculative_offset, otr_type);
      if (binfo)
        target = gimple_get_virt_method_for_binfo (otr_token, binfo,
                                                   &can_refer);
      else
        target = NULL;

      /* If the method is final, we don't need to walk derivations.  */
      if (target && DECL_FINAL_P (target))
        context.speculative_maybe_derived_type = false;
      if (type_possibly_instantiated_p (speculative_outer_type->type))
        maybe_record_node (nodes, target, &inserted, can_refer,
                           &speculation_complete);
      if (binfo)
        matched_vtables.add (BINFO_VTABLE (binfo));

      /* Next walk recursively all derived types.  */
      if (context.speculative_maybe_derived_type)
        for (i = 0; i < speculative_outer_type->derived_types.length (); i++)
          possible_polymorphic_call_targets_1 (nodes, &inserted,
                                               &matched_vtables,
                                               otr_type,
                                               speculative_outer_type->derived_types[i],
                                               otr_token, speculative_outer_type->type,
                                               context.speculative_offset,
                                               &speculation_complete,
                                               bases_to_consider,
                                               false);
      (*slot)->speculative_targets = nodes.length ();
    }

  /* First, look at the virtual method of the type itself.  */
  binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
                               context.offset, otr_type);
  if (binfo)
    target = gimple_get_virt_method_for_binfo (otr_token, binfo,
                                               &can_refer);
  else
    {
      gcc_assert (odr_violation_reported);
      target = NULL;
    }

  /* Destructors are never called through construction virtual tables,
     because the type is always known.  */
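  /* For instance, in an illustrative hierarchy

       struct A { virtual ~A (); };
       struct B : A { ~B (); };

     a virtual destructor always runs on the most derived object, so
     seeing a destructor as the target rules out the transient vtables
     that are in effect only while a base subobject is being constructed
     or destroyed.  */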
  if (target && DECL_CXX_DESTRUCTOR_P (target))
    context.maybe_in_construction = false;

  if (target)
    {
      /* If the method is final, we don't need to walk derivations.  */
      if (DECL_FINAL_P (target))
        context.maybe_derived_type = false;
    }

  /* If OUTER_TYPE is abstract, we know we are not seeing its instance.  */
  if (type_possibly_instantiated_p (outer_type->type))
    maybe_record_node (nodes, target, &inserted, can_refer, &complete);
  else
    {
      skipped = true;
      gcc_assert (in_lto_p || context.maybe_derived_type);
    }

  if (binfo)
    matched_vtables.add (BINFO_VTABLE (binfo));

  /* Next walk recursively all derived types.  */
  if (context.maybe_derived_type)
    {
      for (i = 0; i < outer_type->derived_types.length (); i++)
        possible_polymorphic_call_targets_1 (nodes, &inserted,
                                             &matched_vtables,
                                             otr_type,
                                             outer_type->derived_types[i],
                                             otr_token, outer_type->type,
                                             context.offset, &complete,
                                             bases_to_consider,
                                             context.maybe_in_construction);

      if (!outer_type->all_derivations_known)
        {
          if (final_warning_records)
            {
              if (complete
                  && nodes.length () == 1
                  && warn_suggest_final_types
                  && !outer_type->derived_types.length ())
                {
                  if (outer_type->id
                      >= (int) final_warning_records->type_warnings.length ())
                    final_warning_records->type_warnings.safe_grow_cleared
                      (odr_types.length ());
                  final_warning_records->type_warnings[outer_type->id].count++;
                  final_warning_records->type_warnings[outer_type->id].dyn_count
                    += final_warning_records->dyn_count;
                  final_warning_records->type_warnings[outer_type->id].type
                    = outer_type->type;
                  (*slot)->type_warning = outer_type->id + 1;
                }
              if (complete
                  && warn_suggest_final_methods
                  && nodes.length () == 1
                  && types_same_for_odr (DECL_CONTEXT (nodes[0]->decl),
                                         outer_type->type))
                {
                  bool existed;
                  struct decl_warn_count &c =
                    final_warning_records->decl_warnings.get_or_insert
                      (nodes[0]->decl, &existed);

                  if (existed)
                    {
                      c.count++;
                      c.dyn_count += final_warning_records->dyn_count;
                    }
                  else
                    {
                      c.count = 1;
                      c.dyn_count = final_warning_records->dyn_count;
                      c.decl = nodes[0]->decl;
                    }
                  (*slot)->decl_warning = nodes[0]->decl;
                }
            }
          complete = false;
        }
    }

  /* Finally walk bases, if asked to.  */
  if (!(*slot)->speculative_targets)
    (*slot)->speculative_targets = nodes.length ();

  /* Destructors are never called through construction virtual tables,
     because the type is always known.  One of the entries may be
     cxa_pure_virtual, so check at least the first two of them.  */
  if (context.maybe_in_construction)
    for (i = 0; i < MIN (nodes.length (), 2); i++)
      if (DECL_CXX_DESTRUCTOR_P (nodes[i]->decl))
        context.maybe_in_construction = false;
  if (context.maybe_in_construction)
    {
      if (type != outer_type
          && (!skipped
              || (context.maybe_derived_type
                  && !type_all_derivations_known_p (outer_type->type))))
        record_targets_from_bases (otr_type, otr_token, outer_type->type,
                                   context.offset, nodes, &inserted,
                                   &matched_vtables, &complete);
      if (skipped)
        maybe_record_node (nodes, target, &inserted, can_refer, &complete);
      for (i = 0; i < bases_to_consider.length (); i++)
        maybe_record_node (nodes, bases_to_consider[i], &inserted, can_refer,
                           &complete);
    }
  bases_to_consider.release ();

  (*slot)->targets = nodes;
  (*slot)->complete = complete;
  if (completep)
    *completep = complete;
  if (speculative_targetsp)
    *speculative_targetsp = (*slot)->speculative_targets;

  timevar_pop (TV_IPA_VIRTUAL_CALL);
  return nodes;
}

/* Helper for traversing the decl_warnings hash map: collect a pointer to
   every record into VEC.  */

bool
add_decl_warning (const tree &key ATTRIBUTE_UNUSED, const decl_warn_count &value,
                  vec<const decl_warn_count *> *vec)
{
  vec->safe_push (&value);
  return true;
}

/* Dump all possible targets of a polymorphic call.  */

void
dump_possible_polymorphic_call_targets (FILE *f,
                                        tree otr_type,
                                        HOST_WIDE_INT otr_token,
                                        const ipa_polymorphic_call_context &ctx)
{
  vec <cgraph_node *> targets;
  bool final;
  odr_type type = get_odr_type (TYPE_MAIN_VARIANT (otr_type), false);
  unsigned int i;
  int speculative;

  if (!type)
    return;
  targets = possible_polymorphic_call_targets (otr_type, otr_token,
                                               ctx,
                                               &final, NULL, &speculative);
  fprintf (f, " Targets of polymorphic call of type %i:", type->id);
  print_generic_expr (f, type->type, TDF_SLIM);
  fprintf (f, " token %i\n", (int) otr_token);
  if (ctx.outer_type || ctx.offset)
    {
      fprintf (f, " Contained in type:");
      print_generic_expr (f, ctx.outer_type, TDF_SLIM);
      fprintf (f, " at offset " HOST_WIDE_INT_PRINT_DEC "\n",
               ctx.offset);
    }
  if (ctx.speculative_outer_type)
    {
      fprintf (f, " Speculatively contained in type:");
      print_generic_expr (f, ctx.speculative_outer_type, TDF_SLIM);
      fprintf (f, " at offset " HOST_WIDE_INT_PRINT_DEC "\n",
               ctx.speculative_offset);
    }

  fprintf (f, " %s%s%s%s\n ",
           final ? "This is a complete list." :
           "This is a partial list; extra targets may be defined in other units.",
           ctx.maybe_in_construction ? " (base types included)" : "",
           ctx.maybe_derived_type ? " (derived types included)" : "",
           ctx.speculative_maybe_derived_type ? " (speculative derived types included)" : "");
  for (i = 0; i < targets.length (); i++)
    {
      char *name = NULL;
      if (i == (unsigned) speculative)
        fprintf (f, "\n Targets that are not likely:\n"
                 " ");
      if (in_lto_p)
        name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
      fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
      if (in_lto_p)
        free (name);
      if (!targets[i]->definition)
        fprintf (f, " (no definition%s)",
                 DECL_DECLARED_INLINE_P (targets[i]->decl)
                 ? " inline" : "");
    }
  fprintf (f, "\n\n");
}


/* Return true if N can possibly be a target of a polymorphic call of
   OTR_TYPE/OTR_TOKEN.  */

bool
possible_polymorphic_call_target_p (tree otr_type,
                                    HOST_WIDE_INT otr_token,
                                    const ipa_polymorphic_call_context &ctx,
                                    struct cgraph_node *n)
{
  vec <cgraph_node *> targets;
  unsigned int i;
  enum built_in_function fcode;
  bool final;

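  /* Calls that are proven undefined get redirected to __builtin_unreachable
     or __builtin_trap (see the OTR_TOKEN == INT_MAX handling above), so
     these builtins must be accepted as a possible target of any
     polymorphic call.  */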
  if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
      && ((fcode = DECL_FUNCTION_CODE (n->decl))
          == BUILT_IN_UNREACHABLE
          || fcode == BUILT_IN_TRAP))
    return true;

  if (!odr_hash)
    return true;
  targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
  for (i = 0; i < targets.length (); i++)
    if (n->semantically_equivalent_p (targets[i]))
      return true;

  /* At the moment we allow the middle end to dig out new external
     declarations as possible targets of polymorphic calls.  */
  if (!final && !n->definition)
    return true;
  return false;
}


/* After callgraph construction new external nodes may appear.
   Add them into the graph.  */

void
update_type_inheritance_graph (void)
{
  struct cgraph_node *n;

  if (!odr_hash)
    return;
  free_polymorphic_call_targets_hash ();
  timevar_push (TV_IPA_INHERITANCE);
  /* We reconstruct the graph starting from the types of all methods seen
     in the unit.  */
  FOR_EACH_FUNCTION (n)
    if (DECL_VIRTUAL_P (n->decl)
        && !n->definition
        && n->real_symbol_p ())
      get_odr_type (method_class_type (TYPE_MAIN_VARIANT (TREE_TYPE (n->decl))),
                    true);
  timevar_pop (TV_IPA_INHERITANCE);
}


/* Return true if N looks like a likely target of a polymorphic call.
   Rule out cxa_pure_virtual, noreturns, functions declared cold and
   other obvious cases.  */

bool
likely_target_p (struct cgraph_node *n)
{
  int flags;
  /* cxa_pure_virtual and similar things are not likely.  */
  if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
    return false;
  flags = flags_from_decl_or_type (n->decl);
  if (flags & ECF_NORETURN)
    return false;
  if (lookup_attribute ("cold",
                        DECL_ATTRIBUTES (n->decl)))
    return false;
  if (n->frequency < NODE_FREQUENCY_NORMAL)
    return false;
  /* If no live virtual tables refer to the target, the only way it can be
     called is through an instance coming from another compilation unit;
     speculative devirtualization is built around the assumption that this
     won't happen.  */
  if (!referenced_from_vtable_p (n))
    return false;
  return true;
}

/* Compare type warning records P1 and P2; qsort helper that sorts by
   decreasing dyn_count, breaking ties by decreasing count.  */

int
type_warning_cmp (const void *p1, const void *p2)
{
  const odr_type_warn_count *t1 = (const odr_type_warn_count *) p1;
  const odr_type_warn_count *t2 = (const odr_type_warn_count *) p2;

  if (t1->dyn_count < t2->dyn_count)
    return 1;
  if (t1->dyn_count > t2->dyn_count)
    return -1;
  return t2->count - t1->count;
}

/* Compare decl warning records P1 and P2; qsort helper that sorts by
   decreasing dyn_count, breaking ties by decreasing count.  */

int
decl_warning_cmp (const void *p1, const void *p2)
{
  const decl_warn_count *t1 = *(const decl_warn_count * const *) p1;
  const decl_warn_count *t2 = *(const decl_warn_count * const *) p2;

  if (t1->dyn_count < t2->dyn_count)
    return 1;
  if (t1->dyn_count > t2->dyn_count)
    return -1;
  return t2->count - t1->count;
}

/* The ipa-devirt pass.
   When a polymorphic call has only one likely target in the unit,
   turn it into a speculative call.  */
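
/* For example, given an illustrative C++ unit

     struct A { virtual void foo (); };
     struct B : A { virtual void foo (); };   // the only likely target

   an indirect call a->foo () is turned into roughly (pseudo code)

     if (a->vptr[otr_token] == &B::foo)   // speculation guard
       B::foo (a);                        // direct call, later inlinable
     else
       a->vptr[otr_token] (a);            // original indirect call kept
                                          // as fallback

   This is only a sketch: here we just create the speculative callgraph
   edge, and the guard is materialized later when the speculation is
   expanded.  */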

static unsigned int
ipa_devirt (void)
{
  struct cgraph_node *n;
  hash_set<void *> bad_call_targets;
  struct cgraph_edge *e;

  int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
  int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
  int nwrong = 0, nok = 0, nexternal = 0, nartificial = 0;

  /* We can output -Wsuggest-final-methods and -Wsuggest-final-types warnings.
     This is implemented by setting up final_warning_records that are updated
     by get_polymorphic_call_targets.
     We need to clear the cache in this case to trigger recomputation of all
     entries.  */
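  /* For instance, for an illustrative hierarchy

       struct A { virtual void foo (); };   // no derived types anywhere

     -Wsuggest-final-types suggests declaring A final, and
     -Wsuggest-final-methods suggests declaring A::foo final; either
     annotation lets calls through an A * be devirtualized directly
     instead of just speculatively.  */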
  if (warn_suggest_final_methods || warn_suggest_final_types)
    {
      final_warning_records = new (final_warning_record);
      final_warning_records->type_warnings = vNULL;
      final_warning_records->type_warnings.safe_grow_cleared (odr_types.length ());
      free_polymorphic_call_targets_hash ();
    }

  FOR_EACH_DEFINED_FUNCTION (n)
    {
      bool update = false;
      if (dump_file && n->indirect_calls)
        fprintf (dump_file, "\n\nProcessing function %s/%i\n",
                 n->name (), n->order);
      for (e = n->indirect_calls; e; e = e->next_callee)
        if (e->indirect_info->polymorphic)
          {
            struct cgraph_node *likely_target = NULL;
            void *cache_token;
            bool final;
            int speculative_targets;

            if (final_warning_records)
              final_warning_records->dyn_count = e->count;

            vec <cgraph_node *> targets
              = possible_polymorphic_call_targets
                  (e, &final, &cache_token, &speculative_targets);
            unsigned int i;

            if (dump_file)
              dump_possible_polymorphic_call_targets
                (dump_file, e);

            npolymorphic++;

            if (!flag_devirtualize_speculatively)
              continue;

            if (!e->maybe_hot_p ())
              {
                if (dump_file)
                  fprintf (dump_file, "Call is cold\n\n");
                ncold++;
                continue;
              }
            if (e->speculative)
              {
                if (dump_file)
                  fprintf (dump_file, "Call is already speculated\n\n");
                nspeculated++;

                /* When dumping, see if we agree with the speculation.  */
                if (!dump_file)
                  continue;
              }
            if (bad_call_targets.contains (cache_token))
              {
                if (dump_file)
                  fprintf (dump_file, "Target list is known to be useless\n\n");
                nmultiple++;
                continue;
              }
            for (i = 0; i < targets.length (); i++)
              if (likely_target_p (targets[i]))
                {
                  if (likely_target)
                    {
                      if (i < (unsigned) speculative_targets)
                        {
                          likely_target = NULL;
                          if (dump_file)
                            fprintf (dump_file, "More than one likely target\n\n");
                          nmultiple++;
                        }
                      break;
                    }
                  likely_target = targets[i];
                }
            if (!likely_target)
              {
                bad_call_targets.add (cache_token);
                continue;
              }
            /* This is reached only when dumping; check whether we agree or
               disagree with the speculation.  */
            if (e->speculative)
              {
                struct cgraph_edge *e2;
                struct ipa_ref *ref;
                e->speculative_call_info (e2, e, ref);
                if (e2->callee->ultimate_alias_target ()
                    == likely_target->ultimate_alias_target ())
                  {
                    fprintf (dump_file, "We agree with speculation\n\n");
                    nok++;
                  }
                else
                  {
                    fprintf (dump_file, "We disagree with speculation\n\n");
                    nwrong++;
                  }
                continue;
              }
            if (!likely_target->definition)
              {
                if (dump_file)
                  fprintf (dump_file, "Target is not a definition\n\n");
                nnotdefined++;
                continue;
              }
            /* Do not introduce new references to external symbols.  While
               we could handle these just fine, it is common for programs to
               incorrectly use headers defining methods they are never
               actually linked with.  */
            if (DECL_EXTERNAL (likely_target->decl))
              {
                if (dump_file)
                  fprintf (dump_file, "Target is external\n\n");
                nexternal++;
                continue;
              }
            /* Don't use an implicitly-declared destructor (c++/58678).  */
            struct cgraph_node *non_thunk_target
              = likely_target->function_symbol ();
            if (DECL_ARTIFICIAL (non_thunk_target->decl))
              {
                if (dump_file)
                  fprintf (dump_file, "Target is artificial\n\n");
                nartificial++;
                continue;
              }
            if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
                && likely_target->can_be_discarded_p ())
              {
                if (dump_file)
                  fprintf (dump_file, "Target is overwritable\n\n");
                noverwritable++;
                continue;
              }
            else if (dbg_cnt (devirt))
              {
                if (dump_enabled_p ())
                  {
                    location_t locus = gimple_location_safe (e->call_stmt);
                    dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
                                     "speculatively devirtualizing call in %s/%i to %s/%i\n",
                                     n->name (), n->order,
                                     likely_target->name (),
                                     likely_target->order);
                  }
                if (!likely_target->can_be_discarded_p ())
                  {
                    cgraph_node *alias;
                    alias = dyn_cast<cgraph_node *>
                              (likely_target->noninterposable_alias ());
                    if (alias)
                      likely_target = alias;
                  }
                nconverted++;
                update = true;
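                /* Turn the call into a speculative one; assume the guessed
                   target is taken roughly 80% of the time, hence the 8/10
                   scaling of the edge count and frequency.  */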
                e->make_speculative
                  (likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
              }
          }
      if (update)
        inline_update_overall_summary (n);
    }
  if (warn_suggest_final_methods || warn_suggest_final_types)
    {
      if (warn_suggest_final_types)
        {
          final_warning_records->type_warnings.qsort (type_warning_cmp);
          for (unsigned int i = 0;
               i < final_warning_records->type_warnings.length (); i++)
            if (final_warning_records->type_warnings[i].count)
              {
                tree type = final_warning_records->type_warnings[i].type;
                warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
                            OPT_Wsuggest_final_types,
                            "Declaring type %qD final "
                            "would enable devirtualization of %i calls",
                            type,
                            final_warning_records->type_warnings[i].count);
              }
        }

      if (warn_suggest_final_methods)
        {
          vec<const decl_warn_count *> decl_warnings_vec = vNULL;

          final_warning_records->decl_warnings.traverse
            <vec<const decl_warn_count *> *, add_decl_warning> (&decl_warnings_vec);
          decl_warnings_vec.qsort (decl_warning_cmp);
          for (unsigned int i = 0; i < decl_warnings_vec.length (); i++)
            {
              tree decl = decl_warnings_vec[i]->decl;
              int count = decl_warnings_vec[i]->count;

              if (DECL_CXX_DESTRUCTOR_P (decl))
                warning_at (DECL_SOURCE_LOCATION (decl),
                            OPT_Wsuggest_final_methods,
                            "Declaring virtual destructor of %qD final "
                            "would enable devirtualization of %i calls",
                            DECL_CONTEXT (decl), count);
              else
                warning_at (DECL_SOURCE_LOCATION (decl),
                            OPT_Wsuggest_final_methods,
                            "Declaring method %qD final "
                            "would enable devirtualization of %i calls",
                            decl, count);
            }
        }

      delete (final_warning_records);
      final_warning_records = 0;
    }

  if (dump_file)
    fprintf (dump_file,
             "%i polymorphic calls, %i devirtualized,"
             " %i speculatively devirtualized, %i cold\n"
             "%i have multiple targets, %i overwritable,"
             " %i already speculated (%i agree, %i disagree),"
             " %i external, %i not defined, %i artificial\n",
             npolymorphic, ndevirtualized, nconverted, ncold,
             nmultiple, noverwritable, nspeculated, nok, nwrong,
             nexternal, nnotdefined, nartificial);
  return ndevirtualized ? TODO_remove_functions : 0;
}

namespace {

const pass_data pass_data_ipa_devirt =
{
  IPA_PASS, /* type */
  "devirt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_DEVIRT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_dump_symtab ), /* todo_flags_finish */
};

class pass_ipa_devirt : public ipa_opt_pass_d
{
public:
  pass_ipa_devirt (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
                      NULL, /* generate_summary */
                      NULL, /* write_summary */
                      NULL, /* read_summary */
                      NULL, /* write_optimization_summary */
                      NULL, /* read_optimization_summary */
                      NULL, /* stmt_fixup */
                      0, /* function_transform_todo_flags_start */
                      NULL, /* function_transform */
                      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return (flag_devirtualize
              && (flag_devirtualize_speculatively
                  || (warn_suggest_final_methods
                      || warn_suggest_final_types))
              && optimize);
    }

  virtual unsigned int execute (function *) { return ipa_devirt (); }

}; // class pass_ipa_devirt

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_devirt (gcc::context *ctxt)
{
  return new pass_ipa_devirt (ctxt);
}

#include "gt-ipa-devirt.h"