gcc/ipa-type-escape.c
1 /* Type based alias analysis.
2 Copyright (C) 2004, 2005, 2006 Free Software Foundation, Inc.
3 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
21
22 /* This pass determines which types in the program contain only
23 instances that are completely encapsulated by the compilation unit.
24 Those types that are encapsulated must also pass the further
25 requirement that there be no bad operations on any instances of
26 those types.
27
28 A great deal of freedom in compilation is allowed for the instances
29 of those types that pass these conditions.
30 */
31
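/* An illustrative sketch (hypothetical user code, not taken from any
   particular test case): a type such as

     struct point { int x, y; };

   whose instances are only created, used and addressed within this
   compilation unit can be treated as contained, while a type whose
   instances are handed to or returned from externally visible
   functions cannot.  */
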
32 /* The code in this module is called by the ipa pass manager. It
33 should be one of the later passes since its information is used by
34 the rest of the compilation. */
35
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
40 #include "tree.h"
41 #include "tree-flow.h"
42 #include "tree-inline.h"
43 #include "tree-pass.h"
44 #include "langhooks.h"
45 #include "pointer-set.h"
46 #include "ggc.h"
47 #include "ipa-utils.h"
48 #include "ipa-type-escape.h"
49 #include "c-common.h"
50 #include "tree-gimple.h"
51 #include "cgraph.h"
52 #include "output.h"
53 #include "flags.h"
54 #include "timevar.h"
55 #include "diagnostic.h"
56 #include "langhooks.h"
57
58 /* Some of the aliasing is called very early, before this phase is
59 called. To assure that this is not a problem, we keep track of if
60 this phase has been run. */
61 static bool initialized = false;
62
63 /* This bitmap contains the set of local vars that are the lhs of
64    calls to mallocs.  When one of these variables is seen on the rhs
65    as part of a cast, the cast is not marked as doing bad things to
66    the type, even though such casts are generally of the form
67 "foo = (type_of_foo)void_temp". */
68 static bitmap results_of_malloc;
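/* For example (an illustrative sketch, not from the sources): given

     tmp = malloc (n);
     s = (struct foo *) tmp;

   the DECL_UID of "tmp" is recorded in this bitmap so that the cast of
   "tmp" to "struct foo *" is not treated as a bad cast.  */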
69
70 /* Scratch bitmap for avoiding work. */
71 static bitmap been_there_done_that;
72 static bitmap bitmap_tmp;
73
74 /* There are two levels of escape that types can undergo.
75
76 EXPOSED_PARAMETER - some instance of the variable is
77 passed by value into an externally visible function or some
78 instance of the variable is passed out of an externally visible
79 function as a return value. In this case any of the fields of the
80 variable that are pointer types end up having their types marked as
81 FULL_ESCAPE.
82
83 FULL_ESCAPE - when bad things happen to good types. One of the
84    following things happens to the type: (a) an instance of the
85    variable has its address passed to an externally visible function,
86    (b) the address is taken and some bad cast happens to the address,
87    or (c) explicit arithmetic is done to the address.
88 */
89
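/* An illustrative sketch of the distinction (hypothetical user code,
   not from the sources):

     struct t { int i; };
     struct s { struct t *p; };
     extern void by_value (struct s);
     extern void by_reference (struct s *);

   Calling "by_value (instance)" makes "struct s" EXPOSED_PARAMETER,
   which in turn marks the pointed-to type "struct t" of the field "p"
   as FULL_ESCAPE; calling "by_reference (&instance)" marks "struct s"
   itself as FULL_ESCAPE.  */
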
90 enum escape_t
91 {
92 EXPOSED_PARAMETER,
93 FULL_ESCAPE
94 };
95
96 /* The following two bit vectors global_types_* correspond to
97 previous cases above. During the analysis phase, a bit is set in
98 one of these vectors if an operation of the offending class is
99 discovered to happen on the associated type. */
100
101 static bitmap global_types_exposed_parameter;
102 static bitmap global_types_full_escape;
103
104 /* All of the types seen in this compilation unit. */
105 static bitmap global_types_seen;
106 /* Reverse map to take a canon uid and map it to a canon type. Uid's
107 are never manipulated unless they are associated with a canon
108 type. */
109 static splay_tree uid_to_canon_type;
110
111 /* Internal structure of type mapping code. This maps a canon type
112 name to its canon type. */
113 static splay_tree all_canon_types;
114
115 /* Map from type clones to the single canon type. */
116 static splay_tree type_to_canon_type;
117
118 /* A splay tree of bitmaps. An element X in the splay tree has a bit
119 set in its bitmap at TYPE_UID (TYPE_MAIN_VARIANT (Y)) if there was
120 an operation in the program of the form "&X.Y". */
121 static splay_tree uid_to_addressof_down_map;
122
123 /* A splay tree of bitmaps. An element Y in the splay tree has a bit
124 set in its bitmap at TYPE_UID (TYPE_MAIN_VARIANT (X)) if there was
125 an operation in the program of the form "&X.Y". */
126 static splay_tree uid_to_addressof_up_map;
127
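/* Illustrative example (hypothetical types, not from the sources):
   given

     struct inner { int i; };
     struct outer { struct inner in; } o;

   the expression "&o.in" sets the bit for "inner" in the down-map
   entry of "outer" and the bit for "outer" in the up-map entry of
   "inner".  */
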
128 /* Tree to hold the subtype maps used to mark subtypes of escaped
129 types. */
130 static splay_tree uid_to_subtype_map;
131
132 /* Records tree nodes seen in cgraph_create_edges. Simply using
133 walk_tree_without_duplicates doesn't guarantee each node is visited
134 once because it gets a new htab upon each recursive call from
135 scan_for_refs. */
136 static struct pointer_set_t *visited_nodes;
137
138 static bitmap_obstack ipa_obstack;
139
140 /* Get the name of TYPE or return the string "<UNNAMED>". */
141 static char*
142 get_name_of_type (tree type)
143 {
144 tree name = TYPE_NAME (type);
145
146 if (!name)
147 /* Unnamed type, do what you like here. */
148 return (char*)"<UNNAMED>";
149
150   /* It will be a TYPE_DECL in the case of a typedef; otherwise, an
151      IDENTIFIER_NODE.  */
152 if (TREE_CODE (name) == TYPE_DECL)
153 {
154 /* Each DECL has a DECL_NAME field which contains an
155 IDENTIFIER_NODE. (Some decls, most often labels, may have
156 zero as the DECL_NAME). */
157 if (DECL_NAME (name))
158 return (char*)IDENTIFIER_POINTER (DECL_NAME (name));
159 else
160 /* Unnamed type, do what you like here. */
161 return (char*)"<UNNAMED>";
162 }
163 else if (TREE_CODE (name) == IDENTIFIER_NODE)
164 return (char*)IDENTIFIER_POINTER (name);
165 else
166 return (char*)"<UNNAMED>";
167 }
168
169 struct type_brand_s
170 {
171 char* name;
172 int seq;
173 };
174
175 /* Splay tree comparison function on type_brand_s structures. */
176
177 static int
178 compare_type_brand (splay_tree_key sk1, splay_tree_key sk2)
179 {
180 struct type_brand_s * k1 = (struct type_brand_s *) sk1;
181 struct type_brand_s * k2 = (struct type_brand_s *) sk2;
182
183 int value = strcmp(k1->name, k2->name);
184 if (value == 0)
185 return k2->seq - k1->seq;
186 else
187 return value;
188 }
189
190 /* All of the "unique_type" code is a hack to get around the sleazy
191    implementation used to compile more than one file.  Currently gcc does
192 not get rid of multiple instances of the same type that have been
193 collected from different compilation units. */
194 /* This is a trivial algorithm for removing duplicate types. This
195 would not work for any language that used structural equivalence as
196 the basis of its type system. */
197 /* Return either TYPE if this is the first time TYPE has been seen,
198    or a compatible TYPE that has already been processed.  */
199
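/* A sketch of the intended behavior (assumed scenario, not from the
   sources): if two translation units each declare

     struct list { struct list *next; };

   both RECORD_TYPE nodes get the brand name "list"; when the second
   one is looked up, it is found compatible with the first and is
   recorded as an alias of the first rather than as a new canon type.  */
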
200 static tree
201 discover_unique_type (tree type)
202 {
203 struct type_brand_s * brand = XNEW (struct type_brand_s);
204 int i = 0;
205 splay_tree_node result;
206
207 brand->name = get_name_of_type (type);
208
209 while (1)
210 {
211 brand->seq = i++;
212 result = splay_tree_lookup (all_canon_types, (splay_tree_key) brand);
213
214 if (result)
215 {
216 /* Create an alias since this is just the same as
217 other_type. */
218 tree other_type = (tree) result->value;
219 if (lang_hooks.types_compatible_p (type, other_type) == 1)
220 {
221 free (brand);
222 /* Insert this new type as an alias for other_type. */
223 splay_tree_insert (type_to_canon_type,
224 (splay_tree_key) type,
225 (splay_tree_value) other_type);
226 return other_type;
227 }
228 /* Not compatible, look for next instance with same name. */
229 }
230 else
231 {
232 /* No more instances, create new one since this is the first
233 time we saw this type. */
234 brand->seq = i++;
235 /* Insert the new brand. */
236 splay_tree_insert (all_canon_types,
237 (splay_tree_key) brand,
238 (splay_tree_value) type);
239
240 /* Insert this new type as an alias for itself. */
241 splay_tree_insert (type_to_canon_type,
242 (splay_tree_key) type,
243 (splay_tree_value) type);
244
245 /* Insert the uid for reverse lookup; */
246 splay_tree_insert (uid_to_canon_type,
247 (splay_tree_key) TYPE_UID (type),
248 (splay_tree_value) type);
249
250 bitmap_set_bit (global_types_seen, TYPE_UID (type));
251 return type;
252 }
253 }
254 }
255
256 /* Return true if TYPE is one of the type classes that we are willing
257 to analyze. This skips the goofy types like arrays of pointers to
258 methods. */
259 static bool
260 type_to_consider (tree type)
261 {
262 /* Strip the *'s off. */
263 type = TYPE_MAIN_VARIANT (type);
264 while (POINTER_TYPE_P (type) || TREE_CODE (type) == ARRAY_TYPE)
265 type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
266
267 switch (TREE_CODE (type))
268 {
269 case BOOLEAN_TYPE:
270 case COMPLEX_TYPE:
271 case ENUMERAL_TYPE:
272 case INTEGER_TYPE:
273 case QUAL_UNION_TYPE:
274 case REAL_TYPE:
275 case RECORD_TYPE:
276 case UNION_TYPE:
277 case VECTOR_TYPE:
278 case VOID_TYPE:
279 return true;
280
281 default:
282 return false;
283 }
284 }
285
286 /* Get the canon type of TYPE. If SEE_THRU_PTRS is true, remove all
287 the POINTER_TOs and if SEE_THRU_ARRAYS is true, remove all of the
288 ARRAY_OFs and POINTER_TOs. */
289
290 static tree
291 get_canon_type (tree type, bool see_thru_ptrs, bool see_thru_arrays)
292 {
293 splay_tree_node result;
294 /* Strip the *'s off. */
295 if (!type || !type_to_consider (type))
296 return NULL;
297
298 type = TYPE_MAIN_VARIANT (type);
299 if (see_thru_arrays)
300 while (POINTER_TYPE_P (type) || TREE_CODE (type) == ARRAY_TYPE)
301 type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
302
303 else if (see_thru_ptrs)
304 while (POINTER_TYPE_P (type))
305 type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
306
307 result = splay_tree_lookup(type_to_canon_type, (splay_tree_key) type);
308
309 if (result == NULL)
310 return discover_unique_type (type);
311 else return (tree) result->value;
312 }
313
314 /* Same as get_canon_type, except return the TYPE_UID rather than the
315    TYPE.  */
316
317 static int
318 get_canon_type_uid (tree type, bool see_thru_ptrs, bool see_thru_arrays)
319 {
320 type = get_canon_type (type, see_thru_ptrs, see_thru_arrays);
321 if (type)
322 return TYPE_UID(type);
323 else return 0;
324 }
325
326 /* Return 0 if TYPE is a record or union type. Return a positive
327 number if TYPE is a pointer to a record or union. The number is
328 the number of pointer types stripped to get to the record or union
329 type. Return -1 if TYPE is none of the above. */
330
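/* For instance (illustrative): "struct s" yields 0, "struct s *"
   yields 1 and "struct s **" yields 2, while "int" and "int *"
   yield -1.  */
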
331 int
332 ipa_type_escape_star_count_of_interesting_type (tree type)
333 {
334 int count = 0;
335 /* Strip the *'s off. */
336 if (!type)
337 return -1;
338 type = TYPE_MAIN_VARIANT (type);
339 while (POINTER_TYPE_P (type))
340 {
341 type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
342 count++;
343 }
344
345   /* We are interested in records and unions only.  */
346 if (TREE_CODE (type) == RECORD_TYPE
347 || TREE_CODE (type) == QUAL_UNION_TYPE
348 || TREE_CODE (type) == UNION_TYPE)
349 return count;
350 else
351 return -1;
352 }
353
354
355 /* Return 0 if TYPE is a record or union type.  Return a positive
356    number if TYPE is a pointer to, or an array of, a record or union;
357    the number counts the pointer and array types stripped to reach the
358    record or union type.  Return -1 if TYPE is none of the above.  */
359
360 int
361 ipa_type_escape_star_count_of_interesting_or_array_type (tree type)
362 {
363 int count = 0;
364 /* Strip the *'s off. */
365 if (!type)
366 return -1;
367 type = TYPE_MAIN_VARIANT (type);
368 while (POINTER_TYPE_P (type) || TREE_CODE (type) == ARRAY_TYPE)
369 {
370 type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
371 count++;
372 }
373
374   /* We are interested in records and unions only.  */
375 if (TREE_CODE (type) == RECORD_TYPE
376 || TREE_CODE (type) == QUAL_UNION_TYPE
377 || TREE_CODE (type) == UNION_TYPE)
378 return count;
379 else
380 return -1;
381 }
382
383
384 /* Return true if the record or union TYPE passed in does not escape
385    this compilation unit.  Note that all of the pointer-to's are
386    removed before testing since these may not be correct.  */
387
388 bool
389 ipa_type_escape_type_contained_p (tree type)
390 {
391 if (!initialized)
392 return false;
393 return !bitmap_bit_p (global_types_full_escape,
394 get_canon_type_uid (type, true, false));
395 }
396
397 /* Return true if a modification to a field of type FIELD_TYPE cannot
398 clobber a record of RECORD_TYPE. */
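
/* A sketch of the intent (hypothetical types, not from the sources):
   if "struct r { int i; }" is contained and no expression of the form
   "&x.i" taking the address of an "int" field inside an "r" appears in
   the program, a store through an "int *" cannot clobber an object of
   type "struct r".  */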
399
400 bool
401 ipa_type_escape_field_does_not_clobber_p (tree record_type, tree field_type)
402 {
403 splay_tree_node result;
404 int uid;
405
406 if (!initialized)
407 return false;
408
409 /* Strip off all of the pointer tos on the record type. Strip the
410 same number of pointer tos from the field type. If the field
411 type has fewer, it could not have been aliased. */
412 record_type = TYPE_MAIN_VARIANT (record_type);
413 field_type = TYPE_MAIN_VARIANT (field_type);
414 while (POINTER_TYPE_P (record_type))
415 {
416 record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_type));
417 if (POINTER_TYPE_P (field_type))
418 field_type = TYPE_MAIN_VARIANT (TREE_TYPE (field_type));
419 else
420 /* However, if field_type is a union, this quick test is not
421 correct since one of the variants of the union may be a
422 pointer to type and we cannot see across that here. So we
423 just strip the remaining pointer tos off the record type
424 and fall thru to the more precise code. */
425 if (TREE_CODE (field_type) == QUAL_UNION_TYPE
426 || TREE_CODE (field_type) == UNION_TYPE)
427 {
428 while (POINTER_TYPE_P (record_type))
429 record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_type));
430 break;
431 }
432 else
433 return true;
434 }
435
436 record_type = get_canon_type (record_type, true, true);
437 /* The record type must be contained. The field type may
438 escape. */
439 if (!ipa_type_escape_type_contained_p (record_type))
440 return false;
441
442 uid = TYPE_UID (record_type);
443 result = splay_tree_lookup (uid_to_addressof_down_map, (splay_tree_key) uid);
444
445 if (result)
446 {
447 bitmap field_type_map = (bitmap) result->value;
448 uid = get_canon_type_uid (field_type, true, true);
449 /* If the bit is there, the address was taken. If not, it
450 wasn't. */
451 return !bitmap_bit_p (field_type_map, uid);
452 }
453 else
454 /* No bitmap means no addresses were taken. */
455 return true;
456 }
457
458
459 /* Add TYPE to the suspect type set for ESCAPE_STATUS.  Return the
460    canonical version of TYPE, or NULL if TYPE is not interesting.  */
461
462 static tree
463 mark_type (tree type, enum escape_t escape_status)
464 {
465 bitmap map = NULL;
466 int uid;
467
468 type = get_canon_type (type, true, true);
469 if (!type)
470 return NULL;
471
472 switch (escape_status)
473 {
474 case EXPOSED_PARAMETER:
475 map = global_types_exposed_parameter;
476 break;
477 case FULL_ESCAPE:
478 map = global_types_full_escape;
479 break;
480 }
481
482 uid = TYPE_UID (type);
483 if (bitmap_bit_p (map, uid))
484 return type;
485 else
486 {
487 bitmap_set_bit (map, uid);
488 if (escape_status == FULL_ESCAPE)
489 {
490 /* Efficiency hack. When things are bad, do not mess around
491 with this type anymore. */
492 bitmap_set_bit (global_types_exposed_parameter, uid);
493 }
494 }
495 return type;
496 }
497
498 /* Add interesting TYPE to the suspect type set. If the set is
499 EXPOSED_PARAMETER and the TYPE is a pointer type, the set is
500 changed to FULL_ESCAPE. */
501
502 static void
503 mark_interesting_type (tree type, enum escape_t escape_status)
504 {
505 if (!type) return;
506 if (ipa_type_escape_star_count_of_interesting_type (type) >= 0)
507 {
508 if ((escape_status == EXPOSED_PARAMETER)
509 && POINTER_TYPE_P (type))
510         /* Only structs or unions passed by value can be
511            EXPOSED_PARAMETERs.  Anything passed by reference to an
512            external function fully exposes the type.  */
513 mark_type (type, FULL_ESCAPE);
514 else
515 mark_type (type, escape_status);
516 }
517 }
518
519 /* Return true if PARENT is a supertype of CHILD.  Both types must be
520 known to be structures or unions. */
521
522 static bool
523 parent_type_p (tree parent, tree child)
524 {
525 int i;
526 tree binfo, base_binfo;
527 if (TYPE_BINFO (parent))
528 for (binfo = TYPE_BINFO (parent), i = 0;
529 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
530 {
531 tree binfotype = BINFO_TYPE (base_binfo);
532 if (binfotype == child)
533 return true;
534 else if (parent_type_p (binfotype, child))
535 return true;
536 }
537 if (TREE_CODE (parent) == UNION_TYPE
538 || TREE_CODE (parent) == QUAL_UNION_TYPE)
539 {
540 tree field;
541 /* Search all of the variants in the union to see if one of them
542 is the child. */
543 for (field = TYPE_FIELDS (parent);
544 field;
545 field = TREE_CHAIN (field))
546 {
547 tree field_type;
548 if (TREE_CODE (field) != FIELD_DECL)
549 continue;
550
551 field_type = TREE_TYPE (field);
552 if (field_type == child)
553 return true;
554 }
555
556 /* If we did not find it, recursively ask the variants if one of
557 their children is the child type. */
558 for (field = TYPE_FIELDS (parent);
559 field;
560 field = TREE_CHAIN (field))
561 {
562 tree field_type;
563 if (TREE_CODE (field) != FIELD_DECL)
564 continue;
565
566 field_type = TREE_TYPE (field);
567 if (TREE_CODE (field_type) == RECORD_TYPE
568 || TREE_CODE (field_type) == QUAL_UNION_TYPE
569 || TREE_CODE (field_type) == UNION_TYPE)
570 if (parent_type_p (field_type, child))
571 return true;
572 }
573 }
574
575 if (TREE_CODE (parent) == RECORD_TYPE)
576 {
577 tree field;
578 for (field = TYPE_FIELDS (parent);
579 field;
580 field = TREE_CHAIN (field))
581 {
582 tree field_type;
583 if (TREE_CODE (field) != FIELD_DECL)
584 continue;
585
586 field_type = TREE_TYPE (field);
587 if (field_type == child)
588 return true;
589 /* You can only cast to the first field so if it does not
590 match, quit. */
591 if (TREE_CODE (field_type) == RECORD_TYPE
592 || TREE_CODE (field_type) == QUAL_UNION_TYPE
593 || TREE_CODE (field_type) == UNION_TYPE)
594 {
595 if (parent_type_p (field_type, child))
596 return true;
597 else
598 break;
599 }
600 }
601 }
602 return false;
603 }
604
605 /* Strip the pointer-tos off of *TYPE_PTR, store the stripped type
606    back through TYPE_PTR, and return the number stripped.  */
607
608 static int
609 count_stars (tree* type_ptr)
610 {
611 tree type = *type_ptr;
612 int i = 0;
613 type = TYPE_MAIN_VARIANT (type);
614 while (POINTER_TYPE_P (type))
615 {
616 type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
617 i++;
618 }
619
620 *type_ptr = type;
621 return i;
622 }
623
624 enum cast_type {
625 CT_UP,
626 CT_DOWN,
627 CT_SIDEWAYS,
628 CT_USELESS
629 };
630
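/* For illustration (hypothetical types, not from the sources): a cast
   between "struct a *" and an unrelated "struct b *" is CT_SIDEWAYS
   and causes both types to be marked as escaping by check_cast; a cast
   of "struct a *" to "struct a *" is CT_USELESS; casts between a type
   and a type embedded as its base or initial field are CT_UP or
   CT_DOWN and are not penalized.  */
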
631 /* Check the cast FROM_TYPE to TO_TYPE. This function requires that
632 the two types have already passed the
633 ipa_type_escape_star_count_of_interesting_type test. */
634
635 static enum cast_type
636 check_cast_type (tree to_type, tree from_type)
637 {
638 int to_stars = count_stars (&to_type);
639 int from_stars = count_stars (&from_type);
640 if (to_stars != from_stars)
641 return CT_SIDEWAYS;
642
643 if (to_type == from_type)
644 return CT_USELESS;
645
646 if (parent_type_p (to_type, from_type)) return CT_UP;
647 if (parent_type_p (from_type, to_type)) return CT_DOWN;
648 return CT_SIDEWAYS;
649 }
650
651 /* Check a cast of the variable FROM to TO_TYPE.  Mark the escaping
652    types if appropriate.  */
653 static void
654 check_cast (tree to_type, tree from)
655 {
656 tree from_type = get_canon_type (TREE_TYPE (from), false, false);
657 bool to_interesting_type, from_interesting_type;
658
659 to_type = get_canon_type (to_type, false, false);
660 if (!from_type || !to_type || from_type == to_type)
661 return;
662
663 to_interesting_type =
664 ipa_type_escape_star_count_of_interesting_type (to_type) >= 0;
665 from_interesting_type =
666 ipa_type_escape_star_count_of_interesting_type (from_type) >= 0;
667
668 if (to_interesting_type)
669 if (from_interesting_type)
670 {
671 /* Both types are interesting. This can be one of four types
672 of cast: useless, up, down, or sideways. We do not care
673 about up or useless. Sideways casts are always bad and
674 both sides get marked as escaping. Downcasts are not
675 interesting here because if type is marked as escaping, all
676 of its subtypes escape. */
677 switch (check_cast_type (to_type, from_type))
678 {
679 case CT_UP:
680 case CT_USELESS:
681 case CT_DOWN:
682 break;
683
684 case CT_SIDEWAYS:
685 mark_type (to_type, FULL_ESCAPE);
686 mark_type (from_type, FULL_ESCAPE);
687 break;
688 }
689 }
690 else
691 {
692       /* If this is a cast from a local that is the result of a
693          call to malloc, do not mark the cast as bad.  */
694 if (DECL_P (from) && !bitmap_bit_p (results_of_malloc, DECL_UID (from)))
695 mark_type (to_type, FULL_ESCAPE);
696 }
697 else if (from_interesting_type)
698 mark_type (from_type, FULL_ESCAPE);
699 }
700
701 /* Register the parameter and return types of function FN.  The types
702    are marked as escaping (EXPOSED_PARAMETER) if ESCAPES is true, i.e.
703    if the function is visible outside of the compilation unit.  */
704 static void
705 check_function_parameter_and_return_types (tree fn, bool escapes)
706 {
707 tree arg;
708
709 if (TYPE_ARG_TYPES (TREE_TYPE (fn)))
710 {
711 for (arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
712 arg && TREE_VALUE (arg) != void_type_node;
713 arg = TREE_CHAIN (arg))
714 {
715 tree type = get_canon_type (TREE_VALUE (arg), false, false);
716 if (escapes)
717 mark_interesting_type (type, EXPOSED_PARAMETER);
718 }
719 }
720 else
721 {
722 /* FIXME - According to Geoff Keating, we should never have to
723 do this; the front ends should always process the arg list
724 from the TYPE_ARG_LIST. However, Geoff is wrong, this code
725 does seem to be live. */
726
727 for (arg = DECL_ARGUMENTS (fn); arg; arg = TREE_CHAIN (arg))
728 {
729 tree type = get_canon_type (TREE_TYPE (arg), false, false);
730 if (escapes)
731 mark_interesting_type (type, EXPOSED_PARAMETER);
732 }
733 }
734 if (escapes)
735 {
736 tree type = get_canon_type (TREE_TYPE (TREE_TYPE (fn)), false, false);
737 mark_interesting_type (type, EXPOSED_PARAMETER);
738 }
739 }
740
741 /* Check the scope of the variable T; if T may be touched outside of
742    this compilation unit, mark its type as fully escaping.  */
743
744 static inline void
745 has_proper_scope_for_analysis (tree t)
746 {
747   /* If the variable has the "used" attribute, treat it as if it had
748      been touched by the devil.  */
749 tree type = get_canon_type (TREE_TYPE (t), false, false);
750 if (!type) return;
751
752 if (lookup_attribute ("used", DECL_ATTRIBUTES (t)))
753 {
754 mark_interesting_type (type, FULL_ESCAPE);
755 return;
756 }
757
758 /* Do not want to do anything with volatile except mark any
759 function that uses one to be not const or pure. */
760 if (TREE_THIS_VOLATILE (t))
761 return;
762
763 /* Do not care about a local automatic that is not static. */
764 if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
765 return;
766
767 if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
768 {
769 /* If the front end set the variable to be READONLY and
770 constant, we can allow this variable in pure or const
771 functions but the scope is too large for our analysis to set
772 these bits ourselves. */
773
774 if (TREE_READONLY (t)
775 && DECL_INITIAL (t)
776 && is_gimple_min_invariant (DECL_INITIAL (t)))
777 ; /* Read of a constant, do not change the function state. */
778 else
779 {
780 /* The type escapes for all public and externs. */
781 mark_interesting_type (type, FULL_ESCAPE);
782 }
783 }
784 }
785
786 /* If T is a FUNCTION_DECL, its parameter and return types escape; if
787    it is a VAR_DECL, check whether its scope forces its type to escape.  */
788
789 static void
790 check_operand (tree t)
791 {
792 if (!t) return;
793
794 /* This is an assignment from a function, register the types as
795 escaping. */
796 if (TREE_CODE (t) == FUNCTION_DECL)
797 check_function_parameter_and_return_types (t, true);
798
799 else if (TREE_CODE (t) == VAR_DECL)
800 has_proper_scope_for_analysis (t);
801 }
802
803 /* Examine tree T for references. */
804
805 static void
806 check_tree (tree t)
807 {
808 if ((TREE_CODE (t) == EXC_PTR_EXPR) || (TREE_CODE (t) == FILTER_EXPR))
809 return;
810
811 while (TREE_CODE (t) == REALPART_EXPR
812 || TREE_CODE (t) == IMAGPART_EXPR
813 || handled_component_p (t))
814 {
815 if (TREE_CODE (t) == ARRAY_REF)
816 check_operand (TREE_OPERAND (t, 1));
817 t = TREE_OPERAND (t, 0);
818 }
819
820 if (INDIRECT_REF_P (t))
821 /* || TREE_CODE (t) == MEM_REF) */
822 check_tree (TREE_OPERAND (t, 0));
823
824 if (SSA_VAR_P (t) || (TREE_CODE (t) == FUNCTION_DECL))
825 check_operand (t);
826 }
827
828 /* Create an address_of edge FROM_TYPE.TO_TYPE. */
829 static void
830 mark_interesting_addressof (tree to_type, tree from_type)
831 {
832 int from_uid;
833 int to_uid;
834 bitmap type_map;
835 splay_tree_node result;
836
837 from_type = get_canon_type (from_type, false, false);
838 to_type = get_canon_type (to_type, false, false);
839
840 if (!from_type || !to_type)
841 return;
842
843 from_uid = TYPE_UID (from_type);
844 to_uid = TYPE_UID (to_type);
845
846 gcc_assert (ipa_type_escape_star_count_of_interesting_type (from_type) == 0);
847
848 /* Process the Y into X map pointer. */
849 result = splay_tree_lookup (uid_to_addressof_down_map,
850 (splay_tree_key) from_uid);
851
852 if (result)
853 type_map = (bitmap) result->value;
854 else
855 {
856 type_map = BITMAP_ALLOC (&ipa_obstack);
857 splay_tree_insert (uid_to_addressof_down_map,
858 from_uid,
859 (splay_tree_value)type_map);
860 }
861 bitmap_set_bit (type_map, TYPE_UID (to_type));
862
863 /* Process the X into Y reverse map pointer. */
864 result =
865 splay_tree_lookup (uid_to_addressof_up_map, (splay_tree_key) to_uid);
866
867 if (result)
868 type_map = (bitmap) result->value;
869 else
870 {
871 type_map = BITMAP_ALLOC (&ipa_obstack);
872 splay_tree_insert (uid_to_addressof_up_map,
873 to_uid,
874 (splay_tree_value)type_map);
875 }
876   bitmap_set_bit (type_map, from_uid);
877 }
878
879 /* Scan tree T to see if there are any addresses taken within T.  */
880
881 static void
882 look_for_address_of (tree t)
883 {
884 if (TREE_CODE (t) == ADDR_EXPR)
885 {
886 tree x = get_base_var (t);
887 tree cref = TREE_OPERAND (t, 0);
888
889       /* If we have an expression of the form "&a.b.c.d", mark a.b,
890          b.c and c.d as having their addresses taken.  */
891 tree fielddecl = NULL_TREE;
892       while (cref != x)
893 {
894 if (TREE_CODE (cref) == COMPONENT_REF)
895 {
896 fielddecl = TREE_OPERAND (cref, 1);
897 mark_interesting_addressof (TREE_TYPE (fielddecl),
898 DECL_FIELD_CONTEXT (fielddecl));
899 }
900 else if (TREE_CODE (cref) == ARRAY_REF)
901 get_canon_type (TREE_TYPE (cref), false, false);
902
903 cref = TREE_OPERAND (cref, 0);
904 }
905
906 if (TREE_CODE (x) == VAR_DECL)
907 has_proper_scope_for_analysis (x);
908 }
909 }
910
911
912 /* Scan tree T to see if there are any casts within it.
913    LHS is the LHS of the expression involving the cast.  */
914
915 static void
916 look_for_casts (tree lhs __attribute__((unused)), tree t)
917 {
918 if (is_gimple_cast (t) || TREE_CODE (t) == VIEW_CONVERT_EXPR)
919 {
920 tree castfromvar = TREE_OPERAND (t, 0);
921 check_cast (TREE_TYPE (t), castfromvar);
922 }
923 else
924 while (handled_component_p (t))
925 {
926 t = TREE_OPERAND (t, 0);
927 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
928 {
929 /* This may be some part of a component ref.
930 IE it may be a.b.VIEW_CONVERT_EXPR<weird_type>(c).d, AFAIK.
931 castfromref will give you a.b.c, not a. */
932 tree castfromref = TREE_OPERAND (t, 0);
933 check_cast (TREE_TYPE (t), castfromref);
934 }
935 else if (TREE_CODE (t) == COMPONENT_REF)
936 get_canon_type (TREE_TYPE (TREE_OPERAND (t, 1)), false, false);
937 }
938 }
939
940 /* Check to see if T is a read or address of operation on a static var
941 we are interested in analyzing. */
942
943 static void
944 check_rhs_var (tree t)
945 {
946 look_for_address_of (t);
947   check_tree (t);
948 }
949
950 /* Check to see if T is an assignment to a static var we are
951 interested in analyzing. */
952
953 static void
954 check_lhs_var (tree t)
955 {
956   check_tree (t);
957 }
958
959 /* This is a scaled down version of get_asm_expr_operands from
960    tree_ssa_operands.c.  The version there runs much later and assumes
961    that aliasing information is already available.  Here we are just
962    trying to find if the set of inputs and outputs contain references
963    or address of operations to locals.  STMT is the asm statement
964    being analyzed.  */
965
966 static void
967 get_asm_expr_operands (tree stmt)
968 {
969 int noutputs = list_length (ASM_OUTPUTS (stmt));
970 const char **oconstraints
971 = (const char **) alloca ((noutputs) * sizeof (const char *));
972 int i;
973 tree link;
974 const char *constraint;
975 bool allows_mem, allows_reg, is_inout;
976
977   for (i = 0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
978 {
979 oconstraints[i] = constraint
980 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
981 parse_output_constraint (&constraint, i, 0, 0,
982 &allows_mem, &allows_reg, &is_inout);
983
984 check_lhs_var (TREE_VALUE (link));
985 }
986
987 for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
988 {
989 constraint
990 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
991 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
992 oconstraints, &allows_mem, &allows_reg);
993
994 check_rhs_var (TREE_VALUE (link));
995 }
996
997 /* There is no code here to check for asm memory clobbers. The
998 casual maintainer might think that such code would be necessary,
999 but that appears to be wrong. In other parts of the compiler,
1000 the asm memory clobbers are assumed to only clobber variables
1001 that are addressable. All types with addressable instances are
1002 assumed to already escape. So, we are protected here. */
1003 }
1004
1005 /* Check the parameters of a function call CALL_EXPR to mark the
1006    types that pass across the function boundary.  Also check to see
1007    if this is either an indirect call or a call outside of the
1008    compilation unit.  Return true if the callee is malloc-like (ECF_MALLOC).  */
1009
1010 static bool
1011 check_call (tree call_expr)
1012 {
1013 int flags = call_expr_flags(call_expr);
1014 tree operand;
1015 tree callee_t = get_callee_fndecl (call_expr);
1016 struct cgraph_node* callee;
1017 enum availability avail = AVAIL_NOT_AVAILABLE;
1018 call_expr_arg_iterator iter;
1019
1020 FOR_EACH_CALL_EXPR_ARG (operand, iter, call_expr)
1021 check_rhs_var (operand);
1022
1023 if (callee_t)
1024 {
1025 tree arg_type;
1026 tree last_arg_type = NULL;
1027 callee = cgraph_node(callee_t);
1028 avail = cgraph_function_body_availability (callee);
1029
1030 /* Check that there are no implicit casts in the passing of
1031 parameters. */
1032 if (TYPE_ARG_TYPES (TREE_TYPE (callee_t)))
1033 {
1034 for (arg_type = TYPE_ARG_TYPES (TREE_TYPE (callee_t)),
1035 operand = first_call_expr_arg (call_expr, &iter);
1036 arg_type && TREE_VALUE (arg_type) != void_type_node;
1037 arg_type = TREE_CHAIN (arg_type),
1038 operand = next_call_expr_arg (&iter))
1039 {
1040 if (operand)
1041 {
1042 last_arg_type = TREE_VALUE(arg_type);
1043 check_cast (last_arg_type, operand);
1044 }
1045 else
1046 /* The code reaches here for some unfortunate
1047 builtin functions that do not have a list of
1048 argument types. */
1049 break;
1050 }
1051 }
1052 else
1053 {
1054 /* FIXME - According to Geoff Keating, we should never
1055 have to do this; the front ends should always process
1056 the arg list from the TYPE_ARG_LIST. */
1057 for (arg_type = DECL_ARGUMENTS (callee_t),
1058 operand = first_call_expr_arg (call_expr, &iter);
1059 arg_type;
1060 arg_type = TREE_CHAIN (arg_type),
1061 operand = next_call_expr_arg (&iter))
1062 {
1063 if (operand)
1064 {
1065 last_arg_type = TREE_TYPE(arg_type);
1066 check_cast (last_arg_type, operand);
1067 }
1068 else
1069 /* The code reaches here for some unfortunate
1070 builtin functions that do not have a list of
1071 argument types. */
1072 break;
1073 }
1074 }
1075
1076 /* In the case where we have a var_args function, we need to
1077 check the remaining parameters against the last argument. */
1078 arg_type = last_arg_type;
1079 for (;
1080 operand != NULL_TREE;
1081 operand = next_call_expr_arg (&iter))
1082 {
1083 if (arg_type)
1084 check_cast (arg_type, operand);
1085 else
1086 {
1087 /* The code reaches here for some unfortunate
1088 builtin functions that do not have a list of
1089 argument types. Most of these functions have
1090 been marked as having their parameters not
1091 escape, but for the rest, the type is doomed. */
1092 tree type = get_canon_type (TREE_TYPE (operand), false, false);
1093 mark_interesting_type (type, FULL_ESCAPE);
1094 }
1095 }
1096 }
1097
1098 /* The callee is either unknown (indirect call) or there is just no
1099    scannable code for it (external call).  We look to see if there
1100 are any bits available for the callee (such as by declaration or
1101 because it is builtin) and process solely on the basis of those
1102 bits. */
1103
1104 if (avail == AVAIL_NOT_AVAILABLE || avail == AVAIL_OVERWRITABLE)
1105 {
1106 /* If this is a direct call to an external function, mark all of
1107 the parameter and return types. */
1108 FOR_EACH_CALL_EXPR_ARG (operand, iter, call_expr)
1109 {
1110 tree type = get_canon_type (TREE_TYPE (operand), false, false);
1111 mark_interesting_type (type, EXPOSED_PARAMETER);
1112 }
1113
1114 if (callee_t)
1115 {
1116 tree type =
1117 get_canon_type (TREE_TYPE (TREE_TYPE (callee_t)), false, false);
1118 mark_interesting_type (type, EXPOSED_PARAMETER);
1119 }
1120 }
1121 return (flags & ECF_MALLOC);
1122 }
1123
1124 /* CODE is the operation on OP0 and OP1. OP0 is the operand that we
1125 *know* is a pointer type. OP1 may be a pointer type. */
1126 static bool
1127 okay_pointer_operation (enum tree_code code, tree op0, tree op1)
1128 {
1129 tree op0type = TYPE_MAIN_VARIANT (TREE_TYPE (op0));
1130 tree op1type = TYPE_MAIN_VARIANT (TREE_TYPE (op1));
1131 if (POINTER_TYPE_P (op1type))
1132 return false;
1133 switch (code)
1134 {
1135 case MULT_EXPR:
1136 case PLUS_EXPR:
1137 case MINUS_EXPR:
1138 /* TODO: Handle multiples of op0 size as well */
1139 if (operand_equal_p (size_in_bytes (op0type), op1, 0))
1140 return true;
1141 /* fallthrough */
1142
1143 default:
1144 return false;
1145 }
1146 return false;
1147 }
1148
1149 /* TP is the part of the tree currently under the microscope.
1150    WALK_SUBTREES is part of the walk_tree API; we clear it once a subtree
1151    is fully handled.  DATA is the cgraph_node of the function being walked.  */
1152
1153 /* FIXME: When this is converted to run over SSA form, this code
1154 should be converted to use the operand scanner. */
1155
1156 static tree
1157 scan_for_refs (tree *tp, int *walk_subtrees, void *data)
1158 {
1159 struct cgraph_node *fn = data;
1160 tree t = *tp;
1161
1162 switch (TREE_CODE (t))
1163 {
1164 case VAR_DECL:
1165 if (DECL_INITIAL (t))
1166 walk_tree (&DECL_INITIAL (t), scan_for_refs, fn, visited_nodes);
1167 *walk_subtrees = 0;
1168 break;
1169
1170 case GIMPLE_MODIFY_STMT:
1171 {
1172         /* First look at the lhs and see what variable is stored to.  */
1173 tree lhs = GIMPLE_STMT_OPERAND (t, 0);
1174 tree rhs = GIMPLE_STMT_OPERAND (t, 1);
1175
1176 check_lhs_var (lhs);
1177 check_cast (TREE_TYPE (lhs), rhs);
1178
1179 /* For the purposes of figuring out what the cast affects */
1180
1181 /* Next check the operands on the rhs to see if they are ok. */
1182 switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
1183 {
1184 case tcc_binary:
1185 {
1186 tree op0 = TREE_OPERAND (rhs, 0);
1187 tree type0 = get_canon_type (TREE_TYPE (op0), false, false);
1188 tree op1 = TREE_OPERAND (rhs, 1);
1189 tree type1 = get_canon_type (TREE_TYPE (op1), false, false);
1190
1191 /* If this is pointer arithmetic of any bad sort, then
1192 we need to mark the types as bad. For binary
1193 operations, no binary operator we currently support
1194 is always "safe" in regard to what it would do to
1195 pointers for purposes of determining which types
1196 escape, except operations of the size of the type.
1197 It is possible that min and max under the right set
1198 of circumstances and if the moon is in the correct
1199 place could be safe, but it is hard to see how this
1200 is worth the effort. */
1201
1202 if (type0 && POINTER_TYPE_P (type0)
1203 && !okay_pointer_operation (TREE_CODE (rhs), op0, op1))
1204 mark_interesting_type (type0, FULL_ESCAPE);
1205 if (type1 && POINTER_TYPE_P (type1)
1206 && !okay_pointer_operation (TREE_CODE (rhs), op1, op0))
1207 mark_interesting_type (type1, FULL_ESCAPE);
1208
1209 look_for_casts (lhs, op0);
1210 look_for_casts (lhs, op1);
1211 check_rhs_var (op0);
1212 check_rhs_var (op1);
1213 }
1214 break;
1215 case tcc_unary:
1216 {
1217 tree op0 = TREE_OPERAND (rhs, 0);
1218 tree type0 = get_canon_type (TREE_TYPE (op0), false, false);
1219 /* For unary operations, if the operation is NEGATE or
1220 ABS on a pointer, this is also considered pointer
1221 arithmetic and thus, bad for business. */
1222             if (type0 && (TREE_CODE (rhs) == NEGATE_EXPR
1223                           || TREE_CODE (rhs) == ABS_EXPR)
1224 && POINTER_TYPE_P (type0))
1225 {
1226 mark_interesting_type (type0, FULL_ESCAPE);
1227 }
1228 check_rhs_var (op0);
1229 look_for_casts (lhs, op0);
1230 look_for_casts (lhs, rhs);
1231 }
1232
1233 break;
1234 case tcc_reference:
1235 look_for_casts (lhs, rhs);
1236 check_rhs_var (rhs);
1237 break;
1238 case tcc_declaration:
1239 check_rhs_var (rhs);
1240 break;
1241 case tcc_expression:
1242 switch (TREE_CODE (rhs))
1243 {
1244 case ADDR_EXPR:
1245 look_for_casts (lhs, TREE_OPERAND (rhs, 0));
1246 check_rhs_var (rhs);
1247 break;
1248 default:
1249 break;
1250 }
1251 break;
1252 case tcc_vl_exp:
1253 switch (TREE_CODE (rhs))
1254 {
1255 case CALL_EXPR:
1256             /* If this is a call to malloc, squirrel away the
1257                result so we do not mark the resulting cast as being
1258                bad.  */
1259 if (check_call (rhs))
1260 {
1261 if (TREE_CODE (lhs) == SSA_NAME)
1262 lhs = SSA_NAME_VAR (lhs);
1263 bitmap_set_bit (results_of_malloc, DECL_UID (lhs));
1264 }
1265 break;
1266 default:
1267 break;
1268 }
1269 break;
1270 default:
1271 break;
1272 }
1273 *walk_subtrees = 0;
1274 }
1275 break;
1276
1277 case ADDR_EXPR:
1278     /* This case is here to find addresses on the rhs of constructors
1279        in the DECL_INITIAL of static variables.  */
1280 check_rhs_var (t);
1281 *walk_subtrees = 0;
1282 break;
1283
1284 case CALL_EXPR:
1285 check_call (t);
1286 *walk_subtrees = 0;
1287 break;
1288
1289 case ASM_EXPR:
1290 get_asm_expr_operands (t);
1291 *walk_subtrees = 0;
1292 break;
1293
1294 default:
1295 break;
1296 }
1297 return NULL;
1298 }
1299
1300
1301 /* The init routine for analyzing global static variable usage. See
1302 comments at top for description. */
1303 static void
1304 ipa_init (void)
1305 {
1306 bitmap_obstack_initialize (&ipa_obstack);
1307 global_types_exposed_parameter = BITMAP_ALLOC (&ipa_obstack);
1308 global_types_full_escape = BITMAP_ALLOC (&ipa_obstack);
1309 global_types_seen = BITMAP_ALLOC (&ipa_obstack);
1310 results_of_malloc = BITMAP_ALLOC (&ipa_obstack);
1311
1312 uid_to_canon_type = splay_tree_new (splay_tree_compare_ints, 0, 0);
1313 all_canon_types = splay_tree_new (compare_type_brand, 0, 0);
1314 type_to_canon_type = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1315 uid_to_subtype_map = splay_tree_new (splay_tree_compare_ints, 0, 0);
1316 uid_to_addressof_down_map = splay_tree_new (splay_tree_compare_ints, 0, 0);
1317 uid_to_addressof_up_map = splay_tree_new (splay_tree_compare_ints, 0, 0);
1318
1319 /* There are some shared nodes, in particular the initializers on
1320 static declarations. We do not need to scan them more than once
1321 since all we would be interested in are the addressof
1322 operations. */
1323 visited_nodes = pointer_set_create ();
1324 initialized = true;
1325 }
1326
1327 /* Check the rhs of the initialization of the static or global
1328    variable VNODE to see if it contains any addressof operations.
1329    Note that some of these variables may not even be referenced in
1330    the code in this compilation unit, but their right hand sides may
1331    contain references to variables defined within this unit.  */
1332
1333 static void
1334 analyze_variable (struct varpool_node *vnode)
1335 {
1336 tree global = vnode->decl;
1337 tree type = get_canon_type (TREE_TYPE (global), false, false);
1338
1339 /* If this variable has exposure beyond the compilation unit, add
1340 its type to the global types. */
1341
1342 if (vnode->externally_visible)
1343 mark_interesting_type (type, FULL_ESCAPE);
1344
1345 gcc_assert (TREE_CODE (global) == VAR_DECL);
1346
1347 if (DECL_INITIAL (global))
1348 walk_tree (&DECL_INITIAL (global), scan_for_refs, NULL, visited_nodes);
1349 }
1350
1351 /* This is the main routine for finding the reference patterns for
1352 global variables within a function FN. */
1353
1354 static void
1355 analyze_function (struct cgraph_node *fn)
1356 {
1357 tree decl = fn->decl;
1358 check_function_parameter_and_return_types (decl,
1359 fn->local.externally_visible);
1360 if (dump_file)
1361 fprintf (dump_file, "\n local analysis of %s", cgraph_node_name (fn));
1362
1363 {
1364 struct function *this_cfun = DECL_STRUCT_FUNCTION (decl);
1365 basic_block this_block;
1366
1367 FOR_EACH_BB_FN (this_block, this_cfun)
1368 {
1369 block_stmt_iterator bsi;
1370 for (bsi = bsi_start (this_block); !bsi_end_p (bsi); bsi_next (&bsi))
1371 walk_tree (bsi_stmt_ptr (bsi), scan_for_refs,
1372 fn, visited_nodes);
1373 }
1374 }
1375
1376 /* There may be const decls with interesting right hand sides. */
1377 if (DECL_STRUCT_FUNCTION (decl))
1378 {
1379 tree step;
1380 for (step = DECL_STRUCT_FUNCTION (decl)->unexpanded_var_list;
1381 step;
1382 step = TREE_CHAIN (step))
1383 {
1384 tree var = TREE_VALUE (step);
1385 if (TREE_CODE (var) == VAR_DECL
1386 && DECL_INITIAL (var)
1387 && !TREE_STATIC (var))
1388 walk_tree (&DECL_INITIAL (var), scan_for_refs,
1389 fn, visited_nodes);
1390 get_canon_type (TREE_TYPE (var), false, false);
1391 }
1392 }
1393 }
1394
1395 \f
1396
1397 /* Convert a type UID into a type.  */
1398 static tree
1399 type_for_uid (int uid)
1400 {
1401 splay_tree_node result =
1402 splay_tree_lookup (uid_to_canon_type, (splay_tree_key) uid);
1403
1404 if (result)
1405 return (tree) result->value;
1406 else return NULL;
1407 }
1408
1409 /* Return a bitmap with the subtypes of the type for UID.  If it
1410    does not exist, return either NULL or a new bitmap depending on the
1411    value of CREATE.  */
1412
1413 static bitmap
1414 subtype_map_for_uid (int uid, bool create)
1415 {
1416 splay_tree_node result = splay_tree_lookup (uid_to_subtype_map,
1417 (splay_tree_key) uid);
1418
1419 if (result)
1420 return (bitmap) result->value;
1421 else if (create)
1422 {
1423 bitmap subtype_map = BITMAP_ALLOC (&ipa_obstack);
1424 splay_tree_insert (uid_to_subtype_map,
1425 uid,
1426 (splay_tree_value)subtype_map);
1427 return subtype_map;
1428 }
1429 else return NULL;
1430 }
1431
1432 /* Mark all of the supertypes and field types of TYPE as being seen.
1433 Also accumulate the subtypes for each type so that
1434 close_types_full_escape can mark a subtype as escaping if the
1435 supertype escapes. */
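
/* For instance (an illustrative sketch): if "struct derived" lists
   "struct base" in its TYPE_BINFO, the bit for "derived" is set in the
   subtype map of "base", so that a later escape of "base" can be
   propagated down to "derived" by close_type_full_escape.  */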
1436
1437 static void
1438 close_type_seen (tree type)
1439 {
1440 tree field;
1441 int i, uid;
1442 tree binfo, base_binfo;
1443
1444 /* See thru all pointer tos and array ofs. */
1445 type = get_canon_type (type, true, true);
1446 if (!type)
1447 return;
1448
1449 uid = TYPE_UID (type);
1450
1451 if (bitmap_bit_p (been_there_done_that, uid))
1452 return;
1453 bitmap_set_bit (been_there_done_that, uid);
1454
1455 /* If we are doing a language with a type hierarchy, mark all of
1456 the superclasses. */
1457 if (TYPE_BINFO (type))
1458 for (binfo = TYPE_BINFO (type), i = 0;
1459 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1460 {
1461 tree binfo_type = BINFO_TYPE (base_binfo);
1462 bitmap subtype_map = subtype_map_for_uid
1463 (TYPE_UID (TYPE_MAIN_VARIANT (binfo_type)), true);
1464 bitmap_set_bit (subtype_map, uid);
1465 close_type_seen (get_canon_type (binfo_type, true, true));
1466 }
1467
1468 /* If the field is a struct or union type, mark all of the
1469 subfields. */
1470 for (field = TYPE_FIELDS (type);
1471 field;
1472 field = TREE_CHAIN (field))
1473 {
1474 tree field_type;
1475 if (TREE_CODE (field) != FIELD_DECL)
1476 continue;
1477
1478 field_type = TREE_TYPE (field);
1479 if (ipa_type_escape_star_count_of_interesting_or_array_type (field_type) >= 0)
1480 close_type_seen (get_canon_type (field_type, true, true));
1481 }
1482 }
1483
1484 /* Take a TYPE that has been passed by value to an external function
1485 and mark all of the fields that have pointer types as escaping. For
1486 any of the non pointer types that are structures or unions,
1487 recurse. TYPE is never a pointer type. */
1488
1489 static void
1490 close_type_exposed_parameter (tree type)
1491 {
1492 tree field;
1493 int uid;
1494
1495 type = get_canon_type (type, false, false);
1496 if (!type)
1497 return;
1498 uid = TYPE_UID (type);
1499 gcc_assert (!POINTER_TYPE_P (type));
1500
1501 if (bitmap_bit_p (been_there_done_that, uid))
1502 return;
1503 bitmap_set_bit (been_there_done_that, uid);
1504
1505 /* If the field is a struct or union type, mark all of the
1506 subfields. */
1507 for (field = TYPE_FIELDS (type);
1508 field;
1509 field = TREE_CHAIN (field))
1510 {
1511 tree field_type;
1512
1513 if (TREE_CODE (field) != FIELD_DECL)
1514 continue;
1515
1516 field_type = get_canon_type (TREE_TYPE (field), false, false);
1517 mark_interesting_type (field_type, EXPOSED_PARAMETER);
1518
1519 /* Only recurse for non pointer types of structures and unions. */
1520 if (ipa_type_escape_star_count_of_interesting_type (field_type) == 0)
1521 close_type_exposed_parameter (field_type);
1522 }
1523 }
1524
1525 /* The next function handles the case where a type fully escapes.
1526    This means that not only does the type itself escape, but also:
1527
1528    a) the type of every field escapes recursively, and
1529    b) the type of every subtype escapes, as well as the supertype and
1530       all of the pointed-to types of each field.
1531
1532    Note that pointer-to types are not marked as escaping themselves;
1533    if the pointed-to type escapes, the pointer-to type escapes with it.
1534
1535 Take a TYPE that has had the address taken for an instance of it
1536 and mark all of the types for its fields as having their addresses
1537 taken. */
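
/* A sketch of the effect (hypothetical types, not from the sources):
   if

     struct outer { struct inner in; struct other *p; };

   fully escapes, then "struct inner" and "struct other" are marked as
   fully escaping as well, along with any type recorded as a subtype of
   "struct outer" in the subtype map.  */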
1538
1539 static void
1540 close_type_full_escape (tree type)
1541 {
1542 tree field;
1543 unsigned int i;
1544 int uid;
1545 tree binfo, base_binfo;
1546 bitmap_iterator bi;
1547 bitmap subtype_map;
1548 splay_tree_node address_result;
1549
1550 /* Strip off any pointer or array types. */
1551 type = get_canon_type (type, true, true);
1552 if (!type)
1553 return;
1554 uid = TYPE_UID (type);
1555
1556 if (bitmap_bit_p (been_there_done_that, uid))
1557 return;
1558 bitmap_set_bit (been_there_done_that, uid);
1559
1560 subtype_map = subtype_map_for_uid (uid, false);
1561
1562 /* If we are doing a language with a type hierarchy, mark all of
1563 the superclasses. */
1564 if (TYPE_BINFO (type))
1565 for (binfo = TYPE_BINFO (type), i = 0;
1566 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1567 {
1568 tree binfotype = BINFO_TYPE (base_binfo);
1569 binfotype = mark_type (binfotype, FULL_ESCAPE);
1570 close_type_full_escape (binfotype);
1571 }
1572
1573 /* Mark as escaped any types that have been down casted to
1574 this type. */
1575 if (subtype_map)
1576 EXECUTE_IF_SET_IN_BITMAP (subtype_map, 0, i, bi)
1577 {
1578 tree subtype = type_for_uid (i);
1579 subtype = mark_type (subtype, FULL_ESCAPE);
1580 close_type_full_escape (subtype);
1581 }
1582
1583 /* If the field is a struct or union type, mark all of the
1584 subfields. */
1585 for (field = TYPE_FIELDS (type);
1586 field;
1587 field = TREE_CHAIN (field))
1588 {
1589 tree field_type;
1590 if (TREE_CODE (field) != FIELD_DECL)
1591 continue;
1592
1593 field_type = TREE_TYPE (field);
1594 if (ipa_type_escape_star_count_of_interesting_or_array_type (field_type) >= 0)
1595 {
1596 field_type = mark_type (field_type, FULL_ESCAPE);
1597 close_type_full_escape (field_type);
1598 }
1599 }
1600
1601 /* For all of the types A that contain this type B and were part of
1602 an expression like "&...A.B...", mark the A's as escaping. */
1603 address_result = splay_tree_lookup (uid_to_addressof_up_map,
1604 (splay_tree_key) uid);
1605 if (address_result)
1606 {
1607 bitmap containing_classes = (bitmap) address_result->value;
1608 EXECUTE_IF_SET_IN_BITMAP (containing_classes, 0, i, bi)
1609 {
1610 close_type_full_escape (type_for_uid (i));
1611 }
1612 }
1613 }
1614
1615 /* Transitively close the addressof bitmap for the type with UID.
1616 This means that if we had a.b and b.c, a would have both b and c in
1617    its map.  */
1618
1619 static bitmap
1620 close_addressof_down (int uid)
1621 {
1622 bitmap_iterator bi;
1623 splay_tree_node result =
1624 splay_tree_lookup (uid_to_addressof_down_map, (splay_tree_key) uid);
1625 bitmap map = NULL;
1626 bitmap new_map;
1627 unsigned int i;
1628
1629 if (result)
1630 map = (bitmap) result->value;
1631 else
1632 return NULL;
1633
1634 if (bitmap_bit_p (been_there_done_that, uid))
1635 return map;
1636 bitmap_set_bit (been_there_done_that, uid);
1637
1638 /* If the type escapes, get rid of the addressof map, it will not be
1639 needed. */
1640 if (bitmap_bit_p (global_types_full_escape, uid))
1641 {
1642 BITMAP_FREE (map);
1643 splay_tree_remove (uid_to_addressof_down_map, (splay_tree_key) uid);
1644 return NULL;
1645 }
1646
1647   /* The new map will have all of the bits of the old map plus the
1648      bits for all of the transitively enclosed fields.  */
1649 new_map = BITMAP_ALLOC (&ipa_obstack);
1650
1651 EXECUTE_IF_SET_IN_BITMAP (map, 0, i, bi)
1652 {
1653 bitmap submap = close_addressof_down (i);
1654 bitmap_set_bit (new_map, i);
1655 if (submap)
1656 bitmap_ior_into (new_map, submap);
1657 }
1658 result->value = (splay_tree_value) new_map;
1659
1660 BITMAP_FREE (map);
1661 return new_map;
1662 }
1663
1664 \f
1665 /* The main entry point for type escape analysis. */
1666
1667 static unsigned int
1668 type_escape_execute (void)
1669 {
1670 struct cgraph_node *node;
1671 struct varpool_node *vnode;
1672 unsigned int i;
1673 bitmap_iterator bi;
1674 splay_tree_node result;
1675
1676 ipa_init ();
1677
1678 /* Process all of the variables first. */
1679 FOR_EACH_STATIC_VARIABLE (vnode)
1680 analyze_variable (vnode);
1681
1682   /* Process all of the functions next.
1683
1684 We do not want to process any of the clones so we check that this
1685 is a master clone. However, we do need to process any
1686 AVAIL_OVERWRITABLE functions (these are never clones) because
1687      they may cause a type to escape.
1688 */
1689 for (node = cgraph_nodes; node; node = node->next)
1690 if (node->analyzed
1691 && (cgraph_is_master_clone (node)
1692 || (cgraph_function_body_availability (node) == AVAIL_OVERWRITABLE)))
1693 analyze_function (node);
1694
1695
1696 pointer_set_destroy (visited_nodes);
1697 visited_nodes = NULL;
1698
1699 /* Do all of the closures to discover which types escape the
1700 compilation unit. */
1701
1702 been_there_done_that = BITMAP_ALLOC (&ipa_obstack);
1703 bitmap_tmp = BITMAP_ALLOC (&ipa_obstack);
1704
1705 /* Examine the types that we have directly seen in scanning the code
1706 and add to that any contained types or superclasses. */
1707
1708 bitmap_copy (bitmap_tmp, global_types_seen);
1709 EXECUTE_IF_SET_IN_BITMAP (bitmap_tmp, 0, i, bi)
1710 {
1711 tree type = type_for_uid (i);
1712 /* Only look at records and unions and pointer tos. */
1713 if (ipa_type_escape_star_count_of_interesting_or_array_type (type) >= 0)
1714 close_type_seen (type);
1715 }
1716 bitmap_clear (been_there_done_that);
1717
1718 /* Examine all of the types passed by value and mark any enclosed
1719 pointer types as escaping. */
1720 bitmap_copy (bitmap_tmp, global_types_exposed_parameter);
1721 EXECUTE_IF_SET_IN_BITMAP (bitmap_tmp, 0, i, bi)
1722 {
1723 close_type_exposed_parameter (type_for_uid (i));
1724 }
1725 bitmap_clear (been_there_done_that);
1726
1727 /* Close the types for escape. If something escapes, then any
1728 enclosed types escape as well as any subtypes. */
1729 bitmap_copy (bitmap_tmp, global_types_full_escape);
1730 EXECUTE_IF_SET_IN_BITMAP (bitmap_tmp, 0, i, bi)
1731 {
1732 close_type_full_escape (type_for_uid (i));
1733 }
1734 bitmap_clear (been_there_done_that);
1735
1736 /* Before this pass, the uid_to_addressof_down_map for type X
1737 contained an entry for Y if there had been an operation of the
1738 form &X.Y. This step adds all of the fields contained within Y
1739 (recursively) to X's map. */
1740
1741 result = splay_tree_min (uid_to_addressof_down_map);
1742 while (result)
1743 {
1744 int uid = result->key;
1745 /* Close the addressof map, i.e. copy all of the transitive
1746 substructures up to this level. */
1747 close_addressof_down (uid);
1748 result = splay_tree_successor (uid_to_addressof_down_map, uid);
1749 }
1750
1751 /* Do not need the array types and pointer types in the persistent
1752 data structures. */
1753 result = splay_tree_min (all_canon_types);
1754 while (result)
1755 {
1756 tree type = (tree) result->value;
1757 tree key = (tree) result->key;
1758 if (POINTER_TYPE_P (type)
1759 || TREE_CODE (type) == ARRAY_TYPE)
1760 {
1761 splay_tree_remove (all_canon_types, (splay_tree_key) result->key);
1762 splay_tree_remove (type_to_canon_type, (splay_tree_key) type);
1763 splay_tree_remove (uid_to_canon_type, (splay_tree_key) TYPE_UID (type));
1764 bitmap_clear_bit (global_types_seen, TYPE_UID (type));
1765 }
1766 result = splay_tree_successor (all_canon_types, (splay_tree_key) key);
1767 }
1768
1769 if (dump_file)
1770 {
1771 EXECUTE_IF_SET_IN_BITMAP (global_types_seen, 0, i, bi)
1772 {
1773 /* The pointer types are in the global_types_full_escape
1774 bitmap but not in the backwards map. They also contain
1775 no useful information since they are not marked. */
1776 tree type = type_for_uid (i);
1777         fprintf (dump_file, "type %d ", i);
1778         print_generic_expr (dump_file, type, 0);
1779         if (bitmap_bit_p (global_types_full_escape, i))
1780           fprintf (dump_file, " escaped\n");
1781         else
1782           fprintf (dump_file, " contained\n");
1783 }
1784 }
1785
1786 /* Get rid of uid_to_addressof_up_map and its bitmaps. */
1787 result = splay_tree_min (uid_to_addressof_up_map);
1788 while (result)
1789 {
1790 int uid = (int)result->key;
1791 bitmap bm = (bitmap)result->value;
1792
1793 BITMAP_FREE (bm);
1794 splay_tree_remove (uid_to_addressof_up_map, (splay_tree_key) uid);
1795 result = splay_tree_successor (uid_to_addressof_up_map, uid);
1796 }
1797
1798 /* Get rid of the subtype map. */
1799 result = splay_tree_min (uid_to_subtype_map);
1800 while (result)
1801 {
1802 bitmap b = (bitmap)result->value;
1803 BITMAP_FREE(b);
1804 splay_tree_remove (uid_to_subtype_map, result->key);
1805 result = splay_tree_min (uid_to_subtype_map);
1806 }
1807 splay_tree_delete (uid_to_subtype_map);
1808 uid_to_subtype_map = NULL;
1809
1810 BITMAP_FREE (global_types_exposed_parameter);
1811 BITMAP_FREE (been_there_done_that);
1812 BITMAP_FREE (bitmap_tmp);
1813 BITMAP_FREE (results_of_malloc);
1814 return 0;
1815 }
1816
1817 static bool
1818 gate_type_escape_vars (void)
1819 {
1820 return (flag_unit_at_a_time != 0 && flag_ipa_type_escape
1821 /* Don't bother doing anything if the program has errors. */
1822 && !(errorcount || sorrycount));
1823 }
1824
1825 struct tree_opt_pass pass_ipa_type_escape =
1826 {
1827 "type-escape-var", /* name */
1828 gate_type_escape_vars, /* gate */
1829 type_escape_execute, /* execute */
1830 NULL, /* sub */
1831 NULL, /* next */
1832 0, /* static_pass_number */
1833 TV_IPA_TYPE_ESCAPE, /* tv_id */
1834 0, /* properties_required */
1835 0, /* properties_provided */
1836 0, /* properties_destroyed */
1837 0, /* todo_flags_start */
1838 0, /* todo_flags_finish */
1839 0 /* letter */
1840 };
1841