ipa-type-escape.h: Expose function is_array_access_through_pointer_and_index.
gcc/ipa-type-escape.c
1 /* Type based alias analysis.
2 Copyright (C) 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
3 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This pass determines which types in the program contain only
22 instances that are completely encapsulated by the compilation unit.
23 Those types that are encapsulated must also pass the further
24 requirement that there be no bad operations on any instances of
25 those types.
26
27 A great deal of freedom in compilation is allowed for the instances
28 of those types that pass these conditions.
29 */
30
31 /* The code in this module is called by the ipa pass manager. It
32 should be one of the later passes since its information is used by
33 the rest of the compilation. */
34
35 #include "config.h"
36 #include "system.h"
37 #include "coretypes.h"
38 #include "tm.h"
39 #include "tree.h"
40 #include "tree-flow.h"
41 #include "tree-inline.h"
42 #include "tree-pass.h"
43 #include "langhooks.h"
44 #include "pointer-set.h"
45 #include "ggc.h"
46 #include "ipa-utils.h"
47 #include "ipa-type-escape.h"
48 #include "c-common.h"
49 #include "tree-gimple.h"
50 #include "cgraph.h"
51 #include "output.h"
52 #include "flags.h"
53 #include "timevar.h"
54 #include "diagnostic.h"
55 #include "langhooks.h"
56
 57 /* Some of the aliasing is called very early, before this phase is
 58    called.  To ensure that this is not a problem, we keep track of
 59    whether this phase has been run.  */
60 static bool initialized = false;
61
62 /* Scratch bitmap for avoiding work. */
63 static bitmap been_there_done_that;
64 static bitmap bitmap_tmp;
65
66 /* There are two levels of escape that types can undergo.
67
68 EXPOSED_PARAMETER - some instance of the variable is
69 passed by value into an externally visible function or some
70 instance of the variable is passed out of an externally visible
71 function as a return value. In this case any of the fields of the
72 variable that are pointer types end up having their types marked as
73 FULL_ESCAPE.
74
75 FULL_ESCAPE - when bad things happen to good types. One of the
76 following things happens to the type: (a) either an instance of the
77 variable has its address passed to an externally visible function,
78 (b) the address is taken and some bad cast happens to the address
79 or (c) explicit arithmetic is done to the address.
80 */
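/* For illustration (a hypothetical example, not taken from the pass
   itself): given

       struct box { struct payload *p; };
       extern void ext_by_value (struct box b);
       extern void ext_by_ref (struct box *b);

   passing a struct box by value to ext_by_value marks struct box as
   EXPOSED_PARAMETER, and the pointed-to type of its pointer field
   (struct payload) ends up FULL_ESCAPE; passing the address of a
   struct box to ext_by_ref marks struct box itself as FULL_ESCAPE.  */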
81
82 enum escape_t
83 {
84 EXPOSED_PARAMETER,
85 FULL_ESCAPE
86 };
87
88 /* The following two bit vectors global_types_* correspond to
89 previous cases above. During the analysis phase, a bit is set in
90 one of these vectors if an operation of the offending class is
91 discovered to happen on the associated type. */
92
93 static bitmap global_types_exposed_parameter;
94 static bitmap global_types_full_escape;
95
96 /* All of the types seen in this compilation unit. */
97 static bitmap global_types_seen;
98 /* Reverse map to take a canon uid and map it to a canon type. Uid's
99 are never manipulated unless they are associated with a canon
100 type. */
101 static splay_tree uid_to_canon_type;
102
103 /* Internal structure of type mapping code. This maps a canon type
104 name to its canon type. */
105 static splay_tree all_canon_types;
106
107 /* Map from type clones to the single canon type. */
108 static splay_tree type_to_canon_type;
109
110 /* A splay tree of bitmaps. An element X in the splay tree has a bit
111 set in its bitmap at TYPE_UID (TYPE_MAIN_VARIANT (Y)) if there was
112 an operation in the program of the form "&X.Y". */
113 static splay_tree uid_to_addressof_down_map;
114
115 /* A splay tree of bitmaps. An element Y in the splay tree has a bit
116 set in its bitmap at TYPE_UID (TYPE_MAIN_VARIANT (X)) if there was
117 an operation in the program of the form "&X.Y". */
118 static splay_tree uid_to_addressof_up_map;
119
120 /* Tree to hold the subtype maps used to mark subtypes of escaped
121 types. */
122 static splay_tree uid_to_subtype_map;
123
 124 /* Records tree nodes seen while scanning for references.  Simply using
 125    walk_tree_without_duplicates doesn't guarantee each node is visited
 126    once, because it gets a new htab upon each recursive call from
 127    scan_for_refs.  */
128 static struct pointer_set_t *visited_nodes;
129
 130 /* Statements already visited by the walk_use_def_chains function,
 131    which is called recursively.  */
132 static struct pointer_set_t *visited_stmts;
133
134 static bitmap_obstack ipa_obstack;
135
136 /* Static functions from this file that are used
137 before being defined. */
138 static unsigned int look_for_casts (tree lhs ATTRIBUTE_UNUSED, tree);
139 static bool is_cast_from_non_pointer (tree, tree, void *);
140
141 /* Get the name of TYPE or return the string "<UNNAMED>". */
142 static const char*
143 get_name_of_type (tree type)
144 {
145 tree name = TYPE_NAME (type);
146
147 if (!name)
148 /* Unnamed type, do what you like here. */
149 return "<UNNAMED>";
150
 151 /* It will be a TYPE_DECL in the case of a typedef; otherwise, an
 152    IDENTIFIER_NODE.  */
153 if (TREE_CODE (name) == TYPE_DECL)
154 {
155 /* Each DECL has a DECL_NAME field which contains an
156 IDENTIFIER_NODE. (Some decls, most often labels, may have
157 zero as the DECL_NAME). */
158 if (DECL_NAME (name))
159 return IDENTIFIER_POINTER (DECL_NAME (name));
160 else
161 /* Unnamed type, do what you like here. */
162 return "<UNNAMED>";
163 }
164 else if (TREE_CODE (name) == IDENTIFIER_NODE)
165 return IDENTIFIER_POINTER (name);
166 else
167 return "<UNNAMED>";
168 }
169
170 struct type_brand_s
171 {
172 const char* name;
173 int seq;
174 };
175
176 /* Splay tree comparison function on type_brand_s structures. */
177
178 static int
179 compare_type_brand (splay_tree_key sk1, splay_tree_key sk2)
180 {
181 struct type_brand_s * k1 = (struct type_brand_s *) sk1;
182 struct type_brand_s * k2 = (struct type_brand_s *) sk2;
183
184 int value = strcmp(k1->name, k2->name);
185 if (value == 0)
186 return k2->seq - k1->seq;
187 else
188 return value;
189 }
190
191 /* All of the "unique_type" code is a hack to get around the sleazy
 192 implementation used to compile more than one file.  Currently gcc does
193 not get rid of multiple instances of the same type that have been
194 collected from different compilation units. */
195 /* This is a trivial algorithm for removing duplicate types. This
196 would not work for any language that used structural equivalence as
197 the basis of its type system. */
198 /* Return TYPE if no type compatible with TYPE has been seen so far,
199 otherwise return a type compatible with TYPE that has already been
200 processed. */
201
202 static tree
203 discover_unique_type (tree type)
204 {
205 struct type_brand_s * brand = XNEW (struct type_brand_s);
206 int i = 0;
207 splay_tree_node result;
208
209 brand->name = get_name_of_type (type);
210
211 while (1)
212 {
213 brand->seq = i++;
214 result = splay_tree_lookup (all_canon_types, (splay_tree_key) brand);
215
216 if (result)
217 {
218 /* Create an alias since this is just the same as
219 other_type. */
220 tree other_type = (tree) result->value;
221 if (types_compatible_p (type, other_type))
222 {
223 free (brand);
224 /* Insert this new type as an alias for other_type. */
225 splay_tree_insert (type_to_canon_type,
226 (splay_tree_key) type,
227 (splay_tree_value) other_type);
228 return other_type;
229 }
230 /* Not compatible, look for next instance with same name. */
231 }
232 else
233 {
234 /* No more instances, create new one since this is the first
235 time we saw this type. */
236 brand->seq = i++;
237 /* Insert the new brand. */
238 splay_tree_insert (all_canon_types,
239 (splay_tree_key) brand,
240 (splay_tree_value) type);
241
242 /* Insert this new type as an alias for itself. */
243 splay_tree_insert (type_to_canon_type,
244 (splay_tree_key) type,
245 (splay_tree_value) type);
246
247 /* Insert the uid for reverse lookup; */
248 splay_tree_insert (uid_to_canon_type,
249 (splay_tree_key) TYPE_UID (type),
250 (splay_tree_value) type);
251
252 bitmap_set_bit (global_types_seen, TYPE_UID (type));
253 return type;
254 }
255 }
256 }
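/* A sketch of the effect of the code above (hypothetical example): if
   two compilation units each define

       struct point { int x, y; };

   two distinct RECORD_TYPE nodes named "point" reach this pass.  The
   first one seen becomes the canon type; the second, being
   types_compatible_p with it, is recorded in type_to_canon_type as an
   alias of the first.  An incompatible type with the same name would
   instead get the next brand seq and its own canon entry.  */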
257
258 /* Return true if TYPE is one of the type classes that we are willing
259 to analyze. This skips the goofy types like arrays of pointers to
260 methods. */
261 static bool
262 type_to_consider (tree type)
263 {
264 /* Strip the *'s off. */
265 type = TYPE_MAIN_VARIANT (type);
266 while (POINTER_TYPE_P (type) || TREE_CODE (type) == ARRAY_TYPE)
267 type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
268
269 switch (TREE_CODE (type))
270 {
271 case BOOLEAN_TYPE:
272 case COMPLEX_TYPE:
273 case ENUMERAL_TYPE:
274 case INTEGER_TYPE:
275 case QUAL_UNION_TYPE:
276 case REAL_TYPE:
277 case FIXED_POINT_TYPE:
278 case RECORD_TYPE:
279 case UNION_TYPE:
280 case VECTOR_TYPE:
281 case VOID_TYPE:
282 return true;
283
284 default:
285 return false;
286 }
287 }
288
289 /* Get the canon type of TYPE. If SEE_THRU_PTRS is true, remove all
290 the POINTER_TOs and if SEE_THRU_ARRAYS is true, remove all of the
291 ARRAY_OFs and POINTER_TOs. */
292
293 static tree
294 get_canon_type (tree type, bool see_thru_ptrs, bool see_thru_arrays)
295 {
296 splay_tree_node result;
297 /* Strip the *'s off. */
298 if (!type || !type_to_consider (type))
299 return NULL;
300
301 type = TYPE_MAIN_VARIANT (type);
302 if (see_thru_arrays)
303 while (POINTER_TYPE_P (type) || TREE_CODE (type) == ARRAY_TYPE)
304 type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
305
306 else if (see_thru_ptrs)
307 while (POINTER_TYPE_P (type))
308 type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
309
310 result = splay_tree_lookup(type_to_canon_type, (splay_tree_key) type);
311
312 if (result == NULL)
313 return discover_unique_type (type);
314 else return (tree) result->value;
315 }
316
317 /* Same as GET_CANON_TYPE, except return the TYPE_ID rather than the
318 TYPE. */
319
320 static int
321 get_canon_type_uid (tree type, bool see_thru_ptrs, bool see_thru_arrays)
322 {
323 type = get_canon_type (type, see_thru_ptrs, see_thru_arrays);
324 if (type)
325 return TYPE_UID(type);
326 else return 0;
327 }
328
329 /* Return 0 if TYPE is a record or union type. Return a positive
330 number if TYPE is a pointer to a record or union. The number is
331 the number of pointer types stripped to get to the record or union
332 type. Return -1 if TYPE is none of the above. */
333
334 int
335 ipa_type_escape_star_count_of_interesting_type (tree type)
336 {
337 int count = 0;
338 /* Strip the *'s off. */
339 if (!type)
340 return -1;
341 type = TYPE_MAIN_VARIANT (type);
342 while (POINTER_TYPE_P (type))
343 {
344 type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
345 count++;
346 }
347
348 /* We are interested in records, and unions only. */
349 if (TREE_CODE (type) == RECORD_TYPE
350 || TREE_CODE (type) == QUAL_UNION_TYPE
351 || TREE_CODE (type) == UNION_TYPE)
352 return count;
353 else
354 return -1;
355 }
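/* Illustrative results for the function above (a sketch, assuming
   "struct s" is a record type):

       struct s       -> 0
       struct s *     -> 1
       struct s **    -> 2
       int *          -> -1  (not a record or union)
       struct s *[4]  -> -1 here, but 2 from the _or_array_type
                         variant below, which also strips ARRAY_TYPEs.  */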
356
357
 358 /* Return 0 if TYPE is a record or union type.  Return a positive
 359    number if TYPE is a pointer to, or array of, a record or union.  The
 360    number is the number of pointer and array types stripped to get to
 361    the record or union type.  Return -1 if TYPE is none of the above. */
362
363 int
364 ipa_type_escape_star_count_of_interesting_or_array_type (tree type)
365 {
366 int count = 0;
367 /* Strip the *'s off. */
368 if (!type)
369 return -1;
370 type = TYPE_MAIN_VARIANT (type);
371 while (POINTER_TYPE_P (type) || TREE_CODE (type) == ARRAY_TYPE)
372 {
373 type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
374 count++;
375 }
376
377 /* We are interested in records, and unions only. */
378 if (TREE_CODE (type) == RECORD_TYPE
379 || TREE_CODE (type) == QUAL_UNION_TYPE
380 || TREE_CODE (type) == UNION_TYPE)
381 return count;
382 else
383 return -1;
384 }
385
386
 387 /* Return true if the record or union TYPE passed in is contained
 388    within (does not escape) this compilation unit.  Note that all of the
 389    pointer-to's are removed before testing since these may not be correct. */
390
391 bool
392 ipa_type_escape_type_contained_p (tree type)
393 {
394 if (!initialized)
395 return false;
396 return !bitmap_bit_p (global_types_full_escape,
397 get_canon_type_uid (type, true, false));
398 }
399
400 /* Return true if a modification to a field of type FIELD_TYPE cannot
401 clobber a record of RECORD_TYPE. */
402
403 bool
404 ipa_type_escape_field_does_not_clobber_p (tree record_type, tree field_type)
405 {
406 splay_tree_node result;
407 int uid;
408
409 if (!initialized)
410 return false;
411
412 /* Strip off all of the pointer tos on the record type. Strip the
413 same number of pointer tos from the field type. If the field
414 type has fewer, it could not have been aliased. */
415 record_type = TYPE_MAIN_VARIANT (record_type);
416 field_type = TYPE_MAIN_VARIANT (field_type);
417 while (POINTER_TYPE_P (record_type))
418 {
419 record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_type));
420 if (POINTER_TYPE_P (field_type))
421 field_type = TYPE_MAIN_VARIANT (TREE_TYPE (field_type));
422 else
423 /* However, if field_type is a union, this quick test is not
424 correct since one of the variants of the union may be a
425 pointer to type and we cannot see across that here. So we
426 just strip the remaining pointer tos off the record type
427 and fall thru to the more precise code. */
428 if (TREE_CODE (field_type) == QUAL_UNION_TYPE
429 || TREE_CODE (field_type) == UNION_TYPE)
430 {
431 while (POINTER_TYPE_P (record_type))
432 record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_type));
433 break;
434 }
435 else
436 return true;
437 }
438
439 record_type = get_canon_type (record_type, true, true);
440 /* The record type must be contained. The field type may
441 escape. */
442 if (!ipa_type_escape_type_contained_p (record_type))
443 return false;
444
445 uid = TYPE_UID (record_type);
446 result = splay_tree_lookup (uid_to_addressof_down_map, (splay_tree_key) uid);
447
448 if (result)
449 {
450 bitmap field_type_map = (bitmap) result->value;
451 uid = get_canon_type_uid (field_type, true, true);
452 /* If the bit is there, the address was taken. If not, it
453 wasn't. */
454 return !bitmap_bit_p (field_type_map, uid);
455 }
456 else
457 /* No bitmap means no addresses were taken. */
458 return true;
459 }
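/* A brief example for the function above (hypothetical types): given

       struct outer { struct inner in; int k; };

   if no expression of the form "&o.in" appears anywhere in the
   program, then uid_to_addressof_down_map has no bit for struct inner
   under struct outer, and a store through a struct inner pointer
   cannot clobber a struct outer, provided struct outer itself is
   contained.  */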
460
461
 462 /* Add TYPE to the suspect type set for ESCAPE_STATUS.  Return the
 463    canonical form of TYPE, or NULL if TYPE is not a type we analyze. */
464
465 static tree
466 mark_type (tree type, enum escape_t escape_status)
467 {
468 bitmap map = NULL;
469 int uid;
470
471 type = get_canon_type (type, true, true);
472 if (!type)
473 return NULL;
474
475 switch (escape_status)
476 {
477 case EXPOSED_PARAMETER:
478 map = global_types_exposed_parameter;
479 break;
480 case FULL_ESCAPE:
481 map = global_types_full_escape;
482 break;
483 }
484
485 uid = TYPE_UID (type);
486 if (bitmap_bit_p (map, uid))
487 return type;
488 else
489 {
490 bitmap_set_bit (map, uid);
491 if (escape_status == FULL_ESCAPE)
492 {
493 /* Efficiency hack. When things are bad, do not mess around
494 with this type anymore. */
495 bitmap_set_bit (global_types_exposed_parameter, uid);
496 }
497 }
498 return type;
499 }
500
501 /* Add interesting TYPE to the suspect type set. If the set is
502 EXPOSED_PARAMETER and the TYPE is a pointer type, the set is
503 changed to FULL_ESCAPE. */
504
505 static void
506 mark_interesting_type (tree type, enum escape_t escape_status)
507 {
508 if (!type) return;
509 if (ipa_type_escape_star_count_of_interesting_type (type) >= 0)
510 {
511 if ((escape_status == EXPOSED_PARAMETER)
512 && POINTER_TYPE_P (type))
 513 /* EXPOSED_PARAMETER is only used for structs or unions that are
 514    passed by value.  Anything passed by reference to an external
 515    function fully exposes the type. */
516 mark_type (type, FULL_ESCAPE);
517 else
518 mark_type (type, escape_status);
519 }
520 }
521
 522 /* Return true if PARENT is a supertype of CHILD.  Both types must be
523 known to be structures or unions. */
524
525 static bool
526 parent_type_p (tree parent, tree child)
527 {
528 int i;
529 tree binfo, base_binfo;
530 if (TYPE_BINFO (parent))
531 for (binfo = TYPE_BINFO (parent), i = 0;
532 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
533 {
534 tree binfotype = BINFO_TYPE (base_binfo);
535 if (binfotype == child)
536 return true;
537 else if (parent_type_p (binfotype, child))
538 return true;
539 }
540 if (TREE_CODE (parent) == UNION_TYPE
541 || TREE_CODE (parent) == QUAL_UNION_TYPE)
542 {
543 tree field;
544 /* Search all of the variants in the union to see if one of them
545 is the child. */
546 for (field = TYPE_FIELDS (parent);
547 field;
548 field = TREE_CHAIN (field))
549 {
550 tree field_type;
551 if (TREE_CODE (field) != FIELD_DECL)
552 continue;
553
554 field_type = TREE_TYPE (field);
555 if (field_type == child)
556 return true;
557 }
558
559 /* If we did not find it, recursively ask the variants if one of
560 their children is the child type. */
561 for (field = TYPE_FIELDS (parent);
562 field;
563 field = TREE_CHAIN (field))
564 {
565 tree field_type;
566 if (TREE_CODE (field) != FIELD_DECL)
567 continue;
568
569 field_type = TREE_TYPE (field);
570 if (TREE_CODE (field_type) == RECORD_TYPE
571 || TREE_CODE (field_type) == QUAL_UNION_TYPE
572 || TREE_CODE (field_type) == UNION_TYPE)
573 if (parent_type_p (field_type, child))
574 return true;
575 }
576 }
577
578 if (TREE_CODE (parent) == RECORD_TYPE)
579 {
580 tree field;
581 for (field = TYPE_FIELDS (parent);
582 field;
583 field = TREE_CHAIN (field))
584 {
585 tree field_type;
586 if (TREE_CODE (field) != FIELD_DECL)
587 continue;
588
589 field_type = TREE_TYPE (field);
590 if (field_type == child)
591 return true;
592 /* You can only cast to the first field so if it does not
593 match, quit. */
594 if (TREE_CODE (field_type) == RECORD_TYPE
595 || TREE_CODE (field_type) == QUAL_UNION_TYPE
596 || TREE_CODE (field_type) == UNION_TYPE)
597 {
598 if (parent_type_p (field_type, child))
599 return true;
600 else
601 break;
602 }
603 }
604 }
605 return false;
606 }
607
 608 /* Return the number of pointer tos of *TYPE_PTR and strip them off,
 609    leaving the underlying type in *TYPE_PTR. */
610
611 static int
612 count_stars (tree* type_ptr)
613 {
614 tree type = *type_ptr;
615 int i = 0;
616 type = TYPE_MAIN_VARIANT (type);
617 while (POINTER_TYPE_P (type))
618 {
619 type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
620 i++;
621 }
622
623 *type_ptr = type;
624 return i;
625 }
626
627 enum cast_type {
628 CT_UP = 0x1,
629 CT_DOWN = 0x2,
630 CT_SIDEWAYS = 0x4,
631 CT_USELESS = 0x8,
632 CT_FROM_P_BAD = 0x10,
633 CT_FROM_NON_P = 0x20,
634 CT_TO_NON_INTER = 0x40,
635 CT_FROM_MALLOC = 0x80,
636 CT_NO_CAST = 0x100
637 };
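/* These values are used as a bitmask: look_for_casts () ORs together
   the classification of every cast it finds in one expression, so a
   result such as (CT_FROM_NON_P | CT_DOWN) means both kinds of cast
   were seen.  CT_NO_CAST is only returned when no cast was found at
   all.  */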
638
639 /* Check the cast FROM_TYPE to TO_TYPE. This function requires that
640 the two types have already passed the
641 ipa_type_escape_star_count_of_interesting_type test. */
642
643 static enum cast_type
644 check_cast_type (tree to_type, tree from_type)
645 {
646 int to_stars = count_stars (&to_type);
647 int from_stars = count_stars (&from_type);
648 if (to_stars != from_stars)
649 return CT_SIDEWAYS;
650
651 if (to_type == from_type)
652 return CT_USELESS;
653
654 if (parent_type_p (to_type, from_type)) return CT_UP;
655 if (parent_type_p (from_type, to_type)) return CT_DOWN;
656 return CT_SIDEWAYS;
657 }
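/* A sketch of the classification above, with hypothetical types:

       struct base { int i; };
       struct derived { struct base b; int j; };
       struct other { float f; };

   Casts between struct base * and struct derived * are CT_UP or
   CT_DOWN, since the two types are related through parent_type_p;
   a cast from struct base * to struct base * is CT_USELESS; a cast
   from struct base * to struct other *, or between pointers of
   different depth, is CT_SIDEWAYS.  Only CT_SIDEWAYS makes
   check_cast below mark both types as FULL_ESCAPE.  */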
658
 659 /* Return true if VAR is the result of a call to a malloc-like
 660    function (one whose ECF_MALLOC flag is set). */
661
662 static bool
663 is_malloc_result (tree var)
664 {
665 tree def_stmt;
666 tree rhs;
667 int flags;
668
669 if (!var)
670 return false;
671
672 if (SSA_NAME_IS_DEFAULT_DEF (var))
673 return false;
674
675 def_stmt = SSA_NAME_DEF_STMT (var);
676
677 if (TREE_CODE (def_stmt) != GIMPLE_MODIFY_STMT)
678 return false;
679
680 if (var != GIMPLE_STMT_OPERAND (def_stmt, 0))
681 return false;
682
683 rhs = get_call_expr_in (def_stmt);
684
685 if (!rhs)
686 return false;
687
688 flags = call_expr_flags (rhs);
689
690 return ((flags & ECF_MALLOC) != 0);
691
692 }
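/* For example (a sketch in the gimple notation used elsewhere in this
   file), given

       p_1 = malloc (64);

   is_malloc_result (p_1) returns true because the defining statement
   of p_1 is a call whose flags include ECF_MALLOC; a default
   definition or a plain copy of another pointer returns false.  */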
693
 694 /* Check a cast from the type of the variable FROM to TO_TYPE.  Mark
 695    the escaping types if appropriate.  Return the cast_type detected. */
696
697 static enum cast_type
698 check_cast (tree to_type, tree from)
699 {
700 tree from_type = get_canon_type (TREE_TYPE (from), false, false);
701 bool to_interesting_type, from_interesting_type;
702 enum cast_type cast = CT_NO_CAST;
703
704 to_type = get_canon_type (to_type, false, false);
705 if (!from_type || !to_type || from_type == to_type)
706 return cast;
707
708 to_interesting_type =
709 ipa_type_escape_star_count_of_interesting_type (to_type) >= 0;
710 from_interesting_type =
711 ipa_type_escape_star_count_of_interesting_type (from_type) >= 0;
712
713 if (to_interesting_type)
714 if (from_interesting_type)
715 {
716 /* Both types are interesting. This can be one of four types
717 of cast: useless, up, down, or sideways. We do not care
718 about up or useless. Sideways casts are always bad and
719 both sides get marked as escaping. Downcasts are not
720 interesting here because if type is marked as escaping, all
721 of its subtypes escape. */
722 cast = check_cast_type (to_type, from_type);
723 switch (cast)
724 {
725 case CT_UP:
726 case CT_USELESS:
727 case CT_DOWN:
728 break;
729
730 case CT_SIDEWAYS:
731 mark_type (to_type, FULL_ESCAPE);
732 mark_type (from_type, FULL_ESCAPE);
733 break;
734
735 default:
736 break;
737 }
738 }
739 else
740 {
 741 /* This code excludes two cases from being marked as escaped:
 742
 743    1. a cast of an index of an array of structures/unions that
 744    happens before the array element is accessed; such a cast
 745    should not be marked as escaping.
 746    2. a cast from a local that is the result of a call to malloc;
 747    such a cast should not be marked as bad.
 748
 749 */
750
751 if (POINTER_TYPE_P (to_type) && !POINTER_TYPE_P (from_type))
752 cast = CT_FROM_NON_P;
753 else if (TREE_CODE (from) == SSA_NAME
754 && is_malloc_result (from))
755 cast = CT_FROM_MALLOC;
756 else
757 {
758 cast = CT_FROM_P_BAD;
759 mark_type (to_type, FULL_ESCAPE);
760 }
761 }
762 else if (from_interesting_type)
763 {
764 mark_type (from_type, FULL_ESCAPE);
765 cast = CT_TO_NON_INTER;
766 }
767
768 return cast;
769 }
770
771 typedef struct cast
772 {
773 int type;
774 tree stmt;
775 }cast_t;
776
777 /* This function is a callback for walk_tree called from
778 is_cast_from_non_pointer. The data->type is set to be:
779
780 0 - if there is no cast
781 number - the number of casts from non-pointer type
 782    -1 - if there is a cast that makes the type escape
 783
 784    If data->type is a positive number, then data->stmt will contain
 785    the last cast statement encountered during the traversal. */
786
787 static tree
788 is_cast_from_non_pointer_1 (tree *tp, int *walk_subtrees, void *data)
789 {
790 tree def_stmt = *tp;
791
792
793 if (pointer_set_insert (visited_stmts, def_stmt))
794 {
795 *walk_subtrees = 0;
796 return NULL;
797 }
798
799 switch (TREE_CODE (def_stmt))
800 {
801 case GIMPLE_MODIFY_STMT:
802 {
803 use_operand_p use_p;
804 ssa_op_iter iter;
805 tree lhs = GIMPLE_STMT_OPERAND (def_stmt, 0);
806 tree rhs = GIMPLE_STMT_OPERAND (def_stmt, 1);
807
808 unsigned int cast = look_for_casts (lhs, rhs);
809 /* Check that only one cast happened, and it's of
810 non-pointer type. */
811 if ((cast & CT_FROM_NON_P) == (CT_FROM_NON_P)
812 && (cast & ~(CT_FROM_NON_P)) == 0)
813 {
814 ((cast_t *)data)->stmt = def_stmt;
815 ((cast_t *)data)->type++;
816
817 FOR_EACH_SSA_USE_OPERAND (use_p, def_stmt, iter, SSA_OP_ALL_USES)
818 {
819 walk_use_def_chains (USE_FROM_PTR (use_p), is_cast_from_non_pointer,
820 data, false);
821 if (((cast_t*)data)->type == -1)
822 return def_stmt;
823 }
824 }
825
826 /* Check that there is no cast, or cast is not harmful. */
827 else if ((cast & CT_NO_CAST) == (CT_NO_CAST)
828 || (cast & CT_DOWN) == (CT_DOWN)
829 || (cast & CT_UP) == (CT_UP)
830 || (cast & CT_USELESS) == (CT_USELESS)
831 || (cast & CT_FROM_MALLOC) == (CT_FROM_MALLOC))
832 {
833 FOR_EACH_SSA_USE_OPERAND (use_p, def_stmt, iter, SSA_OP_ALL_USES)
834 {
835 walk_use_def_chains (USE_FROM_PTR (use_p), is_cast_from_non_pointer,
836 data, false);
837 if (((cast_t*)data)->type == -1)
838 return def_stmt;
839 }
840 }
841
842 /* The cast is harmful. */
843 else
844 {
845 ((cast_t *)data)->type = -1;
846 return def_stmt;
847 }
848
849 *walk_subtrees = 0;
850 }
851 break;
852
853 default:
854 {
855 *walk_subtrees = 0;
856 break;
857 }
858 }
859
860 return NULL;
861 }
862
863 /* This function is a callback for walk_use_def_chains function called
864 from is_array_access_through_pointer_and_index. */
865
866 static bool
867 is_cast_from_non_pointer (tree var, tree def_stmt, void *data)
868 {
869
870 if (!def_stmt || !var)
871 return false;
872
873 if (TREE_CODE (def_stmt) == PHI_NODE)
874 return false;
875
876 if (SSA_NAME_IS_DEFAULT_DEF (var))
877 return false;
878
879 walk_tree (&def_stmt, is_cast_from_non_pointer_1, data, NULL);
880 if (((cast_t*)data)->type == -1)
881 return true;
882
883 return false;
884 }
885
886 /* When array element a_p[i] is accessed through the pointer a_p
887 and index i, it's translated into the following sequence
888 in gimple:
889
890 i.1_5 = (unsigned int) i_1;
891 D.1605_6 = i.1_5 * 16;
892 D.1606_7 = (struct str_t *) D.1605_6;
893 a_p.2_8 = a_p;
894 D.1608_9 = D.1606_7 + a_p.2_8;
895
 896 OP0 and OP1 are of the same pointer type and stand for
 897 D.1606_7 and a_p.2_8, or vice versa.
898
899 This function checks that:
900
901 1. one of OP0 and OP1 (D.1606_7) has passed only one cast from
902 non-pointer type (D.1606_7 = (struct str_t *) D.1605_6;).
903
904 2. one of OP0 and OP1 which has passed the cast from
905 non-pointer type (D.1606_7), is actually generated by multiplication of
906 index by size of type to which both OP0 and OP1 point to
907 (in this case D.1605_6 = i.1_5 * 16; ).
908
 909 3. the address of the def of the var that was cast (D.1605_6)
 910 has not been taken.  (How could that happen?)
911
912 The following items are checked implicitly by the end of algorithm:
913
 914 4. one of OP0 and OP1 (a_p.2_8) has never been cast
 915 (because if it had been cast to a pointer type, its type, which is
 916 also the type of OP0 and OP1, would have been marked as escaped
 917 during analysis of the cast statement (when check_cast() is called
 918 from scan_for_refs for that stmt)).
919
 920 5. the defs of OP0 and OP1 are not passed to an externally visible function
 921 (because if they were, their type, which is also the type of OP0
 922 and OP1, would be marked as escaped by the check_call function called from
 923 scan_for_refs for the call stmt).
924
 925 Together, 1-5 guarantee that this is an array access through a pointer and an index.
926
927 */
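/* In C terms, the gimple sequence in the comment above corresponds to
   something like (a hypothetical fragment)

       struct str_t *a_p;
       int i;
       ... a_p[i] ...

   where sizeof (struct str_t) is 16, so the index is scaled by 16
   and the result is added to the pointer.  */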
928
929 bool
930 is_array_access_through_pointer_and_index (enum tree_code code, tree op0,
931 tree op1, tree *base, tree *offset,
932 tree *offset_cast_stmt)
933 {
934 tree before_cast, before_cast_def_stmt;
935 cast_t op0_cast, op1_cast;
936
937 *base = NULL;
938 *offset = NULL;
939 *offset_cast_stmt = NULL;
940
941 /* Check 1. */
942 if (code == POINTER_PLUS_EXPR)
943 {
944 tree op0type = TYPE_MAIN_VARIANT (TREE_TYPE (op0));
945 tree op1type = TYPE_MAIN_VARIANT (TREE_TYPE (op1));
946
947 /* One of op0 and op1 is of pointer type and the other is numerical. */
948 if (POINTER_TYPE_P (op0type) && NUMERICAL_TYPE_CHECK (op1type))
949 {
950 *base = op0;
951 *offset = op1;
952 }
953 else if (POINTER_TYPE_P (op1type) && NUMERICAL_TYPE_CHECK (op0type))
954 {
955 *base = op1;
956 *offset = op0;
957 }
958 else
959 return false;
960 }
961 else
962 {
963 /* Init data for walk_use_def_chains function. */
964 op0_cast.type = op1_cast.type = 0;
965 op0_cast.stmt = op1_cast.stmt = NULL;
966
967 visited_stmts = pointer_set_create ();
968 walk_use_def_chains (op0, is_cast_from_non_pointer,(void *)(&op0_cast),
969 false);
970 pointer_set_destroy (visited_stmts);
971
972 visited_stmts = pointer_set_create ();
973 walk_use_def_chains (op1, is_cast_from_non_pointer,(void *)(&op1_cast),
974 false);
975 pointer_set_destroy (visited_stmts);
976
977 if (op0_cast.type == 1 && op1_cast.type == 0)
978 {
979 *base = op1;
980 *offset = op0;
981 *offset_cast_stmt = op0_cast.stmt;
982 }
983 else if (op0_cast.type == 0 && op1_cast.type == 1)
984 {
985 *base = op0;
986 *offset = op1;
987 *offset_cast_stmt = op1_cast.stmt;
988 }
989 else
990 return false;
991 }
992
993 /* Check 2.
994 offset_cast_stmt is of the form:
995 D.1606_7 = (struct str_t *) D.1605_6; */
996
997 if (*offset_cast_stmt)
998 {
999 before_cast = SINGLE_SSA_TREE_OPERAND (*offset_cast_stmt, SSA_OP_USE);
1000 if (!before_cast)
1001 return false;
1002
1003 if (SSA_NAME_IS_DEFAULT_DEF (before_cast))
1004 return false;
1005
1006 before_cast_def_stmt = SSA_NAME_DEF_STMT (before_cast);
1007 if (!before_cast_def_stmt)
1008 return false;
1009 }
1010 else
1011 before_cast_def_stmt = SSA_NAME_DEF_STMT (*offset);
1012
1013 /* before_cast_def_stmt should be of the form:
1014 D.1605_6 = i.1_5 * 16; */
1015
1016 if (TREE_CODE (before_cast_def_stmt) == GIMPLE_MODIFY_STMT)
1017 {
1018 tree lhs = GIMPLE_STMT_OPERAND (before_cast_def_stmt,0);
1019 tree rhs = GIMPLE_STMT_OPERAND (before_cast_def_stmt,1);
1020
1021 /* We expect temporary here. */
1022 if (!is_gimple_reg (lhs))
1023 return false;
1024
1025 if (TREE_CODE (rhs) == MULT_EXPR)
1026 {
1027 tree arg0 = TREE_OPERAND (rhs, 0);
1028 tree arg1 = TREE_OPERAND (rhs, 1);
1029 tree unit_size =
1030 TYPE_SIZE_UNIT (TREE_TYPE (TYPE_MAIN_VARIANT (TREE_TYPE (op0))));
1031
1032 if (!(CONSTANT_CLASS_P (arg0)
1033 && simple_cst_equal (arg0,unit_size))
1034 && !(CONSTANT_CLASS_P (arg1)
1035 && simple_cst_equal (arg1,unit_size)))
1036 return false;
1037 }
1038 else
1039 return false;
1040 }
1041 else
1042 return false;
1043
1044 /* Check 3.
1045 check that address of D.1605_6 was not taken.
 1046 FIXME: if D.1605_6 is a gimple reg then it cannot be addressable. */
1047
1048 return true;
1049 }
1050
1051 /* Register the parameter and return types of function FN. The type
1052 ESCAPES if the function is visible outside of the compilation
1053 unit. */
1054 static void
1055 check_function_parameter_and_return_types (tree fn, bool escapes)
1056 {
1057 tree arg;
1058
1059 if (TYPE_ARG_TYPES (TREE_TYPE (fn)))
1060 {
1061 for (arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1062 arg && TREE_VALUE (arg) != void_type_node;
1063 arg = TREE_CHAIN (arg))
1064 {
1065 tree type = get_canon_type (TREE_VALUE (arg), false, false);
1066 if (escapes)
1067 mark_interesting_type (type, EXPOSED_PARAMETER);
1068 }
1069 }
1070 else
1071 {
1072 /* FIXME - According to Geoff Keating, we should never have to
1073 do this; the front ends should always process the arg list
1074 from the TYPE_ARG_LIST. However, Geoff is wrong, this code
1075 does seem to be live. */
1076
1077 for (arg = DECL_ARGUMENTS (fn); arg; arg = TREE_CHAIN (arg))
1078 {
1079 tree type = get_canon_type (TREE_TYPE (arg), false, false);
1080 if (escapes)
1081 mark_interesting_type (type, EXPOSED_PARAMETER);
1082 }
1083 }
1084 if (escapes)
1085 {
1086 tree type = get_canon_type (TREE_TYPE (TREE_TYPE (fn)), false, false);
1087 mark_interesting_type (type, EXPOSED_PARAMETER);
1088 }
1089 }
1090
 1091 /* If the variable T is the right kind of static variable for compilation
 1092    unit scope escape analysis, mark its type as escaping where appropriate. */
1093
1094 static inline void
1095 has_proper_scope_for_analysis (tree t)
1096 {
1097 /* If the variable has the "used" attribute, treat it as if it had a
1098 been touched by the devil. */
1099 tree type = get_canon_type (TREE_TYPE (t), false, false);
1100 if (!type) return;
1101
1102 if (lookup_attribute ("used", DECL_ATTRIBUTES (t)))
1103 {
1104 mark_interesting_type (type, FULL_ESCAPE);
1105 return;
1106 }
1107
1108 /* Do not want to do anything with volatile except mark any
1109 function that uses one to be not const or pure. */
1110 if (TREE_THIS_VOLATILE (t))
1111 return;
1112
1113 /* Do not care about a local automatic that is not static. */
1114 if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
1115 return;
1116
1117 if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
1118 {
1119 /* If the front end set the variable to be READONLY and
1120 constant, we can allow this variable in pure or const
1121 functions but the scope is too large for our analysis to set
1122 these bits ourselves. */
1123
1124 if (TREE_READONLY (t)
1125 && DECL_INITIAL (t)
1126 && is_gimple_min_invariant (DECL_INITIAL (t)))
1127 ; /* Read of a constant, do not change the function state. */
1128 else
1129 {
1130 /* The type escapes for all public and externs. */
1131 mark_interesting_type (type, FULL_ESCAPE);
1132 }
1133 }
1134 }
1135
 1136 /* If T is a FUNCTION_DECL, register its parameter and return types as
 1137    escaping; if it is a VAR_DECL, check whether it has the proper scope for analysis. */
1138
1139 static void
1140 check_operand (tree t)
1141 {
1142 if (!t) return;
1143
1144 /* This is an assignment from a function, register the types as
1145 escaping. */
1146 if (TREE_CODE (t) == FUNCTION_DECL)
1147 check_function_parameter_and_return_types (t, true);
1148
1149 else if (TREE_CODE (t) == VAR_DECL)
1150 has_proper_scope_for_analysis (t);
1151 }
1152
1153 /* Examine tree T for references. */
1154
1155 static void
1156 check_tree (tree t)
1157 {
1158 if ((TREE_CODE (t) == EXC_PTR_EXPR) || (TREE_CODE (t) == FILTER_EXPR))
1159 return;
1160
 1161 /* We also want to catch REALPART_EXPR and IMAGPART_EXPR here,
 1162    but they are already included in handled_component_p. */
1163 while (handled_component_p (t))
1164 {
1165 if (TREE_CODE (t) == ARRAY_REF)
1166 check_operand (TREE_OPERAND (t, 1));
1167 t = TREE_OPERAND (t, 0);
1168 }
1169
1170 if (INDIRECT_REF_P (t))
1171 /* || TREE_CODE (t) == MEM_REF) */
1172 check_tree (TREE_OPERAND (t, 0));
1173
1174 if (SSA_VAR_P (t) || (TREE_CODE (t) == FUNCTION_DECL))
1175 check_operand (t);
1176 }
1177
1178 /* Create an address_of edge FROM_TYPE.TO_TYPE. */
1179 static void
1180 mark_interesting_addressof (tree to_type, tree from_type)
1181 {
1182 int from_uid;
1183 int to_uid;
1184 bitmap type_map;
1185 splay_tree_node result;
1186
1187 from_type = get_canon_type (from_type, false, false);
1188 to_type = get_canon_type (to_type, false, false);
1189
1190 if (!from_type || !to_type)
1191 return;
1192
1193 from_uid = TYPE_UID (from_type);
1194 to_uid = TYPE_UID (to_type);
1195
1196 gcc_assert (ipa_type_escape_star_count_of_interesting_type (from_type) == 0);
1197
1198 /* Process the Y into X map pointer. */
1199 result = splay_tree_lookup (uid_to_addressof_down_map,
1200 (splay_tree_key) from_uid);
1201
1202 if (result)
1203 type_map = (bitmap) result->value;
1204 else
1205 {
1206 type_map = BITMAP_ALLOC (&ipa_obstack);
1207 splay_tree_insert (uid_to_addressof_down_map,
1208 from_uid,
1209 (splay_tree_value)type_map);
1210 }
1211 bitmap_set_bit (type_map, TYPE_UID (to_type));
1212
1213 /* Process the X into Y reverse map pointer. */
1214 result =
1215 splay_tree_lookup (uid_to_addressof_up_map, (splay_tree_key) to_uid);
1216
1217 if (result)
1218 type_map = (bitmap) result->value;
1219 else
1220 {
1221 type_map = BITMAP_ALLOC (&ipa_obstack);
1222 splay_tree_insert (uid_to_addressof_up_map,
1223 to_uid,
1224 (splay_tree_value)type_map);
1225 }
1226 bitmap_set_bit (type_map, TYPE_UID (from_type));
1227 }
1228
 1229 /* Scan tree T to see if there are any addresses taken within T. */
1230
1231 static void
1232 look_for_address_of (tree t)
1233 {
1234 if (TREE_CODE (t) == ADDR_EXPR)
1235 {
1236 tree x = get_base_var (t);
1237 tree cref = TREE_OPERAND (t, 0);
1238
 1239 /* If we have an expression of the form "&a.b.c.d", mark a.b,
 1240    b.c and c.d as having their addresses taken. */
1241 tree fielddecl = NULL_TREE;
1242 while (cref!= x)
1243 {
1244 if (TREE_CODE (cref) == COMPONENT_REF)
1245 {
1246 fielddecl = TREE_OPERAND (cref, 1);
1247 mark_interesting_addressof (TREE_TYPE (fielddecl),
1248 DECL_FIELD_CONTEXT (fielddecl));
1249 }
1250 else if (TREE_CODE (cref) == ARRAY_REF)
1251 get_canon_type (TREE_TYPE (cref), false, false);
1252
1253 cref = TREE_OPERAND (cref, 0);
1254 }
1255
1256 if (TREE_CODE (x) == VAR_DECL)
1257 has_proper_scope_for_analysis (x);
1258 }
1259 }
1260
1261
1262 /* Scan tree T to see if there are any casts within it.
 1263 LHS is the LHS of the expression involving the cast. */
1264
1265 static unsigned int
1266 look_for_casts (tree lhs ATTRIBUTE_UNUSED, tree t)
1267 {
1268 unsigned int cast = 0;
1269
1270
1271 if (is_gimple_cast (t) || TREE_CODE (t) == VIEW_CONVERT_EXPR)
1272 {
1273 tree castfromvar = TREE_OPERAND (t, 0);
1274 cast = cast | check_cast (TREE_TYPE (t), castfromvar);
1275 }
1276 else
1277 while (handled_component_p (t))
1278 {
1279 t = TREE_OPERAND (t, 0);
1280 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
1281 {
1282 /* This may be some part of a component ref.
1283 IE it may be a.b.VIEW_CONVERT_EXPR<weird_type>(c).d, AFAIK.
1284 castfromref will give you a.b.c, not a. */
1285 tree castfromref = TREE_OPERAND (t, 0);
1286 cast = cast | check_cast (TREE_TYPE (t), castfromref);
1287 }
1288 else if (TREE_CODE (t) == COMPONENT_REF)
1289 get_canon_type (TREE_TYPE (TREE_OPERAND (t, 1)), false, false);
1290 }
1291
1292 if (!cast)
1293 cast = CT_NO_CAST;
1294 return cast;
1295 }
1296
1297 /* Check to see if T is a read or address of operation on a static var
1298 we are interested in analyzing. */
1299
1300 static void
1301 check_rhs_var (tree t)
1302 {
1303 look_for_address_of (t);
1304 check_tree(t);
1305 }
1306
1307 /* Check to see if T is an assignment to a static var we are
1308 interested in analyzing. */
1309
1310 static void
1311 check_lhs_var (tree t)
1312 {
1313 check_tree(t);
1314 }
1315
 1316 /* This is a scaled down version of get_asm_expr_operands from
 1317    tree-ssa-operands.c.  The version there runs much later and assumes
 1318    that aliasing information is already available.  Here we are just
 1319    trying to find out whether the set of inputs and outputs contains
 1320    references to, or addresses of, locals.  STMT is the actual asm
 1321    statement being analyzed. */
1322
1323 static void
1324 get_asm_expr_operands (tree stmt)
1325 {
1326 int noutputs = list_length (ASM_OUTPUTS (stmt));
1327 const char **oconstraints
1328 = (const char **) alloca ((noutputs) * sizeof (const char *));
1329 int i;
1330 tree link;
1331 const char *constraint;
1332 bool allows_mem, allows_reg, is_inout;
1333
1334 for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
1335 {
1336 oconstraints[i] = constraint
1337 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1338 parse_output_constraint (&constraint, i, 0, 0,
1339 &allows_mem, &allows_reg, &is_inout);
1340
1341 check_lhs_var (TREE_VALUE (link));
1342 }
1343
1344 for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
1345 {
1346 constraint
1347 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1348 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1349 oconstraints, &allows_mem, &allows_reg);
1350
1351 check_rhs_var (TREE_VALUE (link));
1352 }
1353
1354 /* There is no code here to check for asm memory clobbers. The
1355 casual maintainer might think that such code would be necessary,
1356 but that appears to be wrong. In other parts of the compiler,
1357 the asm memory clobbers are assumed to only clobber variables
1358 that are addressable. All types with addressable instances are
1359 assumed to already escape. So, we are protected here. */
1360 }
1361
 1362 /* Check the parameters of a function call CALL_EXPR to mark the
 1363    types that pass across the function boundary.  Also check to see
 1364    whether this is either an indirect call or a call outside the
 1365    compilation unit. */
1366
1367 static void
1368 check_call (tree call_expr)
1369 {
1370 tree operand;
1371 tree callee_t = get_callee_fndecl (call_expr);
1372 struct cgraph_node* callee;
1373 enum availability avail = AVAIL_NOT_AVAILABLE;
1374 call_expr_arg_iterator iter;
1375
1376 FOR_EACH_CALL_EXPR_ARG (operand, iter, call_expr)
1377 check_rhs_var (operand);
1378
1379 if (callee_t)
1380 {
1381 tree arg_type;
1382 tree last_arg_type = NULL;
1383 callee = cgraph_node(callee_t);
1384 avail = cgraph_function_body_availability (callee);
1385
1386 /* Check that there are no implicit casts in the passing of
1387 parameters. */
1388 if (TYPE_ARG_TYPES (TREE_TYPE (callee_t)))
1389 {
1390 for (arg_type = TYPE_ARG_TYPES (TREE_TYPE (callee_t)),
1391 operand = first_call_expr_arg (call_expr, &iter);
1392 arg_type && TREE_VALUE (arg_type) != void_type_node;
1393 arg_type = TREE_CHAIN (arg_type),
1394 operand = next_call_expr_arg (&iter))
1395 {
1396 if (operand)
1397 {
1398 last_arg_type = TREE_VALUE(arg_type);
1399 check_cast (last_arg_type, operand);
1400 }
1401 else
1402 /* The code reaches here for some unfortunate
1403 builtin functions that do not have a list of
1404 argument types. */
1405 break;
1406 }
1407 }
1408 else
1409 {
1410 /* FIXME - According to Geoff Keating, we should never
1411 have to do this; the front ends should always process
1412 the arg list from the TYPE_ARG_LIST. */
1413 for (arg_type = DECL_ARGUMENTS (callee_t),
1414 operand = first_call_expr_arg (call_expr, &iter);
1415 arg_type;
1416 arg_type = TREE_CHAIN (arg_type),
1417 operand = next_call_expr_arg (&iter))
1418 {
1419 if (operand)
1420 {
1421 last_arg_type = TREE_TYPE(arg_type);
1422 check_cast (last_arg_type, operand);
1423 }
1424 else
1425 /* The code reaches here for some unfortunate
1426 builtin functions that do not have a list of
1427 argument types. */
1428 break;
1429 }
1430 }
1431
1432 /* In the case where we have a var_args function, we need to
1433 check the remaining parameters against the last argument. */
1434 arg_type = last_arg_type;
1435 for (;
1436 operand != NULL_TREE;
1437 operand = next_call_expr_arg (&iter))
1438 {
1439 if (arg_type)
1440 check_cast (arg_type, operand);
1441 else
1442 {
1443 /* The code reaches here for some unfortunate
1444 builtin functions that do not have a list of
1445 argument types. Most of these functions have
1446 been marked as having their parameters not
1447 escape, but for the rest, the type is doomed. */
1448 tree type = get_canon_type (TREE_TYPE (operand), false, false);
1449 mark_interesting_type (type, FULL_ESCAPE);
1450 }
1451 }
1452 }
1453
1454 /* The callee is either unknown (indirect call) or there is just no
 1455 scannable code for it (external call).  We look to see if there
1456 are any bits available for the callee (such as by declaration or
1457 because it is builtin) and process solely on the basis of those
1458 bits. */
1459
1460 if (avail == AVAIL_NOT_AVAILABLE || avail == AVAIL_OVERWRITABLE)
1461 {
1462 /* If this is a direct call to an external function, mark all of
1463 the parameter and return types. */
1464 FOR_EACH_CALL_EXPR_ARG (operand, iter, call_expr)
1465 {
1466 tree type = get_canon_type (TREE_TYPE (operand), false, false);
1467 mark_interesting_type (type, EXPOSED_PARAMETER);
1468 }
1469
1470 if (callee_t)
1471 {
1472 tree type =
1473 get_canon_type (TREE_TYPE (TREE_TYPE (callee_t)), false, false);
1474 mark_interesting_type (type, EXPOSED_PARAMETER);
1475 }
1476 }
1477 }
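/* A short example of the behavior above (hypothetical functions): for

       extern void ext_fn (struct str_t *p);
       ...
       ext_fn (q);

   no body of ext_fn is available, so the type of q is marked
   EXPOSED_PARAMETER, which mark_interesting_type turns into
   FULL_ESCAPE because it is a pointer type.  For a call whose body is
   available in this unit, the arguments are only checked for implicit
   casts against the declared parameter types.  */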
1478
 1479 /* CODE is the operation on OP0 and OP1.  OP0 is the operand that we *know*
 1480    is a pointer type.  OP1 may be a pointer type.  Return true if the operation is safe. */
1481 static bool
1482 okay_pointer_operation (enum tree_code code, tree op0, tree op1)
1483 {
1484 tree op0type = TYPE_MAIN_VARIANT (TREE_TYPE (op0));
1485
1486 switch (code)
1487 {
1488 case MULT_EXPR:
1489 /* Multiplication does not change alignment. */
1490 return true;
1491 break;
1492 case MINUS_EXPR:
1493 case PLUS_EXPR:
1494 case POINTER_PLUS_EXPR:
1495 {
1496 tree base, offset, offset_cast_stmt;
1497
1498 if (POINTER_TYPE_P (op0type)
1499 && TREE_CODE (op0) == SSA_NAME
1500 && TREE_CODE (op1) == SSA_NAME
1501 && is_array_access_through_pointer_and_index (code, op0, op1,
1502 &base,
1503 &offset,
1504 &offset_cast_stmt))
1505 return true;
1506 else
1507 {
1508 tree size_of_op0_points_to = TYPE_SIZE_UNIT (TREE_TYPE (op0type));
1509
1510 if (CONSTANT_CLASS_P (op1)
1511 && size_of_op0_points_to
1512 && multiple_of_p (TREE_TYPE (size_of_op0_points_to),
1513 op1, size_of_op0_points_to))
1514 return true;
1515
1516 if (CONSTANT_CLASS_P (op0)
1517 && size_of_op0_points_to
1518 && multiple_of_p (TREE_TYPE (size_of_op0_points_to),
1519 op0, size_of_op0_points_to))
1520 return true;
1521 }
1522 }
1523 break;
1524 default:
1525 return false;
1526 }
1527 return false;
1528 }
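/* A sketch of what okay_pointer_operation accepts, assuming p_1 is a
   struct str_t * and sizeof (struct str_t) is 16:

       q_2 = p_1 + 16;       OK: the constant is a multiple of the
                             pointed-to type's size.
       q_2 = p_1 + D.42_3;   OK only if the operands match the array
                             access pattern recognized by
                             is_array_access_through_pointer_and_index.
       q_2 = p_1 + 3;        not OK: the caller (scan_for_refs) then
                             marks the pointed-to type FULL_ESCAPE.  */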
1529
1530 /* TP is the part of the tree currently under the microscope.
1531 WALK_SUBTREES is part of the walk_tree api but is unused here.
1532 DATA is cgraph_node of the function being walked. */
1533
1534 /* FIXME: When this is converted to run over SSA form, this code
1535 should be converted to use the operand scanner. */
1536
1537 static tree
1538 scan_for_refs (tree *tp, int *walk_subtrees, void *data)
1539 {
1540 struct cgraph_node *fn = (struct cgraph_node *) data;
1541 tree t = *tp;
1542
1543 switch (TREE_CODE (t))
1544 {
1545 case VAR_DECL:
1546 if (DECL_INITIAL (t))
1547 walk_tree (&DECL_INITIAL (t), scan_for_refs, fn, visited_nodes);
1548 *walk_subtrees = 0;
1549 break;
1550
1551 case GIMPLE_MODIFY_STMT:
1552 {
1553 /* First look on the lhs and see what variable is stored to */
1554 tree lhs = GIMPLE_STMT_OPERAND (t, 0);
1555 tree rhs = GIMPLE_STMT_OPERAND (t, 1);
1556
1557 check_lhs_var (lhs);
1558 check_cast (TREE_TYPE (lhs), rhs);
1559
1560 /* For the purposes of figuring out what the cast affects */
1561
1562 /* Next check the operands on the rhs to see if they are ok. */
1563 switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
1564 {
1565 case tcc_binary:
1566 {
1567 tree op0 = TREE_OPERAND (rhs, 0);
1568 tree type0 = get_canon_type (TREE_TYPE (op0), false, false);
1569 tree op1 = TREE_OPERAND (rhs, 1);
1570 tree type1 = get_canon_type (TREE_TYPE (op1), false, false);
1571
1572 /* If this is pointer arithmetic of any bad sort, then
1573 we need to mark the types as bad. For binary
1574 operations, no binary operator we currently support
1575 is always "safe" in regard to what it would do to
1576 pointers for purposes of determining which types
1577 escape, except operations of the size of the type.
1578 It is possible that min and max under the right set
1579 of circumstances and if the moon is in the correct
1580 place could be safe, but it is hard to see how this
1581 is worth the effort. */
1582
1583 if (type0 && POINTER_TYPE_P (type0)
1584 && !okay_pointer_operation (TREE_CODE (rhs), op0, op1))
1585 mark_interesting_type (type0, FULL_ESCAPE);
1586 if (type1 && POINTER_TYPE_P (type1)
1587 && !okay_pointer_operation (TREE_CODE (rhs), op1, op0))
1588 mark_interesting_type (type1, FULL_ESCAPE);
1589
1590 look_for_casts (lhs, op0);
1591 look_for_casts (lhs, op1);
1592 check_rhs_var (op0);
1593 check_rhs_var (op1);
1594 }
1595 break;
1596 case tcc_unary:
1597 {
1598 tree op0 = TREE_OPERAND (rhs, 0);
1599 tree type0 = get_canon_type (TREE_TYPE (op0), false, false);
1600 /* For unary operations, if the operation is NEGATE or
1601 ABS on a pointer, this is also considered pointer
1602 arithmetic and thus, bad for business. */
1603 if (type0 && (TREE_CODE (op0) == NEGATE_EXPR
1604 || TREE_CODE (op0) == ABS_EXPR)
1605 && POINTER_TYPE_P (type0))
1606 {
1607 mark_interesting_type (type0, FULL_ESCAPE);
1608 }
1609 check_rhs_var (op0);
1610 look_for_casts (lhs, op0);
1611 look_for_casts (lhs, rhs);
1612 }
1613
1614 break;
1615 case tcc_reference:
1616 look_for_casts (lhs, rhs);
1617 check_rhs_var (rhs);
1618 break;
1619 case tcc_declaration:
1620 check_rhs_var (rhs);
1621 break;
1622 case tcc_expression:
1623 switch (TREE_CODE (rhs))
1624 {
1625 case ADDR_EXPR:
1626 look_for_casts (lhs, TREE_OPERAND (rhs, 0));
1627 check_rhs_var (rhs);
1628 break;
1629 default:
1630 break;
1631 }
1632 break;
1633 case tcc_vl_exp:
1634 switch (TREE_CODE (rhs))
1635 {
1636 case CALL_EXPR:
 1637 /* If this is a call to malloc, squirrel away the result so we do
 1638    not mark the resulting cast as being bad. */
1639 bad. */
1640 check_call (rhs);
1641 break;
1642 default:
1643 break;
1644 }
1645 break;
1646 default:
1647 break;
1648 }
1649 *walk_subtrees = 0;
1650 }
1651 break;
1652
1653 case ADDR_EXPR:
1654 /* This case is here to find addresses on rhs of constructors in
1655 decl_initial of static variables. */
1656 check_rhs_var (t);
1657 *walk_subtrees = 0;
1658 break;
1659
1660 case CALL_EXPR:
1661 check_call (t);
1662 *walk_subtrees = 0;
1663 break;
1664
1665 case ASM_EXPR:
1666 get_asm_expr_operands (t);
1667 *walk_subtrees = 0;
1668 break;
1669
1670 default:
1671 break;
1672 }
1673 return NULL;
1674 }
1675
1676
1677 /* The init routine for analyzing global static variable usage. See
1678 comments at top for description. */
1679 static void
1680 ipa_init (void)
1681 {
1682 bitmap_obstack_initialize (&ipa_obstack);
1683 global_types_exposed_parameter = BITMAP_ALLOC (&ipa_obstack);
1684 global_types_full_escape = BITMAP_ALLOC (&ipa_obstack);
1685 global_types_seen = BITMAP_ALLOC (&ipa_obstack);
1686
1687 uid_to_canon_type = splay_tree_new (splay_tree_compare_ints, 0, 0);
1688 all_canon_types = splay_tree_new (compare_type_brand, 0, 0);
1689 type_to_canon_type = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1690 uid_to_subtype_map = splay_tree_new (splay_tree_compare_ints, 0, 0);
1691 uid_to_addressof_down_map = splay_tree_new (splay_tree_compare_ints, 0, 0);
1692 uid_to_addressof_up_map = splay_tree_new (splay_tree_compare_ints, 0, 0);
1693
1694 /* There are some shared nodes, in particular the initializers on
1695 static declarations. We do not need to scan them more than once
1696 since all we would be interested in are the addressof
1697 operations. */
1698 visited_nodes = pointer_set_create ();
1699 initialized = true;
1700 }
1701
 1702 /* Check out the rhs of the static or global initialization VNODE to
 1703    see if it contains any addressof operations.  Note that some of
1704 these variables may not even be referenced in the code in this
1705 compilation unit but their right hand sides may contain references
1706 to variables defined within this unit. */
1707
1708 static void
1709 analyze_variable (struct varpool_node *vnode)
1710 {
1711 tree global = vnode->decl;
1712 tree type = get_canon_type (TREE_TYPE (global), false, false);
1713
1714 /* If this variable has exposure beyond the compilation unit, add
1715 its type to the global types. */
1716
1717 if (vnode->externally_visible)
1718 mark_interesting_type (type, FULL_ESCAPE);
1719
1720 gcc_assert (TREE_CODE (global) == VAR_DECL);
1721
1722 if (DECL_INITIAL (global))
1723 walk_tree (&DECL_INITIAL (global), scan_for_refs, NULL, visited_nodes);
1724 }
1725
1726 /* This is the main routine for finding the reference patterns for
1727 global variables within a function FN. */
1728
1729 static void
1730 analyze_function (struct cgraph_node *fn)
1731 {
1732 tree decl = fn->decl;
1733 check_function_parameter_and_return_types (decl,
1734 fn->local.externally_visible);
1735 if (dump_file)
1736 fprintf (dump_file, "\n local analysis of %s", cgraph_node_name (fn));
1737
1738 {
1739 struct function *this_cfun = DECL_STRUCT_FUNCTION (decl);
1740 basic_block this_block;
1741
1742 FOR_EACH_BB_FN (this_block, this_cfun)
1743 {
1744 block_stmt_iterator bsi;
1745 for (bsi = bsi_start (this_block); !bsi_end_p (bsi); bsi_next (&bsi))
1746 walk_tree (bsi_stmt_ptr (bsi), scan_for_refs,
1747 fn, visited_nodes);
1748 }
1749 }
1750
1751 /* There may be const decls with interesting right hand sides. */
1752 if (DECL_STRUCT_FUNCTION (decl))
1753 {
1754 tree step;
1755 for (step = DECL_STRUCT_FUNCTION (decl)->unexpanded_var_list;
1756 step;
1757 step = TREE_CHAIN (step))
1758 {
1759 tree var = TREE_VALUE (step);
1760 if (TREE_CODE (var) == VAR_DECL
1761 && DECL_INITIAL (var)
1762 && !TREE_STATIC (var))
1763 walk_tree (&DECL_INITIAL (var), scan_for_refs,
1764 fn, visited_nodes);
1765 get_canon_type (TREE_TYPE (var), false, false);
1766 }
1767 }
1768 }
1769
1770 \f
1771
1772 /* Convert a type_UID into a type. */
1773 static tree
1774 type_for_uid (int uid)
1775 {
1776 splay_tree_node result =
1777 splay_tree_lookup (uid_to_canon_type, (splay_tree_key) uid);
1778
1779 if (result)
1780 return (tree) result->value;
1781 else return NULL;
1782 }
1783
 1784 /* Return a bitmap with the subtypes of the type for UID.  If it
1785 does not exist, return either NULL or a new bitmap depending on the
1786 value of CREATE. */
1787
1788 static bitmap
1789 subtype_map_for_uid (int uid, bool create)
1790 {
1791 splay_tree_node result = splay_tree_lookup (uid_to_subtype_map,
1792 (splay_tree_key) uid);
1793
1794 if (result)
1795 return (bitmap) result->value;
1796 else if (create)
1797 {
1798 bitmap subtype_map = BITMAP_ALLOC (&ipa_obstack);
1799 splay_tree_insert (uid_to_subtype_map,
1800 uid,
1801 (splay_tree_value)subtype_map);
1802 return subtype_map;
1803 }
1804 else return NULL;
1805 }
1806
1807 /* Mark all of the supertypes and field types of TYPE as being seen.
1808 Also accumulate the subtypes for each type so that
 1809 close_type_full_escape can mark a subtype as escaping if the
1810 supertype escapes. */
1811
1812 static void
1813 close_type_seen (tree type)
1814 {
1815 tree field;
1816 int i, uid;
1817 tree binfo, base_binfo;
1818
1819 /* See thru all pointer tos and array ofs. */
1820 type = get_canon_type (type, true, true);
1821 if (!type)
1822 return;
1823
1824 uid = TYPE_UID (type);
1825
1826 if (bitmap_bit_p (been_there_done_that, uid))
1827 return;
1828 bitmap_set_bit (been_there_done_that, uid);
1829
1830 /* If we are doing a language with a type hierarchy, mark all of
1831 the superclasses. */
1832 if (TYPE_BINFO (type))
1833 for (binfo = TYPE_BINFO (type), i = 0;
1834 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1835 {
1836 tree binfo_type = BINFO_TYPE (base_binfo);
1837 bitmap subtype_map = subtype_map_for_uid
1838 (TYPE_UID (TYPE_MAIN_VARIANT (binfo_type)), true);
1839 bitmap_set_bit (subtype_map, uid);
1840 close_type_seen (get_canon_type (binfo_type, true, true));
1841 }
1842
1843 /* If the field is a struct or union type, mark all of the
1844 subfields. */
1845 for (field = TYPE_FIELDS (type);
1846 field;
1847 field = TREE_CHAIN (field))
1848 {
1849 tree field_type;
1850 if (TREE_CODE (field) != FIELD_DECL)
1851 continue;
1852
1853 field_type = TREE_TYPE (field);
1854 if (ipa_type_escape_star_count_of_interesting_or_array_type (field_type) >= 0)
1855 close_type_seen (get_canon_type (field_type, true, true));
1856 }
1857 }
1858
1859 /* Take a TYPE that has been passed by value to an external function
1860 and mark all of the fields that have pointer types as escaping. For
1861 any of the non pointer types that are structures or unions,
1862 recurse. TYPE is never a pointer type. */
1863
1864 static void
1865 close_type_exposed_parameter (tree type)
1866 {
1867 tree field;
1868 int uid;
1869
1870 type = get_canon_type (type, false, false);
1871 if (!type)
1872 return;
1873 uid = TYPE_UID (type);
1874 gcc_assert (!POINTER_TYPE_P (type));
1875
1876 if (bitmap_bit_p (been_there_done_that, uid))
1877 return;
1878 bitmap_set_bit (been_there_done_that, uid);
1879
1880 /* If the field is a struct or union type, mark all of the
1881 subfields. */
1882 for (field = TYPE_FIELDS (type);
1883 field;
1884 field = TREE_CHAIN (field))
1885 {
1886 tree field_type;
1887
1888 if (TREE_CODE (field) != FIELD_DECL)
1889 continue;
1890
1891 field_type = get_canon_type (TREE_TYPE (field), false, false);
1892 mark_interesting_type (field_type, EXPOSED_PARAMETER);
1893
1894 /* Only recurse for non pointer types of structures and unions. */
1895 if (ipa_type_escape_star_count_of_interesting_type (field_type) == 0)
1896 close_type_exposed_parameter (field_type);
1897 }
1898 }
1899
1900 /* The next function handles the case where a type fully escapes.
 1901 This means that not only does the type itself escape, but also:
 1902
 1903 a) the type of every field recursively escapes, and
 1904 b) the type of every subtype escapes, as do the supertypes and all
 1905 of the pointed-to types for each field.
1906
1907 Note that pointer to types are not marked as escaping. If the
1908 pointed to type escapes, the pointer to type also escapes.
1909
1910 Take a TYPE that has had the address taken for an instance of it
1911 and mark all of the types for its fields as having their addresses
1912 taken. */
1913
1914 static void
1915 close_type_full_escape (tree type)
1916 {
1917 tree field;
1918 unsigned int i;
1919 int uid;
1920 tree binfo, base_binfo;
1921 bitmap_iterator bi;
1922 bitmap subtype_map;
1923 splay_tree_node address_result;
1924
1925 /* Strip off any pointer or array types. */
1926 type = get_canon_type (type, true, true);
1927 if (!type)
1928 return;
1929 uid = TYPE_UID (type);
1930
1931 if (bitmap_bit_p (been_there_done_that, uid))
1932 return;
1933 bitmap_set_bit (been_there_done_that, uid);
1934
1935 subtype_map = subtype_map_for_uid (uid, false);
1936
1937 /* If we are doing a language with a type hierarchy, mark all of
1938 the superclasses. */
1939 if (TYPE_BINFO (type))
1940 for (binfo = TYPE_BINFO (type), i = 0;
1941 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1942 {
1943 tree binfotype = BINFO_TYPE (base_binfo);
1944 binfotype = mark_type (binfotype, FULL_ESCAPE);
1945 close_type_full_escape (binfotype);
1946 }
1947
1948 /* Mark as escaped any types that have been downcast to
1949 this type. */
1950 if (subtype_map)
1951 EXECUTE_IF_SET_IN_BITMAP (subtype_map, 0, i, bi)
1952 {
1953 tree subtype = type_for_uid (i);
1954 subtype = mark_type (subtype, FULL_ESCAPE);
1955 close_type_full_escape (subtype);
1956 }
1957
1958 /* If the field is a struct or union type, mark all of the
1959 subfields. */
1960 for (field = TYPE_FIELDS (type);
1961 field;
1962 field = TREE_CHAIN (field))
1963 {
1964 tree field_type;
1965 if (TREE_CODE (field) != FIELD_DECL)
1966 continue;
1967
1968 field_type = TREE_TYPE (field);
1969 if (ipa_type_escape_star_count_of_interesting_or_array_type (field_type) >= 0)
1970 {
1971 field_type = mark_type (field_type, FULL_ESCAPE);
1972 close_type_full_escape (field_type);
1973 }
1974 }
1975
1976 /* For all of the types A that contain this type B and were part of
1977 an expression like "&...A.B...", mark the A's as escaping. */
1978 address_result = splay_tree_lookup (uid_to_addressof_up_map,
1979 (splay_tree_key) uid);
1980 if (address_result)
1981 {
1982 bitmap containing_classes = (bitmap) address_result->value;
1983 EXECUTE_IF_SET_IN_BITMAP (containing_classes, 0, i, bi)
1984 {
1985 close_type_full_escape (type_for_uid (i));
1986 }
1987 }
1988 }
1989
1990 /* Transitively close the addressof bitmap for the type with UID.
1991 This means that if we had a.b and b.c, a would have both b and c in
1992 its maps. */
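
/* As an illustrative sketch only (these types are hypothetical), given

     struct c { int x; };
     struct b { struct c cf; };
     struct a { struct b bf; };

   if the expressions &a_var.bf and &b_var.cf have both been seen, the
   downward map initially records A -> {B} and B -> {C}; after this
   closure A's map is {B, C}. */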
1993
1994 static bitmap
1995 close_addressof_down (int uid)
1996 {
1997 bitmap_iterator bi;
1998 splay_tree_node result =
1999 splay_tree_lookup (uid_to_addressof_down_map, (splay_tree_key) uid);
2000 bitmap map = NULL;
2001 bitmap new_map;
2002 unsigned int i;
2003
2004 if (result)
2005 map = (bitmap) result->value;
2006 else
2007 return NULL;
2008
2009 if (bitmap_bit_p (been_there_done_that, uid))
2010 return map;
2011 bitmap_set_bit (been_there_done_that, uid);
2012
2013 /* If the type escapes, get rid of the addressof map; it will not be
2014 needed. */
2015 if (bitmap_bit_p (global_types_full_escape, uid))
2016 {
2017 BITMAP_FREE (map);
2018 splay_tree_remove (uid_to_addressof_down_map, (splay_tree_key) uid);
2019 return NULL;
2020 }
2021
2022 /* The new_map will have all of the bits of the old map plus,
2023 transitively, a bit for every enclosed field. */
2024 new_map = BITMAP_ALLOC (&ipa_obstack);
2025
2026 EXECUTE_IF_SET_IN_BITMAP (map, 0, i, bi)
2027 {
2028 bitmap submap = close_addressof_down (i);
2029 bitmap_set_bit (new_map, i);
2030 if (submap)
2031 bitmap_ior_into (new_map, submap);
2032 }
2033 result->value = (splay_tree_value) new_map;
2034
2035 BITMAP_FREE (map);
2036 return new_map;
2037 }
2038
2039 \f
2040 /* The main entry point for type escape analysis. */
2041
2042 static unsigned int
2043 type_escape_execute (void)
2044 {
2045 struct cgraph_node *node;
2046 struct varpool_node *vnode;
2047 unsigned int i;
2048 bitmap_iterator bi;
2049 splay_tree_node result;
2050
2051 ipa_init ();
2052
2053 /* Process all of the variables first. */
2054 FOR_EACH_STATIC_VARIABLE (vnode)
2055 analyze_variable (vnode);
2056
2057 /* Process all of the functions.
2058
2059 We do not want to process any of the clones, so we check that this
2060 is a master clone. However, we do need to process any
2061 AVAIL_OVERWRITABLE functions (these are never clones) because
2062 they may cause a type to escape.
2063 */
2064 for (node = cgraph_nodes; node; node = node->next)
2065 if (node->analyzed
2066 && (cgraph_is_master_clone (node)
2067 || (cgraph_function_body_availability (node) == AVAIL_OVERWRITABLE)))
2068 analyze_function (node);
2069
2070
2071 pointer_set_destroy (visited_nodes);
2072 visited_nodes = NULL;
2073
2074 /* Do all of the closures to discover which types escape the
2075 compilation unit. */
2076
2077 been_there_done_that = BITMAP_ALLOC (&ipa_obstack);
2078 bitmap_tmp = BITMAP_ALLOC (&ipa_obstack);
2079
2080 /* Examine the types that we have directly seen in scanning the code
2081 and add to that any contained types or superclasses. */
2082
2083 bitmap_copy (bitmap_tmp, global_types_seen);
2084 EXECUTE_IF_SET_IN_BITMAP (bitmap_tmp, 0, i, bi)
2085 {
2086 tree type = type_for_uid (i);
2087 /* Only look at records and unions, and pointers or arrays of them. */
2088 if (ipa_type_escape_star_count_of_interesting_or_array_type (type) >= 0)
2089 close_type_seen (type);
2090 }
2091 bitmap_clear (been_there_done_that);
2092
2093 /* Examine all of the types passed by value and mark any enclosed
2094 pointer types as escaping. */
2095 bitmap_copy (bitmap_tmp, global_types_exposed_parameter);
2096 EXECUTE_IF_SET_IN_BITMAP (bitmap_tmp, 0, i, bi)
2097 {
2098 close_type_exposed_parameter (type_for_uid (i));
2099 }
2100 bitmap_clear (been_there_done_that);
2101
2102 /* Close the types for escape. If something escapes, then any
2103 enclosed types escape as well as any subtypes. */
2104 bitmap_copy (bitmap_tmp, global_types_full_escape);
2105 EXECUTE_IF_SET_IN_BITMAP (bitmap_tmp, 0, i, bi)
2106 {
2107 close_type_full_escape (type_for_uid (i));
2108 }
2109 bitmap_clear (been_there_done_that);
2110
2111 /* Up to this point, the uid_to_addressof_down_map for type X
2112 contains an entry for Y if there has been an operation of the
2113 form &X.Y. This step adds all of the fields contained within Y
2114 (recursively) to X's map. */
2115
2116 result = splay_tree_min (uid_to_addressof_down_map);
2117 while (result)
2118 {
2119 int uid = result->key;
2120 /* Close the addressof map, i.e. copy all of the transitive
2121 substructures up to this level. */
2122 close_addressof_down (uid);
2123 result = splay_tree_successor (uid_to_addressof_down_map, uid);
2124 }
2125
2126 /* The array types and pointer types are not needed in the persistent
2127 data structures. */
2128 result = splay_tree_min (all_canon_types);
2129 while (result)
2130 {
2131 tree type = (tree) result->value;
2132 tree key = (tree) result->key;
2133 if (POINTER_TYPE_P (type)
2134 || TREE_CODE (type) == ARRAY_TYPE)
2135 {
2136 splay_tree_remove (all_canon_types, (splay_tree_key) result->key);
2137 splay_tree_remove (type_to_canon_type, (splay_tree_key) type);
2138 splay_tree_remove (uid_to_canon_type, (splay_tree_key) TYPE_UID (type));
2139 bitmap_clear_bit (global_types_seen, TYPE_UID (type));
2140 }
2141 result = splay_tree_successor (all_canon_types, (splay_tree_key) key);
2142 }
2143
2144 if (dump_file)
2145 {
2146 EXECUTE_IF_SET_IN_BITMAP (global_types_seen, 0, i, bi)
2147 {
2148 /* The pointer types are in the global_types_full_escape
2149 bitmap but not in the backwards map. They also contain
2150 no useful information since they are not marked. */
2151 tree type = type_for_uid (i);
2152 fprintf (dump_file, "type %d ", i);
2153 print_generic_expr (dump_file, type, 0);
2154 if (bitmap_bit_p (global_types_full_escape, i))
2155 fprintf (dump_file, " escaped\n");
2156 else
2157 fprintf (dump_file, " contained\n");
2158 }
2159 }
2160
2161 /* Get rid of uid_to_addressof_up_map and its bitmaps. */
2162 result = splay_tree_min (uid_to_addressof_up_map);
2163 while (result)
2164 {
2165 int uid = (int) result->key;
2166 bitmap bm = (bitmap) result->value;
2167
2168 BITMAP_FREE (bm);
2169 splay_tree_remove (uid_to_addressof_up_map, (splay_tree_key) uid);
2170 result = splay_tree_successor (uid_to_addressof_up_map, uid);
2171 }
2172
2173 /* Get rid of the subtype map. */
2174 result = splay_tree_min (uid_to_subtype_map);
2175 while (result)
2176 {
2177 bitmap b = (bitmap) result->value;
2178 BITMAP_FREE (b);
2179 splay_tree_remove (uid_to_subtype_map, result->key);
2180 result = splay_tree_min (uid_to_subtype_map);
2181 }
2182 splay_tree_delete (uid_to_subtype_map);
2183 uid_to_subtype_map = NULL;
2184
2185 BITMAP_FREE (global_types_exposed_parameter);
2186 BITMAP_FREE (been_there_done_that);
2187 BITMAP_FREE (bitmap_tmp);
2188 return 0;
2189 }
2190
2191 static bool
2192 gate_type_escape_vars (void)
2193 {
2194 return (flag_unit_at_a_time != 0 && flag_ipa_type_escape
2195 /* Don't bother doing anything if the program has errors. */
2196 && !(errorcount || sorrycount));
2197 }
2198
2199 struct tree_opt_pass pass_ipa_type_escape =
2200 {
2201 "type-escape-var", /* name */
2202 gate_type_escape_vars, /* gate */
2203 type_escape_execute, /* execute */
2204 NULL, /* sub */
2205 NULL, /* next */
2206 0, /* static_pass_number */
2207 TV_IPA_TYPE_ESCAPE, /* tv_id */
2208 0, /* properties_required */
2209 0, /* properties_provided */
2210 0, /* properties_destroyed */
2211 0, /* todo_flags_start */
2212 0, /* todo_flags_finish */
2213 0 /* letter */
2214 };
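
/* Other passes consume the results of this analysis through the
   predicates declared in ipa-type-escape.h. A minimal sketch of such a
   query, assuming the ipa_type_escape_type_contained_p interface from
   that header (the callee shown is purely hypothetical):

     if (ipa_type_escape_type_contained_p (TREE_TYPE (decl)))
       optimize_knowing_no_instance_escapes (decl);
*/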
2215