intrinsic.h (gfc_check_selected_real_kind, [...]): Update prototypes.
[gcc.git] / gcc / ipa-type-escape.c
1 /* Escape analysis for types.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2010
3 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This pass determines which types in the program contain only
23 instances that are completely encapsulated by the compilation unit.
24 Those types that are encapsulated must also pass the further
25 requirement that there be no bad operations on any instances of
26 those types.
27
28 A great deal of freedom in compilation is allowed for the instances
29 of those types that pass these conditions.
30 */
31
32 /* The code in this module is called by the ipa pass manager. It
33 should be one of the later passes since its information is used by
34 the rest of the compilation. */
35
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
40 #include "tree.h"
41 #include "tree-flow.h"
42 #include "tree-inline.h"
43 #include "tree-pass.h"
44 #include "langhooks.h"
45 #include "pointer-set.h"
46 #include "splay-tree.h"
47 #include "ggc.h"
48 #include "ipa-utils.h"
49 #include "ipa-type-escape.h"
50 #include "gimple.h"
51 #include "cgraph.h"
52 #include "output.h"
53 #include "flags.h"
54 #include "timevar.h"
55 #include "diagnostic.h"
56 #include "tree-pretty-print.h"
57 #include "langhooks.h"
58
59 /* Some of the aliasing is called very early, before this phase is
60 called. To assure that this is not a problem, we keep track of if
61 this phase has been run. */
62 static bool initialized = false;
63
64 /* Scratch bitmap for avoiding work. */
65 static bitmap been_there_done_that;
66 static bitmap bitmap_tmp;
67
68 /* There are two levels of escape that types can undergo.
69
70 EXPOSED_PARAMETER - some instance of the variable is
71 passed by value into an externally visible function or some
72 instance of the variable is passed out of an externally visible
73 function as a return value. In this case any of the fields of the
74 variable that are pointer types end up having their types marked as
75 FULL_ESCAPE.
76
77 FULL_ESCAPE - when bad things happen to good types. One of the
78 following things happens to the type: (a) either an instance of the
79 variable has its address passed to an externally visible function,
80 (b) the address is taken and some bad cast happens to the address
81 or (c) explicit arithmetic is done to the address.
82 */
83
84 enum escape_t
85 {
86 EXPOSED_PARAMETER,
87 FULL_ESCAPE
88 };
89
90 /* The following two bit vectors global_types_* correspond to
91 previous cases above. During the analysis phase, a bit is set in
92 one of these vectors if an operation of the offending class is
93 discovered to happen on the associated type. */
94
95 static bitmap global_types_exposed_parameter;
96 static bitmap global_types_full_escape;
97
98 /* All of the types seen in this compilation unit. */
99 static bitmap global_types_seen;
100 /* Reverse map to take a canon uid and map it to a canon type. Uid's
101 are never manipulated unless they are associated with a canon
102 type. */
103 static splay_tree uid_to_canon_type;
104
105 /* Internal structure of type mapping code. This maps a canon type
106 name to its canon type. */
107 static splay_tree all_canon_types;
108
109 /* Map from type clones to the single canon type. */
110 static splay_tree type_to_canon_type;
111
112 /* A splay tree of bitmaps. An element X in the splay tree has a bit
113 set in its bitmap at TYPE_UID (TYPE_MAIN_VARIANT (Y)) if there was
114 an operation in the program of the form "&X.Y". */
115 static splay_tree uid_to_addressof_down_map;
116
117 /* A splay tree of bitmaps. An element Y in the splay tree has a bit
118 set in its bitmap at TYPE_UID (TYPE_MAIN_VARIANT (X)) if there was
119 an operation in the program of the form "&X.Y". */
120 static splay_tree uid_to_addressof_up_map;
121
122 /* Tree to hold the subtype maps used to mark subtypes of escaped
123 types. */
124 static splay_tree uid_to_subtype_map;
125
126 /* Records tree nodes seen in cgraph_create_edges. Simply using
127 walk_tree_without_duplicates doesn't guarantee each node is visited
128 once because it gets a new htab upon each recursive call from
129 scan_for_refs. */
130 static struct pointer_set_t *visited_nodes;
131
132 /* Visited stmts by walk_use_def_chains function because it's called
133 recursively. */
134 static struct pointer_set_t *visited_stmts;
135
136 static bitmap_obstack ipa_obstack;
137
138 /* Static functions from this file that are used
139 before being defined. */
140 static unsigned int look_for_casts (tree);
141 static bool is_cast_from_non_pointer (tree, gimple, void *);
142
143 /* Get the name of TYPE or return the string "<UNNAMED>". */
144 static const char*
145 get_name_of_type (tree type)
146 {
147 tree name = TYPE_NAME (type);
148
149 if (!name)
150 /* Unnamed type, do what you like here. */
151 return "<UNNAMED>";
152
153 /* It will be a TYPE_DECL in the case of a typedef, otherwise, an
154 identifier_node */
155 if (TREE_CODE (name) == TYPE_DECL)
156 {
157 /* Each DECL has a DECL_NAME field which contains an
158 IDENTIFIER_NODE. (Some decls, most often labels, may have
159 zero as the DECL_NAME). */
160 if (DECL_NAME (name))
161 return IDENTIFIER_POINTER (DECL_NAME (name));
162 else
163 /* Unnamed type, do what you like here. */
164 return "<UNNAMED>";
165 }
166 else if (TREE_CODE (name) == IDENTIFIER_NODE)
167 return IDENTIFIER_POINTER (name);
168 else
169 return "<UNNAMED>";
170 }
171
172 struct type_brand_s
173 {
174 const char* name;
175 int seq;
176 };
177
178 /* Splay tree comparison function on type_brand_s structures. */
179
180 static int
181 compare_type_brand (splay_tree_key sk1, splay_tree_key sk2)
182 {
183 struct type_brand_s * k1 = (struct type_brand_s *) sk1;
184 struct type_brand_s * k2 = (struct type_brand_s *) sk2;
185
186 int value = strcmp(k1->name, k2->name);
187 if (value == 0)
188 return k2->seq - k1->seq;
189 else
190 return value;
191 }
192
193 /* All of the "unique_type" code is a hack to get around the sleazy
194 implementation used to compile more than file. Currently gcc does
195 not get rid of multiple instances of the same type that have been
196 collected from different compilation units. */
197 /* This is a trivial algorithm for removing duplicate types. This
198 would not work for any language that used structural equivalence as
199 the basis of its type system. */
200 /* Return TYPE if no type compatible with TYPE has been seen so far,
201 otherwise return a type compatible with TYPE that has already been
202 processed. */
203
204 static tree
205 discover_unique_type (tree type)
206 {
207 struct type_brand_s * brand = XNEW (struct type_brand_s);
208 int i = 0;
209 splay_tree_node result;
210
211 brand->name = get_name_of_type (type);
212
213 while (1)
214 {
215 brand->seq = i++;
216 result = splay_tree_lookup (all_canon_types, (splay_tree_key) brand);
217
218 if (result)
219 {
220 /* Create an alias since this is just the same as
221 other_type. */
222 tree other_type = (tree) result->value;
223 if (types_compatible_p (type, other_type))
224 {
225 free (brand);
226 /* Insert this new type as an alias for other_type. */
227 splay_tree_insert (type_to_canon_type,
228 (splay_tree_key) type,
229 (splay_tree_value) other_type);
230 return other_type;
231 }
232 /* Not compatible, look for next instance with same name. */
233 }
234 else
235 {
236 /* No more instances, create new one since this is the first
237 time we saw this type. */
238 brand->seq = i++;
239 /* Insert the new brand. */
240 splay_tree_insert (all_canon_types,
241 (splay_tree_key) brand,
242 (splay_tree_value) type);
243
244 /* Insert this new type as an alias for itself. */
245 splay_tree_insert (type_to_canon_type,
246 (splay_tree_key) type,
247 (splay_tree_value) type);
248
249 /* Insert the uid for reverse lookup; */
250 splay_tree_insert (uid_to_canon_type,
251 (splay_tree_key) TYPE_UID (type),
252 (splay_tree_value) type);
253
254 bitmap_set_bit (global_types_seen, TYPE_UID (type));
255 return type;
256 }
257 }
258 }
259
260 /* Return true if TYPE is one of the type classes that we are willing
261 to analyze. This skips the goofy types like arrays of pointers to
262 methods. */
263 static bool
264 type_to_consider (tree type)
265 {
266 /* Strip the *'s off. */
267 type = TYPE_MAIN_VARIANT (type);
268 while (POINTER_TYPE_P (type) || TREE_CODE (type) == ARRAY_TYPE)
269 type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
270
271 switch (TREE_CODE (type))
272 {
273 case BOOLEAN_TYPE:
274 case COMPLEX_TYPE:
275 case ENUMERAL_TYPE:
276 case INTEGER_TYPE:
277 case QUAL_UNION_TYPE:
278 case REAL_TYPE:
279 case FIXED_POINT_TYPE:
280 case RECORD_TYPE:
281 case UNION_TYPE:
282 case VECTOR_TYPE:
283 case VOID_TYPE:
284 return true;
285
286 default:
287 return false;
288 }
289 }
290
291 /* Get the canon type of TYPE. If SEE_THRU_PTRS is true, remove all
292 the POINTER_TOs and if SEE_THRU_ARRAYS is true, remove all of the
293 ARRAY_OFs and POINTER_TOs. */
294
295 static tree
296 get_canon_type (tree type, bool see_thru_ptrs, bool see_thru_arrays)
297 {
298 splay_tree_node result;
299 /* Strip the *'s off. */
300 if (!type || !type_to_consider (type))
301 return NULL;
302
303 type = TYPE_MAIN_VARIANT (type);
304 if (see_thru_arrays)
305 while (POINTER_TYPE_P (type) || TREE_CODE (type) == ARRAY_TYPE)
306 type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
307
308 else if (see_thru_ptrs)
309 while (POINTER_TYPE_P (type))
310 type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
311
312 result = splay_tree_lookup (type_to_canon_type, (splay_tree_key) type);
313
314 if (result == NULL)
315 return discover_unique_type (type);
316 else return (tree) result->value;
317 }
318
319 /* Same as GET_CANON_TYPE, except return the TYPE_ID rather than the
320 TYPE. */
321
322 static int
323 get_canon_type_uid (tree type, bool see_thru_ptrs, bool see_thru_arrays)
324 {
325 type = get_canon_type (type, see_thru_ptrs, see_thru_arrays);
326 if (type)
327 return TYPE_UID(type);
328 else return 0;
329 }
330
331 /* Return 0 if TYPE is a record or union type. Return a positive
332 number if TYPE is a pointer to a record or union. The number is
333 the number of pointer types stripped to get to the record or union
334 type. Return -1 if TYPE is none of the above. */
335
336 int
337 ipa_type_escape_star_count_of_interesting_type (tree type)
338 {
339 int count = 0;
340 /* Strip the *'s off. */
341 if (!type)
342 return -1;
343 type = TYPE_MAIN_VARIANT (type);
344 while (POINTER_TYPE_P (type))
345 {
346 type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
347 count++;
348 }
349
350 /* We are interested in records, and unions only. */
351 if (TREE_CODE (type) == RECORD_TYPE
352 || TREE_CODE (type) == QUAL_UNION_TYPE
353 || TREE_CODE (type) == UNION_TYPE)
354 return count;
355 else
356 return -1;
357 }
358
359
360 /* Return 0 if TYPE is a record or union type. Return a positive
361 number if TYPE is a pointer to a record or union. The number is
362 the number of pointer types stripped to get to the record or union
363 type. Return -1 if TYPE is none of the above. */
364
365 int
366 ipa_type_escape_star_count_of_interesting_or_array_type (tree type)
367 {
368 int count = 0;
369 /* Strip the *'s off. */
370 if (!type)
371 return -1;
372 type = TYPE_MAIN_VARIANT (type);
373 while (POINTER_TYPE_P (type) || TREE_CODE (type) == ARRAY_TYPE)
374 {
375 type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
376 count++;
377 }
378
379 /* We are interested in records, and unions only. */
380 if (TREE_CODE (type) == RECORD_TYPE
381 || TREE_CODE (type) == QUAL_UNION_TYPE
382 || TREE_CODE (type) == UNION_TYPE)
383 return count;
384 else
385 return -1;
386 }
387
388
389 /* Return true if the record, or union TYPE passed in escapes this
390 compilation unit. Note that all of the pointer-to's are removed
391 before testing since these may not be correct. */
392
393 bool
394 ipa_type_escape_type_contained_p (tree type)
395 {
396 if (!initialized)
397 return false;
398 return !bitmap_bit_p (global_types_full_escape,
399 get_canon_type_uid (type, true, false));
400 }
401
402 /* Return true if a modification to a field of type FIELD_TYPE cannot
403 clobber a record of RECORD_TYPE. */
404
405 bool
406 ipa_type_escape_field_does_not_clobber_p (tree record_type, tree field_type)
407 {
408 splay_tree_node result;
409 int uid;
410
411 if (!initialized)
412 return false;
413
414 /* Strip off all of the pointer tos on the record type. Strip the
415 same number of pointer tos from the field type. If the field
416 type has fewer, it could not have been aliased. */
417 record_type = TYPE_MAIN_VARIANT (record_type);
418 field_type = TYPE_MAIN_VARIANT (field_type);
419 while (POINTER_TYPE_P (record_type))
420 {
421 record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_type));
422 if (POINTER_TYPE_P (field_type))
423 field_type = TYPE_MAIN_VARIANT (TREE_TYPE (field_type));
424 else
425 /* However, if field_type is a union, this quick test is not
426 correct since one of the variants of the union may be a
427 pointer to type and we cannot see across that here. So we
428 just strip the remaining pointer tos off the record type
429 and fall thru to the more precise code. */
430 if (TREE_CODE (field_type) == QUAL_UNION_TYPE
431 || TREE_CODE (field_type) == UNION_TYPE)
432 {
433 while (POINTER_TYPE_P (record_type))
434 record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_type));
435 break;
436 }
437 else
438 return true;
439 }
440
441 record_type = get_canon_type (record_type, true, true);
442 /* The record type must be contained. The field type may
443 escape. */
444 if (!ipa_type_escape_type_contained_p (record_type))
445 return false;
446
447 uid = TYPE_UID (record_type);
448 result = splay_tree_lookup (uid_to_addressof_down_map, (splay_tree_key) uid);
449
450 if (result)
451 {
452 bitmap field_type_map = (bitmap) result->value;
453 uid = get_canon_type_uid (field_type, true, true);
454 /* If the bit is there, the address was taken. If not, it
455 wasn't. */
456 return !bitmap_bit_p (field_type_map, uid);
457 }
458 else
459 /* No bitmap means no addresses were taken. */
460 return true;
461 }
462
463
464 /* Add TYPE to the suspect type set. Return true if the bit needed to
465 be marked. */
466
467 static tree
468 mark_type (tree type, enum escape_t escape_status)
469 {
470 bitmap map = NULL;
471 int uid;
472
473 type = get_canon_type (type, true, true);
474 if (!type)
475 return NULL;
476
477 switch (escape_status)
478 {
479 case EXPOSED_PARAMETER:
480 map = global_types_exposed_parameter;
481 break;
482 case FULL_ESCAPE:
483 map = global_types_full_escape;
484 break;
485 }
486
487 uid = TYPE_UID (type);
488 if (bitmap_bit_p (map, uid))
489 return type;
490 else
491 {
492 bitmap_set_bit (map, uid);
493 if (escape_status == FULL_ESCAPE)
494 {
495 /* Efficiency hack. When things are bad, do not mess around
496 with this type anymore. */
497 bitmap_set_bit (global_types_exposed_parameter, uid);
498 }
499 }
500 return type;
501 }
502
503 /* Add interesting TYPE to the suspect type set. If the set is
504 EXPOSED_PARAMETER and the TYPE is a pointer type, the set is
505 changed to FULL_ESCAPE. */
506
507 static void
508 mark_interesting_type (tree type, enum escape_t escape_status)
509 {
510 if (!type) return;
511 if (ipa_type_escape_star_count_of_interesting_type (type) >= 0)
512 {
513 if ((escape_status == EXPOSED_PARAMETER)
514 && POINTER_TYPE_P (type))
515 /* EXPOSED_PARAMETERs are only structs or unions are passed by
516 value. Anything passed by reference to an external
517 function fully exposes the type. */
518 mark_type (type, FULL_ESCAPE);
519 else
520 mark_type (type, escape_status);
521 }
522 }
523
524 /* Return true if PARENT is supertype of CHILD. Both types must be
525 known to be structures or unions. */
526
527 static bool
528 parent_type_p (tree parent, tree child)
529 {
530 int i;
531 tree binfo, base_binfo;
532 if (TYPE_BINFO (parent))
533 for (binfo = TYPE_BINFO (parent), i = 0;
534 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
535 {
536 tree binfotype = BINFO_TYPE (base_binfo);
537 if (binfotype == child)
538 return true;
539 else if (parent_type_p (binfotype, child))
540 return true;
541 }
542 if (TREE_CODE (parent) == UNION_TYPE
543 || TREE_CODE (parent) == QUAL_UNION_TYPE)
544 {
545 tree field;
546 /* Search all of the variants in the union to see if one of them
547 is the child. */
548 for (field = TYPE_FIELDS (parent);
549 field;
550 field = TREE_CHAIN (field))
551 {
552 tree field_type;
553 if (TREE_CODE (field) != FIELD_DECL)
554 continue;
555
556 field_type = TREE_TYPE (field);
557 if (field_type == child)
558 return true;
559 }
560
561 /* If we did not find it, recursively ask the variants if one of
562 their children is the child type. */
563 for (field = TYPE_FIELDS (parent);
564 field;
565 field = TREE_CHAIN (field))
566 {
567 tree field_type;
568 if (TREE_CODE (field) != FIELD_DECL)
569 continue;
570
571 field_type = TREE_TYPE (field);
572 if (TREE_CODE (field_type) == RECORD_TYPE
573 || TREE_CODE (field_type) == QUAL_UNION_TYPE
574 || TREE_CODE (field_type) == UNION_TYPE)
575 if (parent_type_p (field_type, child))
576 return true;
577 }
578 }
579
580 if (TREE_CODE (parent) == RECORD_TYPE)
581 {
582 tree field;
583 for (field = TYPE_FIELDS (parent);
584 field;
585 field = TREE_CHAIN (field))
586 {
587 tree field_type;
588 if (TREE_CODE (field) != FIELD_DECL)
589 continue;
590
591 field_type = TREE_TYPE (field);
592 if (field_type == child)
593 return true;
594 /* You can only cast to the first field so if it does not
595 match, quit. */
596 if (TREE_CODE (field_type) == RECORD_TYPE
597 || TREE_CODE (field_type) == QUAL_UNION_TYPE
598 || TREE_CODE (field_type) == UNION_TYPE)
599 {
600 if (parent_type_p (field_type, child))
601 return true;
602 else
603 break;
604 }
605 }
606 }
607 return false;
608 }
609
610 /* Return the number of pointer tos for TYPE and return TYPE with all
611 of these stripped off. */
612
613 static int
614 count_stars (tree* type_ptr)
615 {
616 tree type = *type_ptr;
617 int i = 0;
618 type = TYPE_MAIN_VARIANT (type);
619 while (POINTER_TYPE_P (type))
620 {
621 type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
622 i++;
623 }
624
625 *type_ptr = type;
626 return i;
627 }
628
629 enum cast_type {
630 CT_UP = 0x1,
631 CT_DOWN = 0x2,
632 CT_SIDEWAYS = 0x4,
633 CT_USELESS = 0x8,
634 CT_FROM_P_BAD = 0x10,
635 CT_FROM_NON_P = 0x20,
636 CT_TO_NON_INTER = 0x40,
637 CT_FROM_MALLOC = 0x80,
638 CT_NO_CAST = 0x100
639 };
640
641 /* Check the cast FROM_TYPE to TO_TYPE. This function requires that
642 the two types have already passed the
643 ipa_type_escape_star_count_of_interesting_type test. */
644
645 static enum cast_type
646 check_cast_type (tree to_type, tree from_type)
647 {
648 int to_stars = count_stars (&to_type);
649 int from_stars = count_stars (&from_type);
650 if (to_stars != from_stars)
651 return CT_SIDEWAYS;
652
653 if (to_type == from_type)
654 return CT_USELESS;
655
656 if (parent_type_p (to_type, from_type)) return CT_UP;
657 if (parent_type_p (from_type, to_type)) return CT_DOWN;
658 return CT_SIDEWAYS;
659 }
660
661 /* This function returns nonzero if VAR is result of call
662 to malloc function. */
663
664 static bool
665 is_malloc_result (tree var)
666 {
667 gimple def_stmt;
668
669 if (!var)
670 return false;
671
672 if (SSA_NAME_IS_DEFAULT_DEF (var))
673 return false;
674
675 def_stmt = SSA_NAME_DEF_STMT (var);
676
677 if (!is_gimple_call (def_stmt))
678 return false;
679
680 if (var != gimple_call_lhs (def_stmt))
681 return false;
682
683 return ((gimple_call_flags (def_stmt) & ECF_MALLOC) != 0);
684
685 }
686
687 /* Check a cast FROM this variable, TO_TYPE. Mark the escaping types
688 if appropriate. Returns cast_type as detected. */
689
690 static enum cast_type
691 check_cast (tree to_type, tree from)
692 {
693 tree from_type = get_canon_type (TREE_TYPE (from), false, false);
694 bool to_interesting_type, from_interesting_type;
695 enum cast_type cast = CT_NO_CAST;
696
697 to_type = get_canon_type (to_type, false, false);
698 if (!from_type || !to_type || from_type == to_type)
699 return cast;
700
701 to_interesting_type =
702 ipa_type_escape_star_count_of_interesting_type (to_type) >= 0;
703 from_interesting_type =
704 ipa_type_escape_star_count_of_interesting_type (from_type) >= 0;
705
706 if (to_interesting_type)
707 if (from_interesting_type)
708 {
709 /* Both types are interesting. This can be one of four types
710 of cast: useless, up, down, or sideways. We do not care
711 about up or useless. Sideways casts are always bad and
712 both sides get marked as escaping. Downcasts are not
713 interesting here because if type is marked as escaping, all
714 of its subtypes escape. */
715 cast = check_cast_type (to_type, from_type);
716 switch (cast)
717 {
718 case CT_UP:
719 case CT_USELESS:
720 case CT_DOWN:
721 break;
722
723 case CT_SIDEWAYS:
724 mark_type (to_type, FULL_ESCAPE);
725 mark_type (from_type, FULL_ESCAPE);
726 break;
727
728 default:
729 break;
730 }
731 }
732 else
733 {
734 /* This code excludes two cases from marking as escaped:
735
736 1. if this is a cast of index of array of structures/unions
737 that happens before accessing array element, we should not
738 mark it as escaped.
739 2. if this is a cast from the local that is a result from a
740 call to malloc, do not mark the cast as bad.
741
742 */
743
744 if (POINTER_TYPE_P (to_type) && !POINTER_TYPE_P (from_type))
745 cast = CT_FROM_NON_P;
746 else if (TREE_CODE (from) == SSA_NAME
747 && is_malloc_result (from))
748 cast = CT_FROM_MALLOC;
749 else
750 {
751 cast = CT_FROM_P_BAD;
752 mark_type (to_type, FULL_ESCAPE);
753 }
754 }
755 else if (from_interesting_type)
756 {
757 mark_type (from_type, FULL_ESCAPE);
758 cast = CT_TO_NON_INTER;
759 }
760
761 return cast;
762 }
763
764
765 /* Scan assignment statement S to see if there are any casts within it. */
766
767 static unsigned int
768 look_for_casts_stmt (gimple s)
769 {
770 unsigned int cast = 0;
771
772 gcc_assert (is_gimple_assign (s));
773
774 if (gimple_assign_cast_p (s))
775 {
776 tree castfromvar = gimple_assign_rhs1 (s);
777 cast |= check_cast (TREE_TYPE (gimple_assign_lhs (s)), castfromvar);
778 }
779 else
780 {
781 size_t i;
782 for (i = 0; i < gimple_num_ops (s); i++)
783 cast |= look_for_casts (gimple_op (s, i));
784 }
785
786 if (!cast)
787 cast = CT_NO_CAST;
788
789 return cast;
790 }
791
792
793 typedef struct cast
794 {
795 int type;
796 gimple stmt;
797 } cast_t;
798
799 /* This function is a callback for walk_use_def_chains function called
800 from is_array_access_through_pointer_and_index. */
801
802 static bool
803 is_cast_from_non_pointer (tree var, gimple def_stmt, void *data)
804 {
805 if (!def_stmt || !var)
806 return false;
807
808 if (gimple_code (def_stmt) == GIMPLE_PHI)
809 return false;
810
811 if (SSA_NAME_IS_DEFAULT_DEF (var))
812 return false;
813
814 if (is_gimple_assign (def_stmt))
815 {
816 use_operand_p use_p;
817 ssa_op_iter iter;
818 unsigned int cast = look_for_casts_stmt (def_stmt);
819
820 /* Check that only one cast happened, and it's of non-pointer
821 type. */
822 if ((cast & CT_FROM_NON_P) == (CT_FROM_NON_P)
823 && (cast & ~(CT_FROM_NON_P)) == 0)
824 {
825 ((cast_t *)data)->stmt = def_stmt;
826 ((cast_t *)data)->type++;
827
828 FOR_EACH_SSA_USE_OPERAND (use_p, def_stmt, iter, SSA_OP_ALL_USES)
829 {
830 walk_use_def_chains (USE_FROM_PTR (use_p),
831 is_cast_from_non_pointer, data, false);
832 if (((cast_t*)data)->type == -1)
833 break;
834 }
835 }
836 /* Check that there is no cast, or cast is not harmful. */
837 else if ((cast & CT_NO_CAST) == (CT_NO_CAST)
838 || (cast & CT_DOWN) == (CT_DOWN)
839 || (cast & CT_UP) == (CT_UP)
840 || (cast & CT_USELESS) == (CT_USELESS)
841 || (cast & CT_FROM_MALLOC) == (CT_FROM_MALLOC))
842 {
843 FOR_EACH_SSA_USE_OPERAND (use_p, def_stmt, iter, SSA_OP_ALL_USES)
844 {
845 walk_use_def_chains (USE_FROM_PTR (use_p),
846 is_cast_from_non_pointer, data, false);
847 if (((cast_t*)data)->type == -1)
848 break;
849 }
850 }
851 /* The cast is harmful. */
852 else
853 ((cast_t *)data)->type = -1;
854 }
855
856 if (((cast_t*)data)->type == -1)
857 return true;
858
859 return false;
860 }
861
862 /* When array element a_p[i] is accessed through the pointer a_p
863 and index i, it's translated into the following sequence
864 in gimple:
865
866 i.1_5 = (unsigned int) i_1;
867 D.1605_6 = i.1_5 * 16;
868 D.1606_7 = (struct str_t *) D.1605_6;
869 a_p.2_8 = a_p;
870 D.1608_9 = D.1606_7 + a_p.2_8;
871
872 OP0 and OP1 are of the same pointer types and stand for
873 D.1606_7 and a_p.2_8 or vise versa.
874
875 This function checks that:
876
877 1. one of OP0 and OP1 (D.1606_7) has passed only one cast from
878 non-pointer type (D.1606_7 = (struct str_t *) D.1605_6;).
879
880 2. one of OP0 and OP1 which has passed the cast from
881 non-pointer type (D.1606_7), is actually generated by multiplication of
882 index by size of type to which both OP0 and OP1 point to
883 (in this case D.1605_6 = i.1_5 * 16; ).
884
885 3. an address of def of the var to which was made cast (D.1605_6)
886 was not taken.(How can it happen?)
887
888 The following items are checked implicitly by the end of algorithm:
889
890 4. one of OP0 and OP1 (a_p.2_8) have never been cast
891 (because if it was cast to pointer type, its type, that is also
892 the type of OP0 and OP1, will be marked as escaped during
893 analysis of casting stmt (when check_cast() is called
894 from scan_for_refs for this stmt)).
895
896 5. defs of OP0 and OP1 are not passed into externally visible function
897 (because if they are passed then their type, that is also the type of OP0
898 and OP1, will be marked and escaped during check_call function called from
899 scan_for_refs with call stmt).
900
901 In total, 1-5 guaranty that it's an access to array by pointer and index.
902
903 */
904
905 bool
906 is_array_access_through_pointer_and_index (enum tree_code code, tree op0,
907 tree op1, tree *base, tree *offset,
908 gimple *offset_cast_stmt)
909 {
910 tree before_cast;
911 gimple before_cast_def_stmt;
912 cast_t op0_cast, op1_cast;
913
914 *base = NULL;
915 *offset = NULL;
916 *offset_cast_stmt = NULL;
917
918 /* Check 1. */
919 if (code == POINTER_PLUS_EXPR)
920 {
921 tree op0type = TYPE_MAIN_VARIANT (TREE_TYPE (op0));
922 tree op1type = TYPE_MAIN_VARIANT (TREE_TYPE (op1));
923
924 /* One of op0 and op1 is of pointer type and the other is numerical. */
925 if (POINTER_TYPE_P (op0type) && NUMERICAL_TYPE_CHECK (op1type))
926 {
927 *base = op0;
928 *offset = op1;
929 }
930 else if (POINTER_TYPE_P (op1type) && NUMERICAL_TYPE_CHECK (op0type))
931 {
932 *base = op1;
933 *offset = op0;
934 }
935 else
936 return false;
937 }
938 else
939 {
940 /* Init data for walk_use_def_chains function. */
941 op0_cast.type = op1_cast.type = 0;
942 op0_cast.stmt = op1_cast.stmt = NULL;
943
944 visited_stmts = pointer_set_create ();
945 walk_use_def_chains (op0, is_cast_from_non_pointer,(void *)(&op0_cast),
946 false);
947 pointer_set_destroy (visited_stmts);
948
949 visited_stmts = pointer_set_create ();
950 walk_use_def_chains (op1, is_cast_from_non_pointer,(void *)(&op1_cast),
951 false);
952 pointer_set_destroy (visited_stmts);
953
954 if (op0_cast.type == 1 && op1_cast.type == 0)
955 {
956 *base = op1;
957 *offset = op0;
958 *offset_cast_stmt = op0_cast.stmt;
959 }
960 else if (op0_cast.type == 0 && op1_cast.type == 1)
961 {
962 *base = op0;
963 *offset = op1;
964 *offset_cast_stmt = op1_cast.stmt;
965 }
966 else
967 return false;
968 }
969
970 /* Check 2.
971 offset_cast_stmt is of the form:
972 D.1606_7 = (struct str_t *) D.1605_6; */
973
974 if (*offset_cast_stmt)
975 {
976 before_cast = SINGLE_SSA_TREE_OPERAND (*offset_cast_stmt, SSA_OP_USE);
977 if (!before_cast)
978 return false;
979
980 if (SSA_NAME_IS_DEFAULT_DEF (before_cast))
981 return false;
982
983 before_cast_def_stmt = SSA_NAME_DEF_STMT (before_cast);
984 if (!before_cast_def_stmt)
985 return false;
986 }
987 else
988 before_cast_def_stmt = SSA_NAME_DEF_STMT (*offset);
989
990 /* before_cast_def_stmt should be of the form:
991 D.1605_6 = i.1_5 * 16; */
992
993 if (is_gimple_assign (before_cast_def_stmt))
994 {
995 /* We expect temporary here. */
996 if (!is_gimple_reg (gimple_assign_lhs (before_cast_def_stmt)))
997 return false;
998
999 if (gimple_assign_rhs_code (before_cast_def_stmt) == MULT_EXPR)
1000 {
1001 tree arg0 = gimple_assign_rhs1 (before_cast_def_stmt);
1002 tree arg1 = gimple_assign_rhs2 (before_cast_def_stmt);
1003 tree unit_size =
1004 TYPE_SIZE_UNIT (TREE_TYPE (TYPE_MAIN_VARIANT (TREE_TYPE (op0))));
1005
1006 if (!(CONSTANT_CLASS_P (arg0)
1007 && simple_cst_equal (arg0, unit_size))
1008 && !(CONSTANT_CLASS_P (arg1)
1009 && simple_cst_equal (arg1, unit_size)))
1010 return false;
1011 }
1012 else
1013 return false;
1014 }
1015 else
1016 return false;
1017
1018 /* Check 3.
1019 check that address of D.1605_6 was not taken.
1020 FIXME: if D.1605_6 is gimple reg than it cannot be addressable. */
1021
1022 return true;
1023 }
1024
1025 /* Register the parameter and return types of function FN. The type
1026 ESCAPES if the function is visible outside of the compilation
1027 unit. */
1028 static void
1029 check_function_parameter_and_return_types (tree fn, bool escapes)
1030 {
1031 tree arg;
1032
1033 if (TYPE_ARG_TYPES (TREE_TYPE (fn)))
1034 {
1035 for (arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1036 arg && TREE_VALUE (arg) != void_type_node;
1037 arg = TREE_CHAIN (arg))
1038 {
1039 tree type = get_canon_type (TREE_VALUE (arg), false, false);
1040 if (escapes)
1041 mark_interesting_type (type, EXPOSED_PARAMETER);
1042 }
1043 }
1044 else
1045 {
1046 /* FIXME - According to Geoff Keating, we should never have to
1047 do this; the front ends should always process the arg list
1048 from the TYPE_ARG_LIST. However, Geoff is wrong, this code
1049 does seem to be live. */
1050
1051 for (arg = DECL_ARGUMENTS (fn); arg; arg = TREE_CHAIN (arg))
1052 {
1053 tree type = get_canon_type (TREE_TYPE (arg), false, false);
1054 if (escapes)
1055 mark_interesting_type (type, EXPOSED_PARAMETER);
1056 }
1057 }
1058 if (escapes)
1059 {
1060 tree type = get_canon_type (TREE_TYPE (TREE_TYPE (fn)), false, false);
1061 mark_interesting_type (type, EXPOSED_PARAMETER);
1062 }
1063 }
1064
1065 /* Return true if the variable T is the right kind of static variable to
1066 perform compilation unit scope escape analysis. */
1067
1068 static inline void
1069 has_proper_scope_for_analysis (tree t)
1070 {
1071 /* If the variable has the "used" attribute, treat it as if it had a
1072 been touched by the devil. */
1073 tree type = get_canon_type (TREE_TYPE (t), false, false);
1074 if (!type) return;
1075
1076 if (DECL_PRESERVE_P (t))
1077 {
1078 mark_interesting_type (type, FULL_ESCAPE);
1079 return;
1080 }
1081
1082 /* Do not want to do anything with volatile except mark any
1083 function that uses one to be not const or pure. */
1084 if (TREE_THIS_VOLATILE (t))
1085 return;
1086
1087 /* Do not care about a local automatic that is not static. */
1088 if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
1089 return;
1090
1091 if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
1092 {
1093 /* If the front end set the variable to be READONLY and
1094 constant, we can allow this variable in pure or const
1095 functions but the scope is too large for our analysis to set
1096 these bits ourselves. */
1097
1098 if (TREE_READONLY (t)
1099 && DECL_INITIAL (t)
1100 && is_gimple_min_invariant (DECL_INITIAL (t)))
1101 ; /* Read of a constant, do not change the function state. */
1102 else
1103 {
1104 /* The type escapes for all public and externs. */
1105 mark_interesting_type (type, FULL_ESCAPE);
1106 }
1107 }
1108 }
1109
1110 /* If T is a VAR_DECL for a static that we are interested in, add the
1111 uid to the bitmap. */
1112
1113 static void
1114 check_operand (tree t)
1115 {
1116 if (!t) return;
1117
1118 /* This is an assignment from a function, register the types as
1119 escaping. */
1120 if (TREE_CODE (t) == FUNCTION_DECL)
1121 check_function_parameter_and_return_types (t, true);
1122
1123 else if (TREE_CODE (t) == VAR_DECL)
1124 has_proper_scope_for_analysis (t);
1125 }
1126
1127 /* Examine tree T for references. */
1128
1129 static void
1130 check_tree (tree t)
1131 {
1132 /* We want to catch here also REALPART_EXPR and IMAGEPART_EXPR,
1133 but they already included in handled_component_p. */
1134 while (handled_component_p (t))
1135 {
1136 if (TREE_CODE (t) == ARRAY_REF)
1137 check_operand (TREE_OPERAND (t, 1));
1138 t = TREE_OPERAND (t, 0);
1139 }
1140
1141 if (INDIRECT_REF_P (t))
1142 /* || TREE_CODE (t) == MEM_REF) */
1143 check_tree (TREE_OPERAND (t, 0));
1144
1145 if (SSA_VAR_P (t) || (TREE_CODE (t) == FUNCTION_DECL))
1146 {
1147 check_operand (t);
1148 if (DECL_P (t) && DECL_INITIAL (t))
1149 check_tree (DECL_INITIAL (t));
1150 }
1151 }
1152
1153 /* Create an address_of edge FROM_TYPE.TO_TYPE. */
1154 static void
1155 mark_interesting_addressof (tree to_type, tree from_type)
1156 {
1157 int from_uid;
1158 int to_uid;
1159 bitmap type_map;
1160 splay_tree_node result;
1161
1162 from_type = get_canon_type (from_type, false, false);
1163 to_type = get_canon_type (to_type, false, false);
1164
1165 if (!from_type || !to_type)
1166 return;
1167
1168 from_uid = TYPE_UID (from_type);
1169 to_uid = TYPE_UID (to_type);
1170
1171 gcc_assert (ipa_type_escape_star_count_of_interesting_type (from_type) == 0);
1172
1173 /* Process the Y into X map pointer. */
1174 result = splay_tree_lookup (uid_to_addressof_down_map,
1175 (splay_tree_key) from_uid);
1176
1177 if (result)
1178 type_map = (bitmap) result->value;
1179 else
1180 {
1181 type_map = BITMAP_ALLOC (&ipa_obstack);
1182 splay_tree_insert (uid_to_addressof_down_map,
1183 from_uid,
1184 (splay_tree_value)type_map);
1185 }
1186 bitmap_set_bit (type_map, TYPE_UID (to_type));
1187
1188 /* Process the X into Y reverse map pointer. */
1189 result =
1190 splay_tree_lookup (uid_to_addressof_up_map, (splay_tree_key) to_uid);
1191
1192 if (result)
1193 type_map = (bitmap) result->value;
1194 else
1195 {
1196 type_map = BITMAP_ALLOC (&ipa_obstack);
1197 splay_tree_insert (uid_to_addressof_up_map,
1198 to_uid,
1199 (splay_tree_value)type_map);
1200 }
1201 bitmap_set_bit (type_map, TYPE_UID (from_type));
1202 }
1203
1204 /* Scan tree T to see if there are any addresses taken in within T. */
1205
1206 static void
1207 look_for_address_of (tree t)
1208 {
1209 if (TREE_CODE (t) == ADDR_EXPR)
1210 {
1211 tree x = get_base_var (t);
1212 tree cref = TREE_OPERAND (t, 0);
1213
1214 /* If we have an expression of the form "&a.b.c.d", mark a.b,
1215 b.c and c.d. as having its address taken. */
1216 tree fielddecl = NULL_TREE;
1217 while (cref!= x)
1218 {
1219 if (TREE_CODE (cref) == COMPONENT_REF)
1220 {
1221 fielddecl = TREE_OPERAND (cref, 1);
1222 mark_interesting_addressof (TREE_TYPE (fielddecl),
1223 DECL_FIELD_CONTEXT (fielddecl));
1224 }
1225 else if (TREE_CODE (cref) == ARRAY_REF)
1226 get_canon_type (TREE_TYPE (cref), false, false);
1227
1228 cref = TREE_OPERAND (cref, 0);
1229 }
1230
1231 if (TREE_CODE (x) == VAR_DECL)
1232 has_proper_scope_for_analysis (x);
1233 }
1234 }
1235
1236
1237 /* Scan tree T to see if there are any casts within it. */
1238
1239 static unsigned int
1240 look_for_casts (tree t)
1241 {
1242 unsigned int cast = 0;
1243
1244 if (is_gimple_cast (t) || TREE_CODE (t) == VIEW_CONVERT_EXPR)
1245 {
1246 tree castfromvar = TREE_OPERAND (t, 0);
1247 cast = cast | check_cast (TREE_TYPE (t), castfromvar);
1248 }
1249 else
1250 while (handled_component_p (t))
1251 {
1252 t = TREE_OPERAND (t, 0);
1253 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
1254 {
1255 /* This may be some part of a component ref.
1256 IE it may be a.b.VIEW_CONVERT_EXPR<weird_type>(c).d, AFAIK.
1257 castfromref will give you a.b.c, not a. */
1258 tree castfromref = TREE_OPERAND (t, 0);
1259 cast = cast | check_cast (TREE_TYPE (t), castfromref);
1260 }
1261 else if (TREE_CODE (t) == COMPONENT_REF)
1262 get_canon_type (TREE_TYPE (TREE_OPERAND (t, 1)), false, false);
1263 }
1264
1265 if (!cast)
1266 cast = CT_NO_CAST;
1267 return cast;
1268 }
1269
1270 /* Check to see if T is a read or address of operation on a static var
1271 we are interested in analyzing. */
1272
1273 static void
1274 check_rhs_var (tree t)
1275 {
1276 look_for_address_of (t);
1277 check_tree (t);
1278 }
1279
1280 /* Check to see if T is an assignment to a static var we are
1281 interested in analyzing. */
1282
1283 static void
1284 check_lhs_var (tree t)
1285 {
1286 check_tree (t);
1287 }
1288
1289 /* This is a scaled down version of get_asm_expr_operands from
1290 tree_ssa_operands.c. The version there runs much later and assumes
1291 that aliasing information is already available. Here we are just
1292 trying to find if the set of inputs and outputs contain references
1293 or address of operations to local. FN is the function being
1294 analyzed and STMT is the actual asm statement. */
1295
1296 static void
1297 check_asm (gimple stmt)
1298 {
1299 size_t i;
1300
1301 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
1302 check_lhs_var (gimple_asm_output_op (stmt, i));
1303
1304 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
1305 check_rhs_var (gimple_asm_input_op (stmt, i));
1306
1307 /* There is no code here to check for asm memory clobbers. The
1308 casual maintainer might think that such code would be necessary,
1309 but that appears to be wrong. In other parts of the compiler,
1310 the asm memory clobbers are assumed to only clobber variables
1311 that are addressable. All types with addressable instances are
1312 assumed to already escape. So, we are protected here. */
1313 }
1314
1315
1316 /* Check the parameters of function call to CALL to mark the
1317 types that pass across the function boundary. Also check to see if
1318 this is either an indirect call, a call outside the compilation
1319 unit. */
1320
1321 static void
1322 check_call (gimple call)
1323 {
1324 tree callee_t = gimple_call_fndecl (call);
1325 struct cgraph_node* callee;
1326 enum availability avail = AVAIL_NOT_AVAILABLE;
1327 size_t i;
1328
1329 for (i = 0; i < gimple_call_num_args (call); i++)
1330 check_rhs_var (gimple_call_arg (call, i));
1331
1332 if (callee_t)
1333 {
1334 tree arg_type;
1335 tree last_arg_type = NULL;
1336 callee = cgraph_node(callee_t);
1337 avail = cgraph_function_body_availability (callee);
1338
1339 /* Check that there are no implicit casts in the passing of
1340 parameters. */
1341 if (TYPE_ARG_TYPES (TREE_TYPE (callee_t)))
1342 {
1343 for (arg_type = TYPE_ARG_TYPES (TREE_TYPE (callee_t)), i = 0;
1344 arg_type && TREE_VALUE (arg_type) != void_type_node
1345 && i < gimple_call_num_args (call);
1346 arg_type = TREE_CHAIN (arg_type), i++)
1347 {
1348 tree operand = gimple_call_arg (call, i);
1349 if (operand)
1350 {
1351 last_arg_type = TREE_VALUE(arg_type);
1352 check_cast (last_arg_type, operand);
1353 }
1354 else
1355 /* The code reaches here for some unfortunate
1356 builtin functions that do not have a list of
1357 argument types. */
1358 break;
1359 }
1360 }
1361 else
1362 {
1363 /* FIXME - According to Geoff Keating, we should never
1364 have to do this; the front ends should always process
1365 the arg list from the TYPE_ARG_LIST. */
1366 for (arg_type = DECL_ARGUMENTS (callee_t), i = 0;
1367 arg_type && i < gimple_call_num_args (call);
1368 arg_type = TREE_CHAIN (arg_type), i++)
1369 {
1370 tree operand = gimple_call_arg (call, i);
1371 if (operand)
1372 {
1373 last_arg_type = TREE_TYPE (arg_type);
1374 check_cast (last_arg_type, operand);
1375 }
1376 else
1377 /* The code reaches here for some unfortunate
1378 builtin functions that do not have a list of
1379 argument types. */
1380 break;
1381 }
1382 }
1383
1384 /* In the case where we have a var_args function, we need to
1385 check the remaining parameters against the last argument. */
1386 arg_type = last_arg_type;
1387 for ( ; i < gimple_call_num_args (call); i++)
1388 {
1389 tree operand = gimple_call_arg (call, i);
1390 if (arg_type)
1391 check_cast (arg_type, operand);
1392 else
1393 {
1394 /* The code reaches here for some unfortunate
1395 builtin functions that do not have a list of
1396 argument types. Most of these functions have
1397 been marked as having their parameters not
1398 escape, but for the rest, the type is doomed. */
1399 tree type = get_canon_type (TREE_TYPE (operand), false, false);
1400 mark_interesting_type (type, FULL_ESCAPE);
1401 }
1402 }
1403 }
1404
1405 /* The callee is either unknown (indirect call) or there is just no
1406 scannable code for it (external call) . We look to see if there
1407 are any bits available for the callee (such as by declaration or
1408 because it is builtin) and process solely on the basis of those
1409 bits. */
1410 if (avail == AVAIL_NOT_AVAILABLE || avail == AVAIL_OVERWRITABLE)
1411 {
1412 /* If this is a direct call to an external function, mark all of
1413 the parameter and return types. */
1414 for (i = 0; i < gimple_call_num_args (call); i++)
1415 {
1416 tree operand = gimple_call_arg (call, i);
1417 tree type = get_canon_type (TREE_TYPE (operand), false, false);
1418 mark_interesting_type (type, EXPOSED_PARAMETER);
1419 }
1420
1421 if (callee_t)
1422 {
1423 tree type =
1424 get_canon_type (TREE_TYPE (TREE_TYPE (callee_t)), false, false);
1425 mark_interesting_type (type, EXPOSED_PARAMETER);
1426 }
1427 }
1428 }
1429
1430 /* CODE is the operation on OP0 and OP1. OP0 is the operand that we
1431 *know* is a pointer type. OP1 may be a pointer type. */
1432 static bool
1433 okay_pointer_operation (enum tree_code code, tree op0, tree op1)
1434 {
1435 tree op0type = TYPE_MAIN_VARIANT (TREE_TYPE (op0));
1436
1437 switch (code)
1438 {
1439 case MULT_EXPR:
1440 /* Multiplication does not change alignment. */
1441 return true;
1442 break;
1443 case MINUS_EXPR:
1444 case PLUS_EXPR:
1445 case POINTER_PLUS_EXPR:
1446 {
1447 tree base, offset;
1448 gimple offset_cast_stmt;
1449
1450 if (POINTER_TYPE_P (op0type)
1451 && TREE_CODE (op0) == SSA_NAME
1452 && TREE_CODE (op1) == SSA_NAME
1453 && is_array_access_through_pointer_and_index (code, op0, op1,
1454 &base,
1455 &offset,
1456 &offset_cast_stmt))
1457 return true;
1458 else
1459 {
1460 tree size_of_op0_points_to = TYPE_SIZE_UNIT (TREE_TYPE (op0type));
1461
1462 if (CONSTANT_CLASS_P (op1)
1463 && size_of_op0_points_to
1464 && multiple_of_p (TREE_TYPE (size_of_op0_points_to),
1465 op1, size_of_op0_points_to))
1466 return true;
1467
1468 if (CONSTANT_CLASS_P (op0)
1469 && size_of_op0_points_to
1470 && multiple_of_p (TREE_TYPE (size_of_op0_points_to),
1471 op0, size_of_op0_points_to))
1472 return true;
1473 }
1474 }
1475 break;
1476 default:
1477 return false;
1478 }
1479 return false;
1480 }
1481
1482
1483
1484 /* Helper for scan_for_refs. Check the operands of an assignment to
1485 mark types that may escape. */
1486
1487 static void
1488 check_assign (gimple t)
1489 {
1490 /* First look on the lhs and see what variable is stored to */
1491 check_lhs_var (gimple_assign_lhs (t));
1492
1493 /* For the purposes of figuring out what the cast affects */
1494
1495 /* Next check the operands on the rhs to see if they are ok. */
1496 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (t)))
1497 {
1498 case tcc_binary:
1499 {
1500 tree op0 = gimple_assign_rhs1 (t);
1501 tree type0 = get_canon_type (TREE_TYPE (op0), false, false);
1502 tree op1 = gimple_assign_rhs2 (t);
1503 tree type1 = get_canon_type (TREE_TYPE (op1), false, false);
1504
1505 /* If this is pointer arithmetic of any bad sort, then
1506 we need to mark the types as bad. For binary
1507 operations, no binary operator we currently support
1508 is always "safe" in regard to what it would do to
1509 pointers for purposes of determining which types
1510 escape, except operations of the size of the type.
1511 It is possible that min and max under the right set
1512 of circumstances and if the moon is in the correct
1513 place could be safe, but it is hard to see how this
1514 is worth the effort. */
1515 if (type0 && POINTER_TYPE_P (type0)
1516 && !okay_pointer_operation (gimple_assign_rhs_code (t), op0, op1))
1517 mark_interesting_type (type0, FULL_ESCAPE);
1518
1519 if (type1 && POINTER_TYPE_P (type1)
1520 && !okay_pointer_operation (gimple_assign_rhs_code (t), op1, op0))
1521 mark_interesting_type (type1, FULL_ESCAPE);
1522
1523 look_for_casts (op0);
1524 look_for_casts (op1);
1525 check_rhs_var (op0);
1526 check_rhs_var (op1);
1527 }
1528 break;
1529
1530 case tcc_unary:
1531 {
1532 tree op0 = gimple_assign_rhs1 (t);
1533 tree type0 = get_canon_type (TREE_TYPE (op0), false, false);
1534
1535 /* For unary operations, if the operation is NEGATE or ABS on
1536 a pointer, this is also considered pointer arithmetic and
1537 thus, bad for business. */
1538 if (type0
1539 && POINTER_TYPE_P (type0)
1540 && (TREE_CODE (op0) == NEGATE_EXPR
1541 || TREE_CODE (op0) == ABS_EXPR))
1542 mark_interesting_type (type0, FULL_ESCAPE);
1543
1544 check_rhs_var (op0);
1545 look_for_casts (op0);
1546 }
1547 break;
1548
1549 case tcc_reference:
1550 look_for_casts (gimple_assign_rhs1 (t));
1551 check_rhs_var (gimple_assign_rhs1 (t));
1552 break;
1553
1554 case tcc_declaration:
1555 check_rhs_var (gimple_assign_rhs1 (t));
1556 break;
1557
1558 case tcc_expression:
1559 if (gimple_assign_rhs_code (t) == ADDR_EXPR)
1560 {
1561 tree rhs = gimple_assign_rhs1 (t);
1562 look_for_casts (TREE_OPERAND (rhs, 0));
1563 check_rhs_var (rhs);
1564 }
1565 break;
1566
1567 default:
1568 break;
1569 }
1570 }
1571
1572
1573 /* Scan statement T for references to types and mark anything
1574 interesting. */
1575
1576 static void
1577 scan_for_refs (gimple t)
1578 {
1579 switch (gimple_code (t))
1580 {
1581 case GIMPLE_ASSIGN:
1582 check_assign (t);
1583 break;
1584
1585 case GIMPLE_CALL:
1586 /* If this is a call to malloc, squirrel away the result so we
1587 do mark the resulting cast as being bad. */
1588 check_call (t);
1589 break;
1590
1591 case GIMPLE_ASM:
1592 check_asm (t);
1593 break;
1594
1595 default:
1596 break;
1597 }
1598
1599 return;
1600 }
1601
1602
1603 /* The init routine for analyzing global static variable usage. See
1604 comments at top for description. */
1605 static void
1606 ipa_init (void)
1607 {
1608 bitmap_obstack_initialize (&ipa_obstack);
1609 global_types_exposed_parameter = BITMAP_ALLOC (&ipa_obstack);
1610 global_types_full_escape = BITMAP_ALLOC (&ipa_obstack);
1611 global_types_seen = BITMAP_ALLOC (&ipa_obstack);
1612
1613 uid_to_canon_type = splay_tree_new (splay_tree_compare_ints, 0, 0);
1614 all_canon_types = splay_tree_new (compare_type_brand, 0, 0);
1615 type_to_canon_type = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1616 uid_to_subtype_map = splay_tree_new (splay_tree_compare_ints, 0, 0);
1617 uid_to_addressof_down_map = splay_tree_new (splay_tree_compare_ints, 0, 0);
1618 uid_to_addressof_up_map = splay_tree_new (splay_tree_compare_ints, 0, 0);
1619
1620 /* There are some shared nodes, in particular the initializers on
1621 static declarations. We do not need to scan them more than once
1622 since all we would be interested in are the addressof
1623 operations. */
1624 visited_nodes = pointer_set_create ();
1625 initialized = true;
1626 }
1627
1628 /* Check out the rhs of a static or global initialization VNODE to see
1629 if any of them contain addressof operations. Note that some of
1630 these variables may not even be referenced in the code in this
1631 compilation unit but their right hand sides may contain references
1632 to variables defined within this unit. */
1633
1634 static void
1635 analyze_variable (struct varpool_node *vnode)
1636 {
1637 tree global = vnode->decl;
1638 tree type = get_canon_type (TREE_TYPE (global), false, false);
1639
1640 /* If this variable has exposure beyond the compilation unit, add
1641 its type to the global types. */
1642
1643 if (vnode->externally_visible)
1644 mark_interesting_type (type, FULL_ESCAPE);
1645
1646 gcc_assert (TREE_CODE (global) == VAR_DECL);
1647
1648 if (DECL_INITIAL (global))
1649 check_tree (DECL_INITIAL (global));
1650 }
1651
1652 /* This is the main routine for finding the reference patterns for
1653 global variables within a function FN. */
1654
1655 static void
1656 analyze_function (struct cgraph_node *fn)
1657 {
1658 tree decl = fn->decl;
1659 check_function_parameter_and_return_types (decl,
1660 fn->local.externally_visible);
1661 if (dump_file)
1662 fprintf (dump_file, "\n local analysis of %s", cgraph_node_name (fn));
1663
1664 {
1665 struct function *this_cfun = DECL_STRUCT_FUNCTION (decl);
1666 basic_block this_block;
1667
1668 FOR_EACH_BB_FN (this_block, this_cfun)
1669 {
1670 gimple_stmt_iterator gsi;
1671 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
1672 scan_for_refs (gsi_stmt (gsi));
1673 }
1674 }
1675
1676 /* There may be const decls with interesting right hand sides. */
1677 if (DECL_STRUCT_FUNCTION (decl))
1678 {
1679 tree step;
1680 for (step = DECL_STRUCT_FUNCTION (decl)->local_decls;
1681 step;
1682 step = TREE_CHAIN (step))
1683 {
1684 tree var = TREE_VALUE (step);
1685 if (TREE_CODE (var) == VAR_DECL
1686 && DECL_INITIAL (var)
1687 && !TREE_STATIC (var))
1688 check_tree (DECL_INITIAL (var));
1689 get_canon_type (TREE_TYPE (var), false, false);
1690 }
1691 }
1692 }
1693
1694 \f
1695
1696 /* Convert a type_UID into a type. */
1697 static tree
1698 type_for_uid (int uid)
1699 {
1700 splay_tree_node result =
1701 splay_tree_lookup (uid_to_canon_type, (splay_tree_key) uid);
1702
1703 if (result)
1704 return (tree) result->value;
1705 else return NULL;
1706 }
1707
1708 /* Return a bitmap with the subtypes of the type for UID. If it
1709 does not exist, return either NULL or a new bitmap depending on the
1710 value of CREATE. */
1711
1712 static bitmap
1713 subtype_map_for_uid (int uid, bool create)
1714 {
1715 splay_tree_node result = splay_tree_lookup (uid_to_subtype_map,
1716 (splay_tree_key) uid);
1717
1718 if (result)
1719 return (bitmap) result->value;
1720 else if (create)
1721 {
1722 bitmap subtype_map = BITMAP_ALLOC (&ipa_obstack);
1723 splay_tree_insert (uid_to_subtype_map,
1724 uid,
1725 (splay_tree_value)subtype_map);
1726 return subtype_map;
1727 }
1728 else return NULL;
1729 }
1730
1731 /* Mark all of the supertypes and field types of TYPE as being seen.
1732 Also accumulate the subtypes for each type so that
1733 close_types_full_escape can mark a subtype as escaping if the
1734 supertype escapes. */
1735
1736 static void
1737 close_type_seen (tree type)
1738 {
1739 tree field;
1740 int i, uid;
1741 tree binfo, base_binfo;
1742
1743 /* See thru all pointer tos and array ofs. */
1744 type = get_canon_type (type, true, true);
1745 if (!type)
1746 return;
1747
1748 uid = TYPE_UID (type);
1749
1750 if (bitmap_bit_p (been_there_done_that, uid))
1751 return;
1752 bitmap_set_bit (been_there_done_that, uid);
1753
1754 /* If we are doing a language with a type hierarchy, mark all of
1755 the superclasses. */
1756 if (TYPE_BINFO (type))
1757 for (binfo = TYPE_BINFO (type), i = 0;
1758 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1759 {
1760 tree binfo_type = BINFO_TYPE (base_binfo);
1761 bitmap subtype_map = subtype_map_for_uid
1762 (TYPE_UID (TYPE_MAIN_VARIANT (binfo_type)), true);
1763 bitmap_set_bit (subtype_map, uid);
1764 close_type_seen (get_canon_type (binfo_type, true, true));
1765 }
1766
1767 /* If the field is a struct or union type, mark all of the
1768 subfields. */
1769 for (field = TYPE_FIELDS (type);
1770 field;
1771 field = TREE_CHAIN (field))
1772 {
1773 tree field_type;
1774 if (TREE_CODE (field) != FIELD_DECL)
1775 continue;
1776
1777 field_type = TREE_TYPE (field);
1778 if (ipa_type_escape_star_count_of_interesting_or_array_type (field_type) >= 0)
1779 close_type_seen (get_canon_type (field_type, true, true));
1780 }
1781 }
1782
1783 /* Take a TYPE that has been passed by value to an external function
1784 and mark all of the fields that have pointer types as escaping. For
1785 any of the non pointer types that are structures or unions,
1786 recurse. TYPE is never a pointer type. */
1787
1788 static void
1789 close_type_exposed_parameter (tree type)
1790 {
1791 tree field;
1792 int uid;
1793
1794 type = get_canon_type (type, false, false);
1795 if (!type)
1796 return;
1797 uid = TYPE_UID (type);
1798 gcc_assert (!POINTER_TYPE_P (type));
1799
1800 if (bitmap_bit_p (been_there_done_that, uid))
1801 return;
1802 bitmap_set_bit (been_there_done_that, uid);
1803
1804 /* If the field is a struct or union type, mark all of the
1805 subfields. */
1806 for (field = TYPE_FIELDS (type);
1807 field;
1808 field = TREE_CHAIN (field))
1809 {
1810 tree field_type;
1811
1812 if (TREE_CODE (field) != FIELD_DECL)
1813 continue;
1814
1815 field_type = get_canon_type (TREE_TYPE (field), false, false);
1816 mark_interesting_type (field_type, EXPOSED_PARAMETER);
1817
1818 /* Only recurse for non pointer types of structures and unions. */
1819 if (ipa_type_escape_star_count_of_interesting_type (field_type) == 0)
1820 close_type_exposed_parameter (field_type);
1821 }
1822 }
1823
1824 /* The next function handles the case where a type fully escapes.
1825 This means that not only does the type itself escape,
1826
1827 a) the type of every field recursively escapes
1828 b) the type of every subtype escapes as well as the super as well
1829 as all of the pointer to types for each field.
1830
1831 Note that pointer to types are not marked as escaping. If the
1832 pointed to type escapes, the pointer to type also escapes.
1833
1834 Take a TYPE that has had the address taken for an instance of it
1835 and mark all of the types for its fields as having their addresses
1836 taken. */
1837
1838 static void
1839 close_type_full_escape (tree type)
1840 {
1841 tree field;
1842 unsigned int i;
1843 int uid;
1844 tree binfo, base_binfo;
1845 bitmap_iterator bi;
1846 bitmap subtype_map;
1847 splay_tree_node address_result;
1848
1849 /* Strip off any pointer or array types. */
1850 type = get_canon_type (type, true, true);
1851 if (!type)
1852 return;
1853 uid = TYPE_UID (type);
1854
1855 if (bitmap_bit_p (been_there_done_that, uid))
1856 return;
1857 bitmap_set_bit (been_there_done_that, uid);
1858
1859 subtype_map = subtype_map_for_uid (uid, false);
1860
1861 /* If we are doing a language with a type hierarchy, mark all of
1862 the superclasses. */
1863 if (TYPE_BINFO (type))
1864 for (binfo = TYPE_BINFO (type), i = 0;
1865 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1866 {
1867 tree binfotype = BINFO_TYPE (base_binfo);
1868 binfotype = mark_type (binfotype, FULL_ESCAPE);
1869 close_type_full_escape (binfotype);
1870 }
1871
1872 /* Mark as escaped any types that have been down casted to
1873 this type. */
1874 if (subtype_map)
1875 EXECUTE_IF_SET_IN_BITMAP (subtype_map, 0, i, bi)
1876 {
1877 tree subtype = type_for_uid (i);
1878 subtype = mark_type (subtype, FULL_ESCAPE);
1879 close_type_full_escape (subtype);
1880 }
1881
1882 /* If the field is a struct or union type, mark all of the
1883 subfields. */
1884 for (field = TYPE_FIELDS (type);
1885 field;
1886 field = TREE_CHAIN (field))
1887 {
1888 tree field_type;
1889 if (TREE_CODE (field) != FIELD_DECL)
1890 continue;
1891
1892 field_type = TREE_TYPE (field);
1893 if (ipa_type_escape_star_count_of_interesting_or_array_type (field_type) >= 0)
1894 {
1895 field_type = mark_type (field_type, FULL_ESCAPE);
1896 close_type_full_escape (field_type);
1897 }
1898 }
1899
1900 /* For all of the types A that contain this type B and were part of
1901 an expression like "&...A.B...", mark the A's as escaping. */
1902 address_result = splay_tree_lookup (uid_to_addressof_up_map,
1903 (splay_tree_key) uid);
1904 if (address_result)
1905 {
1906 bitmap containing_classes = (bitmap) address_result->value;
1907 EXECUTE_IF_SET_IN_BITMAP (containing_classes, 0, i, bi)
1908 {
1909 close_type_full_escape (type_for_uid (i));
1910 }
1911 }
1912 }
1913
1914 /* Transitively close the addressof bitmap for the type with UID.
1915 This means that if we had a.b and b.c, a would have both b and c in
1916 its maps. */
1917
1918 static bitmap
1919 close_addressof_down (int uid)
1920 {
1921 bitmap_iterator bi;
1922 splay_tree_node result =
1923 splay_tree_lookup (uid_to_addressof_down_map, (splay_tree_key) uid);
1924 bitmap map = NULL;
1925 bitmap new_map;
1926 unsigned int i;
1927
1928 if (result)
1929 map = (bitmap) result->value;
1930 else
1931 return NULL;
1932
1933 if (bitmap_bit_p (been_there_done_that, uid))
1934 return map;
1935 bitmap_set_bit (been_there_done_that, uid);
1936
1937 /* If the type escapes, get rid of the addressof map, it will not be
1938 needed. */
1939 if (bitmap_bit_p (global_types_full_escape, uid))
1940 {
1941 BITMAP_FREE (map);
1942 splay_tree_remove (uid_to_addressof_down_map, (splay_tree_key) uid);
1943 return NULL;
1944 }
1945
1946 /* The new_map will have all of the bits for the enclosed fields and
1947 will have the unique id version of the old map. */
1948 new_map = BITMAP_ALLOC (&ipa_obstack);
1949
1950 EXECUTE_IF_SET_IN_BITMAP (map, 0, i, bi)
1951 {
1952 bitmap submap = close_addressof_down (i);
1953 bitmap_set_bit (new_map, i);
1954 if (submap)
1955 bitmap_ior_into (new_map, submap);
1956 }
1957 result->value = (splay_tree_value) new_map;
1958
1959 BITMAP_FREE (map);
1960 return new_map;
1961 }
1962
1963 \f
1964 /* The main entry point for type escape analysis. */
1965
1966 static unsigned int
1967 type_escape_execute (void)
1968 {
1969 struct cgraph_node *node;
1970 struct varpool_node *vnode;
1971 unsigned int i;
1972 bitmap_iterator bi;
1973 splay_tree_node result;
1974
1975 ipa_init ();
1976
1977 /* Process all of the variables first. */
1978 FOR_EACH_STATIC_VARIABLE (vnode)
1979 analyze_variable (vnode);
1980
1981 /* Process all of the functions next.
1982
1983 We do not want to process any of the clones so we check that this
1984 is a master clone. However, we do need to process any
1985 AVAIL_OVERWRITABLE functions (these are never clones) because
1986 they may cause a type variable to escape.
1987 */
1988 for (node = cgraph_nodes; node; node = node->next)
1989 if (node->analyzed && !node->clone_of)
1990 analyze_function (node);
1991
1992
1993 pointer_set_destroy (visited_nodes);
1994 visited_nodes = NULL;
1995
1996 /* Do all of the closures to discover which types escape the
1997 compilation unit. */
1998
1999 been_there_done_that = BITMAP_ALLOC (&ipa_obstack);
2000 bitmap_tmp = BITMAP_ALLOC (&ipa_obstack);
2001
2002 /* Examine the types that we have directly seen in scanning the code
2003 and add to that any contained types or superclasses. */
2004
2005 bitmap_copy (bitmap_tmp, global_types_seen);
2006 EXECUTE_IF_SET_IN_BITMAP (bitmap_tmp, 0, i, bi)
2007 {
2008 tree type = type_for_uid (i);
2009 /* Only look at records and unions and pointer tos. */
2010 if (ipa_type_escape_star_count_of_interesting_or_array_type (type) >= 0)
2011 close_type_seen (type);
2012 }
2013 bitmap_clear (been_there_done_that);
2014
2015 /* Examine all of the types passed by value and mark any enclosed
2016 pointer types as escaping. */
2017 bitmap_copy (bitmap_tmp, global_types_exposed_parameter);
2018 EXECUTE_IF_SET_IN_BITMAP (bitmap_tmp, 0, i, bi)
2019 {
2020 close_type_exposed_parameter (type_for_uid (i));
2021 }
2022 bitmap_clear (been_there_done_that);
2023
2024 /* Close the types for escape. If something escapes, then any
2025 enclosed types escape as well as any subtypes. */
2026 bitmap_copy (bitmap_tmp, global_types_full_escape);
2027 EXECUTE_IF_SET_IN_BITMAP (bitmap_tmp, 0, i, bi)
2028 {
2029 close_type_full_escape (type_for_uid (i));
2030 }
2031 bitmap_clear (been_there_done_that);
2032
2033 /* Before this pass, the uid_to_addressof_down_map for type X
2034 contained an entry for Y if there had been an operation of the
2035 form &X.Y. This step adds all of the fields contained within Y
2036 (recursively) to X's map. */
2037
2038 result = splay_tree_min (uid_to_addressof_down_map);
2039 while (result)
2040 {
2041 int uid = result->key;
2042 /* Close the addressof map, i.e. copy all of the transitive
2043 substructures up to this level. */
2044 close_addressof_down (uid);
2045 result = splay_tree_successor (uid_to_addressof_down_map, uid);
2046 }
2047
2048 /* Do not need the array types and pointer types in the persistent
2049 data structures. */
2050 result = splay_tree_min (all_canon_types);
2051 while (result)
2052 {
2053 tree type = (tree) result->value;
2054 tree key = (tree) result->key;
2055 if (POINTER_TYPE_P (type)
2056 || TREE_CODE (type) == ARRAY_TYPE)
2057 {
2058 splay_tree_remove (all_canon_types, (splay_tree_key) result->key);
2059 splay_tree_remove (type_to_canon_type, (splay_tree_key) type);
2060 splay_tree_remove (uid_to_canon_type, (splay_tree_key) TYPE_UID (type));
2061 bitmap_clear_bit (global_types_seen, TYPE_UID (type));
2062 }
2063 result = splay_tree_successor (all_canon_types, (splay_tree_key) key);
2064 }
2065
2066 if (dump_file)
2067 {
2068 EXECUTE_IF_SET_IN_BITMAP (global_types_seen, 0, i, bi)
2069 {
2070 /* The pointer types are in the global_types_full_escape
2071 bitmap but not in the backwards map. They also contain
2072 no useful information since they are not marked. */
2073 tree type = type_for_uid (i);
2074 fprintf(dump_file, "type %d ", i);
2075 print_generic_expr (dump_file, type, 0);
2076 if (bitmap_bit_p (global_types_full_escape, i))
2077 fprintf(dump_file, " escaped\n");
2078 else
2079 fprintf(dump_file, " contained\n");
2080 }
2081 }
2082
2083 /* Get rid of uid_to_addressof_up_map and its bitmaps. */
2084 result = splay_tree_min (uid_to_addressof_up_map);
2085 while (result)
2086 {
2087 int uid = (int)result->key;
2088 bitmap bm = (bitmap)result->value;
2089
2090 BITMAP_FREE (bm);
2091 splay_tree_remove (uid_to_addressof_up_map, (splay_tree_key) uid);
2092 result = splay_tree_successor (uid_to_addressof_up_map, uid);
2093 }
2094
2095 /* Get rid of the subtype map. */
2096 result = splay_tree_min (uid_to_subtype_map);
2097 while (result)
2098 {
2099 bitmap b = (bitmap)result->value;
2100 BITMAP_FREE(b);
2101 splay_tree_remove (uid_to_subtype_map, result->key);
2102 result = splay_tree_min (uid_to_subtype_map);
2103 }
2104 splay_tree_delete (uid_to_subtype_map);
2105 uid_to_subtype_map = NULL;
2106
2107 BITMAP_FREE (global_types_exposed_parameter);
2108 BITMAP_FREE (been_there_done_that);
2109 BITMAP_FREE (bitmap_tmp);
2110 return 0;
2111 }
2112
2113 static bool
2114 gate_type_escape_vars (void)
2115 {
2116 return flag_ipa_struct_reorg && flag_whole_program && (optimize > 0);
2117 }
2118
2119 struct simple_ipa_opt_pass pass_ipa_type_escape =
2120 {
2121 {
2122 SIMPLE_IPA_PASS,
2123 "type-escape-var", /* name */
2124 gate_type_escape_vars, /* gate */
2125 type_escape_execute, /* execute */
2126 NULL, /* sub */
2127 NULL, /* next */
2128 0, /* static_pass_number */
2129 TV_IPA_TYPE_ESCAPE, /* tv_id */
2130 0, /* properties_required */
2131 0, /* properties_provided */
2132 0, /* properties_destroyed */
2133 0, /* todo_flags_start */
2134 0 /* todo_flags_finish */
2135 }
2136 };