ipa-reference.c (analyze_function): Declare step only if ENABLE_CHECKING is defined.
1 /* Callgraph based analysis of static variables.
2 Copyright (C) 2004, 2005, 2007, 2008 Free Software Foundation, Inc.
3 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This file gathers information about how variables whose scope is
22 confined to the compilation unit are used.
23
24 There are two categories of information produced by this pass:
25
26 1) The addressable (TREE_ADDRESSABLE) bit and readonly
 27    (TREE_READONLY) bit associated with these variables are properly set
 28    based on scanning all of the code within the compilation unit.
29
30 2) The transitive call site specific clobber effects are computed
31 for the variables whose scope is contained within this compilation
32 unit.
33
34 First each function and static variable initialization is analyzed
35 to determine which local static variables are either read, written,
36 or have their address taken. Any local static that has its address
37 taken is removed from consideration. Once the local read and
38 writes are determined, a transitive closure of this information is
39 performed over the call graph to determine the worst case set of
40 side effects of each call. In later parts of the compiler, these
41 local and global sets are examined to make the call clobbering less
42 traumatic, promote some statics to registers, and improve aliasing
43 information.
44
 45    This pass must currently be run after inlining decisions have been made,
 46    since otherwise the local sets will not contain information that is
 47    consistent with the post-inlining state.  The global sets are not prone
 48    to this problem since they are by definition transitive.  */
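
/* For illustration only (the names below are hypothetical and not part
   of this pass), given a compilation unit such as

      static int counter;
      static int get_count (void) { return counter; }
      static void bump (void)     { counter++; }
      void api_entry (void)       { bump (); }

   the local sets record that get_count reads COUNTER and bump writes it,
   while the transitive (global) sets additionally record that api_entry
   may write COUNTER because it calls bump.  Had any function taken
   &counter, COUNTER would have been dropped from consideration.  */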
49
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "tree.h"
55 #include "tree-flow.h"
56 #include "tree-inline.h"
57 #include "tree-pass.h"
58 #include "langhooks.h"
59 #include "pointer-set.h"
60 #include "ggc.h"
61 #include "ipa-utils.h"
62 #include "ipa-reference.h"
63 #include "c-common.h"
64 #include "gimple.h"
65 #include "cgraph.h"
66 #include "output.h"
67 #include "flags.h"
68 #include "timevar.h"
69 #include "diagnostic.h"
70 #include "langhooks.h"
71
72 /* This splay tree contains all of the static variables that are
73 being considered by the compilation level alias analysis. For
74 module_at_a_time compilation, this is the set of static but not
75 public variables. Any variables that either have their address
76 taken or participate in otherwise unsavory operations are deleted
77 from this list. */
78 static GTY((param1_is(int), param2_is(tree)))
79 splay_tree reference_vars_to_consider;
80
81 /* This bitmap is used to knock out the module static variables whose
82 addresses have been taken and passed around. */
83 static bitmap module_statics_escape;
84
85 /* This bitmap is used to knock out the module static variables that
86 are not readonly. */
87 static bitmap module_statics_written;
88
89 /* A bit is set for every module static we are considering. This is
 90    ORed into the local info when asm code is found that clobbers all
91 memory. */
92 static bitmap all_module_statics;
93
94 static struct pointer_set_t *visited_nodes;
95
96 /* Obstack holding bitmaps of local analysis (live from analysis to
97 propagation) */
98 static bitmap_obstack local_info_obstack;
99 /* Obstack holding global analysis live forever. */
100 static bitmap_obstack global_info_obstack;
101
102 /* Holders of ipa cgraph hooks: */
103 static struct cgraph_node_hook_list *function_insertion_hook_holder;
104
105 enum initialization_status_t
106 {
107 UNINITIALIZED,
108 RUNNING,
109 FINISHED
110 };
111
112 tree memory_identifier_string;
113
114 /* Return the ipa_reference_vars structure starting from the cgraph NODE. */
115 static inline ipa_reference_vars_info_t
116 get_reference_vars_info_from_cgraph (struct cgraph_node * node)
117 {
118 return get_function_ann (node->decl)->reference_vars_info;
119 }
120
 121 /* Return the local reference-vars info for function FN, which records
 122    the static variables referenced locally.  */
123 static ipa_reference_local_vars_info_t
124 get_local_reference_vars_info (tree fn)
125 {
126 ipa_reference_vars_info_t info = get_function_ann (fn)->reference_vars_info;
127
128 if (info)
129 return info->local;
130 else
131 /* This phase was not run. */
132 return NULL;
133 }
134
 135 /* Return the global reference-vars info for function FN, which records
 136    the static variables referenced transitively.  */
137
138 static ipa_reference_global_vars_info_t
139 get_global_reference_vars_info (tree fn)
140 {
141 ipa_reference_vars_info_t info = get_function_ann (fn)->reference_vars_info;
142
143 if (info)
144 return info->global;
145 else
146 /* This phase was not run. */
147 return NULL;
148 }
149
150 /* Return a bitmap indexed by VAR_DECL uid for the static variables
151 that may be read locally by the execution of the function fn.
152 Returns NULL if no data is available. */
153
154 bitmap
155 ipa_reference_get_read_local (tree fn)
156 {
157 ipa_reference_local_vars_info_t l = get_local_reference_vars_info (fn);
158 if (l)
159 return l->statics_read;
160 else
161 return NULL;
162 }
163
164 /* Return a bitmap indexed by VAR_DECL uid for the static variables
165 that may be written locally by the execution of the function fn.
166 Returns NULL if no data is available. */
167
168 bitmap
169 ipa_reference_get_written_local (tree fn)
170 {
171 ipa_reference_local_vars_info_t l = get_local_reference_vars_info (fn);
172 if (l)
173 return l->statics_written;
174 else
175 return NULL;
176 }
177
178 /* Return a bitmap indexed by VAR_DECL uid for the static variables
179 that are read during the execution of the function FN. Returns
180 NULL if no data is available. */
181
182 bitmap
183 ipa_reference_get_read_global (tree fn)
184 {
185 ipa_reference_global_vars_info_t g = get_global_reference_vars_info (fn);
186 if (g)
187 return g->statics_read;
188 else
189 return NULL;
190 }
191
192 /* Return a bitmap indexed by VAR_DECL uid for the static variables
193 that are written during the execution of the function FN. Note
194 that variables written may or may not be read during the function
195 call. Returns NULL if no data is available. */
196
197 bitmap
198 ipa_reference_get_written_global (tree fn)
199 {
200 ipa_reference_global_vars_info_t g = get_global_reference_vars_info (fn);
201 if (g)
202 return g->statics_written;
203 else
204 return NULL;
205 }
206
 207 /* Return a bitmap indexed by DECL_UID for the static variables
208 that are not read during the execution of the function FN. Returns
209 NULL if no data is available. */
210
211 bitmap
212 ipa_reference_get_not_read_global (tree fn)
213 {
214 ipa_reference_global_vars_info_t g = get_global_reference_vars_info (fn);
215 if (g)
216 return g->statics_not_read;
217 else
218 return NULL;
219 }
220
 221 /* Return a bitmap indexed by DECL_UID for the static variables
222 that are not written during the execution of the function FN. Note
223 that variables written may or may not be read during the function
224 call. Returns NULL if no data is available. */
225
226 bitmap
227 ipa_reference_get_not_written_global (tree fn)
228 {
229 ipa_reference_global_vars_info_t g = get_global_reference_vars_info (fn);
230 if (g)
231 return g->statics_not_written;
232 else
233 return NULL;
234 }
235
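/* A small usage sketch of the accessors above; the callee FNDECL, the
   variable VAR, and the surrounding code are hypothetical and not part
   of this file:

      bitmap not_read = ipa_reference_get_not_read_global (fndecl);
      bool var_not_read
        = not_read && bitmap_bit_p (not_read, DECL_UID (var));

   A NULL return means no data is available and the caller must assume
   the worst.  */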
236 \f
237
 238 /* Add VAR to all_module_statics and record it in the
 239    reference_vars_to_consider splay tree.  */
240
241 static inline void
242 add_static_var (tree var)
243 {
244 int uid = DECL_UID (var);
245 gcc_assert (TREE_CODE (var) == VAR_DECL);
246 if (!bitmap_bit_p (all_module_statics, uid))
247 {
248 splay_tree_insert (reference_vars_to_consider,
249 uid, (splay_tree_value)var);
250 bitmap_set_bit (all_module_statics, uid);
251 }
252 }
253
 254 /* Return true if the variable T is the right kind of static variable for
 255    compilation-unit-scope escape analysis.  */
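/* For example (illustrative only), a file-scope "static int x;" passes
   the checks below, while a volatile static, an extern or TREE_PUBLIC
   variable, or a static carrying __attribute__ ((used)) does not.  */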
256
257 static inline bool
258 has_proper_scope_for_analysis (tree t)
259 {
 260   /* If the variable has the "used" attribute, treat it as if it had
 261      been touched by the devil.  */
262 if (lookup_attribute ("used", DECL_ATTRIBUTES (t)))
263 return false;
264
265 /* Do not want to do anything with volatile except mark any
266 function that uses one to be not const or pure. */
267 if (TREE_THIS_VOLATILE (t))
268 return false;
269
270 /* Do not care about a local automatic that is not static. */
271 if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
272 return false;
273
274 if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
275 return false;
276
277 /* We cannot touch decls where the type needs constructing. */
278 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
279 return false;
280
281 /* This is a variable we care about. Check if we have seen it
 282      before, and if not add it to the set of variables we care about.  */
283 if (!bitmap_bit_p (all_module_statics, DECL_UID (t)))
284 add_static_var (t);
285
286 return true;
287 }
288
 289 /* Mark tree X as having its address taken.  */
290
291 static void
292 mark_address_taken (tree x)
293 {
294 if (TREE_CODE (x) == VAR_DECL
295 && module_statics_escape && has_proper_scope_for_analysis (x))
296 bitmap_set_bit (module_statics_escape, DECL_UID (x));
297 }
298
299 /* Mark load of T. */
300
301 static void
302 mark_load (ipa_reference_local_vars_info_t local,
303 tree t)
304 {
305 if (TREE_CODE (t) == VAR_DECL
306 && has_proper_scope_for_analysis (t))
307 bitmap_set_bit (local->statics_read, DECL_UID (t));
308 }
309
310 /* Mark store of T. */
311
312 static void
313 mark_store (ipa_reference_local_vars_info_t local,
314 tree t)
315 {
316 if (TREE_CODE (t) == VAR_DECL
317 && has_proper_scope_for_analysis (t))
318 {
319 if (local)
320 bitmap_set_bit (local->statics_written, DECL_UID (t));
321 /* Mark the write so we can tell which statics are
322 readonly. */
323 if (module_statics_written)
324 bitmap_set_bit (module_statics_written, DECL_UID (t));
325 }
326 }
327
328 /* Look for memory clobber and set read_all/write_all if present. */
329
330 static void
331 check_asm_memory_clobber (ipa_reference_local_vars_info_t local, gimple stmt)
332 {
333 size_t i;
334 tree op;
335
336 for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
337 {
338 op = gimple_asm_clobber_op (stmt, i);
339 if (simple_cst_equal(TREE_VALUE (op), memory_identifier_string) == 1)
340 {
341 /* Abandon all hope, ye who enter here. */
342 local->calls_read_all = true;
343 local->calls_write_all = true;
344 }
345 }
346 }
347
348 /* Look for external calls and set read_all/write_all correspondingly. */
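/* As a hedged sketch of the classification done below (the callees are
   hypothetical): a call to a function whose body is unavailable and that
   is declared __attribute__ ((const)) sets neither flag, one declared
   __attribute__ ((pure)) sets only calls_read_all, and any other
   unavailable callee sets both calls_read_all and calls_write_all.  */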
349
350 static void
351 check_call (ipa_reference_local_vars_info_t local, gimple stmt)
352 {
353 int flags = gimple_call_flags (stmt);
354 tree callee_t = gimple_call_fndecl (stmt);
355 enum availability avail = AVAIL_NOT_AVAILABLE;
356
357 if (callee_t)
358 {
359 struct cgraph_node* callee = cgraph_node(callee_t);
360 avail = cgraph_function_body_availability (callee);
361 }
362
363 if (avail == AVAIL_NOT_AVAILABLE || avail == AVAIL_OVERWRITABLE)
364 if (local)
365 {
366 if (flags & ECF_CONST)
367 ;
368 else if (flags & ECF_PURE)
369 local->calls_read_all = true;
370 else
371 {
372 local->calls_read_all = true;
373 local->calls_write_all = true;
374 }
375 }
376 /* TODO: To be able to produce sane results, we should also handle
377 common builtins, in particular throw.
 378    Indirect calls should only be counted and, as the inliner replaces them
 379    by direct calls, we can conclude whether any indirect calls are left in the body.  */
380 }
381
 382 /* Scan the statement pointed to by GSIP for references to static
 383    variables, recording them in the local info of FN, the cgraph_node
 384    of the function being scanned.  */
385
386 static tree
387 scan_stmt_for_static_refs (gimple_stmt_iterator *gsip,
388 struct cgraph_node *fn)
389 {
390 gimple stmt = gsi_stmt (*gsip);
391 ipa_reference_local_vars_info_t local = NULL;
392 unsigned int i;
393 bitmap_iterator bi;
394
395 if (fn)
396 local = get_reference_vars_info_from_cgraph (fn)->local;
397
398 if (gimple_loaded_syms (stmt))
399 EXECUTE_IF_SET_IN_BITMAP (gimple_loaded_syms (stmt), 0, i, bi)
400 mark_load (local, referenced_var_lookup (i));
401 if (gimple_stored_syms (stmt))
402 EXECUTE_IF_SET_IN_BITMAP (gimple_stored_syms (stmt), 0, i, bi)
403 mark_store (local, referenced_var_lookup (i));
404 if (gimple_addresses_taken (stmt))
405 EXECUTE_IF_SET_IN_BITMAP (gimple_addresses_taken (stmt), 0, i, bi)
406 mark_address_taken (referenced_var_lookup (i));
407
408 switch (gimple_code (stmt))
409 {
410 case GIMPLE_CALL:
411 check_call (local, stmt);
412 break;
413
414 case GIMPLE_ASM:
415 check_asm_memory_clobber (local, stmt);
416 break;
417
 418       /* We used to check nonlocal labels here and set them as potentially modifying
 419          everything.  This is not needed, since we can reach a nonlocal label only
 420          from a callee and thus the info will be propagated to us.  */
421
422 default:
423 break;
424 }
425
426 return NULL;
427 }
428
429 /* Call-back to scan variable initializers for static references.
430 Called using walk_tree. */
431
432 static tree
433 scan_initializer_for_static_refs (tree *tp, int *walk_subtrees,
434 void *data ATTRIBUTE_UNUSED)
435 {
436 tree t = *tp;
437
438 if (TREE_CODE (t) == ADDR_EXPR)
439 {
440 mark_address_taken (get_base_var (t));
441 *walk_subtrees = 0;
442 }
 443   /* Save some cycles by not walking types and declarations, as we
 444      won't find anything useful there anyway.  */
445 else if (IS_TYPE_OR_DECL_P (*tp))
446 *walk_subtrees = 0;
447
448 return NULL;
449 }
450
451 /* Lookup the tree node for the static variable that has UID. */
452 static tree
453 get_static_decl (int index)
454 {
455 splay_tree_node stn =
456 splay_tree_lookup (reference_vars_to_consider, index);
457 if (stn)
458 return (tree)stn->value;
459 return NULL;
460 }
461
462 /* Lookup the tree node for the static variable that has UID and
463 convert the name to a string for debugging. */
464
465 static const char *
466 get_static_name (int index)
467 {
468 splay_tree_node stn =
469 splay_tree_lookup (reference_vars_to_consider, index);
470 if (stn)
471 return lang_hooks.decl_printable_name ((tree)(stn->value), 2);
472 return NULL;
473 }
474
 475 /* OR all of the bits from every callee into X's (the caller's) bit
 476    vectors.  There are several cases to check to avoid needless
 477    sparse-bitmap ORing.  */
478
479 static void
480 propagate_bits (struct cgraph_node *x)
481 {
482 ipa_reference_vars_info_t x_info = get_reference_vars_info_from_cgraph (x);
483 ipa_reference_global_vars_info_t x_global = x_info->global;
484
485 struct cgraph_edge *e;
486 for (e = x->callees; e; e = e->next_callee)
487 {
488 struct cgraph_node *y = e->callee;
489
490 /* Only look at the master nodes and skip external nodes. */
491 y = cgraph_master_clone (y);
492 if (y)
493 {
494 if (get_reference_vars_info_from_cgraph (y))
495 {
496 ipa_reference_vars_info_t y_info
497 = get_reference_vars_info_from_cgraph (y);
498 ipa_reference_global_vars_info_t y_global = y_info->global;
499
500 if (x_global->statics_read
501 != all_module_statics)
502 {
503 if (y_global->statics_read
504 == all_module_statics)
505 {
506 BITMAP_FREE (x_global->statics_read);
507 x_global->statics_read
508 = all_module_statics;
509 }
510 /* Skip bitmaps that are pointer equal to node's bitmap
511 (no reason to spin within the cycle). */
512 else if (x_global->statics_read
513 != y_global->statics_read)
514 bitmap_ior_into (x_global->statics_read,
515 y_global->statics_read);
516 }
517
518 if (x_global->statics_written
519 != all_module_statics)
520 {
521 if (y_global->statics_written
522 == all_module_statics)
523 {
524 BITMAP_FREE (x_global->statics_written);
525 x_global->statics_written
526 = all_module_statics;
527 }
528 /* Skip bitmaps that are pointer equal to node's bitmap
529 (no reason to spin within the cycle). */
530 else if (x_global->statics_written
531 != y_global->statics_written)
532 bitmap_ior_into (x_global->statics_written,
533 y_global->statics_written);
534 }
535 }
536 else
537 gcc_unreachable ();
538 }
539 }
540 }
541
542 /* Look at all of the callees of X to see which ones represent inlined
543 calls. For each of these callees, merge their local info into
544 TARGET and check their children recursively.
545
546 This function goes away when Jan changes the inliner and IPA
547 analysis so that this is not run between the time when inlining
548 decisions are made and when the inlining actually occurs. */
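/* For example (hypothetical functions): if bar has been chosen for
   inlining into foo, this merge makes foo's local sets also contain
   every static that bar reads or writes, matching what foo's body will
   reference once the inlining has actually been performed.  */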
549
550 static void
551 merge_callee_local_info (struct cgraph_node *target,
552 struct cgraph_node *x)
553 {
554 struct cgraph_edge *e;
555 ipa_reference_local_vars_info_t x_l =
556 get_reference_vars_info_from_cgraph (target)->local;
557
558 /* Make the world safe for tail recursion. */
559 struct ipa_dfs_info *node_info = (struct ipa_dfs_info *) x->aux;
560
561 if (node_info->aux)
562 return;
563
564 node_info->aux = x;
565
566 for (e = x->callees; e; e = e->next_callee)
567 {
568 struct cgraph_node *y = e->callee;
569 if (y->global.inlined_to)
570 {
571 ipa_reference_vars_info_t y_info;
572 ipa_reference_local_vars_info_t y_l;
573 struct cgraph_node* orig_y = y;
574
575 y = cgraph_master_clone (y);
576 if (y)
577 {
578 y_info = get_reference_vars_info_from_cgraph (y);
579 y_l = y_info->local;
580 if (x_l != y_l)
581 {
582 bitmap_ior_into (x_l->statics_read,
583 y_l->statics_read);
584 bitmap_ior_into (x_l->statics_written,
585 y_l->statics_written);
586 }
587 x_l->calls_read_all |= y_l->calls_read_all;
588 x_l->calls_write_all |= y_l->calls_write_all;
589 merge_callee_local_info (target, y);
590 }
591 else
592 {
593 fprintf(stderr, "suspect inlining of ");
594 dump_cgraph_node (stderr, orig_y);
595 fprintf(stderr, "\ninto ");
596 dump_cgraph_node (stderr, target);
597 dump_cgraph (stderr);
598 gcc_assert(false);
599 }
600 }
601 }
602
603 node_info->aux = NULL;
604 }
605
606 /* The init routine for analyzing global static variable usage. See
607 comments at top for description. */
608 static void
609 ipa_init (void)
610 {
611 memory_identifier_string = build_string(7, "memory");
612
613 reference_vars_to_consider =
614 splay_tree_new_ggc (splay_tree_compare_ints);
615
616 bitmap_obstack_initialize (&local_info_obstack);
617 bitmap_obstack_initialize (&global_info_obstack);
618 module_statics_escape = BITMAP_ALLOC (&local_info_obstack);
619 module_statics_written = BITMAP_ALLOC (&local_info_obstack);
620 all_module_statics = BITMAP_ALLOC (&global_info_obstack);
621
622 /* There are some shared nodes, in particular the initializers on
623 static declarations. We do not need to scan them more than once
 624      since all we would be interested in are the address-of
625 operations. */
626 visited_nodes = pointer_set_create ();
627 }
628
 629 /* Check the right-hand side of the static or global initializer VNODE
 630    to see if it contains any address-of operations.  Note that some of
 631    these variables may not even be referenced in the code in this
 632    compilation unit, but their right-hand sides may contain references
 633    to variables defined within this unit.  */
634
635 static void
636 analyze_variable (struct varpool_node *vnode)
637 {
638 struct walk_stmt_info wi;
639 tree global = vnode->decl;
640
641 memset (&wi, 0, sizeof (wi));
642 wi.pset = visited_nodes;
643 walk_tree (&DECL_INITIAL (global), scan_initializer_for_static_refs,
644 &wi, wi.pset);
645 }
646
647 /* Set up the persistent info for FN. */
648
649 static ipa_reference_local_vars_info_t
650 init_function_info (struct cgraph_node *fn)
651 {
652 ipa_reference_vars_info_t info
653 = XCNEW (struct ipa_reference_vars_info_d);
654 ipa_reference_local_vars_info_t l
655 = XCNEW (struct ipa_reference_local_vars_info_d);
656 tree decl = fn->decl;
657
658 /* Add the info to the tree's annotation. */
659 get_function_ann (decl)->reference_vars_info = info;
660
661 info->local = l;
662 l->statics_read = BITMAP_ALLOC (&local_info_obstack);
663 l->statics_written = BITMAP_ALLOC (&local_info_obstack);
664
665 return l;
666 }
667
668 /* This is the main routine for finding the reference patterns for
669 global variables within a function FN. */
670
671 static void
672 analyze_function (struct cgraph_node *fn)
673 {
674 tree decl = fn->decl;
675 struct function *this_cfun = DECL_STRUCT_FUNCTION (decl);
676 basic_block this_block;
677 #ifdef ENABLE_CHECKING
678 tree step;
679 #endif
680
681 if (dump_file)
682 fprintf (dump_file, "\n local analysis of %s\n", cgraph_node_name (fn));
683
684 push_cfun (DECL_STRUCT_FUNCTION (decl));
685 current_function_decl = decl;
686
687 init_function_info (fn);
688 FOR_EACH_BB_FN (this_block, this_cfun)
689 {
690 gimple_stmt_iterator gsi;
691 gimple phi;
692 tree op;
693 use_operand_p use;
694 ssa_op_iter iter;
695
696 /* Find the addresses taken in phi node arguments. */
697 for (gsi = gsi_start_phis (this_block);
698 !gsi_end_p (gsi);
699 gsi_next (&gsi))
700 {
701 phi = gsi_stmt (gsi);
702 FOR_EACH_PHI_ARG (use, phi, iter, SSA_OP_USE)
703 {
704 op = USE_FROM_PTR (use);
705 if (TREE_CODE (op) == ADDR_EXPR)
706 mark_address_taken (get_base_var (op));
707 }
708 }
709
710 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
711 scan_stmt_for_static_refs (&gsi, fn);
712 }
713
714 #ifdef ENABLE_CHECKING
 715   /* Verify that all local initializers were expanded by the gimplifier.  */
716 for (step = DECL_STRUCT_FUNCTION (decl)->local_decls;
717 step;
718 step = TREE_CHAIN (step))
719 {
720 tree var = TREE_VALUE (step);
721 if (TREE_CODE (var) == VAR_DECL
722 && DECL_INITIAL (var)
723 && !TREE_STATIC (var))
724 gcc_unreachable ();
725 }
726 #endif
727 pop_cfun ();
728 current_function_decl = NULL;
729 }
730
 731 /* If FN's availability is AVAIL_OVERWRITABLE, replace its effects bit
 732    vectors with worst-case bit vectors.  We had to analyze it above to
733 find out if it took the address of any statics. However, now that
734 we know that, we can get rid of all of the other side effects. */
735
736 static void
737 clean_function (struct cgraph_node *fn)
738 {
739 ipa_reference_vars_info_t info = get_reference_vars_info_from_cgraph (fn);
740 ipa_reference_local_vars_info_t l = info->local;
741 ipa_reference_global_vars_info_t g = info->global;
742
743 if (l)
744 {
745 if (l->statics_read
746 && l->statics_read != all_module_statics)
747 BITMAP_FREE (l->statics_read);
748 if (l->statics_written
 749           && l->statics_written != all_module_statics)
750 BITMAP_FREE (l->statics_written);
751 free (l);
752 }
753
754 if (g)
755 {
756 if (g->statics_read
757 && g->statics_read != all_module_statics)
758 BITMAP_FREE (g->statics_read);
759
760 if (g->statics_written
761 && g->statics_written != all_module_statics)
762 BITMAP_FREE (g->statics_written);
763
764 if (g->statics_not_read
765 && g->statics_not_read != all_module_statics)
766 BITMAP_FREE (g->statics_not_read);
767
768 if (g->statics_not_written
769 && g->statics_not_written != all_module_statics)
770 BITMAP_FREE (g->statics_not_written);
771 free (g);
772 }
773
774 free (get_function_ann (fn->decl)->reference_vars_info);
775 get_function_ann (fn->decl)->reference_vars_info = NULL;
776 }
777
 778 /* Called when a new function is inserted into the callgraph late.  */
779 static void
780 add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
781 {
782 /* There are some shared nodes, in particular the initializers on
783 static declarations. We do not need to scan them more than once
 784      since all we would be interested in are the address-of
785 operations. */
786 analyze_function (node);
787 visited_nodes = NULL;
788 }
789
 790 /* Analyze each function in the cgraph to see which globals or statics
 791    are read or written.  */
792
793 static void
794 generate_summary (void)
795 {
796 struct cgraph_node *node;
797 struct varpool_node *vnode;
798 unsigned int index;
799 bitmap_iterator bi;
800 bitmap module_statics_readonly;
801 bitmap bm_temp;
802
803 function_insertion_hook_holder =
804 cgraph_add_function_insertion_hook (&add_new_function, NULL);
805 ipa_init ();
806 module_statics_readonly = BITMAP_ALLOC (&local_info_obstack);
807 bm_temp = BITMAP_ALLOC (&local_info_obstack);
808
809 /* Process all of the variables first. */
810 FOR_EACH_STATIC_INITIALIZER (vnode)
811 analyze_variable (vnode);
812
813 /* Process all of the functions next.
814
815 We do not want to process any of the clones so we check that this
816 is a master clone. However, we do need to process any
817 AVAIL_OVERWRITABLE functions (these are never clones) because
818 they may cause a static variable to escape. The code that can
819 overwrite such a function cannot access the statics because it
820 would not be in the same compilation unit. When the analysis is
 821      finished, the computed information of these AVAIL_OVERWRITABLE
 822      functions is replaced with worst-case info.
 823   */
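  /* A typical AVAIL_OVERWRITABLE example (illustrative) is a weak
     function definition: a replacement linked in from another unit could
     not name our file-local statics, but the body we see here can.  */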
824 for (node = cgraph_nodes; node; node = node->next)
825 if (node->analyzed
826 && (cgraph_is_master_clone (node)
827 || (cgraph_function_body_availability (node)
828 == AVAIL_OVERWRITABLE)))
829 analyze_function (node);
830
831 pointer_set_destroy (visited_nodes);
832 visited_nodes = NULL;
833
834 /* Prune out the variables that were found to behave badly
835 (i.e. have their address taken). */
836 EXECUTE_IF_SET_IN_BITMAP (module_statics_escape, 0, index, bi)
837 {
838 splay_tree_remove (reference_vars_to_consider, index);
839 }
840
841 bitmap_and_compl_into (all_module_statics,
842 module_statics_escape);
843
844 bitmap_and_compl (module_statics_readonly, all_module_statics,
845 module_statics_written);
846
847 /* If the address is not taken, we can unset the addressable bit
848 on this variable. */
849 EXECUTE_IF_SET_IN_BITMAP (all_module_statics, 0, index, bi)
850 {
851 tree var = get_static_decl (index);
852 TREE_ADDRESSABLE (var) = 0;
853 if (dump_file)
854 fprintf (dump_file, "Not TREE_ADDRESSABLE var %s\n",
855 get_static_name (index));
856 }
857
858 /* If the variable is never written, we can set the TREE_READONLY
859 flag. Additionally if it has a DECL_INITIAL that is made up of
860 constants we can treat the entire global as a constant. */
861
862 bitmap_and_compl (module_statics_readonly, all_module_statics,
863 module_statics_written);
864 EXECUTE_IF_SET_IN_BITMAP (module_statics_readonly, 0, index, bi)
865 {
866 tree var = get_static_decl (index);
867
868 /* Ignore variables in named sections - changing TREE_READONLY
869 changes the section flags, potentially causing conflicts with
870 other variables in the same named section. */
871 if (DECL_SECTION_NAME (var) == NULL_TREE)
872 {
873 TREE_READONLY (var) = 1;
874 if (dump_file)
875 fprintf (dump_file, "read-only var %s\n",
876 get_static_name (index));
877 }
878 }
879
 880   BITMAP_FREE (module_statics_escape);
 881   BITMAP_FREE (module_statics_written);
882 module_statics_escape = NULL;
883 module_statics_written = NULL;
884
885 if (dump_file)
886 EXECUTE_IF_SET_IN_BITMAP (all_module_statics, 0, index, bi)
887 {
888 fprintf (dump_file, "\nPromotable global:%s",
889 get_static_name (index));
890 }
891
892 for (node = cgraph_nodes; node; node = node->next)
893 if (node->analyzed
894 && (cgraph_is_master_clone (node)
895 || (cgraph_function_body_availability (node)
896 == AVAIL_OVERWRITABLE)))
897 {
898 ipa_reference_local_vars_info_t l;
899 l = get_reference_vars_info_from_cgraph (node)->local;
900
901 /* Any variables that are not in all_module_statics are
902 removed from the local maps. This will include all of the
903 variables that were found to escape in the function
904 scanning. */
905 bitmap_and_into (l->statics_read,
906 all_module_statics);
907 bitmap_and_into (l->statics_written,
908 all_module_statics);
909 }
910
 911   BITMAP_FREE (module_statics_readonly);
 912   BITMAP_FREE (bm_temp);
913
914 if (dump_file)
915 for (node = cgraph_nodes; node; node = node->next)
916 if (node->analyzed
917 && (cgraph_is_master_clone (node)
918 || (cgraph_function_body_availability (node)
919 == AVAIL_OVERWRITABLE)))
920 {
921 ipa_reference_local_vars_info_t l;
922 unsigned int index;
923 bitmap_iterator bi;
924
925 l = get_reference_vars_info_from_cgraph (node)->local;
926 fprintf (dump_file,
927 "\nFunction name:%s/%i:",
928 cgraph_node_name (node), node->uid);
929 fprintf (dump_file, "\n locals read: ");
930 EXECUTE_IF_SET_IN_BITMAP (l->statics_read,
931 0, index, bi)
932 {
933 fprintf (dump_file, "%s ",
934 get_static_name (index));
935 }
936 fprintf (dump_file, "\n locals written: ");
937 EXECUTE_IF_SET_IN_BITMAP (l->statics_written,
938 0, index, bi)
939 {
940 fprintf(dump_file, "%s ",
941 get_static_name (index));
942 }
943 }
944 }
945 \f
 946 /* Produce the global information by performing a transitive closure
 947    on the local information that was produced by analyze_function
 948    and analyze_variable.  */
949
950 static unsigned int
951 propagate (void)
952 {
953 struct cgraph_node *node;
954 struct cgraph_node *w;
955 struct cgraph_node **order =
956 XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
957 int order_pos = ipa_utils_reduced_inorder (order, false, true);
958 int i;
959
960 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
961 if (dump_file)
962 dump_cgraph (dump_file);
963
 964   /* Propagate the local information through the call graph to produce
965 the global information. All the nodes within a cycle will have
966 the same info so we collapse cycles first. Then we can do the
967 propagation in one pass from the leaves to the roots. */
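  /* As a sketch (A, B, C, S1 and S2 are hypothetical): if A calls B and
     C, B reads the static S1 and C writes the static S2, then after
     propagation A's global sets contain S1 as read and S2 as written.
     Nodes that form a cycle end up sharing a single pair of global
     bitmaps.  */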
968 order_pos = ipa_utils_reduced_inorder (order, true, true);
969 if (dump_file)
970 ipa_utils_print_order(dump_file, "reduced", order, order_pos);
971
972 for (i = 0; i < order_pos; i++ )
973 {
974 ipa_reference_vars_info_t node_info;
975 ipa_reference_global_vars_info_t node_g =
976 XCNEW (struct ipa_reference_global_vars_info_d);
977 ipa_reference_local_vars_info_t node_l;
978
979 bool read_all;
980 bool write_all;
981 struct ipa_dfs_info * w_info;
982
983 node = order[i];
984 node_info = get_reference_vars_info_from_cgraph (node);
985 if (!node_info)
986 {
987 dump_cgraph_node (stderr, node);
988 dump_cgraph (stderr);
989 gcc_unreachable ();
990 }
991
992 node_info->global = node_g;
993 node_l = node_info->local;
994
995 read_all = node_l->calls_read_all;
996 write_all = node_l->calls_write_all;
997
998 /* If any node in a cycle is calls_read_all or calls_write_all
999 they all are. */
1000 w_info = (struct ipa_dfs_info *) node->aux;
1001 w = w_info->next_cycle;
1002 while (w)
1003 {
1004 ipa_reference_local_vars_info_t w_l =
1005 get_reference_vars_info_from_cgraph (w)->local;
1006 read_all |= w_l->calls_read_all;
1007 write_all |= w_l->calls_write_all;
1008
1009 w_info = (struct ipa_dfs_info *) w->aux;
1010 w = w_info->next_cycle;
1011 }
1012
 1013       /* Initialize the bitmaps for the reduced nodes.  */
1014 if (read_all)
1015 node_g->statics_read = all_module_statics;
1016 else
1017 {
1018 node_g->statics_read = BITMAP_ALLOC (&global_info_obstack);
1019 bitmap_copy (node_g->statics_read,
1020 node_l->statics_read);
1021 }
1022
1023 if (write_all)
1024 node_g->statics_written = all_module_statics;
1025 else
1026 {
1027 node_g->statics_written = BITMAP_ALLOC (&global_info_obstack);
1028 bitmap_copy (node_g->statics_written,
1029 node_l->statics_written);
1030 }
1031
1032 w_info = (struct ipa_dfs_info *) node->aux;
1033 w = w_info->next_cycle;
1034 while (w)
1035 {
1036 ipa_reference_vars_info_t w_ri =
1037 get_reference_vars_info_from_cgraph (w);
1038 ipa_reference_local_vars_info_t w_l = w_ri->local;
1039
1040 /* All nodes within a cycle share the same global info bitmaps. */
1041 w_ri->global = node_g;
1042
1043 /* These global bitmaps are initialized from the local info
1044 of all of the nodes in the region. However there is no
1045 need to do any work if the bitmaps were set to
1046 all_module_statics. */
1047 if (!read_all)
1048 bitmap_ior_into (node_g->statics_read,
1049 w_l->statics_read);
1050 if (!write_all)
1051 bitmap_ior_into (node_g->statics_written,
1052 w_l->statics_written);
1053 w_info = (struct ipa_dfs_info *) w->aux;
1054 w = w_info->next_cycle;
1055 }
1056
1057 w = node;
1058 while (w)
1059 {
1060 propagate_bits (w);
1061 w_info = (struct ipa_dfs_info *) w->aux;
1062 w = w_info->next_cycle;
1063 }
1064 }
1065
 1066   /* Need to fix up the local information sets.  The information that
 1067      has been gathered so far is pre-inlining.  However, the
 1068      compilation will progress post-inlining, so the local sets for the
1069 inlined calls need to be merged into the callers. Note that the
1070 local sets are not shared between all of the nodes in a cycle so
1071 those nodes in the cycle must be processed explicitly. */
1072 for (i = 0; i < order_pos; i++ )
1073 {
1074 struct ipa_dfs_info * w_info;
1075 node = order[i];
1076 merge_callee_local_info (node, node);
1077
1078 w_info = (struct ipa_dfs_info *) node->aux;
1079 w = w_info->next_cycle;
1080 while (w)
1081 {
1082 merge_callee_local_info (w, w);
1083 w_info = (struct ipa_dfs_info *) w->aux;
1084 w = w_info->next_cycle;
1085 }
1086 }
1087
1088 if (dump_file)
1089 {
1090 for (i = 0; i < order_pos; i++ )
1091 {
1092 ipa_reference_vars_info_t node_info;
1093 ipa_reference_global_vars_info_t node_g;
1094 ipa_reference_local_vars_info_t node_l;
1095 unsigned int index;
1096 bitmap_iterator bi;
1097 struct ipa_dfs_info * w_info;
1098
1099 node = order[i];
1100 node_info = get_reference_vars_info_from_cgraph (node);
1101 node_g = node_info->global;
1102 node_l = node_info->local;
1103 fprintf (dump_file,
1104 "\nFunction name:%s/%i:",
1105 cgraph_node_name (node), node->uid);
1106 fprintf (dump_file, "\n locals read: ");
1107 EXECUTE_IF_SET_IN_BITMAP (node_l->statics_read,
1108 0, index, bi)
1109 {
1110 fprintf (dump_file, "%s ",
1111 get_static_name (index));
1112 }
1113 fprintf (dump_file, "\n locals written: ");
1114 EXECUTE_IF_SET_IN_BITMAP (node_l->statics_written,
1115 0, index, bi)
1116 {
1117 fprintf(dump_file, "%s ",
1118 get_static_name (index));
1119 }
1120
1121 w_info = (struct ipa_dfs_info *) node->aux;
1122 w = w_info->next_cycle;
1123 while (w)
1124 {
1125 ipa_reference_vars_info_t w_ri =
1126 get_reference_vars_info_from_cgraph (w);
1127 ipa_reference_local_vars_info_t w_l = w_ri->local;
1128 fprintf (dump_file, "\n next cycle: %s/%i ",
1129 cgraph_node_name (w), w->uid);
1130 fprintf (dump_file, "\n locals read: ");
1131 EXECUTE_IF_SET_IN_BITMAP (w_l->statics_read,
1132 0, index, bi)
1133 {
1134 fprintf (dump_file, "%s ",
1135 get_static_name (index));
1136 }
1137
1138 fprintf (dump_file, "\n locals written: ");
1139 EXECUTE_IF_SET_IN_BITMAP (w_l->statics_written,
1140 0, index, bi)
1141 {
1142 fprintf(dump_file, "%s ",
1143 get_static_name (index));
1144 }
1145
1146
1147 w_info = (struct ipa_dfs_info *) w->aux;
1148 w = w_info->next_cycle;
1149 }
1150 fprintf (dump_file, "\n globals read: ");
1151 EXECUTE_IF_SET_IN_BITMAP (node_g->statics_read,
1152 0, index, bi)
1153 {
1154 fprintf (dump_file, "%s ",
1155 get_static_name (index));
1156 }
1157 fprintf (dump_file, "\n globals written: ");
1158 EXECUTE_IF_SET_IN_BITMAP (node_g->statics_written,
1159 0, index, bi)
1160 {
1161 fprintf (dump_file, "%s ",
1162 get_static_name (index));
1163 }
1164 }
1165 }
1166
1167 /* Cleanup. */
1168 for (i = 0; i < order_pos; i++ )
1169 {
1170 ipa_reference_vars_info_t node_info;
1171 ipa_reference_global_vars_info_t node_g;
1172 node = order[i];
1173 node_info = get_reference_vars_info_from_cgraph (node);
1174 node_g = node_info->global;
1175
 1176       /* Create the complementary sets.  These are more useful for
 1177          certain APIs.  */
1178 node_g->statics_not_read = BITMAP_ALLOC (&global_info_obstack);
1179 node_g->statics_not_written = BITMAP_ALLOC (&global_info_obstack);
1180
1181 if (node_g->statics_read != all_module_statics)
1182 {
1183 bitmap_and_compl (node_g->statics_not_read,
1184 all_module_statics,
1185 node_g->statics_read);
1186 }
1187
1188 if (node_g->statics_written
1189 != all_module_statics)
1190 bitmap_and_compl (node_g->statics_not_written,
1191 all_module_statics,
1192 node_g->statics_written);
1193 }
1194
1195 free (order);
1196
1197 for (node = cgraph_nodes; node; node = node->next)
1198 {
1199 ipa_reference_vars_info_t node_info;
1200 node_info = get_reference_vars_info_from_cgraph (node);
1201 /* Get rid of the aux information. */
1202
1203 if (node->aux)
1204 {
1205 free (node->aux);
1206 node->aux = NULL;
1207 }
1208
1209 if (node->analyzed
1210 && (cgraph_function_body_availability (node) == AVAIL_OVERWRITABLE))
1211 clean_function (node);
1212 else if (node_info)
1213 {
1214 /* Remove local info we no longer need. */
1215 if (node_info->local->statics_read
1216 && node_info->local->statics_read != all_module_statics)
1217 BITMAP_FREE (node_info->local->statics_read);
1218 if (node_info->local->statics_written
1219 && node_info->local->statics_written != all_module_statics)
1220 BITMAP_FREE (node_info->local->statics_written);
1221 }
1222 }
1223 bitmap_obstack_release (&local_info_obstack);
1224 return 0;
1225 }
1226
1227
1228 static bool
1229 gate_reference (void)
1230 {
1231 return (flag_ipa_reference
1232 /* Don't bother doing anything if the program has errors. */
1233 && !(errorcount || sorrycount));
1234 }
1235
1236 struct ipa_opt_pass pass_ipa_reference =
1237 {
1238 {
1239 IPA_PASS,
1240 "static-var", /* name */
1241 gate_reference, /* gate */
1242 propagate, /* execute */
1243 NULL, /* sub */
1244 NULL, /* next */
1245 0, /* static_pass_number */
1246 TV_IPA_REFERENCE, /* tv_id */
1247 0, /* properties_required */
1248 0, /* properties_provided */
1249 0, /* properties_destroyed */
1250 0, /* todo_flags_start */
1251 0 /* todo_flags_finish */
1252 },
1253 generate_summary, /* generate_summary */
1254 NULL, /* write_summary */
1255 NULL, /* read_summary */
1256 NULL, /* function_read_summary */
1257 0, /* TODOs */
1258 NULL, /* function_transform */
1259 NULL /* variable_transform */
1260 };
1261
1262 #include "gt-ipa-reference.h"