re PR lto/61886 (LTO breaks fread with _FORTIFY_SOURCE=2)
gcc/ipa-reference.c
1 /* Callgraph based analysis of static variables.
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
3 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This file gathers information about how variables whose scope is
22 confined to the compilation unit are used.
23
24 The transitive call site specific clobber effects are computed
25 for the variables whose scope is contained within this compilation
26 unit.
27
28 First each function and static variable initialization is analyzed
29 to determine which local static variables are either read, written,
30 or have their address taken. Any local static that has its address
31 taken is removed from consideration. Once the local reads and
32 writes are determined, a transitive closure of this information is
33 performed over the call graph to determine the worst case set of
34 side effects of each call. In later parts of the compiler, these
35 local and global sets are examined to make the call clobbering less
36 traumatic, promote some statics to registers, and improve aliasing
37 information. */
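/* As an illustrative sketch (not part of the implementation), consider a
   unit such as

     static int counter;
     static int shadow;
     static void bump (void) { counter++; }
     int peek (void) { int *p = &shadow; return counter + *p; }

   Local analysis records that bump writes `counter' and that peek reads
   it, while `shadow' is dropped from consideration because its address
   is taken.  The transitive closure then propagates these sets over the
   call graph, so a function whose only callee is bump is known to write
   at most `counter' among the tracked statics.  */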
38
39 #include "config.h"
40 #include "system.h"
41 #include "coretypes.h"
42 #include "backend.h"
43 #include "tree.h"
44 #include "gimple.h"
45 #include "tree-pass.h"
46 #include "cgraph.h"
47 #include "data-streamer.h"
48 #include "calls.h"
49 #include "splay-tree.h"
50 #include "ipa-utils.h"
51 #include "ipa-reference.h"
52
53 static void remove_node_data (struct cgraph_node *node,
54 void *data ATTRIBUTE_UNUSED);
55 static void duplicate_node_data (struct cgraph_node *src,
56 struct cgraph_node *dst,
57 void *data ATTRIBUTE_UNUSED);
58
59 /* The static variables defined within the compilation unit that are
60 loaded or stored directly by the function that owns this structure. */
61
62 struct ipa_reference_local_vars_info_d
63 {
64 bitmap statics_read;
65 bitmap statics_written;
66 };
67
68 /* Statics that are read and written by some set of functions. The
69 local ones are based on the loads and stores local to the function.
70 The global ones are based on the local info as well as the
71 transitive closure of the functions that are called. */
72
73 struct ipa_reference_global_vars_info_d
74 {
75 bitmap statics_read;
76 bitmap statics_written;
77 };
78
79 /* Information we save about every function after ipa-reference is completed. */
80
81 struct ipa_reference_optimization_summary_d
82 {
83 bitmap statics_not_read;
84 bitmap statics_not_written;
85 };
86
87 typedef struct ipa_reference_local_vars_info_d *ipa_reference_local_vars_info_t;
88 typedef struct ipa_reference_global_vars_info_d *ipa_reference_global_vars_info_t;
89 typedef struct ipa_reference_optimization_summary_d *ipa_reference_optimization_summary_t;
90
91 struct ipa_reference_vars_info_d
92 {
93 struct ipa_reference_local_vars_info_d local;
94 struct ipa_reference_global_vars_info_d global;
95 };
96
97 typedef struct ipa_reference_vars_info_d *ipa_reference_vars_info_t;
98
99 /* This splay tree contains all of the static variables that are
100 being considered by the compilation level alias analysis. */
101 static splay_tree reference_vars_to_consider;
102
103 /* Set of all interesting module statics. A bit is set for every module
104 static we are considering. This is added to the local info when asm
105 code is found that clobbers all memory. */
106 static bitmap all_module_statics;
107 /* Set of all statics that should be ignored because they are touched by
108 -fno-ipa-reference code. */
109 static bitmap ignore_module_statics;
110
111 /* Obstack holding bitmaps of local analysis (live from analysis to
112 propagation) */
113 static bitmap_obstack local_info_obstack;
114 /* Obstack holding bitmaps of the global analysis; lives forever. */
115 static bitmap_obstack optimization_summary_obstack;
116
117 /* Holders of ipa cgraph hooks: */
118 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
119 static struct cgraph_node_hook_list *node_removal_hook_holder;
120
121 /* Vector where the reference var infos are actually stored.
122 Indexed by UID of call graph nodes. */
123 static vec<ipa_reference_vars_info_t> ipa_reference_vars_vector;
124
125 /* TODO: find a place where we should release the vector. */
126 static vec<ipa_reference_optimization_summary_t> ipa_reference_opt_sum_vector;
127
128 /* Return the ipa_reference_vars structure starting from the cgraph NODE. */
129 static inline ipa_reference_vars_info_t
130 get_reference_vars_info (struct cgraph_node *node)
131 {
132 if (!ipa_reference_vars_vector.exists ()
133 || ipa_reference_vars_vector.length () <= (unsigned int) node->uid)
134 return NULL;
135 return ipa_reference_vars_vector[node->uid];
136 }
137
138 /* Return the ipa_reference_optimization_summary structure starting from the cgraph NODE. */
139 static inline ipa_reference_optimization_summary_t
140 get_reference_optimization_summary (struct cgraph_node *node)
141 {
142 if (!ipa_reference_opt_sum_vector.exists ()
143 || (ipa_reference_opt_sum_vector.length () <= (unsigned int) node->uid))
144 return NULL;
145 return ipa_reference_opt_sum_vector[node->uid];
146 }
147
148 /* Set the ipa_reference_vars structure INFO for the cgraph NODE. */
149 static inline void
150 set_reference_vars_info (struct cgraph_node *node,
151 ipa_reference_vars_info_t info)
152 {
153 if (!ipa_reference_vars_vector.exists ()
154 || ipa_reference_vars_vector.length () <= (unsigned int) node->uid)
155 ipa_reference_vars_vector.safe_grow_cleared (node->uid + 1);
156 ipa_reference_vars_vector[node->uid] = info;
157 }
158
159 /* Set the ipa_reference_optimization_summary INFO for the cgraph NODE. */
160 static inline void
161 set_reference_optimization_summary (struct cgraph_node *node,
162 ipa_reference_optimization_summary_t info)
163 {
164 if (!ipa_reference_opt_sum_vector.exists ()
165 || (ipa_reference_opt_sum_vector.length () <= (unsigned int) node->uid))
166 ipa_reference_opt_sum_vector.safe_grow_cleared (node->uid + 1);
167 ipa_reference_opt_sum_vector[node->uid] = info;
168 }
169
170 /* Return a bitmap indexed by ipa_reference_var_uid for the static variables
171 that are *not* read during the execution of the function FN. Returns
172 NULL if no data is available. */
173
174 bitmap
175 ipa_reference_get_not_read_global (struct cgraph_node *fn)
176 {
177 if (!opt_for_fn (fn->decl, flag_ipa_reference)
178 || !opt_for_fn (current_function_decl, flag_ipa_reference))
179 return NULL;
180 ipa_reference_optimization_summary_t info =
181 get_reference_optimization_summary (fn->function_symbol (NULL));
182 if (info)
183 return info->statics_not_read;
184 else if (flags_from_decl_or_type (fn->decl) & ECF_LEAF)
185 return all_module_statics;
186 else
187 return NULL;
188 }
189
190 /* Return a bitmap indexed by ipa_reference_var_uid for the static variables
191 that are *not* written during the execution of the function FN. Note
192 that variables written may or may not be read during the function
193 call. Returns NULL if no data is available. */
194
195 bitmap
196 ipa_reference_get_not_written_global (struct cgraph_node *fn)
197 {
198 if (!opt_for_fn (fn->decl, flag_ipa_reference)
199 || !opt_for_fn (current_function_decl, flag_ipa_reference))
200 return NULL;
201 ipa_reference_optimization_summary_t info =
202 get_reference_optimization_summary (fn);
203 if (info)
204 return info->statics_not_written;
205 else if (flags_from_decl_or_type (fn->decl) & ECF_LEAF)
206 return all_module_statics;
207 else
208 return NULL;
209 }
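/* Illustrative (hypothetical) use of the two queries above, e.g. from an
   alias oracle asking whether a call to CALLEE may read the static DECL:

     bitmap not_read = ipa_reference_get_not_read_global (callee);
     if (not_read && bitmap_bit_p (not_read, ipa_reference_var_uid (decl)))
       ... the call is known not to read DECL ...

   A NULL result means no summary is available and the call has to be
   assumed to read and write every static.  */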
210 \f
211
212 /* Helper for is_proper_for_analysis. */
213 static bool
214 is_improper (symtab_node *n, void *v ATTRIBUTE_UNUSED)
215 {
216 tree t = n->decl;
217 /* If the variable has the "used" attribute, treat it as if it had
218 been touched by the devil. */
219 if (DECL_PRESERVE_P (t))
220 return true;
221
222 /* We do not want to do anything with volatile except mark any
223 function that uses one as neither const nor pure. */
224 if (TREE_THIS_VOLATILE (t))
225 return true;
226
227 /* We do not need to analyze readonly vars, we already know they do not
228 alias. */
229 if (TREE_READONLY (t))
230 return true;
231
232 /* We cannot track variables whose address is taken. */
233 if (TREE_ADDRESSABLE (t))
234 return true;
235
236 /* TODO: We could track public variables that are not addressable, but
237 currently frontends don't give us those. */
238 if (TREE_PUBLIC (t))
239 return true;
240
241 return false;
242 }
243
244 /* Return true if the variable T is the right kind of static variable on
245 which to perform compilation-unit-scope escape analysis. */
246
247 static inline bool
248 is_proper_for_analysis (tree t)
249 {
250 if (bitmap_bit_p (ignore_module_statics, ipa_reference_var_uid (t)))
251 return false;
252
253 if (symtab_node::get (t)
254 ->call_for_symbol_and_aliases (is_improper, NULL, true))
255 return false;
256
257 return true;
258 }
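/* For example (an illustrative sketch), among the file-scope variables

     static int counter;            // proper: tracked by the pass
     static const int limit = 16;   // TREE_READONLY: already known not to alias
     static volatile int hw_reg;    // TREE_THIS_VOLATILE: rejected
     int exported;                  // TREE_PUBLIC: rejected

   only `counter' is proper for the analysis, and even it is rejected once
   its address is taken or it is touched by -fno-ipa-reference code.  */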
259
260 /* Lookup the tree node for the static variable that has UID and
261 convert the name to a string for debugging. */
262
263 static const char *
264 get_static_name (int index)
265 {
266 splay_tree_node stn =
267 splay_tree_lookup (reference_vars_to_consider, index);
268 return fndecl_name ((tree)(stn->value));
269 }
270
271 /* Dump a set of static vars to FILE. */
272 static void
273 dump_static_vars_set_to_file (FILE *f, bitmap set)
274 {
275 unsigned int index;
276 bitmap_iterator bi;
277 if (set == NULL)
278 return;
279 else if (set == all_module_statics)
280 fprintf (f, "ALL");
281 else
282 EXECUTE_IF_SET_IN_BITMAP (set, 0, index, bi)
283 {
284 fprintf (f, "%s ", get_static_name (index));
285 }
286 }
287
288 /* Compute X |= Y, taking into account the possibility that
289 either X or Y is already the maximum set.
290 Return true if X is the maximum set after taking the union with Y. */
291
292 static bool
293 union_static_var_sets (bitmap &x, bitmap y)
294 {
295 if (x != all_module_statics)
296 {
297 if (y == all_module_statics)
298 {
299 BITMAP_FREE (x);
300 x = all_module_statics;
301 }
302 else if (bitmap_ior_into (x, y))
303 {
304 /* The union may have reduced X to the maximum set.
305 In that case, we want to make that visible explicitly.
306 Even though bitmap_equal_p can be very expensive, it
307 turns out to be an overall win to check this here for
308 an LTO bootstrap of GCC itself. Liberally extrapolate
309 that result to be applicable to all cases. */
310 if (bitmap_equal_p (x, all_module_statics))
311 {
312 BITMAP_FREE (x);
313 x = all_module_statics;
314 }
315 }
316 }
317 return x == all_module_statics;
318 }
319
320 /* Return a copy of SET on the bitmap obstack containing SET.
321 But if SET is NULL or the maximum set, return that instead. */
322
323 static bitmap
324 copy_static_var_set (bitmap set)
325 {
326 if (set == NULL || set == all_module_statics)
327 return set;
328 bitmap_obstack *o = set->obstack;
329 gcc_checking_assert (o);
330 bitmap copy = BITMAP_ALLOC (o);
331 bitmap_copy (copy, set);
332 return copy;
333 }
334
335 /* Compute the union of all the statics read and written by every callee of X
336 into X_GLOBAL->statics_read and X_GLOBAL->statics_written. X_GLOBAL is
337 actually the set representing the cycle containing X. If the read and
338 written sets of X_GLOBAL have been reduced to the maximum set, we don't
339 have to look at the remaining callees. */
340
341 static void
342 propagate_bits (ipa_reference_global_vars_info_t x_global, struct cgraph_node *x)
343 {
344 struct cgraph_edge *e;
345 bool read_all = x_global->statics_read == all_module_statics;
346 bool write_all = x_global->statics_written == all_module_statics;
347 for (e = x->callees;
348 e && !(read_all && write_all);
349 e = e->next_callee)
350 {
351 enum availability avail;
352 struct cgraph_node *y = e->callee->function_symbol (&avail);
353 if (!y)
354 continue;
355
356 /* Only look into nodes from which we can propagate something. */
357 int flags = flags_from_decl_or_type (y->decl);
358 if (opt_for_fn (y->decl, flag_ipa_reference)
359 && (avail > AVAIL_INTERPOSABLE
360 || (avail == AVAIL_INTERPOSABLE && (flags & ECF_LEAF))))
361 {
362 if (get_reference_vars_info (y))
363 {
364 ipa_reference_vars_info_t y_info = get_reference_vars_info (y);
365 ipa_reference_global_vars_info_t y_global = &y_info->global;
366
367 /* Calls in the current cycle do not have their global set
368 computed yet (but everything else does because we're
369 visiting nodes in topological order). */
370 if (!y_global->statics_read)
371 continue;
372
373 /* If the function is const, it reads no memory even if it
374 seems so to local analysis. */
375 if (flags & ECF_CONST)
376 continue;
377
378 union_static_var_sets (x_global->statics_read,
379 y_global->statics_read);
380
381 /* If the function is pure, it has no stores even if it
382 seems so to local analysis. If we cannot return from
383 the function, we can safely ignore the call. */
384 if ((flags & ECF_PURE)
385 || e->cannot_lead_to_return_p ())
386 continue;
387
388 union_static_var_sets (x_global->statics_written,
389 y_global->statics_written);
390 }
391 else
392 gcc_unreachable ();
393 }
394 }
395 }
396
397 static bool ipa_init_p = false;
398
399 /* The init routine for analyzing global static variable usage. See
400 comments at top for description. */
401 static void
402 ipa_init (void)
403 {
404 if (ipa_init_p)
405 return;
406
407 ipa_init_p = true;
408
409 if (dump_file)
410 reference_vars_to_consider = splay_tree_new (splay_tree_compare_ints, 0, 0);
411
412 bitmap_obstack_initialize (&local_info_obstack);
413 bitmap_obstack_initialize (&optimization_summary_obstack);
414 all_module_statics = BITMAP_ALLOC (&optimization_summary_obstack);
415 ignore_module_statics = BITMAP_ALLOC (&optimization_summary_obstack);
416
417 node_removal_hook_holder =
418 symtab->add_cgraph_removal_hook (&remove_node_data, NULL);
419 node_duplication_hook_holder =
420 symtab->add_cgraph_duplication_hook (&duplicate_node_data, NULL);
421 }
422
423
424 /* Set up the persistent info for FN. */
425
426 static ipa_reference_local_vars_info_t
427 init_function_info (struct cgraph_node *fn)
428 {
429 ipa_reference_vars_info_t info
430 = XCNEW (struct ipa_reference_vars_info_d);
431
432 /* Record the info in the vector indexed by the cgraph node's UID. */
433 set_reference_vars_info (fn, info);
434
435 info->local.statics_read = BITMAP_ALLOC (&local_info_obstack);
436 info->local.statics_written = BITMAP_ALLOC (&local_info_obstack);
437
438 return &info->local;
439 }
440
441
442 /* This is the main routine for finding the reference patterns for
443 global variables within a function FN. */
444
445 static void
446 analyze_function (struct cgraph_node *fn)
447 {
448 ipa_reference_local_vars_info_t local;
449 struct ipa_ref *ref = NULL;
450 int i;
451 tree var;
452
453 if (!opt_for_fn (fn->decl, flag_ipa_reference))
454 return;
455 local = init_function_info (fn);
456 for (i = 0; fn->iterate_reference (i, ref); i++)
457 {
458 if (!is_a <varpool_node *> (ref->referred))
459 continue;
460 var = ref->referred->decl;
461 if (!is_proper_for_analysis (var))
462 continue;
463 /* This is a variable we care about. Check if we have seen it
464 before, and if not add it to the set of variables we care about. */
465 if (all_module_statics
466 && bitmap_set_bit (all_module_statics, ipa_reference_var_uid (var)))
467 {
468 if (dump_file)
469 splay_tree_insert (reference_vars_to_consider,
470 ipa_reference_var_uid (var),
471 (splay_tree_value)var);
472 }
473 switch (ref->use)
474 {
475 case IPA_REF_LOAD:
476 bitmap_set_bit (local->statics_read, ipa_reference_var_uid (var));
477 break;
478 case IPA_REF_STORE:
479 if (ref->cannot_lead_to_return ())
480 break;
481 bitmap_set_bit (local->statics_written, ipa_reference_var_uid (var));
482 break;
483 case IPA_REF_ADDR:
484 break;
485 default:
486 gcc_unreachable ();
487 }
488 }
489
490 if (fn->cannot_return_p ())
491 bitmap_clear (local->statics_written);
492 }
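/* A minimal sketch of what the loop above computes: for

     static int a, b;
     void f (void) { a = b + 1; }

   analyzing f leaves local->statics_read = {b} and
   local->statics_written = {a}.  The IPA_REF_ADDR case is a no-op because
   address-taken statics never pass is_proper_for_analysis in the first
   place.  */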
493
494
495 /* Called when a new clone is inserted into the callgraph late. */
496
497 static void
498 duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
499 void *data ATTRIBUTE_UNUSED)
500 {
501 ipa_reference_optimization_summary_t ginfo;
502 ipa_reference_optimization_summary_t dst_ginfo;
503
504 ginfo = get_reference_optimization_summary (src);
505 if (!ginfo)
506 return;
507 dst_ginfo = XCNEW (struct ipa_reference_optimization_summary_d);
508 set_reference_optimization_summary (dst, dst_ginfo);
509 dst_ginfo->statics_not_read =
510 copy_static_var_set (ginfo->statics_not_read);
511 dst_ginfo->statics_not_written =
512 copy_static_var_set (ginfo->statics_not_written);
513 }
514
515 /* Called when a node is removed from the callgraph. */
516
517 static void
518 remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
519 {
520 ipa_reference_optimization_summary_t ginfo;
521 ginfo = get_reference_optimization_summary (node);
522 if (ginfo)
523 {
524 if (ginfo->statics_not_read
525 && ginfo->statics_not_read != all_module_statics)
526 BITMAP_FREE (ginfo->statics_not_read);
527
528 if (ginfo->statics_not_written
529 && ginfo->statics_not_written != all_module_statics)
530 BITMAP_FREE (ginfo->statics_not_written);
531 free (ginfo);
532 set_reference_optimization_summary (node, NULL);
533 }
534 }
535
536 /* Analyze each function in the cgraph to see which globals or statics
537 are read or written. */
538
539 static void
540 generate_summary (void)
541 {
542 struct cgraph_node *node;
543 unsigned int index;
544 bitmap_iterator bi;
545
546 ipa_init ();
547
548 /* First record any statics touched by -fno-ipa-reference code as ignored. */
549 FOR_EACH_DEFINED_FUNCTION (node)
550 if (!node->alias && !opt_for_fn (node->decl, flag_ipa_reference))
551 {
552 struct ipa_ref *ref = NULL;
553 int i;
554 tree var;
555 for (i = 0; node->iterate_reference (i, ref); i++)
556 {
557 if (!is_a <varpool_node *> (ref->referred))
558 continue;
559 var = ref->referred->decl;
560 if (!is_proper_for_analysis (var))
561 continue;
562 bitmap_set_bit (ignore_module_statics, ipa_reference_var_uid (var));
563 }
564 }
565 FOR_EACH_DEFINED_FUNCTION (node)
566 analyze_function (node);
567
568 if (dump_file)
569 EXECUTE_IF_SET_IN_BITMAP (all_module_statics, 0, index, bi)
570 {
571 fprintf (dump_file, "\nPromotable global:%s (uid=%u)\n",
572 get_static_name (index), index);
573 }
574
575 if (dump_file)
576 FOR_EACH_DEFINED_FUNCTION (node)
577 if (node->get_availability () >= AVAIL_INTERPOSABLE
578 && opt_for_fn (node->decl, flag_ipa_reference))
579 {
580 ipa_reference_local_vars_info_t l;
581 unsigned int index;
582 bitmap_iterator bi;
583
584 l = &get_reference_vars_info (node)->local;
585 fprintf (dump_file,
586 "\nFunction name:%s/%i:",
587 node->asm_name (), node->order);
588 fprintf (dump_file, "\n locals read: ");
589 if (l->statics_read)
590 EXECUTE_IF_SET_IN_BITMAP (l->statics_read,
591 0, index, bi)
592 {
593 fprintf (dump_file, "%s ",
594 get_static_name (index));
595 }
596 fprintf (dump_file, "\n locals written: ");
597 if (l->statics_written)
598 EXECUTE_IF_SET_IN_BITMAP (l->statics_written,
599 0, index, bi)
600 {
601 fprintf (dump_file, "%s ", get_static_name (index));
602 }
603 }
604 }
605 \f
606 /* Set READ_ALL/WRITE_ALL based on decl flags of NODE. */
607
608 static void
609 read_write_all_from_decl (struct cgraph_node *node,
610 bool &read_all, bool &write_all)
611 {
612 tree decl = node->decl;
613 int flags = flags_from_decl_or_type (decl);
614 if ((flags & ECF_LEAF)
615 && node->get_availability () < AVAIL_INTERPOSABLE)
616 ;
617 else if (flags & ECF_CONST)
618 ;
619 else if ((flags & ECF_PURE) || node->cannot_return_p ())
620 {
621 read_all = true;
622 if (dump_file && (dump_flags & TDF_DETAILS))
623 fprintf (dump_file, " %s/%i -> read all\n",
624 node->asm_name (), node->order);
625 }
626 else
627 {
628 /* TODO: To be able to produce sane results, we should also handle
629 common builtins, in particular throw. */
630 read_all = true;
631 write_all = true;
632 if (dump_file && (dump_flags & TDF_DETAILS))
633 fprintf (dump_file, " %s/%i -> read all, write all\n",
634 node->asm_name (), node->order);
635 }
636 }
637
638 /* Set READ_ALL/WRITE_ALL based on decl flags of NODE or any member
639 in the cycle of NODE. */
640
641 static void
642 get_read_write_all_from_node (struct cgraph_node *node,
643 bool &read_all, bool &write_all)
644 {
645 struct cgraph_edge *e, *ie;
646
647 /* When the function is overwritable, we cannot assume anything. */
648 if (node->get_availability () <= AVAIL_INTERPOSABLE
649 || (node->analyzed && !opt_for_fn (node->decl, flag_ipa_reference)))
650 read_write_all_from_decl (node, read_all, write_all);
651
652 for (e = node->callees;
653 e && !(read_all && write_all);
654 e = e->next_callee)
655 {
656 enum availability avail;
657 struct cgraph_node *callee = e->callee->function_symbol (&avail);
658 gcc_checking_assert (callee);
659 if (avail <= AVAIL_INTERPOSABLE
660 || (callee->analyzed && !opt_for_fn (callee->decl, flag_ipa_reference)))
661 read_write_all_from_decl (callee, read_all, write_all);
662 }
663
664 for (ie = node->indirect_calls;
665 ie && !(read_all && write_all);
666 ie = ie->next_callee)
667 if (!(ie->indirect_info->ecf_flags & ECF_CONST))
668 {
669 read_all = true;
670 if (dump_file && (dump_flags & TDF_DETAILS))
671 fprintf (dump_file, " indirect call -> read all\n");
672 if (!ie->cannot_lead_to_return_p ()
673 && !(ie->indirect_info->ecf_flags & ECF_PURE))
674 {
675 if (dump_file && (dump_flags & TDF_DETAILS))
676 fprintf (dump_file, " indirect call -> write all\n");
677 write_all = true;
678 }
679 }
680 }
681
682 /* Skip edges from and to nodes without ipa-reference enabled. This leaves
683 them out of strongly connected components and makes them easy to skip in the
684 propagation loop below. */
685
686 static bool
687 ignore_edge_p (cgraph_edge *e)
688 {
689 return (!opt_for_fn (e->caller->decl, flag_ipa_reference)
690 || !opt_for_fn (e->callee->function_symbol ()->decl,
691 flag_ipa_reference));
692 }
693
694 /* Produce the global information by performing a transitive closure
695 on the local information that was produced by analyze_function. */
696
697 static unsigned int
698 propagate (void)
699 {
700 struct cgraph_node *node;
701 struct cgraph_node **order =
702 XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
703 int order_pos;
704 int i;
705 bool remove_p;
706
707 if (dump_file)
708 cgraph_node::dump_cgraph (dump_file);
709
710 remove_p = ipa_discover_readonly_nonaddressable_vars ();
711 generate_summary ();
712
713 /* Propagate the local information through the call graph to produce
714 the global information. All the nodes within a cycle will have
715 the same info so we collapse cycles first. Then we can do the
716 propagation in one pass from the leaves to the roots. */
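  /* For instance (illustrative only): if f and g are mutually recursive,
     f reads `a' and g writes `b', then f and g form one strongly connected
     component and both end up with global statics_read = {a} and
     statics_written = {b} after the merging below.  */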
717 order_pos = ipa_reduced_postorder (order, true, true, ignore_edge_p);
718 if (dump_file)
719 ipa_print_order (dump_file, "reduced", order, order_pos);
720
721 for (i = 0; i < order_pos; i++ )
722 {
723 unsigned x;
724 struct cgraph_node *w;
725 ipa_reference_vars_info_t node_info;
726 ipa_reference_global_vars_info_t node_g;
727 ipa_reference_local_vars_info_t node_l;
728 bool read_all = false;
729 bool write_all = false;
730
731 node = order[i];
732 if (node->alias || !opt_for_fn (node->decl, flag_ipa_reference))
733 continue;
734
735 node_info = get_reference_vars_info (node);
736 gcc_assert (node_info);
737 node_l = &node_info->local;
738 node_g = &node_info->global;
739
740 if (dump_file && (dump_flags & TDF_DETAILS))
741 fprintf (dump_file, "Starting cycle with %s/%i\n",
742 node->asm_name (), node->order);
743
744 vec<cgraph_node *> cycle_nodes = ipa_get_nodes_in_cycle (node);
745
746 /* If any node in a cycle is read_all or write_all, they all are. */
747 FOR_EACH_VEC_ELT (cycle_nodes, x, w)
748 {
749 if (dump_file && (dump_flags & TDF_DETAILS))
750 fprintf (dump_file, " Visiting %s/%i\n",
751 w->asm_name (), w->order);
752 get_read_write_all_from_node (w, read_all, write_all);
753 if (read_all && write_all)
754 break;
755 }
756
757 /* Initialize the global bitmap sets for the reduced node. */
758 if (read_all)
759 node_g->statics_read = all_module_statics;
760 else
761 node_g->statics_read = copy_static_var_set (node_l->statics_read);
762 if (write_all)
763 node_g->statics_written = all_module_statics;
764 else
765 node_g->statics_written = copy_static_var_set (node_l->statics_written);
766
767 /* Merge the sets of this cycle with all sets of callees reached
768 from this cycle. */
769 FOR_EACH_VEC_ELT (cycle_nodes, x, w)
770 {
771 if (read_all && write_all)
772 break;
773
774 if (w != node)
775 {
776 ipa_reference_vars_info_t w_ri = get_reference_vars_info (w);
777 ipa_reference_local_vars_info_t w_l = &w_ri->local;
778 int flags = flags_from_decl_or_type (w->decl);
779
780 if (!(flags & ECF_CONST))
781 read_all = union_static_var_sets (node_g->statics_read,
782 w_l->statics_read);
783 if (!(flags & ECF_PURE)
784 && !w->cannot_return_p ())
785 write_all = union_static_var_sets (node_g->statics_written,
786 w_l->statics_written);
787 }
788
789 propagate_bits (node_g, w);
790 }
791
792 /* All nodes within a cycle have the same global info bitmaps. */
793 FOR_EACH_VEC_ELT (cycle_nodes, x, w)
794 {
795 ipa_reference_vars_info_t w_ri = get_reference_vars_info (w);
796 w_ri->global = *node_g;
797 }
798
799 cycle_nodes.release ();
800 }
801
802 if (dump_file)
803 {
804 for (i = 0; i < order_pos; i++)
805 {
806 unsigned x;
807 struct cgraph_node *w;
808
809 node = order[i];
810 if (node->alias || !opt_for_fn (node->decl, flag_ipa_reference))
811 continue;
812
813 fprintf (dump_file,
814 "\nFunction name:%s/%i:",
815 node->asm_name (), node->order);
816
817 ipa_reference_vars_info_t node_info = get_reference_vars_info (node);
818 ipa_reference_global_vars_info_t node_g = &node_info->global;
819
820 vec<cgraph_node *> cycle_nodes = ipa_get_nodes_in_cycle (node);
821 FOR_EACH_VEC_ELT (cycle_nodes, x, w)
822 {
823 ipa_reference_vars_info_t w_ri = get_reference_vars_info (w);
824 ipa_reference_local_vars_info_t w_l = &w_ri->local;
825 if (w != node)
826 fprintf (dump_file, "\n next cycle: %s/%i ",
827 w->asm_name (), w->order);
828 fprintf (dump_file, "\n locals read: ");
829 dump_static_vars_set_to_file (dump_file, w_l->statics_read);
830 fprintf (dump_file, "\n locals written: ");
831 dump_static_vars_set_to_file (dump_file, w_l->statics_written);
832 }
833 cycle_nodes.release ();
834
835 fprintf (dump_file, "\n globals read: ");
836 dump_static_vars_set_to_file (dump_file, node_g->statics_read);
837 fprintf (dump_file, "\n globals written: ");
838 dump_static_vars_set_to_file (dump_file, node_g->statics_written);
839 fprintf (dump_file, "\n");
840 }
841 }
842
843 /* Cleanup. */
844 FOR_EACH_DEFINED_FUNCTION (node)
845 {
846 ipa_reference_vars_info_t node_info;
847 ipa_reference_global_vars_info_t node_g;
848 ipa_reference_optimization_summary_t opt;
849
850 node_info = get_reference_vars_info (node);
851 if (!node->alias && opt_for_fn (node->decl, flag_ipa_reference)
852 && (node->get_availability () > AVAIL_INTERPOSABLE
853 || (flags_from_decl_or_type (node->decl) & ECF_LEAF)))
854 {
855 node_g = &node_info->global;
856
857 opt = XCNEW (struct ipa_reference_optimization_summary_d);
858 set_reference_optimization_summary (node, opt);
859
860 /* Create the complementary sets. */
861
862 if (bitmap_empty_p (node_g->statics_read))
863 opt->statics_not_read = all_module_statics;
864 else
865 {
866 opt->statics_not_read
867 = BITMAP_ALLOC (&optimization_summary_obstack);
868 if (node_g->statics_read != all_module_statics)
869 bitmap_and_compl (opt->statics_not_read,
870 all_module_statics,
871 node_g->statics_read);
872 }
873
874 if (bitmap_empty_p (node_g->statics_written))
875 opt->statics_not_written = all_module_statics;
876 else
877 {
878 opt->statics_not_written
879 = BITMAP_ALLOC (&optimization_summary_obstack);
880 if (node_g->statics_written != all_module_statics)
881 bitmap_and_compl (opt->statics_not_written,
882 all_module_statics,
883 node_g->statics_written);
884 }
885 }
886 free (node_info);
887 }
888
889 ipa_free_postorder_info ();
890 free (order);
891
892 bitmap_obstack_release (&local_info_obstack);
893 ipa_reference_vars_vector.release ();
894 if (dump_file)
895 splay_tree_delete (reference_vars_to_consider);
896 reference_vars_to_consider = NULL;
897 return remove_p ? TODO_remove_functions : 0;
898 }
899
900 /* Return true if we need to write a summary for NODE. */
901
902 static bool
903 write_node_summary_p (struct cgraph_node *node,
904 lto_symtab_encoder_t encoder,
905 bitmap ltrans_statics)
906 {
907 ipa_reference_optimization_summary_t info;
908
909 /* See if we have (non-empty) info. */
910 if (!node->definition || node->global.inlined_to)
911 return false;
912 info = get_reference_optimization_summary (node);
913 if (!info || (bitmap_empty_p (info->statics_not_read)
914 && bitmap_empty_p (info->statics_not_written)))
915 return false;
916
917 /* See if we want to encode it.
918 Also encode referenced functions, since constant folding might turn a
919 reference into a direct call.
920
921 In the future we might also want to include summaries of functions referenced
922 by initializers of constant variables referenced in the current unit. */
923 if (!reachable_from_this_partition_p (node, encoder)
924 && !referenced_from_this_partition_p (node, encoder))
925 return false;
926
927 /* See if the info has non-empty intersections with vars we want to encode. */
928 if (!bitmap_intersect_p (info->statics_not_read, ltrans_statics)
929 && !bitmap_intersect_p (info->statics_not_written, ltrans_statics))
930 return false;
931 return true;
932 }
933
934 /* Stream out BITS & LTRANS_STATICS as a list of decls to OB.
935 LTRANS_STATICS_BITCOUNT specifies the number of bits set in LTRANS_STATICS,
936 or -1. When it is positive, output just -1 when
937 BITS & LTRANS_STATICS == LTRANS_STATICS. */
938
939 static void
940 stream_out_bitmap (struct lto_simple_output_block *ob,
941 bitmap bits, bitmap ltrans_statics,
942 int ltrans_statics_bitcount)
943 {
944 int count = 0;
945 unsigned int index;
946 bitmap_iterator bi;
947 if (bits == all_module_statics)
948 {
949 streamer_write_hwi_stream (ob->main_stream, -1);
950 return;
951 }
952 EXECUTE_IF_AND_IN_BITMAP (bits, ltrans_statics, 0, index, bi)
953 count ++;
954 if (count == ltrans_statics_bitcount)
955 {
956 streamer_write_hwi_stream (ob->main_stream, -1);
957 return;
958 }
959 streamer_write_hwi_stream (ob->main_stream, count);
960 if (!count)
961 return;
962 EXECUTE_IF_AND_IN_BITMAP (bits, ltrans_statics, 0, index, bi)
963 {
964 tree decl = (tree)splay_tree_lookup (reference_vars_to_consider, index)->value;
965 lto_output_var_decl_index (ob->decl_state, ob->main_stream, decl);
966 }
967 }
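/* A worked example of the encoding above (illustrative, assuming the
   LTRANS statics are {a, b, c}):

     bits == all_module_statics       ->  -1
     bits & {a, b, c} == {a, b, c}    ->  -1
     bits & {a, b, c} == {a, c}       ->  2, decl index of a, decl index of c
     bits & {a, b, c} == {}           ->  0

   The reader rebuilds the bitmap from the decl indices, or substitutes
   all_module_statics when it reads -1.  */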
968
969 /* Serialize the ipa info for lto. */
970
971 static void
972 ipa_reference_write_optimization_summary (void)
973 {
974 struct lto_simple_output_block *ob
975 = lto_create_simple_output_block (LTO_section_ipa_reference);
976 unsigned int count = 0;
977 int ltrans_statics_bitcount = 0;
978 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
979 bitmap ltrans_statics = BITMAP_ALLOC (NULL);
980 int i;
981
982 reference_vars_to_consider = splay_tree_new (splay_tree_compare_ints, 0, 0);
983
984 /* See what variables we are interested in. */
985 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
986 {
987 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
988 varpool_node *vnode = dyn_cast <varpool_node *> (snode);
989 if (vnode
990 && bitmap_bit_p (all_module_statics,
991 ipa_reference_var_uid (vnode->decl))
992 && referenced_from_this_partition_p (vnode, encoder))
993 {
994 tree decl = vnode->decl;
995 bitmap_set_bit (ltrans_statics, ipa_reference_var_uid (decl));
996 splay_tree_insert (reference_vars_to_consider,
997 ipa_reference_var_uid (decl),
998 (splay_tree_value)decl);
999 ltrans_statics_bitcount ++;
1000 }
1001 }
1002
1003
1004 if (ltrans_statics_bitcount)
1005 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
1006 {
1007 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
1008 cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
1009 if (cnode && write_node_summary_p (cnode, encoder, ltrans_statics))
1010 count++;
1011 }
1012
1013 streamer_write_uhwi_stream (ob->main_stream, count);
1014 if (count)
1015 stream_out_bitmap (ob, ltrans_statics, ltrans_statics,
1016 -1);
1017
1018 /* Process all of the functions. */
1019 if (ltrans_statics_bitcount)
1020 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
1021 {
1022 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
1023 cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
1024 if (cnode && write_node_summary_p (cnode, encoder, ltrans_statics))
1025 {
1026 ipa_reference_optimization_summary_t info;
1027 int node_ref;
1028
1029 info = get_reference_optimization_summary (cnode);
1030 node_ref = lto_symtab_encoder_encode (encoder, snode);
1031 streamer_write_uhwi_stream (ob->main_stream, node_ref);
1032
1033 stream_out_bitmap (ob, info->statics_not_read, ltrans_statics,
1034 ltrans_statics_bitcount);
1035 stream_out_bitmap (ob, info->statics_not_written, ltrans_statics,
1036 ltrans_statics_bitcount);
1037 }
1038 }
1039 BITMAP_FREE (ltrans_statics);
1040 lto_destroy_simple_output_block (ob);
1041 splay_tree_delete (reference_vars_to_consider);
1042 }
1043
1044 /* Deserialize the ipa info for lto. */
1045
1046 static void
1047 ipa_reference_read_optimization_summary (void)
1048 {
1049 struct lto_file_decl_data ** file_data_vec
1050 = lto_get_file_decl_data ();
1051 struct lto_file_decl_data * file_data;
1052 unsigned int j = 0;
1053 bitmap_obstack_initialize (&optimization_summary_obstack);
1054
1055 node_removal_hook_holder =
1056 symtab->add_cgraph_removal_hook (&remove_node_data, NULL);
1057 node_duplication_hook_holder =
1058 symtab->add_cgraph_duplication_hook (&duplicate_node_data, NULL);
1059 all_module_statics = BITMAP_ALLOC (&optimization_summary_obstack);
1060
1061 while ((file_data = file_data_vec[j++]))
1062 {
1063 const char *data;
1064 size_t len;
1065 struct lto_input_block *ib
1066 = lto_create_simple_input_block (file_data,
1067 LTO_section_ipa_reference,
1068 &data, &len);
1069 if (ib)
1070 {
1071 unsigned int i;
1072 unsigned int f_count = streamer_read_uhwi (ib);
1073 int b_count;
1074 if (!f_count)
1075 continue;
1076 b_count = streamer_read_hwi (ib);
1077 if (dump_file)
1078 fprintf (dump_file, "all module statics:");
1079 for (i = 0; i < (unsigned int)b_count; i++)
1080 {
1081 unsigned int var_index = streamer_read_uhwi (ib);
1082 tree v_decl = lto_file_decl_data_get_var_decl (file_data,
1083 var_index);
1084 bitmap_set_bit (all_module_statics,
1085 ipa_reference_var_uid (v_decl));
1086 if (dump_file)
1087 fprintf (dump_file, " %s", fndecl_name (v_decl));
1088 }
1089
1090 for (i = 0; i < f_count; i++)
1091 {
1092 unsigned int j, index;
1093 struct cgraph_node *node;
1094 ipa_reference_optimization_summary_t info;
1095 int v_count;
1096 lto_symtab_encoder_t encoder;
1097
1098 index = streamer_read_uhwi (ib);
1099 encoder = file_data->symtab_node_encoder;
1100 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref
1101 (encoder, index));
1102 info = XCNEW (struct ipa_reference_optimization_summary_d);
1103 set_reference_optimization_summary (node, info);
1104 info->statics_not_read = BITMAP_ALLOC (&optimization_summary_obstack);
1105 info->statics_not_written = BITMAP_ALLOC (&optimization_summary_obstack);
1106 if (dump_file)
1107 fprintf (dump_file,
1108 "\nFunction name:%s/%i:\n static not read:",
1109 node->asm_name (), node->order);
1110
1111 /* Set the statics not read. */
1112 v_count = streamer_read_hwi (ib);
1113 if (v_count == -1)
1114 {
1115 info->statics_not_read = all_module_statics;
1116 if (dump_file)
1117 fprintf (dump_file, " all module statics");
1118 }
1119 else
1120 for (j = 0; j < (unsigned int)v_count; j++)
1121 {
1122 unsigned int var_index = streamer_read_uhwi (ib);
1123 tree v_decl = lto_file_decl_data_get_var_decl (file_data,
1124 var_index);
1125 bitmap_set_bit (info->statics_not_read,
1126 ipa_reference_var_uid (v_decl));
1127 if (dump_file)
1128 fprintf (dump_file, " %s", fndecl_name (v_decl));
1129 }
1130
1131 if (dump_file)
1132 fprintf (dump_file,
1133 "\n static not written:");
1134 /* Set the statics not written. */
1135 v_count = streamer_read_hwi (ib);
1136 if (v_count == -1)
1137 {
1138 info->statics_not_written = all_module_statics;
1139 if (dump_file)
1140 fprintf (dump_file, " all module statics");
1141 }
1142 else
1143 for (j = 0; j < (unsigned int)v_count; j++)
1144 {
1145 unsigned int var_index = streamer_read_uhwi (ib);
1146 tree v_decl = lto_file_decl_data_get_var_decl (file_data,
1147 var_index);
1148 bitmap_set_bit (info->statics_not_written,
1149 ipa_reference_var_uid (v_decl));
1150 if (dump_file)
1151 fprintf (dump_file, " %s", fndecl_name (v_decl));
1152 }
1153 if (dump_file)
1154 fprintf (dump_file, "\n");
1155 }
1156
1157 lto_destroy_simple_input_block (file_data,
1158 LTO_section_ipa_reference,
1159 ib, data, len);
1160 }
1161 else
1162 /* Fatal error here. We do not want to support compiling ltrans units with
1163 a different version of the compiler or different flags than the WPA unit, so
1164 this should never happen. */
1165 fatal_error (input_location,
1166 "ipa reference summary is missing in ltrans unit");
1167 }
1168 }
1169
1170 namespace {
1171
1172 const pass_data pass_data_ipa_reference =
1173 {
1174 IPA_PASS, /* type */
1175 "static-var", /* name */
1176 OPTGROUP_NONE, /* optinfo_flags */
1177 TV_IPA_REFERENCE, /* tv_id */
1178 0, /* properties_required */
1179 0, /* properties_provided */
1180 0, /* properties_destroyed */
1181 0, /* todo_flags_start */
1182 0, /* todo_flags_finish */
1183 };
1184
1185 class pass_ipa_reference : public ipa_opt_pass_d
1186 {
1187 public:
1188 pass_ipa_reference (gcc::context *ctxt)
1189 : ipa_opt_pass_d (pass_data_ipa_reference, ctxt,
1190 NULL, /* generate_summary */
1191 NULL, /* write_summary */
1192 NULL, /* read_summary */
1193 ipa_reference_write_optimization_summary, /*
1194 write_optimization_summary */
1195 ipa_reference_read_optimization_summary, /*
1196 read_optimization_summary */
1197 NULL, /* stmt_fixup */
1198 0, /* function_transform_todo_flags_start */
1199 NULL, /* function_transform */
1200 NULL) /* variable_transform */
1201 {}
1202
1203 /* opt_pass methods: */
1204 virtual bool gate (function *)
1205 {
1206 return ((in_lto_p || flag_ipa_reference)
1207 /* Don't bother doing anything if the program has errors. */
1208 && !seen_error ());
1209 }
1210
1211 virtual unsigned int execute (function *) { return propagate (); }
1212
1213 }; // class pass_ipa_reference
1214
1215 } // anon namespace
1216
1217 ipa_opt_pass_d *
1218 make_pass_ipa_reference (gcc::context *ctxt)
1219 {
1220 return new pass_ipa_reference (ctxt);
1221 }
1222
1223 /* Reset all state within ipa-reference.c so that we can rerun the compiler
1224 within the same process. For use by toplev::finalize. */
1225
1226 void
1227 ipa_reference_c_finalize (void)
1228 {
1229 if (ipa_init_p)
1230 {
1231 bitmap_obstack_release (&optimization_summary_obstack);
1232 ipa_init_p = false;
1233 }
1234
1235 if (node_removal_hook_holder)
1236 {
1237 symtab->remove_cgraph_removal_hook (node_removal_hook_holder);
1238 node_removal_hook_holder = NULL;
1239 }
1240 if (node_duplication_hook_holder)
1241 {
1242 symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
1243 node_duplication_hook_holder = NULL;
1244 }
1245 }