46d03e692483403cf272348781d7cb079101a9b9
[gcc.git] / gcc / ipa-pure-const.c
1 /* Callgraph based analysis of static variables.
2 Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This file marks functions as being either const (TREE_READONLY) or
23 pure (DECL_PURE_P). It can also set a variant of these that
24 are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
25
26 This must be run after inlining decisions have been made since
27 otherwise, the local sets will not contain information that is
28 consistent with post inlined state. The global sets are not prone
29 to this problem since they are by definition transitive. */
30
31 /* The code in this module is called by the ipa pass manager. It
32 should be one of the later passes since it's information is used by
33 the rest of the compilation. */
34
35 #include "config.h"
36 #include "system.h"
37 #include "coretypes.h"
38 #include "tm.h"
39 #include "tree.h"
40 #include "tree-flow.h"
41 #include "tree-inline.h"
42 #include "tree-pass.h"
43 #include "langhooks.h"
44 #include "pointer-set.h"
45 #include "ggc.h"
46 #include "ipa-utils.h"
47 #include "gimple.h"
48 #include "cgraph.h"
49 #include "output.h"
50 #include "flags.h"
51 #include "timevar.h"
52 #include "toplev.h"
53 #include "diagnostic.h"
54 #include "langhooks.h"
55 #include "target.h"
56 #include "lto-streamer.h"
57 #include "cfgloop.h"
58 #include "tree-scalar-evolution.h"
59 #include "intl.h"
60 #include "opts.h"
61
/* Set of nodes already visited while scanning shared static
   initializers, so that each one is processed at most once.  */
static struct pointer_set_t *visited_nodes;

/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};

/* Holder for the const_state.  There is one of these per function
   decl.  */
struct funct_state_d
{
  /* See above.  */
  enum pure_const_state_e pure_const_state;
  /* What user set here; we can be always sure about this.  */
  enum pure_const_state_e state_previously_known;
  /* Looping status as declared by the user (via attributes/flags on
     the decl); trusted in the same way as state_previously_known.  */
  bool looping_previously_known;

  /* True if the function could possibly infinite loop.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to cse pure and const
     calls, it is not legal to have dce get rid of the call if there
     is a possibility that the call could infinite loop since this is
     a behavioral change.  */
  bool looping;

  /* True if the function may throw an exception that escapes it.  */
  bool can_throw;
};

typedef struct funct_state_d * funct_state;

/* The storage of the funct_state is abstracted because there is the
   possibility that it may be desirable to move this to the cgraph
   local info.  */

/* Array, indexed by cgraph node uid, of function states.  */

DEF_VEC_P (funct_state);
DEF_VEC_ALLOC_P (funct_state, heap);
static VEC (funct_state, heap) *funct_state_vec;

/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
111
112 /* Try to guess if function body will always be visible to compiler
113 when compiling the call and whether compiler will be able
114 to propagate the information by itself. */
115
116 static bool
117 function_always_visible_to_compiler_p (tree decl)
118 {
119 return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl));
120 }
121
122 /* Emit suggestion about attribute ATTRIB_NAME for DECL. KNOWN_FINITE
123 is true if the function is known to be finite. The diagnostic is
124 controlled by OPTION. WARNED_ABOUT is a pointer_set unique for
125 OPTION, this function may initialize it and it is always returned
126 by the function. */
127
/* Emit suggestion about attribute ATTRIB_NAME for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  The diagnostic is
   controlled by OPTION.  WARNED_ABOUT is a pointer_set unique for
   OPTION, this function may initialize it and it is always returned
   by the function.  */

static struct pointer_set_t *
suggest_attribute (int option, tree decl, bool known_finite,
		   struct pointer_set_t *warned_about,
		   const char * attrib_name)
{
  /* Nothing to do unless the user enabled the warning option.  */
  if (!option_enabled (option))
    return warned_about;
  /* Skip volatile decls, and skip finite functions whose body the
     compiler can always see (it will derive the attribute itself).  */
  if (TREE_THIS_VOLATILE (decl)
      || (known_finite && function_always_visible_to_compiler_p (decl)))
    return warned_about;

  /* Lazily create the per-option set and warn once per decl.  */
  if (!warned_about)
    warned_about = pointer_set_create ();
  if (pointer_set_contains (warned_about, decl))
    return warned_about;
  pointer_set_insert (warned_about, decl);
  warning_at (DECL_SOURCE_LOCATION (decl),
	      option,
	      known_finite
	      ? _("function might be candidate for attribute %<%s%>")
	      : _("function might be candidate for attribute %<%s%>"
		  " if it is known to return normally"), attrib_name);
  return warned_about;
}
152
153 /* Emit suggestion about __attribute_((pure)) for DECL. KNOWN_FINITE
154 is true if the function is known to be finite. */
155
/* Emit suggestion about __attribute__((pure)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_pure (tree decl, bool known_finite)
{
  /* Persistent set of decls already warned about for this option.  */
  static struct pointer_set_t *warned_about;

  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
			 known_finite, warned_about, "pure");
}
165
166 /* Emit suggestion about __attribute_((const)) for DECL. KNOWN_FINITE
167 is true if the function is known to be finite. */
168
/* Emit suggestion about __attribute__((const)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_const (tree decl, bool known_finite)
{
  /* Persistent set of decls already warned about for this option.  */
  static struct pointer_set_t *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
			 known_finite, warned_about, "const");
}
/* Free the function state storage.  (Note: the original comment said
   "Init", which does not match the body.)
   NOTE(review): funct_state_vec is a VEC allocated with the heap VEC
   macros; presumably this should use VEC_free rather than a plain
   free of the vector header -- confirm against vec.h semantics.  */

static void
finish_state (void)
{
  free (funct_state_vec);
}
184
185
186 /* Return the function state from NODE. */
187
188 static inline funct_state
189 get_function_state (struct cgraph_node *node)
190 {
191 if (!funct_state_vec
192 || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid)
193 return NULL;
194 return VEC_index (funct_state, funct_state_vec, node->uid);
195 }
196
197 /* Set the function state S for NODE. */
198
/* Set the function state S for NODE, growing the state vector (with
   zero-fill) if NODE's uid is not yet covered.  */

static inline void
set_function_state (struct cgraph_node *node, funct_state s)
{
  if (!funct_state_vec
      || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid)
    VEC_safe_grow_cleared (funct_state, heap, funct_state_vec, node->uid + 1);
  VEC_replace (funct_state, funct_state_vec, node->uid, s);
}
207
208 /* Check to see if the use (or definition when CHECKING_WRITE is true)
209 variable T is legal in a function that is either pure or const. */
210
211 static inline void
212 check_decl (funct_state local,
213 tree t, bool checking_write)
214 {
215 /* Do not want to do anything with volatile except mark any
216 function that uses one to be not const or pure. */
217 if (TREE_THIS_VOLATILE (t))
218 {
219 local->pure_const_state = IPA_NEITHER;
220 if (dump_file)
221 fprintf (dump_file, " Volatile operand is not const/pure");
222 return;
223 }
224
225 /* Do not care about a local automatic that is not static. */
226 if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
227 return;
228
229 /* If the variable has the "used" attribute, treat it as if it had a
230 been touched by the devil. */
231 if (DECL_PRESERVE_P (t))
232 {
233 local->pure_const_state = IPA_NEITHER;
234 if (dump_file)
235 fprintf (dump_file, " Used static/global variable is not const/pure\n");
236 return;
237 }
238
239 /* Since we have dealt with the locals and params cases above, if we
240 are CHECKING_WRITE, this cannot be a pure or constant
241 function. */
242 if (checking_write)
243 {
244 local->pure_const_state = IPA_NEITHER;
245 if (dump_file)
246 fprintf (dump_file, " static/global memory write is not const/pure\n");
247 return;
248 }
249
250 if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
251 {
252 /* Readonly reads are safe. */
253 if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
254 return; /* Read of a constant, do not change the function state. */
255 else
256 {
257 if (dump_file)
258 fprintf (dump_file, " global memory read is not const\n");
259 /* Just a regular read. */
260 if (local->pure_const_state == IPA_CONST)
261 local->pure_const_state = IPA_PURE;
262 }
263 }
264 else
265 {
266 /* Compilation level statics can be read if they are readonly
267 variables. */
268 if (TREE_READONLY (t))
269 return;
270
271 if (dump_file)
272 fprintf (dump_file, " static memory read is not const\n");
273 /* Just a regular read. */
274 if (local->pure_const_state == IPA_CONST)
275 local->pure_const_state = IPA_PURE;
276 }
277 }
278
279
280 /* Check to see if the use (or definition when CHECKING_WRITE is true)
281 variable T is legal in a function that is either pure or const. */
282
/* Check to see if the use (or definition when CHECKING_WRITE is true)
   of the memory reference T is legal in a function that is either
   pure or const.  Works on the base address of the reference.  */

static inline void
check_op (funct_state local, tree t, bool checking_write)
{
  t = get_base_address (t);
  if (t && TREE_THIS_VOLATILE (t))
    {
      /* Volatile access poisons both const and pure.  */
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
      return;
    }
  else if (t
	   && INDIRECT_REF_P (t)
	   && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
	   && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
    {
      /* A dereference that provably cannot touch global memory is
	 harmless for const/pure purposes.  */
      if (dump_file)
	fprintf (dump_file, " Indirect ref to local memory is OK\n");
      return;
    }
  else if (checking_write)
    {
      /* Any other indirect store may hit global memory.  */
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, " Indirect ref write is not const/pure\n");
      return;
    }
  else
    {
      /* A possibly-global read demotes const to pure.  */
      if (dump_file)
	fprintf (dump_file, " Indirect ref read is not const\n");
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}
318
319 /* Check the parameters of a function call to CALL_EXPR to see if
320 there are any references in the parameters that are not allowed for
321 pure or const functions. Also check to see if this is either an
322 indirect call, a call outside the compilation unit, or has special
323 attributes that may also effect the purity. The CALL_EXPR node for
324 the entire call expression. */
325
326 static void
327 check_call (funct_state local, gimple call, bool ipa)
328 {
329 int flags = gimple_call_flags (call);
330 tree callee_t = gimple_call_fndecl (call);
331 bool possibly_throws = stmt_could_throw_p (call);
332 bool possibly_throws_externally = (possibly_throws
333 && stmt_can_throw_external (call));
334
335 if (possibly_throws)
336 {
337 unsigned int i;
338 for (i = 0; i < gimple_num_ops (call); i++)
339 if (gimple_op (call, i)
340 && tree_could_throw_p (gimple_op (call, i)))
341 {
342 if (possibly_throws && flag_non_call_exceptions)
343 {
344 if (dump_file)
345 fprintf (dump_file, " operand can throw; looping\n");
346 local->looping = true;
347 }
348 if (possibly_throws_externally)
349 {
350 if (dump_file)
351 fprintf (dump_file, " operand can throw externally\n");
352 local->can_throw = true;
353 }
354 }
355 }
356
357 /* The const and pure flags are set by a variety of places in the
358 compiler (including here). If someone has already set the flags
359 for the callee, (such as for some of the builtins) we will use
360 them, otherwise we will compute our own information.
361
362 Const and pure functions have less clobber effects than other
363 functions so we process these first. Otherwise if it is a call
364 outside the compilation unit or an indirect call we punt. This
365 leaves local calls which will be processed by following the call
366 graph. */
367 if (callee_t)
368 {
369 /* When bad things happen to bad functions, they cannot be const
370 or pure. */
371 if (setjmp_call_p (callee_t))
372 {
373 if (dump_file)
374 fprintf (dump_file, " setjmp is not const/pure\n");
375 local->looping = true;
376 local->pure_const_state = IPA_NEITHER;
377 }
378
379 if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
380 switch (DECL_FUNCTION_CODE (callee_t))
381 {
382 case BUILT_IN_LONGJMP:
383 case BUILT_IN_NONLOCAL_GOTO:
384 if (dump_file)
385 fprintf (dump_file, " longjmp and nonlocal goto is not const/pure\n");
386 local->pure_const_state = IPA_NEITHER;
387 local->looping = true;
388 break;
389 default:
390 break;
391 }
392 }
393
394 /* When not in IPA mode, we can still handle self recursion. */
395 if (!ipa && callee_t == current_function_decl)
396 {
397 if (dump_file)
398 fprintf (dump_file, " Recursive call can loop.\n");
399 local->looping = true;
400 }
401 /* Either calle is unknown or we are doing local analysis.
402 Look to see if there are any bits available for the callee (such as by
403 declaration or because it is builtin) and process solely on the basis of
404 those bits. */
405 else if (!ipa || !callee_t)
406 {
407 if (possibly_throws && flag_non_call_exceptions)
408 {
409 if (dump_file)
410 fprintf (dump_file, " can throw; looping\n");
411 local->looping = true;
412 }
413 if (possibly_throws_externally)
414 {
415 if (dump_file)
416 {
417 fprintf (dump_file, " can throw externally to lp %i\n",
418 lookup_stmt_eh_lp (call));
419 if (callee_t)
420 fprintf (dump_file, " callee:%s\n",
421 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
422 }
423 local->can_throw = true;
424 }
425 if (flags & ECF_CONST)
426 {
427 if (callee_t && DECL_LOOPING_CONST_OR_PURE_P (callee_t))
428 {
429 if (dump_file)
430 fprintf (dump_file, " calls looping pure.\n");
431 local->looping = true;
432 }
433 }
434 else if (flags & ECF_PURE)
435 {
436 if (callee_t && DECL_LOOPING_CONST_OR_PURE_P (callee_t))
437 {
438 if (dump_file)
439 fprintf (dump_file, " calls looping const.\n");
440 local->looping = true;
441 }
442 if (dump_file)
443 fprintf (dump_file, " pure function call in not const\n");
444 if (local->pure_const_state == IPA_CONST)
445 local->pure_const_state = IPA_PURE;
446 }
447 else
448 {
449 if (dump_file)
450 fprintf (dump_file, " uknown function call is not const/pure\n");
451 local->pure_const_state = IPA_NEITHER;
452 local->looping = true;
453 }
454 }
455 /* Direct functions calls are handled by IPA propagation. */
456 }
457
458 /* Wrapper around check_decl for loads. */
459
460 static bool
461 check_load (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
462 {
463 if (DECL_P (op))
464 check_decl ((funct_state)data, op, false);
465 else
466 check_op ((funct_state)data, op, false);
467 return false;
468 }
469
470 /* Wrapper around check_decl for stores. */
471
472 static bool
473 check_store (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
474 {
475 if (DECL_P (op))
476 check_decl ((funct_state)data, op, true);
477 else
478 check_op ((funct_state)data, op, true);
479 return false;
480 }
481
482 /* Look into pointer pointed to by GSIP and figure out what interesting side
483 effects it has. */
/* Look into pointer pointed to by GSIP and figure out what interesting side
   effects it has.  Accumulates the result into LOCAL; IPA selects
   whole-program vs. local treatment of calls.  */
static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple stmt = gsi_stmt (*gsip);
  unsigned int i = 0;

  /* Debug statements have no runtime effect.  */
  if (is_gimple_debug (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, " scanning: ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local, check_load, check_store);

  /* Non-call statements that may throw (only possible with
     -fnon-call-exceptions) affect looping/can_throw; call throws are
     handled inside check_call.  */
  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (stmt))
    {
      if (flag_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, " can throw; looping");
	  local->looping = true;
	}
      if (stmt_can_throw_external (stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, " can throw externally");
	  local->can_throw = true;
	}
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, stmt, ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (stmt)))
	/* Target of long jump.  */
	{
          if (dump_file)
            fprintf (dump_file, " nonlocal label is not const/pure");
	  local->pure_const_state = IPA_NEITHER;
	}
      break;
    case GIMPLE_ASM:
      /* An asm that clobbers "memory" can touch anything.  */
      for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
	{
	  tree op = gimple_asm_clobber_op (stmt, i);
	  if (simple_cst_equal(TREE_VALUE (op), memory_identifier_string) == 1)
	    {
              if (dump_file)
                fprintf (dump_file, " memory asm clobber is not const/pure");
	      /* Abandon all hope, ye who enter here. */
	      local->pure_const_state = IPA_NEITHER;
	    }
	}
      if (gimple_asm_volatile_p (stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, " volatile is not const/pure");
	  /* Abandon all hope, ye who enter here. */
	  local->pure_const_state = IPA_NEITHER;
	  local->looping = true;
	}
      return;
    default:
      break;
    }
}
557
558
559 /* This is the main routine for finding the reference patterns for
560 global variables within a function FN. */
561
/* This is the main routine for finding the reference patterns for
   global variables within a function FN.  Scans every statement,
   then refines the looping bit with loop-finiteness analysis, and
   finally folds in any flags the user already put on the decl.
   Returns a freshly allocated funct_state (caller owns it).  IPA
   selects whole-program vs. purely local call handling.  */

static funct_state
analyze_function (struct cgraph_node *fn, bool ipa)
{
  tree decl = fn->decl;
  tree old_decl = current_function_decl;
  funct_state l;
  basic_block this_block;

  /* Start optimistic (const, not looping, nothrow); scanning only
     ever downgrades.  */
  l = XCNEW (struct funct_state_d);
  l->pure_const_state = IPA_CONST;
  l->state_previously_known = IPA_NEITHER;
  l->looping_previously_known = true;
  l->looping = false;
  l->can_throw = false;

  if (dump_file)
    {
      fprintf (dump_file, "\n\n local analysis of %s\n ",
	       cgraph_node_name (fn));
    }

  push_cfun (DECL_STRUCT_FUNCTION (decl));
  current_function_decl = decl;

  FOR_EACH_BB (this_block)
    {
      gimple_stmt_iterator gsi;
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof(wi));
      for (gsi = gsi_start_bb (this_block);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  check_stmt (&gsi, l, ipa);
	  /* Once every bit is at its worst value there is nothing
	     left to learn; bail out of the scan.  */
	  if (l->pure_const_state == IPA_NEITHER && l->looping && l->can_throw)
	    goto end;
	}
    }

end:
  if (l->pure_const_state != IPA_NEITHER)
    {
      /* Const functions cannot have back edges (an
	 indication of possible infinite loop side
	 effect.  */
      if (mark_dfs_back_edges ())
        {
	  /* Preheaders are needed for SCEV to work.
	     Simple lateches and recorded exits improve chances that loop will
	     proved to be finite in testcases such as in loop-15.c and loop-24.c  */
	  loop_optimizer_init (LOOPS_NORMAL
			       | LOOPS_HAVE_RECORDED_EXITS);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    flow_loops_dump (dump_file, NULL, 0);
	  if (mark_irreducible_loops ())
	    {
	      if (dump_file)
	        fprintf (dump_file, " has irreducible loops\n");
	      l->looping = true;
	    }
	  else
	    {
	      loop_iterator li;
	      struct loop *loop;
	      scev_initialize ();
	      FOR_EACH_LOOP (li, loop, 0)
		if (!finite_loop_p (loop))
		  {
		    if (dump_file)
		      fprintf (dump_file, " can not prove finiteness of loop %i\n", loop->num);
		    l->looping =true;
		    break;
		  }
	      scev_finalize ();
	    }
	  loop_optimizer_finalize ();
	}
    }

  /* Trust flags the user (or an earlier pass) put on the decl; they
     may improve on what scanning found.  */
  if (TREE_READONLY (decl))
    {
      l->pure_const_state = IPA_CONST;
      l->state_previously_known = IPA_CONST;
      if (!DECL_LOOPING_CONST_OR_PURE_P (decl))
	l->looping = false, l->looping_previously_known = false;
    }
  if (DECL_PURE_P (decl))
    {
      if (l->pure_const_state != IPA_CONST)
        l->pure_const_state = IPA_PURE;
      l->state_previously_known = IPA_PURE;
      if (!DECL_LOOPING_CONST_OR_PURE_P (decl))
	l->looping = false, l->looping_previously_known = false;
    }
  if (TREE_NOTHROW (decl))
    l->can_throw = false;

  pop_cfun ();
  current_function_decl = old_decl;
  if (dump_file)
    {
      if (l->looping)
        fprintf (dump_file, "Function is locally looping.\n");
      if (l->can_throw)
        fprintf (dump_file, "Function is locally throwing.\n");
      if (l->pure_const_state == IPA_CONST)
        fprintf (dump_file, "Function is locally const.\n");
      if (l->pure_const_state == IPA_PURE)
        fprintf (dump_file, "Function is locally pure.\n");
    }
  return l;
}
675
676 /* Called when new function is inserted to callgraph late. */
/* Called when new function is inserted to callgraph late.  Analyzes
   NODE if its body is sufficiently available.
   NOTE(review): when availability is exactly AVAIL_OVERWRITABLE the
   visited_nodes set is created and destroyed without any analysis in
   between -- presumably harmless, but confirm the intent.  */
static void
add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
 if (cgraph_function_body_availability (node) < AVAIL_OVERWRITABLE)
   return;
  /* There are some shared nodes, in particular the initializers on
     static declarations.  We do not need to scan them more than once
     since all we would be interested in are the addressof
     operations.  */
  visited_nodes = pointer_set_create ();
  if (cgraph_function_body_availability (node) > AVAIL_OVERWRITABLE)
    set_function_state (node, analyze_function (node, true));
  pointer_set_destroy (visited_nodes);
  visited_nodes = NULL;
}
692
693 /* Called when new clone is inserted to callgraph late. */
694
695 static void
696 duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
697 void *data ATTRIBUTE_UNUSED)
698 {
699 if (get_function_state (src))
700 {
701 funct_state l = XNEW (struct funct_state_d);
702 gcc_assert (!get_function_state (dst));
703 memcpy (l, get_function_state (src), sizeof (*l));
704 set_function_state (dst, l);
705 }
706 }
707
708 /* Called when new clone is inserted to callgraph late. */
709
710 static void
711 remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
712 {
713 if (get_function_state (node))
714 {
715 free (get_function_state (node));
716 set_function_state (node, NULL);
717 }
718 }
719
720 \f
721 static void
722 register_hooks (void)
723 {
724 static bool init_p = false;
725
726 if (init_p)
727 return;
728
729 init_p = true;
730
731 node_removal_hook_holder =
732 cgraph_add_node_removal_hook (&remove_node_data, NULL);
733 node_duplication_hook_holder =
734 cgraph_add_node_duplication_hook (&duplicate_node_data, NULL);
735 function_insertion_hook_holder =
736 cgraph_add_function_insertion_hook (&add_new_function, NULL);
737 }
738
739
740 /* Analyze each function in the cgraph to see if it is locally PURE or
741 CONST. */
742
/* Analyze each function in the cgraph to see if it is locally PURE or
   CONST.  Registers the cgraph hooks first so states stay consistent
   with later node insertions/removals.  */

static void
generate_summary (void)
{
  struct cgraph_node *node;

  register_hooks ();

  /* There are some shared nodes, in particular the initializers on
     static declarations.  We do not need to scan them more than once
     since all we would be interested in are the addressof
     operations.  */
  visited_nodes = pointer_set_create ();

  /* Process all of the functions.

     We process AVAIL_OVERWRITABLE functions.  We can not use the results
     by default, but the info can be used at LTO with -fwhole-program or
     when function got clonned and the clone is AVAILABLE.  */

  for (node = cgraph_nodes; node; node = node->next)
    if (cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
      set_function_state (node, analyze_function (node, true));

  pointer_set_destroy (visited_nodes);
  visited_nodes = NULL;
}
769
770
771 /* Serialize the ipa info for lto. */
772
/* Serialize the ipa info for lto.  Writes, for each analyzed node in
   SET that has a state, its encoder reference plus a bitpack of the
   five state fields.  */

static void
pure_const_write_summary (cgraph_node_set set)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
  unsigned int count = 0;
  cgraph_node_set_iterator csi;

  /* First pass: count the records so the reader knows how many to
     expect.  */
  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      if (node->analyzed && get_function_state (node) != NULL)
	count++;
    }

  lto_output_uleb128_stream (ob->main_stream, count);

  /* Process all of the functions.  */
  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      if (node->analyzed && get_function_state (node) != NULL)
	{
	  struct bitpack_d *bp;
	  funct_state fs;
	  int node_ref;
	  lto_cgraph_encoder_t encoder;

	  fs = get_function_state (node);

	  encoder = ob->decl_state->cgraph_node_encoder;
	  node_ref = lto_cgraph_encoder_encode (encoder, node);
	  lto_output_uleb128_stream (ob->main_stream, node_ref);

	  /* Note that flags will need to be read in the opposite
	     order as we are pushing the bitflags into FLAGS.  */
	  bp = bitpack_create ();
	  bp_pack_value (bp, fs->pure_const_state, 2);
	  bp_pack_value (bp, fs->state_previously_known, 2);
	  bp_pack_value (bp, fs->looping_previously_known, 1);
	  bp_pack_value (bp, fs->looping, 1);
	  bp_pack_value (bp, fs->can_throw, 1);
	  lto_output_bitpack (ob->main_stream, bp);
	  bitpack_delete (bp);
	}
    }

  lto_destroy_simple_output_block (ob);
}
823
824
825 /* Deserialize the ipa info for lto. */
826
/* Deserialize the ipa info for lto.  Mirror of
   pure_const_write_summary: for each file's section, reads the record
   count then each node reference and bitpacked state, attaching a
   fresh funct_state to the decoded node.  */

static void
pure_const_read_summary (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  register_hooks ();
  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib
	= lto_create_simple_input_block (file_data,
					 LTO_section_ipa_pure_const,
					 &data, &len);
      if (ib)
	{
	  unsigned int i;
	  unsigned int count = lto_input_uleb128 (ib);

	  for (i = 0; i < count; i++)
	    {
	      unsigned int index;
	      struct cgraph_node *node;
	      struct bitpack_d *bp;
	      funct_state fs;
	      lto_cgraph_encoder_t encoder;

	      fs = XCNEW (struct funct_state_d);
	      index = lto_input_uleb128 (ib);
	      encoder = file_data->cgraph_node_encoder;
	      node = lto_cgraph_encoder_deref (encoder, index);
	      set_function_state (node, fs);

	      /* Note that the flags must be read in the opposite
		 order in which they were written (the bitflags were
		 pushed into FLAGS).  */
	      bp = lto_input_bitpack (ib);
	      fs->pure_const_state
			= (enum pure_const_state_e) bp_unpack_value (bp, 2);
	      fs->state_previously_known
			= (enum pure_const_state_e) bp_unpack_value (bp, 2);
	      fs->looping_previously_known = bp_unpack_value (bp, 1);
	      fs->looping = bp_unpack_value (bp, 1);
	      fs->can_throw = bp_unpack_value (bp, 1);
	      bitpack_delete (bp);
	    }

	  lto_destroy_simple_input_block (file_data,
					  LTO_section_ipa_pure_const,
					  ib, data, len);
	}
    }
}
882
883
884 static bool
885 ignore_edge (struct cgraph_edge *e)
886 {
887 return (!e->can_throw_external);
888 }
889
890 /* Return true if NODE is self recursive function. */
891
892 static bool
893 self_recursive_p (struct cgraph_node *node)
894 {
895 struct cgraph_edge *e;
896 for (e = node->callees; e; e = e->next_callee)
897 if (e->callee == node)
898 return true;
899 return false;
900 }
901
902 /* Produce the global information by preforming a transitive closure
903 on the local information that was produced by generate_summary.
904 Note that there is no function_transform pass since this only
905 updates the function_decl. */
906
907 static unsigned int
908 propagate (void)
909 {
910 struct cgraph_node *node;
911 struct cgraph_node *w;
912 struct cgraph_node **order =
913 XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
914 int order_pos;
915 int i;
916 struct ipa_dfs_info * w_info;
917
918 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
919 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
920 cgraph_remove_node_removal_hook (node_removal_hook_holder);
921 order_pos = ipa_utils_reduced_inorder (order, true, false, NULL);
922 if (dump_file)
923 {
924 dump_cgraph (dump_file);
925 ipa_utils_print_order(dump_file, "reduced", order, order_pos);
926 }
927
928 /* Propagate the local information thru the call graph to produce
929 the global information. All the nodes within a cycle will have
930 the same info so we collapse cycles first. Then we can do the
931 propagation in one pass from the leaves to the roots. */
932 for (i = 0; i < order_pos; i++ )
933 {
934 enum pure_const_state_e pure_const_state = IPA_CONST;
935 bool looping = false;
936 int count = 0;
937 node = order[i];
938
939 /* Find the worst state for any node in the cycle. */
940 w = node;
941 while (w)
942 {
943 struct cgraph_edge *e;
944 funct_state w_l = get_function_state (w);
945 if (pure_const_state < w_l->pure_const_state)
946 pure_const_state = w_l->pure_const_state;
947
948 if (w_l->looping)
949 looping = true;
950 if (cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
951 {
952 looping |= w_l->looping_previously_known;
953 if (pure_const_state < w_l->state_previously_known)
954 pure_const_state = w_l->state_previously_known;
955 }
956
957 if (pure_const_state == IPA_NEITHER)
958 break;
959
960 count++;
961
962 if (count > 1)
963 looping = true;
964
965 for (e = w->callees; e; e = e->next_callee)
966 {
967 struct cgraph_node *y = e->callee;
968
969 if (cgraph_function_body_availability (y) > AVAIL_OVERWRITABLE)
970 {
971 funct_state y_l = get_function_state (y);
972 if (pure_const_state < y_l->pure_const_state)
973 pure_const_state = y_l->pure_const_state;
974 if (pure_const_state == IPA_NEITHER)
975 break;
976 if (y_l->looping)
977 looping = true;
978 }
979 else
980 {
981 int flags = flags_from_decl_or_type (y->decl);
982
983 if (flags & ECF_LOOPING_CONST_OR_PURE)
984 looping = true;
985 if (flags & ECF_CONST)
986 ;
987 else if ((flags & ECF_PURE) && pure_const_state == IPA_CONST)
988 pure_const_state = IPA_PURE;
989 else
990 pure_const_state = IPA_NEITHER, looping = true;
991
992 }
993 }
994 w_info = (struct ipa_dfs_info *) w->aux;
995 w = w_info->next_cycle;
996 }
997
998 /* Copy back the region's pure_const_state which is shared by
999 all nodes in the region. */
1000 w = node;
1001 while (w)
1002 {
1003 funct_state w_l = get_function_state (w);
1004 enum pure_const_state_e this_state = pure_const_state;
1005 bool this_looping = looping;
1006
1007 if (w_l->state_previously_known != IPA_NEITHER
1008 && this_state > w_l->state_previously_known)
1009 this_state = w_l->state_previously_known;
1010 if (!this_looping && self_recursive_p (w))
1011 this_looping = true;
1012 if (!w_l->looping_previously_known)
1013 this_looping = false;
1014
1015 /* All nodes within a cycle share the same info. */
1016 w_l->pure_const_state = this_state;
1017 w_l->looping = this_looping;
1018
1019 switch (this_state)
1020 {
1021 case IPA_CONST:
1022 if (!TREE_READONLY (w->decl))
1023 {
1024 warn_function_const (w->decl, !this_looping);
1025 if (dump_file)
1026 fprintf (dump_file, "Function found to be %sconst: %s\n",
1027 this_looping ? "looping " : "",
1028 cgraph_node_name (w));
1029 }
1030 cgraph_set_readonly_flag (w, true);
1031 cgraph_set_looping_const_or_pure_flag (w, this_looping);
1032 break;
1033
1034 case IPA_PURE:
1035 if (!DECL_PURE_P (w->decl))
1036 {
1037 warn_function_pure (w->decl, !this_looping);
1038 if (dump_file)
1039 fprintf (dump_file, "Function found to be %spure: %s\n",
1040 this_looping ? "looping " : "",
1041 cgraph_node_name (w));
1042 }
1043 cgraph_set_pure_flag (w, true);
1044 cgraph_set_looping_const_or_pure_flag (w, this_looping);
1045 break;
1046
1047 default:
1048 break;
1049 }
1050 w_info = (struct ipa_dfs_info *) w->aux;
1051 w = w_info->next_cycle;
1052 }
1053 }
1054
1055 /* Cleanup. */
1056 for (node = cgraph_nodes; node; node = node->next)
1057 {
1058 /* Get rid of the aux information. */
1059 if (node->aux)
1060 {
1061 w_info = (struct ipa_dfs_info *) node->aux;
1062 free (node->aux);
1063 node->aux = NULL;
1064 }
1065 }
1066 order_pos = ipa_utils_reduced_inorder (order, true, false, ignore_edge);
1067 if (dump_file)
1068 {
1069 dump_cgraph (dump_file);
1070 ipa_utils_print_order(dump_file, "reduced for nothrow", order, order_pos);
1071 }
1072 /* Propagate the local information thru the call graph to produce
1073 the global information. All the nodes within a cycle will have
1074 the same info so we collapse cycles first. Then we can do the
1075 propagation in one pass from the leaves to the roots. */
1076 for (i = 0; i < order_pos; i++ )
1077 {
1078 bool can_throw = false;
1079 node = order[i];
1080
1081 /* Find the worst state for any node in the cycle. */
1082 w = node;
1083 while (w)
1084 {
1085 struct cgraph_edge *e;
1086 funct_state w_l = get_function_state (w);
1087
1088 if (w_l->can_throw
1089 || cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
1090 can_throw = true;
1091
1092 if (can_throw)
1093 break;
1094
1095 for (e = w->callees; e; e = e->next_callee)
1096 {
1097 struct cgraph_node *y = e->callee;
1098
1099 if (cgraph_function_body_availability (y) > AVAIL_OVERWRITABLE)
1100 {
1101 funct_state y_l = get_function_state (y);
1102
1103 if (can_throw)
1104 break;
1105 if (y_l->can_throw && !TREE_NOTHROW (w->decl)
1106 && e->can_throw_external)
1107 can_throw = true;
1108 }
1109 else if (e->can_throw_external && !TREE_NOTHROW (y->decl))
1110 can_throw = true;
1111 }
1112 w_info = (struct ipa_dfs_info *) w->aux;
1113 w = w_info->next_cycle;
1114 }
1115
1116 /* Copy back the region's pure_const_state which is shared by
1117 all nodes in the region. */
1118 w = node;
1119 while (w)
1120 {
1121 funct_state w_l = get_function_state (w);
1122 if (!can_throw && !TREE_NOTHROW (w->decl))
1123 {
1124 struct cgraph_edge *e;
1125 cgraph_set_nothrow_flag (w, true);
1126 for (e = w->callers; e; e = e->next_caller)
1127 e->can_throw_external = false;
1128 if (dump_file)
1129 fprintf (dump_file, "Function found to be nothrow: %s\n",
1130 cgraph_node_name (w));
1131 }
1132 else if (can_throw && !TREE_NOTHROW (w->decl))
1133 w_l->can_throw = true;
1134 w_info = (struct ipa_dfs_info *) w->aux;
1135 w = w_info->next_cycle;
1136 }
1137 }
1138
1139 /* Cleanup. */
1140 for (node = cgraph_nodes; node; node = node->next)
1141 {
1142 /* Get rid of the aux information. */
1143 if (node->aux)
1144 {
1145 w_info = (struct ipa_dfs_info *) node->aux;
1146 free (node->aux);
1147 node->aux = NULL;
1148 }
1149 if (cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
1150 free (get_function_state (node));
1151 }
1152
1153 free (order);
1154 VEC_free (funct_state, heap, funct_state_vec);
1155 finish_state ();
1156 return 0;
1157 }
1158
1159 static bool
1160 gate_pure_const (void)
1161 {
1162 return (flag_ipa_pure_const
1163 /* Don't bother doing anything if the program has errors. */
1164 && !(errorcount || sorrycount));
1165 }
1166
/* Pass descriptor for the interprocedural pure/const propagation pass.
   GENERATE_SUMMARY collects the per-function local information,
   the write/read summary hooks stream it (e.g. for LTO), and
   PROPAGATE computes the global solution over the call graph.  */

struct ipa_opt_pass_d pass_ipa_pure_const =
{
 {
  IPA_PASS,
  "pure-const",		                /* name */
  gate_pure_const,			/* gate */
  propagate,			        /* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_PURE_CONST,		        /* tv_id */
  0,	                                /* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0                                     /* todo_flags_finish */
 },
 generate_summary,		        /* generate_summary */
 pure_const_write_summary,		/* write_summary */
 pure_const_read_summary,		/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,			                /* function_transform */
 NULL					/* variable_transform */
};
1194
1195 /* Return true if function should be skipped for local pure const analysis. */
1196
1197 static bool
1198 skip_function_for_local_pure_const (struct cgraph_node *node)
1199 {
1200 /* Because we do not schedule pass_fixup_cfg over whole program after early optimizations
1201 we must not promote functions that are called by already processed functions. */
1202
1203 if (function_called_by_processed_nodes_p ())
1204 {
1205 if (dump_file)
1206 fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
1207 return true;
1208 }
1209 if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
1210 {
1211 if (dump_file)
1212 fprintf (dump_file, "Function is not available or overwrittable; not analyzing.\n");
1213 return true;
1214 }
1215 return false;
1216 }
1217
1218 /* Simple local pass for pure const discovery reusing the analysis from
1219 ipa_pure_const. This pass is effective when executed together with
1220 other optimization passes in early optimization pass queue. */
1221
1222 static unsigned int
1223 local_pure_const (void)
1224 {
1225 bool changed = false;
1226 funct_state l;
1227 bool skip;
1228 struct cgraph_node *node;
1229
1230 node = cgraph_node (current_function_decl);
1231 skip = skip_function_for_local_pure_const (node);
1232 if (!warn_suggest_attribute_const
1233 && !warn_suggest_attribute_pure
1234 && skip)
1235 return 0;
1236 l = analyze_function (node, false);
1237
1238 switch (l->pure_const_state)
1239 {
1240 case IPA_CONST:
1241 if (!TREE_READONLY (current_function_decl))
1242 {
1243 warn_function_const (current_function_decl, !l->looping);
1244 if (!skip)
1245 {
1246 cgraph_set_readonly_flag (node, true);
1247 cgraph_set_looping_const_or_pure_flag (node, l->looping);
1248 changed = true;
1249 }
1250 if (dump_file)
1251 fprintf (dump_file, "Function found to be %sconst: %s\n",
1252 l->looping ? "looping " : "",
1253 lang_hooks.decl_printable_name (current_function_decl,
1254 2));
1255 }
1256 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1257 && !l->looping)
1258 {
1259 if (!skip)
1260 {
1261 cgraph_set_looping_const_or_pure_flag (node, false);
1262 changed = true;
1263 }
1264 if (dump_file)
1265 fprintf (dump_file, "Function found to be non-looping: %s\n",
1266 lang_hooks.decl_printable_name (current_function_decl,
1267 2));
1268 }
1269 break;
1270
1271 case IPA_PURE:
1272 if (!DECL_PURE_P (current_function_decl))
1273 {
1274 if (!skip)
1275 {
1276 cgraph_set_pure_flag (node, true);
1277 cgraph_set_looping_const_or_pure_flag (node, l->looping);
1278 changed = true;
1279 }
1280 warn_function_pure (current_function_decl, !l->looping);
1281 if (dump_file)
1282 fprintf (dump_file, "Function found to be %spure: %s\n",
1283 l->looping ? "looping " : "",
1284 lang_hooks.decl_printable_name (current_function_decl,
1285 2));
1286 }
1287 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1288 && !l->looping)
1289 {
1290 if (!skip)
1291 {
1292 cgraph_set_looping_const_or_pure_flag (node, false);
1293 changed = true;
1294 }
1295 if (dump_file)
1296 fprintf (dump_file, "Function found to be non-looping: %s\n",
1297 lang_hooks.decl_printable_name (current_function_decl,
1298 2));
1299 }
1300 break;
1301
1302 default:
1303 break;
1304 }
1305 if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
1306 {
1307 struct cgraph_edge *e;
1308
1309 cgraph_set_nothrow_flag (node, true);
1310 for (e = node->callers; e; e = e->next_caller)
1311 e->can_throw_external = false;
1312 changed = true;
1313 if (dump_file)
1314 fprintf (dump_file, "Function found to be nothrow: %s\n",
1315 lang_hooks.decl_printable_name (current_function_decl,
1316 2));
1317 }
1318 if (l)
1319 free (l);
1320 if (changed)
1321 return execute_fixup_cfg ();
1322 else
1323 return 0;
1324 }
1325
/* Pass descriptor for the single-function (GIMPLE) variant of pure/const
   discovery, intended for the early optimization pass queue; shares its
   gate with the IPA pass.  */

struct gimple_opt_pass pass_local_pure_const =
{
 {
  GIMPLE_PASS,
  "local-pure-const",	                /* name */
  gate_pure_const,			/* gate */
  local_pure_const,		        /* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_PURE_CONST,		        /* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};