decl.c (value_annotation_hasher::handle_cache_entry): Delete.
[gcc.git] / gcc / ipa-pure-const.c
1 /* Callgraph based analysis of static variables.
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
3 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This file marks functions as being either const (TREE_READONLY) or
22 pure (DECL_PURE_P). It can also set a variant of these that
23 are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
24
25 This must be run after inlining decisions have been made since
26 otherwise, the local sets will not contain information that is
27 consistent with post inlined state. The global sets are not prone
28 to this problem since they are by definition transitive. */
29
30 /* The code in this module is called by the ipa pass manager. It
31 should be one of the later passes since it's information is used by
32 the rest of the compilation. */
33
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "tm.h"
38 #include "alias.h"
39 #include "symtab.h"
40 #include "tree.h"
41 #include "fold-const.h"
42 #include "print-tree.h"
43 #include "calls.h"
44 #include "predict.h"
45 #include "hard-reg-set.h"
46 #include "function.h"
47 #include "dominance.h"
48 #include "cfg.h"
49 #include "cfganal.h"
50 #include "basic-block.h"
51 #include "tree-ssa-alias.h"
52 #include "internal-fn.h"
53 #include "tree-eh.h"
54 #include "gimple-expr.h"
55 #include "gimple.h"
56 #include "gimple-iterator.h"
57 #include "gimple-walk.h"
58 #include "tree-cfg.h"
59 #include "tree-ssa-loop-niter.h"
60 #include "tree-inline.h"
61 #include "tree-pass.h"
62 #include "langhooks.h"
63 #include "plugin-api.h"
64 #include "ipa-ref.h"
65 #include "cgraph.h"
66 #include "ipa-utils.h"
67 #include "flags.h"
68 #include "diagnostic.h"
69 #include "gimple-pretty-print.h"
70 #include "langhooks.h"
71 #include "target.h"
72 #include "lto-streamer.h"
73 #include "data-streamer.h"
74 #include "tree-streamer.h"
75 #include "cfgloop.h"
76 #include "tree-scalar-evolution.h"
77 #include "intl.h"
78 #include "opts.h"
79 #include "varasm.h"
80
/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  Order matters: the lattice only ever moves toward
   larger values (IPA_CONST < IPA_PURE < IPA_NEITHER), which is what
   better_state/worse_state rely on.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};

/* Printable names for the lattice values above, indexed by
   pure_const_state_e.  */
const char *pure_const_names[3] = {"const", "pure", "neither"};
92
/* Holder for the const_state.  There is one of these per function
   decl.  */
struct funct_state_d
{
  /* See above.  */
  enum pure_const_state_e pure_const_state;
  /* What user set here; we can be always sure about this.  */
  enum pure_const_state_e state_previously_known;
  bool looping_previously_known;

  /* True if the function could possibly infinite loop.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to cse pure and const
     calls, it is not legal to have dce get rid of the call if there
     is a possibility that the call could infinite loop since this is
     a behavioral change.  */
  bool looping;

  /* True if an exception may escape from this function.  */
  bool can_throw;

  /* If function can call free, munmap or otherwise make previously
     non-trapping memory accesses trapping.  */
  bool can_free;
};

/* State used when we know nothing about function.  */
static struct funct_state_d varying_state
   = { IPA_NEITHER, IPA_NEITHER, true, true, true, true };


typedef struct funct_state_d * funct_state;

/* The storage of the funct_state is abstracted because there is the
   possibility that it may be desirable to move this to the cgraph
   local info.  */

/* Array, indexed by cgraph node uid, of function states.  */

static vec<funct_state> funct_state_vec;

/* Forward declaration; defined later in this file.  */
static bool gate_pure_const (void);
134
namespace {

const pass_data pass_data_ipa_pure_const =
{
  IPA_PASS, /* type */
  "pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_pure_const : public ipa_opt_pass_d
{
public:
  pass_ipa_pure_const(gcc::context *ctxt);

  /* opt_pass methods: */
  bool gate (function *) { return gate_pure_const (); }
  unsigned int execute (function *fun);

  /* Install the cgraph insertion/duplication/removal hooks below;
     idempotent (guarded by init_p).  */
  void register_hooks (void);

private:
  /* True once register_hooks has run, to avoid double registration.  */
  bool init_p;

  /* Holders of ipa cgraph hooks: */
  struct cgraph_node_hook_list *function_insertion_hook_holder;
  struct cgraph_2node_hook_list *node_duplication_hook_holder;
  struct cgraph_node_hook_list *node_removal_hook_holder;

}; // class pass_ipa_pure_const

} // anon namespace
172
173 /* Try to guess if function body will always be visible to compiler
174 when compiling the call and whether compiler will be able
175 to propagate the information by itself. */
176
177 static bool
178 function_always_visible_to_compiler_p (tree decl)
179 {
180 return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl));
181 }
182
/* Emit suggestion about attribute ATTRIB_NAME for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  The diagnostic is
   controlled by OPTION.  WARNED_ABOUT is a hash_set<tree> unique for
   OPTION, this function may initialize it and it is always returned
   by the function.  */

static hash_set<tree> *
suggest_attribute (int option, tree decl, bool known_finite,
		   hash_set<tree> *warned_about,
		   const char * attrib_name)
{
  /* Nothing to do if the warning option is disabled.  */
  if (!option_enabled (option, &global_options))
    return warned_about;
  /* Skip volatile decls, and finite functions the compiler can always
     see and analyze by itself anyway.  */
  if (TREE_THIS_VOLATILE (decl)
      || (known_finite && function_always_visible_to_compiler_p (decl)))
    return warned_about;

  /* Lazily allocate the per-option set and warn once per decl.  */
  if (!warned_about)
    warned_about = new hash_set<tree>;
  if (warned_about->contains (decl))
    return warned_about;
  warned_about->add (decl);
  warning_at (DECL_SOURCE_LOCATION (decl),
	      option,
	      known_finite
	      ? _("function might be candidate for attribute %<%s%>")
	      : _("function might be candidate for attribute %<%s%>"
		  " if it is known to return normally"), attrib_name);
  return warned_about;
}
213
214 /* Emit suggestion about __attribute_((pure)) for DECL. KNOWN_FINITE
215 is true if the function is known to be finite. */
216
217 static void
218 warn_function_pure (tree decl, bool known_finite)
219 {
220 static hash_set<tree> *warned_about;
221
222 warned_about
223 = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
224 known_finite, warned_about, "pure");
225 }
226
227 /* Emit suggestion about __attribute_((const)) for DECL. KNOWN_FINITE
228 is true if the function is known to be finite. */
229
230 static void
231 warn_function_const (tree decl, bool known_finite)
232 {
233 static hash_set<tree> *warned_about;
234 warned_about
235 = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
236 known_finite, warned_about, "const");
237 }
238
239 static void
240 warn_function_noreturn (tree decl)
241 {
242 static hash_set<tree> *warned_about;
243 if (!lang_hooks.missing_noreturn_ok_p (decl)
244 && targetm.warn_func_return (decl))
245 warned_about
246 = suggest_attribute (OPT_Wsuggest_attribute_noreturn, decl,
247 true, warned_about, "noreturn");
248 }
249
250 /* Return true if we have a function state for NODE. */
251
252 static inline bool
253 has_function_state (struct cgraph_node *node)
254 {
255 if (!funct_state_vec.exists ()
256 || funct_state_vec.length () <= (unsigned int)node->uid)
257 return false;
258 return funct_state_vec[node->uid] != NULL;
259 }
260
261 /* Return the function state from NODE. */
262
263 static inline funct_state
264 get_function_state (struct cgraph_node *node)
265 {
266 if (!funct_state_vec.exists ()
267 || funct_state_vec.length () <= (unsigned int)node->uid
268 || !funct_state_vec[node->uid])
269 /* We might want to put correct previously_known state into varying. */
270 return &varying_state;
271 return funct_state_vec[node->uid];
272 }
273
274 /* Set the function state S for NODE. */
275
276 static inline void
277 set_function_state (struct cgraph_node *node, funct_state s)
278 {
279 if (!funct_state_vec.exists ()
280 || funct_state_vec.length () <= (unsigned int)node->uid)
281 funct_state_vec.safe_grow_cleared (node->uid + 1);
282 funct_state_vec[node->uid] = s;
283 }
284
/* Check to see if the use (or definition when CHECKING_WRITE is true)
   variable T is legal in a function that is either pure or const.
   LOCAL is only ever downgraded here (IPA_CONST -> IPA_PURE ->
   IPA_NEITHER).  When IPA is true, loads and stores are left for the
   propagation phase and only the volatile/"used" checks apply.  */

static inline void
check_decl (funct_state local,
	    tree t, bool checking_write, bool ipa)
{
  /* Do not want to do anything with volatile except mark any
     function that uses one to be not const or pure.  */
  if (TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, " Volatile operand is not const/pure");
      return;
    }

  /* Do not care about a local automatic that is not static.  */
  if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
    return;

  /* If the variable has the "used" attribute, treat it as if it had a
     been touched by the devil.  */
  if (DECL_PRESERVE_P (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, " Used static/global variable is not const/pure\n");
      return;
    }

  /* In IPA mode we are not interested in checking actual loads and stores;
     they will be processed at propagation time using ipa_ref.  */
  if (ipa)
    return;

  /* Since we have dealt with the locals and params cases above, if we
     are CHECKING_WRITE, this cannot be a pure or constant
     function.  */
  if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, " static/global memory write is not const/pure\n");
      return;
    }

  if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
    {
      /* Readonly reads are safe.  */
      if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
	return; /* Read of a constant, do not change the function state.  */
      else
	{
	  if (dump_file)
	    fprintf (dump_file, " global memory read is not const\n");
	  /* Just a regular read.  */
	  if (local->pure_const_state == IPA_CONST)
	    local->pure_const_state = IPA_PURE;
	}
    }
  else
    {
      /* Compilation level statics can be read if they are readonly
	 variables.  */
      if (TREE_READONLY (t))
	return;

      if (dump_file)
	fprintf (dump_file, " static memory read is not const\n");
      /* Just a regular read.  */
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}
360
361
/* Check to see if the use (or definition when CHECKING_WRITE is true)
   variable T is legal in a function that is either pure or const.
   Unlike check_decl this handles memory references (indirect refs);
   checks are applied to the base address of T.  */

static inline void
check_op (funct_state local, tree t, bool checking_write)
{
  t = get_base_address (t);
  if (t && TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
      return;
    }
  else if (t
	   && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
	   && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
	   && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
    {
      /* A dereference that provably cannot alias global memory does
	 not affect the lattice at all.  */
      if (dump_file)
	fprintf (dump_file, " Indirect ref to local memory is OK\n");
      return;
    }
  else if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, " Indirect ref write is not const/pure\n");
      return;
    }
  else
    {
      /* A read through an arbitrary pointer may reach global memory:
	 const drops to pure.  */
      if (dump_file)
	fprintf (dump_file, " Indirect ref read is not const\n");
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}
400
/* Compute state based on ECF FLAGS and store to STATE and LOOPING.
   CANNOT_LEAD_TO_RETURN is true when control can never resume after
   the call; then side effects are unobservable and the result is
   treated as looping pure rather than neither.  */

static void
state_from_flags (enum pure_const_state_e *state, bool *looping,
		  int flags, bool cannot_lead_to_return)
{
  *looping = false;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    {
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " looping");
    }
  if (flags & ECF_CONST)
    {
      *state = IPA_CONST;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " const\n");
    }
  else if (flags & ECF_PURE)
    {
      *state = IPA_PURE;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " pure\n");
    }
  else if (cannot_lead_to_return)
    {
      *state = IPA_PURE;
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " ignoring side effects->pure looping\n");
    }
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " neither\n");
      *state = IPA_NEITHER;
      *looping = true;
    }
}
441
442 /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
443 into STATE and LOOPING better of the two variants.
444 Be sure to merge looping correctly. IPA_NEITHER functions
445 have looping 0 even if they don't have to return. */
446
447 static inline void
448 better_state (enum pure_const_state_e *state, bool *looping,
449 enum pure_const_state_e state2, bool looping2)
450 {
451 if (state2 < *state)
452 {
453 if (*state == IPA_NEITHER)
454 *looping = looping2;
455 else
456 *looping = MIN (*looping, looping2);
457 *state = state2;
458 }
459 else if (state2 != IPA_NEITHER)
460 *looping = MIN (*looping, looping2);
461 }
462
463 /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
464 into STATE and LOOPING worse of the two variants. */
465
466 static inline void
467 worse_state (enum pure_const_state_e *state, bool *looping,
468 enum pure_const_state_e state2, bool looping2)
469 {
470 *state = MAX (*state, state2);
471 *looping = MAX (*looping, looping2);
472 }
473
/* Recognize special cases of builtins that are by themselves not pure or const
   but function using them is.  On a match, store the resulting lattice
   value into STATE and LOOPING and return true; otherwise return false
   and leave the outputs untouched.  */
static bool
special_builtin_state (enum pure_const_state_e *state, bool *looping,
		       tree callee)
{
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
	/* These are treated as const and non-looping.  */
	case BUILT_IN_RETURN:
	case BUILT_IN_UNREACHABLE:
	case BUILT_IN_ALLOCA:
	case BUILT_IN_ALLOCA_WITH_ALIGN:
	case BUILT_IN_STACK_SAVE:
	case BUILT_IN_STACK_RESTORE:
	case BUILT_IN_EH_POINTER:
	case BUILT_IN_EH_FILTER:
	case BUILT_IN_UNWIND_RESUME:
	case BUILT_IN_CXA_END_CLEANUP:
	case BUILT_IN_EH_COPY_VALUES:
	case BUILT_IN_FRAME_ADDRESS:
	case BUILT_IN_APPLY:
	case BUILT_IN_APPLY_ARGS:
	  *looping = false;
	  *state = IPA_CONST;
	  return true;
	/* Prefetch is const but must not be removed, hence looping.  */
	case BUILT_IN_PREFETCH:
	  *looping = true;
	  *state = IPA_CONST;
	  return true;
	default:
	  break;
      }
  return false;
}
509
/* Check the parameters of a function call to CALL_EXPR to see if
   there are any references in the parameters that are not allowed for
   pure or const functions.  Also check to see if this is either an
   indirect call, a call outside the compilation unit, or has special
   attributes that may also effect the purity.  The CALL_EXPR node for
   the entire call expression.  LOCAL is the state of the current
   function, updated in place; IPA is true during summary generation
   for the IPA pass (direct calls are then left to propagation).  */

static void
check_call (funct_state local, gcall *call, bool ipa)
{
  int flags = gimple_call_flags (call);
  tree callee_t = gimple_call_fndecl (call);
  bool possibly_throws = stmt_could_throw_p (call);
  bool possibly_throws_externally = (possibly_throws
				     && stmt_can_throw_external (call));

  /* Throwing operands make the function looping (with non-call
     exceptions) and/or externally throwing.  */
  if (possibly_throws)
    {
      unsigned int i;
      for (i = 0; i < gimple_num_ops (call); i++)
	if (gimple_op (call, i)
	    && tree_could_throw_p (gimple_op (call, i)))
	  {
	    if (possibly_throws && cfun->can_throw_non_call_exceptions)
	      {
		if (dump_file)
		  fprintf (dump_file, " operand can throw; looping\n");
		local->looping = true;
	      }
	    if (possibly_throws_externally)
	      {
		if (dump_file)
		  fprintf (dump_file, " operand can throw externally\n");
		local->can_throw = true;
	      }
	  }
    }

  /* The const and pure flags are set by a variety of places in the
     compiler (including here).  If someone has already set the flags
     for the callee, (such as for some of the builtins) we will use
     them, otherwise we will compute our own information.

     Const and pure functions have less clobber effects than other
     functions so we process these first.  Otherwise if it is a call
     outside the compilation unit or an indirect call we punt.  This
     leaves local calls which will be processed by following the call
     graph.  */
  if (callee_t)
    {
      enum pure_const_state_e call_state;
      bool call_looping;

      /* A builtin not known to be non-freeing may release memory.  */
      if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
	  && !nonfreeing_call_p (call))
	local->can_free = true;

      if (special_builtin_state (&call_state, &call_looping, callee_t))
	{
	  worse_state (&local->pure_const_state, &local->looping,
		       call_state, call_looping);
	  return;
	}
      /* When bad things happen to bad functions, they cannot be const
	 or pure.  */
      if (setjmp_call_p (callee_t))
	{
	  if (dump_file)
	    fprintf (dump_file, " setjmp is not const/pure\n");
	  local->looping = true;
	  local->pure_const_state = IPA_NEITHER;
	}

      if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee_t))
	  {
	  case BUILT_IN_LONGJMP:
	  case BUILT_IN_NONLOCAL_GOTO:
	    if (dump_file)
	      fprintf (dump_file, " longjmp and nonlocal goto is not const/pure\n");
	    local->pure_const_state = IPA_NEITHER;
	    local->looping = true;
	    break;
	  default:
	    break;
	  }
    }
  else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
    local->can_free = true;

  /* When not in IPA mode, we can still handle self recursion.  */
  if (!ipa && callee_t
      && recursive_call_p (current_function_decl, callee_t))
    {
      if (dump_file)
	fprintf (dump_file, " Recursive call can loop.\n");
      local->looping = true;
    }
  /* Either callee is unknown or we are doing local analysis.
     Look to see if there are any bits available for the callee (such as by
     declaration or because it is builtin) and process solely on the basis of
     those bits.  */
  else if (!ipa)
    {
      enum pure_const_state_e call_state;
      bool call_looping;
      if (possibly_throws && cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, " can throw; looping\n");
	  local->looping = true;
	}
      if (possibly_throws_externally)
	{
	  if (dump_file)
	    {
	      fprintf (dump_file, " can throw externally to lp %i\n",
		       lookup_stmt_eh_lp (call));
	      if (callee_t)
		fprintf (dump_file, " callee:%s\n",
			 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
	    }
	  local->can_throw = true;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " checking flags for call:");
      /* The call cannot lead to return if it neither returns nor
	 throws, or if it cannot return and exceptions are disabled.  */
      state_from_flags (&call_state, &call_looping, flags,
			((flags & (ECF_NORETURN | ECF_NOTHROW))
			 == (ECF_NORETURN | ECF_NOTHROW))
			|| (!flag_exceptions && (flags & ECF_NORETURN)));
      worse_state (&local->pure_const_state, &local->looping,
		   call_state, call_looping);
    }
  /* Direct functions calls are handled by IPA propagation.  */
}
645
646 /* Wrapper around check_decl for loads in local more. */
647
648 static bool
649 check_load (gimple, tree op, tree, void *data)
650 {
651 if (DECL_P (op))
652 check_decl ((funct_state)data, op, false, false);
653 else
654 check_op ((funct_state)data, op, false);
655 return false;
656 }
657
658 /* Wrapper around check_decl for stores in local more. */
659
660 static bool
661 check_store (gimple, tree op, tree, void *data)
662 {
663 if (DECL_P (op))
664 check_decl ((funct_state)data, op, true, false);
665 else
666 check_op ((funct_state)data, op, true);
667 return false;
668 }
669
670 /* Wrapper around check_decl for loads in ipa mode. */
671
672 static bool
673 check_ipa_load (gimple, tree op, tree, void *data)
674 {
675 if (DECL_P (op))
676 check_decl ((funct_state)data, op, false, true);
677 else
678 check_op ((funct_state)data, op, false);
679 return false;
680 }
681
682 /* Wrapper around check_decl for stores in ipa mode. */
683
684 static bool
685 check_ipa_store (gimple, tree op, tree, void *data)
686 {
687 if (DECL_P (op))
688 check_decl ((funct_state)data, op, true, true);
689 else
690 check_op ((funct_state)data, op, true);
691 return false;
692 }
693
/* Look into pointer pointed to by GSIP and figure out what interesting side
   effects it has.  Updates LOCAL in place; IPA selects the ipa-mode
   load/store callbacks.  */
static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple stmt = gsi_stmt (*gsip);

  /* Debug statements never affect the lattice.  */
  if (is_gimple_debug (stmt))
    return;

  /* Do consider clobber as side effects before IPA, so we rather inline
     C++ destructors and keep clobber semantics than eliminate them.

     TODO: We may get smarter during early optimizations on these and let
     functions containing only clobbers to be optimized more.  This is a common
     case of C++ destructors.  */

  if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, " scanning: ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  if (gimple_has_volatile_ops (stmt)
      && !gimple_clobber_p (stmt))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, " Volatile stmt is not const/pure\n");
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local,
			    ipa ? check_ipa_load : check_load,
			    ipa ? check_ipa_store : check_store);

  /* Non-call statements that may throw; calls handle throwing inside
     check_call.  */
  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (stmt))
    {
      if (cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, " can throw; looping\n");
	  local->looping = true;
	}
      if (stmt_can_throw_external (stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, " can throw externally\n");
	  local->can_throw = true;
	}
      else
	if (dump_file)
	  fprintf (dump_file, " can throw\n");
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, as_a <gcall *> (stmt), ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
	/* Target of long jump.  */
	{
	  if (dump_file)
	    fprintf (dump_file, " nonlocal label is not const/pure\n");
	  local->pure_const_state = IPA_NEITHER;
	}
      break;
    case GIMPLE_ASM:
      if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, " memory asm clobber is not const/pure\n");
	  /* Abandon all hope, ye who enter here.  */
	  local->pure_const_state = IPA_NEITHER;
	  local->can_free = true;
	}
      if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, " volatile is not const/pure\n");
	  /* Abandon all hope, ye who enter here.  */
	  local->pure_const_state = IPA_NEITHER;
	  local->looping = true;
	  local->can_free = true;
	}
      return;
    default:
      break;
    }
}
789
790
/* This is the main routine for finding the reference patterns for
   global variables within a function FN.  Returns a freshly allocated
   funct_state describing FN.  When IPA is true, loads/stores are left
   to ipa_ref propagation and thunks/aliases are handled specially.  */

static funct_state
analyze_function (struct cgraph_node *fn, bool ipa)
{
  tree decl = fn->decl;
  funct_state l;
  basic_block this_block;

  /* Start optimistic (const, non-looping); scanning only lowers it.  */
  l = XCNEW (struct funct_state_d);
  l->pure_const_state = IPA_CONST;
  l->state_previously_known = IPA_NEITHER;
  l->looping_previously_known = true;
  l->looping = false;
  l->can_throw = false;
  l->can_free = false;
  state_from_flags (&l->state_previously_known, &l->looping_previously_known,
		    flags_from_decl_or_type (fn->decl),
		    fn->cannot_return_p ());

  if (fn->thunk.thunk_p || fn->alias)
    {
      /* Thunk gets propagated through, so nothing interesting happens.  */
      gcc_assert (ipa);
      if (fn->thunk.thunk_p && fn->thunk.virtual_offset_p)
	l->pure_const_state = IPA_NEITHER;
      return l;
    }

  if (dump_file)
    {
      fprintf (dump_file, "\n\n local analysis of %s\n ",
	       fn->name ());
    }

  push_cfun (DECL_STRUCT_FUNCTION (decl));

  FOR_EACH_BB_FN (this_block, cfun)
    {
      gimple_stmt_iterator gsi;
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      for (gsi = gsi_start_bb (this_block);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  check_stmt (&gsi, l, ipa);
	  /* Everything is already pessimized; no point scanning on.  */
	  if (l->pure_const_state == IPA_NEITHER
	      && l->looping
	      && l->can_throw
	      && l->can_free)
	    goto end;
	}
    }

end:
  if (l->pure_const_state != IPA_NEITHER)
    {
      /* Const functions cannot have back edges (an
	 indication of possible infinite loop side
	 effect.  */
      if (mark_dfs_back_edges ())
	{
	  /* Preheaders are needed for SCEV to work.
	     Simple latches and recorded exits improve chances that loop will
	     proved to be finite in testcases such as in loop-15.c
	     and loop-24.c  */
	  loop_optimizer_init (LOOPS_HAVE_PREHEADERS
			       | LOOPS_HAVE_SIMPLE_LATCHES
			       | LOOPS_HAVE_RECORDED_EXITS);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    flow_loops_dump (dump_file, NULL, 0);
	  if (mark_irreducible_loops ())
	    {
	      if (dump_file)
		fprintf (dump_file, " has irreducible loops\n");
	      l->looping = true;
	    }
	  else
	    {
	      struct loop *loop;
	      scev_initialize ();
	      FOR_EACH_LOOP (loop, 0)
		if (!finite_loop_p (loop))
		  {
		    if (dump_file)
		      fprintf (dump_file, " can not prove finiteness of "
			       "loop %i\n", loop->num);
		    l->looping = true;
		    break;
		  }
	      scev_finalize ();
	    }
	  loop_optimizer_finalize ();
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, " checking previously known:");

  /* Merge in declaration-level knowledge; it can only improve the
     scanned result.  */
  better_state (&l->pure_const_state, &l->looping,
		l->state_previously_known,
		l->looping_previously_known);
  if (TREE_NOTHROW (decl))
    l->can_throw = false;

  pop_cfun ();
  if (dump_file)
    {
      if (l->looping)
	fprintf (dump_file, "Function is locally looping.\n");
      if (l->can_throw)
	fprintf (dump_file, "Function is locally throwing.\n");
      if (l->pure_const_state == IPA_CONST)
	fprintf (dump_file, "Function is locally const.\n");
      if (l->pure_const_state == IPA_PURE)
	fprintf (dump_file, "Function is locally pure.\n");
      if (l->can_free)
	fprintf (dump_file, "Function can locally free.\n");
    }
  return l;
}
915
/* Called when new function is inserted to callgraph late.  */
static void
add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (node->get_availability () < AVAIL_INTERPOSABLE)
    return;
  /* There are some shared nodes, in particular the initializers on
     static declarations.  We do not need to scan them more than once
     since all we would be interested in are the addressof
     operations.  */
  /* NOTE(review): nodes that are exactly AVAIL_INTERPOSABLE are skipped
     here, while pure_const_generate_summary analyzes them (>=) —
     confirm this asymmetry is intended.  */
  if (node->get_availability () > AVAIL_INTERPOSABLE
      && opt_for_fn (node->decl, flag_ipa_pure_const))
    set_function_state (node, analyze_function (node, true));
}
930
931 /* Called when new clone is inserted to callgraph late. */
932
933 static void
934 duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
935 void *data ATTRIBUTE_UNUSED)
936 {
937 if (has_function_state (src))
938 {
939 funct_state l = XNEW (struct funct_state_d);
940 gcc_assert (!has_function_state (dst));
941 memcpy (l, get_function_state (src), sizeof (*l));
942 set_function_state (dst, l);
943 }
944 }
945
946 /* Called when new clone is inserted to callgraph late. */
947
948 static void
949 remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
950 {
951 if (has_function_state (node))
952 {
953 funct_state l = get_function_state (node);
954 if (l != &varying_state)
955 free (l);
956 set_function_state (node, NULL);
957 }
958 }
959
960 \f
/* Register the cgraph hooks that keep funct_state_vec in sync with
   callgraph changes (removal, duplication, insertion).  Idempotent:
   guarded by init_p, so later calls are no-ops.  */
void
pass_ipa_pure_const::
register_hooks (void)
{
  if (init_p)
    return;

  init_p = true;

  node_removal_hook_holder =
    symtab->add_cgraph_removal_hook (&remove_node_data, NULL);
  node_duplication_hook_holder =
    symtab->add_cgraph_duplication_hook (&duplicate_node_data, NULL);
  function_insertion_hook_holder =
    symtab->add_cgraph_insertion_hook (&add_new_function, NULL);
}
977
978
/* Analyze each function in the cgraph to see if it is locally PURE or
   CONST.  */

static void
pure_const_generate_summary (void)
{
  struct cgraph_node *node;

  /* Make sure summaries stay in sync with later callgraph edits.  */
  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  /* Process all of the functions.

     We process AVAIL_INTERPOSABLE functions.  We can not use the results
     by default, but the info can be used at LTO with -fwhole-program or
     when function got cloned and the clone is AVAILABLE.  */

  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->get_availability () >= AVAIL_INTERPOSABLE
	&& opt_for_fn (node->decl, flag_ipa_pure_const))
      set_function_state (node, analyze_function (node, true));
}
1001
1002
/* Serialize the ipa info for lto.  Writes a record count followed by,
   for each defined function with a state, its encoder reference and a
   bitpack of the funct_state_d fields (read back in the same order by
   the summary reader).  */

static void
pure_const_write_summary (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  /* First pass: count records so the reader knows how many follow.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && has_function_state (node))
	count++;
    }

  streamer_write_uhwi_stream (ob->main_stream, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && has_function_state (node))
	{
	  struct bitpack_d bp;
	  funct_state fs;
	  int node_ref;
	  lto_symtab_encoder_t encoder;

	  fs = get_function_state (node);

	  encoder = ob->decl_state->symtab_node_encoder;
	  node_ref = lto_symtab_encoder_encode (encoder, node);
	  streamer_write_uhwi_stream (ob->main_stream, node_ref);

	  /* Note that flags will need to be read in the opposite
	     order as we are pushing the bitflags into FLAGS.  */
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, fs->pure_const_state, 2);
	  bp_pack_value (&bp, fs->state_previously_known, 2);
	  bp_pack_value (&bp, fs->looping_previously_known, 1);
	  bp_pack_value (&bp, fs->looping, 1);
	  bp_pack_value (&bp, fs->can_throw, 1);
	  bp_pack_value (&bp, fs->can_free, 1);
	  streamer_write_bitpack (&bp);
	}
    }

  lto_destroy_simple_output_block (ob);
}
1060
1061
1062 /* Deserialize the ipa info for lto. */
1063
1064 static void
1065 pure_const_read_summary (void)
1066 {
1067 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1068 struct lto_file_decl_data *file_data;
1069 unsigned int j = 0;
1070
1071 pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1072 pass->register_hooks ();
1073
1074 while ((file_data = file_data_vec[j++]))
1075 {
1076 const char *data;
1077 size_t len;
1078 struct lto_input_block *ib
1079 = lto_create_simple_input_block (file_data,
1080 LTO_section_ipa_pure_const,
1081 &data, &len);
1082 if (ib)
1083 {
1084 unsigned int i;
1085 unsigned int count = streamer_read_uhwi (ib);
1086
1087 for (i = 0; i < count; i++)
1088 {
1089 unsigned int index;
1090 struct cgraph_node *node;
1091 struct bitpack_d bp;
1092 funct_state fs;
1093 lto_symtab_encoder_t encoder;
1094
1095 fs = XCNEW (struct funct_state_d);
1096 index = streamer_read_uhwi (ib);
1097 encoder = file_data->symtab_node_encoder;
1098 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
1099 index));
1100 set_function_state (node, fs);
1101
1102 /* Note that the flags must be read in the opposite
1103 order in which they were written (the bitflags were
1104 pushed into FLAGS). */
1105 bp = streamer_read_bitpack (ib);
1106 fs->pure_const_state
1107 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1108 fs->state_previously_known
1109 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1110 fs->looping_previously_known = bp_unpack_value (&bp, 1);
1111 fs->looping = bp_unpack_value (&bp, 1);
1112 fs->can_throw = bp_unpack_value (&bp, 1);
1113 fs->can_free = bp_unpack_value (&bp, 1);
1114 if (dump_file)
1115 {
1116 int flags = flags_from_decl_or_type (node->decl);
1117 fprintf (dump_file, "Read info for %s/%i ",
1118 node->name (),
1119 node->order);
1120 if (flags & ECF_CONST)
1121 fprintf (dump_file, " const");
1122 if (flags & ECF_PURE)
1123 fprintf (dump_file, " pure");
1124 if (flags & ECF_NOTHROW)
1125 fprintf (dump_file, " nothrow");
1126 fprintf (dump_file, "\n pure const state: %s\n",
1127 pure_const_names[fs->pure_const_state]);
1128 fprintf (dump_file, " previously known state: %s\n",
1129 pure_const_names[fs->looping_previously_known]);
1130 if (fs->looping)
1131 fprintf (dump_file," function is locally looping\n");
1132 if (fs->looping_previously_known)
1133 fprintf (dump_file," function is previously known looping\n");
1134 if (fs->can_throw)
1135 fprintf (dump_file," function is locally throwing\n");
1136 if (fs->can_free)
1137 fprintf (dump_file," function can locally free\n");
1138 }
1139 }
1140
1141 lto_destroy_simple_input_block (file_data,
1142 LTO_section_ipa_pure_const,
1143 ib, data, len);
1144 }
1145 }
1146 }
1147
1148
1149 static bool
1150 ignore_edge (struct cgraph_edge *e)
1151 {
1152 return (!e->can_throw_external);
1153 }
1154
1155 /* Return true if NODE is self recursive function.
1156 Indirectly recursive functions appears as non-trivial strongly
1157 connected components, so we need to care about self recursion
1158 only. */
1159
1160 static bool
1161 self_recursive_p (struct cgraph_node *node)
1162 {
1163 struct cgraph_edge *e;
1164 for (e = node->callees; e; e = e->next_callee)
1165 if (e->callee->function_symbol () == node)
1166 return true;
1167 return false;
1168 }
1169
1170 /* Return true if N is cdtor that is not const or pure. In this case we may
1171 need to remove unreachable function if it is marked const/pure. */
1172
1173 static bool
1174 cdtor_p (cgraph_node *n, void *)
1175 {
1176 if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
1177 return !TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl);
1178 return false;
1179 }
1180
/* Produce transitive closure over the callgraph and compute pure/const
   attributes.  Returns true if some static cdtor became const/pure
   (see cdtor_p) so the caller must schedule TODO_remove_functions.  */

static bool
propagate_pure_const (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;
  bool remove_p = false;

  /* Build a postorder over the callgraph with strongly connected
     components collapsed; nodes of one SCC are chained via
     ipa_dfs_info::next_cycle.  */
  order_pos = ipa_reduced_postorder (order, true, false, NULL);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      enum pure_const_state_e pure_const_state = IPA_CONST;
      bool looping = false;
      int count = 0;
      node = order[i];

      if (node->alias)
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting cycle\n");

      /* Find the worst state for any node in the cycle.  Once we hit
	 IPA_NEITHER there is nothing left to learn, so the walks below
	 bail out early on it.  */
      w = node;
      while (w && pure_const_state != IPA_NEITHER)
	{
	  struct cgraph_edge *e;
	  struct cgraph_edge *ie;
	  int i;
	  struct ipa_ref *ref = NULL;

	  funct_state w_l = get_function_state (w);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "  Visiting %s/%i state:%s looping %i\n",
		     w->name (),
		     w->order,
		     pure_const_names[w_l->pure_const_state],
		     w_l->looping);

	  /* First merge in function body properties.  */
	  worse_state (&pure_const_state, &looping,
		       w_l->pure_const_state, w_l->looping);
	  if (pure_const_state == IPA_NEITHER)
	    break;

	  /* For overwritable nodes we can not assume anything.  Fall
	     back to the previously known (declared) state and stop
	     scanning this node's edges.  */
	  if (w->get_availability () == AVAIL_INTERPOSABLE)
	    {
	      worse_state (&pure_const_state, &looping,
			   w_l->state_previously_known,
			   w_l->looping_previously_known);
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file,
			   "    Overwritable. state %s looping %i\n",
			   pure_const_names[w_l->state_previously_known],
			   w_l->looping_previously_known);
		}
	      break;
	    }

	  count++;

	  /* We consider recursive cycles as possibly infinite.
	     This might be relaxed since infinite recursion leads to stack
	     overflow.  */
	  if (count > 1)
	    looping = true;

	  /* Now walk the edges and merge in callee properties.  */
	  for (e = w->callees; e; e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				       function_or_virtual_thunk_symbol (&avail);
	      enum pure_const_state_e edge_state = IPA_CONST;
	      bool edge_looping = false;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file,
			   "    Call to %s/%i",
			   e->callee->name (),
			   e->callee->order);
		}
	      if (avail > AVAIL_INTERPOSABLE)
		{
		  funct_state y_l = get_function_state (y);
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file,
			       " state:%s looping:%i\n",
			       pure_const_names[y_l->pure_const_state],
			       y_l->looping);
		    }
		  /* A call that never returns to us can have its side
		     effects ignored, but it still may loop forever.  */
		  if (y_l->pure_const_state > IPA_PURE
		      && e->cannot_lead_to_return_p ())
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			fprintf (dump_file,
				 "        Ignoring side effects"
				 " -> pure, looping\n");
		      edge_state = IPA_PURE;
		      edge_looping = true;
		    }
		  else
		    {
		      edge_state = y_l->pure_const_state;
		      edge_looping = y_l->looping;
		    }
		}
	      else if (special_builtin_state (&edge_state, &edge_looping,
					       y->decl))
		;
	      else
		state_from_flags (&edge_state, &edge_looping,
				  flags_from_decl_or_type (y->decl),
				  e->cannot_lead_to_return_p ());

	      /* Merge the results with what we already know.  */
	      better_state (&edge_state, &edge_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   edge_state, edge_looping);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }
	  if (pure_const_state == IPA_NEITHER)
	    break;

	  /* Now process the indirect call.  Only the edge's ECF flags
	     are known, not the target.  */
	  for (ie = w->indirect_calls; ie; ie = ie->next_callee)
	    {
	      enum pure_const_state_e edge_state = IPA_CONST;
	      bool edge_looping = false;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "    Indirect call");
	      state_from_flags (&edge_state, &edge_looping,
				ie->indirect_info->ecf_flags,
				ie->cannot_lead_to_return_p ());
	      /* Merge the results with what we already know.  */
	      better_state (&edge_state, &edge_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   edge_state, edge_looping);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }
	  if (pure_const_state == IPA_NEITHER)
	    break;

	  /* And finally all loads and stores.  */
	  for (i = 0; w->iterate_reference (i, ref); i++)
	    {
	      enum pure_const_state_e ref_state = IPA_CONST;
	      bool ref_looping = false;
	      switch (ref->use)
		{
		case IPA_REF_LOAD:
		  /* readonly reads are safe.  */
		  if (TREE_READONLY (ref->referred->decl))
		    break;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "    nonreadonly global var read\n");
		  ref_state = IPA_PURE;
		  break;
		case IPA_REF_STORE:
		  /* Stores on paths that never return do not affect
		     the observable pure/const contract.  */
		  if (ref->cannot_lead_to_return ())
		    break;
		  ref_state = IPA_NEITHER;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "    global var write\n");
		  break;
		case IPA_REF_ADDR:
		case IPA_REF_CHKP:
		  break;
		default:
		  gcc_unreachable ();
		}
	      better_state (&ref_state, &ref_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   ref_state, ref_looping);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Result %s looping %i\n",
		 pure_const_names [pure_const_state],
		 looping);

      /* Find the worst state of can_free for any node in the cycle.  */
      bool can_free = false;
      w = node;
      while (w && !can_free)
	{
	  struct cgraph_edge *e;
	  funct_state w_l = get_function_state (w);

	  /* Interposable bodies and indirect calls may hide a call to
	     free, so assume the worst for them.  */
	  if (w_l->can_free
	      || w->get_availability () == AVAIL_INTERPOSABLE
	      || w->indirect_calls)
	    can_free = true;

	  for (e = w->callees; e && !can_free; e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				       function_or_virtual_thunk_symbol (&avail);

	      if (avail > AVAIL_INTERPOSABLE)
		can_free = get_function_state (y)->can_free;
	      else
		can_free = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's pure_const_state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = get_function_state (w);
	  enum pure_const_state_e this_state = pure_const_state;
	  bool this_looping = looping;

	  /* Publish the can_free result on the node for other passes.  */
	  w_l->can_free = can_free;
	  w->nonfreeing_fn = !can_free;
	  if (!can_free && dump_file)
	    fprintf (dump_file, "Function found not to call free: %s\n",
		     w->name ());

	  /* A better previously known state wins over the computed one.  */
	  if (w_l->state_previously_known != IPA_NEITHER
	      && this_state > w_l->state_previously_known)
	    {
	      this_state = w_l->state_previously_known;
	      this_looping |= w_l->looping_previously_known;
	    }
	  if (!this_looping && self_recursive_p (w))
	    this_looping = true;
	  /* Trust a previously known "does not loop" result over the
	     computed one.  */
	  if (!w_l->looping_previously_known)
	    this_looping = false;

	  /* All nodes within a cycle share the same info.  */
	  w_l->pure_const_state = this_state;
	  w_l->looping = this_looping;

	  /* Inline clones share declaration with their offline copies;
	     do not modify their declarations since the offline copy may
	     be different.  */
	  if (!w->global.inlined_to)
	    switch (this_state)
	      {
	      case IPA_CONST:
		if (!TREE_READONLY (w->decl))
		  {
		    warn_function_const (w->decl, !this_looping);
		    if (dump_file)
		      fprintf (dump_file, "Function found to be %sconst: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		  }
		/* A cdtor newly made const may become unreachable.  */
		remove_p |= w->call_for_symbol_and_aliases (cdtor_p,
							    NULL, true);
		w->set_const_flag (true, this_looping);
		break;

	      case IPA_PURE:
		if (!DECL_PURE_P (w->decl))
		  {
		    warn_function_pure (w->decl, !this_looping);
		    if (dump_file)
		      fprintf (dump_file, "Function found to be %spure: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		  }
		remove_p |= w->call_for_symbol_and_aliases (cdtor_p,
							    NULL, true);
		w->set_pure_flag (true, this_looping);
		break;

	      default:
		break;
	      }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
  return remove_p;
}
1500
/* Produce transitive closure over the callgraph and compute nothrow
   attributes.  */

static void
propagate_nothrow (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;

  /* Build the SCC-collapsed postorder; edges that cannot throw
     externally are irrelevant here and are ignored (ignore_edge).  */
  order_pos = ipa_reduced_postorder (order, true, false, ignore_edge);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      bool can_throw = false;
      node = order[i];

      if (node->alias)
	continue;

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && !can_throw)
	{
	  struct cgraph_edge *e, *ie;
	  funct_state w_l = get_function_state (w);

	  /* The body itself throws, or it may be replaced at link time
	     by a different body that does.  */
	  if (w_l->can_throw
	      || w->get_availability () == AVAIL_INTERPOSABLE)
	    can_throw = true;

	  for (e = w->callees; e && !can_throw; e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				       function_or_virtual_thunk_symbol (&avail);

	      if (avail > AVAIL_INTERPOSABLE)
		{
		  funct_state y_l = get_function_state (y);

		  if (y_l->can_throw && !TREE_NOTHROW (w->decl)
		      && e->can_throw_external)
		    can_throw = true;
		}
	      else if (e->can_throw_external && !TREE_NOTHROW (y->decl))
		can_throw = true;
	    }
	  /* An indirect call to an unknown target may throw.  */
	  for (ie = w->indirect_calls; ie && !can_throw; ie = ie->next_callee)
	    if (ie->can_throw_external)
	      can_throw = true;
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's can_throw result, which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = get_function_state (w);
	  if (!can_throw && !TREE_NOTHROW (w->decl))
	    {
	      /* Inline clones share declaration with their offline copies;
		 do not modify their declarations since the offline copy may
		 be different.  */
	      if (!w->global.inlined_to)
		{
		  w->set_nothrow_flag (true);
		  if (dump_file)
		    fprintf (dump_file, "Function found to be nothrow: %s\n",
			     w->name ());
		}
	    }
	  else if (can_throw && !TREE_NOTHROW (w->decl))
	    w_l->can_throw = true;
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
}
1598
1599
1600 /* Produce the global information by preforming a transitive closure
1601 on the local information that was produced by generate_summary. */
1602
1603 unsigned int
1604 pass_ipa_pure_const::
1605 execute (function *)
1606 {
1607 struct cgraph_node *node;
1608 bool remove_p;
1609
1610 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
1611 symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
1612 symtab->remove_cgraph_removal_hook (node_removal_hook_holder);
1613
1614 /* Nothrow makes more function to not lead to return and improve
1615 later analysis. */
1616 propagate_nothrow ();
1617 remove_p = propagate_pure_const ();
1618
1619 /* Cleanup. */
1620 FOR_EACH_FUNCTION (node)
1621 if (has_function_state (node))
1622 free (get_function_state (node));
1623 funct_state_vec.release ();
1624 return remove_p ? TODO_remove_functions : 0;
1625 }
1626
1627 static bool
1628 gate_pure_const (void)
1629 {
1630 return flag_ipa_pure_const || in_lto_p;
1631 }
1632
/* Construct the IPA pure/const pass, wiring in the summary streaming
   callbacks.  The cgraph hooks are registered separately (see
   register_hooks, called lazily; init_p tracks that).  */

pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
    : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
		     pure_const_generate_summary, /* generate_summary */
		     pure_const_write_summary, /* write_summary */
		     pure_const_read_summary, /* read_summary */
		     NULL, /* write_optimization_summary */
		     NULL, /* read_optimization_summary */
		     NULL, /* stmt_fixup */
		     0, /* function_transform_todo_flags_start */
		     NULL, /* function_transform */
		     NULL), /* variable_transform */
  init_p(false),
  function_insertion_hook_holder(NULL),
  node_duplication_hook_holder(NULL),
  node_removal_hook_holder(NULL)
{
}
1650
1651 ipa_opt_pass_d *
1652 make_pass_ipa_pure_const (gcc::context *ctxt)
1653 {
1654 return new pass_ipa_pure_const (ctxt);
1655 }
1656
1657 /* Return true if function should be skipped for local pure const analysis. */
1658
1659 static bool
1660 skip_function_for_local_pure_const (struct cgraph_node *node)
1661 {
1662 /* Because we do not schedule pass_fixup_cfg over whole program after early optimizations
1663 we must not promote functions that are called by already processed functions. */
1664
1665 if (function_called_by_processed_nodes_p ())
1666 {
1667 if (dump_file)
1668 fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
1669 return true;
1670 }
1671 if (node->get_availability () <= AVAIL_INTERPOSABLE)
1672 {
1673 if (dump_file)
1674 fprintf (dump_file, "Function is not available or overwritable; not analyzing.\n");
1675 return true;
1676 }
1677 return false;
1678 }
1679
1680 /* Simple local pass for pure const discovery reusing the analysis from
1681 ipa_pure_const. This pass is effective when executed together with
1682 other optimization passes in early optimization pass queue. */
1683
1684 namespace {
1685
/* Pass descriptor for the local (per-function) pure/const pass.  */

const pass_data pass_data_local_pure_const =
{
  GIMPLE_PASS, /* type */
  "local-pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
1698
/* Gimple pass running the local pure/const analysis on a single
   function; clonable so it can be scheduled several times.  */

class pass_local_pure_const : public gimple_opt_pass
{
public:
  pass_local_pure_const (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_local_pure_const, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
  virtual bool gate (function *) { return gate_pure_const (); }
  virtual unsigned int execute (function *);

}; // class pass_local_pure_const
1712
/* Analyze FUN locally and, when allowed, promote it to const/pure/
   nothrow/noreturn.  Even when promotion must be skipped (see
   skip_function_for_local_pure_const) we still run the analysis if
   -Wsuggest-attribute=const/pure warnings are requested.  Returns the
   TODO flags of execute_fixup_cfg when any flag changed, else 0.  */

unsigned int
pass_local_pure_const::execute (function *fun)
{
  bool changed = false;
  funct_state l;
  bool skip;
  struct cgraph_node *node;

  node = cgraph_node::get (current_function_decl);
  skip = skip_function_for_local_pure_const (node);
  /* Nothing to compute if we may neither promote nor warn.  */
  if (!warn_suggest_attribute_const
      && !warn_suggest_attribute_pure
      && skip)
    return 0;

  l = analyze_function (node, false);

  /* Do NORETURN discovery.  */
  if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
    {
      warn_function_noreturn (fun->decl);
      if (dump_file)
	fprintf (dump_file, "Function found to be noreturn: %s\n",
		 current_function_name ());

      /* Update declaration and reduce profile to executed once.  */
      TREE_THIS_VOLATILE (current_function_decl) = 1;
      if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;

      changed = true;
    }

  switch (l->pure_const_state)
    {
    case IPA_CONST:
      if (!TREE_READONLY (current_function_decl))
	{
	  /* Warn regardless of SKIP; only set the flag when allowed.  */
	  warn_function_const (current_function_decl, !l->looping);
	  if (!skip)
	    {
	      node->set_const_flag (true, l->looping);
	      changed = true;
	    }
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      /* Already const: we may still be able to drop the "looping"
	 variant of the attribute.  */
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (!skip)
	    {
	      node->set_const_flag (true, false);
	      changed = true;
	    }
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      break;

    case IPA_PURE:
      if (!DECL_PURE_P (current_function_decl))
	{
	  if (!skip)
	    {
	      node->set_pure_flag (true, l->looping);
	      changed = true;
	    }
	  warn_function_pure (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (!skip)
	    {
	      node->set_pure_flag (true, false);
	      changed = true;
	    }
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      break;

    default:
      break;
    }
  /* Nothrow discovery from the same local analysis.  */
  if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
    {
      node->set_nothrow_flag (true);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be nothrow: %s\n",
		 current_function_name ());
    }
  free (l);
  if (changed)
    return execute_fixup_cfg ();
  else
    return 0;
}
1822
1823 } // anon namespace
1824
1825 gimple_opt_pass *
1826 make_pass_local_pure_const (gcc::context *ctxt)
1827 {
1828 return new pass_local_pure_const (ctxt);
1829 }
1830
1831 /* Emit noreturn warnings. */
1832
1833 namespace {
1834
/* Pass descriptor for the noreturn-warning pass.  */

const pass_data pass_data_warn_function_noreturn =
{
  GIMPLE_PASS, /* type */
  "*warn_function_noreturn", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
1847
/* Gimple pass suggesting the "noreturn" attribute; gated on
   -Wsuggest-attribute=noreturn.  */

class pass_warn_function_noreturn : public gimple_opt_pass
{
public:
  pass_warn_function_noreturn (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
  virtual unsigned int execute (function *fun)
    {
      /* A function whose exit block has no predecessors never returns;
	 warn unless it is already marked noreturn (TREE_THIS_VOLATILE).  */
      if (!TREE_THIS_VOLATILE (current_function_decl)
	  && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
	warn_function_noreturn (current_function_decl);
      return 0;
    }

}; // class pass_warn_function_noreturn
1866
1867 } // anon namespace
1868
1869 gimple_opt_pass *
1870 make_pass_warn_function_noreturn (gcc::context *ctxt)
1871 {
1872 return new pass_warn_function_noreturn (ctxt);
1873 }
1874
/* Simple local pass for nothrow discovery, reusing parts of the
   ipa_pure_const analysis.  It marks the current function nothrow when
   no statement in its body can throw externally.  */
1878
1879 namespace {
1880
/* Pass descriptor for the local nothrow-discovery pass.  */

const pass_data pass_data_nothrow =
{
  GIMPLE_PASS, /* type */
  "nothrow", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
1893
/* Early gimple pass marking functions nothrow when no statement in
   their body can throw externally; runs whenever optimizing.  */

class pass_nothrow : public gimple_opt_pass
{
public:
  pass_nothrow (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_nothrow, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_nothrow (m_ctxt); }
  virtual bool gate (function *) { return optimize; }
  virtual unsigned int execute (function *);

}; // class pass_nothrow
1907
1908 unsigned int
1909 pass_nothrow::execute (function *)
1910 {
1911 struct cgraph_node *node;
1912 basic_block this_block;
1913
1914 if (TREE_NOTHROW (current_function_decl))
1915 return 0;
1916
1917 node = cgraph_node::get (current_function_decl);
1918
1919 /* We run during lowering, we can not really use availability yet. */
1920 if (cgraph_node::get (current_function_decl)->get_availability ()
1921 <= AVAIL_INTERPOSABLE)
1922 {
1923 if (dump_file)
1924 fprintf (dump_file, "Function is interposable;"
1925 " not analyzing.\n");
1926 return true;
1927 }
1928
1929 FOR_EACH_BB_FN (this_block, cfun)
1930 {
1931 for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
1932 !gsi_end_p (gsi);
1933 gsi_next (&gsi))
1934 if (stmt_can_throw_external (gsi_stmt (gsi)))
1935 {
1936 if (is_gimple_call (gsi_stmt (gsi)))
1937 {
1938 tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
1939 if (callee_t && recursive_call_p (current_function_decl,
1940 callee_t))
1941 continue;
1942 }
1943
1944 if (dump_file)
1945 {
1946 fprintf (dump_file, "Statement can throw: ");
1947 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
1948 }
1949 return 0;
1950 }
1951 }
1952
1953 node->set_nothrow_flag (true);
1954 if (dump_file)
1955 fprintf (dump_file, "Function found to be nothrow: %s\n",
1956 current_function_name ());
1957 return 0;
1958 }
1959
1960 } // anon namespace
1961
1962 gimple_opt_pass *
1963 make_pass_nothrow (gcc::context *ctxt)
1964 {
1965 return new pass_nothrow (ctxt);
1966 }