1 /* Callgraph based analysis of static variables.
2 Copyright (C) 2004-2018 Free Software Foundation, Inc.
3 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This file marks functions as being either const (TREE_READONLY) or
22 pure (DECL_PURE_P). It can also set a variant of these that
23 are allowed to loop indefinitely (DECL_LOOPING_CONST_OR_PURE_P).
24
25 This must be run after inlining decisions have been made since
26 otherwise, the local sets will not contain information that is
27 consistent with the post-inlined state. The global sets are not prone
28 to this problem since they are by definition transitive. */
29
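/* As a small worked example, the three classifications correspond to
   functions like:

     int sq (int x) { return x * x; }    const: depends only on args
     int g;
     int get_g (void) { return g; }      pure: reads but never writes memory
     void set_g (int x) { g = x; }       neither: writes global memory

   A const call can be CSEd freely; a pure call can be CSEd only when
   there are no stores in between.  */
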
30 /* The code in this module is called by the ipa pass manager. It
31 should be one of the later passes since its information is used by
32 the rest of the compilation. */
33
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "backend.h"
38 #include "target.h"
39 #include "tree.h"
40 #include "gimple.h"
41 #include "tree-pass.h"
42 #include "tree-streamer.h"
43 #include "cgraph.h"
44 #include "diagnostic.h"
45 #include "calls.h"
46 #include "cfganal.h"
47 #include "tree-eh.h"
48 #include "gimple-iterator.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-ssa-loop-niter.h"
52 #include "langhooks.h"
53 #include "ipa-utils.h"
54 #include "gimple-pretty-print.h"
55 #include "cfgloop.h"
56 #include "tree-scalar-evolution.h"
57 #include "intl.h"
58 #include "opts.h"
59 #include "ssa.h"
60 #include "alloc-pool.h"
61 #include "symbol-summary.h"
62 #include "ipa-prop.h"
63 #include "ipa-fnsummary.h"
64
65 /* Lattice values for const and pure functions. Everything starts out
66 being const, then may drop to pure and then neither depending on
67 what is found. */
68 enum pure_const_state_e
69 {
70 IPA_CONST,
71 IPA_PURE,
72 IPA_NEITHER
73 };
74
75 static const char *pure_const_names[3] = {"const", "pure", "neither"};
76
77 enum malloc_state_e
78 {
79 STATE_MALLOC_TOP,
80 STATE_MALLOC,
81 STATE_MALLOC_BOTTOM
82 };
83
84 static const char *malloc_state_names[] = {"malloc_top", "malloc", "malloc_bottom"};
85
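/* The malloc lattice is ordered STATE_MALLOC_TOP -> STATE_MALLOC ->
   STATE_MALLOC_BOTTOM.  TOP is the optimistic value for candidates whose
   callees have not been analyzed yet, STATE_MALLOC means the function
   behaves like malloc, and BOTTOM means it provably does not; the
   propagation below only ever lowers a state.  */
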
86 /* Holder for the const_state. There is one of these per function
87 decl. */
88 struct funct_state_d
89 {
90 /* See above. */
91 enum pure_const_state_e pure_const_state;
92 /* What the user set here; we can always be sure about this. */
93 enum pure_const_state_e state_previously_known;
94 bool looping_previously_known;
95
96 /* True if the function could possibly loop forever. There are a
97 lot of ways that this could be determined. We are pretty
98 conservative here. While it is possible to CSE pure and const
99 calls, it is not legal to have DCE get rid of the call if there
100 is a possibility that the call could loop forever, since this is
101 a behavioral change. */
102 bool looping;
103
104 bool can_throw;
105
106 /* If the function can call free, munmap or otherwise make previously
107 non-trapping memory accesses trapping. */
108 bool can_free;
109
110 enum malloc_state_e malloc_state;
111 };
112
113 /* State used when we know nothing about a function. */
114 static struct funct_state_d varying_state
115 = { IPA_NEITHER, IPA_NEITHER, true, true, true, true, STATE_MALLOC_BOTTOM };
116
117
118 typedef struct funct_state_d * funct_state;
119
120 /* The storage of the funct_state is abstracted because there is the
121 possibility that it may be desirable to move this to the cgraph
122 local info. */
123
124 /* Array, indexed by cgraph node uid, of function states. */
125
126 static vec<funct_state> funct_state_vec;
127
128 static bool gate_pure_const (void);
129
130 namespace {
131
132 const pass_data pass_data_ipa_pure_const =
133 {
134 IPA_PASS, /* type */
135 "pure-const", /* name */
136 OPTGROUP_NONE, /* optinfo_flags */
137 TV_IPA_PURE_CONST, /* tv_id */
138 0, /* properties_required */
139 0, /* properties_provided */
140 0, /* properties_destroyed */
141 0, /* todo_flags_start */
142 0, /* todo_flags_finish */
143 };
144
145 class pass_ipa_pure_const : public ipa_opt_pass_d
146 {
147 public:
148 pass_ipa_pure_const(gcc::context *ctxt);
149
150 /* opt_pass methods: */
151 bool gate (function *) { return gate_pure_const (); }
152 unsigned int execute (function *fun);
153
154 void register_hooks (void);
155
156 private:
157 bool init_p;
158
159 /* Holders of ipa cgraph hooks: */
160 struct cgraph_node_hook_list *function_insertion_hook_holder;
161 struct cgraph_2node_hook_list *node_duplication_hook_holder;
162 struct cgraph_node_hook_list *node_removal_hook_holder;
163
164 }; // class pass_ipa_pure_const
165
166 } // anon namespace
167
168 /* Try to guess if the function body will always be visible to the
169 compiler when compiling the call and whether the compiler will be
170 able to propagate the information by itself. */
171
172 static bool
173 function_always_visible_to_compiler_p (tree decl)
174 {
175 return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl)
176 || DECL_COMDAT (decl));
177 }
178
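/* E.g. a file-local "static" helper is not TREE_PUBLIC, and for inline
   functions and COMDAT entities (such as C++ template instantiations)
   a body is normally available in every translation unit that calls
   them, so the compiler can usually derive the information itself.  */
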
179 /* Emit suggestion about attribute ATTRIB_NAME for DECL. KNOWN_FINITE
180 is true if the function is known to be finite. The diagnostic is
181 controlled by OPTION. WARNED_ABOUT is a hash_set<tree> unique for
182 OPTION; this function may initialize it, and it is always returned
183 by the function. */
184
185 static hash_set<tree> *
186 suggest_attribute (int option, tree decl, bool known_finite,
187 hash_set<tree> *warned_about,
188 const char * attrib_name)
189 {
190 if (!option_enabled (option, &global_options))
191 return warned_about;
192 if (TREE_THIS_VOLATILE (decl)
193 || (known_finite && function_always_visible_to_compiler_p (decl)))
194 return warned_about;
195
196 if (!warned_about)
197 warned_about = new hash_set<tree>;
198 if (warned_about->contains (decl))
199 return warned_about;
200 warned_about->add (decl);
201 warning_at (DECL_SOURCE_LOCATION (decl),
202 option,
203 known_finite
204 ? G_("function might be candidate for attribute %qs")
205 : G_("function might be candidate for attribute %qs"
206 " if it is known to return normally"), attrib_name);
207 return warned_about;
208 }
209
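/* These suggestions are opt-in; for example, compiling with

     gcc -O2 -Wsuggest-attribute=pure -Wsuggest-attribute=const foo.c

   enables the corresponding OPT_Wsuggest_attribute_* options that are
   tested via option_enabled above.  */
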
210 /* Emit suggestion about __attribute__((pure)) for DECL. KNOWN_FINITE
211 is true if the function is known to be finite. */
212
213 static void
214 warn_function_pure (tree decl, bool known_finite)
215 {
216 /* Declaring a void function pure makes no sense and is diagnosed
217 by -Wattributes because calling it would have no effect. */
218 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
219 return;
220
221 static hash_set<tree> *warned_about;
222 warned_about
223 = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
224 known_finite, warned_about, "pure");
225 }
226
227 /* Emit suggestion about __attribute__((const)) for DECL. KNOWN_FINITE
228 is true if the function is known to be finite. */
229
230 static void
231 warn_function_const (tree decl, bool known_finite)
232 {
233 /* Declaring a void function const makes no sense and is diagnosed
234 by -Wattributes because calling it would have no effect. */
235 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
236 return;
237
238 static hash_set<tree> *warned_about;
239 warned_about
240 = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
241 known_finite, warned_about, "const");
242 }
243
244 /* Emit suggestion about __attribute__((malloc)) for DECL. */
245
246 static void
247 warn_function_malloc (tree decl)
248 {
249 static hash_set<tree> *warned_about;
250 warned_about
251 = suggest_attribute (OPT_Wsuggest_attribute_malloc, decl,
252 true, warned_about, "malloc");
253 }
254
255 /* Emit suggestion about __attribute__((noreturn)) for DECL. */
256
257 static void
258 warn_function_noreturn (tree decl)
259 {
260 tree original_decl = decl;
261
262 static hash_set<tree> *warned_about;
263 if (!lang_hooks.missing_noreturn_ok_p (decl)
264 && targetm.warn_func_return (decl))
265 warned_about
266 = suggest_attribute (OPT_Wsuggest_attribute_noreturn, original_decl,
267 true, warned_about, "noreturn");
268 }
269
270 void
271 warn_function_cold (tree decl)
272 {
273 tree original_decl = decl;
274
275 static hash_set<tree> *warned_about;
276 warned_about
277 = suggest_attribute (OPT_Wsuggest_attribute_cold, original_decl,
278 true, warned_about, "cold");
279 }
280
281 /* Return true if we have a function state for NODE. */
282
283 static inline bool
284 has_function_state (struct cgraph_node *node)
285 {
286 if (!funct_state_vec.exists ()
287 || funct_state_vec.length () <= (unsigned int)node->uid)
288 return false;
289 return funct_state_vec[node->uid] != NULL;
290 }
291
292 /* Return the function state from NODE. */
293
294 static inline funct_state
295 get_function_state (struct cgraph_node *node)
296 {
297 if (!funct_state_vec.exists ()
298 || funct_state_vec.length () <= (unsigned int)node->uid
299 || !funct_state_vec[node->uid])
300 /* We might want to put correct previously_known state into varying. */
301 return &varying_state;
302 return funct_state_vec[node->uid];
303 }
304
305 /* Set the function state S for NODE. */
306
307 static inline void
308 set_function_state (struct cgraph_node *node, funct_state s)
309 {
310 if (!funct_state_vec.exists ()
311 || funct_state_vec.length () <= (unsigned int)node->uid)
312 funct_state_vec.safe_grow_cleared (node->uid + 1);
313
314 /* If funct_state_vec already contains a funct_state, we have to release
315 it before it is overwritten. */
316 if (funct_state_vec[node->uid] != NULL
317 && funct_state_vec[node->uid] != &varying_state)
318 free (funct_state_vec[node->uid]);
319
320 funct_state_vec[node->uid] = s;
321 }
322
323 /* Check to see if the use (or definition when CHECKING_WRITE is true)
324 of variable T is legal in a function that is either pure or const. */
325
326 static inline void
327 check_decl (funct_state local,
328 tree t, bool checking_write, bool ipa)
329 {
330 /* Do not want to do anything with volatile except mark any
331 function that uses one to be not const or pure. */
332 if (TREE_THIS_VOLATILE (t))
333 {
334 local->pure_const_state = IPA_NEITHER;
335 if (dump_file)
336 fprintf (dump_file, " Volatile operand is not const/pure\n");
337 return;
338 }
339
340 /* Do not care about a local automatic that is not static. */
341 if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
342 return;
343
344 /* If the variable has the "used" attribute, treat it as if it had
345 been touched by the devil. */
346 if (DECL_PRESERVE_P (t))
347 {
348 local->pure_const_state = IPA_NEITHER;
349 if (dump_file)
350 fprintf (dump_file, " Used static/global variable is not const/pure\n");
351 return;
352 }
353
354 /* In IPA mode we are not interested in checking actual loads and stores;
355 they will be processed at propagation time using ipa_ref. */
356 if (ipa)
357 return;
358
359 /* Since we have dealt with the locals and params cases above, if we
360 are CHECKING_WRITE, this cannot be a pure or constant
361 function. */
362 if (checking_write)
363 {
364 local->pure_const_state = IPA_NEITHER;
365 if (dump_file)
366 fprintf (dump_file, " static/global memory write is not const/pure\n");
367 return;
368 }
369
370 if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
371 {
372 /* Readonly reads are safe. */
373 if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
374 return; /* Read of a constant, do not change the function state. */
375 else
376 {
377 if (dump_file)
378 fprintf (dump_file, " global memory read is not const\n");
379 /* Just a regular read. */
380 if (local->pure_const_state == IPA_CONST)
381 local->pure_const_state = IPA_PURE;
382 }
383 }
384 else
385 {
386 /* Compilation level statics can be read if they are readonly
387 variables. */
388 if (TREE_READONLY (t))
389 return;
390
391 if (dump_file)
392 fprintf (dump_file, " static memory read is not const\n");
393 /* Just a regular read. */
394 if (local->pure_const_state == IPA_CONST)
395 local->pure_const_state = IPA_PURE;
396 }
397 }
398
399
400 /* Check to see if the use (or definition when CHECKING_WRITE is true)
401 of operand T is legal in a function that is either pure or const. */
402
403 static inline void
404 check_op (funct_state local, tree t, bool checking_write)
405 {
406 t = get_base_address (t);
407 if (t && TREE_THIS_VOLATILE (t))
408 {
409 local->pure_const_state = IPA_NEITHER;
410 if (dump_file)
411 fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
412 return;
413 }
414 else if (t
415 && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
416 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
417 && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
418 {
419 if (dump_file)
420 fprintf (dump_file, " Indirect ref to local memory is OK\n");
421 return;
422 }
423 else if (checking_write)
424 {
425 local->pure_const_state = IPA_NEITHER;
426 if (dump_file)
427 fprintf (dump_file, " Indirect ref write is not const/pure\n");
428 return;
429 }
430 else
431 {
432 if (dump_file)
433 fprintf (dump_file, " Indirect ref read is not const\n");
434 if (local->pure_const_state == IPA_CONST)
435 local->pure_const_state = IPA_PURE;
436 }
437 }
438
439 /* Compute the state based on ECF FLAGS and store it to STATE and LOOPING. */
440
441 static void
442 state_from_flags (enum pure_const_state_e *state, bool *looping,
443 int flags, bool cannot_lead_to_return)
444 {
445 *looping = false;
446 if (flags & ECF_LOOPING_CONST_OR_PURE)
447 {
448 *looping = true;
449 if (dump_file && (dump_flags & TDF_DETAILS))
450 fprintf (dump_file, " looping\n");
451 }
452 if (flags & ECF_CONST)
453 {
454 *state = IPA_CONST;
455 if (dump_file && (dump_flags & TDF_DETAILS))
456 fprintf (dump_file, " const\n");
457 }
458 else if (flags & ECF_PURE)
459 {
460 *state = IPA_PURE;
461 if (dump_file && (dump_flags & TDF_DETAILS))
462 fprintf (dump_file, " pure\n");
463 }
464 else if (cannot_lead_to_return)
465 {
466 *state = IPA_PURE;
467 *looping = true;
468 if (dump_file && (dump_flags & TDF_DETAILS))
469 fprintf (dump_file, " ignoring side effects->pure looping\n");
470 }
471 else
472 {
473 if (dump_file && (dump_flags & TDF_DETAILS))
474 fprintf (dump_file, " neither\n");
475 *state = IPA_NEITHER;
476 *looping = true;
477 }
478 }
479
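/* For example, a declaration carrying __attribute__((pure)) has ECF_PURE
   set and maps to *STATE == IPA_PURE with *LOOPING false, while
   ECF_CONST | ECF_LOOPING_CONST_OR_PURE maps to IPA_CONST with *LOOPING
   true.  */
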
480 /* Merge STATE with STATE2 and LOOPING with LOOPING2, storing the
481 better of the two variants into STATE and LOOPING.
482 Be sure to merge looping correctly. IPA_NEITHER functions
483 have looping 0 even if they don't have to return. */
484
485 static inline void
486 better_state (enum pure_const_state_e *state, bool *looping,
487 enum pure_const_state_e state2, bool looping2)
488 {
489 if (state2 < *state)
490 {
491 if (*state == IPA_NEITHER)
492 *looping = looping2;
493 else
494 *looping = MIN (*looping, looping2);
495 *state = state2;
496 }
497 else if (state2 != IPA_NEITHER)
498 *looping = MIN (*looping, looping2);
499 }
500
501 /* Merge STATE with STATE2 and LOOPING with LOOPING2, storing the
502 worse of the two variants into STATE and LOOPING. FROM and TO are
503 the caller and the callee of the call being considered. */
504
505 static inline void
506 worse_state (enum pure_const_state_e *state, bool *looping,
507 enum pure_const_state_e state2, bool looping2,
508 struct symtab_node *from,
509 struct symtab_node *to)
510 {
511 /* Consider function:
512
513 bool a(int *p)
514 {
515 return *p==*p;
516 }
517
518 During early optimization we will turn this into:
519
520 bool a(int *p)
521 {
522 return true;
523 }
524
525 Now even if this function is detected as CONST, when interposed it
526 may end up being just pure. We must always assume the worst scenario here.
527 */
528 if (*state == IPA_CONST && state2 == IPA_CONST
529 && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (from))
530 {
531 if (dump_file && (dump_flags & TDF_DETAILS))
532 fprintf (dump_file, "Dropping state to PURE because call to %s may not "
533 "bind to current def.\n", to->name ());
534 state2 = IPA_PURE;
535 }
536 *state = MAX (*state, state2);
537 *looping = MAX (*looping, looping2);
538 }
539
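/* Since IPA_CONST < IPA_PURE < IPA_NEITHER, better_state takes the
   minimum and worse_state the maximum of the two states.  E.g. merging
   an IPA_CONST callee into an IPA_PURE caller with worse_state leaves
   the caller IPA_PURE, while any IPA_NEITHER callee drags the caller
   down to IPA_NEITHER.  */
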
540 /* Recognize special cases of builtins that are by themselves not pure or const
541 but a function using them can be. */
542 static bool
543 special_builtin_state (enum pure_const_state_e *state, bool *looping,
544 tree callee)
545 {
546 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
547 switch (DECL_FUNCTION_CODE (callee))
548 {
549 case BUILT_IN_RETURN:
550 case BUILT_IN_UNREACHABLE:
551 CASE_BUILT_IN_ALLOCA:
552 case BUILT_IN_STACK_SAVE:
553 case BUILT_IN_STACK_RESTORE:
554 case BUILT_IN_EH_POINTER:
555 case BUILT_IN_EH_FILTER:
556 case BUILT_IN_UNWIND_RESUME:
557 case BUILT_IN_CXA_END_CLEANUP:
558 case BUILT_IN_EH_COPY_VALUES:
559 case BUILT_IN_FRAME_ADDRESS:
560 case BUILT_IN_APPLY:
561 case BUILT_IN_APPLY_ARGS:
562 case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
563 case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
564 *looping = false;
565 *state = IPA_CONST;
566 return true;
567 case BUILT_IN_PREFETCH:
568 *looping = true;
569 *state = IPA_CONST;
570 return true;
571 default:
572 break;
573 }
574 return false;
575 }
576
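/* For instance, a function that only uses __builtin_alloca for scratch
   space (a sketch; the stores go to local memory, which check_op above
   accepts):

     int sum3 (int a, int b, int c)
     {
       int *tmp = __builtin_alloca (3 * sizeof (int));
       tmp[0] = a; tmp[1] = b; tmp[2] = c;
       return tmp[0] + tmp[1] + tmp[2];
     }

   can still be detected as const even though alloca itself is not.  */
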
577 /* Check the parameters of the call statement CALL to see if
578 there are any references in the parameters that are not allowed for
579 pure or const functions. Also check to see if this is either an
580 indirect call, a call outside the compilation unit, or has special
581 attributes that may also affect the purity. IPA is true when we
582 are performing the interprocedural analysis. */
583
584 static void
585 check_call (funct_state local, gcall *call, bool ipa)
586 {
587 int flags = gimple_call_flags (call);
588 tree callee_t = gimple_call_fndecl (call);
589 bool possibly_throws = stmt_could_throw_p (call);
590 bool possibly_throws_externally = (possibly_throws
591 && stmt_can_throw_external (call));
592
593 if (possibly_throws)
594 {
595 unsigned int i;
596 for (i = 0; i < gimple_num_ops (call); i++)
597 if (gimple_op (call, i)
598 && tree_could_throw_p (gimple_op (call, i)))
599 {
600 if (possibly_throws && cfun->can_throw_non_call_exceptions)
601 {
602 if (dump_file)
603 fprintf (dump_file, " operand can throw; looping\n");
604 local->looping = true;
605 }
606 if (possibly_throws_externally)
607 {
608 if (dump_file)
609 fprintf (dump_file, " operand can throw externally\n");
610 local->can_throw = true;
611 }
612 }
613 }
614
615 /* The const and pure flags are set by a variety of places in the
616 compiler (including here). If someone has already set the flags
617 for the callee, (such as for some of the builtins) we will use
618 them, otherwise we will compute our own information.
619
620 Const and pure functions have less clobber effects than other
621 functions so we process these first. Otherwise if it is a call
622 outside the compilation unit or an indirect call we punt. This
623 leaves local calls which will be processed by following the call
624 graph. */
625 if (callee_t)
626 {
627 enum pure_const_state_e call_state;
628 bool call_looping;
629
630 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
631 && !nonfreeing_call_p (call))
632 local->can_free = true;
633
634 if (special_builtin_state (&call_state, &call_looping, callee_t))
635 {
636 worse_state (&local->pure_const_state, &local->looping,
637 call_state, call_looping,
638 NULL, NULL);
639 return;
640 }
641 /* When bad things happen to bad functions, they cannot be const
642 or pure. */
643 if (setjmp_call_p (callee_t))
644 {
645 if (dump_file)
646 fprintf (dump_file, " setjmp is not const/pure\n");
647 local->looping = true;
648 local->pure_const_state = IPA_NEITHER;
649 }
650
651 if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
652 switch (DECL_FUNCTION_CODE (callee_t))
653 {
654 case BUILT_IN_LONGJMP:
655 case BUILT_IN_NONLOCAL_GOTO:
656 if (dump_file)
657 fprintf (dump_file, " longjmp and nonlocal goto are not const/pure\n");
658 local->pure_const_state = IPA_NEITHER;
659 local->looping = true;
660 break;
661 default:
662 break;
663 }
664 }
665 else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
666 local->can_free = true;
667
668 /* When not in IPA mode, we can still handle self recursion. */
669 if (!ipa && callee_t
670 && recursive_call_p (current_function_decl, callee_t))
671 {
672 if (dump_file)
673 fprintf (dump_file, " Recursive call can loop.\n");
674 local->looping = true;
675 }
676 /* Either the callee is unknown or we are doing local analysis.
677 Look to see if there are any bits available for the callee (such as by
678 declaration or because it is a builtin) and process solely on the basis
679 of those bits. Always handle internal calls, since those calls don't
680 have corresponding cgraph edges and thus aren't processed during
681 the propagation. */
682 else if (!ipa || gimple_call_internal_p (call))
683 {
684 enum pure_const_state_e call_state;
685 bool call_looping;
686 if (possibly_throws && cfun->can_throw_non_call_exceptions)
687 {
688 if (dump_file)
689 fprintf (dump_file, " can throw; looping\n");
690 local->looping = true;
691 }
692 if (possibly_throws_externally)
693 {
694 if (dump_file)
695 {
696 fprintf (dump_file, " can throw externally to lp %i\n",
697 lookup_stmt_eh_lp (call));
698 if (callee_t)
699 fprintf (dump_file, " callee:%s\n",
700 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
701 }
702 local->can_throw = true;
703 }
704 if (dump_file && (dump_flags & TDF_DETAILS))
705 fprintf (dump_file, " checking flags for call:");
706 state_from_flags (&call_state, &call_looping, flags,
707 ((flags & (ECF_NORETURN | ECF_NOTHROW))
708 == (ECF_NORETURN | ECF_NOTHROW))
709 || (!flag_exceptions && (flags & ECF_NORETURN)));
710 worse_state (&local->pure_const_state, &local->looping,
711 call_state, call_looping, NULL, NULL);
712 }
713 /* Direct function calls are handled by IPA propagation. */
714 }
715
716 /* Wrapper around check_decl for loads in local mode. */
717
718 static bool
719 check_load (gimple *, tree op, tree, void *data)
720 {
721 if (DECL_P (op))
722 check_decl ((funct_state)data, op, false, false);
723 else
724 check_op ((funct_state)data, op, false);
725 return false;
726 }
727
728 /* Wrapper around check_decl for stores in local mode. */
729
730 static bool
731 check_store (gimple *, tree op, tree, void *data)
732 {
733 if (DECL_P (op))
734 check_decl ((funct_state)data, op, true, false);
735 else
736 check_op ((funct_state)data, op, true);
737 return false;
738 }
739
740 /* Wrapper around check_decl for loads in ipa mode. */
741
742 static bool
743 check_ipa_load (gimple *, tree op, tree, void *data)
744 {
745 if (DECL_P (op))
746 check_decl ((funct_state)data, op, false, true);
747 else
748 check_op ((funct_state)data, op, false);
749 return false;
750 }
751
752 /* Wrapper around check_decl for stores in ipa mode. */
753
754 static bool
755 check_ipa_store (gimple *, tree op, tree, void *data)
756 {
757 if (DECL_P (op))
758 check_decl ((funct_state)data, op, true, true);
759 else
760 check_op ((funct_state)data, op, true);
761 return false;
762 }
763
764 /* Look into the statement pointed to by GSIP and figure out what
765 interesting side effects it has. */
766 static void
767 check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
768 {
769 gimple *stmt = gsi_stmt (*gsip);
770
771 if (is_gimple_debug (stmt))
772 return;
773
774 /* Do consider clobbers as side effects before IPA, so that we would
775 rather inline C++ destructors and keep clobber semantics than eliminate them.
776
777 TODO: We may get smarter during early optimizations on these and let
778 functions containing only clobbers be optimized more. This is a common
779 case for C++ destructors. */
780
781 if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
782 return;
783
784 if (dump_file)
785 {
786 fprintf (dump_file, " scanning: ");
787 print_gimple_stmt (dump_file, stmt, 0);
788 }
789
790 if (gimple_has_volatile_ops (stmt)
791 && !gimple_clobber_p (stmt))
792 {
793 local->pure_const_state = IPA_NEITHER;
794 if (dump_file)
795 fprintf (dump_file, " Volatile stmt is not const/pure\n");
796 }
797
798 /* Look for loads and stores. */
799 walk_stmt_load_store_ops (stmt, local,
800 ipa ? check_ipa_load : check_load,
801 ipa ? check_ipa_store : check_store);
802
803 if (gimple_code (stmt) != GIMPLE_CALL
804 && stmt_could_throw_p (stmt))
805 {
806 if (cfun->can_throw_non_call_exceptions)
807 {
808 if (dump_file)
809 fprintf (dump_file, " can throw; looping\n");
810 local->looping = true;
811 }
812 if (stmt_can_throw_external (stmt))
813 {
814 if (dump_file)
815 fprintf (dump_file, " can throw externally\n");
816 local->can_throw = true;
817 }
818 else
819 if (dump_file)
820 fprintf (dump_file, " can throw\n");
821 }
822 switch (gimple_code (stmt))
823 {
824 case GIMPLE_CALL:
825 check_call (local, as_a <gcall *> (stmt), ipa);
826 break;
827 case GIMPLE_LABEL:
828 if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
829 /* Target of long jump. */
830 {
831 if (dump_file)
832 fprintf (dump_file, " nonlocal label is not const/pure\n");
833 local->pure_const_state = IPA_NEITHER;
834 }
835 break;
836 case GIMPLE_ASM:
837 if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
838 {
839 if (dump_file)
840 fprintf (dump_file, " memory asm clobber is not const/pure\n");
841 /* Abandon all hope, ye who enter here. */
842 local->pure_const_state = IPA_NEITHER;
843 local->can_free = true;
844 }
845 if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
846 {
847 if (dump_file)
848 fprintf (dump_file, " volatile is not const/pure\n");
849 /* Abandon all hope, ye who enter here. */
850 local->pure_const_state = IPA_NEITHER;
851 local->looping = true;
852 local->can_free = true;
853 }
854 return;
855 default:
856 break;
857 }
858 }
859
860 /* Check that RETVAL is used only in STMT and in comparisons against 0.
861 RETVAL is the return value of the function and STMT is the return stmt. */
862
863 static bool
864 check_retval_uses (tree retval, gimple *stmt)
865 {
866 imm_use_iterator use_iter;
867 gimple *use_stmt;
868
869 FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, retval)
870 if (gcond *cond = dyn_cast<gcond *> (use_stmt))
871 {
872 tree op2 = gimple_cond_rhs (cond);
873 if (!integer_zerop (op2))
874 RETURN_FROM_IMM_USE_STMT (use_iter, false);
875 }
876 else if (gassign *ga = dyn_cast<gassign *> (use_stmt))
877 {
878 enum tree_code code = gimple_assign_rhs_code (ga);
879 if (TREE_CODE_CLASS (code) != tcc_comparison)
880 RETURN_FROM_IMM_USE_STMT (use_iter, false);
881 if (!integer_zerop (gimple_assign_rhs2 (ga)))
882 RETURN_FROM_IMM_USE_STMT (use_iter, false);
883 }
884 else if (is_gimple_debug (use_stmt))
885 ;
886 else if (use_stmt != stmt)
887 RETURN_FROM_IMM_USE_STMT (use_iter, false);
888
889 return true;
890 }
891
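/* For example, given

     p_1 = some_alloc (n_2);
     if (p_1 == 0)
       goto fail;
     return p_1;

   the only uses of p_1 are the comparison against 0 and the return
   statement itself, so the predicate holds (some_alloc is merely a
   placeholder name).  */
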
892 /* malloc_candidate_p() checks if FUN can possibly be annotated with the
893 malloc attribute. Currently this function does a very conservative analysis.
894 FUN is considered to be a candidate if
895 1) It returns a value of pointer type.
896 2) SSA_NAME_DEF_STMT (return_value) is either a function call or
897 a phi, and each element of the phi is either NULL or
898 SSA_NAME_DEF_STMT (element) is a function call.
899 3) The return value has immediate uses only within comparisons (gcond or
900 gassign) and the return stmt (and likewise a phi arg has immediate use
901 only within a comparison or the phi stmt). */
902
903 static bool
904 malloc_candidate_p (function *fun, bool ipa)
905 {
906 basic_block exit_block = EXIT_BLOCK_PTR_FOR_FN (fun);
907 edge e;
908 edge_iterator ei;
909 cgraph_node *node = cgraph_node::get_create (fun->decl);
910
911 #define DUMP_AND_RETURN(reason) \
912 { \
913 if (dump_file && (dump_flags & TDF_DETAILS)) \
914 fprintf (dump_file, "\n%s is not a malloc candidate, reason: %s\n", \
915 (node->name()), (reason)); \
916 return false; \
917 }
918
919 if (EDGE_COUNT (exit_block->preds) == 0
920 || !flag_delete_null_pointer_checks)
921 return false;
922
923 FOR_EACH_EDGE (e, ei, exit_block->preds)
924 {
925 gimple_stmt_iterator gsi = gsi_last_bb (e->src);
926 greturn *ret_stmt = dyn_cast<greturn *> (gsi_stmt (gsi));
927
928 if (!ret_stmt)
929 return false;
930
931 tree retval = gimple_return_retval (ret_stmt);
932 if (!retval)
933 DUMP_AND_RETURN("No return value.")
934
935 if (TREE_CODE (retval) != SSA_NAME
936 || TREE_CODE (TREE_TYPE (retval)) != POINTER_TYPE)
937 DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")
938
939 if (!check_retval_uses (retval, ret_stmt))
940 DUMP_AND_RETURN("Return value has uses outside return stmt"
941 " and comparisons against 0.")
942
943 gimple *def = SSA_NAME_DEF_STMT (retval);
944 if (gcall *call_stmt = dyn_cast<gcall *> (def))
945 {
946 tree callee_decl = gimple_call_fndecl (call_stmt);
947 if (!callee_decl)
948 return false;
949
950 if (!ipa && !DECL_IS_MALLOC (callee_decl))
951 DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
952 " non-ipa mode.")
953
954 cgraph_edge *cs = node->get_edge (call_stmt);
955 if (cs)
956 {
957 ipa_call_summary *es = ipa_call_summaries->get (cs);
958 gcc_assert (es);
959 es->is_return_callee_uncaptured = true;
960 }
961 }
962
963 else if (gphi *phi = dyn_cast<gphi *> (def))
964 {
965 bool all_args_zero = true;
966 for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
967 {
968 tree arg = gimple_phi_arg_def (phi, i);
969 if (integer_zerop (arg))
970 continue;
971
972 all_args_zero = false;
973 if (TREE_CODE (arg) != SSA_NAME)
974 DUMP_AND_RETURN ("phi arg is not SSA_NAME.");
975 if (!check_retval_uses (arg, phi))
976 DUMP_AND_RETURN ("phi arg has uses outside phi"
977 " and comparisons against 0.")
978
979 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
980 gcall *call_stmt = dyn_cast<gcall *> (arg_def);
981 if (!call_stmt)
982 return false;
983 tree callee_decl = gimple_call_fndecl (call_stmt);
984 if (!callee_decl)
985 return false;
986 if (!ipa && !DECL_IS_MALLOC (callee_decl))
987 DUMP_AND_RETURN("callee_decl does not have malloc attribute"
988 " for non-ipa mode.")
989
990 cgraph_edge *cs = node->get_edge (call_stmt);
991 if (cs)
992 {
993 ipa_call_summary *es = ipa_call_summaries->get (cs);
994 gcc_assert (es);
995 es->is_return_callee_uncaptured = true;
996 }
997 }
998
999 if (all_args_zero)
1000 DUMP_AND_RETURN ("Return value is a phi with all args equal to 0.");
1001 }
1002
1003 else
1004 DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")
1005 }
1006
1007 if (dump_file && (dump_flags & TDF_DETAILS))
1008 fprintf (dump_file, "\nFound %s to be candidate for malloc attribute\n",
1009 IDENTIFIER_POINTER (DECL_NAME (fun->decl)));
1010 return true;
1011
1012 #undef DUMP_AND_RETURN
1013 }
1014
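/* A minimal sketch of the shape this analysis accepts (the name
   my_xmalloc is hypothetical):

     void *my_xmalloc (size_t n)
     {
       void *p = malloc (n);
       if (!p)
         exit (1);
       return p;
     }

   The return value is a pointer SSA name defined by a call to a
   function with the malloc attribute, and its only other use is the
   comparison against 0, so my_xmalloc is a candidate too.  */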
1015
1016 /* This is the main routine for finding the reference patterns for
1017 global variables within a function FN. */
1018
1019 static funct_state
1020 analyze_function (struct cgraph_node *fn, bool ipa)
1021 {
1022 tree decl = fn->decl;
1023 funct_state l;
1024 basic_block this_block;
1025
1026 l = XCNEW (struct funct_state_d);
1027 l->pure_const_state = IPA_CONST;
1028 l->state_previously_known = IPA_NEITHER;
1029 l->looping_previously_known = true;
1030 l->looping = false;
1031 l->can_throw = false;
1032 l->can_free = false;
1033 state_from_flags (&l->state_previously_known, &l->looping_previously_known,
1034 flags_from_decl_or_type (fn->decl),
1035 fn->cannot_return_p ());
1036
1037 if (fn->thunk.thunk_p || fn->alias)
1038 {
1039 /* Thunk gets propagated through, so nothing interesting happens. */
1040 gcc_assert (ipa);
1041 if (fn->thunk.thunk_p && fn->thunk.virtual_offset_p)
1042 l->pure_const_state = IPA_NEITHER;
1043 return l;
1044 }
1045
1046 if (dump_file)
1047 {
1048 fprintf (dump_file, "\n\n local analysis of %s\n ",
1049 fn->name ());
1050 }
1051
1052 push_cfun (DECL_STRUCT_FUNCTION (decl));
1053
1054 FOR_EACH_BB_FN (this_block, cfun)
1055 {
1056 gimple_stmt_iterator gsi;
1057 struct walk_stmt_info wi;
1058
1059 memset (&wi, 0, sizeof (wi));
1060 for (gsi = gsi_start_bb (this_block);
1061 !gsi_end_p (gsi);
1062 gsi_next (&gsi))
1063 {
1064 check_stmt (&gsi, l, ipa);
1065 if (l->pure_const_state == IPA_NEITHER
1066 && l->looping
1067 && l->can_throw
1068 && l->can_free)
1069 goto end;
1070 }
1071 }
1072
1073 end:
1074 if (l->pure_const_state != IPA_NEITHER)
1075 {
1076 /* Const functions cannot have back edges (an
1077 indication of a possible infinite loop side
1078 effect). */
1079 if (mark_dfs_back_edges ())
1080 {
1081 /* Preheaders are needed for SCEV to work.
1082 Simple latches and recorded exits improve the chances that a loop
1083 will be proved to be finite in testcases such as loop-15.c
1084 and loop-24.c. */
1085 loop_optimizer_init (LOOPS_HAVE_PREHEADERS
1086 | LOOPS_HAVE_SIMPLE_LATCHES
1087 | LOOPS_HAVE_RECORDED_EXITS);
1088 if (dump_file && (dump_flags & TDF_DETAILS))
1089 flow_loops_dump (dump_file, NULL, 0);
1090 if (mark_irreducible_loops ())
1091 {
1092 if (dump_file)
1093 fprintf (dump_file, " has irreducible loops\n");
1094 l->looping = true;
1095 }
1096 else
1097 {
1098 struct loop *loop;
1099 scev_initialize ();
1100 FOR_EACH_LOOP (loop, 0)
1101 if (!finite_loop_p (loop))
1102 {
1103 if (dump_file)
1104 fprintf (dump_file, " cannot prove finiteness of "
1105 "loop %i\n", loop->num);
1106 l->looping = true;
1107 break;
1108 }
1109 scev_finalize ();
1110 }
1111 loop_optimizer_finalize ();
1112 }
1113 }
1114
1115 if (dump_file && (dump_flags & TDF_DETAILS))
1116 fprintf (dump_file, " checking previously known:");
1117
1118 better_state (&l->pure_const_state, &l->looping,
1119 l->state_previously_known,
1120 l->looping_previously_known);
1121 if (TREE_NOTHROW (decl))
1122 l->can_throw = false;
1123
1124 l->malloc_state = STATE_MALLOC_BOTTOM;
1125 if (DECL_IS_MALLOC (decl))
1126 l->malloc_state = STATE_MALLOC;
1127 else if (ipa && malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), true))
1128 l->malloc_state = STATE_MALLOC_TOP;
1129 else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), false))
1130 l->malloc_state = STATE_MALLOC;
1131
1132 pop_cfun ();
1133 if (dump_file)
1134 {
1135 if (l->looping)
1136 fprintf (dump_file, "Function is locally looping.\n");
1137 if (l->can_throw)
1138 fprintf (dump_file, "Function is locally throwing.\n");
1139 if (l->pure_const_state == IPA_CONST)
1140 fprintf (dump_file, "Function is locally const.\n");
1141 if (l->pure_const_state == IPA_PURE)
1142 fprintf (dump_file, "Function is locally pure.\n");
1143 if (l->can_free)
1144 fprintf (dump_file, "Function can locally free.\n");
1145 if (l->malloc_state == STATE_MALLOC)
1146 fprintf (dump_file, "Function is locally malloc.\n");
1147 }
1148 return l;
1149 }
1150
1151 /* Called when a new function is inserted into the callgraph late. */
1152 static void
1153 add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
1154 {
1155 /* There are some shared nodes, in particular the initializers on
1156 static declarations. We do not need to scan them more than once
1157 since all we would be interested in are the address-of
1158 operations. */
1159 if (opt_for_fn (node->decl, flag_ipa_pure_const))
1160 set_function_state (node, analyze_function (node, true));
1161 }
1162
1163 /* Called when a new clone is inserted into the callgraph late. */
1164
1165 static void
1166 duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
1167 void *data ATTRIBUTE_UNUSED)
1168 {
1169 if (has_function_state (src))
1170 {
1171 funct_state l = XNEW (struct funct_state_d);
1172 gcc_assert (!has_function_state (dst));
1173 memcpy (l, get_function_state (src), sizeof (*l));
1174 set_function_state (dst, l);
1175 }
1176 }
1177
1178 /* Called when a node is removed from the callgraph. */
1179
1180 static void
1181 remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
1182 {
1183 if (has_function_state (node))
1184 set_function_state (node, NULL);
1185 }
1186
1187 \f
1188 void
1189 pass_ipa_pure_const::
1190 register_hooks (void)
1191 {
1192 if (init_p)
1193 return;
1194
1195 init_p = true;
1196
1197 node_removal_hook_holder =
1198 symtab->add_cgraph_removal_hook (&remove_node_data, NULL);
1199 node_duplication_hook_holder =
1200 symtab->add_cgraph_duplication_hook (&duplicate_node_data, NULL);
1201 function_insertion_hook_holder =
1202 symtab->add_cgraph_insertion_hook (&add_new_function, NULL);
1203 }
1204
1205
1206 /* Analyze each function in the cgraph to see if it is locally PURE or
1207 CONST. */
1208
1209 static void
1210 pure_const_generate_summary (void)
1211 {
1212 struct cgraph_node *node;
1213
1214 pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1215 pass->register_hooks ();
1216
1217 /* Process all of the functions.
1218
1219 We process AVAIL_INTERPOSABLE functions. We cannot use the results
1220 by default, but the info can be used at LTO with -fwhole-program or
1221 when the function got cloned and the clone is AVAILABLE. */
1222
1223 FOR_EACH_DEFINED_FUNCTION (node)
1224 if (opt_for_fn (node->decl, flag_ipa_pure_const))
1225 set_function_state (node, analyze_function (node, true));
1226 }
1227
1228
1229 /* Serialize the ipa info for lto. */
1230
1231 static void
1232 pure_const_write_summary (void)
1233 {
1234 struct cgraph_node *node;
1235 struct lto_simple_output_block *ob
1236 = lto_create_simple_output_block (LTO_section_ipa_pure_const);
1237 unsigned int count = 0;
1238 lto_symtab_encoder_iterator lsei;
1239 lto_symtab_encoder_t encoder;
1240
1241 encoder = lto_get_out_decl_state ()->symtab_node_encoder;
1242
1243 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1244 lsei_next_function_in_partition (&lsei))
1245 {
1246 node = lsei_cgraph_node (lsei);
1247 if (node->definition && has_function_state (node))
1248 count++;
1249 }
1250
1251 streamer_write_uhwi_stream (ob->main_stream, count);
1252
1253 /* Process all of the functions. */
1254 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1255 lsei_next_function_in_partition (&lsei))
1256 {
1257 node = lsei_cgraph_node (lsei);
1258 if (node->definition && has_function_state (node))
1259 {
1260 struct bitpack_d bp;
1261 funct_state fs;
1262 int node_ref;
1263 lto_symtab_encoder_t encoder;
1264
1265 fs = get_function_state (node);
1266
1267 encoder = ob->decl_state->symtab_node_encoder;
1268 node_ref = lto_symtab_encoder_encode (encoder, node);
1269 streamer_write_uhwi_stream (ob->main_stream, node_ref);
1270
1271 /* Note that flags will need to be read in the opposite
1272 order from that in which we push the bitflags into FLAGS. */
1273 bp = bitpack_create (ob->main_stream);
1274 bp_pack_value (&bp, fs->pure_const_state, 2);
1275 bp_pack_value (&bp, fs->state_previously_known, 2);
1276 bp_pack_value (&bp, fs->looping_previously_known, 1);
1277 bp_pack_value (&bp, fs->looping, 1);
1278 bp_pack_value (&bp, fs->can_throw, 1);
1279 bp_pack_value (&bp, fs->can_free, 1);
1280 bp_pack_value (&bp, fs->malloc_state, 2);
1281 streamer_write_bitpack (&bp);
1282 }
1283 }
1284
1285 lto_destroy_simple_output_block (ob);
1286 }
1287
1288
1289 /* Deserialize the ipa info for lto. */
1290
1291 static void
1292 pure_const_read_summary (void)
1293 {
1294 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1295 struct lto_file_decl_data *file_data;
1296 unsigned int j = 0;
1297
1298 pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1299 pass->register_hooks ();
1300
1301 while ((file_data = file_data_vec[j++]))
1302 {
1303 const char *data;
1304 size_t len;
1305 struct lto_input_block *ib
1306 = lto_create_simple_input_block (file_data,
1307 LTO_section_ipa_pure_const,
1308 &data, &len);
1309 if (ib)
1310 {
1311 unsigned int i;
1312 unsigned int count = streamer_read_uhwi (ib);
1313
1314 for (i = 0; i < count; i++)
1315 {
1316 unsigned int index;
1317 struct cgraph_node *node;
1318 struct bitpack_d bp;
1319 funct_state fs;
1320 lto_symtab_encoder_t encoder;
1321
1322 fs = XCNEW (struct funct_state_d);
1323 index = streamer_read_uhwi (ib);
1324 encoder = file_data->symtab_node_encoder;
1325 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
1326 index));
1327 set_function_state (node, fs);
1328
1329 /* Note that the flags must be read in the opposite
1330 order in which they were written (the bitflags were
1331 pushed into FLAGS). */
1332 bp = streamer_read_bitpack (ib);
1333 fs->pure_const_state
1334 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1335 fs->state_previously_known
1336 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1337 fs->looping_previously_known = bp_unpack_value (&bp, 1);
1338 fs->looping = bp_unpack_value (&bp, 1);
1339 fs->can_throw = bp_unpack_value (&bp, 1);
1340 fs->can_free = bp_unpack_value (&bp, 1);
1341 fs->malloc_state
1342 = (enum malloc_state_e) bp_unpack_value (&bp, 2);
1343
1344 if (dump_file)
1345 {
1346 int flags = flags_from_decl_or_type (node->decl);
1347 fprintf (dump_file, "Read info for %s ", node->dump_name ());
1348 if (flags & ECF_CONST)
1349 fprintf (dump_file, " const");
1350 if (flags & ECF_PURE)
1351 fprintf (dump_file, " pure");
1352 if (flags & ECF_NOTHROW)
1353 fprintf (dump_file, " nothrow");
1354 fprintf (dump_file, "\n pure const state: %s\n",
1355 pure_const_names[fs->pure_const_state]);
1356 fprintf (dump_file, " previously known state: %s\n",
1357 pure_const_names[fs->state_previously_known]);
1358 if (fs->looping)
1359 fprintf (dump_file," function is locally looping\n");
1360 if (fs->looping_previously_known)
1361 fprintf (dump_file," function is previously known looping\n");
1362 if (fs->can_throw)
1363 fprintf (dump_file," function is locally throwing\n");
1364 if (fs->can_free)
1365 fprintf (dump_file," function can locally free\n");
1366 fprintf (dump_file, "\n malloc state: %s\n",
1367 malloc_state_names[fs->malloc_state]);
1368 }
1369 }
1370
1371 lto_destroy_simple_input_block (file_data,
1372 LTO_section_ipa_pure_const,
1373 ib, data, len);
1374 }
1375 }
1376 }
1377
1378 /* We only propagate across edges that can throw externally and whose
1379 callee is not interposable. */
1380
1381 static bool
1382 ignore_edge_for_nothrow (struct cgraph_edge *e)
1383 {
1384 if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1385 return true;
1386
1387 enum availability avail;
1388 cgraph_node *n = e->callee->function_or_virtual_thunk_symbol (&avail,
1389 e->caller);
1390 if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (n->decl))
1391 return true;
1392 return opt_for_fn (e->callee->decl, flag_non_call_exceptions)
1393 && !e->callee->binds_to_current_def_p (e->caller);
1394 }
1395
1396 /* Return true if NODE is a self-recursive function.
1397 Indirectly recursive functions appear as non-trivial strongly
1398 connected components, so we need to care about self recursion
1399 only. */
1400
1401 static bool
1402 self_recursive_p (struct cgraph_node *node)
1403 {
1404 struct cgraph_edge *e;
1405 for (e = node->callees; e; e = e->next_callee)
1406 if (e->callee->function_symbol () == node)
1407 return true;
1408 return false;
1409 }
1410
1411 /* Return true if N is a cdtor that is not const or pure. In this case we
1412 may need to remove the unreachable function if it is marked const/pure. */
1413
1414 static bool
1415 cdtor_p (cgraph_node *n, void *)
1416 {
1417 if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
1418 return ((!TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl))
1419 || DECL_LOOPING_CONST_OR_PURE_P (n->decl));
1420 return false;
1421 }
1422
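/* The reason for tracking this: once a static constructor or destructor
   is proved non-looping const/pure, calling it has no observable
   effect, so the whole function may become removable; the REMOVE_P
   result of propagate_pure_const below asks the caller to clean up
   such nodes.  */
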
1423 /* We only propagate across edges with a non-interposable callee. */
1424
1425 static bool
1426 ignore_edge_for_pure_const (struct cgraph_edge *e)
1427 {
1428 enum availability avail;
1429 e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
1430 return (avail <= AVAIL_INTERPOSABLE);
1431 }
1432
1433
1434 /* Produce transitive closure over the callgraph and compute pure/const
1435 attributes. */
1436
1437 static bool
1438 propagate_pure_const (void)
1439 {
1440 struct cgraph_node *node;
1441 struct cgraph_node *w;
1442 struct cgraph_node **order =
1443 XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1444 int order_pos;
1445 int i;
1446 struct ipa_dfs_info * w_info;
1447 bool remove_p = false;
1448 bool has_cdtor;
1449
1450 order_pos = ipa_reduced_postorder (order, true, false,
1451 ignore_edge_for_pure_const);
1452 if (dump_file)
1453 {
1454 cgraph_node::dump_cgraph (dump_file);
1455 ipa_print_order (dump_file, "reduced", order, order_pos);
1456 }
1457
1458 /* Propagate the local information through the call graph to produce
1459 the global information. All the nodes within a cycle will have
1460 the same info so we collapse cycles first. Then we can do the
1461 propagation in one pass from the leaves to the roots. */
1462 for (i = 0; i < order_pos; i++ )
1463 {
1464 enum pure_const_state_e pure_const_state = IPA_CONST;
1465 bool looping = false;
1466 int count = 0;
1467 node = order[i];
1468
1469 if (node->alias)
1470 continue;
1471
1472 if (dump_file && (dump_flags & TDF_DETAILS))
1473 fprintf (dump_file, "Starting cycle\n");
1474
1475 /* Find the worst state for any node in the cycle. */
1476 w = node;
1477 while (w && pure_const_state != IPA_NEITHER)
1478 {
1479 struct cgraph_edge *e;
1480 struct cgraph_edge *ie;
1481 int i;
1482 struct ipa_ref *ref = NULL;
1483
1484 funct_state w_l = get_function_state (w);
1485 if (dump_file && (dump_flags & TDF_DETAILS))
1486 fprintf (dump_file, " Visiting %s state:%s looping %i\n",
1487 w->dump_name (),
1488 pure_const_names[w_l->pure_const_state],
1489 w_l->looping);
1490
1491 /* First merge in function body properties.
1492 We are safe to pass NULL as FROM and TO because we will take care
1493 of possible interposition when walking callees. */
1494 worse_state (&pure_const_state, &looping,
1495 w_l->pure_const_state, w_l->looping,
1496 NULL, NULL);
1497 if (pure_const_state == IPA_NEITHER)
1498 break;
1499
1500 count++;
1501
1502 /* We consider recursive cycles as possibly infinite.
1503 This might be relaxed since infinite recursion leads to stack
1504 overflow. */
1505 if (count > 1)
1506 looping = true;
1507
1508 /* Now walk the edges and merge in callee properties. */
1509 for (e = w->callees; e && pure_const_state != IPA_NEITHER;
1510 e = e->next_callee)
1511 {
1512 enum availability avail;
1513 struct cgraph_node *y = e->callee->
1514 function_or_virtual_thunk_symbol (&avail,
1515 e->caller);
1516 enum pure_const_state_e edge_state = IPA_CONST;
1517 bool edge_looping = false;
1518
1519 if (dump_file && (dump_flags & TDF_DETAILS))
1520 {
1521 fprintf (dump_file, " Call to %s",
1522 e->callee->dump_name ());
1523 }
1524 if (avail > AVAIL_INTERPOSABLE)
1525 {
1526 funct_state y_l = get_function_state (y);
1527 if (dump_file && (dump_flags & TDF_DETAILS))
1528 {
1529 fprintf (dump_file,
1530 " state:%s looping:%i\n",
1531 pure_const_names[y_l->pure_const_state],
1532 y_l->looping);
1533 }
1534 if (y_l->pure_const_state > IPA_PURE
1535 && e->cannot_lead_to_return_p ())
1536 {
1537 if (dump_file && (dump_flags & TDF_DETAILS))
1538 fprintf (dump_file,
1539 " Ignoring side effects"
1540 " -> pure, looping\n");
1541 edge_state = IPA_PURE;
1542 edge_looping = true;
1543 }
1544 else
1545 {
1546 edge_state = y_l->pure_const_state;
1547 edge_looping = y_l->looping;
1548 }
1549 }
1550 else if (special_builtin_state (&edge_state, &edge_looping,
1551 y->decl))
1552 ;
1553 else
1554 state_from_flags (&edge_state, &edge_looping,
1555 flags_from_decl_or_type (y->decl),
1556 e->cannot_lead_to_return_p ());
1557
1558 /* Merge the results with what we already know. */
1559 better_state (&edge_state, &edge_looping,
1560 w_l->state_previously_known,
1561 w_l->looping_previously_known);
1562 worse_state (&pure_const_state, &looping,
1563 edge_state, edge_looping, e->caller, e->callee);
1564 if (pure_const_state == IPA_NEITHER)
1565 break;
1566 }
1567
1568 /* Now process the indirect calls. */
1569 for (ie = w->indirect_calls;
1570 ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
1571 {
1572 enum pure_const_state_e edge_state = IPA_CONST;
1573 bool edge_looping = false;
1574
1575 if (dump_file && (dump_flags & TDF_DETAILS))
1576 fprintf (dump_file, " Indirect call");
1577 state_from_flags (&edge_state, &edge_looping,
1578 ie->indirect_info->ecf_flags,
1579 ie->cannot_lead_to_return_p ());
1580 /* Merge the results with what we already know. */
1581 better_state (&edge_state, &edge_looping,
1582 w_l->state_previously_known,
1583 w_l->looping_previously_known);
1584 worse_state (&pure_const_state, &looping,
1585 edge_state, edge_looping, NULL, NULL);
1586 if (pure_const_state == IPA_NEITHER)
1587 break;
1588 }
1589
1590 /* And finally all loads and stores. */
1591 for (i = 0; w->iterate_reference (i, ref)
1592 && pure_const_state != IPA_NEITHER; i++)
1593 {
1594 enum pure_const_state_e ref_state = IPA_CONST;
1595 bool ref_looping = false;
1596 switch (ref->use)
1597 {
1598 case IPA_REF_LOAD:
1599 /* Readonly reads are safe. */
1600 if (TREE_READONLY (ref->referred->decl))
1601 break;
1602 if (dump_file && (dump_flags & TDF_DETAILS))
1603 fprintf (dump_file, " nonreadonly global var read\n");
1604 ref_state = IPA_PURE;
1605 break;
1606 case IPA_REF_STORE:
1607 if (ref->cannot_lead_to_return ())
1608 break;
1609 ref_state = IPA_NEITHER;
1610 if (dump_file && (dump_flags & TDF_DETAILS))
1611 fprintf (dump_file, " global var write\n");
1612 break;
1613 case IPA_REF_ADDR:
1614 break;
1615 default:
1616 gcc_unreachable ();
1617 }
1618 better_state (&ref_state, &ref_looping,
1619 w_l->state_previously_known,
1620 w_l->looping_previously_known);
1621 worse_state (&pure_const_state, &looping,
1622 ref_state, ref_looping, NULL, NULL);
1623 if (pure_const_state == IPA_NEITHER)
1624 break;
1625 }
1626 w_info = (struct ipa_dfs_info *) w->aux;
1627 w = w_info->next_cycle;
1628 }
1629 if (dump_file && (dump_flags & TDF_DETAILS))
1630 fprintf (dump_file, "Result %s looping %i\n",
1631 pure_const_names [pure_const_state],
1632 looping);
1633
1634 /* Find the worst state of can_free for any node in the cycle. */
1635 bool can_free = false;
1636 w = node;
1637 while (w && !can_free)
1638 {
1639 struct cgraph_edge *e;
1640 funct_state w_l = get_function_state (w);
1641
1642 if (w_l->can_free
1643 || w->get_availability () == AVAIL_INTERPOSABLE
1644 || w->indirect_calls)
1645 can_free = true;
1646
1647 for (e = w->callees; e && !can_free; e = e->next_callee)
1648 {
1649 enum availability avail;
1650 struct cgraph_node *y = e->callee->
1651 function_or_virtual_thunk_symbol (&avail,
1652 e->caller);
1653
1654 if (avail > AVAIL_INTERPOSABLE)
1655 can_free = get_function_state (y)->can_free;
1656 else
1657 can_free = true;
1658 }
1659 w_info = (struct ipa_dfs_info *) w->aux;
1660 w = w_info->next_cycle;
1661 }
1662
1663 /* Copy back the region's pure_const_state, which is shared by
1664 all nodes in the region. */
1665 w = node;
1666 while (w)
1667 {
1668 funct_state w_l = get_function_state (w);
1669 enum pure_const_state_e this_state = pure_const_state;
1670 bool this_looping = looping;
1671
1672 w_l->can_free = can_free;
1673 w->nonfreeing_fn = !can_free;
1674 if (!can_free && dump_file)
1675 fprintf (dump_file, "Function found not to call free: %s\n",
1676 w->name ());
1677
1678 if (w_l->state_previously_known != IPA_NEITHER
1679 && this_state > w_l->state_previously_known)
1680 {
1681 this_state = w_l->state_previously_known;
1682 if (this_state == IPA_NEITHER)
1683 this_looping = w_l->looping_previously_known;
1684 }
1685 if (!this_looping && self_recursive_p (w))
1686 this_looping = true;
1687 if (!w_l->looping_previously_known)
1688 this_looping = false;
1689
1690 /* All nodes within a cycle share the same info. */
1691 w_l->pure_const_state = this_state;
1692 w_l->looping = this_looping;
1693
1694 /* Inline clones share declaration with their offline copies;
1695 do not modify their declarations since the offline copy may
1696 be different. */
1697 if (!w->global.inlined_to)
1698 switch (this_state)
1699 {
1700 case IPA_CONST:
1701 if (!TREE_READONLY (w->decl))
1702 {
1703 warn_function_const (w->decl, !this_looping);
1704 if (dump_file)
1705 fprintf (dump_file, "Function found to be %sconst: %s\n",
1706 this_looping ? "looping " : "",
1707 w->name ());
1708 }
1709 /* Turning constructor or destructor to non-looping const/pure
1710 enables us to possibly remove the function completely. */
1711 if (this_looping)
1712 has_cdtor = false;
1713 else
1714 has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
1715 NULL, true);
1716 if (w->set_const_flag (true, this_looping))
1717 {
1718 if (dump_file)
1719 fprintf (dump_file,
1720 "Declaration updated to be %sconst: %s\n",
1721 this_looping ? "looping " : "",
1722 w->name ());
1723 remove_p |= has_cdtor;
1724 }
1725 break;
1726
1727 case IPA_PURE:
1728 if (!DECL_PURE_P (w->decl))
1729 {
1730 warn_function_pure (w->decl, !this_looping);
1731 if (dump_file)
1732 fprintf (dump_file, "Function found to be %spure: %s\n",
1733 this_looping ? "looping " : "",
1734 w->name ());
1735 }
1736 if (this_looping)
1737 has_cdtor = false;
1738 else
1739 has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
1740 NULL, true);
1741 if (w->set_pure_flag (true, this_looping))
1742 {
1743 if (dump_file)
1744 fprintf (dump_file,
1745 "Declaration updated to be %spure: %s\n",
1746 this_looping ? "looping " : "",
1747 w->name ());
1748 remove_p |= has_cdtor;
1749 }
1750 break;
1751
1752 default:
1753 break;
1754 }
1755 w_info = (struct ipa_dfs_info *) w->aux;
1756 w = w_info->next_cycle;
1757 }
1758 }
1759
1760 ipa_free_postorder_info ();
1761 free (order);
1762 return remove_p;
1763 }
1764
1765 /* Produce transitive closure over the callgraph and compute nothrow
1766 attributes. */
1767
1768 static void
1769 propagate_nothrow (void)
1770 {
1771 struct cgraph_node *node;
1772 struct cgraph_node *w;
1773 struct cgraph_node **order =
1774 XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1775 int order_pos;
1776 int i;
1777 struct ipa_dfs_info * w_info;
1778
1779 order_pos = ipa_reduced_postorder (order, true, false,
1780 ignore_edge_for_nothrow);
1781 if (dump_file)
1782 {
1783 cgraph_node::dump_cgraph (dump_file);
1784 ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
1785 }
1786
1787 /* Propagate the local information through the call graph to produce
1788 the global information. All the nodes within a cycle will have
1789 the same info so we collapse cycles first. Then we can do the
1790 propagation in one pass from the leaves to the roots. */
1791 for (i = 0; i < order_pos; i++ )
1792 {
1793 bool can_throw = false;
1794 node = order[i];
1795
1796 if (node->alias)
1797 continue;
1798
1799 /* Find the worst state for any node in the cycle. */
1800 w = node;
1801 while (w && !can_throw)
1802 {
1803 struct cgraph_edge *e, *ie;
1804
1805 if (!TREE_NOTHROW (w->decl))
1806 {
1807 funct_state w_l = get_function_state (w);
1808
1809 if (w_l->can_throw
1810 || w->get_availability () == AVAIL_INTERPOSABLE)
1811 can_throw = true;
1812
1813 for (e = w->callees; e && !can_throw; e = e->next_callee)
1814 {
1815 enum availability avail;
1816
1817 if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1818 continue;
1819
1820 struct cgraph_node *y = e->callee->
1821 function_or_virtual_thunk_symbol (&avail,
1822 e->caller);
1823
1824 /* We can use info about the callee only if we know it
1825 cannot be interposed.
1826 When the callee is compiled with non-call exceptions we must
1827 also check that the declaration is bound to the current
1828 body, as another semantically equivalent body may still
1829 throw. */
1830 if (avail <= AVAIL_INTERPOSABLE
1831 || (!TREE_NOTHROW (y->decl)
1832 && (get_function_state (y)->can_throw
1833 || (opt_for_fn (y->decl, flag_non_call_exceptions)
1834 && !e->callee->binds_to_current_def_p (w)))))
1835 can_throw = true;
1836 }
1837 for (ie = w->indirect_calls; ie && !can_throw;
1838 ie = ie->next_callee)
1839 if (ie->can_throw_external
1840 && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
1841 can_throw = true;
1842 }
1843 w_info = (struct ipa_dfs_info *) w->aux;
1844 w = w_info->next_cycle;
1845 }
1846
1847 /* Copy back the region's nothrow state, which is shared by
1848 all nodes in the region. */
1849 w = node;
1850 while (w)
1851 {
1852 funct_state w_l = get_function_state (w);
1853 if (!can_throw && !TREE_NOTHROW (w->decl))
1854 {
1855 /* Inline clones share declaration with their offline copies;
1856 do not modify their declarations since the offline copy may
1857 be different. */
1858 if (!w->global.inlined_to)
1859 {
1860 w->set_nothrow_flag (true);
1861 if (dump_file)
1862 fprintf (dump_file, "Function found to be nothrow: %s\n",
1863 w->name ());
1864 }
1865 }
1866 else if (can_throw && !TREE_NOTHROW (w->decl))
1867 w_l->can_throw = true;
1868 w_info = (struct ipa_dfs_info *) w->aux;
1869 w = w_info->next_cycle;
1870 }
1871 }
1872
1873 ipa_free_postorder_info ();
1874 free (order);
1875 }
1876
1877 /* Debugging function to dump the state of the malloc lattice.  */
1878
1879 DEBUG_FUNCTION
1880 static void
1881 dump_malloc_lattice (FILE *dump_file, const char *s)
1882 {
1883 if (!dump_file)
1884 return;
1885
1886 fprintf (dump_file, "\n\nMALLOC LATTICE %s:\n", s);
1887 cgraph_node *node;
1888 FOR_EACH_FUNCTION (node)
1889 {
1890 funct_state fs = get_function_state (node);
1891 malloc_state_e state = fs->malloc_state;
1892 fprintf (dump_file, "%s: %s\n", node->name (), malloc_state_names[state]);
1893 }
1894 }
1895
1896 /* Propagate the malloc attribute across the callgraph.  */
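/* Illustrative sketch on hypothetical user code:

     char *dup_buf (size_t n) { return (char *) malloc (n); }

   Local analysis flags dup_buf as a malloc candidate; the fixed-point
   walk below then joins its state with that of every callee whose
   return value escapes uncaptured.  If nothing drags the state to
   STATE_MALLOC_BOTTOM (indirect calls and interposable bodies do),
   the declaration finally receives the malloc attribute.  */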
1897
1898 static void
1899 propagate_malloc (void)
1900 {
1901 cgraph_node *node;
1902 FOR_EACH_FUNCTION (node)
1903 {
1904 if (DECL_IS_MALLOC (node->decl)
1905     && !has_function_state (node))
1906 {
1907 funct_state l = XCNEW (struct funct_state_d);
1908 *l = varying_state;
1909 l->malloc_state = STATE_MALLOC;
1910 set_function_state (node, l);
1911 }
1912 }
1913
1914 dump_malloc_lattice (dump_file, "Initial");
1915 struct cgraph_node **order
1916 = XNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1917 int order_pos = ipa_reverse_postorder (order);
1918 bool changed = true;
1919
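/* Iterate to a fixed point.  Because the lattice is ordered
   STATE_MALLOC_TOP < STATE_MALLOC < STATE_MALLOC_BOTTOM, the
   "new_state < callee_state" test below acts as a join: a node's
   state only ever moves toward bottom, so the loop terminates.  */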
1920 while (changed)
1921 {
1922 changed = false;
1923 /* Walk in postorder. */
1924 for (int i = order_pos - 1; i >= 0; --i)
1925 {
1926 cgraph_node *node = order[i];
1927 if (node->alias
1928 || !node->definition
1929 || !has_function_state (node))
1930 continue;
1931
1932 funct_state l = get_function_state (node);
1933
1934 /* FIXME: add support for indirect calls.  */
1935 if (node->indirect_calls)
1936 {
1937 l->malloc_state = STATE_MALLOC_BOTTOM;
1938 continue;
1939 }
1940
1941 if (node->get_availability () <= AVAIL_INTERPOSABLE)
1942 {
1943 l->malloc_state = STATE_MALLOC_BOTTOM;
1944 continue;
1945 }
1946
1947 if (l->malloc_state == STATE_MALLOC_BOTTOM)
1948 continue;
1949
1950 vec<cgraph_node *> callees = vNULL;
1951 for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
1952 {
1953 ipa_call_summary *es = ipa_call_summaries->get (cs);
1954 if (es && es->is_return_callee_uncaptured)
1955 callees.safe_push (cs->callee);
1956 }
1957
1958 malloc_state_e new_state = l->malloc_state;
1959 for (unsigned j = 0; j < callees.length (); j++)
1960 {
1961 cgraph_node *callee = callees[j];
1962 if (!has_function_state (callee))
1963 {
1964 new_state = STATE_MALLOC_BOTTOM;
1965 break;
1966 }
1967 malloc_state_e callee_state = get_function_state (callee)->malloc_state;
1968 if (new_state < callee_state)
1969 new_state = callee_state;
1970 }
     /* Release the heap storage backing the local vec.  */
     callees.release ();
1971 if (new_state != l->malloc_state)
1972 {
1973 changed = true;
1974 l->malloc_state = new_state;
1975 }
1976 }
1977 }
1978
1979 FOR_EACH_DEFINED_FUNCTION (node)
1980 if (has_function_state (node))
1981 {
1982 funct_state l = get_function_state (node);
1983 if (!node->alias
1984 && l->malloc_state == STATE_MALLOC
1985 && !node->global.inlined_to)
1986 {
1987 if (dump_file && (dump_flags & TDF_DETAILS))
1988 fprintf (dump_file, "Function %s found to be malloc\n",
1989 node->name ());
1990
1991 bool malloc_decl_p = DECL_IS_MALLOC (node->decl);
1992 node->set_malloc_flag (true);
1993 if (!malloc_decl_p && warn_suggest_attribute_malloc)
1994 warn_function_malloc (node->decl);
1995 }
1996 }
1997
1998 dump_malloc_lattice (dump_file, "after propagation");
1999 ipa_free_postorder_info ();
2000 free (order);
2001 }
2002
2003 /* Produce the global information by performing a transitive closure
2004    on the local information that was produced by generate_summary.  */
2005
2006 unsigned int
2007 pass_ipa_pure_const::
2008 execute (function *)
2009 {
2010 struct cgraph_node *node;
2011 bool remove_p;
2012
2013 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
2014 symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
2015 symtab->remove_cgraph_removal_hook (node_removal_hook_holder);
2016
2017 /* Nothrow discovery lets more functions be proven not to lead to a
2018    return, which improves the later analyses.  */
2019 propagate_nothrow ();
2020 propagate_malloc ();
2021 remove_p = propagate_pure_const ();
2022
2023 /* Cleanup. */
2024 FOR_EACH_FUNCTION (node)
2025 if (has_function_state (node))
2026 free (get_function_state (node));
2027 funct_state_vec.release ();
2028 return remove_p ? TODO_remove_functions : 0;
2029 }
2030
2031 static bool
2032 gate_pure_const (void)
2033 {
2034 return flag_ipa_pure_const || in_lto_p;
2035 }
2036
2037 pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
2038 : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
2039 pure_const_generate_summary, /* generate_summary */
2040 pure_const_write_summary, /* write_summary */
2041 pure_const_read_summary, /* read_summary */
2042 NULL, /* write_optimization_summary */
2043 NULL, /* read_optimization_summary */
2044 NULL, /* stmt_fixup */
2045 0, /* function_transform_todo_flags_start */
2046 NULL, /* function_transform */
2047 NULL), /* variable_transform */
2048 init_p(false),
2049 function_insertion_hook_holder(NULL),
2050 node_duplication_hook_holder(NULL),
2051 node_removal_hook_holder(NULL)
2052 {
2053 }
2054
2055 ipa_opt_pass_d *
2056 make_pass_ipa_pure_const (gcc::context *ctxt)
2057 {
2058 return new pass_ipa_pure_const (ctxt);
2059 }
2060
2061 /* Return true if the function should be skipped for local pure-const analysis.  */
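/* For instance (a hypothetical scenario): an exported function in a
   shared object built with -fPIC can be interposed at dynamic-link
   time, so promoting it from its visible body alone would be unsafe
   unless a non-interposable alias binds to that body.  */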
2062
2063 static bool
2064 skip_function_for_local_pure_const (struct cgraph_node *node)
2065 {
2066 /* Because we do not schedule pass_fixup_cfg over the whole program after
2067    early optimizations, we must not promote functions that are called by
2068    already-processed functions. */
2069
2070 if (function_called_by_processed_nodes_p ())
2071 {
2072 if (dump_file)
2073 fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
2074 return true;
2075 }
2076 /* Save some work and do not analyze functions which are interposable and
2077 do not have any non-interposable aliases. */
2078 if (node->get_availability () <= AVAIL_INTERPOSABLE
2079 && !node->has_aliases_p ())
2080 {
2081 if (dump_file)
2082 fprintf (dump_file,
2083 "Function is interposable; not analyzing.\n");
2084 return true;
2085 }
2086 return false;
2087 }
2088
2089 /* Simple local pass for pure const discovery reusing the analysis from
2090 ipa_pure_const. This pass is effective when executed together with
2091 other optimization passes in the early optimization pass queue. */
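/* For example, with hypothetical user code such as

     static int sq (int x) { return x * x; }

   the function is discovered to be const below, allowing repeated
   calls to be CSEd and -Wsuggest-attribute=const to fire where the
   attribute is missing.  */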
2092
2093 namespace {
2094
2095 const pass_data pass_data_local_pure_const =
2096 {
2097 GIMPLE_PASS, /* type */
2098 "local-pure-const", /* name */
2099 OPTGROUP_NONE, /* optinfo_flags */
2100 TV_IPA_PURE_CONST, /* tv_id */
2101 0, /* properties_required */
2102 0, /* properties_provided */
2103 0, /* properties_destroyed */
2104 0, /* todo_flags_start */
2105 0, /* todo_flags_finish */
2106 };
2107
2108 class pass_local_pure_const : public gimple_opt_pass
2109 {
2110 public:
2111 pass_local_pure_const (gcc::context *ctxt)
2112 : gimple_opt_pass (pass_data_local_pure_const, ctxt)
2113 {}
2114
2115 /* opt_pass methods: */
2116 opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
2117 virtual bool gate (function *) { return gate_pure_const (); }
2118 virtual unsigned int execute (function *);
2119
2120 }; // class pass_local_pure_const
2121
2122 unsigned int
2123 pass_local_pure_const::execute (function *fun)
2124 {
2125 bool changed = false;
2126 funct_state l;
2127 bool skip;
2128 struct cgraph_node *node;
2129
2130 node = cgraph_node::get (current_function_decl);
2131 skip = skip_function_for_local_pure_const (node);
2132
2133 if (!warn_suggest_attribute_const
2134 && !warn_suggest_attribute_pure
2135 && skip)
2136 return 0;
2137
2138 l = analyze_function (node, false);
2139
2140 /* Do NORETURN discovery. */
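/* For instance (hypothetical): void die (void) { exit (1); } has no
   edges into its exit block, so it is promoted to noreturn
   (TREE_THIS_VOLATILE) here and its profile is reduced to
   executed-once below.  */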
2141 if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
2142 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
2143 {
2144 warn_function_noreturn (fun->decl);
2145 if (dump_file)
2146 fprintf (dump_file, "Function found to be noreturn: %s\n",
2147 current_function_name ());
2148
2149 /* Update declaration and reduce profile to executed once. */
2150 TREE_THIS_VOLATILE (current_function_decl) = 1;
2151 if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
2152 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
2153
2154 changed = true;
2155 }
2156
2157 switch (l->pure_const_state)
2158 {
2159 case IPA_CONST:
2160 if (!TREE_READONLY (current_function_decl))
2161 {
2162 warn_function_const (current_function_decl, !l->looping);
2163 if (dump_file)
2164 fprintf (dump_file, "Function found to be %sconst: %s\n",
2165 l->looping ? "looping " : "",
2166 current_function_name ());
2167 }
2168 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
2169 && !l->looping)
2170 {
2171 if (dump_file)
2172 fprintf (dump_file, "Function found to be non-looping: %s\n",
2173 current_function_name ());
2174 }
2175 if (!skip && node->set_const_flag (true, l->looping))
2176 {
2177 if (dump_file)
2178 fprintf (dump_file, "Declaration updated to be %sconst: %s\n",
2179 l->looping ? "looping " : "",
2180 current_function_name ());
2181 changed = true;
2182 }
2183 break;
2184
2185 case IPA_PURE:
2186 if (!DECL_PURE_P (current_function_decl))
2187 {
2188 warn_function_pure (current_function_decl, !l->looping);
2189 if (dump_file)
2190 fprintf (dump_file, "Function found to be %spure: %s\n",
2191 l->looping ? "looping " : "",
2192 current_function_name ());
2193 }
2194 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
2195 && !l->looping)
2196 {
2197 if (dump_file)
2198 fprintf (dump_file, "Function found to be non-looping: %s\n",
2199 current_function_name ());
2200 }
2201 if (!skip && node->set_pure_flag (true, l->looping))
2202 {
2203 if (dump_file)
2204 fprintf (dump_file, "Declaration updated to be %spure: %s\n",
2205 l->looping ? "looping " : "",
2206 current_function_name ());
2207 changed = true;
2208 }
2209 break;
2210
2211 default:
2212 break;
2213 }
2214 if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
2215 {
2216 node->set_nothrow_flag (true);
2217 changed = true;
2218 if (dump_file)
2219 fprintf (dump_file, "Function found to be nothrow: %s\n",
2220 current_function_name ());
2221 }
2222
2223 if (l->malloc_state == STATE_MALLOC
2224 && !DECL_IS_MALLOC (current_function_decl))
2225 {
2226 node->set_malloc_flag (true);
2227 if (warn_suggest_attribute_malloc)
2228 warn_function_malloc (node->decl);
2229 changed = true;
2230 if (dump_file)
2231 fprintf (dump_file, "Function found to be malloc: %s\n",
2232 node->name ());
2233 }
2234
2235 free (l);
2236 if (changed)
2237 return execute_fixup_cfg ();
2238 else
2239 return 0;
2240 }
2241
2242 } // anon namespace
2243
2244 gimple_opt_pass *
2245 make_pass_local_pure_const (gcc::context *ctxt)
2246 {
2247 return new pass_local_pure_const (ctxt);
2248 }
2249
2250 /* Emit noreturn warnings. */
2251
2252 namespace {
2253
2254 const pass_data pass_data_warn_function_noreturn =
2255 {
2256 GIMPLE_PASS, /* type */
2257 "*warn_function_noreturn", /* name */
2258 OPTGROUP_NONE, /* optinfo_flags */
2259 TV_NONE, /* tv_id */
2260 PROP_cfg, /* properties_required */
2261 0, /* properties_provided */
2262 0, /* properties_destroyed */
2263 0, /* todo_flags_start */
2264 0, /* todo_flags_finish */
2265 };
2266
2267 class pass_warn_function_noreturn : public gimple_opt_pass
2268 {
2269 public:
2270 pass_warn_function_noreturn (gcc::context *ctxt)
2271 : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
2272 {}
2273
2274 /* opt_pass methods: */
2275 virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
2276 virtual unsigned int execute (function *fun)
2277 {
2278 if (!TREE_THIS_VOLATILE (current_function_decl)
2279 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
2280 warn_function_noreturn (current_function_decl);
2281 return 0;
2282 }
2283
2284 }; // class pass_warn_function_noreturn
2285
2286 } // anon namespace
2287
2288 gimple_opt_pass *
2289 make_pass_warn_function_noreturn (gcc::context *ctxt)
2290 {
2291 return new pass_warn_function_noreturn (ctxt);
2292 }
2293
2294 /* Simple local pass for nothrow discovery reusing the analysis from
2295    ipa_pure_const.  This pass is effective when executed together with
2296    other optimization passes in the early optimization pass queue. */
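/* Sketch on hypothetical user code:

     int fact (int n) { return n <= 1 ? 1 : n * fact (n - 1); }

   The only potentially-throwing statement is the self-recursive call,
   and a self-call cannot throw once the enclosing function itself is
   nothrow, so the pass still marks fact TREE_NOTHROW and then purges
   the EH edges that the recursion made dead.  */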
2297
2298 namespace {
2299
2300 const pass_data pass_data_nothrow =
2301 {
2302 GIMPLE_PASS, /* type */
2303 "nothrow", /* name */
2304 OPTGROUP_NONE, /* optinfo_flags */
2305 TV_IPA_PURE_CONST, /* tv_id */
2306 0, /* properties_required */
2307 0, /* properties_provided */
2308 0, /* properties_destroyed */
2309 0, /* todo_flags_start */
2310 0, /* todo_flags_finish */
2311 };
2312
2313 class pass_nothrow : public gimple_opt_pass
2314 {
2315 public:
2316 pass_nothrow (gcc::context *ctxt)
2317 : gimple_opt_pass (pass_data_nothrow, ctxt)
2318 {}
2319
2320 /* opt_pass methods: */
2321 opt_pass * clone () { return new pass_nothrow (m_ctxt); }
2322 virtual bool gate (function *) { return optimize; }
2323 virtual unsigned int execute (function *);
2324
2325 }; // class pass_nothrow
2326
2327 unsigned int
2328 pass_nothrow::execute (function *)
2329 {
2330 struct cgraph_node *node;
2331 basic_block this_block;
2332
2333 if (TREE_NOTHROW (current_function_decl))
2334 return 0;
2335
2336 node = cgraph_node::get (current_function_decl);
2337
2338 /* We run during lowering; we cannot really use availability yet.  */
2339 if (node->get_availability ()
2340     <= AVAIL_INTERPOSABLE)
2341 {
2342 if (dump_file)
2343 fprintf (dump_file, "Function is interposable;"
2344 " not analyzing.\n");
2345 return 0;
2346 }
2347
2348 FOR_EACH_BB_FN (this_block, cfun)
2349 {
2350 for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
2351 !gsi_end_p (gsi);
2352 gsi_next (&gsi))
2353 if (stmt_can_throw_external (gsi_stmt (gsi)))
2354 {
2355 if (is_gimple_call (gsi_stmt (gsi)))
2356 {
2357 tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
2358 if (callee_t && recursive_call_p (current_function_decl,
2359 callee_t))
2360 continue;
2361 }
2362
2363 if (dump_file)
2364 {
2365 fprintf (dump_file, "Statement can throw: ");
2366 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
2367 }
2368 return 0;
2369 }
2370 }
2371
2372 node->set_nothrow_flag (true);
2373
2374 bool cfg_changed = false;
2375 if (self_recursive_p (node))
2376 FOR_EACH_BB_FN (this_block, cfun)
2377 if (gimple *g = last_stmt (this_block))
2378 if (is_gimple_call (g))
2379 {
2380 tree callee_t = gimple_call_fndecl (g);
2381 if (callee_t
2382 && recursive_call_p (current_function_decl, callee_t)
2383 && maybe_clean_eh_stmt (g)
2384 && gimple_purge_dead_eh_edges (this_block))
2385 cfg_changed = true;
2386 }
2387
2388 if (dump_file)
2389 fprintf (dump_file, "Function found to be nothrow: %s\n",
2390 current_function_name ());
2391 return cfg_changed ? TODO_cleanup_cfg : 0;
2392 }
2393
2394 } // anon namespace
2395
2396 gimple_opt_pass *
2397 make_pass_nothrow (gcc::context *ctxt)
2398 {
2399 return new pass_nothrow (ctxt);
2400 }