1 /* Callgraph based analysis of static variables.
2 Copyright (C) 2004-2018 Free Software Foundation, Inc.
3 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This file marks functions as being either const (TREE_READONLY) or
22 pure (DECL_PURE_P). It can also set a variant of these that
23 are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
25 This must be run after inlining decisions have been made since
26 otherwise, the local sets will not contain information that is
27 consistent with post inlined state. The global sets are not prone
28 to this problem since they are by definition transitive. */
30 /* The code in this module is called by the ipa pass manager. It
31 should be one of the later passes since it's information is used by
32 the rest of the compilation. */
36 #include "coretypes.h"
41 #include "tree-pass.h"
42 #include "tree-streamer.h"
44 #include "diagnostic.h"
48 #include "gimple-iterator.h"
49 #include "gimple-walk.h"
51 #include "tree-ssa-loop-niter.h"
52 #include "langhooks.h"
53 #include "ipa-utils.h"
54 #include "gimple-pretty-print.h"
56 #include "tree-scalar-evolution.h"
60 #include "alloc-pool.h"
61 #include "symbol-summary.h"
63 #include "ipa-fnsummary.h"
65 /* Lattice values for const and pure functions. Everything starts out
66 being const, then may drop to pure and then neither depending on
68 enum pure_const_state_e
/* Printable names for the three pure/const lattice values, indexed by
   enum pure_const_state_e (IPA_CONST, IPA_PURE, IPA_NEITHER).  */
static const char *pure_const_names[3] = {"const", "pure", "neither"};
/* Printable names for the malloc lattice values, indexed by
   enum malloc_state_e (STATE_MALLOC_TOP, STATE_MALLOC, STATE_MALLOC_BOTTOM).  */
static const char *malloc_state_names[] = {"malloc_top", "malloc", "malloc_bottom"};
86 /* Holder for the const_state. There is one of these per function
91 enum pure_const_state_e pure_const_state
;
92 /* What user set here; we can be always sure about this. */
93 enum pure_const_state_e state_previously_known
;
94 bool looping_previously_known
;
96 /* True if the function could possibly infinite loop. There are a
97 lot of ways that this could be determined. We are pretty
98 conservative here. While it is possible to cse pure and const
99 calls, it is not legal to have dce get rid of the call if there
100 is a possibility that the call could infinite loop since this is
101 a behavioral change. */
106 /* If function can call free, munmap or otherwise make previously
107 non-trapping memory accesses trapping. */
110 enum malloc_state_e malloc_state
;
113 /* State used when we know nothing about function. */
114 static struct funct_state_d varying_state
115 = { IPA_NEITHER
, IPA_NEITHER
, true, true, true, true, STATE_MALLOC_BOTTOM
};
118 typedef struct funct_state_d
* funct_state
;
120 /* The storage of the funct_state is abstracted because there is the
121 possibility that it may be desirable to move this to the cgraph
124 /* Array, indexed by cgraph node uid, of function states. */
126 static vec
<funct_state
> funct_state_vec
;
/* Forward declaration: pass gate predicate, defined later in this file.  */
static bool gate_pure_const (void);
132 const pass_data pass_data_ipa_pure_const
=
135 "pure-const", /* name */
136 OPTGROUP_NONE
, /* optinfo_flags */
137 TV_IPA_PURE_CONST
, /* tv_id */
138 0, /* properties_required */
139 0, /* properties_provided */
140 0, /* properties_destroyed */
141 0, /* todo_flags_start */
142 0, /* todo_flags_finish */
145 class pass_ipa_pure_const
: public ipa_opt_pass_d
148 pass_ipa_pure_const(gcc::context
*ctxt
);
150 /* opt_pass methods: */
151 bool gate (function
*) { return gate_pure_const (); }
152 unsigned int execute (function
*fun
);
154 void register_hooks (void);
159 /* Holders of ipa cgraph hooks: */
160 struct cgraph_node_hook_list
*function_insertion_hook_holder
;
161 struct cgraph_2node_hook_list
*node_duplication_hook_holder
;
162 struct cgraph_node_hook_list
*node_removal_hook_holder
;
164 }; // class pass_ipa_pure_const
168 /* Try to guess if function body will always be visible to compiler
169 when compiling the call and whether compiler will be able
170 to propagate the information by itself. */
173 function_always_visible_to_compiler_p (tree decl
)
175 return (!TREE_PUBLIC (decl
) || DECL_DECLARED_INLINE_P (decl
)
176 || DECL_COMDAT (decl
));
179 /* Emit suggestion about attribute ATTRIB_NAME for DECL. KNOWN_FINITE
180 is true if the function is known to be finite. The diagnostic is
181 controlled by OPTION. WARNED_ABOUT is a hash_set<tree> unique for
182 OPTION, this function may initialize it and it is always returned
185 static hash_set
<tree
> *
186 suggest_attribute (int option
, tree decl
, bool known_finite
,
187 hash_set
<tree
> *warned_about
,
188 const char * attrib_name
)
190 if (!option_enabled (option
, &global_options
))
192 if (TREE_THIS_VOLATILE (decl
)
193 || (known_finite
&& function_always_visible_to_compiler_p (decl
)))
197 warned_about
= new hash_set
<tree
>;
198 if (warned_about
->contains (decl
))
200 warned_about
->add (decl
);
201 warning_at (DECL_SOURCE_LOCATION (decl
),
204 ? G_("function might be candidate for attribute %qs")
205 : G_("function might be candidate for attribute %qs"
206 " if it is known to return normally"), attrib_name
);
210 /* Emit suggestion about __attribute_((pure)) for DECL. KNOWN_FINITE
211 is true if the function is known to be finite. */
214 warn_function_pure (tree decl
, bool known_finite
)
216 /* Declaring a void function pure makes no sense and is diagnosed
217 by -Wattributes because calling it would have no effect. */
218 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
221 static hash_set
<tree
> *warned_about
;
223 = suggest_attribute (OPT_Wsuggest_attribute_pure
, decl
,
224 known_finite
, warned_about
, "pure");
227 /* Emit suggestion about __attribute_((const)) for DECL. KNOWN_FINITE
228 is true if the function is known to be finite. */
231 warn_function_const (tree decl
, bool known_finite
)
233 /* Declaring a void function const makes no sense is diagnosed
234 by -Wattributes because calling it would have no effect. */
235 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
238 static hash_set
<tree
> *warned_about
;
240 = suggest_attribute (OPT_Wsuggest_attribute_const
, decl
,
241 known_finite
, warned_about
, "const");
244 /* Emit suggestion about __attribute__((malloc)) for DECL. */
247 warn_function_malloc (tree decl
)
249 static hash_set
<tree
> *warned_about
;
251 = suggest_attribute (OPT_Wsuggest_attribute_malloc
, decl
,
252 true, warned_about
, "malloc");
255 /* Emit suggestion about __attribute__((noreturn)) for DECL. */
258 warn_function_noreturn (tree decl
)
260 tree original_decl
= decl
;
262 static hash_set
<tree
> *warned_about
;
263 if (!lang_hooks
.missing_noreturn_ok_p (decl
)
264 && targetm
.warn_func_return (decl
))
266 = suggest_attribute (OPT_Wsuggest_attribute_noreturn
, original_decl
,
267 true, warned_about
, "noreturn");
271 warn_function_cold (tree decl
)
273 tree original_decl
= decl
;
275 static hash_set
<tree
> *warned_about
;
277 = suggest_attribute (OPT_Wsuggest_attribute_cold
, original_decl
,
278 true, warned_about
, "cold");
281 /* Return true if we have a function state for NODE. */
284 has_function_state (struct cgraph_node
*node
)
286 if (!funct_state_vec
.exists ()
287 || funct_state_vec
.length () <= (unsigned int)node
->uid
)
289 return funct_state_vec
[node
->uid
] != NULL
;
292 /* Return the function state from NODE. */
294 static inline funct_state
295 get_function_state (struct cgraph_node
*node
)
297 if (!funct_state_vec
.exists ()
298 || funct_state_vec
.length () <= (unsigned int)node
->uid
299 || !funct_state_vec
[node
->uid
])
300 /* We might want to put correct previously_known state into varying. */
301 return &varying_state
;
302 return funct_state_vec
[node
->uid
];
305 /* Set the function state S for NODE. */
308 set_function_state (struct cgraph_node
*node
, funct_state s
)
310 if (!funct_state_vec
.exists ()
311 || funct_state_vec
.length () <= (unsigned int)node
->uid
)
312 funct_state_vec
.safe_grow_cleared (node
->uid
+ 1);
314 /* If funct_state_vec already contains a funct_state, we have to release
315 it before it's going to be ovewritten. */
316 if (funct_state_vec
[node
->uid
] != NULL
317 && funct_state_vec
[node
->uid
] != &varying_state
)
318 free (funct_state_vec
[node
->uid
]);
320 funct_state_vec
[node
->uid
] = s
;
323 /* Check to see if the use (or definition when CHECKING_WRITE is true)
324 variable T is legal in a function that is either pure or const. */
327 check_decl (funct_state local
,
328 tree t
, bool checking_write
, bool ipa
)
330 /* Do not want to do anything with volatile except mark any
331 function that uses one to be not const or pure. */
332 if (TREE_THIS_VOLATILE (t
))
334 local
->pure_const_state
= IPA_NEITHER
;
336 fprintf (dump_file
, " Volatile operand is not const/pure\n");
340 /* Do not care about a local automatic that is not static. */
341 if (!TREE_STATIC (t
) && !DECL_EXTERNAL (t
))
344 /* If the variable has the "used" attribute, treat it as if it had a
345 been touched by the devil. */
346 if (DECL_PRESERVE_P (t
))
348 local
->pure_const_state
= IPA_NEITHER
;
350 fprintf (dump_file
, " Used static/global variable is not const/pure\n");
354 /* In IPA mode we are not interested in checking actual loads and stores;
355 they will be processed at propagation time using ipa_ref. */
359 /* Since we have dealt with the locals and params cases above, if we
360 are CHECKING_WRITE, this cannot be a pure or constant
364 local
->pure_const_state
= IPA_NEITHER
;
366 fprintf (dump_file
, " static/global memory write is not const/pure\n");
370 if (DECL_EXTERNAL (t
) || TREE_PUBLIC (t
))
372 /* Readonly reads are safe. */
373 if (TREE_READONLY (t
) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t
)))
374 return; /* Read of a constant, do not change the function state. */
378 fprintf (dump_file
, " global memory read is not const\n");
379 /* Just a regular read. */
380 if (local
->pure_const_state
== IPA_CONST
)
381 local
->pure_const_state
= IPA_PURE
;
386 /* Compilation level statics can be read if they are readonly
388 if (TREE_READONLY (t
))
392 fprintf (dump_file
, " static memory read is not const\n");
393 /* Just a regular read. */
394 if (local
->pure_const_state
== IPA_CONST
)
395 local
->pure_const_state
= IPA_PURE
;
400 /* Check to see if the use (or definition when CHECKING_WRITE is true)
401 variable T is legal in a function that is either pure or const. */
404 check_op (funct_state local
, tree t
, bool checking_write
)
406 t
= get_base_address (t
);
407 if (t
&& TREE_THIS_VOLATILE (t
))
409 local
->pure_const_state
= IPA_NEITHER
;
411 fprintf (dump_file
, " Volatile indirect ref is not const/pure\n");
415 && (INDIRECT_REF_P (t
) || TREE_CODE (t
) == MEM_REF
)
416 && TREE_CODE (TREE_OPERAND (t
, 0)) == SSA_NAME
417 && !ptr_deref_may_alias_global_p (TREE_OPERAND (t
, 0)))
420 fprintf (dump_file
, " Indirect ref to local memory is OK\n");
423 else if (checking_write
)
425 local
->pure_const_state
= IPA_NEITHER
;
427 fprintf (dump_file
, " Indirect ref write is not const/pure\n");
433 fprintf (dump_file
, " Indirect ref read is not const\n");
434 if (local
->pure_const_state
== IPA_CONST
)
435 local
->pure_const_state
= IPA_PURE
;
439 /* compute state based on ECF FLAGS and store to STATE and LOOPING. */
442 state_from_flags (enum pure_const_state_e
*state
, bool *looping
,
443 int flags
, bool cannot_lead_to_return
)
446 if (flags
& ECF_LOOPING_CONST_OR_PURE
)
449 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
450 fprintf (dump_file
, " looping\n");
452 if (flags
& ECF_CONST
)
455 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
456 fprintf (dump_file
, " const\n");
458 else if (flags
& ECF_PURE
)
461 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
462 fprintf (dump_file
, " pure\n");
464 else if (cannot_lead_to_return
)
468 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
469 fprintf (dump_file
, " ignoring side effects->pure looping\n");
473 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
474 fprintf (dump_file
, " neither\n");
475 *state
= IPA_NEITHER
;
480 /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
481 into STATE and LOOPING better of the two variants.
482 Be sure to merge looping correctly. IPA_NEITHER functions
483 have looping 0 even if they don't have to return. */
486 better_state (enum pure_const_state_e
*state
, bool *looping
,
487 enum pure_const_state_e state2
, bool looping2
)
491 if (*state
== IPA_NEITHER
)
494 *looping
= MIN (*looping
, looping2
);
497 else if (state2
!= IPA_NEITHER
)
498 *looping
= MIN (*looping
, looping2
);
501 /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
502 into STATE and LOOPING worse of the two variants.
503 N is the actual node called. */
506 worse_state (enum pure_const_state_e
*state
, bool *looping
,
507 enum pure_const_state_e state2
, bool looping2
,
508 struct symtab_node
*from
,
509 struct symtab_node
*to
)
511 /* Consider function:
518 During early optimization we will turn this into:
525 Now if this function will be detected as CONST however when interposed it
526 may end up being just pure. We always must assume the worst scenario here.
528 if (*state
== IPA_CONST
&& state2
== IPA_CONST
529 && to
&& !TREE_READONLY (to
->decl
) && !to
->binds_to_current_def_p (from
))
531 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
532 fprintf (dump_file
, "Dropping state to PURE because call to %s may not "
533 "bind to current def.\n", to
->name ());
536 *state
= MAX (*state
, state2
);
537 *looping
= MAX (*looping
, looping2
);
540 /* Recognize special cases of builtins that are by themselves not pure or const
541 but function using them is. */
543 special_builtin_state (enum pure_const_state_e
*state
, bool *looping
,
546 if (DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
)
547 switch (DECL_FUNCTION_CODE (callee
))
549 case BUILT_IN_RETURN
:
550 case BUILT_IN_UNREACHABLE
:
551 CASE_BUILT_IN_ALLOCA
:
552 case BUILT_IN_STACK_SAVE
:
553 case BUILT_IN_STACK_RESTORE
:
554 case BUILT_IN_EH_POINTER
:
555 case BUILT_IN_EH_FILTER
:
556 case BUILT_IN_UNWIND_RESUME
:
557 case BUILT_IN_CXA_END_CLEANUP
:
558 case BUILT_IN_EH_COPY_VALUES
:
559 case BUILT_IN_FRAME_ADDRESS
:
561 case BUILT_IN_APPLY_ARGS
:
562 case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT
:
563 case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
:
567 case BUILT_IN_PREFETCH
:
577 /* Check the parameters of a function call to CALL_EXPR to see if
578 there are any references in the parameters that are not allowed for
579 pure or const functions. Also check to see if this is either an
580 indirect call, a call outside the compilation unit, or has special
581 attributes that may also effect the purity. The CALL_EXPR node for
582 the entire call expression. */
585 check_call (funct_state local
, gcall
*call
, bool ipa
)
587 int flags
= gimple_call_flags (call
);
588 tree callee_t
= gimple_call_fndecl (call
);
589 bool possibly_throws
= stmt_could_throw_p (call
);
590 bool possibly_throws_externally
= (possibly_throws
591 && stmt_can_throw_external (call
));
596 for (i
= 0; i
< gimple_num_ops (call
); i
++)
597 if (gimple_op (call
, i
)
598 && tree_could_throw_p (gimple_op (call
, i
)))
600 if (possibly_throws
&& cfun
->can_throw_non_call_exceptions
)
603 fprintf (dump_file
, " operand can throw; looping\n");
604 local
->looping
= true;
606 if (possibly_throws_externally
)
609 fprintf (dump_file
, " operand can throw externally\n");
610 local
->can_throw
= true;
615 /* The const and pure flags are set by a variety of places in the
616 compiler (including here). If someone has already set the flags
617 for the callee, (such as for some of the builtins) we will use
618 them, otherwise we will compute our own information.
620 Const and pure functions have less clobber effects than other
621 functions so we process these first. Otherwise if it is a call
622 outside the compilation unit or an indirect call we punt. This
623 leaves local calls which will be processed by following the call
627 enum pure_const_state_e call_state
;
630 if (gimple_call_builtin_p (call
, BUILT_IN_NORMAL
)
631 && !nonfreeing_call_p (call
))
632 local
->can_free
= true;
634 if (special_builtin_state (&call_state
, &call_looping
, callee_t
))
636 worse_state (&local
->pure_const_state
, &local
->looping
,
637 call_state
, call_looping
,
641 /* When bad things happen to bad functions, they cannot be const
643 if (setjmp_call_p (callee_t
))
646 fprintf (dump_file
, " setjmp is not const/pure\n");
647 local
->looping
= true;
648 local
->pure_const_state
= IPA_NEITHER
;
651 if (DECL_BUILT_IN_CLASS (callee_t
) == BUILT_IN_NORMAL
)
652 switch (DECL_FUNCTION_CODE (callee_t
))
654 case BUILT_IN_LONGJMP
:
655 case BUILT_IN_NONLOCAL_GOTO
:
657 fprintf (dump_file
, " longjmp and nonlocal goto is not const/pure\n");
658 local
->pure_const_state
= IPA_NEITHER
;
659 local
->looping
= true;
665 else if (gimple_call_internal_p (call
) && !nonfreeing_call_p (call
))
666 local
->can_free
= true;
668 /* When not in IPA mode, we can still handle self recursion. */
670 && recursive_call_p (current_function_decl
, callee_t
))
673 fprintf (dump_file
, " Recursive call can loop.\n");
674 local
->looping
= true;
676 /* Either callee is unknown or we are doing local analysis.
677 Look to see if there are any bits available for the callee (such as by
678 declaration or because it is builtin) and process solely on the basis of
679 those bits. Handle internal calls always, those calls don't have
680 corresponding cgraph edges and thus aren't processed during
682 else if (!ipa
|| gimple_call_internal_p (call
))
684 enum pure_const_state_e call_state
;
686 if (possibly_throws
&& cfun
->can_throw_non_call_exceptions
)
689 fprintf (dump_file
, " can throw; looping\n");
690 local
->looping
= true;
692 if (possibly_throws_externally
)
696 fprintf (dump_file
, " can throw externally to lp %i\n",
697 lookup_stmt_eh_lp (call
));
699 fprintf (dump_file
, " callee:%s\n",
700 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t
)));
702 local
->can_throw
= true;
704 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
705 fprintf (dump_file
, " checking flags for call:");
706 state_from_flags (&call_state
, &call_looping
, flags
,
707 ((flags
& (ECF_NORETURN
| ECF_NOTHROW
))
708 == (ECF_NORETURN
| ECF_NOTHROW
))
709 || (!flag_exceptions
&& (flags
& ECF_NORETURN
)));
710 worse_state (&local
->pure_const_state
, &local
->looping
,
711 call_state
, call_looping
, NULL
, NULL
);
713 /* Direct functions calls are handled by IPA propagation. */
716 /* Wrapper around check_decl for loads in local more. */
719 check_load (gimple
*, tree op
, tree
, void *data
)
722 check_decl ((funct_state
)data
, op
, false, false);
724 check_op ((funct_state
)data
, op
, false);
728 /* Wrapper around check_decl for stores in local more. */
731 check_store (gimple
*, tree op
, tree
, void *data
)
734 check_decl ((funct_state
)data
, op
, true, false);
736 check_op ((funct_state
)data
, op
, true);
740 /* Wrapper around check_decl for loads in ipa mode. */
743 check_ipa_load (gimple
*, tree op
, tree
, void *data
)
746 check_decl ((funct_state
)data
, op
, false, true);
748 check_op ((funct_state
)data
, op
, false);
752 /* Wrapper around check_decl for stores in ipa mode. */
755 check_ipa_store (gimple
*, tree op
, tree
, void *data
)
758 check_decl ((funct_state
)data
, op
, true, true);
760 check_op ((funct_state
)data
, op
, true);
764 /* Look into pointer pointed to by GSIP and figure out what interesting side
767 check_stmt (gimple_stmt_iterator
*gsip
, funct_state local
, bool ipa
)
769 gimple
*stmt
= gsi_stmt (*gsip
);
771 if (is_gimple_debug (stmt
))
774 /* Do consider clobber as side effects before IPA, so we rather inline
775 C++ destructors and keep clobber semantics than eliminate them.
777 TODO: We may get smarter during early optimizations on these and let
778 functions containing only clobbers to be optimized more. This is a common
779 case of C++ destructors. */
781 if ((ipa
|| cfun
->after_inlining
) && gimple_clobber_p (stmt
))
786 fprintf (dump_file
, " scanning: ");
787 print_gimple_stmt (dump_file
, stmt
, 0);
790 if (gimple_has_volatile_ops (stmt
)
791 && !gimple_clobber_p (stmt
))
793 local
->pure_const_state
= IPA_NEITHER
;
795 fprintf (dump_file
, " Volatile stmt is not const/pure\n");
798 /* Look for loads and stores. */
799 walk_stmt_load_store_ops (stmt
, local
,
800 ipa
? check_ipa_load
: check_load
,
801 ipa
? check_ipa_store
: check_store
);
803 if (gimple_code (stmt
) != GIMPLE_CALL
804 && stmt_could_throw_p (stmt
))
806 if (cfun
->can_throw_non_call_exceptions
)
809 fprintf (dump_file
, " can throw; looping\n");
810 local
->looping
= true;
812 if (stmt_can_throw_external (stmt
))
815 fprintf (dump_file
, " can throw externally\n");
816 local
->can_throw
= true;
820 fprintf (dump_file
, " can throw\n");
822 switch (gimple_code (stmt
))
825 check_call (local
, as_a
<gcall
*> (stmt
), ipa
);
828 if (DECL_NONLOCAL (gimple_label_label (as_a
<glabel
*> (stmt
))))
829 /* Target of long jump. */
832 fprintf (dump_file
, " nonlocal label is not const/pure\n");
833 local
->pure_const_state
= IPA_NEITHER
;
837 if (gimple_asm_clobbers_memory_p (as_a
<gasm
*> (stmt
)))
840 fprintf (dump_file
, " memory asm clobber is not const/pure\n");
841 /* Abandon all hope, ye who enter here. */
842 local
->pure_const_state
= IPA_NEITHER
;
843 local
->can_free
= true;
845 if (gimple_asm_volatile_p (as_a
<gasm
*> (stmt
)))
848 fprintf (dump_file
, " volatile is not const/pure\n");
849 /* Abandon all hope, ye who enter here. */
850 local
->pure_const_state
= IPA_NEITHER
;
851 local
->looping
= true;
852 local
->can_free
= true;
860 /* Check that RETVAL is used only in STMT and in comparisons against 0.
861 RETVAL is return value of the function and STMT is return stmt. */
864 check_retval_uses (tree retval
, gimple
*stmt
)
866 imm_use_iterator use_iter
;
869 FOR_EACH_IMM_USE_STMT (use_stmt
, use_iter
, retval
)
870 if (gcond
*cond
= dyn_cast
<gcond
*> (use_stmt
))
872 tree op2
= gimple_cond_rhs (cond
);
873 if (!integer_zerop (op2
))
874 RETURN_FROM_IMM_USE_STMT (use_iter
, false);
876 else if (gassign
*ga
= dyn_cast
<gassign
*> (use_stmt
))
878 enum tree_code code
= gimple_assign_rhs_code (ga
);
879 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
880 RETURN_FROM_IMM_USE_STMT (use_iter
, false);
881 if (!integer_zerop (gimple_assign_rhs2 (ga
)))
882 RETURN_FROM_IMM_USE_STMT (use_iter
, false);
884 else if (is_gimple_debug (use_stmt
))
886 else if (use_stmt
!= stmt
)
887 RETURN_FROM_IMM_USE_STMT (use_iter
, false);
892 /* malloc_candidate_p() checks if FUN can possibly be annotated with malloc
893 attribute. Currently this function does a very conservative analysis.
894 FUN is considered to be a candidate if
895 1) It returns a value of pointer type.
896 2) SSA_NAME_DEF_STMT (return_value) is either a function call or
897 a phi, and element of phi is either NULL or
898 SSA_NAME_DEF_STMT(element) is function call.
899 3) The return-value has immediate uses only within comparisons (gcond or gassign)
900 and return_stmt (and likewise a phi arg has immediate use only within comparison
904 malloc_candidate_p (function
*fun
, bool ipa
)
906 basic_block exit_block
= EXIT_BLOCK_PTR_FOR_FN (fun
);
909 cgraph_node
*node
= cgraph_node::get_create (fun
->decl
);
911 #define DUMP_AND_RETURN(reason) \
913 if (dump_file && (dump_flags & TDF_DETAILS)) \
914 fprintf (dump_file, "\n%s is not a malloc candidate, reason: %s\n", \
915 (node->name()), (reason)); \
919 if (EDGE_COUNT (exit_block
->preds
) == 0
920 || !flag_delete_null_pointer_checks
)
923 FOR_EACH_EDGE (e
, ei
, exit_block
->preds
)
925 gimple_stmt_iterator gsi
= gsi_last_bb (e
->src
);
926 greturn
*ret_stmt
= dyn_cast
<greturn
*> (gsi_stmt (gsi
));
931 tree retval
= gimple_return_retval (ret_stmt
);
933 DUMP_AND_RETURN("No return value.")
935 if (TREE_CODE (retval
) != SSA_NAME
936 || TREE_CODE (TREE_TYPE (retval
)) != POINTER_TYPE
)
937 DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")
939 if (!check_retval_uses (retval
, ret_stmt
))
940 DUMP_AND_RETURN("Return value has uses outside return stmt"
941 " and comparisons against 0.")
943 gimple
*def
= SSA_NAME_DEF_STMT (retval
);
944 if (gcall
*call_stmt
= dyn_cast
<gcall
*> (def
))
946 tree callee_decl
= gimple_call_fndecl (call_stmt
);
950 if (!ipa
&& !DECL_IS_MALLOC (callee_decl
))
951 DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
954 cgraph_edge
*cs
= node
->get_edge (call_stmt
);
957 ipa_call_summary
*es
= ipa_call_summaries
->get (cs
);
959 es
->is_return_callee_uncaptured
= true;
963 else if (gphi
*phi
= dyn_cast
<gphi
*> (def
))
965 bool all_args_zero
= true;
966 for (unsigned i
= 0; i
< gimple_phi_num_args (phi
); ++i
)
968 tree arg
= gimple_phi_arg_def (phi
, i
);
969 if (integer_zerop (arg
))
972 all_args_zero
= false;
973 if (TREE_CODE (arg
) != SSA_NAME
)
974 DUMP_AND_RETURN ("phi arg is not SSA_NAME.");
975 if (!check_retval_uses (arg
, phi
))
976 DUMP_AND_RETURN ("phi arg has uses outside phi"
977 " and comparisons against 0.")
979 gimple
*arg_def
= SSA_NAME_DEF_STMT (arg
);
980 gcall
*call_stmt
= dyn_cast
<gcall
*> (arg_def
);
983 tree callee_decl
= gimple_call_fndecl (call_stmt
);
986 if (!ipa
&& !DECL_IS_MALLOC (callee_decl
))
987 DUMP_AND_RETURN("callee_decl does not have malloc attribute"
988 " for non-ipa mode.")
990 cgraph_edge
*cs
= node
->get_edge (call_stmt
);
993 ipa_call_summary
*es
= ipa_call_summaries
->get (cs
);
995 es
->is_return_callee_uncaptured
= true;
1000 DUMP_AND_RETURN ("Return value is a phi with all args equal to 0.");
1004 DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")
1007 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1008 fprintf (dump_file
, "\nFound %s to be candidate for malloc attribute\n",
1009 IDENTIFIER_POINTER (DECL_NAME (fun
->decl
)));
1012 #undef DUMP_AND_RETURN
1016 /* This is the main routine for finding the reference patterns for
1017 global variables within a function FN. */
1020 analyze_function (struct cgraph_node
*fn
, bool ipa
)
1022 tree decl
= fn
->decl
;
1024 basic_block this_block
;
1026 l
= XCNEW (struct funct_state_d
);
1027 l
->pure_const_state
= IPA_CONST
;
1028 l
->state_previously_known
= IPA_NEITHER
;
1029 l
->looping_previously_known
= true;
1031 l
->can_throw
= false;
1032 l
->can_free
= false;
1033 state_from_flags (&l
->state_previously_known
, &l
->looping_previously_known
,
1034 flags_from_decl_or_type (fn
->decl
),
1035 fn
->cannot_return_p ());
1037 if (fn
->thunk
.thunk_p
|| fn
->alias
)
1039 /* Thunk gets propagated through, so nothing interesting happens. */
1041 if (fn
->thunk
.thunk_p
&& fn
->thunk
.virtual_offset_p
)
1042 l
->pure_const_state
= IPA_NEITHER
;
1048 fprintf (dump_file
, "\n\n local analysis of %s\n ",
1052 push_cfun (DECL_STRUCT_FUNCTION (decl
));
1054 FOR_EACH_BB_FN (this_block
, cfun
)
1056 gimple_stmt_iterator gsi
;
1057 struct walk_stmt_info wi
;
1059 memset (&wi
, 0, sizeof (wi
));
1060 for (gsi
= gsi_start_bb (this_block
);
1064 check_stmt (&gsi
, l
, ipa
);
1065 if (l
->pure_const_state
== IPA_NEITHER
1074 if (l
->pure_const_state
!= IPA_NEITHER
)
1076 /* Const functions cannot have back edges (an
1077 indication of possible infinite loop side
1079 if (mark_dfs_back_edges ())
1081 /* Preheaders are needed for SCEV to work.
1082 Simple latches and recorded exits improve chances that loop will
1083 proved to be finite in testcases such as in loop-15.c
1085 loop_optimizer_init (LOOPS_HAVE_PREHEADERS
1086 | LOOPS_HAVE_SIMPLE_LATCHES
1087 | LOOPS_HAVE_RECORDED_EXITS
);
1088 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1089 flow_loops_dump (dump_file
, NULL
, 0);
1090 if (mark_irreducible_loops ())
1093 fprintf (dump_file
, " has irreducible loops\n");
1100 FOR_EACH_LOOP (loop
, 0)
1101 if (!finite_loop_p (loop
))
1104 fprintf (dump_file
, " can not prove finiteness of "
1105 "loop %i\n", loop
->num
);
1111 loop_optimizer_finalize ();
1115 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1116 fprintf (dump_file
, " checking previously known:");
1118 better_state (&l
->pure_const_state
, &l
->looping
,
1119 l
->state_previously_known
,
1120 l
->looping_previously_known
);
1121 if (TREE_NOTHROW (decl
))
1122 l
->can_throw
= false;
1124 l
->malloc_state
= STATE_MALLOC_BOTTOM
;
1125 if (DECL_IS_MALLOC (decl
))
1126 l
->malloc_state
= STATE_MALLOC
;
1127 else if (ipa
&& malloc_candidate_p (DECL_STRUCT_FUNCTION (decl
), true))
1128 l
->malloc_state
= STATE_MALLOC_TOP
;
1129 else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl
), false))
1130 l
->malloc_state
= STATE_MALLOC
;
1136 fprintf (dump_file
, "Function is locally looping.\n");
1138 fprintf (dump_file
, "Function is locally throwing.\n");
1139 if (l
->pure_const_state
== IPA_CONST
)
1140 fprintf (dump_file
, "Function is locally const.\n");
1141 if (l
->pure_const_state
== IPA_PURE
)
1142 fprintf (dump_file
, "Function is locally pure.\n");
1144 fprintf (dump_file
, "Function can locally free.\n");
1145 if (l
->malloc_state
== STATE_MALLOC
)
1146 fprintf (dump_file
, "Function is locally malloc.\n");
1151 /* Called when new function is inserted to callgraph late. */
1153 add_new_function (struct cgraph_node
*node
, void *data ATTRIBUTE_UNUSED
)
1155 /* There are some shared nodes, in particular the initializers on
1156 static declarations. We do not need to scan them more than once
1157 since all we would be interested in are the addressof
1159 if (opt_for_fn (node
->decl
, flag_ipa_pure_const
))
1160 set_function_state (node
, analyze_function (node
, true));
1163 /* Called when new clone is inserted to callgraph late. */
1166 duplicate_node_data (struct cgraph_node
*src
, struct cgraph_node
*dst
,
1167 void *data ATTRIBUTE_UNUSED
)
1169 if (has_function_state (src
))
1171 funct_state l
= XNEW (struct funct_state_d
);
1172 gcc_assert (!has_function_state (dst
));
1173 memcpy (l
, get_function_state (src
), sizeof (*l
));
1174 set_function_state (dst
, l
);
1178 /* Called when new clone is inserted to callgraph late. */
1181 remove_node_data (struct cgraph_node
*node
, void *data ATTRIBUTE_UNUSED
)
1183 if (has_function_state (node
))
1184 set_function_state (node
, NULL
);
1189 pass_ipa_pure_const::
1190 register_hooks (void)
1197 node_removal_hook_holder
=
1198 symtab
->add_cgraph_removal_hook (&remove_node_data
, NULL
);
1199 node_duplication_hook_holder
=
1200 symtab
->add_cgraph_duplication_hook (&duplicate_node_data
, NULL
);
1201 function_insertion_hook_holder
=
1202 symtab
->add_cgraph_insertion_hook (&add_new_function
, NULL
);
1206 /* Analyze each function in the cgraph to see if it is locally PURE or
1210 pure_const_generate_summary (void)
1212 struct cgraph_node
*node
;
1214 pass_ipa_pure_const
*pass
= static_cast <pass_ipa_pure_const
*> (current_pass
);
1215 pass
->register_hooks ();
1217 /* Process all of the functions.
1219 We process AVAIL_INTERPOSABLE functions. We can not use the results
1220 by default, but the info can be used at LTO with -fwhole-program or
1221 when function got cloned and the clone is AVAILABLE. */
1223 FOR_EACH_DEFINED_FUNCTION (node
)
1224 if (opt_for_fn (node
->decl
, flag_ipa_pure_const
))
1225 set_function_state (node
, analyze_function (node
, true));
/* Serialize the ipa info for lto.  Emits, for every defined function with a
   summary in this partition, the encoder reference followed by a bitpack of
   the funct_state fields.  The read side must unpack in the same order.  */

static void
pure_const_write_summary (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  /* First pass just counts the records so the reader knows how many
     to expect.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && has_function_state (node))
	count++;
    }

  streamer_write_uhwi_stream (ob->main_stream, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && has_function_state (node))
	{
	  struct bitpack_d bp;
	  funct_state fs;
	  int node_ref;
	  lto_symtab_encoder_t encoder;

	  fs = get_function_state (node);

	  encoder = ob->decl_state->symtab_node_encoder;
	  node_ref = lto_symtab_encoder_encode (encoder, node);
	  streamer_write_uhwi_stream (ob->main_stream, node_ref);

	  /* Note that flags will need to be read in the opposite
	     order as we are pushing the bitflags into FLAGS.  */
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, fs->pure_const_state, 2);
	  bp_pack_value (&bp, fs->state_previously_known, 2);
	  bp_pack_value (&bp, fs->looping_previously_known, 1);
	  bp_pack_value (&bp, fs->looping, 1);
	  bp_pack_value (&bp, fs->can_throw, 1);
	  bp_pack_value (&bp, fs->can_free, 1);
	  bp_pack_value (&bp, fs->malloc_state, 2);
	  streamer_write_bitpack (&bp);
	}
    }

  lto_destroy_simple_output_block (ob);
}
/* Deserialize the ipa info for lto.  Mirror image of
   pure_const_write_summary: for each file's section, read the record count,
   then for each record the node reference and the bitpacked funct_state
   fields, unpacked in the reverse of the order they were packed.  */

static void
pure_const_read_summary (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib
	= lto_create_simple_input_block (file_data,
					 LTO_section_ipa_pure_const,
					 &data, &len);
      if (ib)
	{
	  unsigned int i;
	  unsigned int count = streamer_read_uhwi (ib);

	  for (i = 0; i < count; i++)
	    {
	      unsigned int index;
	      struct cgraph_node *node;
	      struct bitpack_d bp;
	      funct_state fs;
	      lto_symtab_encoder_t encoder;

	      fs = XCNEW (struct funct_state_d);
	      index = streamer_read_uhwi (ib);
	      encoder = file_data->symtab_node_encoder;
	      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
									index));
	      set_function_state (node, fs);

	      /* Note that the flags must be read in the opposite
		 order in which they were written (the bitflags were
		 pushed into FLAGS).  */
	      bp = streamer_read_bitpack (ib);
	      fs->pure_const_state
			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->state_previously_known
			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->looping_previously_known = bp_unpack_value (&bp, 1);
	      fs->looping = bp_unpack_value (&bp, 1);
	      fs->can_throw = bp_unpack_value (&bp, 1);
	      fs->can_free = bp_unpack_value (&bp, 1);
	      fs->malloc_state
			= (enum malloc_state_e) bp_unpack_value (&bp, 2);

	      if (dump_file)
		{
		  int flags = flags_from_decl_or_type (node->decl);
		  fprintf (dump_file, "Read info for %s ", node->dump_name ());
		  if (flags & ECF_CONST)
		    fprintf (dump_file, " const");
		  if (flags & ECF_PURE)
		    fprintf (dump_file, " pure");
		  if (flags & ECF_NOTHROW)
		    fprintf (dump_file, " nothrow");
		  fprintf (dump_file, "\n  pure const state: %s\n",
			   pure_const_names[fs->pure_const_state]);
		  fprintf (dump_file, "  previously known state: %s\n",
			   pure_const_names[fs->state_previously_known]);
		  if (fs->looping)
		    fprintf (dump_file," function is locally looping\n");
		  if (fs->looping_previously_known)
		    fprintf (dump_file," function is previously known looping\n");
		  if (fs->can_throw)
		    fprintf (dump_file," function is locally throwing\n");
		  if (fs->can_free)
		    fprintf (dump_file," function can locally free\n");
		  fprintf (dump_file, "\n malloc state: %s\n",
			   malloc_state_names[fs->malloc_state]);
		}
	    }

	  lto_destroy_simple_input_block (file_data,
					  LTO_section_ipa_pure_const,
					  ib, data, len);
	}
    }
}
/* We only propagate across edges that can throw externally and their callee
   is not interposable.  Returns true when edge E should be ignored by the
   nothrow propagation (i.e. it cannot affect the caller's nothrow-ness).  */

static bool
ignore_edge_for_nothrow (struct cgraph_edge *e)
{
  /* Calls that cannot throw to the caller, or whose callee decl is already
     known nothrow, never contribute can_throw.  */
  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
    return true;

  enum availability avail;
  cgraph_node *n = e->callee->function_or_virtual_thunk_symbol (&avail,
								e->caller);
  if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (n->decl))
    return true;
  /* With non-call exceptions a semantically equivalent replacement body may
     still throw, so only trust the callee if it binds to the current def.  */
  return opt_for_fn (e->callee->decl, flag_non_call_exceptions)
	 && !e->callee->binds_to_current_def_p (e->caller);
}
1396 /* Return true if NODE is self recursive function.
1397 Indirectly recursive functions appears as non-trivial strongly
1398 connected components, so we need to care about self recursion
1402 self_recursive_p (struct cgraph_node
*node
)
1404 struct cgraph_edge
*e
;
1405 for (e
= node
->callees
; e
; e
= e
->next_callee
)
1406 if (e
->callee
->function_symbol () == node
)
1411 /* Return true if N is cdtor that is not const or pure. In this case we may
1412 need to remove unreachable function if it is marked const/pure. */
1415 cdtor_p (cgraph_node
*n
, void *)
1417 if (DECL_STATIC_CONSTRUCTOR (n
->decl
) || DECL_STATIC_DESTRUCTOR (n
->decl
))
1418 return ((!TREE_READONLY (n
->decl
) && !DECL_PURE_P (n
->decl
))
1419 || DECL_LOOPING_CONST_OR_PURE_P (n
->decl
));
/* We only propagate across edges with non-interposable callee.
   Returns true when edge E must be ignored by pure/const propagation
   because the callee could be replaced at link/run time.  */

static bool
ignore_edge_for_pure_const (struct cgraph_edge *e)
{
  enum availability avail;
  e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
  return (avail <= AVAIL_INTERPOSABLE);
}
/* Produce transitive closure over the callgraph and compute pure/const
   attributes.  Works on strongly connected components of the reduced
   callgraph (cycles collapsed), walking from leaves to roots; all nodes in
   one cycle end up with the same state.  Returns true if some sdtor/ctor
   was turned const/pure and unreachable-function removal may be needed.  */

static bool
propagate_pure_const (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;
  bool remove_p = false;
  bool has_cdtor;

  order_pos = ipa_reduced_postorder (order, true, false,
				     ignore_edge_for_pure_const);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      enum pure_const_state_e pure_const_state = IPA_CONST;
      bool looping = false;
      int count = 0;
      node = order[i];

      if (node->alias)
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting cycle\n");

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && pure_const_state != IPA_NEITHER)
	{
	  struct cgraph_edge *e;
	  struct cgraph_edge *ie;
	  int i;
	  struct ipa_ref *ref = NULL;

	  funct_state w_l = get_function_state (w);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "  Visiting %s state:%s looping %i\n",
		     w->dump_name (),
		     pure_const_names[w_l->pure_const_state],
		     w_l->looping);

	  /* First merge in function body properties.
	     We are safe to pass NULL as FROM and TO because we will take care
	     of possible interposition when walking callees.  */
	  worse_state (&pure_const_state, &looping,
		       w_l->pure_const_state, w_l->looping,
		       NULL, NULL);
	  if (pure_const_state == IPA_NEITHER)
	    break;

	  count++;

	  /* We consider recursive cycles as possibly infinite.
	     This might be relaxed since infinite recursion leads to stack
	     overflow.  */
	  if (count > 1)
	    looping = true;

	  /* Now walk the edges and merge in callee properties.  */
	  for (e = w->callees; e && pure_const_state != IPA_NEITHER;
	       e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				function_or_virtual_thunk_symbol (&avail,
								  e->caller);
	      enum pure_const_state_e edge_state = IPA_CONST;
	      bool edge_looping = false;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "    Call to %s",
			   e->callee->dump_name ());
		}
	      if (avail > AVAIL_INTERPOSABLE)
		{
		  funct_state y_l = get_function_state (y);
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file,
			       " state:%s looping:%i\n",
			       pure_const_names[y_l->pure_const_state],
			       y_l->looping);
		    }
		  if (y_l->pure_const_state > IPA_PURE
		      && e->cannot_lead_to_return_p ())
		    {
		      /* A call that never returns cannot make the caller's
			 side effects observable.  */
		      if (dump_file && (dump_flags & TDF_DETAILS))
			fprintf (dump_file,
				 "        Ignoring side effects"
				 " -> pure, looping\n");
		      edge_state = IPA_PURE;
		      edge_looping = true;
		    }
		  else
		    {
		      edge_state = y_l->pure_const_state;
		      edge_looping = y_l->looping;
		    }
		}
	      else if (special_builtin_state (&edge_state, &edge_looping,
					      y->decl))
		;
	      else
		state_from_flags (&edge_state, &edge_looping,
				  flags_from_decl_or_type (y->decl),
				  e->cannot_lead_to_return_p ());

	      /* Merge the results with what we already know.  */
	      better_state (&edge_state, &edge_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   edge_state, edge_looping, e->caller, e->callee);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }

	  /* Now process the indirect call.  */
	  for (ie = w->indirect_calls;
	       ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
	    {
	      enum pure_const_state_e edge_state = IPA_CONST;
	      bool edge_looping = false;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "    Indirect call");
	      state_from_flags (&edge_state, &edge_looping,
				ie->indirect_info->ecf_flags,
				ie->cannot_lead_to_return_p ());
	      /* Merge the results with what we already know.  */
	      better_state (&edge_state, &edge_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   edge_state, edge_looping, NULL, NULL);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }

	  /* And finally all loads and stores.  */
	  for (i = 0; w->iterate_reference (i, ref)
	       && pure_const_state != IPA_NEITHER; i++)
	    {
	      enum pure_const_state_e ref_state = IPA_CONST;
	      bool ref_looping = false;
	      switch (ref->use)
		{
		case IPA_REF_LOAD:
		  /* readonly reads are safe.  */
		  if (TREE_READONLY (ref->referred->decl))
		    break;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "    nonreadonly global var read\n");
		  ref_state = IPA_PURE;
		  break;
		case IPA_REF_STORE:
		  if (ref->cannot_lead_to_return ())
		    break;
		  ref_state = IPA_NEITHER;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "    global var write\n");
		  break;
		case IPA_REF_ADDR:
		  break;
		default:
		  gcc_unreachable ();
		}
	      better_state (&ref_state, &ref_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   ref_state, ref_looping, NULL, NULL);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Result %s looping %i\n",
		 pure_const_names [pure_const_state],
		 looping);

      /* Find the worst state of can_free for any node in the cycle.  */
      bool can_free = false;
      w = node;
      while (w && !can_free)
	{
	  struct cgraph_edge *e;
	  funct_state w_l = get_function_state (w);

	  if (w_l->can_free
	      || w->get_availability () == AVAIL_INTERPOSABLE
	      || w->indirect_calls)
	    can_free = true;

	  for (e = w->callees; e && !can_free; e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				function_or_virtual_thunk_symbol (&avail,
								  e->caller);

	      if (avail > AVAIL_INTERPOSABLE)
		can_free = get_function_state (y)->can_free;
	      else
		can_free = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's pure_const_state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = get_function_state (w);
	  enum pure_const_state_e this_state = pure_const_state;
	  bool this_looping = looping;

	  w_l->can_free = can_free;
	  w->nonfreeing_fn = !can_free;
	  if (!can_free && dump_file)
	    fprintf (dump_file, "Function found not to call free: %s\n",
		     w->name ());

	  /* Clamp to any better state known from declaration flags.  */
	  if (w_l->state_previously_known != IPA_NEITHER
	      && this_state > w_l->state_previously_known)
	    {
	      this_state = w_l->state_previously_known;
	      if (this_state == IPA_NEITHER)
		this_looping = w_l->looping_previously_known;
	    }
	  if (!this_looping && self_recursive_p (w))
	    this_looping = true;
	  if (!w_l->looping_previously_known)
	    this_looping = false;

	  /* All nodes within a cycle share the same info.  */
	  w_l->pure_const_state = this_state;
	  w_l->looping = this_looping;

	  /* Inline clones share declaration with their offline copies;
	     do not modify their declarations since the offline copy may
	     be different.  */
	  if (!w->global.inlined_to)
	    switch (this_state)
	      {
	      case IPA_CONST:
		if (!TREE_READONLY (w->decl))
		  {
		    warn_function_const (w->decl, !this_looping);
		    if (dump_file)
		      fprintf (dump_file, "Function found to be %sconst: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		  }
		/* Turning constructor or destructor to non-looping const/pure
		   enables us to possibly remove the function completely.  */
		if (this_looping)
		  has_cdtor = false;
		else
		  has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
							      NULL, true);
		if (w->set_const_flag (true, this_looping))
		  {
		    if (dump_file)
		      fprintf (dump_file,
			       "Declaration updated to be %sconst: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		    remove_p |= has_cdtor;
		  }
		break;

	      case IPA_PURE:
		if (!DECL_PURE_P (w->decl))
		  {
		    warn_function_pure (w->decl, !this_looping);
		    if (dump_file)
		      fprintf (dump_file, "Function found to be %spure: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		  }
		if (this_looping)
		  has_cdtor = false;
		else
		  has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
							      NULL, true);
		if (w->set_pure_flag (true, this_looping))
		  {
		    if (dump_file)
		      fprintf (dump_file,
			       "Declaration updated to be %spure: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		    remove_p |= has_cdtor;
		  }
		break;

	      default:
		break;
	      }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
  return remove_p;
}
/* Produce transitive closure over the callgraph and compute nothrow
   attributes.  Like propagate_pure_const, operates on collapsed strongly
   connected components: first finds whether any node in the cycle can
   throw, then marks every node in the cycle accordingly.  */

static void
propagate_nothrow (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;

  order_pos = ipa_reduced_postorder (order, true, false,
				     ignore_edge_for_nothrow);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      bool can_throw = false;
      node = order[i];

      if (node->alias)
	continue;

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && !can_throw)
	{
	  struct cgraph_edge *e, *ie;

	  if (!TREE_NOTHROW (w->decl))
	    {
	      funct_state w_l = get_function_state (w);

	      if (w_l->can_throw
		  || w->get_availability () == AVAIL_INTERPOSABLE)
		can_throw = true;

	      for (e = w->callees; e && !can_throw; e = e->next_callee)
		{
		  enum availability avail;

		  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
		    continue;

		  struct cgraph_node *y = e->callee->
				   function_or_virtual_thunk_symbol (&avail,
								     e->caller);

		  /* We can use info about the callee only if we know it can
		     not be interposed.
		     When callee is compiled with non-call exceptions we also
		     must check that the declaration is bound to current
		     body as other semantically equivalent body may still
		     throw.  */
		  if (avail <= AVAIL_INTERPOSABLE
		      || (!TREE_NOTHROW (y->decl)
			  && (get_function_state (y)->can_throw
			      || (opt_for_fn (y->decl, flag_non_call_exceptions)
				  && !e->callee->binds_to_current_def_p (w)))))
		    can_throw = true;
		}
	      for (ie = w->indirect_calls; ie && !can_throw;
		   ie = ie->next_callee)
		if (ie->can_throw_external
		    && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
		  can_throw = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's pure_const_state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = get_function_state (w);
	  if (!can_throw && !TREE_NOTHROW (w->decl))
	    {
	      /* Inline clones share declaration with their offline copies;
		 do not modify their declarations since the offline copy may
		 be different.  */
	      if (!w->global.inlined_to)
		{
		  w->set_nothrow_flag (true);
		  if (dump_file)
		    fprintf (dump_file, "Function found to be nothrow: %s\n",
			     w->name ());
		}
	    }
	  else if (can_throw && !TREE_NOTHROW (w->decl))
	    w_l->can_throw = true;
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
}
/* Debugging function to dump state of malloc lattice.  S labels the dump
   (e.g. "Initial" / "after propagation").  No-op when dumping is off.  */

static void
dump_malloc_lattice (FILE *dump_file, const char *s)
{
  if (!dump_file)
    return;

  fprintf (dump_file, "\n\nMALLOC LATTICE %s:\n", s);
  cgraph_node *node;
  FOR_EACH_FUNCTION (node)
    {
      funct_state fs = get_function_state (node);
      malloc_state_e state = fs->malloc_state;
      fprintf (dump_file, "%s: %s\n", node->name (), malloc_state_names[state]);
    }
}
/* Propagate malloc attribute across the callgraph.  Seeds nodes already
   declared malloc, then iterates a meet over reverse postorder until the
   lattice (STATE_MALLOC_TOP > STATE_MALLOC > STATE_MALLOC_BOTTOM) reaches a
   fixed point, and finally sets the malloc flag on qualifying decls.  */

static void
propagate_malloc (void)
{
  cgraph_node *node;
  FOR_EACH_FUNCTION (node)
    {
      if (DECL_IS_MALLOC (node->decl))
	if (!has_function_state (node))
	  {
	    funct_state l = XCNEW (struct funct_state_d);
	    *l = varying_state;
	    l->malloc_state = STATE_MALLOC;
	    set_function_state (node, l);
	  }
    }

  dump_malloc_lattice (dump_file, "Initial");
  struct cgraph_node **order
    = XNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos = ipa_reverse_postorder (order);
  bool changed = true;

  while (changed)
    {
      changed = false;
      /* Walk in postorder.  */
      for (int i = order_pos - 1; i >= 0; --i)
	{
	  cgraph_node *node = order[i];
	  if (node->alias
	      || !node->definition
	      || !has_function_state (node))
	    continue;

	  funct_state l = get_function_state (node);

	  /* FIXME: add support for indirect-calls.  */
	  if (node->indirect_calls)
	    {
	      l->malloc_state = STATE_MALLOC_BOTTOM;
	      continue;
	    }

	  if (node->get_availability () <= AVAIL_INTERPOSABLE)
	    {
	      l->malloc_state = STATE_MALLOC_BOTTOM;
	      continue;
	    }

	  if (l->malloc_state == STATE_MALLOC_BOTTOM)
	    continue;

	  /* Only callees whose return value flows uncaptured to our return
	     matter for the malloc property.  */
	  vec<cgraph_node *> callees = vNULL;
	  for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
	    {
	      ipa_call_summary *es = ipa_call_summaries->get (cs);
	      if (es && es->is_return_callee_uncaptured)
		callees.safe_push (cs->callee);
	    }

	  malloc_state_e new_state = l->malloc_state;
	  for (unsigned j = 0; j < callees.length (); j++)
	    {
	      cgraph_node *callee = callees[j];
	      if (!has_function_state (callee))
		{
		  new_state = STATE_MALLOC_BOTTOM;
		  break;
		}
	      malloc_state_e callee_state = get_function_state (callee)->malloc_state;
	      if (new_state < callee_state)
		new_state = callee_state;
	    }
	  if (new_state != l->malloc_state)
	    {
	      changed = true;
	      l->malloc_state = new_state;
	    }
	}
    }

  FOR_EACH_DEFINED_FUNCTION (node)
    if (has_function_state (node))
      {
	funct_state l = get_function_state (node);
	if (!node->alias
	    && l->malloc_state == STATE_MALLOC
	    && !node->global.inlined_to)
	  {
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      fprintf (dump_file, "Function %s found to be malloc\n",
		       node->name ());

	    bool malloc_decl_p = DECL_IS_MALLOC (node->decl);
	    node->set_malloc_flag (true);
	    if (!malloc_decl_p && warn_suggest_attribute_malloc)
	      warn_function_malloc (node->decl);
	  }
      }

  dump_malloc_lattice (dump_file, "after propagation");
  ipa_free_postorder_info ();
  free (order);
}
/* Produce the global information by preforming a transitive closure
   on the local information that was produced by generate_summary.
   Runs the three propagators and then releases all per-node summaries.  */

unsigned int
pass_ipa_pure_const::
execute (function *)
{
  struct cgraph_node *node;
  bool remove_p;

  /* The callgraph is now stable; stop tracking mutations.  */
  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
  symtab->remove_cgraph_removal_hook (node_removal_hook_holder);

  /* Nothrow makes more function to not lead to return and improve
     later analysis.  */
  propagate_nothrow ();
  propagate_malloc ();
  remove_p = propagate_pure_const ();

  /* Cleanup.  */
  FOR_EACH_FUNCTION (node)
    if (has_function_state (node))
      free (get_function_state (node));
  funct_state_vec.release ();
  return remove_p ? TODO_remove_functions : 0;
}
/* Gate: run whenever -fipa-pure-const is enabled or we are in LTO mode
   (summaries must be streamed even if the flag differs per TU).  */

static bool
gate_pure_const (void)
{
  return flag_ipa_pure_const || in_lto_p;
}
/* Constructor: wire the summary generate/write/read callbacks into the IPA
   pass framework; hook holders start out unregistered.  */

pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
    : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
		     pure_const_generate_summary, /* generate_summary */
		     pure_const_write_summary, /* write_summary */
		     pure_const_read_summary, /* read_summary */
		     NULL, /* write_optimization_summary */
		     NULL, /* read_optimization_summary */
		     NULL, /* stmt_fixup */
		     0, /* function_transform_todo_flags_start */
		     NULL, /* function_transform */
		     NULL), /* variable_transform */
  init_p(false),
  function_insertion_hook_holder(NULL),
  node_duplication_hook_holder(NULL),
  node_removal_hook_holder(NULL)
{
}
/* Factory used by the pass manager to instantiate the IPA pure/const pass.  */

ipa_opt_pass_d *
make_pass_ipa_pure_const (gcc::context *ctxt)
{
  return new pass_ipa_pure_const (ctxt);
}
/* Return true if function should be skipped for local pure const analysis.  */

static bool
skip_function_for_local_pure_const (struct cgraph_node *node)
{
  /* Because we do not schedule pass_fixup_cfg over whole program after early
     optimizations we must not promote functions that are called by already
     processed functions.  */

  if (function_called_by_processed_nodes_p ())
    {
      if (dump_file)
	fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
      return true;
    }
  /* Save some work and do not analyze functions which are interposable and
     do not have any non-interposable aliases.  */
  if (node->get_availability () <= AVAIL_INTERPOSABLE
      && !node->has_aliases_p ())
    {
      if (dump_file)
	fprintf (dump_file,
		 "Function is interposable; not analyzing.\n");
      return true;
    }
  return false;
}
/* Simple local pass for pure const discovery reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   other optimization passes in early optimization pass queue.  */

const pass_data pass_data_local_pure_const =
{
  GIMPLE_PASS, /* type */
  "local-pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
/* GIMPLE pass wrapper running the local (per-function) pure/const
   discovery; clonable so it can appear several times in the pipeline.  */

class pass_local_pure_const : public gimple_opt_pass
{
public:
  pass_local_pure_const (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_local_pure_const, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
  virtual bool gate (function *) { return gate_pure_const (); }
  virtual unsigned int execute (function *);

}; // class pass_local_pure_const
/* Execute the local pure/const pass on FUN: analyze the current function,
   then update its decl flags (noreturn, const/pure, nothrow, malloc) and
   emit -Wsuggest-attribute warnings.  Even when SKIP is set we still
   analyze for warning purposes; we just avoid changing the declaration.  */

unsigned int
pass_local_pure_const::execute (function *fun)
{
  bool changed = false;
  funct_state l;
  bool skip;
  struct cgraph_node *node;

  node = cgraph_node::get (current_function_decl);
  skip = skip_function_for_local_pure_const (node);

  if (!warn_suggest_attribute_const
      && !warn_suggest_attribute_pure
      && skip)
    return 0;

  l = analyze_function (node, false);

  /* Do NORETURN discovery.  */
  if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
    {
      warn_function_noreturn (fun->decl);
      if (dump_file)
	fprintf (dump_file, "Function found to be noreturn: %s\n",
		 current_function_name ());

      /* Update declaration and reduce profile to executed once.  */
      TREE_THIS_VOLATILE (current_function_decl) = 1;
      if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;

      changed = true;
    }

  switch (l->pure_const_state)
    {
    case IPA_CONST:
      if (!TREE_READONLY (current_function_decl))
	{
	  warn_function_const (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      if (!skip && node->set_const_flag (true, l->looping))
	{
	  if (dump_file)
	    fprintf (dump_file, "Declaration updated to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	  changed = true;
	}
      break;

    case IPA_PURE:
      if (!DECL_PURE_P (current_function_decl))
	{
	  warn_function_pure (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      if (!skip && node->set_pure_flag (true, l->looping))
	{
	  if (dump_file)
	    fprintf (dump_file, "Declaration updated to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	  changed = true;
	}
      break;

    default:
      break;
    }
  if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
    {
      node->set_nothrow_flag (true);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be nothrow: %s\n",
		 current_function_name ());
    }

  if (l->malloc_state == STATE_MALLOC
      && !DECL_IS_MALLOC (current_function_decl))
    {
      node->set_malloc_flag (true);
      if (warn_suggest_attribute_malloc)
	warn_function_malloc (node->decl);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be malloc: %s\n",
		 node->name ());
    }

  free (l);
  if (changed)
    return execute_fixup_cfg ();
  else
    return 0;
}
/* Factory for the local pure/const GIMPLE pass.  */

gimple_opt_pass *
make_pass_local_pure_const (gcc::context *ctxt)
{
  return new pass_local_pure_const (ctxt);
}
/* Emit noreturn warnings.  */

const pass_data pass_data_warn_function_noreturn =
{
  GIMPLE_PASS, /* type */
  "*warn_function_noreturn", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
/* Warning-only pass: suggest the noreturn attribute for functions whose
   exit block is unreachable (no predecessors) and that are not already
   marked noreturn (TREE_THIS_VOLATILE).  */

class pass_warn_function_noreturn : public gimple_opt_pass
{
public:
  pass_warn_function_noreturn (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
  virtual unsigned int execute (function *fun)
    {
      if (!TREE_THIS_VOLATILE (current_function_decl)
	  && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
	warn_function_noreturn (current_function_decl);
      return 0;
    }

}; // class pass_warn_function_noreturn
/* Factory for the noreturn-warning pass.  */

gimple_opt_pass *
make_pass_warn_function_noreturn (gcc::context *ctxt)
{
  return new pass_warn_function_noreturn (ctxt);
}
/* Simple local pass for pure const discovery reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   other optimization passes in early optimization pass queue.  */

const pass_data pass_data_nothrow =
{
  GIMPLE_PASS, /* type */
  "nothrow", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
/* Early local pass discovering functions that cannot throw externally;
   gated simply on `optimize' since it runs before IPA information exists.  */

class pass_nothrow : public gimple_opt_pass
{
public:
  pass_nothrow (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_nothrow, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_nothrow (m_ctxt); }
  virtual bool gate (function *) { return optimize; }
  virtual unsigned int execute (function *);

}; // class pass_nothrow
/* Scan the current function; if no statement can throw externally (self-
   recursive calls excepted, since recursion into a nothrow function cannot
   throw), mark the function nothrow and purge now-dead EH edges created by
   recursive call sites.  */

unsigned int
pass_nothrow::execute (function *)
{
  struct cgraph_node *node;
  basic_block this_block;

  if (TREE_NOTHROW (current_function_decl))
    return 0;

  node = cgraph_node::get (current_function_decl);

  /* We run during lowering, we can not really use availability yet.  */
  if (cgraph_node::get (current_function_decl)->get_availability ()
      <= AVAIL_INTERPOSABLE)
    {
      if (dump_file)
	fprintf (dump_file, "Function is interposable;"
		 " not analyzing.\n");
      /* NOTE(review): returns true (i.e. 1) from a TODO-flags return slot;
	 presumably intended to be `return 0;' — confirm against upstream.  */
      return true;
    }

  FOR_EACH_BB_FN (this_block, cfun)
    {
      for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
        if (stmt_can_throw_external (gsi_stmt (gsi)))
	  {
	    /* Self recursive calls can not throw externally if the function
	       is itself nothrow, which is exactly what we are proving.  */
	    if (is_gimple_call (gsi_stmt (gsi)))
	      {
		tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
		if (callee_t && recursive_call_p (current_function_decl,
						  callee_t))
		  continue;
	      }

	    if (dump_file)
	      {
		fprintf (dump_file, "Statement can throw: ");
		print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
	      }
	    return 0;
	  }
    }

  node->set_nothrow_flag (true);

  bool cfg_changed = false;
  if (self_recursive_p (node))
    FOR_EACH_BB_FN (this_block, cfun)
      if (gimple *g = last_stmt (this_block))
	if (is_gimple_call (g))
	  {
	    tree callee_t = gimple_call_fndecl (g);
	    if (callee_t
		&& recursive_call_p (current_function_decl, callee_t)
		&& maybe_clean_eh_stmt (g)
		&& gimple_purge_dead_eh_edges (this_block))
	      cfg_changed = true;
	  }

  if (dump_file)
    fprintf (dump_file, "Function found to be nothrow: %s\n",
	     current_function_name ());
  return cfg_changed ? TODO_cleanup_cfg : 0;
}
2397 make_pass_nothrow (gcc::context
*ctxt
)
2399 return new pass_nothrow (ctxt
);