/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000-2015 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED   ->  the initial state of the value.  This value
			    is replaced with a correct initial value
			    the first time the value is used, so the
			    rest of the pass does not need to care about
			    it.  Using this value simplifies initialization
			    of the pass, and prevents us from needlessly
			    scanning statements that are never reached.

	UNDEFINED	->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT	->  V_i has been found to hold a constant
			    value C.

	VARYING		->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.

   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

			if (PRED)
			  a_9 = 3;
			else
			  a_10 = 100;
			a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.

   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.

   This algorithm uses wide-ints at the max precision of the target.
   This means that, with one uninteresting exception, variables with
   UNSIGNED types never go to VARYING because the bits above the
   precision of the type of the variable are always zero.  The
   uninteresting case is a variable of UNSIGNED type that has the
   maximum precision of the target.  Such variables can go to VARYING,
   but this causes no loss of information since these variables will
   never be extended.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
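/* As an illustrative example of the scheme above (not taken from the
   references), a fragment such as

	x_1 = 4;
	y_2 = x_1 + 2;
	if (y_2 > 10)
	  z_3 = 1;

   is simulated as follows: x_1 becomes CONSTANT 4, which drives y_2 to
   CONSTANT 6, which lets the predicate fold to false; the edge into
   z_3 = 1 is then marked not executable and that statement is never
   simulated.  */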
#include "coretypes.h"
#include "hash-set.h"
#include "machmode.h"
#include "double-int.h"
#include "wide-int.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "hash-table.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "diagnostic-core.h"
#include "wide-int-print.h"
#include "builtins.h"
#include "tree-chkp.h"
/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

struct ccp_prop_value_t {
    /* Lattice value.  */
    ccp_lattice_t lattice_val;

    /* Propagated value.  */
    tree value;

    /* Mask that applies to the propagated value during CCP.  For X
       with a CONSTANT lattice value X & ~mask == value & ~mask.  The
       zero bits in the mask cover constant values.  The ones mean no
       information.  */
    widest_int mask;
};
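/* For illustration: a pair of value 0x10 and mask 0x0f describes any X
   with X & ~0x0f == 0x10, i.e. some value in [0x10, 0x1f] -- the high
   bits are known, the low four bits are not.  A mask of zero means the
   value is fully known.  */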
/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static ccp_prop_value_t *const_val;
static unsigned n_const_val;
static void canonicalize_value (ccp_prop_value_t *);
static bool ccp_fold_stmt (gimple_stmt_iterator *);
/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, ccp_prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      if (TREE_CODE (val.value) != INTEGER_CST
	  || val.mask == 0)
	{
	  fprintf (outf, "%sCONSTANT ", prefix);
	  print_generic_expr (outf, val.value, dump_flags);
	}
      else
	{
	  widest_int cval = wi::bit_and_not (wi::to_widest (val.value),
					     val.mask);
	  fprintf (outf, "%sCONSTANT ", prefix);
	  print_hex (cval, outf);
	  fprintf (outf, " (");
	  print_hex (val.mask, outf);
	  fprintf (outf, ")");
	}
      break;
    default:
      gcc_unreachable ();
    }
}
/* Print lattice value VAL to stderr.  */

void debug_lattice_value (ccp_prop_value_t val);

DEBUG_FUNCTION void
debug_lattice_value (ccp_prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}
/* Extend NONZERO_BITS to a full mask, with the upper bits being set.  */

static widest_int
extend_mask (const wide_int &nonzero_bits)
{
  return (wi::mask <widest_int> (wi::get_precision (nonzero_bits), true)
	  | widest_int::from (nonzero_bits, UNSIGNED));
}
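/* For illustration: if NONZERO_BITS is the 8-bit value 0x0f, the
   returned mask is ...fff0f -- the low four bits may vary, bits 4..7
   are known to be zero, and everything above the original precision is
   treated as unknown.  */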
/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */
static ccp_prop_value_t
get_default_value (tree var)
{
  ccp_prop_value_t val = { UNINITIALIZED, NULL_TREE, 0 };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
	 before being initialized.  If VAR is a local variable, we
	 can assume initially that it is UNDEFINED, otherwise we must
	 consider it VARYING.  */
      if (!virtual_operand_p (var)
	  && TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
	val.lattice_val = UNDEFINED;
      else
	{
	  val.lattice_val = VARYING;
	  val.mask = -1;
	  if (flag_tree_bit_ccp)
	    {
	      wide_int nonzero_bits = get_nonzero_bits (var);
	      if (nonzero_bits != -1)
		{
		  val.lattice_val = CONSTANT;
		  val.value = build_zero_cst (TREE_TYPE (var));
		  val.mask = extend_mask (nonzero_bits);
		}
	    }
	}
    }
  else if (is_gimple_assign (stmt))
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
	  && DECL_P (gimple_assign_rhs1 (stmt))
	  && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
	{
	  val.lattice_val = CONSTANT;
	  val.value = cst;
	}
      else
	{
	  /* Any other variable defined by an assignment is considered
	     UNDEFINED.  */
	  val.lattice_val = UNDEFINED;
	}
    }
  else if ((is_gimple_call (stmt)
	    && gimple_call_lhs (stmt) != NULL_TREE)
	   || gimple_code (stmt) == GIMPLE_PHI)
    {
      /* A variable defined by a call or a PHI node is considered
	 UNDEFINED.  */
      val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
      val.mask = -1;
    }

  return val;
}
/* Get the constant value associated with variable VAR.  */

static inline ccp_prop_value_t *
get_value (tree var)
{
  ccp_prop_value_t *val;

  if (const_val == NULL
      || SSA_NAME_VERSION (var) >= n_const_val)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_value (val);

  return val;
}
/* Return the constant tree value associated with VAR.  */

static inline tree
get_constant_value (tree var)
{
  ccp_prop_value_t *val;
  if (TREE_CODE (var) != SSA_NAME)
    {
      if (is_gimple_min_invariant (var))
	return var;
      return NULL_TREE;
    }
  val = get_value (var);
  if (val
      && val->lattice_val == CONSTANT
      && (TREE_CODE (val->value) != INTEGER_CST
	  || val->mask == 0))
    return val->value;
  return NULL_TREE;
}
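/* Note that this only hands back a tree for fully known values: an
   INTEGER_CST with a non-zero mask (e.g. value 0x10 with mask 0x0f,
   i.e. "some value in [0x10, 0x1f]") is deliberately not returned,
   since substituting it would lose the unknown low bits.  */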
/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  ccp_prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mask = -1;
}
/* For integer constants, make sure to drop TREE_OVERFLOW.  */

static void
canonicalize_value (ccp_prop_value_t *val)
{
  if (val->lattice_val != CONSTANT)
    return;

  if (TREE_OVERFLOW_P (val->value))
    val->value = drop_tree_overflow (val->value);
}
/* Return whether the lattice transition is valid.  */

static bool
valid_lattice_transition (ccp_prop_value_t old_val, ccp_prop_value_t new_val)
{
  /* Lattice transitions must always be monotonically increasing in
     value.  */
  if (old_val.lattice_val < new_val.lattice_val)
    return true;

  if (old_val.lattice_val != new_val.lattice_val)
    return false;

  if (!old_val.value && !new_val.value)
    return true;

  /* Now both lattice values are CONSTANT.  */

  /* Allow arbitrary copy changes as we might look through PHI <a_1, ...>
     when only a single copy edge is executable.  */
  if (TREE_CODE (old_val.value) == SSA_NAME
      && TREE_CODE (new_val.value) == SSA_NAME)
    return true;

  /* Allow transitioning from a constant to a copy.  */
  if (is_gimple_min_invariant (old_val.value)
      && TREE_CODE (new_val.value) == SSA_NAME)
    return true;

  /* Allow transitioning from PHI <&x, not executable> == &x
     to PHI <&x, &y> == common alignment.  */
  if (TREE_CODE (old_val.value) != INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return true;

  /* Bit-lattices have to agree in the still valid bits.  */
  if (TREE_CODE (old_val.value) == INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return (wi::bit_and_not (wi::to_widest (old_val.value), new_val.mask)
	    == wi::bit_and_not (wi::to_widest (new_val.value), new_val.mask));

  /* Otherwise constant values have to agree.  */
  if (operand_equal_p (old_val.value, new_val.value, 0))
    return true;

  /* At least the kinds and types should agree now.  */
  if (TREE_CODE (old_val.value) != TREE_CODE (new_val.value)
      || !types_compatible_p (TREE_TYPE (old_val.value),
			      TREE_TYPE (new_val.value)))
    return false;

  /* For floats and !HONOR_NANS allow transitions from (partial) NaN
     to non-NaN.  */
  tree type = TREE_TYPE (new_val.value);
  if (SCALAR_FLOAT_TYPE_P (type)
      && !HONOR_NANS (type))
    {
      if (REAL_VALUE_ISNAN (TREE_REAL_CST (old_val.value)))
	return true;
    }
  else if (VECTOR_FLOAT_TYPE_P (type)
	   && !HONOR_NANS (type))
    {
      for (unsigned i = 0; i < VECTOR_CST_NELTS (old_val.value); ++i)
	if (!REAL_VALUE_ISNAN
	       (TREE_REAL_CST (VECTOR_CST_ELT (old_val.value, i)))
	    && !operand_equal_p (VECTOR_CST_ELT (old_val.value, i),
				 VECTOR_CST_ELT (new_val.value, i), 0))
	  return false;
      return true;
    }
  else if (COMPLEX_FLOAT_TYPE_P (type)
	   && !HONOR_NANS (type))
    {
      if (!REAL_VALUE_ISNAN (TREE_REAL_CST (TREE_REALPART (old_val.value)))
	  && !operand_equal_p (TREE_REALPART (old_val.value),
			       TREE_REALPART (new_val.value), 0))
	return false;
      if (!REAL_VALUE_ISNAN (TREE_REAL_CST (TREE_IMAGPART (old_val.value)))
	  && !operand_equal_p (TREE_IMAGPART (old_val.value),
			       TREE_IMAGPART (new_val.value), 0))
	return false;
      return true;
    }
  return false;
}
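/* For example, UNDEFINED -> CONSTANT 4 -> VARYING is a valid sequence
   of transitions (strictly increasing lattice values), and CONSTANT
   value 0x10/mask 0x0f may become value 0x13/mask 0x0f because the
   known bits (0x10 under ~mask) agree, but CONSTANT 4 must never
   become CONSTANT 5.  */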
/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, ccp_prop_value_t new_val)
{
  /* We can deal with old UNINITIALIZED values just fine here.  */
  ccp_prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];

  canonicalize_value (&new_val);

  /* We have to be careful to not go up the bitwise lattice
     represented by the mask.
     ??? This doesn't seem to be the best place to enforce this.  */
  if (new_val.lattice_val == CONSTANT
      && old_val->lattice_val == CONSTANT
      && TREE_CODE (new_val.value) == INTEGER_CST
      && TREE_CODE (old_val->value) == INTEGER_CST)
    {
      widest_int diff = (wi::to_widest (new_val.value)
			 ^ wi::to_widest (old_val->value));
      new_val.mask = new_val.mask | old_val->mask | diff;
    }

  gcc_checking_assert (valid_lattice_transition (*old_val, new_val));

  /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
     caller that this was a non-transition.  */
  if (old_val->lattice_val != new_val.lattice_val
      || (new_val.lattice_val == CONSTANT
	  && (TREE_CODE (new_val.value) != TREE_CODE (old_val->value)
	      || simple_cst_equal (new_val.value, old_val->value) != 1
	      || (TREE_CODE (new_val.value) == INTEGER_CST
		  && new_val.mask != old_val->mask))))
    {
      /* ??? We would like to delay creation of INTEGER_CSTs from
	 partially constants here.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNINITIALIZED);
      return true;
    }

  return false;
}
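/* Worked example of the mask widening above: if the previous value was
   0x11 with mask 0x02 and the new value is 0x10 with mask 0x00, the
   values differ in bit 0, so the stored mask becomes
   0x00 | 0x02 | 0x01 = 0x03 and only the bits above the low two remain
   known.  */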
static ccp_prop_value_t get_value_for_expr (tree, bool);
static ccp_prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
static void bit_value_binop_1 (enum tree_code, tree, widest_int *, widest_int *,
			       tree, const widest_int &, const widest_int &,
			       tree, const widest_int &, const widest_int &);
/* Return a widest_int that can be used for bitwise simplifications
   from VAL.  */

static widest_int
value_to_wide_int (ccp_prop_value_t val)
{
  if (val.value
      && TREE_CODE (val.value) == INTEGER_CST)
    return wi::to_widest (val.value);

  return 0;
}
/* Return the value for the address expression EXPR based on alignment
   information.  */

static ccp_prop_value_t
get_value_from_alignment (tree expr)
{
  tree type = TREE_TYPE (expr);
  ccp_prop_value_t val;
  unsigned HOST_WIDE_INT bitpos;
  unsigned int align;

  gcc_assert (TREE_CODE (expr) == ADDR_EXPR);

  get_pointer_alignment_1 (expr, &align, &bitpos);
  val.mask = (POINTER_TYPE_P (type) || TYPE_UNSIGNED (type)
	      ? wi::mask <widest_int> (TYPE_PRECISION (type), false)
	      : -1).and_not (align / BITS_PER_UNIT - 1);
  val.lattice_val
    = wi::sext (val.mask, TYPE_PRECISION (type)) == -1 ? VARYING : CONSTANT;
  if (val.lattice_val == CONSTANT)
    val.value = build_int_cstu (type, bitpos / BITS_PER_UNIT);
  else
    val.value = NULL_TREE;

  return val;
}
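/* For illustration: for an ADDR_EXPR known to be 16-byte aligned with a
   byte offset of 4 within that alignment, this returns CONSTANT with
   value 4 and a mask whose low four bits are clear -- only the trailing
   alignment bits of the address are known, and they are 0100.  */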
/* Return the value for the tree operand EXPR.  If FOR_BITS_P is true
   return constant bits extracted from alignment information for
   invariant addresses.  */

static ccp_prop_value_t
get_value_for_expr (tree expr, bool for_bits_p)
{
  ccp_prop_value_t val;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      val = *get_value (expr);
      if (for_bits_p
	  && val.lattice_val == CONSTANT
	  && TREE_CODE (val.value) == ADDR_EXPR)
	val = get_value_from_alignment (val.value);
    }
  else if (is_gimple_min_invariant (expr)
	   && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
    {
      val.lattice_val = CONSTANT;
      val.value = expr;
      val.mask = 0;
      canonicalize_value (&val);
    }
  else if (TREE_CODE (expr) == ADDR_EXPR)
    val = get_value_from_alignment (expr);
  else
    {
      val.lattice_val = VARYING;
      val.mask = -1;
      val.value = NULL_TREE;
    }
  return val;
}
/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT cause its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  bool has_nsa_operand;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
	      || code == GIMPLE_CALL
	      || code == GIMPLE_COND
	      || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  has_nsa_operand = false;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      ccp_prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;

      if (SSA_NAME_IS_DEFAULT_DEF (use)
	  || !prop_simulate_again_p (SSA_NAME_DEF_STMT (use)))
	has_nsa_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
	continue;
      if (is_gimple_min_invariant (op))
	has_constant_operand = true;
    }

  if (has_constant_operand)
    all_undefined_operands = false;

  if (has_undefined_operand
      && code == GIMPLE_CALL
      && gimple_call_internal_p (stmt))
    switch (gimple_call_internal_fn (stmt))
      {
	/* These 3 builtins use the first argument just as a magic
	   way how to find out a decl uid.  */
      case IFN_GOMP_SIMD_LANE:
      case IFN_GOMP_SIMD_VF:
      case IFN_GOMP_SIMD_LAST_LANE:
	has_undefined_operand = false;
	break;
      default:
	break;
      }

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	case ADDR_EXPR:
	  /* If any part of an address is UNDEFINED, like the index
	     of an ARRAY_EXPR, then treat the result as UNDEFINED.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to CONSTANT.  During iteration UNDEFINED may still drop
     to CONSTANT.  */
  if (has_undefined_operand)
    return CONSTANT;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  Also we can combine the stmt with definitions from
     operands whose definitions are not simulated again.  */
  if (has_constant_operand
      || has_nsa_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}
/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call and does not return a value or is not a
     builtin and not an indirect call or a call to function with
     assume_aligned/alloc_align attribute, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl, fntype = gimple_call_fntype (stmt);
      if (!gimple_call_lhs (stmt)
	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
	      && !DECL_BUILT_IN (fndecl)
	      && !lookup_attribute ("assume_aligned",
				    TYPE_ATTRIBUTES (fntype))
	      && !lookup_attribute ("alloc_align",
				    TYPE_ATTRIBUTES (fntype))))
	return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}
/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  n_const_val = num_ssa_names;
  const_val = XCNEWVEC (ccp_prop_value_t, n_const_val);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  bool is_varying;

	  /* If the statement is a control insn, then we do not
	     want to avoid simulating the statement once.  Failure
	     to do so means that those edges will never get added.  */
	  if (stmt_ends_bb_p (stmt))
	    is_varying = false;
	  else
	    is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }
	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gphi_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gphi *phi = i.phi ();

	  if (virtual_operand_p (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }
}
/* Debug count support.  Reset the values of ssa names
   VARYING when the total number ssa names analyzed is
   beyond the debug count specified.  */

static void
do_dbg_cnt (void)
{
  unsigned i;
  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
	{
	  const_val[i].lattice_val = VARYING;
	  const_val[i].mask = -1;
	  const_val[i].value = NULL_TREE;
	}
    }
}
/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;
  unsigned i;

  do_dbg_cnt ();

  /* Derive alignment and misalignment information from partially
     constant pointers in the lattice or nonzero bits from partially
     constant integers.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      ccp_prop_value_t *val;
      unsigned int tem, align;

      if (!name
	  || (!POINTER_TYPE_P (TREE_TYPE (name))
	      && (!INTEGRAL_TYPE_P (TREE_TYPE (name))
		  /* Don't record nonzero bits before IPA to avoid
		     using too much memory.  */
		  || first_pass_instance)))
	continue;

      val = get_value (name);
      if (val->lattice_val != CONSTANT
	  || TREE_CODE (val->value) != INTEGER_CST)
	continue;

      if (POINTER_TYPE_P (TREE_TYPE (name)))
	{
	  /* Trailing mask bits specify the alignment, trailing value
	     bits the misalignment.  */
	  tem = val->mask.to_uhwi ();
	  align = (tem & -tem);
	  if (align > 1)
	    set_ptr_info_alignment (get_ptr_info (name), align,
				    (TREE_INT_CST_LOW (val->value)
				     & (align - 1)));
	}
      else
	{
	  unsigned int precision = TYPE_PRECISION (TREE_TYPE (val->value));
	  wide_int nonzero_bits = wide_int::from (val->mask, precision,
						  UNSIGNED) | val->value;
	  nonzero_bits &= get_nonzero_bits (name);
	  set_nonzero_bits (name, nonzero_bits);
	}
    }

  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (get_constant_value,
					   ccp_fold_stmt, true);

  free (const_val);
  const_val = NULL;
  return something_changed;
}
/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj	   = Ci		if (i == j)
		Ci   M Cj	   = VARYING	if (i != j)
   */

static void
ccp_lattice_meet (basic_block where,
		  ccp_prop_value_t *val1, ccp_prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED
      /* For UNDEFINED M SSA we can't always SSA because its definition
	 may not dominate the PHI node.  Doing optimistic copy propagation
	 also causes a lot of gcc.dg/uninit-pred*.c FAILs.  */
      && (val2->lattice_val != CONSTANT
	  || TREE_CODE (val2->value) != SSA_NAME))
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED
	   /* See above.  */
	   && (val1->lattice_val != CONSTANT
	       || TREE_CODE (val1->value) != SSA_NAME))
    {
      /* any M UNDEFINED = any
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = -1;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && TREE_CODE (val1->value) == INTEGER_CST
	   && TREE_CODE (val2->value) == INTEGER_CST)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 For INTEGER_CSTs mask unequal bits.  If no equal bits remain,
	 drop to varying.  */
      val1->mask = (val1->mask | val2->mask
		    | (wi::to_widest (val1->value)
		       ^ wi::to_widest (val2->value)));
      if (wi::sext (val1->mask, TYPE_PRECISION (TREE_TYPE (val1->value))) == -1)
	{
	  val1->lattice_val = VARYING;
	  val1->value = NULL_TREE;
	}
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 VAL1 already contains the value we want for equivalent values.  */
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && (TREE_CODE (val1->value) == ADDR_EXPR
	       || TREE_CODE (val2->value) == ADDR_EXPR))
    {
      /* When not equal addresses are involved try meeting for
	 alignment.  */
      ccp_prop_value_t tem = *val2;
      if (TREE_CODE (val1->value) == ADDR_EXPR)
	*val1 = get_value_for_expr (val1->value, true);
      if (TREE_CODE (val2->value) == ADDR_EXPR)
	tem = get_value_for_expr (val2->value, true);
      ccp_lattice_meet (where, val1, &tem);
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = -1;
      val1->value = NULL_TREE;
    }
}
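/* Meet example: CONSTANT 0x3 meet CONSTANT 0x1 differ only in bit 1, so
   instead of dropping straight to VARYING the result keeps the value
   with mask 0x2 -- recording that bit zero is known to be 1 -- and only
   goes VARYING when the widened mask covers the whole precision.  */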
/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gphi *phi)
{
  unsigned i;
  ccp_prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      break;

    case UNDEFINED:
      break;

    default:
      gcc_unreachable ();
    }

  new_val.lattice_val = UNDEFINED;
  new_val.value = NULL_TREE;
  new_val.mask = 0;

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
	      "\n    Argument #%d (%d -> %d %sexecutable)\n",
	      i, e->src->index, e->dest->index,
	      (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  ccp_prop_value_t arg_val = get_value_for_expr (arg, false);

	  ccp_lattice_meet (gimple_bb (phi), &new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}
/* Return the constant value for OP or OP otherwise.  */

static tree
valueize_op (tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    {
      tree tem = get_constant_value (op);
      if (tem)
	return tem;
    }
  return op;
}
/* Return the constant value for OP, but signal to not follow SSA
   edges if the definition may be simulated again.  */

static tree
valueize_op_1 (tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    {
      /* If the definition may be simulated again we cannot follow
	 this SSA edge as the SSA propagator does not necessarily
	 re-visit the use.  */
      gimple def_stmt = SSA_NAME_DEF_STMT (op);
      if (!gimple_nop_p (def_stmt)
	  && prop_simulate_again_p (def_stmt))
	return NULL_TREE;
      tree tem = get_constant_value (op);
      if (tem)
	return tem;
    }
  return op;
}
/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	/* Handle comparison operators that can appear in GIMPLE form.  */
	tree op0 = valueize_op (gimple_cond_lhs (stmt));
	tree op1 = valueize_op (gimple_cond_rhs (stmt));
	enum tree_code code = gimple_cond_code (stmt);
	return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
	/* Return the constant switch index.  */
	return valueize_op (gimple_switch_index (as_a <gswitch *> (stmt)));
      }

    case GIMPLE_ASSIGN:
    case GIMPLE_CALL:
      return gimple_fold_stmt_to_constant_1 (stmt,
					     valueize_op, valueize_op_1);

    default:
      gcc_unreachable ();
    }
}
/* Apply the operation CODE in type TYPE to the value, mask pair
   RVAL and RMASK representing a value of type RTYPE and set
   the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_unop_1 (enum tree_code code, tree type,
		  widest_int *val, widest_int *mask,
		  tree rtype, const widest_int &rval, const widest_int &rmask)
{
  switch (code)
    {
    case BIT_NOT_EXPR:
      *mask = rmask;
      *val = ~rval;
      break;

    case NEGATE_EXPR:
      {
	widest_int temv, temm;
	/* Return ~rval + 1.  */
	bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   type, temv, temm, type, 1, 0);
	break;
      }

    CASE_CONVERT:
      {
	signop sgn;

	/* First extend mask and value according to the original type.  */
	sgn = TYPE_SIGN (rtype);
	*mask = wi::ext (rmask, TYPE_PRECISION (rtype), sgn);
	*val = wi::ext (rval, TYPE_PRECISION (rtype), sgn);

	/* Then extend mask and value according to the target type.  */
	sgn = TYPE_SIGN (type);
	*mask = wi::ext (*mask, TYPE_PRECISION (type), sgn);
	*val = wi::ext (*val, TYPE_PRECISION (type), sgn);
	break;
      }

    default:
      *mask = -1;
      break;
    }
}
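/* For example, BIT_NOT_EXPR simply complements the known bits and
   leaves the mask alone: (value 0b0101, mask 0b0010) becomes
   (value 0b1010, mask 0b0010) in the low four bits -- the bit that was
   unknown before the complement is still the only unknown bit
   afterwards.  */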
/* Apply the operation CODE in type TYPE to the value, mask pairs
   R1VAL, R1MASK and R2VAL, R2MASK representing values of type R1TYPE
   and R2TYPE and set the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_binop_1 (enum tree_code code, tree type,
		   widest_int *val, widest_int *mask,
		   tree r1type, const widest_int &r1val,
		   const widest_int &r1mask, tree r2type,
		   const widest_int &r2val, const widest_int &r2mask)
{
  signop sgn = TYPE_SIGN (type);
  int width = TYPE_PRECISION (type);
  bool swap_p = false;

  /* Assume we'll get a constant result.  Use an initial non varying
     value, we fall back to varying in the end if necessary.  */
  *mask = -1;

  switch (code)
    {
    case BIT_AND_EXPR:
      /* The mask is constant where there is a known not
	 set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2)) */
      *mask = (r1mask | r2mask) & (r1val | r1mask) & (r2val | r2mask);
      *val = r1val & r2val;
      break;

    case BIT_IOR_EXPR:
      /* The mask is constant where there is a known
	 set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)).  */
      *mask = (r1mask | r2mask)
	      .and_not (r1val.and_not (r1mask) | r2val.and_not (r2mask));
      *val = r1val | r2val;
      break;

    case BIT_XOR_EXPR:
      /* m1 | m2  */
      *mask = r1mask | r2mask;
      *val = r1val ^ r2val;
      break;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (r2mask == 0)
	{
	  widest_int shift = r2val;
	  if (shift == 0)
	    {
	      *mask = r1mask;
	      *val = r1val;
	    }
	  else
	    {
	      if (wi::neg_p (shift))
		{
		  shift = -shift;
		  if (code == RROTATE_EXPR)
		    code = LROTATE_EXPR;
		  else
		    code = RROTATE_EXPR;
		}
	      if (code == RROTATE_EXPR)
		{
		  *mask = wi::rrotate (r1mask, shift, width);
		  *val = wi::rrotate (r1val, shift, width);
		}
	      else
		{
		  *mask = wi::lrotate (r1mask, shift, width);
		  *val = wi::lrotate (r1val, shift, width);
		}
	    }
	}
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* ??? We can handle partially known shift counts if we know
	 its sign.  That way we can tell that (x << (y | 8)) & 255
	 is zero.  */
      if (r2mask == 0)
	{
	  widest_int shift = r2val;
	  if (shift == 0)
	    {
	      *mask = r1mask;
	      *val = r1val;
	    }
	  else
	    {
	      if (wi::neg_p (shift))
		{
		  shift = -shift;
		  if (code == RSHIFT_EXPR)
		    code = LSHIFT_EXPR;
		  else
		    code = RSHIFT_EXPR;
		}
	      if (code == RSHIFT_EXPR)
		{
		  *mask = wi::rshift (wi::ext (r1mask, width, sgn), shift, sgn);
		  *val = wi::rshift (wi::ext (r1val, width, sgn), shift, sgn);
		}
	      else
		{
		  *mask = wi::ext (wi::lshift (r1mask, shift), width, sgn);
		  *val = wi::ext (wi::lshift (r1val, shift), width, sgn);
		}
	    }
	}
      break;

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
      {
	/* Do the addition with unknown bits set to zero, to give carry-ins of
	   zero wherever possible.  */
	widest_int lo = r1val.and_not (r1mask) + r2val.and_not (r2mask);
	lo = wi::ext (lo, width, sgn);
	/* Do the addition with unknown bits set to one, to give carry-ins of
	   one wherever possible.  */
	widest_int hi = (r1val | r1mask) + (r2val | r2mask);
	hi = wi::ext (hi, width, sgn);
	/* Each bit in the result is known if (a) the corresponding bits in
	   both inputs are known, and (b) the carry-in to that bit position
	   is known.  We can check condition (b) by seeing if we got the same
	   result with minimised carries as with maximised carries.  */
	*mask = r1mask | r2mask | (lo ^ hi);
	*mask = wi::ext (*mask, width, sgn);
	/* It shouldn't matter whether we choose lo or hi here.  */
	*val = lo;
	break;
      }
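      /* Carry example: adding (value 0b1100, mask 0b0010) to the
	 constant 0b0001 gives lo = 0b1101 and hi = 0b1111, so
	 lo ^ hi = 0b0010 and no bits beyond the input mask become
	 unknown.  Had the unknown bit been able to generate a carry,
	 lo ^ hi would have flagged the affected higher bits as unknown
	 too.  */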
    case MINUS_EXPR:
      {
	widest_int temv, temm;
	bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm,
			  r2type, r2val, r2mask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   r1type, r1val, r1mask,
			   r2type, temv, temm);
	break;
      }

    case MULT_EXPR:
      {
	/* Just track trailing zeros in both operands and transfer
	   them to the other.  */
	int r1tz = wi::ctz (r1val | r1mask);
	int r2tz = wi::ctz (r2val | r2mask);
	if (r1tz + r2tz >= width)
	  {
	    *mask = 0;
	    *val = 0;
	  }
	else if (r1tz + r2tz > 0)
	  {
	    *mask = wi::ext (wi::mask <widest_int> (r1tz + r2tz, true),
			     width, sgn);
	    *val = 0;
	  }
	break;
      }

    case EQ_EXPR:
    case NE_EXPR:
      {
	widest_int m = r1mask | r2mask;
	if (r1val.and_not (m) != r2val.and_not (m))
	  {
	    *mask = 0;
	    *val = ((code == EQ_EXPR) ? 0 : 1);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = 1;
	    *val = 0;
	  }
	break;
      }

    case GE_EXPR:
    case GT_EXPR:
      swap_p = true;
      code = swap_tree_comparison (code);
      /* Fall through.  */
    case LT_EXPR:
    case LE_EXPR:
      {
	int minmax, maxmin;

	const widest_int &o1val = swap_p ? r2val : r1val;
	const widest_int &o1mask = swap_p ? r2mask : r1mask;
	const widest_int &o2val = swap_p ? r1val : r2val;
	const widest_int &o2mask = swap_p ? r1mask : r2mask;

	/* If the most significant bits are not known we know nothing.  */
	if (wi::neg_p (o1mask) || wi::neg_p (o2mask))
	  break;

	/* For comparisons the signedness is in the comparison operands.  */
	sgn = TYPE_SIGN (r1type);

	/* If we know the most significant bits we know the values
	   value ranges by means of treating varying bits as zero
	   or one.  Do a cross comparison of the max/min pairs.  */
	maxmin = wi::cmp (o1val | o1mask, o2val.and_not (o2mask), sgn);
	minmax = wi::cmp (o1val.and_not (o1mask), o2val | o2mask, sgn);
	if (maxmin < 0)  /* o1 is less than o2.  */
	  {
	    *mask = 0;
	    *val = 1;
	  }
	else if (minmax > 0)  /* o1 is not less or equal to o2.  */
	  {
	    *mask = 0;
	    *val = 0;
	  }
	else if (maxmin == minmax)  /* o1 and o2 are equal.  */
	  {
	    /* This probably should never happen as we'd have
	       folded the thing during fully constant value folding.  */
	    *mask = 0;
	    *val = (code == LE_EXPR ? 1 : 0);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = 1;
	    *val = 0;
	  }
	break;
      }

    default:;
    }
}
/* Return the propagation value when applying the operation CODE to
   the value RHS yielding type TYPE.  */

static ccp_prop_value_t
bit_value_unop (enum tree_code code, tree type, tree rhs)
{
  ccp_prop_value_t rval = get_value_for_expr (rhs, true);
  widest_int value, mask;
  ccp_prop_value_t val;

  if (rval.lattice_val == UNDEFINED)
    return rval;

  gcc_assert ((rval.lattice_val == CONSTANT
	       && TREE_CODE (rval.value) == INTEGER_CST)
	      || wi::sext (rval.mask, TYPE_PRECISION (TREE_TYPE (rhs))) == -1);
  bit_value_unop_1 (code, type, &value, &mask,
		    TREE_TYPE (rhs), value_to_wide_int (rval), rval.mask);
  if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = wide_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = -1;
    }
  return val;
}
/* Return the propagation value when applying the operation CODE to
   the values RHS1 and RHS2 yielding type TYPE.  */

static ccp_prop_value_t
bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
{
  ccp_prop_value_t r1val = get_value_for_expr (rhs1, true);
  ccp_prop_value_t r2val = get_value_for_expr (rhs2, true);
  widest_int value, mask;
  ccp_prop_value_t val;

  if (r1val.lattice_val == UNDEFINED
      || r2val.lattice_val == UNDEFINED)
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = -1;
      return val;
    }

  gcc_assert ((r1val.lattice_val == CONSTANT
	       && TREE_CODE (r1val.value) == INTEGER_CST)
	      || wi::sext (r1val.mask,
			   TYPE_PRECISION (TREE_TYPE (rhs1))) == -1);
  gcc_assert ((r2val.lattice_val == CONSTANT
	       && TREE_CODE (r2val.value) == INTEGER_CST)
	      || wi::sext (r2val.mask,
			   TYPE_PRECISION (TREE_TYPE (rhs2))) == -1);
  bit_value_binop_1 (code, type, &value, &mask,
		     TREE_TYPE (rhs1), value_to_wide_int (r1val), r1val.mask,
		     TREE_TYPE (rhs2), value_to_wide_int (r2val), r2val.mask);
  if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = wide_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = -1;
    }
  return val;
}
/* Return the propagation value for __builtin_assume_aligned
   and functions with assume_aligned or alloc_aligned attribute.
   For __builtin_assume_aligned, ATTR is NULL_TREE,
   for assume_aligned attribute ATTR is non-NULL and ALLOC_ALIGNED
   is false, for alloc_aligned attribute ATTR is non-NULL and
   ALLOC_ALIGNED is true.  */

static ccp_prop_value_t
bit_value_assume_aligned (gimple stmt, tree attr, ccp_prop_value_t ptrval,
			  bool alloc_aligned)
{
  tree align, misalign = NULL_TREE, type;
  unsigned HOST_WIDE_INT aligni, misaligni = 0;
  ccp_prop_value_t alignval;
  widest_int value, mask;
  ccp_prop_value_t val;

  if (attr == NULL_TREE)
    {
      tree ptr = gimple_call_arg (stmt, 0);
      type = TREE_TYPE (ptr);
      ptrval = get_value_for_expr (ptr, true);
    }
  else
    {
      tree lhs = gimple_call_lhs (stmt);
      type = TREE_TYPE (lhs);
    }

  if (ptrval.lattice_val == UNDEFINED)
    return ptrval;
  gcc_assert ((ptrval.lattice_val == CONSTANT
	       && TREE_CODE (ptrval.value) == INTEGER_CST)
	      || wi::sext (ptrval.mask, TYPE_PRECISION (type)) == -1);
  if (attr == NULL_TREE)
    {
      /* Get aligni and misaligni from __builtin_assume_aligned.  */
      align = gimple_call_arg (stmt, 1);
      if (!tree_fits_uhwi_p (align))
	return ptrval;
      aligni = tree_to_uhwi (align);
      if (gimple_call_num_args (stmt) > 2)
	{
	  misalign = gimple_call_arg (stmt, 2);
	  if (!tree_fits_uhwi_p (misalign))
	    return ptrval;
	  misaligni = tree_to_uhwi (misalign);
	}
    }
  else
    {
      /* Get aligni and misaligni from assume_aligned or
	 alloc_align attributes.  */
      if (TREE_VALUE (attr) == NULL_TREE)
	return ptrval;
      attr = TREE_VALUE (attr);
      align = TREE_VALUE (attr);
      if (!tree_fits_uhwi_p (align))
	return ptrval;
      aligni = tree_to_uhwi (align);
      if (alloc_aligned)
	{
	  if (aligni == 0 || aligni > gimple_call_num_args (stmt))
	    return ptrval;
	  align = gimple_call_arg (stmt, aligni - 1);
	  if (!tree_fits_uhwi_p (align))
	    return ptrval;
	  aligni = tree_to_uhwi (align);
	}
      else if (TREE_CHAIN (attr) && TREE_VALUE (TREE_CHAIN (attr)))
	{
	  misalign = TREE_VALUE (TREE_CHAIN (attr));
	  if (!tree_fits_uhwi_p (misalign))
	    return ptrval;
	  misaligni = tree_to_uhwi (misalign);
	}
    }
  if (aligni <= 1 || (aligni & (aligni - 1)) != 0 || misaligni >= aligni)
    return ptrval;

  align = build_int_cst_type (type, -aligni);
  alignval = get_value_for_expr (align, true);
  bit_value_binop_1 (BIT_AND_EXPR, type, &value, &mask,
		     type, value_to_wide_int (ptrval), ptrval.mask,
		     type, value_to_wide_int (alignval), alignval.mask);
  if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      gcc_assert ((mask.to_uhwi () & (aligni - 1)) == 0);
      gcc_assert ((value.to_uhwi () & (aligni - 1)) == 0);
      value |= misaligni;
      /* ??? Delay building trees here.  */
      val.value = wide_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = -1;
    }
  return val;
}
/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static ccp_prop_value_t
evaluate_stmt (gimple stmt)
{
  ccp_prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant = false;
  unsigned int align;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
	{
	case CONSTANT:
	  fprintf (dump_file, "CONSTANT");
	  break;
	case UNDEFINED:
	  fprintf (dump_file, "UNDEFINED");
	  break;
	case VARYING:
	  fprintf (dump_file, "VARYING");
	  break;
	default:;
	}
      fprintf (dump_file, "\n");
    }

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    {
      fold_defer_overflow_warnings ();
      simplified = ccp_fold (stmt);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      fold_undefer_overflow_warnings (is_constant, stmt, 0);
      if (is_constant)
	{
	  /* The statement produced a constant value.  */
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = 0;
	}
    }
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);

	  /* Other cases cannot satisfy is_gimple_min_invariant
	     without folding.  */
	  if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
	    simplified = gimple_assign_rhs1 (stmt);
	}
      else if (code == GIMPLE_SWITCH)
	simplified = gimple_switch_index (as_a <gswitch *> (stmt));
      else
	/* These cannot satisfy is_gimple_min_invariant without folding.  */
	gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      if (is_constant)
	{
	  /* The statement produced a constant value.  */
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = 0;
	}
    }
  /* If the statement result is likely UNDEFINED, make it so.  */
  else if (likelyvalue == UNDEFINED)
    {
      val.lattice_val = UNDEFINED;
      val.value = NULL_TREE;
      val.mask = 0;
      return val;
    }

  /* Resort to simplification for bitwise tracking.  */
  if (flag_tree_bit_ccp
      && (likelyvalue == CONSTANT || is_gimple_call (stmt)
	  || (gimple_assign_single_p (stmt)
	      && gimple_assign_rhs_code (stmt) == ADDR_EXPR))
      && !is_constant)
    {
      enum gimple_code code = gimple_code (stmt);
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = -1;
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);
	  tree rhs1 = gimple_assign_rhs1 (stmt);
	  tree lhs = gimple_assign_lhs (stmt);
	  if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
	       || POINTER_TYPE_P (TREE_TYPE (lhs)))
	      && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		  || POINTER_TYPE_P (TREE_TYPE (rhs1))))
	    switch (get_gimple_rhs_class (subcode))
	      {
	      case GIMPLE_SINGLE_RHS:
		val = get_value_for_expr (rhs1, true);
		break;

	      case GIMPLE_UNARY_RHS:
		val = bit_value_unop (subcode, TREE_TYPE (lhs), rhs1);
		break;

	      case GIMPLE_BINARY_RHS:
		val = bit_value_binop (subcode, TREE_TYPE (lhs), rhs1,
				       gimple_assign_rhs2 (stmt));
		break;

	      default:;
	      }
	}
      else if (code == GIMPLE_COND)
	{
	  enum tree_code code = gimple_cond_code (stmt);
	  tree rhs1 = gimple_cond_lhs (stmt);
	  tree rhs2 = gimple_cond_rhs (stmt);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
	      || POINTER_TYPE_P (TREE_TYPE (rhs1)))
	    val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
	}
      else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_MALLOC:
	    case BUILT_IN_REALLOC:
	    case BUILT_IN_CALLOC:
	    case BUILT_IN_STRDUP:
	    case BUILT_IN_STRNDUP:
	      val.lattice_val = CONSTANT;
	      val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
	      val.mask = ~((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT
			   / BITS_PER_UNIT - 1);
	      break;

	    case BUILT_IN_ALLOCA:
	    case BUILT_IN_ALLOCA_WITH_ALIGN:
	      align = (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN
		       ? TREE_INT_CST_LOW (gimple_call_arg (stmt, 1))
		       : BIGGEST_ALIGNMENT);
	      val.lattice_val = CONSTANT;
	      val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
	      val.mask = ~((HOST_WIDE_INT) align / BITS_PER_UNIT - 1);
	      break;

	    /* These builtins return their first argument, unmodified.  */
	    case BUILT_IN_MEMCPY:
	    case BUILT_IN_MEMMOVE:
	    case BUILT_IN_MEMSET:
	    case BUILT_IN_STRCPY:
	    case BUILT_IN_STRNCPY:
	    case BUILT_IN_MEMCPY_CHK:
	    case BUILT_IN_MEMMOVE_CHK:
	    case BUILT_IN_MEMSET_CHK:
	    case BUILT_IN_STRCPY_CHK:
	    case BUILT_IN_STRNCPY_CHK:
	      val = get_value_for_expr (gimple_call_arg (stmt, 0), true);
	      break;

	    case BUILT_IN_ASSUME_ALIGNED:
	      val = bit_value_assume_aligned (stmt, NULL_TREE, val, false);
	      break;

	    case BUILT_IN_ALIGNED_ALLOC:
	      {
		tree align = get_constant_value (gimple_call_arg (stmt, 0));
		if (align
		    && tree_fits_uhwi_p (align))
		  {
		    unsigned HOST_WIDE_INT aligni = tree_to_uhwi (align);
		    if (aligni > 1
			/* align must be power-of-two */
			&& (aligni & (aligni - 1)) == 0)
		      {
			val.lattice_val = CONSTANT;
			val.value = build_int_cst (ptr_type_node, 0);
			val.mask = -aligni;
		      }
		  }
		break;
	      }

	    default:;
	    }
	}
      if (is_gimple_call (stmt) && gimple_call_lhs (stmt))
	{
	  tree fntype = gimple_call_fntype (stmt);
	  if (fntype)
	    {
	      tree attrs = lookup_attribute ("assume_aligned",
					     TYPE_ATTRIBUTES (fntype));
	      if (attrs)
		val = bit_value_assume_aligned (stmt, attrs, val, false);
	      attrs = lookup_attribute ("alloc_align",
					TYPE_ATTRIBUTES (fntype));
	      if (attrs)
		val = bit_value_assume_aligned (stmt, attrs, val, true);
	    }
	}
      is_constant = (val.lattice_val == CONSTANT);
    }

  if (flag_tree_bit_ccp
      && ((is_constant && TREE_CODE (val.value) == INTEGER_CST)
	  || !is_constant)
      && gimple_get_lhs (stmt)
      && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME)
    {
      tree lhs = gimple_get_lhs (stmt);
      wide_int nonzero_bits = get_nonzero_bits (lhs);
      if (nonzero_bits != -1)
	{
	  if (!is_constant)
	    {
	      val.lattice_val = CONSTANT;
	      val.value = build_zero_cst (TREE_TYPE (lhs));
	      val.mask = extend_mask (nonzero_bits);
	      is_constant = true;
	    }
	  else
	    {
	      if (wi::bit_and_not (val.value, nonzero_bits) != 0)
		val.value = wide_int_to_tree (TREE_TYPE (lhs),
					      nonzero_bits & val.value);
	      if (nonzero_bits == 0)
		val.mask = 0;
	      else
		val.mask = val.mask & extend_mask (nonzero_bits);
	    }
	}
    }

  /* The statement produced a nonconstant value.  */
  if (!is_constant)
    {
      /* The statement produced a copy.  */
      if (simplified && TREE_CODE (simplified) == SSA_NAME
	  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (simplified))
	{
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = -1;
	}
      /* The statement is VARYING.  */
      else
	{
	  val.lattice_val = VARYING;
	  val.value = NULL_TREE;
	  val.mask = -1;
	}
    }

  return val;
}
<pointer_hash
<gimple_statement_base
> > gimple_htab
;
1975 /* Given a BUILT_IN_STACK_SAVE value SAVED_VAL, insert a clobber of VAR before
1976 each matching BUILT_IN_STACK_RESTORE. Mark visited phis in VISITED. */
1979 insert_clobber_before_stack_restore (tree saved_val
, tree var
,
1980 gimple_htab
**visited
)
1983 gassign
*clobber_stmt
;
1985 imm_use_iterator iter
;
1986 gimple_stmt_iterator i
;
1989 FOR_EACH_IMM_USE_STMT (stmt
, iter
, saved_val
)
1990 if (gimple_call_builtin_p (stmt
, BUILT_IN_STACK_RESTORE
))
1992 clobber
= build_constructor (TREE_TYPE (var
),
1994 TREE_THIS_VOLATILE (clobber
) = 1;
1995 clobber_stmt
= gimple_build_assign (var
, clobber
);
1997 i
= gsi_for_stmt (stmt
);
1998 gsi_insert_before (&i
, clobber_stmt
, GSI_SAME_STMT
);
2000 else if (gimple_code (stmt
) == GIMPLE_PHI
)
2003 *visited
= new gimple_htab (10);
2005 slot
= (*visited
)->find_slot (stmt
, INSERT
);
2010 insert_clobber_before_stack_restore (gimple_phi_result (stmt
), var
,
2013 else if (gimple_assign_ssa_name_copy_p (stmt
))
2014 insert_clobber_before_stack_restore (gimple_assign_lhs (stmt
), var
,
2016 else if (chkp_gimple_call_builtin_p (stmt
, BUILT_IN_CHKP_BNDRET
))
2019 gcc_assert (is_gimple_debug (stmt
));
/* Advance the iterator to the previous non-debug gimple statement in the same
   or dominating basic block.  */

static inline void
gsi_prev_dom_bb_nondebug (gimple_stmt_iterator *i)
{
  basic_block dom;

  gsi_prev_nondebug (i);
  while (gsi_end_p (*i))
    {
      dom = get_immediate_dominator (CDI_DOMINATORS, i->bb);
      if (dom == NULL || dom == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	return;

      *i = gsi_last_bb (dom);
    }
}
/* Find a BUILT_IN_STACK_SAVE dominating gsi_stmt (I), and insert
   a clobber of VAR before each matching BUILT_IN_STACK_RESTORE.

   It is possible that BUILT_IN_STACK_SAVE cannot be found in a dominator when
   a previous pass (such as DOM) duplicated it along multiple paths to a BB.
   In that case the function gives up without inserting the clobbers.  */

static void
insert_clobbers_for_var (gimple_stmt_iterator i, tree var)
{
  gimple stmt;
  tree saved_val;
  gimple_htab *visited = NULL;

  for (; !gsi_end_p (i); gsi_prev_dom_bb_nondebug (&i))
    {
      stmt = gsi_stmt (i);

      if (!gimple_call_builtin_p (stmt, BUILT_IN_STACK_SAVE))
	continue;

      saved_val = gimple_call_lhs (stmt);
      if (saved_val == NULL_TREE)
	continue;

      insert_clobber_before_stack_restore (saved_val, var, &visited);
      break;
    }

  delete visited;
}
/* Detects a __builtin_alloca_with_align with constant size argument.  Declares
   a fixed-size array and returns the address, if found, otherwise returns
   NULL_TREE.  */

static tree
fold_builtin_alloca_with_align (gimple stmt)
{
  unsigned HOST_WIDE_INT size, threshold, n_elem;
  tree lhs, arg, block, var, elem_type, array_type;

  /* Get lhs.  */
  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    return NULL_TREE;

  /* Detect constant argument.  */
  arg = get_constant_value (gimple_call_arg (stmt, 0));
  if (arg == NULL_TREE
      || TREE_CODE (arg) != INTEGER_CST
      || !tree_fits_uhwi_p (arg))
    return NULL_TREE;

  size = tree_to_uhwi (arg);

  /* Heuristic: don't fold large allocas.  */
  threshold = (unsigned HOST_WIDE_INT)PARAM_VALUE (PARAM_LARGE_STACK_FRAME);
  /* In case the alloca is located at function entry, it has the same lifetime
     as a declared array, so we allow a larger size.  */
  block = gimple_block (stmt);
  if (!(cfun->after_inlining
	&& TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL))
    threshold /= 10;
  if (size > threshold)
    return NULL_TREE;

  /* Declare array.  */
  elem_type = build_nonstandard_integer_type (BITS_PER_UNIT, 1);
  n_elem = size * 8 / BITS_PER_UNIT;
  array_type = build_array_type_nelts (elem_type, n_elem);
  var = create_tmp_var (array_type);
  DECL_ALIGN (var) = TREE_INT_CST_LOW (gimple_call_arg (stmt, 1));
  {
    struct ptr_info_def *pi = SSA_NAME_PTR_INFO (lhs);
    if (pi != NULL && !pi->pt.anything)
      {
	bool singleton_p;
	unsigned uid;
	singleton_p = pt_solution_singleton_p (&pi->pt, &uid);
	gcc_assert (singleton_p);
	SET_DECL_PT_UID (var, uid);
      }
  }

  /* Fold alloca to the address of the array.  */
  return fold_convert (TREE_TYPE (lhs), build_fold_addr_expr (var));
}
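/* For instance, a call such as

     p_1 = __builtin_alloca_with_align (16, 64);

   with a constant size below the heuristic threshold is folded to the
   address of a fixed 16-byte local array whose DECL_ALIGN comes from
   the second (bit-alignment) argument; ccp_fold_stmt below then
   replaces the call and inserts clobbers for the new variable before
   any matching __builtin_stack_restore.  */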
2130 /* Fold the stmt at *GSI with CCP specific information that propagating
2131 and regular folding does not catch. */
2134 ccp_fold_stmt (gimple_stmt_iterator
*gsi
)
2136 gimple stmt
= gsi_stmt (*gsi
);
2138 switch (gimple_code (stmt
))
2142 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
2143 ccp_prop_value_t val
;
2144 /* Statement evaluation will handle type mismatches in constants
2145 more gracefully than the final propagation. This allows us to
2146 fold more conditionals here. */
2147 val
= evaluate_stmt (stmt
);
2148 if (val
.lattice_val
!= CONSTANT
2154 fprintf (dump_file
, "Folding predicate ");
2155 print_gimple_expr (dump_file
, stmt
, 0, 0);
2156 fprintf (dump_file
, " to ");
2157 print_generic_expr (dump_file
, val
.value
, 0);
2158 fprintf (dump_file
, "\n");
2161 if (integer_zerop (val
.value
))
2162 gimple_cond_make_false (cond_stmt
);
2164 gimple_cond_make_true (cond_stmt
);
2171 tree lhs
= gimple_call_lhs (stmt
);
2172 int flags
= gimple_call_flags (stmt
);
2175 bool changed
= false;
2178 /* If the call was folded into a constant make sure it goes
2179 away even if we cannot propagate into all uses because of
2182 && TREE_CODE (lhs
) == SSA_NAME
2183 && (val
= get_constant_value (lhs
))
2184 /* Don't optimize away calls that have side-effects. */
2185 && (flags
& (ECF_CONST
|ECF_PURE
)) != 0
2186 && (flags
& ECF_LOOPING_CONST_OR_PURE
) == 0)
2188 tree new_rhs
= unshare_expr (val
);
2190 if (!useless_type_conversion_p (TREE_TYPE (lhs
),
2191 TREE_TYPE (new_rhs
)))
2192 new_rhs
= fold_convert (TREE_TYPE (lhs
), new_rhs
);
2193 res
= update_call_from_tree (gsi
, new_rhs
);

        /* Internal calls provide no argument types, so the extra laxity
           for normal calls does not apply.  */
        if (gimple_call_internal_p (stmt))
          return false;

        /* The heuristic of fold_builtin_alloca_with_align differs before and
           after inlining, so we don't require the arg to be changed into a
           constant for folding, but just to be constant.  */
        if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
          {
            tree new_rhs = fold_builtin_alloca_with_align (stmt);
            if (new_rhs)
              {
                bool res = update_call_from_tree (gsi, new_rhs);
                tree var = TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 0);
                gcc_assert (res);
                insert_clobbers_for_var (*gsi, var);
                return true;
              }
          }

        /* Propagate into the call arguments.  Compared to replace_uses_in
           this can use the argument slot types for type verification
           instead of the current argument type.  We also can safely
           drop qualifiers here as we are dealing with constants anyway.  */
        argt = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
        for (i = 0; i < gimple_call_num_args (stmt) && argt;
             ++i, argt = TREE_CHAIN (argt))
          {
            tree arg = gimple_call_arg (stmt, i);
            if (TREE_CODE (arg) == SSA_NAME
                && (val = get_constant_value (arg))
                && useless_type_conversion_p
                     (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
                      TYPE_MAIN_VARIANT (TREE_TYPE (val))))
              {
                gimple_call_set_arg (stmt, i, unshare_expr (val));
                changed = true;
              }
          }

        return changed;
      }

    case GIMPLE_ASSIGN:
      {
        tree lhs = gimple_assign_lhs (stmt);
        tree val;

        /* If we have a load that turned out to be constant replace it
           as we cannot propagate into all uses in all cases.  */
        if (gimple_assign_single_p (stmt)
            && TREE_CODE (lhs) == SSA_NAME
            && (val = get_constant_value (lhs)))
          {
            tree rhs = unshare_expr (val);
            if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
              rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
            gimple_assign_set_rhs_from_tree (gsi, rhs);
            return true;
          }

        return false;
      }

    default:
      return false;
    }
}
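
/* Sketch of the GIMPLE_COND folding above (invented SSA names): if the
   lattice proves x_3 is the constant 5, the conditional

     if (x_3 > 10)

   evaluates to a constant false here and is rewritten in place to an
   always-false predicate; the following CFG cleanup then deletes the
   dead arm.  Likewise, a call argument that is a constant SSA name,

     foo (x_3);   becomes   foo (5);

   provided the constant is compatible with the argument slot type.  */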

/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  ccp_prop_value_t val;
  enum ssa_prop_result retval = SSA_PROP_NOT_INTERESTING;

  tree lhs = gimple_get_lhs (stmt);
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* Evaluate the statement, which could be
         either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
      val = evaluate_stmt (stmt);

      /* If STMT is an assignment to an SSA_NAME, we only have one
         value to set.  */
      if (set_lattice_value (lhs, val))
        {
          *output_p = lhs;
          if (val.lattice_val == VARYING)
            retval = SSA_PROP_VARYING;
          else
            retval = SSA_PROP_INTERESTING;
        }
    }

  return retval;
}
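
/* For instance (a made-up example), when visit_assignment sees

     y_2 = x_1 + 4;

   with x_1 known to be CONSTANT 3, evaluate_stmt yields CONSTANT 7;
   set_lattice_value records the change and SSA_PROP_INTERESTING tells
   the engine to revisit every statement that uses y_2.  */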

/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  ccp_prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);
  if (val.lattice_val != CONSTANT
      || TREE_CODE (val.value) != INTEGER_CST)
    return SSA_PROP_VARYING;

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = find_taken_edge (block, val.value);
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}
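
/* Sketch (invented values): if the predicate of

     if (i_7 < 10)

   evaluates to CONSTANT 1, find_taken_edge returns the true edge; only
   that edge's destination keeps being simulated, so statements reachable
   solely through the false edge are never even scanned.  */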

/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* If the statement is an assignment that produces a single
         output value, evaluate its RHS to see if the lattice value of
         its output has changed.  */
      return visit_assignment (stmt, output_p);

    case GIMPLE_CALL:
      /* A value-returning call also performs an assignment.  */
      if (gimple_call_lhs (stmt) != NULL_TREE)
        return visit_assignment (stmt, output_p);
      break;

    case GIMPLE_COND:
    case GIMPLE_SWITCH:
      /* If STMT is a conditional branch, see if we can determine
         which branch will be taken.  */
      /* FIXME.  It appears that we should be able to optimize
         computed GOTOs here as well.  */
      return visit_cond_stmt (stmt, taken_edge_p);

    default:
      break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      ccp_prop_value_t v = { VARYING, NULL_TREE, -1 };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}
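
/* A GIMPLE_ASM such as (hand-written illustration)

     __asm__ ("..." : "=r" (x_4));

   is a typical statement that lands in the fallback above: its output
   x_4 is pessimistically marked VARYING.  */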

/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  unsigned int todo = 0;
  calculate_dominance_info (CDI_DOMINATORS);
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    todo = (TODO_cleanup_cfg | TODO_update_ssa);
  free_dominance_info (CDI_DOMINATORS);
  return todo;
}
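
/* End-to-end sketch of what the pass does (a fabricated example, not a
   real dump):

     x_2 = 4;
     y_3 = x_2 + 1;
     if (y_3 > 10) goto <bb 3>; else goto <bb 4>;

   becomes y_3 = 5 with the conditional rewritten to be always false,
   after which TODO_cleanup_cfg deletes the dead branch.  */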

namespace {

const pass_data pass_data_ccp =
{
  GIMPLE_PASS, /* type */
  "ccp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CCP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_address_taken, /* todo_flags_finish */
};

class pass_ccp : public gimple_opt_pass
{
public:
  pass_ccp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_ccp, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_ccp (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_ccp != 0; }
  virtual unsigned int execute (function *) { return do_ssa_ccp (); }

}; // class pass_ccp

} // anon namespace

gimple_opt_pass *
make_pass_ccp (gcc::context *ctxt)
{
  return new pass_ccp (ctxt);
}

/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */

static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee;
  gimple stmt;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
        return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
        continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee
          || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
          /* All regular builtins are ok, just obviously not alloca.  */
          || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
          || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA_WITH_ALIGN)
        return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
        goto second_stack_restore;
    }

  if (!gsi_end_p (i))
    return NULL_TREE;

  /* Allow one successor of the exit block, or zero successors.  */
  switch (EDGE_COUNT (bb->succs))
    {
    case 0:
      break;
    case 1:
      if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
        return NULL_TREE;
      break;
    default:
      return NULL_TREE;
    }

 second_stack_restore:

  /* If there's exactly one use, then zap the call to __builtin_stack_save.
     If there are multiple uses, then the last one should remove the call.
     In any case, whether the call to __builtin_stack_save can be removed
     or not is irrelevant to removing the call to __builtin_stack_restore.  */
  if (has_single_use (gimple_call_arg (call, 0)))
    {
      gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
      if (is_gimple_call (stack_save))
        {
          callee = gimple_call_fndecl (stack_save);
          if (callee
              && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
            {
              gimple_stmt_iterator stack_save_gsi;
              tree rhs;

              stack_save_gsi = gsi_for_stmt (stack_save);
              rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
              update_call_from_tree (&stack_save_gsi, rhs);
            }
        }
    }

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
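
/* Illustration (a hypothetical block, e.g. left over after an inlined
   VLA region):

     saved_1 = __builtin_stack_save ();
     ...                            <- no calls or asms in between
     __builtin_stack_restore (saved_1);
     <fall through to EXIT>

   The restore is replaced by a no-op constant and, since saved_1 has a
   single use, the save is rewritten to saved_1 = 0 and later DCEd.  */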

/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
   pointer assignment.  */

static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;
  location_t loc = gimple_location (call);

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
                       && (TREE_TYPE (cfun_va_list) == void_type_node
                           || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
          || targetm.expand_builtin_va_start != NULL
          || !builtin_decl_explicit_p (BUILT_IN_NEXT_ARG))
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_NEXT_ARG),
                                 1, integer_zero_node);
      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
          != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
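
/* Sketch for the va_copy case (invented names): with a va_list that is
   a plain pointer,

     __builtin_va_copy (&dst, src_3);

   becomes the plain pointer assignment

     dst = src_3;

   while __builtin_va_end (&dst) simply disappears.  */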

/* Attempt to make the block of __builtin_unreachable I unreachable by
   changing the incoming jumps.  Return true if at least one jump was
   changed.  */

static bool
optimize_unreachable (gimple_stmt_iterator i)
{
  basic_block bb = gsi_bb (i);
  gimple_stmt_iterator gsi;
  gimple stmt;
  edge_iterator ei;
  edge e;
  bool ret;

  if (flag_sanitize & SANITIZE_UNREACHABLE)
    return false;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
        continue;

      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
        {
          /* Verify we do not need to preserve the label.  */
          if (FORCED_LABEL (gimple_label_label (label_stmt)))
            return false;

          continue;
        }

      /* Only handle the case that __builtin_unreachable is the first statement
         in the block.  We rely on DCE to remove stmts without side-effects
         before __builtin_unreachable.  */
      if (gsi_stmt (gsi) != gsi_stmt (i))
        return false;
    }

  ret = false;
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      if (gsi_end_p (gsi))
        continue;

      stmt = gsi_stmt (gsi);
      if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
        {
          if (e->flags & EDGE_TRUE_VALUE)
            gimple_cond_make_false (cond_stmt);
          else if (e->flags & EDGE_FALSE_VALUE)
            gimple_cond_make_true (cond_stmt);
          else
            gcc_unreachable ();
          update_stmt (cond_stmt);
        }
      else
        {
          /* TODO: handle other cases, e.g. a switch statement.  */
          continue;
        }

      ret = true;
    }

  return ret;
}
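
/* Illustration (fabricated CFG): if bb 5 contains only
   __builtin_unreachable () and is reached by the true edge of

     if (n_2 > 100) goto <bb 5>; else goto <bb 6>;

   the condition is forced to false, the edge into bb 5 dies, and the
   next CFG cleanup deletes the block.  */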

/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

namespace {

const pass_data pass_data_fold_builtins =
{
  GIMPLE_PASS, /* type */
  "fab", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_fold_builtins : public gimple_opt_pass
{
public:
  pass_fold_builtins (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fold_builtins, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fold_builtins (m_ctxt); }
  virtual unsigned int execute (function *);

}; // class pass_fold_builtins

unsigned int
pass_fold_builtins::execute (function *fun)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          gimple stmt, old_stmt;
          tree callee;
          enum built_in_function fcode;

          stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              /* Remove all *ssaname_N ={v} {CLOBBER}; stmts,
                 after the last GIMPLE DSE they aren't needed and might
                 unnecessarily keep the SSA_NAMEs live.  */
              if (gimple_clobber_p (stmt))
                {
                  tree lhs = gimple_assign_lhs (stmt);
                  if (TREE_CODE (lhs) == MEM_REF
                      && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
                    {
                      unlink_stmt_vdef (stmt);
                      gsi_remove (&i, true);
                      release_defs (stmt);
                      continue;
                    }
                }
              gsi_next (&i);
              continue;
            }

          callee = gimple_call_fndecl (stmt);
          if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            {
              gsi_next (&i);
              continue;
            }

          fcode = DECL_FUNCTION_CODE (callee);
          old_stmt = stmt;

          if (fold_stmt (&i))
            ;
          else
            {
              tree result = NULL_TREE;
              switch (DECL_FUNCTION_CODE (callee))
                {
                case BUILT_IN_CONSTANT_P:
                  /* Resolve __builtin_constant_p.  If it hasn't been
                     folded to integer_one_node by now, it's fairly
                     certain that the value simply isn't constant.  */
                  result = integer_zero_node;
                  break;

                case BUILT_IN_ASSUME_ALIGNED:
                  /* Remove __builtin_assume_aligned.  */
                  result = gimple_call_arg (stmt, 0);
                  break;

                case BUILT_IN_STACK_RESTORE:
                  result = optimize_stack_restore (i);
                  if (result)
                    break;
                  gsi_next (&i);
                  continue;

                case BUILT_IN_UNREACHABLE:
                  if (optimize_unreachable (i))
                    cfg_changed = true;
                  break;

                case BUILT_IN_VA_START:
                case BUILT_IN_VA_END:
                case BUILT_IN_VA_COPY:
                  /* These shouldn't be folded before pass_stdarg.  */
                  result = optimize_stdarg_builtin (stmt);
                  if (result)
                    break;
                  /* FALLTHRU */

                default:;
                }

              if (result == NULL_TREE)
                {
                  gsi_next (&i);
                  continue;
                }

              if (!update_call_from_tree (&i, result))
                gimplify_and_update_call_from_tree (&i, result);

              todoflags |= TODO_update_address_taken;

              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "Simplified\n  ");
                  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
                }
            }

          stmt = gsi_stmt (i);

          if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
              && gimple_purge_dead_eh_edges (bb))
            cfg_changed = true;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
              fprintf (dump_file, "\n");
            }

          /* Retry the same statement if it changed into another
             builtin, there might be new opportunities now.  */
          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              gsi_next (&i);
              continue;
            }
          callee = gimple_call_fndecl (stmt);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
              || DECL_FUNCTION_CODE (callee) == fcode)
            gsi_next (&i);
        }
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}

} // anon namespace
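
/* Worked example for the BUILT_IN_ASSUME_ALIGNED case above (made-up
   SSA names): when regular folding has not already eliminated

     q_4 = __builtin_assume_aligned (p_3, 16);

   the switch substitutes the call's first argument, leaving

     q_4 = p_3;

   which copy propagation can then clean up.  */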

gimple_opt_pass *
make_pass_fold_builtins (gcc::context *ctxt)
{
  return new pass_fold_builtins (ctxt);
}