/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000-2015 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED   ->  the initial state of the value.  This value
			    is replaced with a correct initial value
			    the first time the value is used, so the
			    rest of the pass does not need to care about
			    it.  Using this value simplifies initialization
			    of the pass, and prevents us from needlessly
			    scanning statements that are never reached.

	UNDEFINED	->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT	->  V_i has been found to hold a constant
			    value C.

	VARYING		->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.

   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

			if (PRED)
			  a_9 = 3;
			else
			  a_10 = 100;
			a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.

   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.

   This algorithm uses wide-ints at the max precision of the target.
   This means that, with one uninteresting exception, variables with
   UNSIGNED types never go to VARYING because the bits above the
   precision of the type of the variable are always zero.  The
   uninteresting case is a variable of UNSIGNED type that has the
   maximum precision of the target.  Such variables can go to VARYING,
   but this causes no loss of information since these variables will
   never be extended.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
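/* As a hedged illustration (not part of the pass itself), CCP turns a
   function like

     int f (void)
     {
       int x = 4;
       int y;
       if (x > 3)
	 y = x + 1;
       else
	 y = 0;
       return y;
     }

   into "return 5;": x is CONSTANT 4, the predicate folds to true so the
   else edge is marked non-executable, and the PHI for y therefore meets
   only the CONSTANT 5 argument.  */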
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "real.h"
#include "tree.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "flags.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "hash-table.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "diagnostic-core.h"
#include "dbgcnt.h"
#include "params.h"
#include "wide-int-print.h"
#include "builtins.h"
#include "tree-chkp.h"
/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

struct ccp_prop_value_t {
    /* Lattice value.  */
    ccp_lattice_t lattice_val;

    /* Propagated value.  */
    tree value;

    /* Mask that applies to the propagated value during CCP.  For X
       with a CONSTANT lattice value X & ~mask == value & ~mask.  The
       zero bits in the mask cover constant values.  The ones mean no
       information.  */
    widest_int mask;
};
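/* For example (illustrative only): an 8-bit value known to be even but
   otherwise unknown is encoded as value == 0 with mask == 0xfe; a fully
   known constant 6 has value == 6 and mask == 0; mask == -1 carries no
   bit information at all.  In every case X & ~mask == value & ~mask
   holds for each runtime value X the name can take.  */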
/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static ccp_prop_value_t *const_val;
static unsigned n_const_val;

static void canonicalize_value (ccp_prop_value_t *);
static bool ccp_fold_stmt (gimple_stmt_iterator *);
/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, ccp_prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      if (TREE_CODE (val.value) != INTEGER_CST
	  || val.mask == 0)
	{
	  fprintf (outf, "%sCONSTANT ", prefix);
	  print_generic_expr (outf, val.value, dump_flags);
	}
      else
	{
	  widest_int cval = wi::bit_and_not (wi::to_widest (val.value),
					     val.mask);
	  fprintf (outf, "%sCONSTANT ", prefix);
	  print_hex (cval, outf);
	  fprintf (outf, " (");
	  print_hex (val.mask, outf);
	  fprintf (outf, ")");
	}
      break;
    default:
      gcc_unreachable ();
    }
}
/* Print lattice value VAL to stderr.  */

void debug_lattice_value (ccp_prop_value_t val);

DEBUG_FUNCTION void
debug_lattice_value (ccp_prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}
/* Extend NONZERO_BITS to a full mask, with the upper bits being set.  */

static widest_int
extend_mask (const wide_int &nonzero_bits)
{
  return (wi::mask <widest_int> (wi::get_precision (nonzero_bits), true)
	  | widest_int::from (nonzero_bits, UNSIGNED));
}
/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static ccp_prop_value_t
get_default_value (tree var)
{
  ccp_prop_value_t val = { UNINITIALIZED, NULL_TREE, 0 };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
	 before being initialized.  If VAR is a local variable, we
	 can assume initially that it is UNDEFINED, otherwise we must
	 consider it VARYING.  */
      if (!virtual_operand_p (var)
	  && TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
	val.lattice_val = UNDEFINED;
      else
	{
	  val.lattice_val = VARYING;
	  val.mask = -1;
	  if (flag_tree_bit_ccp)
	    {
	      wide_int nonzero_bits = get_nonzero_bits (var);
	      if (nonzero_bits != -1)
		{
		  val.lattice_val = CONSTANT;
		  val.value = build_zero_cst (TREE_TYPE (var));
		  val.mask = extend_mask (nonzero_bits);
		}
	    }
	}
    }
  else if (is_gimple_assign (stmt))
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
	  && DECL_P (gimple_assign_rhs1 (stmt))
	  && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
	{
	  val.lattice_val = CONSTANT;
	  val.value = cst;
	}
      else
	{
	  /* Any other variable defined by an assignment is considered
	     UNDEFINED.  */
	  val.lattice_val = UNDEFINED;
	}
    }
  else if ((is_gimple_call (stmt)
	    && gimple_call_lhs (stmt) != NULL_TREE)
	   || gimple_code (stmt) == GIMPLE_PHI)
    {
      /* A variable defined by a call or a PHI node is considered
	 UNDEFINED.  */
      val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
      val.mask = -1;
    }

  return val;
}
/* Get the constant value associated with variable VAR.  */

static inline ccp_prop_value_t *
get_value (tree var)
{
  ccp_prop_value_t *val;

  if (const_val == NULL
      || SSA_NAME_VERSION (var) >= n_const_val)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_value (val);

  return val;
}
/* Return the constant tree value associated with VAR.  */

static inline tree
get_constant_value (tree var)
{
  ccp_prop_value_t *val;
  if (TREE_CODE (var) != SSA_NAME)
    {
      if (is_gimple_min_invariant (var))
	return var;
      return NULL_TREE;
    }
  val = get_value (var);
  if (val
      && val->lattice_val == CONSTANT
      && (TREE_CODE (val->value) != INTEGER_CST
	  || val->mask == 0))
    return val->value;
  return NULL_TREE;
}
/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  ccp_prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mask = -1;
}
/* For integer constants, make sure to drop TREE_OVERFLOW.  */

static void
canonicalize_value (ccp_prop_value_t *val)
{
  if (val->lattice_val != CONSTANT)
    return;

  if (TREE_OVERFLOW_P (val->value))
    val->value = drop_tree_overflow (val->value);
}
/* Return whether the lattice transition is valid.  */

static bool
valid_lattice_transition (ccp_prop_value_t old_val, ccp_prop_value_t new_val)
{
  /* Lattice transitions must always be monotonically increasing in
     value.  */
  if (old_val.lattice_val < new_val.lattice_val)
    return true;

  if (old_val.lattice_val != new_val.lattice_val)
    return false;

  if (!old_val.value && !new_val.value)
    return true;

  /* Now both lattice values are CONSTANT.  */

  /* Allow arbitrary copy changes as we might look through PHI <a_1, ...>
     when only a single copy edge is executable.  */
  if (TREE_CODE (old_val.value) == SSA_NAME
      && TREE_CODE (new_val.value) == SSA_NAME)
    return true;

  /* Allow transitioning from a constant to a copy.  */
  if (is_gimple_min_invariant (old_val.value)
      && TREE_CODE (new_val.value) == SSA_NAME)
    return true;

  /* Allow transitioning from PHI <&x, not executable> == &x
     to PHI <&x, &y> == common alignment.  */
  if (TREE_CODE (old_val.value) != INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return true;

  /* Bit-lattices have to agree in the still valid bits.  */
  if (TREE_CODE (old_val.value) == INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return (wi::bit_and_not (wi::to_widest (old_val.value), new_val.mask)
	    == wi::bit_and_not (wi::to_widest (new_val.value), new_val.mask));

  /* Otherwise constant values have to agree.  */
  if (operand_equal_p (old_val.value, new_val.value, 0))
    return true;

  /* At least the kinds and types should agree now.  */
  if (TREE_CODE (old_val.value) != TREE_CODE (new_val.value)
      || !types_compatible_p (TREE_TYPE (old_val.value),
			      TREE_TYPE (new_val.value)))
    return false;

  /* For floats and !HONOR_NANS allow transitions from (partial) NaN
     to non-NaN.  */
  tree type = TREE_TYPE (new_val.value);
  if (SCALAR_FLOAT_TYPE_P (type)
      && !HONOR_NANS (type))
    {
      if (REAL_VALUE_ISNAN (TREE_REAL_CST (old_val.value)))
	return true;
    }
  else if (VECTOR_FLOAT_TYPE_P (type)
	   && !HONOR_NANS (type))
    {
      for (unsigned i = 0; i < VECTOR_CST_NELTS (old_val.value); ++i)
	if (!REAL_VALUE_ISNAN
	       (TREE_REAL_CST (VECTOR_CST_ELT (old_val.value, i)))
	    && !operand_equal_p (VECTOR_CST_ELT (old_val.value, i),
				 VECTOR_CST_ELT (new_val.value, i), 0))
	  return false;
      return true;
    }
  else if (COMPLEX_FLOAT_TYPE_P (type)
	   && !HONOR_NANS (type))
    {
      if (!REAL_VALUE_ISNAN (TREE_REAL_CST (TREE_REALPART (old_val.value)))
	  && !operand_equal_p (TREE_REALPART (old_val.value),
			       TREE_REALPART (new_val.value), 0))
	return false;
      if (!REAL_VALUE_ISNAN (TREE_REAL_CST (TREE_IMAGPART (old_val.value)))
	  && !operand_equal_p (TREE_IMAGPART (old_val.value),
			       TREE_IMAGPART (new_val.value), 0))
	return false;
      return true;
    }
  return false;
}
/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, ccp_prop_value_t new_val)
{
  /* We can deal with old UNINITIALIZED values just fine here.  */
  ccp_prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];

  canonicalize_value (&new_val);

  /* We have to be careful to not go up the bitwise lattice
     represented by the mask.
     ??? This doesn't seem to be the best place to enforce this.  */
  if (new_val.lattice_val == CONSTANT
      && old_val->lattice_val == CONSTANT
      && TREE_CODE (new_val.value) == INTEGER_CST
      && TREE_CODE (old_val->value) == INTEGER_CST)
    {
      widest_int diff = (wi::to_widest (new_val.value)
			 ^ wi::to_widest (old_val->value));
      new_val.mask = new_val.mask | old_val->mask | diff;
    }

  gcc_checking_assert (valid_lattice_transition (*old_val, new_val));

  /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
     caller that this was a non-transition.  */
  if (old_val->lattice_val != new_val.lattice_val
      || (new_val.lattice_val == CONSTANT
	  && (TREE_CODE (new_val.value) != TREE_CODE (old_val->value)
	      || (TREE_CODE (new_val.value) == INTEGER_CST
		  && (new_val.mask != old_val->mask
		      || (wi::bit_and_not (wi::to_widest (old_val->value),
					   new_val.mask)
			  != wi::bit_and_not (wi::to_widest (new_val.value),
					      new_val.mask))))
	      || (TREE_CODE (new_val.value) != INTEGER_CST
		  && !operand_equal_p (new_val.value, old_val->value, 0)))))
    {
      /* ??? We would like to delay creation of INTEGER_CSTs from
	 partially constants here.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNINITIALIZED);
      return true;
    }

  return false;
}
static ccp_prop_value_t get_value_for_expr (tree, bool);
static ccp_prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
static void bit_value_binop_1 (enum tree_code, tree, widest_int *, widest_int *,
			       tree, const widest_int &, const widest_int &,
			       tree, const widest_int &, const widest_int &);
/* Return a widest_int that can be used for bitwise simplifications
   from VAL.  */

static widest_int
value_to_wide_int (ccp_prop_value_t val)
{
  if (val.value
      && TREE_CODE (val.value) == INTEGER_CST)
    return wi::to_widest (val.value);

  return 0;
}
/* Return the value for the address expression EXPR based on alignment
   information.  */

static ccp_prop_value_t
get_value_from_alignment (tree expr)
{
  tree type = TREE_TYPE (expr);
  ccp_prop_value_t val;
  unsigned HOST_WIDE_INT bitpos;
  unsigned int align;

  gcc_assert (TREE_CODE (expr) == ADDR_EXPR);

  get_pointer_alignment_1 (expr, &align, &bitpos);
  val.mask = (POINTER_TYPE_P (type) || TYPE_UNSIGNED (type)
	      ? wi::mask <widest_int> (TYPE_PRECISION (type), false)
	      : -1).and_not (align / BITS_PER_UNIT - 1);
  val.lattice_val
    = wi::sext (val.mask, TYPE_PRECISION (type)) == -1 ? VARYING : CONSTANT;
  if (val.lattice_val == CONSTANT)
    val.value = build_int_cstu (type, bitpos / BITS_PER_UNIT);
  else
    val.value = NULL_TREE;

  return val;
}
/* Return the value for the tree operand EXPR.  If FOR_BITS_P is true
   return constant bits extracted from alignment information for
   invariant addresses.  */

static ccp_prop_value_t
get_value_for_expr (tree expr, bool for_bits_p)
{
  ccp_prop_value_t val;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      val = *get_value (expr);
      if (for_bits_p
	  && val.lattice_val == CONSTANT
	  && TREE_CODE (val.value) == ADDR_EXPR)
	val = get_value_from_alignment (val.value);
      /* Fall back to a copy value.  */
      if (!for_bits_p
	  && val.lattice_val == VARYING
	  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (expr))
	{
	  val.lattice_val = CONSTANT;
	  val.value = expr;
	  val.mask = -1;
	}
    }
  else if (is_gimple_min_invariant (expr)
	   && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
    {
      val.lattice_val = CONSTANT;
      val.value = expr;
      val.mask = 0;
      canonicalize_value (&val);
    }
  else if (TREE_CODE (expr) == ADDR_EXPR)
    val = get_value_from_alignment (expr);
  else
    {
      val.lattice_val = VARYING;
      val.mask = -1;
      val.value = NULL_TREE;
    }
  return val;
}
/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT cause its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  bool has_nsa_operand;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
	      || code == GIMPLE_CALL
	      || code == GIMPLE_COND
	      || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  has_nsa_operand = false;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      ccp_prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;

      if (SSA_NAME_IS_DEFAULT_DEF (use)
	  || !prop_simulate_again_p (SSA_NAME_DEF_STMT (use)))
	has_nsa_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
	continue;
      if (is_gimple_min_invariant (op))
	has_constant_operand = true;
    }

  if (has_constant_operand)
    all_undefined_operands = false;

  if (has_undefined_operand
      && code == GIMPLE_CALL
      && gimple_call_internal_p (stmt))
    switch (gimple_call_internal_fn (stmt))
      {
	/* These 3 builtins use the first argument just as a magic
	   way how to find out a decl uid.  */
      case IFN_GOMP_SIMD_LANE:
      case IFN_GOMP_SIMD_VF:
      case IFN_GOMP_SIMD_LAST_LANE:
	has_undefined_operand = false;
	break;
      default:
	break;
      }

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	case ADDR_EXPR:
	  /* If any part of an address is UNDEFINED, like the index
	     of an ARRAY_EXPR, then treat the result as UNDEFINED.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to CONSTANT.  During iteration UNDEFINED may still drop
     to CONSTANT.  */
  if (has_undefined_operand)
    return CONSTANT;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  Also we can combine the stmt with definitions from
     operands whose definitions are not simulated again.  */
  if (has_constant_operand
      || has_nsa_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}
/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call and does not return a value or is not a
     builtin and not an indirect call or a call to function with
     assume_aligned/alloc_align attribute, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl, fntype = gimple_call_fntype (stmt);
      if (!gimple_call_lhs (stmt)
	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
	      && !DECL_BUILT_IN (fndecl)
	      && !lookup_attribute ("assume_aligned",
				    TYPE_ATTRIBUTES (fntype))
	      && !lookup_attribute ("alloc_align",
				    TYPE_ATTRIBUTES (fntype))))
	return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}
/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  n_const_val = num_ssa_names;
  const_val = XCNEWVEC (ccp_prop_value_t, n_const_val);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  bool is_varying;

	  /* If the statement is a control insn, then we do not
	     want to avoid simulating the statement once.  Failure
	     to do so means that those edges will never get added.  */
	  if (stmt_ends_bb_p (stmt))
	    is_varying = false;
	  else
	    is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }
	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gphi_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gphi *phi = i.phi ();

	  if (virtual_operand_p (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }
}

/* Debug count support.  Reset the values of ssa names
   VARYING when the total number ssa names analyzed is
   beyond the debug count specified.  */

static void
do_dbg_cnt (void)
{
  unsigned i;
  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
	{
	  const_val[i].lattice_val = VARYING;
	  const_val[i].mask = -1;
	  const_val[i].value = NULL_TREE;
	}
    }
}
/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;
  unsigned i;

  do_dbg_cnt ();

  /* Derive alignment and misalignment information from partially
     constant pointers in the lattice or nonzero bits from partially
     constant integers.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      ccp_prop_value_t *val;
      unsigned int tem, align;

      if (!name
	  || (!POINTER_TYPE_P (TREE_TYPE (name))
	      && (!INTEGRAL_TYPE_P (TREE_TYPE (name))
		  /* Don't record nonzero bits before IPA to avoid
		     using too much memory.  */
		  || first_pass_instance)))
	continue;

      val = get_value (name);
      if (val->lattice_val != CONSTANT
	  || TREE_CODE (val->value) != INTEGER_CST)
	continue;

      if (POINTER_TYPE_P (TREE_TYPE (name)))
	{
	  /* Trailing mask bits specify the alignment, trailing value
	     bits the misalignment.  */
	  tem = val->mask.to_uhwi ();
	  align = (tem & -tem);
	  if (align > 1)
	    set_ptr_info_alignment (get_ptr_info (name), align,
				    (TREE_INT_CST_LOW (val->value)
				     & (align - 1)));
	}
      else
	{
	  unsigned int precision = TYPE_PRECISION (TREE_TYPE (val->value));
	  wide_int nonzero_bits = wide_int::from (val->mask, precision,
						  UNSIGNED) | val->value;
	  nonzero_bits &= get_nonzero_bits (name);
	  set_nonzero_bits (name, nonzero_bits);
	}
    }

  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (get_constant_value,
					   ccp_fold_stmt, true);

  free (const_val);
  const_val = NULL;
  return something_changed;
}
/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj	   = Ci		if (i == j)
		Ci   M Cj	   = VARYING	if (i != j)
   */

static void
ccp_lattice_meet (basic_block where,
		  ccp_prop_value_t *val1, ccp_prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED
      /* For UNDEFINED M SSA we can't always SSA because its definition
	 may not dominate the PHI node.  Doing optimistic copy propagation
	 also causes a lot of gcc.dg/uninit-pred*.c FAILs.  */
      && (val2->lattice_val != CONSTANT
	  || TREE_CODE (val2->value) != SSA_NAME))
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED
	   /* See above.  */
	   && (val1->lattice_val != CONSTANT
	       || TREE_CODE (val1->value) != SSA_NAME))
    {
      /* any M UNDEFINED = any
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = -1;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && TREE_CODE (val1->value) == INTEGER_CST
	   && TREE_CODE (val2->value) == INTEGER_CST)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 For INTEGER_CSTs mask unequal bits.  If no equal bits remain,
	 drop to varying.  */
      val1->mask = (val1->mask | val2->mask
		    | (wi::to_widest (val1->value)
		       ^ wi::to_widest (val2->value)));
      if (wi::sext (val1->mask, TYPE_PRECISION (TREE_TYPE (val1->value))) == -1)
	{
	  val1->lattice_val = VARYING;
	  val1->value = NULL_TREE;
	}
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 VAL1 already contains the value we want for equivalent values.  */
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && (TREE_CODE (val1->value) == ADDR_EXPR
	       || TREE_CODE (val2->value) == ADDR_EXPR))
    {
      /* When not equal addresses are involved try meeting for
	 alignment.  */
      ccp_prop_value_t tem = *val2;
      if (TREE_CODE (val1->value) == ADDR_EXPR)
	*val1 = get_value_for_expr (val1->value, true);
      if (TREE_CODE (val2->value) == ADDR_EXPR)
	tem = get_value_for_expr (val2->value, true);
      ccp_lattice_meet (where, val1, &tem);
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = -1;
      val1->value = NULL_TREE;
    }
}
/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gphi *phi)
{
  unsigned i;
  ccp_prop_value_t new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  new_val.lattice_val = UNDEFINED;
  new_val.value = NULL_TREE;
  new_val.mask = 0;

  bool first = true;
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
	      "\n    Argument #%d (%d -> %d %sexecutable)\n",
	      i, e->src->index, e->dest->index,
	      (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, Compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  ccp_prop_value_t arg_val = get_value_for_expr (arg, false);

	  if (first)
	    {
	      new_val = arg_val;
	      first = false;
	    }
	  else
	    ccp_lattice_meet (gimple_bb (phi), &new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}
/* Return the constant value for OP or OP otherwise.  */

static tree
valueize_op (tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    {
      tree tem = get_constant_value (op);
      if (tem)
	return tem;
    }
  return op;
}
/* Return the constant value for OP, but signal to not follow SSA
   edges if the definition may be simulated again.  */

static tree
valueize_op_1 (tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    {
      /* If the definition may be simulated again we cannot follow
	 this SSA edge as the SSA propagator does not necessarily
	 re-visit the use.  */
      gimple def_stmt = SSA_NAME_DEF_STMT (op);
      if (!gimple_nop_p (def_stmt)
	  && prop_simulate_again_p (def_stmt))
	return NULL_TREE;
      tree tem = get_constant_value (op);
      if (tem)
	return tem;
    }
  return op;
}
/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	/* Handle comparison operators that can appear in GIMPLE form.  */
	tree op0 = valueize_op (gimple_cond_lhs (stmt));
	tree op1 = valueize_op (gimple_cond_rhs (stmt));
	enum tree_code code = gimple_cond_code (stmt);
	return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
	/* Return the constant switch index.  */
	return valueize_op (gimple_switch_index (as_a <gswitch *> (stmt)));
      }

    case GIMPLE_ASSIGN:
    case GIMPLE_CALL:
      return gimple_fold_stmt_to_constant_1 (stmt,
					     valueize_op, valueize_op_1);

    default:
      gcc_unreachable ();
    }
}
/* Apply the operation CODE in type TYPE to the value, mask pair
   RVAL and RMASK representing a value of type RTYPE and set
   the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_unop_1 (enum tree_code code, tree type,
		  widest_int *val, widest_int *mask,
		  tree rtype, const widest_int &rval, const widest_int &rmask)
{
  switch (code)
    {
    case BIT_NOT_EXPR:
      *mask = rmask;
      *val = ~rval;
      break;

    case NEGATE_EXPR:
      {
	widest_int temv, temm;
	/* Return ~rval + 1.  */
	bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   type, temv, temm, type, 1, 0);
	break;
      }

    CASE_CONVERT:
      {
	signop sgn;

	/* First extend mask and value according to the original type.  */
	sgn = TYPE_SIGN (rtype);
	*mask = wi::ext (rmask, TYPE_PRECISION (rtype), sgn);
	*val = wi::ext (rval, TYPE_PRECISION (rtype), sgn);

	/* Then extend mask and value according to the target type.  */
	sgn = TYPE_SIGN (type);
	*mask = wi::ext (*mask, TYPE_PRECISION (type), sgn);
	*val = wi::ext (*val, TYPE_PRECISION (type), sgn);
	break;
      }

    default:
      *mask = -1;
      break;
    }
}
/* Apply the operation CODE in type TYPE to the value, mask pairs
   R1VAL, R1MASK and R2VAL, R2MASK representing a values of type R1TYPE
   and R2TYPE and set the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_binop_1 (enum tree_code code, tree type,
		   widest_int *val, widest_int *mask,
		   tree r1type, const widest_int &r1val,
		   const widest_int &r1mask, tree r2type,
		   const widest_int &r2val, const widest_int &r2mask)
{
  signop sgn = TYPE_SIGN (type);
  int width = TYPE_PRECISION (type);
  bool swap_p = false;

  /* Assume we'll get a constant result.  Use an initial non varying
     value, we fall back to varying in the end if necessary.  */
  *mask = -1;

  switch (code)
    {
    case BIT_AND_EXPR:
      /* The mask is constant where there is a known not
	 set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2)) */
      *mask = (r1mask | r2mask) & (r1val | r1mask) & (r2val | r2mask);
      *val = r1val & r2val;
      break;

    case BIT_IOR_EXPR:
      /* The mask is constant where there is a known
	 set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)).  */
      *mask = (r1mask | r2mask)
	      .and_not (r1val.and_not (r1mask) | r2val.and_not (r2mask));
      *val = r1val | r2val;
      break;

    case BIT_XOR_EXPR:
      /* m1 | m2  */
      *mask = r1mask | r2mask;
      *val = r1val ^ r2val;
      break;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (r2mask == 0)
	{
	  widest_int shift = r2val;
	  if (shift == 0)
	    {
	      *mask = r1mask;
	      *val = r1val;
	    }
	  else
	    {
	      if (wi::neg_p (shift))
		{
		  shift = -shift;
		  if (code == RROTATE_EXPR)
		    code = LROTATE_EXPR;
		  else
		    code = RROTATE_EXPR;
		}
	      if (code == RROTATE_EXPR)
		{
		  *mask = wi::rrotate (r1mask, shift, width);
		  *val = wi::rrotate (r1val, shift, width);
		}
	      else
		{
		  *mask = wi::lrotate (r1mask, shift, width);
		  *val = wi::lrotate (r1val, shift, width);
		}
	    }
	}
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* ??? We can handle partially known shift counts if we know
	 its sign.  That way we can tell that (x << (y | 8)) & 255
	 is zero.  */
      if (r2mask == 0)
	{
	  widest_int shift = r2val;
	  if (shift == 0)
	    {
	      *mask = r1mask;
	      *val = r1val;
	    }
	  else
	    {
	      if (wi::neg_p (shift))
		{
		  shift = -shift;
		  if (code == RSHIFT_EXPR)
		    code = LSHIFT_EXPR;
		  else
		    code = RSHIFT_EXPR;
		}
	      if (code == RSHIFT_EXPR)
		{
		  *mask = wi::rshift (wi::ext (r1mask, width, sgn), shift, sgn);
		  *val = wi::rshift (wi::ext (r1val, width, sgn), shift, sgn);
		}
	      else
		{
		  *mask = wi::ext (wi::lshift (r1mask, shift), width, sgn);
		  *val = wi::ext (wi::lshift (r1val, shift), width, sgn);
		}
	    }
	}
      break;

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
      {
	/* Do the addition with unknown bits set to zero, to give carry-ins of
	   zero wherever possible.  */
	widest_int lo = r1val.and_not (r1mask) + r2val.and_not (r2mask);
	lo = wi::ext (lo, width, sgn);
	/* Do the addition with unknown bits set to one, to give carry-ins of
	   one wherever possible.  */
	widest_int hi = (r1val | r1mask) + (r2val | r2mask);
	hi = wi::ext (hi, width, sgn);
	/* Each bit in the result is known if (a) the corresponding bits in
	   both inputs are known, and (b) the carry-in to that bit position
	   is known.  We can check condition (b) by seeing if we got the same
	   result with minimised carries as with maximised carries.  */
	*mask = r1mask | r2mask | (lo ^ hi);
	*mask = wi::ext (*mask, width, sgn);
	/* It shouldn't matter whether we choose lo or hi here.  */
	*val = lo;
	break;
      }

    case MINUS_EXPR:
      {
	widest_int temv, temm;
	bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm,
			  r2type, r2val, r2mask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   r1type, r1val, r1mask,
			   r2type, temv, temm);
	break;
      }

    case MULT_EXPR:
      {
	/* Just track trailing zeros in both operands and transfer
	   them to the other.  */
	int r1tz = wi::ctz (r1val | r1mask);
	int r2tz = wi::ctz (r2val | r2mask);
	if (r1tz + r2tz >= width)
	  {
	    *mask = 0;
	    *val = 0;
	  }
	else if (r1tz + r2tz > 0)
	  {
	    *mask = wi::ext (wi::mask <widest_int> (r1tz + r2tz, true),
			     width, sgn);
	    *val = 0;
	  }
	break;
      }

    case EQ_EXPR:
    case NE_EXPR:
      {
	widest_int m = r1mask | r2mask;
	if (r1val.and_not (m) != r2val.and_not (m))
	  {
	    *mask = 0;
	    *val = ((code == EQ_EXPR) ? 0 : 1);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = 1;
	    *val = 0;
	  }
	break;
      }

    case GE_EXPR:
    case GT_EXPR:
      swap_p = true;
      code = swap_tree_comparison (code);
      /* Fall through.  */
    case LT_EXPR:
    case LE_EXPR:
      {
	int minmax, maxmin;

	const widest_int &o1val = swap_p ? r2val : r1val;
	const widest_int &o1mask = swap_p ? r2mask : r1mask;
	const widest_int &o2val = swap_p ? r1val : r2val;
	const widest_int &o2mask = swap_p ? r1mask : r2mask;

	/* If the most significant bits are not known we know nothing.  */
	if (wi::neg_p (o1mask) || wi::neg_p (o2mask))
	  break;

	/* For comparisons the signedness is in the comparison operands.  */
	sgn = TYPE_SIGN (r1type);

	/* If we know the most significant bits we know the values
	   value ranges by means of treating varying bits as zero
	   or one.  Do a cross comparison of the max/min pairs.  */
	maxmin = wi::cmp (o1val | o1mask, o2val.and_not (o2mask), sgn);
	minmax = wi::cmp (o1val.and_not (o1mask), o2val | o2mask, sgn);
	if (maxmin < 0)  /* o1 is less than o2.  */
	  {
	    *mask = 0;
	    *val = 1;
	  }
	else if (minmax > 0)  /* o1 is not less or equal to o2.  */
	  {
	    *mask = 0;
	    *val = 0;
	  }
	else if (maxmin == minmax)  /* o1 and o2 are equal.  */
	  {
	    /* This probably should never happen as we'd have
	       folded the thing during fully constant value folding.  */
	    *mask = 0;
	    *val = (code == LE_EXPR ? 1 : 0);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = 1;
	    *val = 0;
	  }
	break;
      }

    default:;
    }
}
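/* Worked PLUS_EXPR example (illustrative), 8 bits: r1 = value 4,
   mask 3 (some value in 4..7) and r2 = value 8, mask 0.  Then
   lo = 4 + 8 = 12 and hi = 7 + 8 = 15, so *mask = 3 | 0 | (12 ^ 15) = 3
   and *val = 12: the sum is known to be 0b000011XX, i.e. in 12..15.
   When the unknown bits can generate a carry, lo and hi differ in
   higher positions and the mask widens accordingly.  */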
/* Return the propagation value when applying the operation CODE to
   the value RHS yielding type TYPE.  */

static ccp_prop_value_t
bit_value_unop (enum tree_code code, tree type, tree rhs)
{
  ccp_prop_value_t rval = get_value_for_expr (rhs, true);
  widest_int value, mask;
  ccp_prop_value_t val;

  if (rval.lattice_val == UNDEFINED)
    return rval;

  gcc_assert ((rval.lattice_val == CONSTANT
	       && TREE_CODE (rval.value) == INTEGER_CST)
	      || wi::sext (rval.mask, TYPE_PRECISION (TREE_TYPE (rhs))) == -1);
  bit_value_unop_1 (code, type, &value, &mask,
		    TREE_TYPE (rhs), value_to_wide_int (rval), rval.mask);
  if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = wide_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = -1;
    }
  return val;
}
/* Return the propagation value when applying the operation CODE to
   the values RHS1 and RHS2 yielding type TYPE.  */

static ccp_prop_value_t
bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
{
  ccp_prop_value_t r1val = get_value_for_expr (rhs1, true);
  ccp_prop_value_t r2val = get_value_for_expr (rhs2, true);
  widest_int value, mask;
  ccp_prop_value_t val;

  if (r1val.lattice_val == UNDEFINED
      || r2val.lattice_val == UNDEFINED)
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = -1;
      return val;
    }

  gcc_assert ((r1val.lattice_val == CONSTANT
	       && TREE_CODE (r1val.value) == INTEGER_CST)
	      || wi::sext (r1val.mask,
			   TYPE_PRECISION (TREE_TYPE (rhs1))) == -1);
  gcc_assert ((r2val.lattice_val == CONSTANT
	       && TREE_CODE (r2val.value) == INTEGER_CST)
	      || wi::sext (r2val.mask,
			   TYPE_PRECISION (TREE_TYPE (rhs2))) == -1);
  bit_value_binop_1 (code, type, &value, &mask,
		     TREE_TYPE (rhs1), value_to_wide_int (r1val), r1val.mask,
		     TREE_TYPE (rhs2), value_to_wide_int (r2val), r2val.mask);
  if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = wide_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = -1;
    }
  return val;
}
/* Return the propagation value for __builtin_assume_aligned
   and functions with assume_aligned or alloc_aligned attribute.
   For __builtin_assume_aligned, ATTR is NULL_TREE,
   for assume_aligned attribute ATTR is non-NULL and ALLOC_ALIGNED
   is false, for alloc_aligned attribute ATTR is non-NULL and
   ALLOC_ALIGNED is true.  */

static ccp_prop_value_t
bit_value_assume_aligned (gimple stmt, tree attr, ccp_prop_value_t ptrval,
			  bool alloc_aligned)
{
  tree align, misalign = NULL_TREE, type;
  unsigned HOST_WIDE_INT aligni, misaligni = 0;
  ccp_prop_value_t alignval;
  widest_int value, mask;
  ccp_prop_value_t val;

  if (attr == NULL_TREE)
    {
      tree ptr = gimple_call_arg (stmt, 0);
      type = TREE_TYPE (ptr);
      ptrval = get_value_for_expr (ptr, true);
    }
  else
    {
      tree lhs = gimple_call_lhs (stmt);
      type = TREE_TYPE (lhs);
    }

  if (ptrval.lattice_val == UNDEFINED)
    return ptrval;
  gcc_assert ((ptrval.lattice_val == CONSTANT
	       && TREE_CODE (ptrval.value) == INTEGER_CST)
	      || wi::sext (ptrval.mask, TYPE_PRECISION (type)) == -1);
  if (attr == NULL_TREE)
    {
      /* Get aligni and misaligni from __builtin_assume_aligned.  */
      align = gimple_call_arg (stmt, 1);
      if (!tree_fits_uhwi_p (align))
	return ptrval;
      aligni = tree_to_uhwi (align);
      if (gimple_call_num_args (stmt) > 2)
	{
	  misalign = gimple_call_arg (stmt, 2);
	  if (!tree_fits_uhwi_p (misalign))
	    return ptrval;
	  misaligni = tree_to_uhwi (misalign);
	}
    }
  else
    {
      /* Get aligni and misaligni from assume_aligned or
	 alloc_align attributes.  */
      if (TREE_VALUE (attr) == NULL_TREE)
	return ptrval;
      attr = TREE_VALUE (attr);
      align = TREE_VALUE (attr);
      if (!tree_fits_uhwi_p (align))
	return ptrval;
      aligni = tree_to_uhwi (align);
      if (alloc_aligned)
	{
	  if (aligni == 0 || aligni > gimple_call_num_args (stmt))
	    return ptrval;
	  align = gimple_call_arg (stmt, aligni - 1);
	  if (!tree_fits_uhwi_p (align))
	    return ptrval;
	  aligni = tree_to_uhwi (align);
	}
      else if (TREE_CHAIN (attr) && TREE_VALUE (TREE_CHAIN (attr)))
	{
	  misalign = TREE_VALUE (TREE_CHAIN (attr));
	  if (!tree_fits_uhwi_p (misalign))
	    return ptrval;
	  misaligni = tree_to_uhwi (misalign);
	}
    }
  if (aligni <= 1 || (aligni & (aligni - 1)) != 0 || misaligni >= aligni)
    return ptrval;

  align = build_int_cst_type (type, -aligni);
  alignval = get_value_for_expr (align, true);
  bit_value_binop_1 (BIT_AND_EXPR, type, &value, &mask,
		     type, value_to_wide_int (ptrval), ptrval.mask,
		     type, value_to_wide_int (alignval), alignval.mask);
  if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      gcc_assert ((mask.to_uhwi () & (aligni - 1)) == 0);
      gcc_assert ((value.to_uhwi () & (aligni - 1)) == 0);
      value |= misaligni;
      /* ??? Delay building trees here.  */
      val.value = wide_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = -1;
    }
  return val;
}
/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static ccp_prop_value_t
evaluate_stmt (gimple stmt)
{
  ccp_prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant = false;
  unsigned int align;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
	{
	case CONSTANT:
	  fprintf (dump_file, "CONSTANT");
	  break;
	case UNDEFINED:
	  fprintf (dump_file, "UNDEFINED");
	  break;
	case VARYING:
	  fprintf (dump_file, "VARYING");
	  break;
	default:;
	}
      fprintf (dump_file, "\n");
    }

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    {
      fold_defer_overflow_warnings ();
      simplified = ccp_fold (stmt);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      fold_undefer_overflow_warnings (is_constant, stmt, 0);
      if (is_constant)
	{
	  /* The statement produced a constant value.  */
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = 0;
	}
    }
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);

	  /* Other cases cannot satisfy is_gimple_min_invariant
	     without folding.  */
	  if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
	    simplified = gimple_assign_rhs1 (stmt);
	}
      else if (code == GIMPLE_SWITCH)
	simplified = gimple_switch_index (as_a <gswitch *> (stmt));
      else
	/* These cannot satisfy is_gimple_min_invariant without folding.  */
	gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      if (is_constant)
	{
	  /* The statement produced a constant value.  */
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = 0;
	}
    }
  /* If the statement result is likely UNDEFINED, make it so.  */
  else if (likelyvalue == UNDEFINED)
    {
      val.lattice_val = UNDEFINED;
      val.value = NULL_TREE;
      val.mask = 0;
      return val;
    }

  /* Resort to simplification for bitwise tracking.  */
  if (flag_tree_bit_ccp
      && (likelyvalue == CONSTANT || is_gimple_call (stmt)
	  || (gimple_assign_single_p (stmt)
	      && gimple_assign_rhs_code (stmt) == ADDR_EXPR))
      && !is_constant)
    {
      enum gimple_code code = gimple_code (stmt);
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = -1;
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);
	  tree rhs1 = gimple_assign_rhs1 (stmt);
	  tree lhs = gimple_assign_lhs (stmt);
	  if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
	       || POINTER_TYPE_P (TREE_TYPE (lhs)))
	      && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		  || POINTER_TYPE_P (TREE_TYPE (rhs1))))
	    switch (get_gimple_rhs_class (subcode))
	      {
	      case GIMPLE_SINGLE_RHS:
		val = get_value_for_expr (rhs1, true);
		break;

	      case GIMPLE_UNARY_RHS:
		val = bit_value_unop (subcode, TREE_TYPE (lhs), rhs1);
		break;

	      case GIMPLE_BINARY_RHS:
		val = bit_value_binop (subcode, TREE_TYPE (lhs), rhs1,
				       gimple_assign_rhs2 (stmt));
		break;

	      default:;
	      }
	}
      else if (code == GIMPLE_COND)
	{
	  enum tree_code code = gimple_cond_code (stmt);
	  tree rhs1 = gimple_cond_lhs (stmt);
	  tree rhs2 = gimple_cond_rhs (stmt);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
	      || POINTER_TYPE_P (TREE_TYPE (rhs1)))
	    val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
	}
      else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_MALLOC:
	    case BUILT_IN_REALLOC:
	    case BUILT_IN_CALLOC:
	    case BUILT_IN_STRDUP:
	    case BUILT_IN_STRNDUP:
	      val.lattice_val = CONSTANT;
	      val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
	      val.mask = ~((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT
			   / BITS_PER_UNIT - 1);
	      break;

	    case BUILT_IN_ALLOCA:
	    case BUILT_IN_ALLOCA_WITH_ALIGN:
	      align = (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN
		       ? TREE_INT_CST_LOW (gimple_call_arg (stmt, 1))
		       : BIGGEST_ALIGNMENT);
	      val.lattice_val = CONSTANT;
	      val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
	      val.mask = ~((HOST_WIDE_INT) align / BITS_PER_UNIT - 1);
	      break;

	    /* These builtins return their first argument, unmodified.  */
	    case BUILT_IN_MEMCPY:
	    case BUILT_IN_MEMMOVE:
	    case BUILT_IN_MEMSET:
	    case BUILT_IN_STRCPY:
	    case BUILT_IN_STRNCPY:
	    case BUILT_IN_MEMCPY_CHK:
	    case BUILT_IN_MEMMOVE_CHK:
	    case BUILT_IN_MEMSET_CHK:
	    case BUILT_IN_STRCPY_CHK:
	    case BUILT_IN_STRNCPY_CHK:
	      val = get_value_for_expr (gimple_call_arg (stmt, 0), true);
	      break;

	    case BUILT_IN_ASSUME_ALIGNED:
	      val = bit_value_assume_aligned (stmt, NULL_TREE, val, false);
	      break;

	    case BUILT_IN_ALIGNED_ALLOC:
	      {
		tree align = get_constant_value (gimple_call_arg (stmt, 0));
		if (align
		    && tree_fits_uhwi_p (align))
		  {
		    unsigned HOST_WIDE_INT aligni = tree_to_uhwi (align);
		    if (aligni > 1
			/* align must be power-of-two */
			&& (aligni & (aligni - 1)) == 0)
		      {
			val.lattice_val = CONSTANT;
			val.value = build_int_cst (ptr_type_node, 0);
			val.mask = -aligni;
		      }
		  }
		break;
	      }

	    default:;
	    }
	}
      if (is_gimple_call (stmt) && gimple_call_lhs (stmt))
	{
	  tree fntype = gimple_call_fntype (stmt);
	  if (fntype)
	    {
	      tree attrs = lookup_attribute ("assume_aligned",
					     TYPE_ATTRIBUTES (fntype));
	      if (attrs)
		val = bit_value_assume_aligned (stmt, attrs, val, false);
	      attrs = lookup_attribute ("alloc_align",
					TYPE_ATTRIBUTES (fntype));
	      if (attrs)
		val = bit_value_assume_aligned (stmt, attrs, val, true);
	    }
	}
      is_constant = (val.lattice_val == CONSTANT);
    }

  if (flag_tree_bit_ccp
      && ((is_constant && TREE_CODE (val.value) == INTEGER_CST)
	  || !is_constant)
      && gimple_get_lhs (stmt)
      && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME)
    {
      tree lhs = gimple_get_lhs (stmt);
      wide_int nonzero_bits = get_nonzero_bits (lhs);
      if (nonzero_bits != -1)
	{
	  if (!is_constant)
	    {
	      val.lattice_val = CONSTANT;
	      val.value = build_zero_cst (TREE_TYPE (lhs));
	      val.mask = extend_mask (nonzero_bits);
	      is_constant = true;
	    }
	  else
	    {
	      if (wi::bit_and_not (val.value, nonzero_bits) != 0)
		val.value = wide_int_to_tree (TREE_TYPE (lhs),
					      nonzero_bits & val.value);
	      if (nonzero_bits == 0)
		val.mask = 0;
	      else
		val.mask = val.mask & extend_mask (nonzero_bits);
	    }
	}
    }

  if (!is_constant)
    {
      /* The statement produced a nonconstant value.  */

      /* The statement produced a copy.  */
      if (simplified && TREE_CODE (simplified) == SSA_NAME
	  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (simplified))
	{
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = -1;
	}
      /* The statement is VARYING.  */
      else
	{
	  val.lattice_val = VARYING;
	  val.value = NULL_TREE;
	  val.mask = -1;
	}
    }

  return val;
}
typedef hash_table <pointer_hash <gimple_statement_base> > gimple_htab;
/* Given a BUILT_IN_STACK_SAVE value SAVED_VAL, insert a clobber of VAR before
   each matching BUILT_IN_STACK_RESTORE.  Mark visited phis in VISITED.  */

static void
insert_clobber_before_stack_restore (tree saved_val, tree var,
				     gimple_htab **visited)
{
  gimple stmt;
  gassign *clobber_stmt;
  tree clobber;
  imm_use_iterator iter;
  gimple_stmt_iterator i;
  gimple *slot;

  FOR_EACH_IMM_USE_STMT (stmt, iter, saved_val)
    if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
      {
	clobber = build_constructor (TREE_TYPE (var),
				     NULL);
	TREE_THIS_VOLATILE (clobber) = 1;
	clobber_stmt = gimple_build_assign (var, clobber);

	i = gsi_for_stmt (stmt);
	gsi_insert_before (&i, clobber_stmt, GSI_SAME_STMT);
      }
    else if (gimple_code (stmt) == GIMPLE_PHI)
      {
	if (!*visited)
	  *visited = new gimple_htab (10);

	slot = (*visited)->find_slot (stmt, INSERT);
	if (*slot != NULL)
	  continue;

	*slot = stmt;
	insert_clobber_before_stack_restore (gimple_phi_result (stmt), var,
					     visited);
      }
    else if (gimple_assign_ssa_name_copy_p (stmt))
      insert_clobber_before_stack_restore (gimple_assign_lhs (stmt), var,
					   visited);
    else if (chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDRET))
      continue;
    else
      gcc_assert (is_gimple_debug (stmt));
}
/* Advance the iterator to the previous non-debug gimple statement in the same
   or dominating basic block.  */

static inline void
gsi_prev_dom_bb_nondebug (gimple_stmt_iterator *i)
{
  basic_block dom;

  gsi_prev_nondebug (i);
  while (gsi_end_p (*i))
    {
      dom = get_immediate_dominator (CDI_DOMINATORS, i->bb);
      if (dom == NULL || dom == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	return;

      *i = gsi_last_bb (dom);
    }
}
/* Find a BUILT_IN_STACK_SAVE dominating gsi_stmt (I), and insert
   a clobber of VAR before each matching BUILT_IN_STACK_RESTORE.

   It is possible that BUILT_IN_STACK_SAVE cannot be found in a dominator
   when a previous pass (such as DOM) duplicated it along multiple paths
   to a BB.  In that case the function gives up without inserting the
   clobbers.  */

static void
insert_clobbers_for_var (gimple_stmt_iterator i, tree var)
{
  gimple stmt;
  tree saved_val;
  gimple_htab *visited = NULL;

  for (; !gsi_end_p (i); gsi_prev_dom_bb_nondebug (&i))
    {
      stmt = gsi_stmt (i);

      if (!gimple_call_builtin_p (stmt, BUILT_IN_STACK_SAVE))
	continue;

      saved_val = gimple_call_lhs (stmt);
      if (saved_val == NULL_TREE)
	continue;

      insert_clobber_before_stack_restore (saved_val, var, &visited);
      break;
    }

  delete visited;
}
/* Detects a __builtin_alloca_with_align with constant size argument.  Declares
   fixed-size array and returns the address, if found, otherwise returns
   NULL_TREE.  */

static tree
fold_builtin_alloca_with_align (gimple stmt)
{
  unsigned HOST_WIDE_INT size, threshold, n_elem;
  tree lhs, arg, block, var, elem_type, array_type;

  /* Get lhs.  */
  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    return NULL_TREE;

  /* Detect constant argument.  */
  arg = get_constant_value (gimple_call_arg (stmt, 0));
  if (arg == NULL_TREE
      || TREE_CODE (arg) != INTEGER_CST
      || !tree_fits_uhwi_p (arg))
    return NULL_TREE;

  size = tree_to_uhwi (arg);

  /* Heuristic: don't fold large allocas.  */
  threshold = (unsigned HOST_WIDE_INT)PARAM_VALUE (PARAM_LARGE_STACK_FRAME);
  /* In case the alloca is located at function entry, it has the same lifetime
     as a declared array, so we allow a larger size.  */
  block = gimple_block (stmt);
  if (!(cfun->after_inlining
	&& TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL))
    threshold /= 10;
  if (size > threshold)
    return NULL_TREE;

  /* Declare array.  */
  elem_type = build_nonstandard_integer_type (BITS_PER_UNIT, 1);
  n_elem = size * 8 / BITS_PER_UNIT;
  array_type = build_array_type_nelts (elem_type, n_elem);
  var = create_tmp_var (array_type);
  DECL_ALIGN (var) = TREE_INT_CST_LOW (gimple_call_arg (stmt, 1));
  {
    struct ptr_info_def *pi = SSA_NAME_PTR_INFO (lhs);
    if (pi != NULL && !pi->pt.anything)
      {
	bool singleton_p;
	unsigned uid;
	singleton_p = pt_solution_singleton_p (&pi->pt, &uid);
	gcc_assert (singleton_p);
	SET_DECL_PT_UID (var, uid);
      }
  }

  /* Fold alloca to the address of the array.  */
  return fold_convert (TREE_TYPE (lhs), build_fold_addr_expr (var));
}
/* Fold the stmt at *GSI with CCP specific information that propagating
   and regular folding does not catch.  */

static bool
ccp_fold_stmt (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
        gcond *cond_stmt = as_a <gcond *> (stmt);
        ccp_prop_value_t val;
        /* Statement evaluation will handle type mismatches in constants
           more gracefully than the final propagation.  This allows us to
           fold more conditionals here.  */
        val = evaluate_stmt (stmt);
        if (val.lattice_val != CONSTANT
            || TREE_CODE (val.value) != INTEGER_CST)
          return false;

        if (dump_file)
          {
            fprintf (dump_file, "Folding predicate ");
            print_gimple_expr (dump_file, stmt, 0, 0);
            fprintf (dump_file, " to ");
            print_generic_expr (dump_file, val.value, 0);
            fprintf (dump_file, "\n");
          }

        if (integer_zerop (val.value))
          gimple_cond_make_false (cond_stmt);
        else
          gimple_cond_make_true (cond_stmt);

        return true;
      }

    case GIMPLE_CALL:
      {
        tree lhs = gimple_call_lhs (stmt);
        int flags = gimple_call_flags (stmt);
        tree val;
        tree argt;
        bool changed = false;
        unsigned i;

        /* If the call was folded into a constant make sure it goes
           away even if we cannot propagate into all uses because of
           type issues.  */
        if (lhs
            && TREE_CODE (lhs) == SSA_NAME
            && (val = get_constant_value (lhs))
            /* Don't optimize away calls that have side-effects.  */
            && (flags & (ECF_CONST|ECF_PURE)) != 0
            && (flags & ECF_LOOPING_CONST_OR_PURE) == 0)
          {
            tree new_rhs = unshare_expr (val);
            bool res;
            if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                            TREE_TYPE (new_rhs)))
              new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
            res = update_call_from_tree (gsi, new_rhs);
            gcc_assert (res);
            return true;
          }

        /* Internal calls provide no argument types, so the extra laxity
           for normal calls does not apply.  */
        if (gimple_call_internal_p (stmt))
          return false;

        /* The heuristic of fold_builtin_alloca_with_align differs before and
           after inlining, so we don't require the arg to be changed into a
           constant for folding, but just to be constant.  */
        if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
          {
            tree new_rhs = fold_builtin_alloca_with_align (stmt);
            if (new_rhs)
              {
                bool res = update_call_from_tree (gsi, new_rhs);
                tree var = TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 0);
                gcc_assert (res);
                insert_clobbers_for_var (*gsi, var);
                return true;
              }
          }

        /* Propagate into the call arguments.  Compared to replace_uses_in
           this can use the argument slot types for type verification
           instead of the current argument type.  We also can safely
           drop qualifiers here as we are dealing with constants anyway.  */
        argt = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
        for (i = 0; i < gimple_call_num_args (stmt) && argt;
             ++i, argt = TREE_CHAIN (argt))
          {
            tree arg = gimple_call_arg (stmt, i);
            if (TREE_CODE (arg) == SSA_NAME
                && (val = get_constant_value (arg))
                && useless_type_conversion_p
                     (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
                      TYPE_MAIN_VARIANT (TREE_TYPE (val))))
              {
                gimple_call_set_arg (stmt, i, unshare_expr (val));
                changed = true;
              }
          }

        return changed;
      }

    case GIMPLE_ASSIGN:
      {
        tree lhs = gimple_assign_lhs (stmt);
        tree val;

        /* If we have a load that turned out to be constant replace it
           as we cannot propagate into all uses in all cases.  */
        if (gimple_assign_single_p (stmt)
            && TREE_CODE (lhs) == SSA_NAME
            && (val = get_constant_value (lhs)))
          {
            tree rhs = unshare_expr (val);
            if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
              rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
            gimple_assign_set_rhs_from_tree (gsi, rhs);
            return true;
          }

        return false;
      }

    default:
      gcc_unreachable ();
    }
}
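
/* As an example of the GIMPLE_COND case above (names illustrative),
   once CCP has proven that x_3 is the constant 0, the conditional

     if (x_3 != 0) goto <bb 3>; else goto <bb 4>;

   is rewritten via gimple_cond_make_false into

     if (0 != 0) goto <bb 3>; else goto <bb 4>;

   and the dead true edge is later removed by CFG cleanup.  */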
/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  ccp_prop_value_t val;
  enum ssa_prop_result retval = SSA_PROP_NOT_INTERESTING;

  tree lhs = gimple_get_lhs (stmt);
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* Evaluate the statement, which could be
         either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
      val = evaluate_stmt (stmt);

      /* If STMT is an assignment to an SSA_NAME, we only have one
         value to set.  */
      if (set_lattice_value (lhs, val))
        {
          *output_p = lhs;
          if (val.lattice_val == VARYING)
            retval = SSA_PROP_VARYING;
          else
            retval = SSA_PROP_INTERESTING;
        }
    }

  return retval;
}
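
/* For instance (names illustrative), when simulating

     x_2 = y_1 + 4;

   with y_1 already known to be CONSTANT 3, evaluate_stmt computes
   CONSTANT 7 for x_2; set_lattice_value records the new value and the
   SSA_PROP_INTERESTING result makes the engine re-simulate every
   statement that uses x_2.  */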
/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  ccp_prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);
  if (val.lattice_val != CONSTANT
      || TREE_CODE (val.value) != INTEGER_CST)
    return SSA_PROP_VARYING;

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = find_taken_edge (block, val.value);
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}
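
/* E.g. (names illustrative) if the predicate of

     if (a_5 > 10) goto <bb 5>; else goto <bb 6>;

   evaluates to the integer constant 1, find_taken_edge returns the
   edge to bb 5 and only that successor is added to the simulation
   worklist; the false arm stays not executable unless some other
   path reaches it.  */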
/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* If the statement is an assignment that produces a single
         output value, evaluate its RHS to see if the lattice value of
         its output has changed.  */
      return visit_assignment (stmt, output_p);

    case GIMPLE_CALL:
      /* A value-returning call also performs an assignment.  */
      if (gimple_call_lhs (stmt) != NULL_TREE)
        return visit_assignment (stmt, output_p);
      break;

    case GIMPLE_COND:
    case GIMPLE_SWITCH:
      /* If STMT is a conditional branch, see if we can determine
         which branch will be taken.  */
      /* FIXME.  It appears that we should be able to optimize
         computed GOTOs here as well.  */
      return visit_cond_stmt (stmt, taken_edge_p);

    default:
      break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      ccp_prop_value_t v = { VARYING, NULL_TREE, -1 };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}
/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  unsigned int todo = 0;

  calculate_dominance_info (CDI_DOMINATORS);
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    todo = (TODO_cleanup_cfg | TODO_update_ssa);
  free_dominance_info (CDI_DOMINATORS);
  return todo;
}
namespace {

const pass_data pass_data_ccp =
{
  GIMPLE_PASS, /* type */
  "ccp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CCP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_address_taken, /* todo_flags_finish */
};

class pass_ccp : public gimple_opt_pass
{
public:
  pass_ccp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_ccp, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_ccp (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_ccp != 0; }
  virtual unsigned int execute (function *) { return do_ssa_ccp (); }

}; // class pass_ccp

} // anon namespace

gimple_opt_pass *
make_pass_ccp (gcc::context *ctxt)
{
  return new pass_ccp (ctxt);
}
/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */

static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee;
  gimple stmt;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
        return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
        continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee
          || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
          /* All regular builtins are ok, just obviously not alloca.  */
          || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
          || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA_WITH_ALIGN)
        return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
        goto second_stack_restore;
    }

  if (!gsi_end_p (i))
    return NULL_TREE;

  /* Allow one successor of the exit block, or zero successors.  */
  switch (EDGE_COUNT (bb->succs))
    {
    case 0:
      break;
    case 1:
      if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
        return NULL_TREE;
      break;
    default:
      return NULL_TREE;
    }

 second_stack_restore:

  /* If there's exactly one use, then zap the call to __builtin_stack_save.
     If there are multiple uses, then the last one should remove the call.
     In any case, whether the call to __builtin_stack_save can be removed
     or not is irrelevant to removing the call to __builtin_stack_restore.  */
  if (has_single_use (gimple_call_arg (call, 0)))
    {
      gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
      if (is_gimple_call (stack_save))
        {
          callee = gimple_call_fndecl (stack_save);
          if (callee
              && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
            {
              gimple_stmt_iterator stack_save_gsi;
              tree rhs;

              stack_save_gsi = gsi_for_stmt (stack_save);
              rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
              update_call_from_tree (&stack_save_gsi, rhs);
            }
        }
    }

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
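
/* For example (names illustrative), in a block of the form

     t_1 = __builtin_stack_save ();
     ...				<- no calls or asm statements
     __builtin_stack_restore (t_1);
     ...				<- no calls or asm statements
     __builtin_stack_restore (t_2);

   the first restore satisfies the conditions above (another restore
   in the same block, no calls or ASM_EXPRs in between) and is deleted;
   if t_1 has a single use, the matching __builtin_stack_save is
   zapped as well.  */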
/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
   pointer assignment.  */

static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;
  location_t loc = gimple_location (call);

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
                       && (TREE_TYPE (cfun_va_list) == void_type_node
                           || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
          || targetm.expand_builtin_va_start != NULL
          || !builtin_decl_explicit_p (BUILT_IN_NEXT_ARG))
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_NEXT_ARG),
                                 1, integer_zero_node);
      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
          != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
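
/* Concretely, on a target whose va_list is a plain char * (or void *)
   pointer:

     __builtin_va_start (&ap, 0)   ->  ap = __builtin_next_arg (0)
     __builtin_va_copy (&d, s)     ->  d = s
     __builtin_va_end (&ap)        ->  deleted

   The MODIFY_EXPR returned here is gimplified back into the statement
   stream by the caller in pass_fold_builtins.  */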
/* Attempt to make the block of __builtin_unreachable I unreachable by changing
   the incoming jumps.  Return true if at least one jump was changed.  */

static bool
optimize_unreachable (gimple_stmt_iterator i)
{
  basic_block bb = gsi_bb (i);
  gimple_stmt_iterator gsi;
  gimple stmt;
  edge_iterator ei;
  edge e;
  bool ret;

  if (flag_sanitize & SANITIZE_UNREACHABLE)
    return false;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
        continue;

      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
        {
          /* Verify we do not need to preserve the label.  */
          if (FORCED_LABEL (gimple_label_label (label_stmt)))
            return false;

          continue;
        }

      /* Only handle the case that __builtin_unreachable is the first statement
         in the block.  We rely on DCE to remove stmts without side-effects
         before __builtin_unreachable.  */
      if (gsi_stmt (gsi) != gsi_stmt (i))
        return false;
    }

  ret = false;
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      if (gsi_end_p (gsi))
        continue;

      stmt = gsi_stmt (gsi);
      if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
        {
          if (e->flags & EDGE_TRUE_VALUE)
            gimple_cond_make_false (cond_stmt);
          else if (e->flags & EDGE_FALSE_VALUE)
            gimple_cond_make_true (cond_stmt);
          else
            continue;
          update_stmt (cond_stmt);
          ret = true;
        }

      /* Todo: handle other cases, f.i. switch statement.  */
    }

  return ret;
}
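
/* For instance (block numbers illustrative), given

     <bb 2>:
     if (x_1 != 0)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 3>:
     __builtin_unreachable ();

   the true edge into bb 3 comes from a GIMPLE_COND, so the condition
   is forced to false; bb 3 then loses its only incoming jump and the
   CFG cleanup deletes it.  */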
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

namespace {

const pass_data pass_data_fold_builtins =
{
  GIMPLE_PASS, /* type */
  "fab", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_fold_builtins : public gimple_opt_pass
{
public:
  pass_fold_builtins (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fold_builtins, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fold_builtins (m_ctxt); }
  virtual unsigned int execute (function *);

}; // class pass_fold_builtins
unsigned int
pass_fold_builtins::execute (function *fun)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          gimple stmt, old_stmt;
          tree callee;
          enum built_in_function fcode;

          stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              /* Remove all *ssaname_N ={v} {CLOBBER}; stmts,
                 after the last GIMPLE DSE they aren't needed and might
                 unnecessarily keep the SSA_NAMEs live.  */
              if (gimple_clobber_p (stmt))
                {
                  tree lhs = gimple_assign_lhs (stmt);
                  if (TREE_CODE (lhs) == MEM_REF
                      && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
                    {
                      unlink_stmt_vdef (stmt);
                      gsi_remove (&i, true);
                      release_defs (stmt);
                      continue;
                    }
                }
              gsi_next (&i);
              continue;
            }

          callee = gimple_call_fndecl (stmt);
          if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            {
              gsi_next (&i);
              continue;
            }

          fcode = DECL_FUNCTION_CODE (callee);
          if (fold_stmt (&i))
            ;
          else
            {
              tree result = NULL_TREE;
              switch (DECL_FUNCTION_CODE (callee))
                {
                case BUILT_IN_CONSTANT_P:
                  /* Resolve __builtin_constant_p.  If it hasn't been
                     folded to integer_one_node by now, it's fairly
                     certain that the value simply isn't constant.  */
                  result = integer_zero_node;
                  break;

                case BUILT_IN_ASSUME_ALIGNED:
                  /* Remove __builtin_assume_aligned.  */
                  result = gimple_call_arg (stmt, 0);
                  break;

                case BUILT_IN_STACK_RESTORE:
                  result = optimize_stack_restore (i);
                  if (result)
                    break;
                  gsi_next (&i);
                  continue;

                case BUILT_IN_UNREACHABLE:
                  if (optimize_unreachable (i))
                    cfg_changed = true;
                  break;

                case BUILT_IN_VA_START:
                case BUILT_IN_VA_END:
                case BUILT_IN_VA_COPY:
                  /* These shouldn't be folded before pass_stdarg.  */
                  result = optimize_stdarg_builtin (stmt);
                  if (result)
                    break;
                  /* FALLTHRU */

                default:;
                }

              if (!result)
                {
                  gsi_next (&i);
                  continue;
                }

              if (!update_call_from_tree (&i, result))
                gimplify_and_update_call_from_tree (&i, result);
            }

          todoflags |= TODO_update_address_taken;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
            }

          old_stmt = stmt;
          stmt = gsi_stmt (i);
          update_stmt (stmt);

          if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
              && gimple_purge_dead_eh_edges (bb))
            cfg_changed = true;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
              fprintf (dump_file, "\n");
            }

          /* Retry the same statement if it changed into another
             builtin, there might be new opportunities now.  */
          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              gsi_next (&i);
              continue;
            }
          callee = gimple_call_fndecl (stmt);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
              || DECL_FUNCTION_CODE (callee) == fcode)
            gsi_next (&i);
        }
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}

} // anon namespace

gimple_opt_pass *
make_pass_fold_builtins (gcc::context *ctxt)
{
  return new pass_fold_builtins (ctxt);
}