/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000-2015 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

        UNINITIALIZED   ->  the initial state of the value.  This value
                            is replaced with a correct initial value
                            the first time the value is used, so the
                            rest of the pass does not need to care about
                            it.  Using this value simplifies initialization
                            of the pass, and prevents us from needlessly
                            scanning statements that are never reached.

        UNDEFINED       ->  V_i is a local variable whose definition
                            has not been processed yet.  Therefore we
                            don't yet know if its value is a constant
                            or not.

        CONSTANT        ->  V_i has been found to hold a constant
                            value C.

        VARYING         ->  V_i cannot take a constant value, or if it
                            does, it is not possible to determine it
                            at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.

   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
        is ignored.  This is useful in cases like this:

                        if (PRED)
                          a_9 = 3;
                        else
                          a_10 = 100;
                        a_11 = PHI (a_9, a_10)

        If PRED is known to always evaluate to false, then we can
        assume that a_11 will always take its value from a_10, meaning
        that instead of considering it VARYING (a_9 and a_10 have
        different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
        the outcome of the meet operation.  If a variable V_i has an
        UNDEFINED value, it means that either its defining statement
        hasn't been visited yet or V_i has no defining statement, in
        which case the original symbol 'V' is being used
        uninitialized.  Since 'V' is a local variable, the compiler
        may assume any initial value for it.

   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.

   This algorithm uses wide-ints at the max precision of the target.
   This means that, with one uninteresting exception, variables with
   UNSIGNED types never go to VARYING because the bits above the
   precision of the type of the variable are always zero.  The
   uninteresting case is a variable of UNSIGNED type that has the
   maximum precision of the target.  Such variables can go to VARYING,
   but this causes no loss of information since these variables will
   never be extended.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
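
/* A minimal sketch of the meet operation over the four level lattice
   described above (illustrative only; the helper below is
   hypothetical and ignores the bit masks that the real
   ccp_lattice_meet also meets):

     static ccp_lattice_t
     meet (ccp_lattice_t a, int aval, ccp_lattice_t b, int bval)
     {
       if (a == UNDEFINED)                  // any M UNDEFINED = any
         return b;
       if (b == UNDEFINED)
         return a;
       if (a == VARYING || b == VARYING)    // any M VARYING = VARYING
         return VARYING;
       return aval == bval ? CONSTANT : VARYING;   // Ci M Cj
     }
*/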
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-set.h"
#include "tree.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "hash-table.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "diagnostic-core.h"
#include "dbgcnt.h"
#include "params.h"
#include "wide-int-print.h"
#include "builtins.h"
#include "tree-chkp.h"
/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

struct ccp_prop_value_t {
    /* Lattice value.  */
    ccp_lattice_t lattice_val;

    /* Propagated value.  */
    tree value;

    /* Mask that applies to the propagated value during CCP.  For X
       with a CONSTANT lattice value X & ~mask == value & ~mask.  The
       zero bits in the mask cover constant values.  The ones mean no
       information.  */
    widest_int mask;
};
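
/* Example of the value/mask encoding (illustrative only): an 8-bit X
   known to be even but otherwise unknown is CONSTANT with
   value == 0 and mask == 0xfe -- bit 0 is known (and taken from
   VALUE) while bits 1-7 carry no information.  Any X satisfying
   X & ~mask == value & ~mask, i.e. X & 1 == 0, is compatible.  */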
/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static ccp_prop_value_t *const_val;
static unsigned n_const_val;
static void canonicalize_value (ccp_prop_value_t *);
static bool ccp_fold_stmt (gimple_stmt_iterator *);
static void ccp_lattice_meet (ccp_prop_value_t *, ccp_prop_value_t *);
/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, ccp_prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      if (TREE_CODE (val.value) != INTEGER_CST
          || val.mask == 0)
        {
          fprintf (outf, "%sCONSTANT ", prefix);
          print_generic_expr (outf, val.value, dump_flags);
        }
      else
        {
          widest_int cval = wi::bit_and_not (wi::to_widest (val.value),
                                             val.mask);
          fprintf (outf, "%sCONSTANT ", prefix);
          print_hex (cval, outf);
          fprintf (outf, " (");
          print_hex (val.mask, outf);
          fprintf (outf, ")");
        }
      break;
    default:
      gcc_unreachable ();
    }
}
/* Print lattice value VAL to stderr.  */

void debug_lattice_value (ccp_prop_value_t val);

DEBUG_FUNCTION void
debug_lattice_value (ccp_prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}
/* Extend NONZERO_BITS to a full mask, with the upper bits being set.  */

static widest_int
extend_mask (const wide_int &nonzero_bits)
{
  return (wi::mask <widest_int> (wi::get_precision (nonzero_bits), true)
          | widest_int::from (nonzero_bits, UNSIGNED));
}
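
/* Worked example (illustrative only): if get_nonzero_bits reports
   0x0f for a 32-bit SSA name -- only the low four bits may be
   nonzero -- then extend_mask returns a widest_int whose low four
   bits are 1 (unknown), bits 4..31 are 0 (known zero), and all bits
   above the 32-bit precision are 1, i.e. carry no information.
   Together with value == 0 this encodes "some value in [0, 15]".  */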
/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static ccp_prop_value_t
get_default_value (tree var)
{
  ccp_prop_value_t val = { UNINITIALIZED, NULL_TREE, 0 };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
         before being initialized.  If VAR is a local variable, we
         can assume initially that it is UNDEFINED, otherwise we must
         consider it VARYING.  */
      if (!virtual_operand_p (var)
          && TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        val.lattice_val = UNDEFINED;
      else
        {
          val.lattice_val = VARYING;
          val.mask = -1;
          if (flag_tree_bit_ccp)
            {
              wide_int nonzero_bits = get_nonzero_bits (var);
              if (nonzero_bits != -1)
                {
                  val.lattice_val = CONSTANT;
                  val.value = build_zero_cst (TREE_TYPE (var));
                  val.mask = extend_mask (nonzero_bits);
                }
            }
        }
    }
  else if (is_gimple_assign (stmt))
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
          && DECL_P (gimple_assign_rhs1 (stmt))
          && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
        {
          val.lattice_val = CONSTANT;
          val.value = cst;
        }
      else
        {
          /* Any other variable defined by an assignment is considered
             UNDEFINED.  */
          val.lattice_val = UNDEFINED;
        }
    }
  else if ((is_gimple_call (stmt)
            && gimple_call_lhs (stmt) != NULL_TREE)
           || gimple_code (stmt) == GIMPLE_PHI)
    {
      /* A variable defined by a call or a PHI node is considered
         UNDEFINED.  */
      val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
      val.mask = -1;
    }

  return val;
}
/* Get the constant value associated with variable VAR.  */

static inline ccp_prop_value_t *
get_value (tree var)
{
  ccp_prop_value_t *val;

  if (const_val == NULL
      || SSA_NAME_VERSION (var) >= n_const_val)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_value (val);

  return val;
}
/* Return the constant tree value associated with VAR.  */

static inline tree
get_constant_value (tree var)
{
  ccp_prop_value_t *val;
  if (TREE_CODE (var) != SSA_NAME)
    {
      if (is_gimple_min_invariant (var))
        return var;
      return NULL_TREE;
    }
  val = get_value (var);
  if (val
      && val->lattice_val == CONSTANT
      && (TREE_CODE (val->value) != INTEGER_CST
          || val->mask == 0))
    return val->value;
  return NULL_TREE;
}
/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  ccp_prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mask = -1;
}
/* For integer constants, make sure to drop TREE_OVERFLOW.  */

static void
canonicalize_value (ccp_prop_value_t *val)
{
  if (val->lattice_val != CONSTANT)
    return;

  if (TREE_OVERFLOW_P (val->value))
    val->value = drop_tree_overflow (val->value);
}
/* Return whether the lattice transition is valid.  */

static bool
valid_lattice_transition (ccp_prop_value_t old_val, ccp_prop_value_t new_val)
{
  /* Lattice transitions must always be monotonically increasing in
     value.  */
  if (old_val.lattice_val < new_val.lattice_val)
    return true;

  if (old_val.lattice_val != new_val.lattice_val)
    return false;

  if (!old_val.value && !new_val.value)
    return true;

  /* Now both lattice values are CONSTANT.  */

  /* Allow arbitrary copy changes as we might look through PHI <a_1, ...>
     when only a single copy edge is executable.  */
  if (TREE_CODE (old_val.value) == SSA_NAME
      && TREE_CODE (new_val.value) == SSA_NAME)
    return true;

  /* Allow transitioning from a constant to a copy.  */
  if (is_gimple_min_invariant (old_val.value)
      && TREE_CODE (new_val.value) == SSA_NAME)
    return true;

  /* Allow transitioning from PHI <&x, not executable> == &x
     to PHI <&x, &y> == common alignment.  */
  if (TREE_CODE (old_val.value) != INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return true;

  /* Bit-lattices have to agree in the still valid bits.  */
  if (TREE_CODE (old_val.value) == INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return (wi::bit_and_not (wi::to_widest (old_val.value), new_val.mask)
            == wi::bit_and_not (wi::to_widest (new_val.value), new_val.mask));

  /* Otherwise constant values have to agree.  */
  if (operand_equal_p (old_val.value, new_val.value, 0))
    return true;

  /* At least the kinds and types should agree now.  */
  if (TREE_CODE (old_val.value) != TREE_CODE (new_val.value)
      || !types_compatible_p (TREE_TYPE (old_val.value),
                              TREE_TYPE (new_val.value)))
    return false;

  /* For floats and !HONOR_NANS allow transitions from (partial) NaN
     to non-NaN.  */
  tree type = TREE_TYPE (new_val.value);
  if (SCALAR_FLOAT_TYPE_P (type)
      && !HONOR_NANS (type))
    {
      if (REAL_VALUE_ISNAN (TREE_REAL_CST (old_val.value)))
        return true;
    }
  else if (VECTOR_FLOAT_TYPE_P (type)
           && !HONOR_NANS (type))
    {
      for (unsigned i = 0; i < VECTOR_CST_NELTS (old_val.value); ++i)
        if (!REAL_VALUE_ISNAN
               (TREE_REAL_CST (VECTOR_CST_ELT (old_val.value, i)))
            && !operand_equal_p (VECTOR_CST_ELT (old_val.value, i),
                                 VECTOR_CST_ELT (new_val.value, i), 0))
          return false;
      return true;
    }
  else if (COMPLEX_FLOAT_TYPE_P (type)
           && !HONOR_NANS (type))
    {
      if (!REAL_VALUE_ISNAN (TREE_REAL_CST (TREE_REALPART (old_val.value)))
          && !operand_equal_p (TREE_REALPART (old_val.value),
                               TREE_REALPART (new_val.value), 0))
        return false;
      if (!REAL_VALUE_ISNAN (TREE_REAL_CST (TREE_IMAGPART (old_val.value)))
          && !operand_equal_p (TREE_IMAGPART (old_val.value),
                               TREE_IMAGPART (new_val.value), 0))
        return false;
      return true;
    }
  return false;
}
/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, ccp_prop_value_t *new_val)
{
  /* We can deal with old UNINITIALIZED values just fine here.  */
  ccp_prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];

  canonicalize_value (new_val);

  /* We have to be careful to not go up the bitwise lattice
     represented by the mask.  Instead of dropping to VARYING
     use the meet operator to retain a conservative value.
     Missed optimizations like PR65851 make this necessary.
     It also ensures we converge to a stable lattice solution.  */
  if (new_val->lattice_val == CONSTANT
      && old_val->lattice_val == CONSTANT
      && TREE_CODE (new_val->value) != SSA_NAME)
    ccp_lattice_meet (new_val, old_val);

  gcc_checking_assert (valid_lattice_transition (*old_val, *new_val));

  /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
     caller that this was a non-transition.  */
  if (old_val->lattice_val != new_val->lattice_val
      || (new_val->lattice_val == CONSTANT
          && (TREE_CODE (new_val->value) != TREE_CODE (old_val->value)
              || (TREE_CODE (new_val->value) == INTEGER_CST
                  && (new_val->mask != old_val->mask
                      || (wi::bit_and_not (wi::to_widest (old_val->value),
                                           new_val->mask)
                          != wi::bit_and_not (wi::to_widest (new_val->value),
                                              new_val->mask))))
              || (TREE_CODE (new_val->value) != INTEGER_CST
                  && !operand_equal_p (new_val->value, old_val->value, 0)))))
    {
      /* ??? We would like to delay creation of INTEGER_CSTs from
         partially constants here.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          dump_lattice_value (dump_file, "Lattice value changed to ", *new_val);
          fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
        }

      *old_val = *new_val;

      gcc_assert (new_val->lattice_val != UNINITIALIZED);
      return true;
    }

  return false;
}
static ccp_prop_value_t get_value_for_expr (tree, bool);
static ccp_prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
static void bit_value_binop_1 (enum tree_code, tree, widest_int *, widest_int *,
                               tree, const widest_int &, const widest_int &,
                               tree, const widest_int &, const widest_int &);
/* Return a widest_int that can be used for bitwise simplifications
   from VAL.  */

static widest_int
value_to_wide_int (ccp_prop_value_t val)
{
  if (val.value
      && TREE_CODE (val.value) == INTEGER_CST)
    return wi::to_widest (val.value);

  return 0;
}
/* Return the value for the address expression EXPR based on alignment
   information.  */

static ccp_prop_value_t
get_value_from_alignment (tree expr)
{
  tree type = TREE_TYPE (expr);
  ccp_prop_value_t val;
  unsigned HOST_WIDE_INT bitpos;
  unsigned int align;

  gcc_assert (TREE_CODE (expr) == ADDR_EXPR);

  get_pointer_alignment_1 (expr, &align, &bitpos);
  val.mask = (POINTER_TYPE_P (type) || TYPE_UNSIGNED (type)
              ? wi::mask <widest_int> (TYPE_PRECISION (type), false)
              : -1).and_not (align / BITS_PER_UNIT - 1);
  val.lattice_val
    = wi::sext (val.mask, TYPE_PRECISION (type)) == -1 ? VARYING : CONSTANT;
  if (val.lattice_val == CONSTANT)
    val.value = build_int_cstu (type, bitpos / BITS_PER_UNIT);
  else
    val.value = NULL_TREE;

  return val;
}
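
/* Worked example (illustrative only): for EXPR == &a where 'a' is
   known to be 8-byte aligned and bitpos is 0, the low three bits of
   the address are known to be zero.  The resulting lattice value is
   CONSTANT with value == 0 and a mask whose low three bits are
   clear, i.e. "some address that is a multiple of 8".  */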
/* Return the value for the tree operand EXPR.  If FOR_BITS_P is true
   return constant bits extracted from alignment information for
   invariant addresses.  */

static ccp_prop_value_t
get_value_for_expr (tree expr, bool for_bits_p)
{
  ccp_prop_value_t val;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      val = *get_value (expr);
      if (for_bits_p
          && val.lattice_val == CONSTANT
          && TREE_CODE (val.value) == ADDR_EXPR)
        val = get_value_from_alignment (val.value);
      /* Fall back to a copy value.  */
      if (!for_bits_p
          && val.lattice_val == VARYING
          && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (expr))
        {
          val.lattice_val = CONSTANT;
          val.value = expr;
          val.mask = -1;
        }
    }
  else if (is_gimple_min_invariant (expr)
           && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
    {
      val.lattice_val = CONSTANT;
      val.value = expr;
      val.mask = 0;
      canonicalize_value (&val);
    }
  else if (TREE_CODE (expr) == ADDR_EXPR)
    val = get_value_from_alignment (expr);
  else
    {
      val.lattice_val = VARYING;
      val.mask = -1;
      val.value = NULL_TREE;
    }
  return val;
}
/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  bool has_nsa_operand;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
              || code == GIMPLE_CALL
              || code == GIMPLE_COND
              || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  has_nsa_operand = false;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      ccp_prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
        has_undefined_operand = true;
      else
        all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
        has_constant_operand = true;

      if (SSA_NAME_IS_DEFAULT_DEF (use)
          || !prop_simulate_again_p (SSA_NAME_DEF_STMT (use)))
        has_nsa_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
        continue;
      if (is_gimple_min_invariant (op))
        has_constant_operand = true;
    }

  if (has_constant_operand)
    all_undefined_operands = false;

  if (has_undefined_operand
      && code == GIMPLE_CALL
      && gimple_call_internal_p (stmt))
    switch (gimple_call_internal_fn (stmt))
      {
        /* These 3 builtins use the first argument just as a magic
           way how to find out a decl uid.  */
      case IFN_GOMP_SIMD_LANE:
      case IFN_GOMP_SIMD_VF:
      case IFN_GOMP_SIMD_LAST_LANE:
        has_undefined_operand = false;
        break;
      default:
        break;
      }

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
        {
        /* Unary operators are handled with all_undefined_operands.  */
        case PLUS_EXPR:
        case MINUS_EXPR:
        case POINTER_PLUS_EXPR:
          /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
             Not bitwise operators, one VARYING operand may specify the
             result completely.  Not logical operators for the same reason.
             Not COMPLEX_EXPR as one VARYING operand makes the result partly
             not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
             the undefined operand may be promoted.  */
          return UNDEFINED;

        case ADDR_EXPR:
          /* If any part of an address is UNDEFINED, like the index
             of an ARRAY_EXPR, then treat the result as UNDEFINED.  */
          return UNDEFINED;

        default:
          break;
        }
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to CONSTANT.  During iteration UNDEFINED may still drop
     to CONSTANT.  */
  if (has_undefined_operand)
    return CONSTANT;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  Also we can combine the stmt with definitions from
     operands whose definitions are not simulated again.  */
  if (has_constant_operand
      || has_nsa_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}
/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call and does not return a value or is not a
     builtin and not an indirect call or a call to function with
     assume_aligned/alloc_align attribute, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl, fntype = gimple_call_fntype (stmt);
      if (!gimple_call_lhs (stmt)
          || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
              && !DECL_BUILT_IN (fndecl)
              && !lookup_attribute ("assume_aligned",
                                    TYPE_ATTRIBUTES (fntype))
              && !lookup_attribute ("alloc_align",
                                    TYPE_ATTRIBUTES (fntype))))
        return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}
/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  n_const_val = num_ssa_names;
  const_val = XCNEWVEC (ccp_prop_value_t, n_const_val);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);
          bool is_varying;

          /* If the statement is a control insn, then we do not
             want to avoid simulating the statement once.  Failure
             to do so means that those edges will never get added.  */
          if (stmt_ends_bb_p (stmt))
            is_varying = false;
          else
            is_varying = surely_varying_stmt_p (stmt);

          if (is_varying)
            {
              tree def;
              ssa_op_iter iter;

              /* If the statement will not produce a constant, mark
                 all its outputs VARYING.  */
              FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
                set_value_varying (def);
            }
          prop_set_simulate_again (stmt, !is_varying);
        }
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gphi_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gphi *phi = i.phi ();

          if (virtual_operand_p (gimple_phi_result (phi)))
            prop_set_simulate_again (phi, false);
          else
            prop_set_simulate_again (phi, true);
        }
    }
}

/* Debug count support.  Reset the values of ssa names
   VARYING when the total number of ssa names analyzed is
   beyond the debug count specified.  */

static void
do_dbg_cnt (void)
{
  unsigned i;

  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
        {
          const_val[i].lattice_val = VARYING;
          const_val[i].mask = -1;
          const_val[i].value = NULL_TREE;
        }
    }
}
/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;
  unsigned i;

  do_dbg_cnt ();

  /* Derive alignment and misalignment information from partially
     constant pointers in the lattice or nonzero bits from partially
     constant integers.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      ccp_prop_value_t *val;
      unsigned int tem, align;

      if (!name
          || (!POINTER_TYPE_P (TREE_TYPE (name))
              && (!INTEGRAL_TYPE_P (TREE_TYPE (name))
                  /* Don't record nonzero bits before IPA to avoid
                     using too much memory.  */
                  || first_pass_instance)))
        continue;

      val = get_value (name);
      if (val->lattice_val != CONSTANT
          || TREE_CODE (val->value) != INTEGER_CST)
        continue;

      if (POINTER_TYPE_P (TREE_TYPE (name)))
        {
          /* Trailing mask bits specify the alignment, trailing value
             bits the misalignment.  */
          tem = val->mask.to_uhwi ();
          align = (tem & -tem);
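          /* Worked example (illustrative only): with mask 0xfffffff8
             the lowest set bit of TEM is bit 3, so TEM & -TEM
             isolates it and ALIGN becomes 8; the low bits of the
             value then give the misalignment modulo that alignment.  */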
          if (align > 1)
            set_ptr_info_alignment (get_ptr_info (name), align,
                                    (TREE_INT_CST_LOW (val->value)
                                     & (align - 1)));
        }
      else
        {
          unsigned int precision = TYPE_PRECISION (TREE_TYPE (val->value));
          wide_int nonzero_bits = wide_int::from (val->mask, precision,
                                                  UNSIGNED) | val->value;
          nonzero_bits &= get_nonzero_bits (name);
          set_nonzero_bits (name, nonzero_bits);
        }
    }

  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (get_constant_value,
                                           ccp_fold_stmt, true);

  free (const_val);
  const_val = NULL;

  return something_changed;
}
/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

                any  M UNDEFINED   = any
                any  M VARYING     = VARYING
                Ci   M Cj          = Ci         if (i == j)
                Ci   M Cj          = VARYING    if (i != j)
   */

static void
ccp_lattice_meet (ccp_prop_value_t *val1, ccp_prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED
      /* For UNDEFINED M SSA we can't always SSA because its definition
         may not dominate the PHI node.  Doing optimistic copy propagation
         also causes a lot of gcc.dg/uninit-pred*.c FAILs.  */
      && (val2->lattice_val != CONSTANT
          || TREE_CODE (val2->value) != SSA_NAME))
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED
           /* See above.  */
           && (val1->lattice_val != CONSTANT
               || TREE_CODE (val1->value) != SSA_NAME))
    {
      /* any M UNDEFINED = any
         Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
           || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = -1;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
           && val2->lattice_val == CONSTANT
           && TREE_CODE (val1->value) == INTEGER_CST
           && TREE_CODE (val2->value) == INTEGER_CST)
    {
      /* Ci M Cj = Ci           if (i == j)
         Ci M Cj = VARYING      if (i != j)

         For INTEGER_CSTs mask unequal bits.  If no equal bits remain,
         drop to varying.  */
      val1->mask = (val1->mask | val2->mask
                    | (wi::to_widest (val1->value)
                       ^ wi::to_widest (val2->value)));
      if (wi::sext (val1->mask, TYPE_PRECISION (TREE_TYPE (val1->value))) == -1)
        {
          val1->lattice_val = VARYING;
          val1->value = NULL_TREE;
        }
    }
  else if (val1->lattice_val == CONSTANT
           && val2->lattice_val == CONSTANT
           && operand_equal_p (val1->value, val2->value, 0))
    {
      /* Ci M Cj = Ci           if (i == j)
         Ci M Cj = VARYING      if (i != j)

         VAL1 already contains the value we want for equivalent values.  */
    }
  else if (val1->lattice_val == CONSTANT
           && val2->lattice_val == CONSTANT
           && (TREE_CODE (val1->value) == ADDR_EXPR
               || TREE_CODE (val2->value) == ADDR_EXPR))
    {
      /* When not equal addresses are involved try meeting for
         alignment.  */
      ccp_prop_value_t tem = *val2;
      if (TREE_CODE (val1->value) == ADDR_EXPR)
        *val1 = get_value_for_expr (val1->value, true);
      if (TREE_CODE (val2->value) == ADDR_EXPR)
        tem = get_value_for_expr (val2->value, true);
      ccp_lattice_meet (val1, &tem);
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = -1;
      val1->value = NULL_TREE;
    }
}
/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gphi *phi)
{
  unsigned i;
  ccp_prop_value_t new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  new_val.lattice_val = UNDEFINED;
  new_val.value = NULL_TREE;
  new_val.mask = 0;

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
         through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file,
                   "\n    Argument #%d (%d -> %d %sexecutable)\n",
                   i, e->src->index, e->dest->index,
                   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
        }

      /* If the incoming edge is executable, compute the meet operator for
         the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
        {
          tree arg = gimple_phi_arg (phi, i)->def;
          ccp_prop_value_t arg_val = get_value_for_expr (arg, false);

          ccp_lattice_meet (&new_val, &arg_val);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "\t");
              print_generic_expr (dump_file, arg, dump_flags);
              dump_lattice_value (dump_file, "\tValue: ", arg_val);
              fprintf (dump_file, "\n");
            }

          if (new_val.lattice_val == VARYING)
            break;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), &new_val))
    {
      if (new_val.lattice_val == VARYING)
        return SSA_PROP_VARYING;
      else
        return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}
/* Return the constant value for OP or OP otherwise.  */

static tree
valueize_op (tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    {
      tree tem = get_constant_value (op);
      if (tem)
        return tem;
    }
  return op;
}
/* Return the constant value for OP, but signal to not follow SSA
   edges if the definition may be simulated again.  */

static tree
valueize_op_1 (tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    {
      /* If the definition may be simulated again we cannot follow
         this SSA edge as the SSA propagator does not necessarily
         re-visit the use.  */
      gimple def_stmt = SSA_NAME_DEF_STMT (op);
      if (!gimple_nop_p (def_stmt)
          && prop_simulate_again_p (def_stmt))
        return NULL_TREE;
      tree tem = get_constant_value (op);
      if (tem)
        return tem;
    }
  return op;
}
/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
        /* Handle comparison operators that can appear in GIMPLE form.  */
        tree op0 = valueize_op (gimple_cond_lhs (stmt));
        tree op1 = valueize_op (gimple_cond_rhs (stmt));
        enum tree_code code = gimple_cond_code (stmt);
        return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
        /* Return the constant switch index.  */
        return valueize_op (gimple_switch_index (as_a <gswitch *> (stmt)));
      }

    case GIMPLE_ASSIGN:
    case GIMPLE_CALL:
      return gimple_fold_stmt_to_constant_1 (stmt,
                                             valueize_op, valueize_op_1);

    default:
      gcc_unreachable ();
    }
}
/* Apply the operation CODE in type TYPE to the value, mask pair
   RVAL and RMASK representing a value of type RTYPE and set
   the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_unop_1 (enum tree_code code, tree type,
                  widest_int *val, widest_int *mask,
                  tree rtype, const widest_int &rval, const widest_int &rmask)
{
  switch (code)
    {
    case BIT_NOT_EXPR:
      *mask = rmask;
      *val = ~rval;
      break;

    case NEGATE_EXPR:
      {
        widest_int temv, temm;
        /* Return ~rval + 1.  */
        bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask);
        bit_value_binop_1 (PLUS_EXPR, type, val, mask,
                           type, temv, temm, type, 1, 0);
        break;
      }

    CASE_CONVERT:
      {
        signop sgn;

        /* First extend mask and value according to the original type.  */
        sgn = TYPE_SIGN (rtype);
        *mask = wi::ext (rmask, TYPE_PRECISION (rtype), sgn);
        *val = wi::ext (rval, TYPE_PRECISION (rtype), sgn);

        /* Then extend mask and value according to the target type.  */
        sgn = TYPE_SIGN (type);
        *mask = wi::ext (*mask, TYPE_PRECISION (type), sgn);
        *val = wi::ext (*val, TYPE_PRECISION (type), sgn);
        break;
      }

    default:
      *mask = -1;
      break;
    }
}
/* Apply the operation CODE in type TYPE to the value, mask pairs
   R1VAL, R1MASK and R2VAL, R2MASK representing values of types R1TYPE
   and R2TYPE and set the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_binop_1 (enum tree_code code, tree type,
                   widest_int *val, widest_int *mask,
                   tree r1type, const widest_int &r1val,
                   const widest_int &r1mask, tree r2type,
                   const widest_int &r2val, const widest_int &r2mask)
{
  signop sgn = TYPE_SIGN (type);
  int width = TYPE_PRECISION (type);
  bool swap_p = false;

  /* Assume we'll get a constant result.  Use an initial non varying
     value, we fall back to varying in the end if necessary.  */
  *mask = -1;

  switch (code)
    {
    case BIT_AND_EXPR:
      /* The mask is constant where there is a known not
         set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2))  */
      *mask = (r1mask | r2mask) & (r1val | r1mask) & (r2val | r2mask);
      *val = r1val & r2val;
      break;

    case BIT_IOR_EXPR:
      /* The mask is constant where there is a known
         set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)).  */
      *mask = (r1mask | r2mask)
              .and_not (r1val.and_not (r1mask) | r2val.and_not (r2mask));
      *val = r1val | r2val;
      break;
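
      /* Worked example for BIT_IOR_EXPR (illustrative only): OR-ing
         8-bit "xxxx0001" (value 1, mask 0xf0) with the constant
         00001100 (value 12, mask 0) gives value 13 and mask 0xf0 --
         a bit known set in either operand is known set in the
         result, no matter what the unknown bits turn out to be.  */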
    case BIT_XOR_EXPR:
      /* m1 | m2  */
      *mask = r1mask | r2mask;
      *val = r1val ^ r2val;
      break;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (r2mask == 0)
        {
          widest_int shift = r2val;
          if (shift == 0)
            {
              *mask = r1mask;
              *val = r1val;
            }
          else
            {
              if (wi::neg_p (shift))
                {
                  shift = -shift;
                  if (code == RROTATE_EXPR)
                    code = LROTATE_EXPR;
                  else
                    code = RROTATE_EXPR;
                }
              if (code == RROTATE_EXPR)
                {
                  *mask = wi::rrotate (r1mask, shift, width);
                  *val = wi::rrotate (r1val, shift, width);
                }
              else
                {
                  *mask = wi::lrotate (r1mask, shift, width);
                  *val = wi::lrotate (r1val, shift, width);
                }
            }
        }
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* ??? We can handle partially known shift counts if we know
         its sign.  That way we can tell that (x << (y | 8)) & 255
         is zero.  */
      if (r2mask == 0)
        {
          widest_int shift = r2val;
          if (shift == 0)
            {
              *mask = r1mask;
              *val = r1val;
            }
          else
            {
              if (wi::neg_p (shift))
                {
                  shift = -shift;
                  if (code == RSHIFT_EXPR)
                    code = LSHIFT_EXPR;
                  else
                    code = RSHIFT_EXPR;
                }
              if (code == RSHIFT_EXPR)
                {
                  *mask = wi::rshift (wi::ext (r1mask, width, sgn), shift, sgn);
                  *val = wi::rshift (wi::ext (r1val, width, sgn), shift, sgn);
                }
              else
                {
                  *mask = wi::ext (wi::lshift (r1mask, shift), width, sgn);
                  *val = wi::ext (wi::lshift (r1val, shift), width, sgn);
                }
            }
        }
      break;

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
      {
        /* Do the addition with unknown bits set to zero, to give carry-ins of
           zero wherever possible.  */
        widest_int lo = r1val.and_not (r1mask) + r2val.and_not (r2mask);
        lo = wi::ext (lo, width, sgn);
        /* Do the addition with unknown bits set to one, to give carry-ins of
           one wherever possible.  */
        widest_int hi = (r1val | r1mask) + (r2val | r2mask);
        hi = wi::ext (hi, width, sgn);
        /* Each bit in the result is known if (a) the corresponding bits in
           both inputs are known, and (b) the carry-in to that bit position
           is known.  We can check condition (b) by seeing if we got the same
           result with minimised carries as with maximised carries.  */
        *mask = r1mask | r2mask | (lo ^ hi);
        *mask = wi::ext (*mask, width, sgn);
        /* It shouldn't matter whether we choose lo or hi here.  */
        *val = lo;
        break;
      }
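
      /* Worked example for the carry check above (illustrative
         only): adding "000x" (value 0, mask 1) to the constant 1
         gives lo = 0 + 1 = 1 and hi = 1 + 1 = 2, so
         mask = 1 | (1 ^ 2) = 3: the unknown low bit may produce an
         unknown carry into bit 1, and only bits 2 and up of the sum
         are known (here, known zero).  */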
    case MINUS_EXPR:
      {
        widest_int temv, temm;
        bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm,
                          r2type, r2val, r2mask);
        bit_value_binop_1 (PLUS_EXPR, type, val, mask,
                           r1type, r1val, r1mask,
                           r2type, temv, temm);
        break;
      }

    case MULT_EXPR:
      {
        /* Just track trailing zeros in both operands and transfer
           them to the other.  */
        int r1tz = wi::ctz (r1val | r1mask);
        int r2tz = wi::ctz (r2val | r2mask);
        if (r1tz + r2tz >= width)
          {
            *mask = 0;
            *val = 0;
          }
        else if (r1tz + r2tz > 0)
          {
            *mask = wi::ext (wi::mask <widest_int> (r1tz + r2tz, true),
                             width, sgn);
            *val = 0;
          }
        break;
      }

    case EQ_EXPR:
    case NE_EXPR:
      {
        widest_int m = r1mask | r2mask;
        if (r1val.and_not (m) != r2val.and_not (m))
          {
            *mask = 0;
            *val = ((code == EQ_EXPR) ? 0 : 1);
          }
        else
          {
            /* We know the result of a comparison is always one or zero.  */
            *mask = 1;
            *val = 0;
          }
        break;
      }

    case GE_EXPR:
    case GT_EXPR:
      swap_p = true;
      code = swap_tree_comparison (code);
      /* Fall through.  */
    case LT_EXPR:
    case LE_EXPR:
      {
        int minmax, maxmin;

        const widest_int &o1val = swap_p ? r2val : r1val;
        const widest_int &o1mask = swap_p ? r2mask : r1mask;
        const widest_int &o2val = swap_p ? r1val : r2val;
        const widest_int &o2mask = swap_p ? r1mask : r2mask;

        /* If the most significant bits are not known we know nothing.  */
        if (wi::neg_p (o1mask) || wi::neg_p (o2mask))
          break;

        /* For comparisons the signedness is in the comparison operands.  */
        sgn = TYPE_SIGN (r1type);

        /* If we know the most significant bits we know the values
           value ranges by means of treating varying bits as zero
           or one.  Do a cross comparison of the max/min pairs.  */
        maxmin = wi::cmp (o1val | o1mask, o2val.and_not (o2mask), sgn);
        minmax = wi::cmp (o1val.and_not (o1mask), o2val | o2mask, sgn);
        if (maxmin < 0)  /* o1 is less than o2.  */
          {
            *mask = 0;
            *val = 1;
          }
        else if (minmax > 0)  /* o1 is not less or equal to o2.  */
          {
            *mask = 0;
            *val = 0;
          }
        else if (maxmin == minmax)  /* o1 and o2 are equal.  */
          {
            /* This probably should never happen as we'd have
               folded the thing during fully constant value folding.  */
            *mask = 0;
            *val = (code == LE_EXPR ? 1 : 0);
          }
        else
          {
            /* We know the result of a comparison is always one or zero.  */
            *mask = 1;
            *val = 0;
          }
        break;
      }

    default:;
    }
}
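
/* Worked example for the cross comparison above (illustrative only):
   comparing an unsigned operand with value 0 and mask 7 (some value
   in [0, 7]) against the constant 8, the "largest o1 vs. smallest
   o2" comparison gives 7 < 8, so MAXMIN < 0 and the pass proves
   o1 < o2 even though the low three bits of o1 are unknown.  */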
/* Return the propagation value when applying the operation CODE to
   the value RHS yielding type TYPE.  */

static ccp_prop_value_t
bit_value_unop (enum tree_code code, tree type, tree rhs)
{
  ccp_prop_value_t rval = get_value_for_expr (rhs, true);
  widest_int value, mask;
  ccp_prop_value_t val;

  if (rval.lattice_val == UNDEFINED)
    return rval;

  gcc_assert ((rval.lattice_val == CONSTANT
               && TREE_CODE (rval.value) == INTEGER_CST)
              || wi::sext (rval.mask, TYPE_PRECISION (TREE_TYPE (rhs))) == -1);
  bit_value_unop_1 (code, type, &value, &mask,
                    TREE_TYPE (rhs), value_to_wide_int (rval), rval.mask);
  if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = wide_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = -1;
    }
  return val;
}
/* Return the propagation value when applying the operation CODE to
   the values RHS1 and RHS2 yielding type TYPE.  */

static ccp_prop_value_t
bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
{
  ccp_prop_value_t r1val = get_value_for_expr (rhs1, true);
  ccp_prop_value_t r2val = get_value_for_expr (rhs2, true);
  widest_int value, mask;
  ccp_prop_value_t val;

  if (r1val.lattice_val == UNDEFINED
      || r2val.lattice_val == UNDEFINED)
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = -1;
      return val;
    }

  gcc_assert ((r1val.lattice_val == CONSTANT
               && TREE_CODE (r1val.value) == INTEGER_CST)
              || wi::sext (r1val.mask,
                           TYPE_PRECISION (TREE_TYPE (rhs1))) == -1);
  gcc_assert ((r2val.lattice_val == CONSTANT
               && TREE_CODE (r2val.value) == INTEGER_CST)
              || wi::sext (r2val.mask,
                           TYPE_PRECISION (TREE_TYPE (rhs2))) == -1);
  bit_value_binop_1 (code, type, &value, &mask,
                     TREE_TYPE (rhs1), value_to_wide_int (r1val), r1val.mask,
                     TREE_TYPE (rhs2), value_to_wide_int (r2val), r2val.mask);
  if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = wide_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = -1;
    }
  return val;
}
/* Return the propagation value for __builtin_assume_aligned
   and functions with the assume_aligned or alloc_align attribute.
   For __builtin_assume_aligned, ATTR is NULL_TREE,
   for the assume_aligned attribute ATTR is non-NULL and ALLOC_ALIGNED
   is false, for the alloc_align attribute ATTR is non-NULL and
   ALLOC_ALIGNED is true.  */

static ccp_prop_value_t
bit_value_assume_aligned (gimple stmt, tree attr, ccp_prop_value_t ptrval,
                          bool alloc_aligned)
{
  tree align, misalign = NULL_TREE, type;
  unsigned HOST_WIDE_INT aligni, misaligni = 0;
  ccp_prop_value_t alignval;
  widest_int value, mask;
  ccp_prop_value_t val;

  if (attr == NULL_TREE)
    {
      tree ptr = gimple_call_arg (stmt, 0);
      type = TREE_TYPE (ptr);
      ptrval = get_value_for_expr (ptr, true);
    }
  else
    {
      tree lhs = gimple_call_lhs (stmt);
      type = TREE_TYPE (lhs);
    }

  if (ptrval.lattice_val == UNDEFINED)
    return ptrval;
  gcc_assert ((ptrval.lattice_val == CONSTANT
               && TREE_CODE (ptrval.value) == INTEGER_CST)
              || wi::sext (ptrval.mask, TYPE_PRECISION (type)) == -1);
  if (attr == NULL_TREE)
    {
      /* Get aligni and misaligni from __builtin_assume_aligned.  */
      align = gimple_call_arg (stmt, 1);
      if (!tree_fits_uhwi_p (align))
        return ptrval;
      aligni = tree_to_uhwi (align);
      if (gimple_call_num_args (stmt) > 2)
        {
          misalign = gimple_call_arg (stmt, 2);
          if (!tree_fits_uhwi_p (misalign))
            return ptrval;
          misaligni = tree_to_uhwi (misalign);
        }
    }
  else
    {
      /* Get aligni and misaligni from the assume_aligned or
         alloc_align attributes.  */
      if (TREE_VALUE (attr) == NULL_TREE)
        return ptrval;
      attr = TREE_VALUE (attr);
      align = TREE_VALUE (attr);
      if (!tree_fits_uhwi_p (align))
        return ptrval;
      aligni = tree_to_uhwi (align);
      if (alloc_aligned)
        {
          if (aligni == 0 || aligni > gimple_call_num_args (stmt))
            return ptrval;
          align = gimple_call_arg (stmt, aligni - 1);
          if (!tree_fits_uhwi_p (align))
            return ptrval;
          aligni = tree_to_uhwi (align);
        }
      else if (TREE_CHAIN (attr) && TREE_VALUE (TREE_CHAIN (attr)))
        {
          misalign = TREE_VALUE (TREE_CHAIN (attr));
          if (!tree_fits_uhwi_p (misalign))
            return ptrval;
          misaligni = tree_to_uhwi (misalign);
        }
    }
  if (aligni <= 1 || (aligni & (aligni - 1)) != 0 || misaligni >= aligni)
    return ptrval;

  align = build_int_cst_type (type, -aligni);
  alignval = get_value_for_expr (align, true);
  bit_value_binop_1 (BIT_AND_EXPR, type, &value, &mask,
                     type, value_to_wide_int (ptrval), ptrval.mask,
                     type, value_to_wide_int (alignval), alignval.mask);
  if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      gcc_assert ((mask.to_uhwi () & (aligni - 1)) == 0);
      gcc_assert ((value.to_uhwi () & (aligni - 1)) == 0);
      value |= misaligni;
      /* ??? Delay building trees here.  */
      val.value = wide_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = -1;
    }
  return val;
}
/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static ccp_prop_value_t
evaluate_stmt (gimple stmt)
{
  ccp_prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant = false;
  unsigned int align;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
        {
        case CONSTANT:
          fprintf (dump_file, "CONSTANT");
          break;
        case UNDEFINED:
          fprintf (dump_file, "UNDEFINED");
          break;
        case VARYING:
          fprintf (dump_file, "VARYING");
          break;
        default:;
        }
      fprintf (dump_file, "\n");
    }

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    {
      fold_defer_overflow_warnings ();
      simplified = ccp_fold (stmt);
      if (simplified && TREE_CODE (simplified) == SSA_NAME)
        {
          val = *get_value (simplified);
          if (val.lattice_val != VARYING)
            {
              fold_undefer_overflow_warnings (true, stmt, 0);
              return val;
            }
        }
      is_constant = simplified && is_gimple_min_invariant (simplified);
      fold_undefer_overflow_warnings (is_constant, stmt, 0);
      if (is_constant)
        {
          /* The statement produced a constant value.  */
          val.lattice_val = CONSTANT;
          val.value = simplified;
          val.mask = 0;
          return val;
        }
    }
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
        {
          enum tree_code subcode = gimple_assign_rhs_code (stmt);

          /* Other cases cannot satisfy is_gimple_min_invariant
             without folding.  */
          if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
            simplified = gimple_assign_rhs1 (stmt);
        }
      else if (code == GIMPLE_SWITCH)
        simplified = gimple_switch_index (as_a <gswitch *> (stmt));
      else
        /* These cannot satisfy is_gimple_min_invariant without folding.  */
        gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      if (is_constant)
        {
          /* The statement produced a constant value.  */
          val.lattice_val = CONSTANT;
          val.value = simplified;
          val.mask = 0;
        }
    }
  /* If the statement result is likely UNDEFINED, make it so.  */
  else if (likelyvalue == UNDEFINED)
    {
      val.lattice_val = UNDEFINED;
      val.value = NULL_TREE;
      val.mask = 0;
      return val;
    }

  /* Resort to simplification for bitwise tracking.  */
  if (flag_tree_bit_ccp
      && (likelyvalue == CONSTANT || is_gimple_call (stmt)
          || (gimple_assign_single_p (stmt)
              && gimple_assign_rhs_code (stmt) == ADDR_EXPR))
      && !is_constant)
    {
      enum gimple_code code = gimple_code (stmt);
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = -1;
      if (code == GIMPLE_ASSIGN)
        {
          enum tree_code subcode = gimple_assign_rhs_code (stmt);
          tree rhs1 = gimple_assign_rhs1 (stmt);
          tree lhs = gimple_assign_lhs (stmt);
          if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
               || POINTER_TYPE_P (TREE_TYPE (lhs)))
              && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
                  || POINTER_TYPE_P (TREE_TYPE (rhs1))))
            switch (get_gimple_rhs_class (subcode))
              {
              case GIMPLE_SINGLE_RHS:
                val = get_value_for_expr (rhs1, true);
                break;

              case GIMPLE_UNARY_RHS:
                val = bit_value_unop (subcode, TREE_TYPE (lhs), rhs1);
                break;

              case GIMPLE_BINARY_RHS:
                val = bit_value_binop (subcode, TREE_TYPE (lhs), rhs1,
                                       gimple_assign_rhs2 (stmt));
                break;

              default:;
              }
        }
      else if (code == GIMPLE_COND)
        {
          enum tree_code code = gimple_cond_code (stmt);
          tree rhs1 = gimple_cond_lhs (stmt);
          tree rhs2 = gimple_cond_rhs (stmt);
          if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
              || POINTER_TYPE_P (TREE_TYPE (rhs1)))
            val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
        }
      else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
        {
          tree fndecl = gimple_call_fndecl (stmt);
          switch (DECL_FUNCTION_CODE (fndecl))
            {
            case BUILT_IN_MALLOC:
            case BUILT_IN_REALLOC:
            case BUILT_IN_CALLOC:
            case BUILT_IN_STRDUP:
            case BUILT_IN_STRNDUP:
              val.lattice_val = CONSTANT;
              val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
              val.mask = ~((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT
                           / BITS_PER_UNIT - 1);
              break;

            case BUILT_IN_ALLOCA:
            case BUILT_IN_ALLOCA_WITH_ALIGN:
              align = (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN
                       ? TREE_INT_CST_LOW (gimple_call_arg (stmt, 1))
                       : BIGGEST_ALIGNMENT);
              val.lattice_val = CONSTANT;
              val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
              val.mask = ~((HOST_WIDE_INT) align / BITS_PER_UNIT - 1);
              break;

            /* These builtins return their first argument, unmodified.  */
            case BUILT_IN_MEMCPY:
            case BUILT_IN_MEMMOVE:
            case BUILT_IN_MEMSET:
            case BUILT_IN_STRCPY:
            case BUILT_IN_STRNCPY:
            case BUILT_IN_MEMCPY_CHK:
            case BUILT_IN_MEMMOVE_CHK:
            case BUILT_IN_MEMSET_CHK:
            case BUILT_IN_STRCPY_CHK:
            case BUILT_IN_STRNCPY_CHK:
              val = get_value_for_expr (gimple_call_arg (stmt, 0), true);
              break;

            case BUILT_IN_ASSUME_ALIGNED:
              val = bit_value_assume_aligned (stmt, NULL_TREE, val, false);
              break;

            case BUILT_IN_ALIGNED_ALLOC:
              {
                tree align = get_constant_value (gimple_call_arg (stmt, 0));
                if (align
                    && tree_fits_uhwi_p (align))
                  {
                    unsigned HOST_WIDE_INT aligni = tree_to_uhwi (align);
                    if (aligni > 1
                        /* align must be power-of-two */
                        && (aligni & (aligni - 1)) == 0)
                      {
                        val.lattice_val = CONSTANT;
                        val.value = build_int_cst (ptr_type_node, 0);
                        val.mask = -aligni;
                      }
                  }
                break;
              }

            default:;
            }
        }
      if (is_gimple_call (stmt) && gimple_call_lhs (stmt))
        {
          tree fntype = gimple_call_fntype (stmt);
          if (fntype)
            {
              tree attrs = lookup_attribute ("assume_aligned",
                                             TYPE_ATTRIBUTES (fntype));
              if (attrs)
                val = bit_value_assume_aligned (stmt, attrs, val, false);
              attrs = lookup_attribute ("alloc_align",
                                        TYPE_ATTRIBUTES (fntype));
              if (attrs)
                val = bit_value_assume_aligned (stmt, attrs, val, true);
            }
        }
      is_constant = (val.lattice_val == CONSTANT);
    }

  if (flag_tree_bit_ccp
      && ((is_constant && TREE_CODE (val.value) == INTEGER_CST)
          || !is_constant)
      && gimple_get_lhs (stmt)
      && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME)
    {
      tree lhs = gimple_get_lhs (stmt);
      wide_int nonzero_bits = get_nonzero_bits (lhs);
      if (nonzero_bits != -1)
        {
          if (!is_constant)
            {
              val.lattice_val = CONSTANT;
              val.value = build_zero_cst (TREE_TYPE (lhs));
              val.mask = extend_mask (nonzero_bits);
              is_constant = true;
            }
          else
            {
              if (wi::bit_and_not (val.value, nonzero_bits) != 0)
                val.value = wide_int_to_tree (TREE_TYPE (lhs),
                                              nonzero_bits & val.value);
              if (nonzero_bits == 0)
                val.mask = 0;
              else
                val.mask = val.mask & extend_mask (nonzero_bits);
            }
        }
    }

  /* The statement produced a nonconstant value.  */
  if (!is_constant)
    {
      /* The statement produced a copy.  */
      if (simplified && TREE_CODE (simplified) == SSA_NAME
          && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (simplified))
        {
          val.lattice_val = CONSTANT;
          val.value = simplified;
          val.mask = -1;
        }
      /* The statement is VARYING.  */
      else
        {
          val.lattice_val = VARYING;
          val.value = NULL_TREE;
          val.mask = -1;
        }
    }

  return val;
}
typedef hash_table<pointer_hash<gimple_statement_base> > gimple_htab;

/* Given a BUILT_IN_STACK_SAVE value SAVED_VAL, insert a clobber of VAR before
   each matching BUILT_IN_STACK_RESTORE.  Mark visited phis in VISITED.  */

static void
insert_clobber_before_stack_restore (tree saved_val, tree var,
                                     gimple_htab **visited)
{
  gimple stmt;
  gassign *clobber_stmt;
  tree clobber;
  imm_use_iterator iter;
  gimple_stmt_iterator i;
  gimple *slot;

  FOR_EACH_IMM_USE_STMT (stmt, iter, saved_val)
    if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
      {
        clobber = build_constructor (TREE_TYPE (var),
                                     NULL);
        TREE_THIS_VOLATILE (clobber) = 1;
        clobber_stmt = gimple_build_assign (var, clobber);

        i = gsi_for_stmt (stmt);
        gsi_insert_before (&i, clobber_stmt, GSI_SAME_STMT);
      }
    else if (gimple_code (stmt) == GIMPLE_PHI)
      {
        if (!*visited)
          *visited = new gimple_htab (10);

        slot = (*visited)->find_slot (stmt, INSERT);
        if (*slot != NULL)
          continue;

        *slot = stmt;
        insert_clobber_before_stack_restore (gimple_phi_result (stmt), var,
                                             visited);
      }
    else if (gimple_assign_ssa_name_copy_p (stmt))
      insert_clobber_before_stack_restore (gimple_assign_lhs (stmt), var,
                                           visited);
    else if (chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDRET))
      continue;
    else
      gcc_assert (is_gimple_debug (stmt));
}
/* Advance the iterator to the previous non-debug gimple statement in the same
   or dominating basic block.  */

static inline void
gsi_prev_dom_bb_nondebug (gimple_stmt_iterator *i)
{
  basic_block dom;

  gsi_prev_nondebug (i);
  while (gsi_end_p (*i))
    {
      dom = get_immediate_dominator (CDI_DOMINATORS, i->bb);
      if (dom == NULL || dom == ENTRY_BLOCK_PTR_FOR_FN (cfun))
        return;

      *i = gsi_last_bb (dom);
    }
}
/* Find a BUILT_IN_STACK_SAVE dominating gsi_stmt (I), and insert
   a clobber of VAR before each matching BUILT_IN_STACK_RESTORE.

   It is possible that BUILT_IN_STACK_SAVE cannot be found in a dominator
   when a previous pass (such as DOM) duplicated it along multiple paths to
   a BB.  In that case the function gives up without inserting the clobbers.  */

static void
insert_clobbers_for_var (gimple_stmt_iterator i, tree var)
{
  gimple stmt;
  tree saved_val;
  gimple_htab *visited = NULL;

  for (; !gsi_end_p (i); gsi_prev_dom_bb_nondebug (&i))
    {
      stmt = gsi_stmt (i);

      if (!gimple_call_builtin_p (stmt, BUILT_IN_STACK_SAVE))
        continue;

      saved_val = gimple_call_lhs (stmt);
      if (saved_val == NULL_TREE)
        continue;

      insert_clobber_before_stack_restore (saved_val, var, &visited);
      break;
    }

  delete visited;
}
/* Detects a __builtin_alloca_with_align with constant size argument.  Declares
   fixed-size array and returns the address, if found, otherwise returns
   NULL_TREE.  */

static tree
fold_builtin_alloca_with_align (gimple stmt)
{
  unsigned HOST_WIDE_INT size, threshold, n_elem;
  tree lhs, arg, block, var, elem_type, array_type;

  /* Get lhs.  */
  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    return NULL_TREE;

  /* Detect constant argument.  */
  arg = get_constant_value (gimple_call_arg (stmt, 0));
  if (arg == NULL_TREE
      || TREE_CODE (arg) != INTEGER_CST
      || !tree_fits_uhwi_p (arg))
    return NULL_TREE;

  size = tree_to_uhwi (arg);

  /* Heuristic: don't fold large allocas.  */
  threshold = (unsigned HOST_WIDE_INT)PARAM_VALUE (PARAM_LARGE_STACK_FRAME);
  /* In case the alloca is located at function entry, it has the same lifetime
     as a declared array, so we allow a larger size.  */
  block = gimple_block (stmt);
  if (!(cfun->after_inlining
        && TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL))
    threshold /= 10;
  if (size > threshold)
    return NULL_TREE;

  /* Declare array.  */
  elem_type = build_nonstandard_integer_type (BITS_PER_UNIT, 1);
  n_elem = size * 8 / BITS_PER_UNIT;
  array_type = build_array_type_nelts (elem_type, n_elem);
  var = create_tmp_var (array_type);
  DECL_ALIGN (var) = TREE_INT_CST_LOW (gimple_call_arg (stmt, 1));
  {
    struct ptr_info_def *pi = SSA_NAME_PTR_INFO (lhs);
    if (pi != NULL && !pi->pt.anything)
      {
        bool singleton_p;
        unsigned uid;
        singleton_p = pt_solution_singleton_p (&pi->pt, &uid);
        gcc_assert (singleton_p);
        SET_DECL_PT_UID (var, uid);
      }
  }

  /* Fold alloca to the address of the array.  */
  return fold_convert (TREE_TYPE (lhs), build_fold_addr_expr (var));
}
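
/* Illustrative example of the folding above (not part of the pass):
   within the size threshold,

     p_1 = __builtin_alloca_with_align (16, 64);

   becomes the address of a fresh fixed-size local array of 16
   one-byte elements whose DECL_ALIGN is 64 bits, i.e. effectively
   "p_1 = &D.1234;" for a new temporary D.1234 of type char[16]
   (the temporary's name here is hypothetical).  */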
/* Fold the stmt at *GSI with CCP specific information that propagating
   and regular folding does not catch.  */

static bool
ccp_fold_stmt (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	ccp_prop_value_t val;
	/* Statement evaluation will handle type mismatches in constants
	   more gracefully than the final propagation.  This allows us to
	   fold more conditionals here.  */
	val = evaluate_stmt (stmt);
	if (val.lattice_val != CONSTANT
	    || TREE_CODE (val.value) != INTEGER_CST)
	  return false;

	if (dump_file)
	  {
	    fprintf (dump_file, "Folding predicate ");
	    print_gimple_expr (dump_file, stmt, 0, 0);
	    fprintf (dump_file, " to ");
	    print_generic_expr (dump_file, val.value, 0);
	    fprintf (dump_file, "\n");
	  }

	if (integer_zerop (val.value))
	  gimple_cond_make_false (cond_stmt);
	else
	  gimple_cond_make_true (cond_stmt);

	return true;
      }

    case GIMPLE_CALL:
      {
	tree lhs = gimple_call_lhs (stmt);
	int flags = gimple_call_flags (stmt);
	tree val;
	tree argt;
	bool changed = false;
	unsigned i;

	/* If the call was folded into a constant make sure it goes
	   away even if we cannot propagate into all uses because of
	   type issues.  */
	if (lhs
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_constant_value (lhs))
	    /* Don't optimize away calls that have side-effects.  */
	    && (flags & (ECF_CONST|ECF_PURE)) != 0
	    && (flags & ECF_LOOPING_CONST_OR_PURE) == 0)
	  {
	    tree new_rhs = unshare_expr (val);
	    bool res;
	    if (!useless_type_conversion_p (TREE_TYPE (lhs),
					    TREE_TYPE (new_rhs)))
	      new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	    res = update_call_from_tree (gsi, new_rhs);
	    gcc_assert (res);
	    return true;
	  }

	/* Internal calls provide no argument types, so the extra laxity
	   for normal calls does not apply.  */
	if (gimple_call_internal_p (stmt))
	  return false;

	/* The heuristic of fold_builtin_alloca_with_align differs before and
	   after inlining, so we don't require the arg to be changed into a
	   constant for folding, but just to be constant.  */
	if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
	  {
	    tree new_rhs = fold_builtin_alloca_with_align (stmt);
	    if (new_rhs)
	      {
		bool res = update_call_from_tree (gsi, new_rhs);
		tree var = TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 0);
		gcc_assert (res);
		insert_clobbers_for_var (*gsi, var);
		return true;
	      }
	  }

	/* Propagate into the call arguments.  Compared to replace_uses_in
	   this can use the argument slot types for type verification
	   instead of the current argument type.  We also can safely
	   drop qualifiers here as we are dealing with constants anyway.  */
	argt = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
	for (i = 0; i < gimple_call_num_args (stmt) && argt;
	     ++i, argt = TREE_CHAIN (argt))
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (TREE_CODE (arg) == SSA_NAME
		&& (val = get_constant_value (arg))
		&& useless_type_conversion_p
		     (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
		      TYPE_MAIN_VARIANT (TREE_TYPE (val))))
	      {
		gimple_call_set_arg (stmt, i, unshare_expr (val));
		changed = true;
	      }
	  }

	return changed;
      }

    case GIMPLE_ASSIGN:
      {
	tree lhs = gimple_assign_lhs (stmt);
	tree val;

	/* If we have a load that turned out to be constant replace it
	   as we cannot propagate into all uses in all cases.  */
	if (gimple_assign_single_p (stmt)
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_constant_value (lhs)))
	  {
	    tree rhs = unshare_expr (val);
	    if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	      rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
	    gimple_assign_set_rhs_from_tree (gsi, rhs);
	    return true;
	  }

	return false;
      }

    default:
      return false;
    }
}
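/* Illustration (an added sketch, not part of the original sources; the
   SSA names and block numbers are made up): if CCP proves x_3 to be
   CONSTANT 0, ccp_fold_stmt rewrites

     if (x_3 > 4) goto <bb 5>; else goto <bb 6>;

   via gimple_cond_make_false into a condition that is always false,
   and the subsequent CFG cleanup removes the dead arm.  */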
/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  ccp_prop_value_t val;
  enum ssa_prop_result retval = SSA_PROP_NOT_INTERESTING;

  tree lhs = gimple_get_lhs (stmt);
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* Evaluate the statement, which could be
	 either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
      val = evaluate_stmt (stmt);

      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, &val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}
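/* Illustration (an added sketch, not part of the original sources; the
   SSA names are made up): when the propagator first simulates

     y_4 = x_3 + 1;

   with x_3 already CONSTANT 4, evaluate_stmt yields CONSTANT 5;
   set_lattice_value moves y_4 from UNDEFINED to CONSTANT 5, so we
   return SSA_PROP_INTERESTING and every use of y_4 is re-simulated.  */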
/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  ccp_prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);
  if (val.lattice_val != CONSTANT
      || TREE_CODE (val.value) != INTEGER_CST)
    return SSA_PROP_VARYING;

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = find_taken_edge (block, val.value);
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}
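/* Illustration (an added sketch, not part of the original sources; the
   SSA names and block numbers are made up): for

     if (x_2 != 0) goto <bb 4>; else goto <bb 5>;

   with x_2 CONSTANT 0, find_taken_edge returns the edge to <bb 5>;
   only that edge is marked executable, so <bb 4> need never be
   simulated.  */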
/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
      case GIMPLE_ASSIGN:
	/* If the statement is an assignment that produces a single
	   output value, evaluate its RHS to see if the lattice value of
	   its output has changed.  */
	return visit_assignment (stmt, output_p);

      case GIMPLE_CALL:
	/* A value-returning call also performs an assignment.  */
	if (gimple_call_lhs (stmt) != NULL_TREE)
	  return visit_assignment (stmt, output_p);
	break;

      case GIMPLE_COND:
      case GIMPLE_SWITCH:
	/* If STMT is a conditional branch, see if we can determine
	   which branch will be taken.  */
	/* FIXME.  It appears that we should be able to optimize
	   computed GOTOs here as well.  */
	return visit_cond_stmt (stmt, taken_edge_p);

      default:
	break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    set_value_varying (def);

  return SSA_PROP_VARYING;
}
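/* Illustration (an added sketch, not part of the original sources; the
   SSA name is made up): an inline asm with an output operand such as

     __asm__ ("" : "=r" (t_7));

   is neither an assignment to an SSA_NAME nor a branch, so it reaches
   the fallback above and t_7 is pessimistically marked VARYING.  */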
/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  unsigned int todo = 0;
  calculate_dominance_info (CDI_DOMINATORS);
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    todo = (TODO_cleanup_cfg | TODO_update_ssa);
  free_dominance_info (CDI_DOMINATORS);
  return todo;
}
namespace {

const pass_data pass_data_ccp =
{
  GIMPLE_PASS, /* type */
  "ccp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CCP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_address_taken, /* todo_flags_finish */
};

class pass_ccp : public gimple_opt_pass
{
public:
  pass_ccp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_ccp, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_ccp (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_ccp != 0; }
  virtual unsigned int execute (function *) { return do_ssa_ccp (); }

}; // class pass_ccp

} // anon namespace

gimple_opt_pass *
make_pass_ccp (gcc::context *ctxt)
{
  return new pass_ccp (ctxt);
}
/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */

static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee;
  gimple stmt;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
	return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
	continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee
	  || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	  /* All regular builtins are ok, just obviously not alloca.  */
	  || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA_WITH_ALIGN)
	return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
	goto second_stack_restore;
    }

  if (!gsi_end_p (i))
    return NULL_TREE;

  /* Allow one successor of the exit block, or zero successors.  */
  switch (EDGE_COUNT (bb->succs))
    {
    case 0:
      break;
    case 1:
      if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	return NULL_TREE;
      break;
    default:
      return NULL_TREE;
    }

 second_stack_restore:

  /* If there's exactly one use, then zap the call to __builtin_stack_save.
     If there are multiple uses, then the last one should remove the call.
     In any case, whether the call to __builtin_stack_save can be removed
     or not is irrelevant to removing the call to __builtin_stack_restore.  */
  if (has_single_use (gimple_call_arg (call, 0)))
    {
      gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
      if (is_gimple_call (stack_save))
	{
	  callee = gimple_call_fndecl (stack_save);
	  if (callee
	      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
	    {
	      gimple_stmt_iterator stack_save_gsi;
	      tree rhs;

	      stack_save_gsi = gsi_for_stmt (stack_save);
	      rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
	      update_call_from_tree (&stack_save_gsi, rhs);
	    }
	}
    }

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
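/* Illustration (an added sketch, not part of the original sources; the
   SSA names are made up): in a block like

     p_1 = __builtin_stack_save ();
     ...                                <-- no calls or asms here
     __builtin_stack_restore (p_1);
     ...                                <-- no calls or asms here
     __builtin_stack_restore (p_2);

   the first restore is redundant because another one follows in the
   same block, so integer_zero_node is returned for it and the caller
   deletes the call; when p_1 has no other uses, the paired
   __builtin_stack_save is neutralized the same way.  */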
/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
   pointer assignment.  */

static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;
  location_t loc = gimple_location (call);

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
	  || targetm.expand_builtin_va_start != NULL
	  || !builtin_decl_explicit_p (BUILT_IN_NEXT_ARG))
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_NEXT_ARG),
				 1, integer_zero_node);
      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
	  != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
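/* Illustration (an added sketch, not part of the original sources): on
   a target whose va_list is a plain character pointer, the returned
   tree rewrites

     __builtin_va_start (&ap, 0);

   as the simple assignment

     ap = (char *) __builtin_next_arg (0);

   while __builtin_va_end (&ap) folds to integer_zero_node and the
   statement disappears entirely.  */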
/* Attempt to make the block of __builtin_unreachable I unreachable by changing
   the incoming jumps.  Return true if at least one jump was changed.  */

static bool
optimize_unreachable (gimple_stmt_iterator i)
{
  basic_block bb = gsi_bb (i);
  gimple_stmt_iterator gsi;
  gimple stmt;
  edge_iterator ei;
  edge e;
  bool ret;

  if (flag_sanitize & SANITIZE_UNREACHABLE)
    return false;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
	continue;

      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  /* Verify we do not need to preserve the label.  */
	  if (FORCED_LABEL (gimple_label_label (label_stmt)))
	    return false;

	  continue;
	}

      /* Only handle the case that __builtin_unreachable is the first statement
	 in the block.  We rely on DCE to remove stmts without side-effects
	 before __builtin_unreachable.  */
      if (gsi_stmt (gsi) != gsi_stmt (i))
	return false;
    }

  ret = false;
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      if (gsi_end_p (gsi))
	continue;

      stmt = gsi_stmt (gsi);
      if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
	{
	  if (e->flags & EDGE_TRUE_VALUE)
	    gimple_cond_make_false (cond_stmt);
	  else if (e->flags & EDGE_FALSE_VALUE)
	    gimple_cond_make_true (cond_stmt);
	  else
	    gcc_unreachable ();
	  update_stmt (cond_stmt);
	}
      else
	{
	  /* Todo: handle other cases, f.i. switch statement.  */
	  continue;
	}

      ret = true;
    }

  return ret;
}
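/* Illustration (an added sketch, not part of the original sources; the
   block numbers and SSA name are made up): if <bb 7> starts with
   __builtin_unreachable () and is the true destination of

     if (x_1 > 10) goto <bb 7>; else goto <bb 8>;

   the predecessor's condition is forced to false, the edge into
   <bb 7> goes dead, and CFG cleanup can delete the block.  */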
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

namespace {

const pass_data pass_data_fold_builtins =
{
  GIMPLE_PASS, /* type */
  "fab", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_fold_builtins : public gimple_opt_pass
{
public:
  pass_fold_builtins (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fold_builtins, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fold_builtins (m_ctxt); }
  virtual unsigned int execute (function *);

}; // class pass_fold_builtins
unsigned int
pass_fold_builtins::execute (function *fun)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple stmt, old_stmt;
	  tree callee;
	  enum built_in_function fcode;

	  stmt = gsi_stmt (i);

	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      /* Remove all *ssaname_N ={v} {CLOBBER}; stmts,
		 after the last GIMPLE DSE they aren't needed and might
		 unnecessarily keep the SSA_NAMEs live.  */
	      if (gimple_clobber_p (stmt))
		{
		  tree lhs = gimple_assign_lhs (stmt);
		  if (TREE_CODE (lhs) == MEM_REF
		      && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
		    {
		      unlink_stmt_vdef (stmt);
		      gsi_remove (&i, true);
		      release_defs (stmt);
		      continue;
		    }
		}
	      gsi_next (&i);
	      continue;
	    }

	  callee = gimple_call_fndecl (stmt);
	  if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    {
	      gsi_next (&i);
	      continue;
	    }

	  fcode = DECL_FUNCTION_CODE (callee);
	  if (fold_stmt (&i))
	    ;
	  else
	    {
	      tree result = NULL_TREE;
	      switch (DECL_FUNCTION_CODE (callee))
		{
		case BUILT_IN_CONSTANT_P:
		  /* Resolve __builtin_constant_p.  If it hasn't been
		     folded to integer_one_node by now, it's fairly
		     certain that the value simply isn't constant.  */
		  result = integer_zero_node;
		  break;

		case BUILT_IN_ASSUME_ALIGNED:
		  /* Remove __builtin_assume_aligned.  */
		  result = gimple_call_arg (stmt, 0);
		  break;

		case BUILT_IN_STACK_RESTORE:
		  result = optimize_stack_restore (i);
		  if (result)
		    break;
		  gsi_next (&i);
		  continue;

		case BUILT_IN_UNREACHABLE:
		  if (optimize_unreachable (i))
		    cfg_changed = true;
		  break;

		case BUILT_IN_VA_START:
		case BUILT_IN_VA_END:
		case BUILT_IN_VA_COPY:
		  /* These shouldn't be folded before pass_stdarg.  */
		  result = optimize_stdarg_builtin (stmt);
		  if (result)
		    break;
		  gsi_next (&i);
		  continue;

		default:
		  break;
		}

	      if (result == NULL_TREE)
		{
		  gsi_next (&i);
		  continue;
		}

	      old_stmt = stmt;
	      if (!update_call_from_tree (&i, result))
		{
		  gimplify_and_update_call_from_tree (&i, result);
		  todoflags |= TODO_update_address_taken;
		}

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "Simplified\n  ");
		  print_gimple_stmt (dump_file, old_stmt, 0, dump_flags);
		}

	      stmt = gsi_stmt (i);
	      update_stmt (stmt);

	      if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
		  && gimple_purge_dead_eh_edges (bb))
		cfg_changed = true;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "to\n  ");
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fprintf (dump_file, "\n");
		}

	      /* Retry the same statement if it changed into another
		 builtin, there might be new opportunities now.  */
	      if (gimple_code (stmt) != GIMPLE_CALL)
		{
		  gsi_next (&i);
		  continue;
		}
	      callee = gimple_call_fndecl (stmt);
	      if (!callee
		  || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
		  || DECL_FUNCTION_CODE (callee) == fcode)
		gsi_next (&i);
	    }
	}
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}

} // anon namespace

gimple_opt_pass *
make_pass_fold_builtins (gcc::context *ctxt)
{
  return new pass_fold_builtins (ctxt);
}