1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2016 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
45 #include "coretypes.h"
53 #include "tree-ssa-operands.h"
54 #include "optabs-query.h"
56 #include "diagnostic-core.h"
59 #include "fold-const.h"
60 #include "fold-const-call.h"
61 #include "stor-layout.h"
63 #include "tree-iterator.h"
66 #include "langhooks.h"
71 #include "generic-match.h"
72 #include "gimple-fold.h"
74 #include "tree-into-ssa.h"
76 #include "case-cfn-macros.h"
78 #ifndef LOAD_EXTEND_OP
79 #define LOAD_EXTEND_OP(M) UNKNOWN
82 /* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
84 int folding_initializer
= 0;
86 /* The following constants represent a bit based encoding of GCC's
87 comparison operators. This encoding simplifies transformations
88 on relational comparison operators, such as AND and OR. */
89 enum comparison_code
{
108 static bool negate_expr_p (tree
);
109 static tree
negate_expr (tree
);
110 static tree
split_tree (tree
, enum tree_code
, tree
*, tree
*, tree
*, int);
111 static tree
associate_trees (location_t
, tree
, tree
, enum tree_code
, tree
);
112 static enum comparison_code
comparison_to_compcode (enum tree_code
);
113 static enum tree_code
compcode_to_comparison (enum comparison_code
);
114 static int operand_equal_for_comparison_p (tree
, tree
, tree
);
115 static int twoval_comparison_p (tree
, tree
*, tree
*, int *);
116 static tree
eval_subst (location_t
, tree
, tree
, tree
, tree
, tree
);
117 static tree
make_bit_field_ref (location_t
, tree
, tree
,
118 HOST_WIDE_INT
, HOST_WIDE_INT
, int, int);
119 static tree
optimize_bit_field_compare (location_t
, enum tree_code
,
121 static tree
decode_field_reference (location_t
, tree
, HOST_WIDE_INT
*,
123 machine_mode
*, int *, int *, int *,
125 static int simple_operand_p (const_tree
);
126 static bool simple_operand_p_2 (tree
);
127 static tree
range_binop (enum tree_code
, tree
, tree
, int, tree
, int);
128 static tree
range_predecessor (tree
);
129 static tree
range_successor (tree
);
130 static tree
fold_range_test (location_t
, enum tree_code
, tree
, tree
, tree
);
131 static tree
fold_cond_expr_with_comparison (location_t
, tree
, tree
, tree
, tree
);
132 static tree
unextend (tree
, int, int, tree
);
133 static tree
optimize_minmax_comparison (location_t
, enum tree_code
,
135 static tree
extract_muldiv (tree
, tree
, enum tree_code
, tree
, bool *);
136 static tree
extract_muldiv_1 (tree
, tree
, enum tree_code
, tree
, bool *);
137 static tree
fold_binary_op_with_conditional_arg (location_t
,
138 enum tree_code
, tree
,
141 static tree
fold_div_compare (location_t
, enum tree_code
, tree
, tree
, tree
);
142 static bool reorder_operands_p (const_tree
, const_tree
);
143 static tree
fold_negate_const (tree
, tree
);
144 static tree
fold_not_const (const_tree
, tree
);
145 static tree
fold_relational_const (enum tree_code
, tree
, tree
, tree
);
146 static tree
fold_convert_const (enum tree_code
, tree
, tree
);
147 static tree
fold_view_convert_expr (tree
, tree
);
148 static bool vec_cst_ctor_to_array (tree
, tree
*);
151 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
152 Otherwise, return LOC. */
155 expr_location_or (tree t
, location_t loc
)
157 location_t tloc
= EXPR_LOCATION (t
);
158 return tloc
== UNKNOWN_LOCATION
? loc
: tloc
;
161 /* Similar to protected_set_expr_location, but never modify x in place,
162 if location can and needs to be set, unshare it. */
165 protected_set_expr_location_unshare (tree x
, location_t loc
)
167 if (CAN_HAVE_LOCATION_P (x
)
168 && EXPR_LOCATION (x
) != loc
169 && !(TREE_CODE (x
) == SAVE_EXPR
170 || TREE_CODE (x
) == TARGET_EXPR
171 || TREE_CODE (x
) == BIND_EXPR
))
174 SET_EXPR_LOCATION (x
, loc
);
179 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
180 division and returns the quotient. Otherwise returns
184 div_if_zero_remainder (const_tree arg1
, const_tree arg2
)
188 if (wi::multiple_of_p (wi::to_widest (arg1
), wi::to_widest (arg2
),
190 return wide_int_to_tree (TREE_TYPE (arg1
), quo
);
195 /* This is nonzero if we should defer warnings about undefined
196 overflow. This facility exists because these warnings are a
197 special case. The code to estimate loop iterations does not want
198 to issue any warnings, since it works with expressions which do not
199 occur in user code. Various bits of cleanup code call fold(), but
200 only use the result if it has certain characteristics (e.g., is a
201 constant); that code only wants to issue a warning if the result is
   used.  */
204 static int fold_deferring_overflow_warnings
;
206 /* If a warning about undefined overflow is deferred, this is the
207 warning. Note that this may cause us to turn two warnings into
208 one, but that is fine since it is sufficient to only give one
209 warning per expression. */
211 static const char* fold_deferred_overflow_warning
;
213 /* If a warning about undefined overflow is deferred, this is the
214 level at which the warning should be emitted. */
216 static enum warn_strict_overflow_code fold_deferred_overflow_code
;
218 /* Start deferring overflow warnings. We could use a stack here to
219 permit nested calls, but at present it is not necessary. */
222 fold_defer_overflow_warnings (void)
224 ++fold_deferring_overflow_warnings
;
227 /* Stop deferring overflow warnings. If there is a pending warning,
228 and ISSUE is true, then issue the warning if appropriate. STMT is
229 the statement with which the warning should be associated (used for
230 location information); STMT may be NULL. CODE is the level of the
231 warning--a warn_strict_overflow_code value. This function will use
232 the smaller of CODE and the deferred code when deciding whether to
233 issue the warning. CODE may be zero to mean to always use the
237 fold_undefer_overflow_warnings (bool issue
, const gimple
*stmt
, int code
)
242 gcc_assert (fold_deferring_overflow_warnings
> 0);
243 --fold_deferring_overflow_warnings
;
244 if (fold_deferring_overflow_warnings
> 0)
246 if (fold_deferred_overflow_warning
!= NULL
248 && code
< (int) fold_deferred_overflow_code
)
249 fold_deferred_overflow_code
= (enum warn_strict_overflow_code
) code
;
253 warnmsg
= fold_deferred_overflow_warning
;
254 fold_deferred_overflow_warning
= NULL
;
256 if (!issue
|| warnmsg
== NULL
)
259 if (gimple_no_warning_p (stmt
))
262 /* Use the smallest code level when deciding to issue the
264 if (code
== 0 || code
> (int) fold_deferred_overflow_code
)
265 code
= fold_deferred_overflow_code
;
267 if (!issue_strict_overflow_warning (code
))
271 locus
= input_location
;
273 locus
= gimple_location (stmt
);
274 warning_at (locus
, OPT_Wstrict_overflow
, "%s", warnmsg
);
277 /* Stop deferring overflow warnings, ignoring any deferred
281 fold_undefer_and_ignore_overflow_warnings (void)
283 fold_undefer_overflow_warnings (false, NULL
, 0);
286 /* Whether we are deferring overflow warnings. */
289 fold_deferring_overflow_warnings_p (void)
291 return fold_deferring_overflow_warnings
> 0;
294 /* This is called when we fold something based on the fact that signed
295 overflow is undefined. */
298 fold_overflow_warning (const char* gmsgid
, enum warn_strict_overflow_code wc
)
300 if (fold_deferring_overflow_warnings
> 0)
302 if (fold_deferred_overflow_warning
== NULL
303 || wc
< fold_deferred_overflow_code
)
305 fold_deferred_overflow_warning
= gmsgid
;
306 fold_deferred_overflow_code
= wc
;
309 else if (issue_strict_overflow_warning (wc
))
310 warning (OPT_Wstrict_overflow
, gmsgid
);
313 /* Return true if the built-in mathematical function specified by CODE
314 is odd, i.e. -f(x) == f(-x). */
/* NOTE(review): the body of this function — a switch classifying odd
   math builtins (original lines 318-349) — is missing from this copy;
   only the trailing flag_rounding_math fallthrough survives.  Restore
   the switch from upstream fold-const.c before building.  */
317 negate_mathfn_p (combined_fn fn
)
350 return !flag_rounding_math
;
358 /* Check whether we may negate an integer constant T without causing
362 may_negate_without_overflow_p (const_tree t
)
366 gcc_assert (TREE_CODE (t
) == INTEGER_CST
);
368 type
= TREE_TYPE (t
);
369 if (TYPE_UNSIGNED (type
))
372 return !wi::only_sign_bit_p (t
);
375 /* Determine whether an expression T can be cheaply negated using
376 the function negate_expr without introducing undefined overflow. */
379 negate_expr_p (tree t
)
386 type
= TREE_TYPE (t
);
389 switch (TREE_CODE (t
))
392 if (INTEGRAL_TYPE_P (type
) && TYPE_OVERFLOW_WRAPS (type
))
395 /* Check that -CST will not overflow type. */
396 return may_negate_without_overflow_p (t
);
398 return (INTEGRAL_TYPE_P (type
)
399 && TYPE_OVERFLOW_WRAPS (type
));
405 return !TYPE_OVERFLOW_SANITIZED (type
);
408 /* We want to canonicalize to positive real constants. Pretend
409 that only negative ones can be easily negated. */
410 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
413 return negate_expr_p (TREE_REALPART (t
))
414 && negate_expr_p (TREE_IMAGPART (t
));
418 if (FLOAT_TYPE_P (TREE_TYPE (type
)) || TYPE_OVERFLOW_WRAPS (type
))
421 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
423 for (i
= 0; i
< count
; i
++)
424 if (!negate_expr_p (VECTOR_CST_ELT (t
, i
)))
431 return negate_expr_p (TREE_OPERAND (t
, 0))
432 && negate_expr_p (TREE_OPERAND (t
, 1));
435 return negate_expr_p (TREE_OPERAND (t
, 0));
438 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
439 || HONOR_SIGNED_ZEROS (element_mode (type
))
440 || (INTEGRAL_TYPE_P (type
)
441 && ! TYPE_OVERFLOW_WRAPS (type
)))
443 /* -(A + B) -> (-B) - A. */
444 if (negate_expr_p (TREE_OPERAND (t
, 1))
445 && reorder_operands_p (TREE_OPERAND (t
, 0),
446 TREE_OPERAND (t
, 1)))
448 /* -(A + B) -> (-A) - B. */
449 return negate_expr_p (TREE_OPERAND (t
, 0));
452 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
453 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
454 && !HONOR_SIGNED_ZEROS (element_mode (type
))
455 && (! INTEGRAL_TYPE_P (type
)
456 || TYPE_OVERFLOW_WRAPS (type
))
457 && reorder_operands_p (TREE_OPERAND (t
, 0),
458 TREE_OPERAND (t
, 1));
461 if (TYPE_UNSIGNED (type
))
463 /* INT_MIN/n * n doesn't overflow while negating one operand it does
464 if n is a power of two. */
465 if (INTEGRAL_TYPE_P (TREE_TYPE (t
))
466 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t
))
467 && ! ((TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
468 && ! integer_pow2p (TREE_OPERAND (t
, 0)))
469 || (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
470 && ! integer_pow2p (TREE_OPERAND (t
, 1)))))
476 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t
))))
477 return negate_expr_p (TREE_OPERAND (t
, 1))
478 || negate_expr_p (TREE_OPERAND (t
, 0));
484 if (TYPE_UNSIGNED (type
))
486 if (negate_expr_p (TREE_OPERAND (t
, 0)))
488 /* In general we can't negate B in A / B, because if A is INT_MIN and
489 B is 1, we may turn this into INT_MIN / -1 which is undefined
490 and actually traps on some architectures. */
491 if (! INTEGRAL_TYPE_P (TREE_TYPE (t
))
492 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t
))
493 || (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
494 && ! integer_onep (TREE_OPERAND (t
, 1))))
495 return negate_expr_p (TREE_OPERAND (t
, 1));
499 /* Negate -((double)float) as (double)(-float). */
500 if (TREE_CODE (type
) == REAL_TYPE
)
502 tree tem
= strip_float_extensions (t
);
504 return negate_expr_p (tem
);
509 /* Negate -f(x) as f(-x). */
510 if (negate_mathfn_p (get_call_combined_fn (t
)))
511 return negate_expr_p (CALL_EXPR_ARG (t
, 0));
515 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
516 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
518 tree op1
= TREE_OPERAND (t
, 1);
519 if (wi::eq_p (op1
, TYPE_PRECISION (type
) - 1))
530 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
531 simplification is possible.
532 If negate_expr_p would return true for T, NULL_TREE will never be
536 fold_negate_expr (location_t loc
, tree t
)
538 tree type
= TREE_TYPE (t
);
541 switch (TREE_CODE (t
))
543 /* Convert - (~A) to A + 1. */
545 if (INTEGRAL_TYPE_P (type
))
546 return fold_build2_loc (loc
, PLUS_EXPR
, type
, TREE_OPERAND (t
, 0),
547 build_one_cst (type
));
551 tem
= fold_negate_const (t
, type
);
552 if (TREE_OVERFLOW (tem
) == TREE_OVERFLOW (t
)
553 || (ANY_INTEGRAL_TYPE_P (type
)
554 && !TYPE_OVERFLOW_TRAPS (type
)
555 && TYPE_OVERFLOW_WRAPS (type
))
556 || (flag_sanitize
& SANITIZE_SI_OVERFLOW
) == 0)
561 tem
= fold_negate_const (t
, type
);
565 tem
= fold_negate_const (t
, type
);
570 tree rpart
= fold_negate_expr (loc
, TREE_REALPART (t
));
571 tree ipart
= fold_negate_expr (loc
, TREE_IMAGPART (t
));
573 return build_complex (type
, rpart
, ipart
);
579 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
580 tree
*elts
= XALLOCAVEC (tree
, count
);
582 for (i
= 0; i
< count
; i
++)
584 elts
[i
] = fold_negate_expr (loc
, VECTOR_CST_ELT (t
, i
));
585 if (elts
[i
] == NULL_TREE
)
589 return build_vector (type
, elts
);
593 if (negate_expr_p (t
))
594 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
595 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)),
596 fold_negate_expr (loc
, TREE_OPERAND (t
, 1)));
600 if (negate_expr_p (t
))
601 return fold_build1_loc (loc
, CONJ_EXPR
, type
,
602 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)));
606 if (!TYPE_OVERFLOW_SANITIZED (type
))
607 return TREE_OPERAND (t
, 0);
611 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
612 && !HONOR_SIGNED_ZEROS (element_mode (type
)))
614 /* -(A + B) -> (-B) - A. */
615 if (negate_expr_p (TREE_OPERAND (t
, 1))
616 && reorder_operands_p (TREE_OPERAND (t
, 0),
617 TREE_OPERAND (t
, 1)))
619 tem
= negate_expr (TREE_OPERAND (t
, 1));
620 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
621 tem
, TREE_OPERAND (t
, 0));
624 /* -(A + B) -> (-A) - B. */
625 if (negate_expr_p (TREE_OPERAND (t
, 0)))
627 tem
= negate_expr (TREE_OPERAND (t
, 0));
628 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
629 tem
, TREE_OPERAND (t
, 1));
635 /* - (A - B) -> B - A */
636 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
637 && !HONOR_SIGNED_ZEROS (element_mode (type
))
638 && reorder_operands_p (TREE_OPERAND (t
, 0), TREE_OPERAND (t
, 1)))
639 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
640 TREE_OPERAND (t
, 1), TREE_OPERAND (t
, 0));
644 if (TYPE_UNSIGNED (type
))
650 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
)))
652 tem
= TREE_OPERAND (t
, 1);
653 if (negate_expr_p (tem
))
654 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
655 TREE_OPERAND (t
, 0), negate_expr (tem
));
656 tem
= TREE_OPERAND (t
, 0);
657 if (negate_expr_p (tem
))
658 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
659 negate_expr (tem
), TREE_OPERAND (t
, 1));
666 if (TYPE_UNSIGNED (type
))
668 if (negate_expr_p (TREE_OPERAND (t
, 0)))
669 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
670 negate_expr (TREE_OPERAND (t
, 0)),
671 TREE_OPERAND (t
, 1));
672 /* In general we can't negate B in A / B, because if A is INT_MIN and
673 B is 1, we may turn this into INT_MIN / -1 which is undefined
674 and actually traps on some architectures. */
675 if ((! INTEGRAL_TYPE_P (TREE_TYPE (t
))
676 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t
))
677 || (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
678 && ! integer_onep (TREE_OPERAND (t
, 1))))
679 && negate_expr_p (TREE_OPERAND (t
, 1)))
680 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
682 negate_expr (TREE_OPERAND (t
, 1)));
686 /* Convert -((double)float) into (double)(-float). */
687 if (TREE_CODE (type
) == REAL_TYPE
)
689 tem
= strip_float_extensions (t
);
690 if (tem
!= t
&& negate_expr_p (tem
))
691 return fold_convert_loc (loc
, type
, negate_expr (tem
));
696 /* Negate -f(x) as f(-x). */
697 if (negate_mathfn_p (get_call_combined_fn (t
))
698 && negate_expr_p (CALL_EXPR_ARG (t
, 0)))
702 fndecl
= get_callee_fndecl (t
);
703 arg
= negate_expr (CALL_EXPR_ARG (t
, 0));
704 return build_call_expr_loc (loc
, fndecl
, 1, arg
);
709 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
710 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
712 tree op1
= TREE_OPERAND (t
, 1);
713 if (wi::eq_p (op1
, TYPE_PRECISION (type
) - 1))
715 tree ntype
= TYPE_UNSIGNED (type
)
716 ? signed_type_for (type
)
717 : unsigned_type_for (type
);
718 tree temp
= fold_convert_loc (loc
, ntype
, TREE_OPERAND (t
, 0));
719 temp
= fold_build2_loc (loc
, RSHIFT_EXPR
, ntype
, temp
, op1
);
720 return fold_convert_loc (loc
, type
, temp
);
732 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
733 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
745 loc
= EXPR_LOCATION (t
);
746 type
= TREE_TYPE (t
);
749 tem
= fold_negate_expr (loc
, t
);
751 tem
= build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (t
), t
);
752 return fold_convert_loc (loc
, type
, tem
);
755 /* Split a tree IN into a constant, literal and variable parts that could be
756 combined with CODE to make IN. "constant" means an expression with
757 TREE_CONSTANT but that isn't an actual constant. CODE must be a
758 commutative arithmetic operation. Store the constant part into *CONP,
759 the literal in *LITP and return the variable part. If a part isn't
760 present, set it to null. If the tree does not decompose in this way,
761 return the entire tree as the variable part and the other parts as null.
763 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
764 case, we negate an operand that was subtracted. Except if it is a
765 literal for which we use *MINUS_LITP instead.
767 If NEGATE_P is true, we are negating all of IN, again except a literal
768 for which we use *MINUS_LITP instead.
770 If IN is itself a literal or constant, return it as appropriate.
772 Note that we do not guarantee that any of the three values will be the
773 same type as IN, but they will have the same signedness and mode. */
776 split_tree (tree in
, enum tree_code code
, tree
*conp
, tree
*litp
,
777 tree
*minus_litp
, int negate_p
)
785 /* Strip any conversions that don't change the machine mode or signedness. */
786 STRIP_SIGN_NOPS (in
);
788 if (TREE_CODE (in
) == INTEGER_CST
|| TREE_CODE (in
) == REAL_CST
789 || TREE_CODE (in
) == FIXED_CST
)
791 else if (TREE_CODE (in
) == code
792 || ((! FLOAT_TYPE_P (TREE_TYPE (in
)) || flag_associative_math
)
793 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in
))
794 /* We can associate addition and subtraction together (even
795 though the C standard doesn't say so) for integers because
796 the value is not affected. For reals, the value might be
797 affected, so we can't. */
798 && ((code
== PLUS_EXPR
&& TREE_CODE (in
) == MINUS_EXPR
)
799 || (code
== MINUS_EXPR
&& TREE_CODE (in
) == PLUS_EXPR
))))
801 tree op0
= TREE_OPERAND (in
, 0);
802 tree op1
= TREE_OPERAND (in
, 1);
803 int neg1_p
= TREE_CODE (in
) == MINUS_EXPR
;
804 int neg_litp_p
= 0, neg_conp_p
= 0, neg_var_p
= 0;
806 /* First see if either of the operands is a literal, then a constant. */
807 if (TREE_CODE (op0
) == INTEGER_CST
|| TREE_CODE (op0
) == REAL_CST
808 || TREE_CODE (op0
) == FIXED_CST
)
809 *litp
= op0
, op0
= 0;
810 else if (TREE_CODE (op1
) == INTEGER_CST
|| TREE_CODE (op1
) == REAL_CST
811 || TREE_CODE (op1
) == FIXED_CST
)
812 *litp
= op1
, neg_litp_p
= neg1_p
, op1
= 0;
814 if (op0
!= 0 && TREE_CONSTANT (op0
))
815 *conp
= op0
, op0
= 0;
816 else if (op1
!= 0 && TREE_CONSTANT (op1
))
817 *conp
= op1
, neg_conp_p
= neg1_p
, op1
= 0;
819 /* If we haven't dealt with either operand, this is not a case we can
820 decompose. Otherwise, VAR is either of the ones remaining, if any. */
821 if (op0
!= 0 && op1
!= 0)
826 var
= op1
, neg_var_p
= neg1_p
;
828 /* Now do any needed negations. */
830 *minus_litp
= *litp
, *litp
= 0;
832 *conp
= negate_expr (*conp
);
834 var
= negate_expr (var
);
836 else if (TREE_CODE (in
) == BIT_NOT_EXPR
837 && code
== PLUS_EXPR
)
839 /* -X - 1 is folded to ~X, undo that here. */
840 *minus_litp
= build_one_cst (TREE_TYPE (in
));
841 var
= negate_expr (TREE_OPERAND (in
, 0));
843 else if (TREE_CONSTANT (in
))
851 *minus_litp
= *litp
, *litp
= 0;
852 else if (*minus_litp
)
853 *litp
= *minus_litp
, *minus_litp
= 0;
854 *conp
= negate_expr (*conp
);
855 var
= negate_expr (var
);
861 /* Re-associate trees split by the above function. T1 and T2 are
862 either expressions to associate or null. Return the new
863 expression, if any. LOC is the location of the new expression. If
864 we build an operation, do it in TYPE and with CODE. */
867 associate_trees (location_t loc
, tree t1
, tree t2
, enum tree_code code
, tree type
)
874 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
875 try to fold this since we will have infinite recursion. But do
876 deal with any NEGATE_EXPRs. */
877 if (TREE_CODE (t1
) == code
|| TREE_CODE (t2
) == code
878 || TREE_CODE (t1
) == MINUS_EXPR
|| TREE_CODE (t2
) == MINUS_EXPR
)
880 if (code
== PLUS_EXPR
)
882 if (TREE_CODE (t1
) == NEGATE_EXPR
)
883 return build2_loc (loc
, MINUS_EXPR
, type
,
884 fold_convert_loc (loc
, type
, t2
),
885 fold_convert_loc (loc
, type
,
886 TREE_OPERAND (t1
, 0)));
887 else if (TREE_CODE (t2
) == NEGATE_EXPR
)
888 return build2_loc (loc
, MINUS_EXPR
, type
,
889 fold_convert_loc (loc
, type
, t1
),
890 fold_convert_loc (loc
, type
,
891 TREE_OPERAND (t2
, 0)));
892 else if (integer_zerop (t2
))
893 return fold_convert_loc (loc
, type
, t1
);
895 else if (code
== MINUS_EXPR
)
897 if (integer_zerop (t2
))
898 return fold_convert_loc (loc
, type
, t1
);
901 return build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
902 fold_convert_loc (loc
, type
, t2
));
905 return fold_build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
906 fold_convert_loc (loc
, type
, t2
));
909 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
910 for use in int_const_binop, size_binop and size_diffop. */
913 int_binop_types_match_p (enum tree_code code
, const_tree type1
, const_tree type2
)
915 if (!INTEGRAL_TYPE_P (type1
) && !POINTER_TYPE_P (type1
))
917 if (!INTEGRAL_TYPE_P (type2
) && !POINTER_TYPE_P (type2
))
932 return TYPE_UNSIGNED (type1
) == TYPE_UNSIGNED (type2
)
933 && TYPE_PRECISION (type1
) == TYPE_PRECISION (type2
)
934 && TYPE_MODE (type1
) == TYPE_MODE (type2
);
938 /* Combine two integer constants ARG1 and ARG2 under operation CODE
939 to produce a new constant. Return NULL_TREE if we don't know how
940 to evaluate CODE at compile-time. */
943 int_const_binop_1 (enum tree_code code
, const_tree arg1
, const_tree parg2
,
948 tree type
= TREE_TYPE (arg1
);
949 signop sign
= TYPE_SIGN (type
);
950 bool overflow
= false;
952 wide_int arg2
= wide_int::from (parg2
, TYPE_PRECISION (type
),
953 TYPE_SIGN (TREE_TYPE (parg2
)));
958 res
= wi::bit_or (arg1
, arg2
);
962 res
= wi::bit_xor (arg1
, arg2
);
966 res
= wi::bit_and (arg1
, arg2
);
971 if (wi::neg_p (arg2
))
974 if (code
== RSHIFT_EXPR
)
980 if (code
== RSHIFT_EXPR
)
981 /* It's unclear from the C standard whether shifts can overflow.
982 The following code ignores overflow; perhaps a C standard
983 interpretation ruling is needed. */
984 res
= wi::rshift (arg1
, arg2
, sign
);
986 res
= wi::lshift (arg1
, arg2
);
991 if (wi::neg_p (arg2
))
994 if (code
== RROTATE_EXPR
)
1000 if (code
== RROTATE_EXPR
)
1001 res
= wi::rrotate (arg1
, arg2
);
1003 res
= wi::lrotate (arg1
, arg2
);
1007 res
= wi::add (arg1
, arg2
, sign
, &overflow
);
1011 res
= wi::sub (arg1
, arg2
, sign
, &overflow
);
1015 res
= wi::mul (arg1
, arg2
, sign
, &overflow
);
1018 case MULT_HIGHPART_EXPR
:
1019 res
= wi::mul_high (arg1
, arg2
, sign
);
1022 case TRUNC_DIV_EXPR
:
1023 case EXACT_DIV_EXPR
:
1026 res
= wi::div_trunc (arg1
, arg2
, sign
, &overflow
);
1029 case FLOOR_DIV_EXPR
:
1032 res
= wi::div_floor (arg1
, arg2
, sign
, &overflow
);
1038 res
= wi::div_ceil (arg1
, arg2
, sign
, &overflow
);
1041 case ROUND_DIV_EXPR
:
1044 res
= wi::div_round (arg1
, arg2
, sign
, &overflow
);
1047 case TRUNC_MOD_EXPR
:
1050 res
= wi::mod_trunc (arg1
, arg2
, sign
, &overflow
);
1053 case FLOOR_MOD_EXPR
:
1056 res
= wi::mod_floor (arg1
, arg2
, sign
, &overflow
);
1062 res
= wi::mod_ceil (arg1
, arg2
, sign
, &overflow
);
1065 case ROUND_MOD_EXPR
:
1068 res
= wi::mod_round (arg1
, arg2
, sign
, &overflow
);
1072 res
= wi::min (arg1
, arg2
, sign
);
1076 res
= wi::max (arg1
, arg2
, sign
);
1083 t
= force_fit_type (type
, res
, overflowable
,
1084 (((sign
== SIGNED
|| overflowable
== -1)
1086 | TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (parg2
)));
1092 int_const_binop (enum tree_code code
, const_tree arg1
, const_tree arg2
)
1094 return int_const_binop_1 (code
, arg1
, arg2
, 1);
1097 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1098 constant. We assume ARG1 and ARG2 have the same data type, or at least
1099 are the same kind of constant and the same machine mode. Return zero if
1100 combining the constants is not allowed in the current operating mode. */
1103 const_binop (enum tree_code code
, tree arg1
, tree arg2
)
1105 /* Sanity check for the recursive cases. */
1112 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg2
) == INTEGER_CST
)
1114 if (code
== POINTER_PLUS_EXPR
)
1115 return int_const_binop (PLUS_EXPR
,
1116 arg1
, fold_convert (TREE_TYPE (arg1
), arg2
));
1118 return int_const_binop (code
, arg1
, arg2
);
1121 if (TREE_CODE (arg1
) == REAL_CST
&& TREE_CODE (arg2
) == REAL_CST
)
1126 REAL_VALUE_TYPE value
;
1127 REAL_VALUE_TYPE result
;
1131 /* The following codes are handled by real_arithmetic. */
1146 d1
= TREE_REAL_CST (arg1
);
1147 d2
= TREE_REAL_CST (arg2
);
1149 type
= TREE_TYPE (arg1
);
1150 mode
= TYPE_MODE (type
);
1152 /* Don't perform operation if we honor signaling NaNs and
1153 either operand is a signaling NaN. */
1154 if (HONOR_SNANS (mode
)
1155 && (REAL_VALUE_ISSIGNALING_NAN (d1
)
1156 || REAL_VALUE_ISSIGNALING_NAN (d2
)))
1159 /* Don't perform operation if it would raise a division
1160 by zero exception. */
1161 if (code
== RDIV_EXPR
1162 && real_equal (&d2
, &dconst0
)
1163 && (flag_trapping_math
|| ! MODE_HAS_INFINITIES (mode
)))
1166 /* If either operand is a NaN, just return it. Otherwise, set up
1167 for floating-point trap; we return an overflow. */
1168 if (REAL_VALUE_ISNAN (d1
))
1170 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1173 t
= build_real (type
, d1
);
1176 else if (REAL_VALUE_ISNAN (d2
))
1178 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1181 t
= build_real (type
, d2
);
1185 inexact
= real_arithmetic (&value
, code
, &d1
, &d2
);
1186 real_convert (&result
, mode
, &value
);
1188 /* Don't constant fold this floating point operation if
1189 the result has overflowed and flag_trapping_math. */
1190 if (flag_trapping_math
1191 && MODE_HAS_INFINITIES (mode
)
1192 && REAL_VALUE_ISINF (result
)
1193 && !REAL_VALUE_ISINF (d1
)
1194 && !REAL_VALUE_ISINF (d2
))
1197 /* Don't constant fold this floating point operation if the
1198 result may dependent upon the run-time rounding mode and
1199 flag_rounding_math is set, or if GCC's software emulation
1200 is unable to accurately represent the result. */
1201 if ((flag_rounding_math
1202 || (MODE_COMPOSITE_P (mode
) && !flag_unsafe_math_optimizations
))
1203 && (inexact
|| !real_identical (&result
, &value
)))
1206 t
= build_real (type
, result
);
1208 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
);
1212 if (TREE_CODE (arg1
) == FIXED_CST
)
1214 FIXED_VALUE_TYPE f1
;
1215 FIXED_VALUE_TYPE f2
;
1216 FIXED_VALUE_TYPE result
;
1221 /* The following codes are handled by fixed_arithmetic. */
1227 case TRUNC_DIV_EXPR
:
1228 if (TREE_CODE (arg2
) != FIXED_CST
)
1230 f2
= TREE_FIXED_CST (arg2
);
1236 if (TREE_CODE (arg2
) != INTEGER_CST
)
1239 f2
.data
.high
= w2
.elt (1);
1240 f2
.data
.low
= w2
.elt (0);
1249 f1
= TREE_FIXED_CST (arg1
);
1250 type
= TREE_TYPE (arg1
);
1251 sat_p
= TYPE_SATURATING (type
);
1252 overflow_p
= fixed_arithmetic (&result
, code
, &f1
, &f2
, sat_p
);
1253 t
= build_fixed (type
, result
);
1254 /* Propagate overflow flags. */
1255 if (overflow_p
| TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
))
1256 TREE_OVERFLOW (t
) = 1;
1260 if (TREE_CODE (arg1
) == COMPLEX_CST
&& TREE_CODE (arg2
) == COMPLEX_CST
)
1262 tree type
= TREE_TYPE (arg1
);
1263 tree r1
= TREE_REALPART (arg1
);
1264 tree i1
= TREE_IMAGPART (arg1
);
1265 tree r2
= TREE_REALPART (arg2
);
1266 tree i2
= TREE_IMAGPART (arg2
);
1273 real
= const_binop (code
, r1
, r2
);
1274 imag
= const_binop (code
, i1
, i2
);
1278 if (COMPLEX_FLOAT_TYPE_P (type
))
1279 return do_mpc_arg2 (arg1
, arg2
, type
,
1280 /* do_nonfinite= */ folding_initializer
,
1283 real
= const_binop (MINUS_EXPR
,
1284 const_binop (MULT_EXPR
, r1
, r2
),
1285 const_binop (MULT_EXPR
, i1
, i2
));
1286 imag
= const_binop (PLUS_EXPR
,
1287 const_binop (MULT_EXPR
, r1
, i2
),
1288 const_binop (MULT_EXPR
, i1
, r2
));
1292 if (COMPLEX_FLOAT_TYPE_P (type
))
1293 return do_mpc_arg2 (arg1
, arg2
, type
,
1294 /* do_nonfinite= */ folding_initializer
,
1297 case TRUNC_DIV_EXPR
:
1299 case FLOOR_DIV_EXPR
:
1300 case ROUND_DIV_EXPR
:
1301 if (flag_complex_method
== 0)
1303 /* Keep this algorithm in sync with
1304 tree-complex.c:expand_complex_div_straight().
1306 Expand complex division to scalars, straightforward algorithm.
1307 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1311 = const_binop (PLUS_EXPR
,
1312 const_binop (MULT_EXPR
, r2
, r2
),
1313 const_binop (MULT_EXPR
, i2
, i2
));
1315 = const_binop (PLUS_EXPR
,
1316 const_binop (MULT_EXPR
, r1
, r2
),
1317 const_binop (MULT_EXPR
, i1
, i2
));
1319 = const_binop (MINUS_EXPR
,
1320 const_binop (MULT_EXPR
, i1
, r2
),
1321 const_binop (MULT_EXPR
, r1
, i2
));
1323 real
= const_binop (code
, t1
, magsquared
);
1324 imag
= const_binop (code
, t2
, magsquared
);
1328 /* Keep this algorithm in sync with
1329 tree-complex.c:expand_complex_div_wide().
1331 Expand complex division to scalars, modified algorithm to minimize
1332 overflow with wide input ranges. */
1333 tree compare
= fold_build2 (LT_EXPR
, boolean_type_node
,
1334 fold_abs_const (r2
, TREE_TYPE (type
)),
1335 fold_abs_const (i2
, TREE_TYPE (type
)));
1337 if (integer_nonzerop (compare
))
1339 /* In the TRUE branch, we compute
1341 div = (br * ratio) + bi;
1342 tr = (ar * ratio) + ai;
1343 ti = (ai * ratio) - ar;
1346 tree ratio
= const_binop (code
, r2
, i2
);
1347 tree div
= const_binop (PLUS_EXPR
, i2
,
1348 const_binop (MULT_EXPR
, r2
, ratio
));
1349 real
= const_binop (MULT_EXPR
, r1
, ratio
);
1350 real
= const_binop (PLUS_EXPR
, real
, i1
);
1351 real
= const_binop (code
, real
, div
);
1353 imag
= const_binop (MULT_EXPR
, i1
, ratio
);
1354 imag
= const_binop (MINUS_EXPR
, imag
, r1
);
1355 imag
= const_binop (code
, imag
, div
);
1359 /* In the FALSE branch, we compute
1361 divisor = (d * ratio) + c;
1362 tr = (b * ratio) + a;
1363 ti = b - (a * ratio);
1366 tree ratio
= const_binop (code
, i2
, r2
);
1367 tree div
= const_binop (PLUS_EXPR
, r2
,
1368 const_binop (MULT_EXPR
, i2
, ratio
));
1370 real
= const_binop (MULT_EXPR
, i1
, ratio
);
1371 real
= const_binop (PLUS_EXPR
, real
, r1
);
1372 real
= const_binop (code
, real
, div
);
1374 imag
= const_binop (MULT_EXPR
, r1
, ratio
);
1375 imag
= const_binop (MINUS_EXPR
, i1
, imag
);
1376 imag
= const_binop (code
, imag
, div
);
1386 return build_complex (type
, real
, imag
);
1389 if (TREE_CODE (arg1
) == VECTOR_CST
1390 && TREE_CODE (arg2
) == VECTOR_CST
)
1392 tree type
= TREE_TYPE (arg1
);
1393 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
1394 tree
*elts
= XALLOCAVEC (tree
, count
);
1396 for (i
= 0; i
< count
; i
++)
1398 tree elem1
= VECTOR_CST_ELT (arg1
, i
);
1399 tree elem2
= VECTOR_CST_ELT (arg2
, i
);
1401 elts
[i
] = const_binop (code
, elem1
, elem2
);
1403 /* It is possible that const_binop cannot handle the given
1404 code and return NULL_TREE */
1405 if (elts
[i
] == NULL_TREE
)
1409 return build_vector (type
, elts
);
1412 /* Shifts allow a scalar offset for a vector. */
1413 if (TREE_CODE (arg1
) == VECTOR_CST
1414 && TREE_CODE (arg2
) == INTEGER_CST
)
1416 tree type
= TREE_TYPE (arg1
);
1417 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
1418 tree
*elts
= XALLOCAVEC (tree
, count
);
1420 for (i
= 0; i
< count
; i
++)
1422 tree elem1
= VECTOR_CST_ELT (arg1
, i
);
1424 elts
[i
] = const_binop (code
, elem1
, arg2
);
1426 /* It is possible that const_binop cannot handle the given
1427 code and return NULL_TREE. */
1428 if (elts
[i
] == NULL_TREE
)
1432 return build_vector (type
, elts
);
1437 /* Overload that adds a TYPE parameter to be able to dispatch
1438 to fold_relational_const. */
1441 const_binop (enum tree_code code
, tree type
, tree arg1
, tree arg2
)
1443 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
1444 return fold_relational_const (code
, type
, arg1
, arg2
);
1446 /* ??? Until we make the const_binop worker take the type of the
1447 result as argument put those cases that need it here. */
1451 if ((TREE_CODE (arg1
) == REAL_CST
1452 && TREE_CODE (arg2
) == REAL_CST
)
1453 || (TREE_CODE (arg1
) == INTEGER_CST
1454 && TREE_CODE (arg2
) == INTEGER_CST
))
1455 return build_complex (type
, arg1
, arg2
);
1458 case VEC_PACK_TRUNC_EXPR
:
1459 case VEC_PACK_FIX_TRUNC_EXPR
:
1461 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
1464 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
/ 2
1465 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2
)) == nelts
/ 2);
1466 if (TREE_CODE (arg1
) != VECTOR_CST
1467 || TREE_CODE (arg2
) != VECTOR_CST
)
1470 elts
= XALLOCAVEC (tree
, nelts
);
1471 if (!vec_cst_ctor_to_array (arg1
, elts
)
1472 || !vec_cst_ctor_to_array (arg2
, elts
+ nelts
/ 2))
1475 for (i
= 0; i
< nelts
; i
++)
1477 elts
[i
] = fold_convert_const (code
== VEC_PACK_TRUNC_EXPR
1478 ? NOP_EXPR
: FIX_TRUNC_EXPR
,
1479 TREE_TYPE (type
), elts
[i
]);
1480 if (elts
[i
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[i
]))
1484 return build_vector (type
, elts
);
1487 case VEC_WIDEN_MULT_LO_EXPR
:
1488 case VEC_WIDEN_MULT_HI_EXPR
:
1489 case VEC_WIDEN_MULT_EVEN_EXPR
:
1490 case VEC_WIDEN_MULT_ODD_EXPR
:
1492 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
);
1493 unsigned int out
, ofs
, scale
;
1496 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
* 2
1497 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2
)) == nelts
* 2);
1498 if (TREE_CODE (arg1
) != VECTOR_CST
|| TREE_CODE (arg2
) != VECTOR_CST
)
1501 elts
= XALLOCAVEC (tree
, nelts
* 4);
1502 if (!vec_cst_ctor_to_array (arg1
, elts
)
1503 || !vec_cst_ctor_to_array (arg2
, elts
+ nelts
* 2))
1506 if (code
== VEC_WIDEN_MULT_LO_EXPR
)
1507 scale
= 0, ofs
= BYTES_BIG_ENDIAN
? nelts
: 0;
1508 else if (code
== VEC_WIDEN_MULT_HI_EXPR
)
1509 scale
= 0, ofs
= BYTES_BIG_ENDIAN
? 0 : nelts
;
1510 else if (code
== VEC_WIDEN_MULT_EVEN_EXPR
)
1512 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1515 for (out
= 0; out
< nelts
; out
++)
1517 unsigned int in1
= (out
<< scale
) + ofs
;
1518 unsigned int in2
= in1
+ nelts
* 2;
1521 t1
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), elts
[in1
]);
1522 t2
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), elts
[in2
]);
1524 if (t1
== NULL_TREE
|| t2
== NULL_TREE
)
1526 elts
[out
] = const_binop (MULT_EXPR
, t1
, t2
);
1527 if (elts
[out
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[out
]))
1531 return build_vector (type
, elts
);
1537 if (TREE_CODE_CLASS (code
) != tcc_binary
)
1540 /* Make sure type and arg0 have the same saturating flag. */
1541 gcc_checking_assert (TYPE_SATURATING (type
)
1542 == TYPE_SATURATING (TREE_TYPE (arg1
)));
1544 return const_binop (code
, arg1
, arg2
);
1547 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1548 Return zero if computing the constants is not possible. */
1551 const_unop (enum tree_code code
, tree type
, tree arg0
)
1553 /* Don't perform the operation, other than NEGATE and ABS, if
1554 flag_signaling_nans is on and the operand is a signaling NaN. */
1555 if (TREE_CODE (arg0
) == REAL_CST
1556 && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
1557 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0
))
1558 && code
!= NEGATE_EXPR
1559 && code
!= ABS_EXPR
)
1566 case FIX_TRUNC_EXPR
:
1567 case FIXED_CONVERT_EXPR
:
1568 return fold_convert_const (code
, type
, arg0
);
1570 case ADDR_SPACE_CONVERT_EXPR
:
1571 /* If the source address is 0, and the source address space
1572 cannot have a valid object at 0, fold to dest type null. */
1573 if (integer_zerop (arg0
)
1574 && !(targetm
.addr_space
.zero_address_valid
1575 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0
))))))
1576 return fold_convert_const (code
, type
, arg0
);
1579 case VIEW_CONVERT_EXPR
:
1580 return fold_view_convert_expr (type
, arg0
);
1584 /* Can't call fold_negate_const directly here as that doesn't
1585 handle all cases and we might not be able to negate some
1587 tree tem
= fold_negate_expr (UNKNOWN_LOCATION
, arg0
);
1588 if (tem
&& CONSTANT_CLASS_P (tem
))
1594 if (TREE_CODE (arg0
) == INTEGER_CST
|| TREE_CODE (arg0
) == REAL_CST
)
1595 return fold_abs_const (arg0
, type
);
1599 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1601 tree ipart
= fold_negate_const (TREE_IMAGPART (arg0
),
1603 return build_complex (type
, TREE_REALPART (arg0
), ipart
);
1608 if (TREE_CODE (arg0
) == INTEGER_CST
)
1609 return fold_not_const (arg0
, type
);
1610 /* Perform BIT_NOT_EXPR on each element individually. */
1611 else if (TREE_CODE (arg0
) == VECTOR_CST
)
1615 unsigned count
= VECTOR_CST_NELTS (arg0
), i
;
1617 elements
= XALLOCAVEC (tree
, count
);
1618 for (i
= 0; i
< count
; i
++)
1620 elem
= VECTOR_CST_ELT (arg0
, i
);
1621 elem
= const_unop (BIT_NOT_EXPR
, TREE_TYPE (type
), elem
);
1622 if (elem
== NULL_TREE
)
1627 return build_vector (type
, elements
);
1631 case TRUTH_NOT_EXPR
:
1632 if (TREE_CODE (arg0
) == INTEGER_CST
)
1633 return constant_boolean_node (integer_zerop (arg0
), type
);
1637 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1638 return fold_convert (type
, TREE_REALPART (arg0
));
1642 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1643 return fold_convert (type
, TREE_IMAGPART (arg0
));
1646 case VEC_UNPACK_LO_EXPR
:
1647 case VEC_UNPACK_HI_EXPR
:
1648 case VEC_UNPACK_FLOAT_LO_EXPR
:
1649 case VEC_UNPACK_FLOAT_HI_EXPR
:
1651 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
1653 enum tree_code subcode
;
1655 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
* 2);
1656 if (TREE_CODE (arg0
) != VECTOR_CST
)
1659 elts
= XALLOCAVEC (tree
, nelts
* 2);
1660 if (!vec_cst_ctor_to_array (arg0
, elts
))
1663 if ((!BYTES_BIG_ENDIAN
) ^ (code
== VEC_UNPACK_LO_EXPR
1664 || code
== VEC_UNPACK_FLOAT_LO_EXPR
))
1667 if (code
== VEC_UNPACK_LO_EXPR
|| code
== VEC_UNPACK_HI_EXPR
)
1670 subcode
= FLOAT_EXPR
;
1672 for (i
= 0; i
< nelts
; i
++)
1674 elts
[i
] = fold_convert_const (subcode
, TREE_TYPE (type
), elts
[i
]);
1675 if (elts
[i
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[i
]))
1679 return build_vector (type
, elts
);
1682 case REDUC_MIN_EXPR
:
1683 case REDUC_MAX_EXPR
:
1684 case REDUC_PLUS_EXPR
:
1686 unsigned int nelts
, i
;
1688 enum tree_code subcode
;
1690 if (TREE_CODE (arg0
) != VECTOR_CST
)
1692 nelts
= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
));
1694 elts
= XALLOCAVEC (tree
, nelts
);
1695 if (!vec_cst_ctor_to_array (arg0
, elts
))
1700 case REDUC_MIN_EXPR
: subcode
= MIN_EXPR
; break;
1701 case REDUC_MAX_EXPR
: subcode
= MAX_EXPR
; break;
1702 case REDUC_PLUS_EXPR
: subcode
= PLUS_EXPR
; break;
1703 default: gcc_unreachable ();
1706 for (i
= 1; i
< nelts
; i
++)
1708 elts
[0] = const_binop (subcode
, elts
[0], elts
[i
]);
1709 if (elts
[0] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[0]))
1723 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1724 indicates which particular sizetype to create. */
1727 size_int_kind (HOST_WIDE_INT number
, enum size_type_kind kind
)
1729 return build_int_cst (sizetype_tab
[(int) kind
], number
);
1732 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1733 is a tree code. The type of the result is taken from the operands.
1734 Both must be equivalent integer types, ala int_binop_types_match_p.
1735 If the operands are constant, so is the result. */
1738 size_binop_loc (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
)
1740 tree type
= TREE_TYPE (arg0
);
1742 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
1743 return error_mark_node
;
1745 gcc_assert (int_binop_types_match_p (code
, TREE_TYPE (arg0
),
1748 /* Handle the special case of two integer constants faster. */
1749 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
1751 /* And some specific cases even faster than that. */
1752 if (code
== PLUS_EXPR
)
1754 if (integer_zerop (arg0
) && !TREE_OVERFLOW (arg0
))
1756 if (integer_zerop (arg1
) && !TREE_OVERFLOW (arg1
))
1759 else if (code
== MINUS_EXPR
)
1761 if (integer_zerop (arg1
) && !TREE_OVERFLOW (arg1
))
1764 else if (code
== MULT_EXPR
)
1766 if (integer_onep (arg0
) && !TREE_OVERFLOW (arg0
))
1770 /* Handle general case of two integer constants. For sizetype
1771 constant calculations we always want to know about overflow,
1772 even in the unsigned case. */
1773 return int_const_binop_1 (code
, arg0
, arg1
, -1);
1776 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
1779 /* Given two values, either both of sizetype or both of bitsizetype,
1780 compute the difference between the two values. Return the value
1781 in signed type corresponding to the type of the operands. */
1784 size_diffop_loc (location_t loc
, tree arg0
, tree arg1
)
1786 tree type
= TREE_TYPE (arg0
);
1789 gcc_assert (int_binop_types_match_p (MINUS_EXPR
, TREE_TYPE (arg0
),
1792 /* If the type is already signed, just do the simple thing. */
1793 if (!TYPE_UNSIGNED (type
))
1794 return size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
);
1796 if (type
== sizetype
)
1798 else if (type
== bitsizetype
)
1799 ctype
= sbitsizetype
;
1801 ctype
= signed_type_for (type
);
1803 /* If either operand is not a constant, do the conversions to the signed
1804 type and subtract. The hardware will do the right thing with any
1805 overflow in the subtraction. */
1806 if (TREE_CODE (arg0
) != INTEGER_CST
|| TREE_CODE (arg1
) != INTEGER_CST
)
1807 return size_binop_loc (loc
, MINUS_EXPR
,
1808 fold_convert_loc (loc
, ctype
, arg0
),
1809 fold_convert_loc (loc
, ctype
, arg1
));
1811 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1812 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1813 overflow) and negate (which can't either). Special-case a result
1814 of zero while we're here. */
1815 if (tree_int_cst_equal (arg0
, arg1
))
1816 return build_int_cst (ctype
, 0);
1817 else if (tree_int_cst_lt (arg1
, arg0
))
1818 return fold_convert_loc (loc
, ctype
,
1819 size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
));
1821 return size_binop_loc (loc
, MINUS_EXPR
, build_int_cst (ctype
, 0),
1822 fold_convert_loc (loc
, ctype
,
1823 size_binop_loc (loc
,
1828 /* A subroutine of fold_convert_const handling conversions of an
1829 INTEGER_CST to another integer type. */
1832 fold_convert_const_int_from_int (tree type
, const_tree arg1
)
1834 /* Given an integer constant, make new constant with new type,
1835 appropriately sign-extended or truncated. Use widest_int
1836 so that any extension is done according ARG1's type. */
1837 return force_fit_type (type
, wi::to_widest (arg1
),
1838 !POINTER_TYPE_P (TREE_TYPE (arg1
)),
1839 TREE_OVERFLOW (arg1
));
1842 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1843 to an integer type. */
1846 fold_convert_const_int_from_real (enum tree_code code
, tree type
, const_tree arg1
)
1848 bool overflow
= false;
1851 /* The following code implements the floating point to integer
1852 conversion rules required by the Java Language Specification,
1853 that IEEE NaNs are mapped to zero and values that overflow
1854 the target precision saturate, i.e. values greater than
1855 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1856 are mapped to INT_MIN. These semantics are allowed by the
1857 C and C++ standards that simply state that the behavior of
1858 FP-to-integer conversion is unspecified upon overflow. */
1862 REAL_VALUE_TYPE x
= TREE_REAL_CST (arg1
);
1866 case FIX_TRUNC_EXPR
:
1867 real_trunc (&r
, VOIDmode
, &x
);
1874 /* If R is NaN, return zero and show we have an overflow. */
1875 if (REAL_VALUE_ISNAN (r
))
1878 val
= wi::zero (TYPE_PRECISION (type
));
1881 /* See if R is less than the lower bound or greater than the
1886 tree lt
= TYPE_MIN_VALUE (type
);
1887 REAL_VALUE_TYPE l
= real_value_from_int_cst (NULL_TREE
, lt
);
1888 if (real_less (&r
, &l
))
1897 tree ut
= TYPE_MAX_VALUE (type
);
1900 REAL_VALUE_TYPE u
= real_value_from_int_cst (NULL_TREE
, ut
);
1901 if (real_less (&u
, &r
))
1910 val
= real_to_integer (&r
, &overflow
, TYPE_PRECISION (type
));
1912 t
= force_fit_type (type
, val
, -1, overflow
| TREE_OVERFLOW (arg1
));
1916 /* A subroutine of fold_convert_const handling conversions of a
1917 FIXED_CST to an integer type. */
1920 fold_convert_const_int_from_fixed (tree type
, const_tree arg1
)
1923 double_int temp
, temp_trunc
;
1926 /* Right shift FIXED_CST to temp by fbit. */
1927 temp
= TREE_FIXED_CST (arg1
).data
;
1928 mode
= TREE_FIXED_CST (arg1
).mode
;
1929 if (GET_MODE_FBIT (mode
) < HOST_BITS_PER_DOUBLE_INT
)
1931 temp
= temp
.rshift (GET_MODE_FBIT (mode
),
1932 HOST_BITS_PER_DOUBLE_INT
,
1933 SIGNED_FIXED_POINT_MODE_P (mode
));
1935 /* Left shift temp to temp_trunc by fbit. */
1936 temp_trunc
= temp
.lshift (GET_MODE_FBIT (mode
),
1937 HOST_BITS_PER_DOUBLE_INT
,
1938 SIGNED_FIXED_POINT_MODE_P (mode
));
1942 temp
= double_int_zero
;
1943 temp_trunc
= double_int_zero
;
1946 /* If FIXED_CST is negative, we need to round the value toward 0.
1947 By checking if the fractional bits are not zero to add 1 to temp. */
1948 if (SIGNED_FIXED_POINT_MODE_P (mode
)
1949 && temp_trunc
.is_negative ()
1950 && TREE_FIXED_CST (arg1
).data
!= temp_trunc
)
1951 temp
+= double_int_one
;
1953 /* Given a fixed-point constant, make new constant with new type,
1954 appropriately sign-extended or truncated. */
1955 t
= force_fit_type (type
, temp
, -1,
1956 (temp
.is_negative ()
1957 && (TYPE_UNSIGNED (type
)
1958 < TYPE_UNSIGNED (TREE_TYPE (arg1
))))
1959 | TREE_OVERFLOW (arg1
));
1964 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1965 to another floating point type. */
1968 fold_convert_const_real_from_real (tree type
, const_tree arg1
)
1970 REAL_VALUE_TYPE value
;
1973 /* Don't perform the operation if flag_signaling_nans is on
1974 and the operand is a signaling NaN. */
1975 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
)))
1976 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1
)))
1979 real_convert (&value
, TYPE_MODE (type
), &TREE_REAL_CST (arg1
));
1980 t
= build_real (type
, value
);
1982 /* If converting an infinity or NAN to a representation that doesn't
1983 have one, set the overflow bit so that we can produce some kind of
1984 error message at the appropriate point if necessary. It's not the
1985 most user-friendly message, but it's better than nothing. */
1986 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1
))
1987 && !MODE_HAS_INFINITIES (TYPE_MODE (type
)))
1988 TREE_OVERFLOW (t
) = 1;
1989 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
1990 && !MODE_HAS_NANS (TYPE_MODE (type
)))
1991 TREE_OVERFLOW (t
) = 1;
1992 /* Regular overflow, conversion produced an infinity in a mode that
1993 can't represent them. */
1994 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type
))
1995 && REAL_VALUE_ISINF (value
)
1996 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1
)))
1997 TREE_OVERFLOW (t
) = 1;
1999 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
2003 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2004 to a floating point type. */
2007 fold_convert_const_real_from_fixed (tree type
, const_tree arg1
)
2009 REAL_VALUE_TYPE value
;
2012 real_convert_from_fixed (&value
, TYPE_MODE (type
), &TREE_FIXED_CST (arg1
));
2013 t
= build_real (type
, value
);
2015 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
2019 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2020 to another fixed-point type. */
2023 fold_convert_const_fixed_from_fixed (tree type
, const_tree arg1
)
2025 FIXED_VALUE_TYPE value
;
2029 overflow_p
= fixed_convert (&value
, TYPE_MODE (type
), &TREE_FIXED_CST (arg1
),
2030 TYPE_SATURATING (type
));
2031 t
= build_fixed (type
, value
);
2033 /* Propagate overflow flags. */
2034 if (overflow_p
| TREE_OVERFLOW (arg1
))
2035 TREE_OVERFLOW (t
) = 1;
2039 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2040 to a fixed-point type. */
2043 fold_convert_const_fixed_from_int (tree type
, const_tree arg1
)
2045 FIXED_VALUE_TYPE value
;
2050 gcc_assert (TREE_INT_CST_NUNITS (arg1
) <= 2);
2052 di
.low
= TREE_INT_CST_ELT (arg1
, 0);
2053 if (TREE_INT_CST_NUNITS (arg1
) == 1)
2054 di
.high
= (HOST_WIDE_INT
) di
.low
< 0 ? (HOST_WIDE_INT
) -1 : 0;
2056 di
.high
= TREE_INT_CST_ELT (arg1
, 1);
2058 overflow_p
= fixed_convert_from_int (&value
, TYPE_MODE (type
), di
,
2059 TYPE_UNSIGNED (TREE_TYPE (arg1
)),
2060 TYPE_SATURATING (type
));
2061 t
= build_fixed (type
, value
);
2063 /* Propagate overflow flags. */
2064 if (overflow_p
| TREE_OVERFLOW (arg1
))
2065 TREE_OVERFLOW (t
) = 1;
2069 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2070 to a fixed-point type. */
2073 fold_convert_const_fixed_from_real (tree type
, const_tree arg1
)
2075 FIXED_VALUE_TYPE value
;
2079 overflow_p
= fixed_convert_from_real (&value
, TYPE_MODE (type
),
2080 &TREE_REAL_CST (arg1
),
2081 TYPE_SATURATING (type
));
2082 t
= build_fixed (type
, value
);
2084 /* Propagate overflow flags. */
2085 if (overflow_p
| TREE_OVERFLOW (arg1
))
2086 TREE_OVERFLOW (t
) = 1;
2090 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2091 type TYPE. If no simplification can be done return NULL_TREE. */
2094 fold_convert_const (enum tree_code code
, tree type
, tree arg1
)
2096 if (TREE_TYPE (arg1
) == type
)
2099 if (POINTER_TYPE_P (type
) || INTEGRAL_TYPE_P (type
)
2100 || TREE_CODE (type
) == OFFSET_TYPE
)
2102 if (TREE_CODE (arg1
) == INTEGER_CST
)
2103 return fold_convert_const_int_from_int (type
, arg1
);
2104 else if (TREE_CODE (arg1
) == REAL_CST
)
2105 return fold_convert_const_int_from_real (code
, type
, arg1
);
2106 else if (TREE_CODE (arg1
) == FIXED_CST
)
2107 return fold_convert_const_int_from_fixed (type
, arg1
);
2109 else if (TREE_CODE (type
) == REAL_TYPE
)
2111 if (TREE_CODE (arg1
) == INTEGER_CST
)
2112 return build_real_from_int_cst (type
, arg1
);
2113 else if (TREE_CODE (arg1
) == REAL_CST
)
2114 return fold_convert_const_real_from_real (type
, arg1
);
2115 else if (TREE_CODE (arg1
) == FIXED_CST
)
2116 return fold_convert_const_real_from_fixed (type
, arg1
);
2118 else if (TREE_CODE (type
) == FIXED_POINT_TYPE
)
2120 if (TREE_CODE (arg1
) == FIXED_CST
)
2121 return fold_convert_const_fixed_from_fixed (type
, arg1
);
2122 else if (TREE_CODE (arg1
) == INTEGER_CST
)
2123 return fold_convert_const_fixed_from_int (type
, arg1
);
2124 else if (TREE_CODE (arg1
) == REAL_CST
)
2125 return fold_convert_const_fixed_from_real (type
, arg1
);
2127 else if (TREE_CODE (type
) == VECTOR_TYPE
)
2129 if (TREE_CODE (arg1
) == VECTOR_CST
2130 && TYPE_VECTOR_SUBPARTS (type
) == VECTOR_CST_NELTS (arg1
))
2132 int len
= TYPE_VECTOR_SUBPARTS (type
);
2133 tree elttype
= TREE_TYPE (type
);
2134 tree
*v
= XALLOCAVEC (tree
, len
);
2135 for (int i
= 0; i
< len
; ++i
)
2137 tree elt
= VECTOR_CST_ELT (arg1
, i
);
2138 tree cvt
= fold_convert_const (code
, elttype
, elt
);
2139 if (cvt
== NULL_TREE
)
2143 return build_vector (type
, v
);
2149 /* Construct a vector of zero elements of vector type TYPE. */
2152 build_zero_vector (tree type
)
2156 t
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), integer_zero_node
);
2157 return build_vector_from_val (type
, t
);
2160 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2163 fold_convertible_p (const_tree type
, const_tree arg
)
2165 tree orig
= TREE_TYPE (arg
);
2170 if (TREE_CODE (arg
) == ERROR_MARK
2171 || TREE_CODE (type
) == ERROR_MARK
2172 || TREE_CODE (orig
) == ERROR_MARK
)
2175 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
2178 switch (TREE_CODE (type
))
2180 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2181 case POINTER_TYPE
: case REFERENCE_TYPE
:
2183 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2184 || TREE_CODE (orig
) == OFFSET_TYPE
)
2186 return (TREE_CODE (orig
) == VECTOR_TYPE
2187 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2190 case FIXED_POINT_TYPE
:
2194 return TREE_CODE (type
) == TREE_CODE (orig
);
2201 /* Convert expression ARG to type TYPE. Used by the middle-end for
2202 simple conversions in preference to calling the front-end's convert. */
2205 fold_convert_loc (location_t loc
, tree type
, tree arg
)
2207 tree orig
= TREE_TYPE (arg
);
2213 if (TREE_CODE (arg
) == ERROR_MARK
2214 || TREE_CODE (type
) == ERROR_MARK
2215 || TREE_CODE (orig
) == ERROR_MARK
)
2216 return error_mark_node
;
2218 switch (TREE_CODE (type
))
2221 case REFERENCE_TYPE
:
2222 /* Handle conversions between pointers to different address spaces. */
2223 if (POINTER_TYPE_P (orig
)
2224 && (TYPE_ADDR_SPACE (TREE_TYPE (type
))
2225 != TYPE_ADDR_SPACE (TREE_TYPE (orig
))))
2226 return fold_build1_loc (loc
, ADDR_SPACE_CONVERT_EXPR
, type
, arg
);
2229 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2231 if (TREE_CODE (arg
) == INTEGER_CST
)
2233 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
2234 if (tem
!= NULL_TREE
)
2237 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2238 || TREE_CODE (orig
) == OFFSET_TYPE
)
2239 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2240 if (TREE_CODE (orig
) == COMPLEX_TYPE
)
2241 return fold_convert_loc (loc
, type
,
2242 fold_build1_loc (loc
, REALPART_EXPR
,
2243 TREE_TYPE (orig
), arg
));
2244 gcc_assert (TREE_CODE (orig
) == VECTOR_TYPE
2245 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2246 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2249 if (TREE_CODE (arg
) == INTEGER_CST
)
2251 tem
= fold_convert_const (FLOAT_EXPR
, type
, arg
);
2252 if (tem
!= NULL_TREE
)
2255 else if (TREE_CODE (arg
) == REAL_CST
)
2257 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
2258 if (tem
!= NULL_TREE
)
2261 else if (TREE_CODE (arg
) == FIXED_CST
)
2263 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
2264 if (tem
!= NULL_TREE
)
2268 switch (TREE_CODE (orig
))
2271 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2272 case POINTER_TYPE
: case REFERENCE_TYPE
:
2273 return fold_build1_loc (loc
, FLOAT_EXPR
, type
, arg
);
2276 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2278 case FIXED_POINT_TYPE
:
2279 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
2282 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2283 return fold_convert_loc (loc
, type
, tem
);
2289 case FIXED_POINT_TYPE
:
2290 if (TREE_CODE (arg
) == FIXED_CST
|| TREE_CODE (arg
) == INTEGER_CST
2291 || TREE_CODE (arg
) == REAL_CST
)
2293 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
2294 if (tem
!= NULL_TREE
)
2295 goto fold_convert_exit
;
2298 switch (TREE_CODE (orig
))
2300 case FIXED_POINT_TYPE
:
2305 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
2308 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2309 return fold_convert_loc (loc
, type
, tem
);
2316 switch (TREE_CODE (orig
))
2319 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2320 case POINTER_TYPE
: case REFERENCE_TYPE
:
2322 case FIXED_POINT_TYPE
:
2323 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
2324 fold_convert_loc (loc
, TREE_TYPE (type
), arg
),
2325 fold_convert_loc (loc
, TREE_TYPE (type
),
2326 integer_zero_node
));
2331 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
2333 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
),
2334 TREE_OPERAND (arg
, 0));
2335 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
),
2336 TREE_OPERAND (arg
, 1));
2337 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
2340 arg
= save_expr (arg
);
2341 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2342 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, TREE_TYPE (orig
), arg
);
2343 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
), rpart
);
2344 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
), ipart
);
2345 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
2353 if (integer_zerop (arg
))
2354 return build_zero_vector (type
);
2355 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2356 gcc_assert (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2357 || TREE_CODE (orig
) == VECTOR_TYPE
);
2358 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, type
, arg
);
2361 tem
= fold_ignored_result (arg
);
2362 return fold_build1_loc (loc
, NOP_EXPR
, type
, tem
);
2365 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
2366 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2370 protected_set_expr_location_unshare (tem
, loc
);
2374 /* Return false if expr can be assumed not to be an lvalue, true
2378 maybe_lvalue_p (const_tree x
)
2380 /* We only need to wrap lvalue tree codes. */
2381 switch (TREE_CODE (x
))
2394 case ARRAY_RANGE_REF
:
2400 case PREINCREMENT_EXPR
:
2401 case PREDECREMENT_EXPR
:
2403 case TRY_CATCH_EXPR
:
2404 case WITH_CLEANUP_EXPR
:
2413 /* Assume the worst for front-end tree codes. */
2414 if ((int)TREE_CODE (x
) >= NUM_TREE_CODES
)
2422 /* Return an expr equal to X but certainly not valid as an lvalue. */
2425 non_lvalue_loc (location_t loc
, tree x
)
2427 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2432 if (! maybe_lvalue_p (x
))
2434 return build1_loc (loc
, NON_LVALUE_EXPR
, TREE_TYPE (x
), x
);
2437 /* When pedantic, return an expr equal to X but certainly not valid as a
2438 pedantic lvalue. Otherwise, return X. */
2441 pedantic_non_lvalue_loc (location_t loc
, tree x
)
2443 return protected_set_expr_location_unshare (x
, loc
);
2446 /* Given a tree comparison code, return the code that is the logical inverse.
2447 It is generally not safe to do this for floating-point comparisons, except
2448 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2449 ERROR_MARK in this case. */
2452 invert_tree_comparison (enum tree_code code
, bool honor_nans
)
2454 if (honor_nans
&& flag_trapping_math
&& code
!= EQ_EXPR
&& code
!= NE_EXPR
2455 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
)
2465 return honor_nans
? UNLE_EXPR
: LE_EXPR
;
2467 return honor_nans
? UNLT_EXPR
: LT_EXPR
;
2469 return honor_nans
? UNGE_EXPR
: GE_EXPR
;
2471 return honor_nans
? UNGT_EXPR
: GT_EXPR
;
2485 return UNORDERED_EXPR
;
2486 case UNORDERED_EXPR
:
2487 return ORDERED_EXPR
;
2493 /* Similar, but return the comparison that results if the operands are
2494 swapped. This is safe for floating-point. */
2497 swap_tree_comparison (enum tree_code code
)
2504 case UNORDERED_EXPR
:
2530 /* Convert a comparison tree code from an enum tree_code representation
2531 into a compcode bit-based encoding. This function is the inverse of
2532 compcode_to_comparison. */
2534 static enum comparison_code
2535 comparison_to_compcode (enum tree_code code
)
2552 return COMPCODE_ORD
;
2553 case UNORDERED_EXPR
:
2554 return COMPCODE_UNORD
;
2556 return COMPCODE_UNLT
;
2558 return COMPCODE_UNEQ
;
2560 return COMPCODE_UNLE
;
2562 return COMPCODE_UNGT
;
2564 return COMPCODE_LTGT
;
2566 return COMPCODE_UNGE
;
2572 /* Convert a compcode bit-based encoding of a comparison operator back
2573 to GCC's enum tree_code representation. This function is the
2574 inverse of comparison_to_compcode. */
2576 static enum tree_code
2577 compcode_to_comparison (enum comparison_code code
)
2594 return ORDERED_EXPR
;
2595 case COMPCODE_UNORD
:
2596 return UNORDERED_EXPR
;
2614 /* Return a tree for the comparison which is the combination of
2615 doing the AND or OR (depending on CODE) of the two operations LCODE
2616 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2617 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2618 if this makes the transformation invalid. */
2621 combine_comparisons (location_t loc
,
2622 enum tree_code code
, enum tree_code lcode
,
2623 enum tree_code rcode
, tree truth_type
,
2624 tree ll_arg
, tree lr_arg
)
2626 bool honor_nans
= HONOR_NANS (ll_arg
);
2627 enum comparison_code lcompcode
= comparison_to_compcode (lcode
);
2628 enum comparison_code rcompcode
= comparison_to_compcode (rcode
);
2633 case TRUTH_AND_EXPR
: case TRUTH_ANDIF_EXPR
:
2634 compcode
= lcompcode
& rcompcode
;
2637 case TRUTH_OR_EXPR
: case TRUTH_ORIF_EXPR
:
2638 compcode
= lcompcode
| rcompcode
;
2647 /* Eliminate unordered comparisons, as well as LTGT and ORD
2648 which are not used unless the mode has NaNs. */
2649 compcode
&= ~COMPCODE_UNORD
;
2650 if (compcode
== COMPCODE_LTGT
)
2651 compcode
= COMPCODE_NE
;
2652 else if (compcode
== COMPCODE_ORD
)
2653 compcode
= COMPCODE_TRUE
;
2655 else if (flag_trapping_math
)
2657 /* Check that the original operation and the optimized ones will trap
2658 under the same condition. */
2659 bool ltrap
= (lcompcode
& COMPCODE_UNORD
) == 0
2660 && (lcompcode
!= COMPCODE_EQ
)
2661 && (lcompcode
!= COMPCODE_ORD
);
2662 bool rtrap
= (rcompcode
& COMPCODE_UNORD
) == 0
2663 && (rcompcode
!= COMPCODE_EQ
)
2664 && (rcompcode
!= COMPCODE_ORD
);
2665 bool trap
= (compcode
& COMPCODE_UNORD
) == 0
2666 && (compcode
!= COMPCODE_EQ
)
2667 && (compcode
!= COMPCODE_ORD
);
2669 /* In a short-circuited boolean expression the LHS might be
2670 such that the RHS, if evaluated, will never trap. For
2671 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2672 if neither x nor y is NaN. (This is a mixed blessing: for
2673 example, the expression above will never trap, hence
2674 optimizing it to x < y would be invalid). */
2675 if ((code
== TRUTH_ORIF_EXPR
&& (lcompcode
& COMPCODE_UNORD
))
2676 || (code
== TRUTH_ANDIF_EXPR
&& !(lcompcode
& COMPCODE_UNORD
)))
2679 /* If the comparison was short-circuited, and only the RHS
2680 trapped, we may now generate a spurious trap. */
2682 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2685 /* If we changed the conditions that cause a trap, we lose. */
2686 if ((ltrap
|| rtrap
) != trap
)
2690 if (compcode
== COMPCODE_TRUE
)
2691 return constant_boolean_node (true, truth_type
);
2692 else if (compcode
== COMPCODE_FALSE
)
2693 return constant_boolean_node (false, truth_type
);
2696 enum tree_code tcode
;
2698 tcode
= compcode_to_comparison ((enum comparison_code
) compcode
);
2699 return fold_build2_loc (loc
, tcode
, truth_type
, ll_arg
, lr_arg
);
2703 /* Return nonzero if two operands (typically of the same tree node)
2704 are necessarily equal. FLAGS modifies behavior as follows:
2706 If OEP_ONLY_CONST is set, only return nonzero for constants.
2707 This function tests whether the operands are indistinguishable;
2708 it does not test whether they are equal using C's == operation.
2709 The distinction is important for IEEE floating point, because
2710 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2711 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2713 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2714 even though it may hold multiple values during a function.
2715 This is because a GCC tree node guarantees that nothing else is
2716 executed between the evaluation of its "operands" (which may often
2717 be evaluated in arbitrary order). Hence if the operands themselves
2718 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2719 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2720 unset means assuming isochronic (or instantaneous) tree equivalence.
2721 Unless comparing arbitrary expression trees, such as from different
2722 statements, this flag can usually be left unset.
2724 If OEP_PURE_SAME is set, then pure functions with identical arguments
2725 are considered the same. It is used when the caller has other ways
2726 to ensure that global memory is unchanged in between.
2728 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2729 not values of expressions.
2731 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2732 any operand with side effect. This is unnecessarily conservative in the
2733 case we know that arg0 and arg1 are in disjoint code paths (such as in
2734 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2735 addresses with TREE_CONSTANT flag set so we know that &var == &var
2736 even if var is volatile. */
/* NOTE(review): this block is a mangled extraction -- each logical source
   line is split across several physical lines and some lines (braces,
   returns, case labels, and the function's declarator/return-type line)
   were dropped entirely by the extraction.  Code tokens below are kept
   verbatim; only comment text was added or corrected.  */
2739 operand_equal_p (const_tree arg0
, const_tree arg1
, unsigned int flags
)
2741 /* If either is ERROR_MARK, they aren't equal. */
2742 if (TREE_CODE (arg0
) == ERROR_MARK
|| TREE_CODE (arg1
) == ERROR_MARK
2743 || TREE_TYPE (arg0
) == error_mark_node
2744 || TREE_TYPE (arg1
) == error_mark_node
)
2747 /* Similar, if either does not have a type (like a released SSA name),
2748 they aren't equal. */
2749 if (!TREE_TYPE (arg0
) || !TREE_TYPE (arg1
))
2752 /* We cannot consider pointers to different address space equal. */
2753 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
2754 && POINTER_TYPE_P (TREE_TYPE (arg1
))
2755 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0
)))
2756 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1
)))))
2759 /* Check equality of integer constants before bailing out due to
2760 precision differences. */
2761 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
2763 /* Address of INTEGER_CST is not defined; check that we did not forget
2764 to drop the OEP_ADDRESS_OF flags. */
2765 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
2766 return tree_int_cst_equal (arg0
, arg1
);
2769 if (!(flags
& OEP_ADDRESS_OF
))
2771 /* If both types don't have the same signedness, then we can't consider
2772 them equal. We must check this before the STRIP_NOPS calls
2773 because they may change the signedness of the arguments. As pointers
2774 strictly don't have a signedness, require either two pointers or
2775 two non-pointers as well. */
2776 if (TYPE_UNSIGNED (TREE_TYPE (arg0
)) != TYPE_UNSIGNED (TREE_TYPE (arg1
))
2777 || POINTER_TYPE_P (TREE_TYPE (arg0
))
2778 != POINTER_TYPE_P (TREE_TYPE (arg1
)))
2781 /* If both types don't have the same precision, then it is not safe
2783 if (element_precision (TREE_TYPE (arg0
))
2784 != element_precision (TREE_TYPE (arg1
)))
2791 /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
2792 sanity check once the issue is solved. */
2794 /* Addresses of conversions and SSA_NAMEs (and many other things)
2795 are not defined. Check that we did not forget to drop the
2796 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2797 gcc_checking_assert (!CONVERT_EXPR_P (arg0
) && !CONVERT_EXPR_P (arg1
)
2798 && TREE_CODE (arg0
) != SSA_NAME
);
2801 /* In case both args are comparisons but with different comparison
2802 code, try to swap the comparison operands of one arg to produce
2803 a match and compare that variant. */
2804 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2805 && COMPARISON_CLASS_P (arg0
)
2806 && COMPARISON_CLASS_P (arg1
))
2808 enum tree_code swap_code
= swap_tree_comparison (TREE_CODE (arg1
));
2810 if (TREE_CODE (arg0
) == swap_code
)
2811 return operand_equal_p (TREE_OPERAND (arg0
, 0),
2812 TREE_OPERAND (arg1
, 1), flags
)
2813 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2814 TREE_OPERAND (arg1
, 0), flags
);
2817 if (TREE_CODE (arg0
) != TREE_CODE (arg1
))
2819 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2820 if (CONVERT_EXPR_P (arg0
) && CONVERT_EXPR_P (arg1
))
2822 else if (flags
& OEP_ADDRESS_OF
)
2824 /* If we are interested in comparing addresses ignore
2825 MEM_REF wrappings of the base that can appear just for
2827 if (TREE_CODE (arg0
) == MEM_REF
2829 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ADDR_EXPR
2830 && TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0) == arg1
2831 && integer_zerop (TREE_OPERAND (arg0
, 1)))
2833 else if (TREE_CODE (arg1
) == MEM_REF
2835 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ADDR_EXPR
2836 && TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0) == arg0
2837 && integer_zerop (TREE_OPERAND (arg1
, 1)))
2845 /* When not checking addresses, this is needed for conversions and for
2846 COMPONENT_REF. Might as well play it safe and always test this. */
2847 if (TREE_CODE (TREE_TYPE (arg0
)) == ERROR_MARK
2848 || TREE_CODE (TREE_TYPE (arg1
)) == ERROR_MARK
2849 || (TYPE_MODE (TREE_TYPE (arg0
)) != TYPE_MODE (TREE_TYPE (arg1
))
2850 && !(flags
& OEP_ADDRESS_OF
)))
2853 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2854 We don't care about side effects in that case because the SAVE_EXPR
2855 takes care of that for us. In all other cases, two expressions are
2856 equal if they have no side effects. If we have two identical
2857 expressions with side effects that should be treated the same due
2858 to the only side effects being identical SAVE_EXPR's, that will
2859 be detected in the recursive calls below.
2860 If we are taking an invariant address of two identical objects
2861 they are necessarily equal as well. */
2862 if (arg0
== arg1
&& ! (flags
& OEP_ONLY_CONST
)
2863 && (TREE_CODE (arg0
) == SAVE_EXPR
2864 || (flags
& OEP_MATCH_SIDE_EFFECTS
)
2865 || (! TREE_SIDE_EFFECTS (arg0
) && ! TREE_SIDE_EFFECTS (arg1
))))
2868 /* Next handle constant cases, those for which we can return 1 even
2869 if ONLY_CONST is set. */
2870 if (TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
))
2871 switch (TREE_CODE (arg0
))
2874 return tree_int_cst_equal (arg0
, arg1
);
2877 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0
),
2878 TREE_FIXED_CST (arg1
));
2881 if (real_identical (&TREE_REAL_CST (arg0
), &TREE_REAL_CST (arg1
)))
2885 if (!HONOR_SIGNED_ZEROS (arg0
))
2887 /* If we do not distinguish between signed and unsigned zero,
2888 consider them equal. */
2889 if (real_zerop (arg0
) && real_zerop (arg1
))
2898 if (VECTOR_CST_NELTS (arg0
) != VECTOR_CST_NELTS (arg1
))
2901 for (i
= 0; i
< VECTOR_CST_NELTS (arg0
); ++i
)
2903 if (!operand_equal_p (VECTOR_CST_ELT (arg0
, i
),
2904 VECTOR_CST_ELT (arg1
, i
), flags
))
2911 return (operand_equal_p (TREE_REALPART (arg0
), TREE_REALPART (arg1
),
2913 && operand_equal_p (TREE_IMAGPART (arg0
), TREE_IMAGPART (arg1
),
2917 return (TREE_STRING_LENGTH (arg0
) == TREE_STRING_LENGTH (arg1
)
2918 && ! memcmp (TREE_STRING_POINTER (arg0
),
2919 TREE_STRING_POINTER (arg1
),
2920 TREE_STRING_LENGTH (arg0
)));
2923 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
2924 return operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0),
2925 flags
| OEP_ADDRESS_OF
2926 | OEP_MATCH_SIDE_EFFECTS
);
2928 /* In GIMPLE empty constructors are allowed in initializers of
2930 return (!vec_safe_length (CONSTRUCTOR_ELTS (arg0
))
2931 && !vec_safe_length (CONSTRUCTOR_ELTS (arg1
)));
2936 if (flags
& OEP_ONLY_CONST
)
2939 /* Define macros to test an operand from arg0 and arg1 for equality and a
2940 variant that allows null and views null as being different from any
2941 non-null value. In the latter case, if either is null, the both
2942 must be; otherwise, do the normal comparison. */
2943 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2944 TREE_OPERAND (arg1, N), flags)
2946 #define OP_SAME_WITH_NULL(N) \
2947 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2948 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2950 switch (TREE_CODE_CLASS (TREE_CODE (arg0
)))
2953 /* Two conversions are equal only if signedness and modes match. */
2954 switch (TREE_CODE (arg0
))
2957 case FIX_TRUNC_EXPR
:
2958 if (TYPE_UNSIGNED (TREE_TYPE (arg0
))
2959 != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
2969 case tcc_comparison
:
2971 if (OP_SAME (0) && OP_SAME (1))
2974 /* For commutative ops, allow the other order. */
2975 return (commutative_tree_code (TREE_CODE (arg0
))
2976 && operand_equal_p (TREE_OPERAND (arg0
, 0),
2977 TREE_OPERAND (arg1
, 1), flags
)
2978 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2979 TREE_OPERAND (arg1
, 0), flags
));
2982 /* If either of the pointer (or reference) expressions we are
2983 dereferencing contain a side effect, these cannot be equal,
2984 but their addresses can be. */
2985 if ((flags
& OEP_MATCH_SIDE_EFFECTS
) == 0
2986 && (TREE_SIDE_EFFECTS (arg0
)
2987 || TREE_SIDE_EFFECTS (arg1
)))
2990 switch (TREE_CODE (arg0
))
2993 if (!(flags
& OEP_ADDRESS_OF
)
2994 && (TYPE_ALIGN (TREE_TYPE (arg0
))
2995 != TYPE_ALIGN (TREE_TYPE (arg1
))))
2997 flags
&= ~OEP_ADDRESS_OF
;
3002 case VIEW_CONVERT_EXPR
:
3005 case TARGET_MEM_REF
:
3007 if (!(flags
& OEP_ADDRESS_OF
))
3009 /* Require equal access sizes */
3010 if (TYPE_SIZE (TREE_TYPE (arg0
)) != TYPE_SIZE (TREE_TYPE (arg1
))
3011 && (!TYPE_SIZE (TREE_TYPE (arg0
))
3012 || !TYPE_SIZE (TREE_TYPE (arg1
))
3013 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0
)),
3014 TYPE_SIZE (TREE_TYPE (arg1
)),
3017 /* Verify that accesses are TBAA compatible. */
3018 if (!alias_ptr_types_compatible_p
3019 (TREE_TYPE (TREE_OPERAND (arg0
, 1)),
3020 TREE_TYPE (TREE_OPERAND (arg1
, 1)))
3021 || (MR_DEPENDENCE_CLIQUE (arg0
)
3022 != MR_DEPENDENCE_CLIQUE (arg1
))
3023 || (MR_DEPENDENCE_BASE (arg0
)
3024 != MR_DEPENDENCE_BASE (arg1
)))
3026 /* Verify that alignment is compatible. */
3027 if (TYPE_ALIGN (TREE_TYPE (arg0
))
3028 != TYPE_ALIGN (TREE_TYPE (arg1
)))
3031 flags
&= ~OEP_ADDRESS_OF
;
3032 return (OP_SAME (0) && OP_SAME (1)
3033 /* TARGET_MEM_REF require equal extra operands. */
3034 && (TREE_CODE (arg0
) != TARGET_MEM_REF
3035 || (OP_SAME_WITH_NULL (2)
3036 && OP_SAME_WITH_NULL (3)
3037 && OP_SAME_WITH_NULL (4))));
3040 case ARRAY_RANGE_REF
:
3041 /* Operands 2 and 3 may be null.
3042 Compare the array index by value if it is constant first as we
3043 may have different types but same value here. */
3046 flags
&= ~OEP_ADDRESS_OF
;
3047 return ((tree_int_cst_equal (TREE_OPERAND (arg0
, 1),
3048 TREE_OPERAND (arg1
, 1))
3050 && OP_SAME_WITH_NULL (2)
3051 && OP_SAME_WITH_NULL (3));
3054 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3055 may be NULL when we're called to compare MEM_EXPRs. */
3056 if (!OP_SAME_WITH_NULL (0)
3059 flags
&= ~OEP_ADDRESS_OF
;
3060 return OP_SAME_WITH_NULL (2);
3065 flags
&= ~OEP_ADDRESS_OF
;
3066 return OP_SAME (1) && OP_SAME (2);
3072 case tcc_expression
:
3073 switch (TREE_CODE (arg0
))
3076 /* Be sure we pass right ADDRESS_OF flag. */
3077 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
3078 return operand_equal_p (TREE_OPERAND (arg0
, 0),
3079 TREE_OPERAND (arg1
, 0),
3080 flags
| OEP_ADDRESS_OF
);
3082 case TRUTH_NOT_EXPR
:
3085 case TRUTH_ANDIF_EXPR
:
3086 case TRUTH_ORIF_EXPR
:
3087 return OP_SAME (0) && OP_SAME (1);
3090 case WIDEN_MULT_PLUS_EXPR
:
3091 case WIDEN_MULT_MINUS_EXPR
:
3094 /* The multiplication operands are commutative. */
3097 case TRUTH_AND_EXPR
:
3099 case TRUTH_XOR_EXPR
:
3100 if (OP_SAME (0) && OP_SAME (1))
3103 /* Otherwise take into account this is a commutative operation. */
3104 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
3105 TREE_OPERAND (arg1
, 1), flags
)
3106 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3107 TREE_OPERAND (arg1
, 0), flags
));
3112 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3119 switch (TREE_CODE (arg0
))
3122 if ((CALL_EXPR_FN (arg0
) == NULL_TREE
)
3123 != (CALL_EXPR_FN (arg1
) == NULL_TREE
))
3124 /* If not both CALL_EXPRs are either internal or normal function
3125 calls, then they are not equal. */
3127 else if (CALL_EXPR_FN (arg0
) == NULL_TREE
)
3129 /* If the CALL_EXPRs call different internal functions, then they
3131 if (CALL_EXPR_IFN (arg0
) != CALL_EXPR_IFN (arg1
))
3136 /* If the CALL_EXPRs call different functions, then they are not
3138 if (! operand_equal_p (CALL_EXPR_FN (arg0
), CALL_EXPR_FN (arg1
),
3143 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3145 unsigned int cef
= call_expr_flags (arg0
);
3146 if (flags
& OEP_PURE_SAME
)
3147 cef
&= ECF_CONST
| ECF_PURE
;
3154 /* Now see if all the arguments are the same. */
3156 const_call_expr_arg_iterator iter0
, iter1
;
3158 for (a0
= first_const_call_expr_arg (arg0
, &iter0
),
3159 a1
= first_const_call_expr_arg (arg1
, &iter1
);
3161 a0
= next_const_call_expr_arg (&iter0
),
3162 a1
= next_const_call_expr_arg (&iter1
))
3163 if (! operand_equal_p (a0
, a1
, flags
))
3166 /* If we get here and both argument lists are exhausted
3167 then the CALL_EXPRs are equal. */
3168 return ! (a0
|| a1
);
3174 case tcc_declaration
:
3175 /* Consider __builtin_sqrt equal to sqrt. */
3176 return (TREE_CODE (arg0
) == FUNCTION_DECL
3177 && DECL_BUILT_IN (arg0
) && DECL_BUILT_IN (arg1
)
3178 && DECL_BUILT_IN_CLASS (arg0
) == DECL_BUILT_IN_CLASS (arg1
)
3179 && DECL_FUNCTION_CODE (arg0
) == DECL_FUNCTION_CODE (arg1
));
3181 case tcc_exceptional
:
3182 if (TREE_CODE (arg0
) == CONSTRUCTOR
)
3184 /* In GIMPLE constructors are used only to build vectors from
3185 elements. Individual elements in the constructor must be
3186 indexed in increasing order and form an initial sequence.
3188 We make no effort to compare constructors in generic.
3189 (see sem_variable::equals in ipa-icf which can do so for
3191 if (!VECTOR_TYPE_P (TREE_TYPE (arg0
))
3192 || !VECTOR_TYPE_P (TREE_TYPE (arg1
)))
3195 /* Be sure that vectors constructed have the same representation.
3196 We only tested element precision and modes to match.
3197 Vectors may be BLKmode and thus also check that the number of
3199 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
))
3200 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)))
3203 vec
<constructor_elt
, va_gc
> *v0
= CONSTRUCTOR_ELTS (arg0
);
3204 vec
<constructor_elt
, va_gc
> *v1
= CONSTRUCTOR_ELTS (arg1
);
3205 unsigned int len
= vec_safe_length (v0
);
3207 if (len
!= vec_safe_length (v1
))
3210 for (unsigned int i
= 0; i
< len
; i
++)
3212 constructor_elt
*c0
= &(*v0
)[i
];
3213 constructor_elt
*c1
= &(*v1
)[i
];
3215 if (!operand_equal_p (c0
->value
, c1
->value
, flags
)
3216 /* In GIMPLE the indexes can be either NULL or matching i.
3217 Double check this so we won't get false
3218 positives for GENERIC. */
3220 && (TREE_CODE (c0
->index
) != INTEGER_CST
3221 || !compare_tree_int (c0
->index
, i
)))
3223 && (TREE_CODE (c1
->index
) != INTEGER_CST
3224 || !compare_tree_int (c1
->index
, i
))))
3236 #undef OP_SAME_WITH_NULL
3239 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3240 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3242 When in doubt, return 0. */
/* NOTE(review): mangled extraction -- logical lines split across physical
   lines and some lines (braces, bare returns, the declarator line) were
   dropped.  Code tokens kept verbatim; comments only were touched.  */
3245 operand_equal_for_comparison_p (tree arg0
, tree arg1
, tree other
)
3247 int unsignedp1
, unsignedpo
;
3248 tree primarg0
, primarg1
, primother
;
3249 unsigned int correct_width
;
3251 if (operand_equal_p (arg0
, arg1
, 0))
3254 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
3255 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
3258 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3259 and see if the inner values are the same. This removes any
3260 signedness comparison, which doesn't matter here. */
3261 primarg0
= arg0
, primarg1
= arg1
;
3262 STRIP_NOPS (primarg0
);
3263 STRIP_NOPS (primarg1
);
3264 if (operand_equal_p (primarg0
, primarg1
, 0))
3267 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3268 actual comparison operand, ARG0.
3270 First throw away any conversions to wider types
3271 already present in the operands. */
3273 primarg1
= get_narrower (arg1
, &unsignedp1
);
3274 primother
= get_narrower (other
, &unsignedpo
);
3276 correct_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
3277 if (unsignedp1
== unsignedpo
3278 && TYPE_PRECISION (TREE_TYPE (primarg1
)) < correct_width
3279 && TYPE_PRECISION (TREE_TYPE (primother
)) < correct_width
)
3281 tree type
= TREE_TYPE (arg0
);
3283 /* Make sure shorter operand is extended the right way
3284 to match the longer operand. */
3285 primarg1
= fold_convert (signed_or_unsigned_type_for
3286 (unsignedp1
, TREE_TYPE (primarg1
)), primarg1
);
3288 if (operand_equal_p (arg0
, fold_convert (type
, primarg1
), 0))
3295 /* See if ARG is an expression that is either a comparison or is performing
3296 arithmetic on comparisons. The comparisons must only be comparing
3297 two different values, which will be stored in *CVAL1 and *CVAL2; if
3298 they are nonzero it means that some operands have already been found.
3299 No variables may be used anywhere else in the expression except in the
3300 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3301 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3303 If this is true, return 1. Otherwise, return zero. */
/* NOTE(review): mangled extraction -- logical lines are split and several
   lines (braces, returns, case labels, the declarator line) were dropped.
   Code tokens preserved verbatim; only comments touched.  */
3306 twoval_comparison_p (tree arg
, tree
*cval1
, tree
*cval2
, int *save_p
)
3308 enum tree_code code
= TREE_CODE (arg
);
3309 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
3311 /* We can handle some of the tcc_expression cases here. */
3312 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
3314 else if (tclass
== tcc_expression
3315 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
3316 || code
== COMPOUND_EXPR
))
3317 tclass
= tcc_binary
;
3319 else if (tclass
== tcc_expression
&& code
== SAVE_EXPR
3320 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg
, 0)))
3322 /* If we've already found a CVAL1 or CVAL2, this expression is
3323 too complex to handle. */
3324 if (*cval1
|| *cval2
)
3334 return twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
);
3337 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
)
3338 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
3339 cval1
, cval2
, save_p
));
3344 case tcc_expression
:
3345 if (code
== COND_EXPR
)
3346 return (twoval_comparison_p (TREE_OPERAND (arg
, 0),
3347 cval1
, cval2
, save_p
)
3348 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
3349 cval1
, cval2
, save_p
)
3350 && twoval_comparison_p (TREE_OPERAND (arg
, 2),
3351 cval1
, cval2
, save_p
));
3354 case tcc_comparison
:
3355 /* First see if we can handle the first operand, then the second. For
3356 the second operand, we know *CVAL1 can't be zero. It must be that
3357 one side of the comparison is each of the values; test for the
3358 case where this isn't true by failing if the two operands
3361 if (operand_equal_p (TREE_OPERAND (arg
, 0),
3362 TREE_OPERAND (arg
, 1), 0))
3366 *cval1
= TREE_OPERAND (arg
, 0);
3367 else if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 0), 0))
3369 else if (*cval2
== 0)
3370 *cval2
= TREE_OPERAND (arg
, 0);
3371 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 0), 0))
3376 if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 1), 0))
3378 else if (*cval2
== 0)
3379 *cval2
= TREE_OPERAND (arg
, 1);
3380 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 1), 0))
3392 /* ARG is a tree that is known to contain just arithmetic operations and
3393 comparisons. Evaluate the operations in the tree substituting NEW0 for
3394 any occurrence of OLD0 as an operand of a comparison and likewise for
/* NOTE(review): mangled extraction -- logical lines split across physical
   lines; the declarator/return-type line, braces, and several case labels
   were dropped.  Code tokens preserved verbatim; comments only touched.
   Presumably substitutes NEW1 for OLD1 as well -- the tail of the comment
   above was lost in extraction; confirm against upstream.  */
3398 eval_subst (location_t loc
, tree arg
, tree old0
, tree new0
,
3399 tree old1
, tree new1
)
3401 tree type
= TREE_TYPE (arg
);
3402 enum tree_code code
= TREE_CODE (arg
);
3403 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
3405 /* We can handle some of the tcc_expression cases here. */
3406 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
3408 else if (tclass
== tcc_expression
3409 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
3410 tclass
= tcc_binary
;
3415 return fold_build1_loc (loc
, code
, type
,
3416 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3417 old0
, new0
, old1
, new1
));
3420 return fold_build2_loc (loc
, code
, type
,
3421 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3422 old0
, new0
, old1
, new1
),
3423 eval_subst (loc
, TREE_OPERAND (arg
, 1),
3424 old0
, new0
, old1
, new1
));
3426 case tcc_expression
:
3430 return eval_subst (loc
, TREE_OPERAND (arg
, 0), old0
, new0
,
3434 return eval_subst (loc
, TREE_OPERAND (arg
, 1), old0
, new0
,
3438 return fold_build3_loc (loc
, code
, type
,
3439 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3440 old0
, new0
, old1
, new1
),
3441 eval_subst (loc
, TREE_OPERAND (arg
, 1),
3442 old0
, new0
, old1
, new1
),
3443 eval_subst (loc
, TREE_OPERAND (arg
, 2),
3444 old0
, new0
, old1
, new1
));
3448 /* Fall through - ??? */
3450 case tcc_comparison
:
3452 tree arg0
= TREE_OPERAND (arg
, 0);
3453 tree arg1
= TREE_OPERAND (arg
, 1);
3455 /* We need to check both for exact equality and tree equality. The
3456 former will be true if the operand has a side-effect. In that
3457 case, we know the operand occurred exactly once. */
3459 if (arg0
== old0
|| operand_equal_p (arg0
, old0
, 0))
3461 else if (arg0
== old1
|| operand_equal_p (arg0
, old1
, 0))
3464 if (arg1
== old0
|| operand_equal_p (arg1
, old0
, 0))
3466 else if (arg1
== old1
|| operand_equal_p (arg1
, old1
, 0))
3469 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
3477 /* Return a tree for the case when the result of an expression is RESULT
3478 converted to TYPE and OMITTED was previously an operand of the expression
3479 but is now not needed (e.g., we folded OMITTED * 0).
3481 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3482 the conversion of RESULT to TYPE. */
/* NOTE(review): mangled extraction -- logical lines split across physical
   lines; the declarator line and braces were dropped.  Code tokens kept
   verbatim; comments only touched.  */
3485 omit_one_operand_loc (location_t loc
, tree type
, tree result
, tree omitted
)
3487 tree t
= fold_convert_loc (loc
, type
, result
);
3489 /* If the resulting operand is an empty statement, just return the omitted
3490 statement casted to void. */
3491 if (IS_EMPTY_STMT (t
) && TREE_SIDE_EFFECTS (omitted
))
3492 return build1_loc (loc
, NOP_EXPR
, void_type_node
,
3493 fold_ignored_result (omitted
));
/* Side-effecting OMITTED must still be evaluated: chain it before T.  */
3495 if (TREE_SIDE_EFFECTS (omitted
))
3496 return build2_loc (loc
, COMPOUND_EXPR
, type
,
3497 fold_ignored_result (omitted
), t
);
3499 return non_lvalue_loc (loc
, t
);
3502 /* Return a tree for the case when the result of an expression is RESULT
3503 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3504 of the expression but are now not needed.
3506 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3507 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3508 evaluated before OMITTED2. Otherwise, if neither has side effects,
3509 just do the conversion of RESULT to TYPE. */
/* NOTE(review): mangled extraction -- logical lines split across physical
   lines; the declarator line and braces were dropped.  Code tokens kept
   verbatim; comments only touched.  */
3512 omit_two_operands_loc (location_t loc
, tree type
, tree result
,
3513 tree omitted1
, tree omitted2
)
3515 tree t
= fold_convert_loc (loc
, type
, result
);
/* Wrap OMITTED2 first so that OMITTED1 ends up outermost and is
   therefore evaluated before OMITTED2, as documented above.  */
3517 if (TREE_SIDE_EFFECTS (omitted2
))
3518 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted2
, t
);
3519 if (TREE_SIDE_EFFECTS (omitted1
))
3520 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted1
, t
);
3522 return TREE_CODE (t
) != COMPOUND_EXPR
? non_lvalue_loc (loc
, t
) : t
;
3526 /* Return a simplified tree node for the truth-negation of ARG. This
3527 never alters ARG itself. We assume that ARG is an operation that
3528 returns a truth value (0 or 1).
3530 FIXME: one would think we would fold the result, but it causes
3531 problems with the dominator optimizer. */
/* NOTE(review): mangled extraction -- logical lines split across physical
   lines; the declarator line, braces, several case labels (and the
   switch statement itself) were dropped.  Code tokens kept verbatim;
   comments only touched.  */
3534 fold_truth_not_expr (location_t loc
, tree arg
)
3536 tree type
= TREE_TYPE (arg
);
3537 enum tree_code code
= TREE_CODE (arg
);
3538 location_t loc1
, loc2
;
3540 /* If this is a comparison, we can simply invert it, except for
3541 floating-point non-equality comparisons, in which case we just
3542 enclose a TRUTH_NOT_EXPR around what we have. */
3544 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
3546 tree op_type
= TREE_TYPE (TREE_OPERAND (arg
, 0));
3547 if (FLOAT_TYPE_P (op_type
)
3548 && flag_trapping_math
3549 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
3550 && code
!= NE_EXPR
&& code
!= EQ_EXPR
)
3553 code
= invert_tree_comparison (code
, HONOR_NANS (op_type
));
3554 if (code
== ERROR_MARK
)
3557 return build2_loc (loc
, code
, type
, TREE_OPERAND (arg
, 0),
3558 TREE_OPERAND (arg
, 1));
3564 return constant_boolean_node (integer_zerop (arg
), type
);
3566 case TRUTH_AND_EXPR
:
3567 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3568 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
/* De Morgan: !(a && b) == !a || !b.  */
3569 return build2_loc (loc
, TRUTH_OR_EXPR
, type
,
3570 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3571 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3574 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3575 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
/* De Morgan: !(a || b) == !a && !b.  */
3576 return build2_loc (loc
, TRUTH_AND_EXPR
, type
,
3577 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3578 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3580 case TRUTH_XOR_EXPR
:
3581 /* Here we can invert either operand. We invert the first operand
3582 unless the second operand is a TRUTH_NOT_EXPR in which case our
3583 result is the XOR of the first operand with the inside of the
3584 negation of the second operand. */
3586 if (TREE_CODE (TREE_OPERAND (arg
, 1)) == TRUTH_NOT_EXPR
)
3587 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
, TREE_OPERAND (arg
, 0),
3588 TREE_OPERAND (TREE_OPERAND (arg
, 1), 0));
3590 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
,
3591 invert_truthvalue_loc (loc
, TREE_OPERAND (arg
, 0)),
3592 TREE_OPERAND (arg
, 1));
3594 case TRUTH_ANDIF_EXPR
:
3595 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3596 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3597 return build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
3598 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3599 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3601 case TRUTH_ORIF_EXPR
:
3602 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3603 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3604 return build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
3605 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3606 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3608 case TRUTH_NOT_EXPR
:
/* Double negation cancels.  */
3609 return TREE_OPERAND (arg
, 0);
3613 tree arg1
= TREE_OPERAND (arg
, 1);
3614 tree arg2
= TREE_OPERAND (arg
, 2);
3616 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3617 loc2
= expr_location_or (TREE_OPERAND (arg
, 2), loc
);
3619 /* A COND_EXPR may have a throw as one operand, which
3620 then has void type. Just leave void operands
3622 return build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg
, 0),
3623 VOID_TYPE_P (TREE_TYPE (arg1
))
3624 ? arg1
: invert_truthvalue_loc (loc1
, arg1
),
3625 VOID_TYPE_P (TREE_TYPE (arg2
))
3626 ? arg2
: invert_truthvalue_loc (loc2
, arg2
));
3630 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3631 return build2_loc (loc
, COMPOUND_EXPR
, type
,
3632 TREE_OPERAND (arg
, 0),
3633 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 1)));
3635 case NON_LVALUE_EXPR
:
3636 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3637 return invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0));
3640 if (TREE_CODE (TREE_TYPE (arg
)) == BOOLEAN_TYPE
)
3641 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3643 /* ... fall through ... */
3646 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3647 return build1_loc (loc
, TREE_CODE (arg
), type
,
3648 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
3651 if (!integer_onep (TREE_OPERAND (arg
, 1)))
3653 return build2_loc (loc
, EQ_EXPR
, type
, arg
, build_int_cst (type
, 0));
3656 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3658 case CLEANUP_POINT_EXPR
:
3659 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3660 return build1_loc (loc
, CLEANUP_POINT_EXPR
, type
,
3661 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
3668 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3669 assume that ARG is an operation that returns a truth value (0 or 1
3670 for scalars, 0 or -1 for vectors). Return the folded expression if
3671 folding is successful. Otherwise, return NULL_TREE. */
/* NOTE(review): mangled extraction -- the declarator line and the tail of
   the return expression (the tree-code choice and remaining arguments of
   fold_unary_loc) were dropped.  Code tokens kept verbatim.  */
3674 fold_invert_truthvalue (location_t loc
, tree arg
)
3676 tree type
= TREE_TYPE (arg
);
3677 return fold_unary_loc (loc
, VECTOR_TYPE_P (type
)
3683 /* Return a simplified tree node for the truth-negation of ARG. This
3684 never alters ARG itself. We assume that ARG is an operation that
3685 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
/* NOTE(review): mangled extraction -- the declarator line, the ERROR_MARK
   early return, and the tail of the fold_build1_loc call were dropped.
   Code tokens kept verbatim.  */
3688 invert_truthvalue_loc (location_t loc
, tree arg
)
3690 if (TREE_CODE (arg
) == ERROR_MARK
)
3693 tree type
= TREE_TYPE (arg
);
3694 return fold_build1_loc (loc
, VECTOR_TYPE_P (type
)
3700 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3701 with code CODE. This optimization is unsafe. */
/* NOTE(review): mangled extraction -- the declarator line, braces, parts
   of the first condition (the mul0/mul1 test on line 3710), and the final
   return were dropped.  Code tokens kept verbatim; comments only touched.  */
3703 distribute_real_division (location_t loc
, enum tree_code code
, tree type
,
3704 tree arg0
, tree arg1
)
3706 bool mul0
= TREE_CODE (arg0
) == MULT_EXPR
;
3707 bool mul1
= TREE_CODE (arg1
) == MULT_EXPR
;
3709 /* (A / C) +- (B / C) -> (A +- B) / C. */
3711 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3712 TREE_OPERAND (arg1
, 1), 0))
3713 return fold_build2_loc (loc
, mul0
? MULT_EXPR
: RDIV_EXPR
, type
,
3714 fold_build2_loc (loc
, code
, type
,
3715 TREE_OPERAND (arg0
, 0),
3716 TREE_OPERAND (arg1
, 0)),
3717 TREE_OPERAND (arg0
, 1));
3719 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3720 if (operand_equal_p (TREE_OPERAND (arg0
, 0),
3721 TREE_OPERAND (arg1
, 0), 0)
3722 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
3723 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
3725 REAL_VALUE_TYPE r0
, r1
;
3726 r0
= TREE_REAL_CST (TREE_OPERAND (arg0
, 1));
3727 r1
= TREE_REAL_CST (TREE_OPERAND (arg1
, 1));
/* Fold 1/C1 and 1/C2 at compile time, then combine with CODE.  */
3729 real_arithmetic (&r0
, RDIV_EXPR
, &dconst1
, &r0
);
3731 real_arithmetic (&r1
, RDIV_EXPR
, &dconst1
, &r1
);
3732 real_arithmetic (&r0
, code
, &r0
, &r1
);
3733 return fold_build2_loc (loc
, MULT_EXPR
, type
,
3734 TREE_OPERAND (arg0
, 0),
3735 build_real (type
, r0
));
3741 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3742 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3743 and uses reverse storage order if REVERSEP is nonzero. */
/* NOTE(review): mangled extraction -- the declarator line, braces, the
   initialization of bftype (between lines 3759 and 3763), and the final
   return were dropped.  Code tokens kept verbatim; comments only touched.  */
3746 make_bit_field_ref (location_t loc
, tree inner
, tree type
,
3747 HOST_WIDE_INT bitsize
, HOST_WIDE_INT bitpos
,
3748 int unsignedp
, int reversep
)
3750 tree result
, bftype
;
3752 if (bitpos
== 0 && !reversep
)
3754 tree size
= TYPE_SIZE (TREE_TYPE (inner
));
/* If the reference covers the whole integral/pointer object from bit 0,
   a plain conversion suffices -- no BIT_FIELD_REF needed.  */
3755 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner
))
3756 || POINTER_TYPE_P (TREE_TYPE (inner
)))
3757 && tree_fits_shwi_p (size
)
3758 && tree_to_shwi (size
) == bitsize
)
3759 return fold_convert_loc (loc
, type
, inner
);
3763 if (TYPE_PRECISION (bftype
) != bitsize
3764 || TYPE_UNSIGNED (bftype
) == !unsignedp
)
3765 bftype
= build_nonstandard_integer_type (bitsize
, 0);
3767 result
= build3_loc (loc
, BIT_FIELD_REF
, bftype
, inner
,
3768 size_int (bitsize
), bitsize_int (bitpos
));
3769 REF_REVERSE_STORAGE_ORDER (result
) = reversep
;
3772 result
= fold_convert_loc (loc
, type
, result
);
3777 /* Optimize a bit-field compare.
3779 There are two cases: First is a compare against a constant and the
3780 second is a comparison of two items where the fields are at the same
3781 bit position relative to the start of a chunk (byte, halfword, word)
3782 large enough to contain it. In these cases we can avoid the shift
3783 implicit in bitfield extractions.
3785 For constants, we emit a compare of the shifted constant with the
3786 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3787 compared. For two fields at the same position, we do the ANDs with the
3788 similar mask and compare the result of the ANDs.
3790 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3791 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3792 are the left and right operands of the comparison, respectively.
3794 If the optimization described above can be done, we return the resulting
3795 tree. Otherwise we return zero. */
3798 optimize_bit_field_compare (location_t loc
, enum tree_code code
,
3799 tree compare_type
, tree lhs
, tree rhs
)
3801 HOST_WIDE_INT lbitpos
, lbitsize
, rbitpos
, rbitsize
, nbitpos
, nbitsize
;
3802 tree type
= TREE_TYPE (lhs
);
3804 int const_p
= TREE_CODE (rhs
) == INTEGER_CST
;
3805 machine_mode lmode
, rmode
, nmode
;
3806 int lunsignedp
, runsignedp
;
3807 int lreversep
, rreversep
;
3808 int lvolatilep
= 0, rvolatilep
= 0;
3809 tree linner
, rinner
= NULL_TREE
;
3813 /* Get all the information about the extractions being done. If the bit size
3814 if the same as the size of the underlying object, we aren't doing an
3815 extraction at all and so can do nothing. We also don't want to
3816 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3817 then will no longer be able to replace it. */
3818 linner
= get_inner_reference (lhs
, &lbitsize
, &lbitpos
, &offset
, &lmode
,
3819 &lunsignedp
, &lreversep
, &lvolatilep
, false);
3820 if (linner
== lhs
|| lbitsize
== GET_MODE_BITSIZE (lmode
) || lbitsize
< 0
3821 || offset
!= 0 || TREE_CODE (linner
) == PLACEHOLDER_EXPR
|| lvolatilep
)
3825 rreversep
= lreversep
;
3828 /* If this is not a constant, we can only do something if bit positions,
3829 sizes, signedness and storage order are the same. */
3831 = get_inner_reference (rhs
, &rbitsize
, &rbitpos
, &offset
, &rmode
,
3832 &runsignedp
, &rreversep
, &rvolatilep
, false);
3834 if (rinner
== rhs
|| lbitpos
!= rbitpos
|| lbitsize
!= rbitsize
3835 || lunsignedp
!= runsignedp
|| lreversep
!= rreversep
|| offset
!= 0
3836 || TREE_CODE (rinner
) == PLACEHOLDER_EXPR
|| rvolatilep
)
3840 /* See if we can find a mode to refer to this field. We should be able to,
3841 but fail if we can't. */
3842 nmode
= get_best_mode (lbitsize
, lbitpos
, 0, 0,
3843 const_p
? TYPE_ALIGN (TREE_TYPE (linner
))
3844 : MIN (TYPE_ALIGN (TREE_TYPE (linner
)),
3845 TYPE_ALIGN (TREE_TYPE (rinner
))),
3847 if (nmode
== VOIDmode
)
3850 /* Set signed and unsigned types of the precision of this mode for the
3852 unsigned_type
= lang_hooks
.types
.type_for_mode (nmode
, 1);
3854 /* Compute the bit position and size for the new reference and our offset
3855 within it. If the new reference is the same size as the original, we
3856 won't optimize anything, so return zero. */
3857 nbitsize
= GET_MODE_BITSIZE (nmode
);
3858 nbitpos
= lbitpos
& ~ (nbitsize
- 1);
3860 if (nbitsize
== lbitsize
)
3863 if (lreversep
? !BYTES_BIG_ENDIAN
: BYTES_BIG_ENDIAN
)
3864 lbitpos
= nbitsize
- lbitsize
- lbitpos
;
3866 /* Make the mask to be used against the extracted field. */
3867 mask
= build_int_cst_type (unsigned_type
, -1);
3868 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (nbitsize
- lbitsize
));
3869 mask
= const_binop (RSHIFT_EXPR
, mask
,
3870 size_int (nbitsize
- lbitsize
- lbitpos
));
3873 /* If not comparing with constant, just rework the comparison
3875 return fold_build2_loc (loc
, code
, compare_type
,
3876 fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3877 make_bit_field_ref (loc
, linner
,
3882 fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3883 make_bit_field_ref (loc
, rinner
,
3889 /* Otherwise, we are handling the constant case. See if the constant is too
3890 big for the field. Warn and return a tree for 0 (false) if so. We do
3891 this not only for its own sake, but to avoid having to test for this
3892 error case below. If we didn't, we might generate wrong code.
3894 For unsigned fields, the constant shifted right by the field length should
3895 be all zero. For signed fields, the high-order bits should agree with
3900 if (wi::lrshift (rhs
, lbitsize
) != 0)
3902 warning (0, "comparison is always %d due to width of bit-field",
3904 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3909 wide_int tem
= wi::arshift (rhs
, lbitsize
- 1);
3910 if (tem
!= 0 && tem
!= -1)
3912 warning (0, "comparison is always %d due to width of bit-field",
3914 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3918 /* Single-bit compares should always be against zero. */
3919 if (lbitsize
== 1 && ! integer_zerop (rhs
))
3921 code
= code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
;
3922 rhs
= build_int_cst (type
, 0);
3925 /* Make a new bitfield reference, shift the constant over the
3926 appropriate number of bits and mask it with the computed mask
3927 (in case this was a signed field). If we changed it, make a new one. */
3928 lhs
= make_bit_field_ref (loc
, linner
, unsigned_type
, nbitsize
, nbitpos
, 1,
3931 rhs
= const_binop (BIT_AND_EXPR
,
3932 const_binop (LSHIFT_EXPR
,
3933 fold_convert_loc (loc
, unsigned_type
, rhs
),
3934 size_int (lbitpos
)),
3937 lhs
= build2_loc (loc
, code
, compare_type
,
3938 build2 (BIT_AND_EXPR
, unsigned_type
, lhs
, mask
), rhs
);
3942 /* Subroutine for fold_truth_andor_1: decode a field reference.
3944 If EXP is a comparison reference, we return the innermost reference.
3946 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3947 set to the starting bit number.
3949 If the innermost field can be completely contained in a mode-sized
3950 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3952 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3953 otherwise it is not changed.
3955 *PUNSIGNEDP is set to the signedness of the field.
3957 *PREVERSEP is set to the storage order of the field.
3959 *PMASK is set to the mask used. This is either contained in a
3960 BIT_AND_EXPR or derived from the width of the field.
3962 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3964 Return 0 if this is not a component reference or is one that we can't
3965 do anything with. */
3968 decode_field_reference (location_t loc
, tree exp
, HOST_WIDE_INT
*pbitsize
,
3969 HOST_WIDE_INT
*pbitpos
, machine_mode
*pmode
,
3970 int *punsignedp
, int *preversep
, int *pvolatilep
,
3971 tree
*pmask
, tree
*pand_mask
)
3973 tree outer_type
= 0;
3975 tree mask
, inner
, offset
;
3977 unsigned int precision
;
3979 /* All the optimizations using this function assume integer fields.
3980 There are problems with FP fields since the type_for_size call
3981 below can fail for, e.g., XFmode. */
3982 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp
)))
3985 /* We are interested in the bare arrangement of bits, so strip everything
3986 that doesn't affect the machine mode. However, record the type of the
3987 outermost expression if it may matter below. */
3988 if (CONVERT_EXPR_P (exp
)
3989 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
3990 outer_type
= TREE_TYPE (exp
);
3993 if (TREE_CODE (exp
) == BIT_AND_EXPR
)
3995 and_mask
= TREE_OPERAND (exp
, 1);
3996 exp
= TREE_OPERAND (exp
, 0);
3997 STRIP_NOPS (exp
); STRIP_NOPS (and_mask
);
3998 if (TREE_CODE (and_mask
) != INTEGER_CST
)
4002 inner
= get_inner_reference (exp
, pbitsize
, pbitpos
, &offset
, pmode
,
4003 punsignedp
, preversep
, pvolatilep
, false);
4004 if ((inner
== exp
&& and_mask
== 0)
4005 || *pbitsize
< 0 || offset
!= 0
4006 || TREE_CODE (inner
) == PLACEHOLDER_EXPR
)
4009 /* If the number of bits in the reference is the same as the bitsize of
4010 the outer type, then the outer type gives the signedness. Otherwise
4011 (in case of a small bitfield) the signedness is unchanged. */
4012 if (outer_type
&& *pbitsize
== TYPE_PRECISION (outer_type
))
4013 *punsignedp
= TYPE_UNSIGNED (outer_type
);
4015 /* Compute the mask to access the bitfield. */
4016 unsigned_type
= lang_hooks
.types
.type_for_size (*pbitsize
, 1);
4017 precision
= TYPE_PRECISION (unsigned_type
);
4019 mask
= build_int_cst_type (unsigned_type
, -1);
4021 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
4022 mask
= const_binop (RSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
4024 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4026 mask
= fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
4027 fold_convert_loc (loc
, unsigned_type
, and_mask
), mask
);
4030 *pand_mask
= and_mask
;
4034 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4035 bit positions and MASK is SIGNED. */
4038 all_ones_mask_p (const_tree mask
, unsigned int size
)
4040 tree type
= TREE_TYPE (mask
);
4041 unsigned int precision
= TYPE_PRECISION (type
);
4043 /* If this function returns true when the type of the mask is
4044 UNSIGNED, then there will be errors. In particular see
4045 gcc.c-torture/execute/990326-1.c. There does not appear to be
4046 any documentation paper trail as to why this is so. But the pre
4047 wide-int worked with that restriction and it has been preserved
4049 if (size
> precision
|| TYPE_SIGN (type
) == UNSIGNED
)
4052 return wi::mask (size
, false, precision
) == mask
;
4055 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4056 represents the sign bit of EXP's type. If EXP represents a sign
4057 or zero extension, also test VAL against the unextended type.
4058 The return value is the (sub)expression whose sign bit is VAL,
4059 or NULL_TREE otherwise. */
4062 sign_bit_p (tree exp
, const_tree val
)
4067 /* Tree EXP must have an integral type. */
4068 t
= TREE_TYPE (exp
);
4069 if (! INTEGRAL_TYPE_P (t
))
4072 /* Tree VAL must be an integer constant. */
4073 if (TREE_CODE (val
) != INTEGER_CST
4074 || TREE_OVERFLOW (val
))
4077 width
= TYPE_PRECISION (t
);
4078 if (wi::only_sign_bit_p (val
, width
))
4081 /* Handle extension from a narrower type. */
4082 if (TREE_CODE (exp
) == NOP_EXPR
4083 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))) < width
)
4084 return sign_bit_p (TREE_OPERAND (exp
, 0), val
);
4089 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4090 to be evaluated unconditionally. */
4093 simple_operand_p (const_tree exp
)
4095 /* Strip any conversions that don't change the machine mode. */
4098 return (CONSTANT_CLASS_P (exp
)
4099 || TREE_CODE (exp
) == SSA_NAME
4101 && ! TREE_ADDRESSABLE (exp
)
4102 && ! TREE_THIS_VOLATILE (exp
)
4103 && ! DECL_NONLOCAL (exp
)
4104 /* Don't regard global variables as simple. They may be
4105 allocated in ways unknown to the compiler (shared memory,
4106 #pragma weak, etc). */
4107 && ! TREE_PUBLIC (exp
)
4108 && ! DECL_EXTERNAL (exp
)
4109 /* Weakrefs are not safe to be read, since they can be NULL.
4110 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4111 have DECL_WEAK flag set. */
4112 && (! VAR_OR_FUNCTION_DECL_P (exp
) || ! DECL_WEAK (exp
))
4113 /* Loading a static variable is unduly expensive, but global
4114 registers aren't expensive. */
4115 && (! TREE_STATIC (exp
) || DECL_REGISTER (exp
))));
4118 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4119 to be evaluated unconditionally.
4120 I addition to simple_operand_p, we assume that comparisons, conversions,
4121 and logic-not operations are simple, if their operands are simple, too. */
4124 simple_operand_p_2 (tree exp
)
4126 enum tree_code code
;
4128 if (TREE_SIDE_EFFECTS (exp
)
4129 || tree_could_trap_p (exp
))
4132 while (CONVERT_EXPR_P (exp
))
4133 exp
= TREE_OPERAND (exp
, 0);
4135 code
= TREE_CODE (exp
);
4137 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
4138 return (simple_operand_p (TREE_OPERAND (exp
, 0))
4139 && simple_operand_p (TREE_OPERAND (exp
, 1)));
4141 if (code
== TRUTH_NOT_EXPR
)
4142 return simple_operand_p_2 (TREE_OPERAND (exp
, 0));
4144 return simple_operand_p (exp
);
4148 /* The following functions are subroutines to fold_range_test and allow it to
4149 try to change a logical combination of comparisons into a range test.
4152 X == 2 || X == 3 || X == 4 || X == 5
4156 (unsigned) (X - 2) <= 3
4158 We describe each set of comparisons as being either inside or outside
4159 a range, using a variable named like IN_P, and then describe the
4160 range with a lower and upper bound. If one of the bounds is omitted,
4161 it represents either the highest or lowest value of the type.
4163 In the comments below, we represent a range by two numbers in brackets
4164 preceded by a "+" to designate being inside that range, or a "-" to
4165 designate being outside that range, so the condition can be inverted by
4166 flipping the prefix. An omitted bound is represented by a "-". For
4167 example, "- [-, 10]" means being outside the range starting at the lowest
4168 possible value and ending at 10, in other words, being greater than 10.
4169 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4172 We set up things so that the missing bounds are handled in a consistent
4173 manner so neither a missing bound nor "true" and "false" need to be
4174 handled using a special case. */
4176 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4177 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4178 and UPPER1_P are nonzero if the respective argument is an upper bound
4179 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4180 must be specified for a comparison. ARG1 will be converted to ARG0's
4181 type if both are specified. */
4184 range_binop (enum tree_code code
, tree type
, tree arg0
, int upper0_p
,
4185 tree arg1
, int upper1_p
)
4191 /* If neither arg represents infinity, do the normal operation.
4192 Else, if not a comparison, return infinity. Else handle the special
4193 comparison rules. Note that most of the cases below won't occur, but
4194 are handled for consistency. */
4196 if (arg0
!= 0 && arg1
!= 0)
4198 tem
= fold_build2 (code
, type
!= 0 ? type
: TREE_TYPE (arg0
),
4199 arg0
, fold_convert (TREE_TYPE (arg0
), arg1
));
4201 return TREE_CODE (tem
) == INTEGER_CST
? tem
: 0;
4204 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
4207 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4208 for neither. In real maths, we cannot assume open ended ranges are
4209 the same. But, this is computer arithmetic, where numbers are finite.
4210 We can therefore make the transformation of any unbounded range with
4211 the value Z, Z being greater than any representable number. This permits
4212 us to treat unbounded ranges as equal. */
4213 sgn0
= arg0
!= 0 ? 0 : (upper0_p
? 1 : -1);
4214 sgn1
= arg1
!= 0 ? 0 : (upper1_p
? 1 : -1);
4218 result
= sgn0
== sgn1
;
4221 result
= sgn0
!= sgn1
;
4224 result
= sgn0
< sgn1
;
4227 result
= sgn0
<= sgn1
;
4230 result
= sgn0
> sgn1
;
4233 result
= sgn0
>= sgn1
;
4239 return constant_boolean_node (result
, type
);
4242 /* Helper routine for make_range. Perform one step for it, return
4243 new expression if the loop should continue or NULL_TREE if it should
4247 make_range_step (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
,
4248 tree exp_type
, tree
*p_low
, tree
*p_high
, int *p_in_p
,
4249 bool *strict_overflow_p
)
4251 tree arg0_type
= TREE_TYPE (arg0
);
4252 tree n_low
, n_high
, low
= *p_low
, high
= *p_high
;
4253 int in_p
= *p_in_p
, n_in_p
;
4257 case TRUTH_NOT_EXPR
:
4258 /* We can only do something if the range is testing for zero. */
4259 if (low
== NULL_TREE
|| high
== NULL_TREE
4260 || ! integer_zerop (low
) || ! integer_zerop (high
))
4265 case EQ_EXPR
: case NE_EXPR
:
4266 case LT_EXPR
: case LE_EXPR
: case GE_EXPR
: case GT_EXPR
:
4267 /* We can only do something if the range is testing for zero
4268 and if the second operand is an integer constant. Note that
4269 saying something is "in" the range we make is done by
4270 complementing IN_P since it will set in the initial case of
4271 being not equal to zero; "out" is leaving it alone. */
4272 if (low
== NULL_TREE
|| high
== NULL_TREE
4273 || ! integer_zerop (low
) || ! integer_zerop (high
)
4274 || TREE_CODE (arg1
) != INTEGER_CST
)
4279 case NE_EXPR
: /* - [c, c] */
4282 case EQ_EXPR
: /* + [c, c] */
4283 in_p
= ! in_p
, low
= high
= arg1
;
4285 case GT_EXPR
: /* - [-, c] */
4286 low
= 0, high
= arg1
;
4288 case GE_EXPR
: /* + [c, -] */
4289 in_p
= ! in_p
, low
= arg1
, high
= 0;
4291 case LT_EXPR
: /* - [c, -] */
4292 low
= arg1
, high
= 0;
4294 case LE_EXPR
: /* + [-, c] */
4295 in_p
= ! in_p
, low
= 0, high
= arg1
;
4301 /* If this is an unsigned comparison, we also know that EXP is
4302 greater than or equal to zero. We base the range tests we make
4303 on that fact, so we record it here so we can parse existing
4304 range tests. We test arg0_type since often the return type
4305 of, e.g. EQ_EXPR, is boolean. */
4306 if (TYPE_UNSIGNED (arg0_type
) && (low
== 0 || high
== 0))
4308 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
4310 build_int_cst (arg0_type
, 0),
4314 in_p
= n_in_p
, low
= n_low
, high
= n_high
;
4316 /* If the high bound is missing, but we have a nonzero low
4317 bound, reverse the range so it goes from zero to the low bound
4319 if (high
== 0 && low
&& ! integer_zerop (low
))
4322 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low
, 0,
4323 build_int_cst (TREE_TYPE (low
), 1), 0);
4324 low
= build_int_cst (arg0_type
, 0);
4334 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4335 low and high are non-NULL, then normalize will DTRT. */
4336 if (!TYPE_UNSIGNED (arg0_type
)
4337 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4339 if (low
== NULL_TREE
)
4340 low
= TYPE_MIN_VALUE (arg0_type
);
4341 if (high
== NULL_TREE
)
4342 high
= TYPE_MAX_VALUE (arg0_type
);
4345 /* (-x) IN [a,b] -> x in [-b, -a] */
4346 n_low
= range_binop (MINUS_EXPR
, exp_type
,
4347 build_int_cst (exp_type
, 0),
4349 n_high
= range_binop (MINUS_EXPR
, exp_type
,
4350 build_int_cst (exp_type
, 0),
4352 if (n_high
!= 0 && TREE_OVERFLOW (n_high
))
4358 return build2_loc (loc
, MINUS_EXPR
, exp_type
, negate_expr (arg0
),
4359 build_int_cst (exp_type
, 1));
4363 if (TREE_CODE (arg1
) != INTEGER_CST
)
4366 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4367 move a constant to the other side. */
4368 if (!TYPE_UNSIGNED (arg0_type
)
4369 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4372 /* If EXP is signed, any overflow in the computation is undefined,
4373 so we don't worry about it so long as our computations on
4374 the bounds don't overflow. For unsigned, overflow is defined
4375 and this is exactly the right thing. */
4376 n_low
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
4377 arg0_type
, low
, 0, arg1
, 0);
4378 n_high
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
4379 arg0_type
, high
, 1, arg1
, 0);
4380 if ((n_low
!= 0 && TREE_OVERFLOW (n_low
))
4381 || (n_high
!= 0 && TREE_OVERFLOW (n_high
)))
4384 if (TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4385 *strict_overflow_p
= true;
4388 /* Check for an unsigned range which has wrapped around the maximum
4389 value thus making n_high < n_low, and normalize it. */
4390 if (n_low
&& n_high
&& tree_int_cst_lt (n_high
, n_low
))
4392 low
= range_binop (PLUS_EXPR
, arg0_type
, n_high
, 0,
4393 build_int_cst (TREE_TYPE (n_high
), 1), 0);
4394 high
= range_binop (MINUS_EXPR
, arg0_type
, n_low
, 0,
4395 build_int_cst (TREE_TYPE (n_low
), 1), 0);
4397 /* If the range is of the form +/- [ x+1, x ], we won't
4398 be able to normalize it. But then, it represents the
4399 whole range or the empty set, so make it
4401 if (tree_int_cst_equal (n_low
, low
)
4402 && tree_int_cst_equal (n_high
, high
))
4408 low
= n_low
, high
= n_high
;
4416 case NON_LVALUE_EXPR
:
4417 if (TYPE_PRECISION (arg0_type
) > TYPE_PRECISION (exp_type
))
4420 if (! INTEGRAL_TYPE_P (arg0_type
)
4421 || (low
!= 0 && ! int_fits_type_p (low
, arg0_type
))
4422 || (high
!= 0 && ! int_fits_type_p (high
, arg0_type
)))
4425 n_low
= low
, n_high
= high
;
4428 n_low
= fold_convert_loc (loc
, arg0_type
, n_low
);
4431 n_high
= fold_convert_loc (loc
, arg0_type
, n_high
);
4433 /* If we're converting arg0 from an unsigned type, to exp,
4434 a signed type, we will be doing the comparison as unsigned.
4435 The tests above have already verified that LOW and HIGH
4438 So we have to ensure that we will handle large unsigned
4439 values the same way that the current signed bounds treat
4442 if (!TYPE_UNSIGNED (exp_type
) && TYPE_UNSIGNED (arg0_type
))
4446 /* For fixed-point modes, we need to pass the saturating flag
4447 as the 2nd parameter. */
4448 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type
)))
4450 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
),
4451 TYPE_SATURATING (arg0_type
));
4454 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
), 1);
4456 /* A range without an upper bound is, naturally, unbounded.
4457 Since convert would have cropped a very large value, use
4458 the max value for the destination type. */
4460 = TYPE_MAX_VALUE (equiv_type
) ? TYPE_MAX_VALUE (equiv_type
)
4461 : TYPE_MAX_VALUE (arg0_type
);
4463 if (TYPE_PRECISION (exp_type
) == TYPE_PRECISION (arg0_type
))
4464 high_positive
= fold_build2_loc (loc
, RSHIFT_EXPR
, arg0_type
,
4465 fold_convert_loc (loc
, arg0_type
,
4467 build_int_cst (arg0_type
, 1));
4469 /* If the low bound is specified, "and" the range with the
4470 range for which the original unsigned value will be
4474 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 1, n_low
, n_high
,
4475 1, fold_convert_loc (loc
, arg0_type
,
4480 in_p
= (n_in_p
== in_p
);
4484 /* Otherwise, "or" the range with the range of the input
4485 that will be interpreted as negative. */
4486 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 0, n_low
, n_high
,
4487 1, fold_convert_loc (loc
, arg0_type
,
4492 in_p
= (in_p
!= n_in_p
);
4506 /* Given EXP, a logical expression, set the range it is testing into
4507 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4508 actually being tested. *PLOW and *PHIGH will be made of the same
4509 type as the returned expression. If EXP is not a comparison, we
4510 will most likely not be returning a useful value and range. Set
4511 *STRICT_OVERFLOW_P to true if the return value is only valid
4512 because signed overflow is undefined; otherwise, do not change
4513 *STRICT_OVERFLOW_P. */
4516 make_range (tree exp
, int *pin_p
, tree
*plow
, tree
*phigh
,
4517 bool *strict_overflow_p
)
4519 enum tree_code code
;
4520 tree arg0
, arg1
= NULL_TREE
;
4521 tree exp_type
, nexp
;
4524 location_t loc
= EXPR_LOCATION (exp
);
4526 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4527 and see if we can refine the range. Some of the cases below may not
4528 happen, but it doesn't seem worth worrying about this. We "continue"
4529 the outer loop when we've changed something; otherwise we "break"
4530 the switch, which will "break" the while. */
4533 low
= high
= build_int_cst (TREE_TYPE (exp
), 0);
4537 code
= TREE_CODE (exp
);
4538 exp_type
= TREE_TYPE (exp
);
4541 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
4543 if (TREE_OPERAND_LENGTH (exp
) > 0)
4544 arg0
= TREE_OPERAND (exp
, 0);
4545 if (TREE_CODE_CLASS (code
) == tcc_binary
4546 || TREE_CODE_CLASS (code
) == tcc_comparison
4547 || (TREE_CODE_CLASS (code
) == tcc_expression
4548 && TREE_OPERAND_LENGTH (exp
) > 1))
4549 arg1
= TREE_OPERAND (exp
, 1);
4551 if (arg0
== NULL_TREE
)
4554 nexp
= make_range_step (loc
, code
, arg0
, arg1
, exp_type
, &low
,
4555 &high
, &in_p
, strict_overflow_p
);
4556 if (nexp
== NULL_TREE
)
4561 /* If EXP is a constant, we can evaluate whether this is true or false. */
4562 if (TREE_CODE (exp
) == INTEGER_CST
)
4564 in_p
= in_p
== (integer_onep (range_binop (GE_EXPR
, integer_type_node
,
4566 && integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4572 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4576 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4577 type, TYPE, return an expression to test if EXP is in (or out of, depending
4578 on IN_P) the range. Return 0 if the test couldn't be created. */
4581 build_range_check (location_t loc
, tree type
, tree exp
, int in_p
,
4582 tree low
, tree high
)
4584 tree etype
= TREE_TYPE (exp
), value
;
4586 /* Disable this optimization for function pointer expressions
4587 on targets that require function pointer canonicalization. */
4588 if (targetm
.have_canonicalize_funcptr_for_compare ()
4589 && TREE_CODE (etype
) == POINTER_TYPE
4590 && TREE_CODE (TREE_TYPE (etype
)) == FUNCTION_TYPE
)
4595 value
= build_range_check (loc
, type
, exp
, 1, low
, high
);
4597 return invert_truthvalue_loc (loc
, value
);
4602 if (low
== 0 && high
== 0)
4603 return omit_one_operand_loc (loc
, type
, build_int_cst (type
, 1), exp
);
4606 return fold_build2_loc (loc
, LE_EXPR
, type
, exp
,
4607 fold_convert_loc (loc
, etype
, high
));
4610 return fold_build2_loc (loc
, GE_EXPR
, type
, exp
,
4611 fold_convert_loc (loc
, etype
, low
));
4613 if (operand_equal_p (low
, high
, 0))
4614 return fold_build2_loc (loc
, EQ_EXPR
, type
, exp
,
4615 fold_convert_loc (loc
, etype
, low
));
4617 if (integer_zerop (low
))
4619 if (! TYPE_UNSIGNED (etype
))
4621 etype
= unsigned_type_for (etype
);
4622 high
= fold_convert_loc (loc
, etype
, high
);
4623 exp
= fold_convert_loc (loc
, etype
, exp
);
4625 return build_range_check (loc
, type
, exp
, 1, 0, high
);
4628 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4629 if (integer_onep (low
) && TREE_CODE (high
) == INTEGER_CST
)
4631 int prec
= TYPE_PRECISION (etype
);
4633 if (wi::mask (prec
- 1, false, prec
) == high
)
4635 if (TYPE_UNSIGNED (etype
))
4637 tree signed_etype
= signed_type_for (etype
);
4638 if (TYPE_PRECISION (signed_etype
) != TYPE_PRECISION (etype
))
4640 = build_nonstandard_integer_type (TYPE_PRECISION (etype
), 0);
4642 etype
= signed_etype
;
4643 exp
= fold_convert_loc (loc
, etype
, exp
);
4645 return fold_build2_loc (loc
, GT_EXPR
, type
, exp
,
4646 build_int_cst (etype
, 0));
4650 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4651 This requires wrap-around arithmetics for the type of the expression.
4652 First make sure that arithmetics in this type is valid, then make sure
4653 that it wraps around. */
4654 if (TREE_CODE (etype
) == ENUMERAL_TYPE
|| TREE_CODE (etype
) == BOOLEAN_TYPE
)
4655 etype
= lang_hooks
.types
.type_for_size (TYPE_PRECISION (etype
),
4656 TYPE_UNSIGNED (etype
));
4658 if (TREE_CODE (etype
) == INTEGER_TYPE
&& !TYPE_OVERFLOW_WRAPS (etype
))
4660 tree utype
, minv
, maxv
;
4662 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4663 for the type in question, as we rely on this here. */
4664 utype
= unsigned_type_for (etype
);
4665 maxv
= fold_convert_loc (loc
, utype
, TYPE_MAX_VALUE (etype
));
4666 maxv
= range_binop (PLUS_EXPR
, NULL_TREE
, maxv
, 1,
4667 build_int_cst (TREE_TYPE (maxv
), 1), 1);
4668 minv
= fold_convert_loc (loc
, utype
, TYPE_MIN_VALUE (etype
));
4670 if (integer_zerop (range_binop (NE_EXPR
, integer_type_node
,
4677 high
= fold_convert_loc (loc
, etype
, high
);
4678 low
= fold_convert_loc (loc
, etype
, low
);
4679 exp
= fold_convert_loc (loc
, etype
, exp
);
4681 value
= const_binop (MINUS_EXPR
, high
, low
);
4684 if (POINTER_TYPE_P (etype
))
4686 if (value
!= 0 && !TREE_OVERFLOW (value
))
4688 low
= fold_build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (low
), low
);
4689 return build_range_check (loc
, type
,
4690 fold_build_pointer_plus_loc (loc
, exp
, low
),
4691 1, build_int_cst (etype
, 0), value
);
4696 if (value
!= 0 && !TREE_OVERFLOW (value
))
4697 return build_range_check (loc
, type
,
4698 fold_build2_loc (loc
, MINUS_EXPR
, etype
, exp
, low
),
4699 1, build_int_cst (etype
, 0), value
);
4704 /* Return the predecessor of VAL in its type, handling the infinite case. */
4707 range_predecessor (tree val
)
4709 tree type
= TREE_TYPE (val
);
4711 if (INTEGRAL_TYPE_P (type
)
4712 && operand_equal_p (val
, TYPE_MIN_VALUE (type
), 0))
4715 return range_binop (MINUS_EXPR
, NULL_TREE
, val
, 0,
4716 build_int_cst (TREE_TYPE (val
), 1), 0);
4719 /* Return the successor of VAL in its type, handling the infinite case. */
4722 range_successor (tree val
)
4724 tree type
= TREE_TYPE (val
);
4726 if (INTEGRAL_TYPE_P (type
)
4727 && operand_equal_p (val
, TYPE_MAX_VALUE (type
), 0))
4730 return range_binop (PLUS_EXPR
, NULL_TREE
, val
, 0,
4731 build_int_cst (TREE_TYPE (val
), 1), 0);
4734 /* Given two ranges, see if we can merge them into one. Return 1 if we
4735 can, 0 if we can't. Set the output range into the specified parameters. */
4738 merge_ranges (int *pin_p
, tree
*plow
, tree
*phigh
, int in0_p
, tree low0
,
4739 tree high0
, int in1_p
, tree low1
, tree high1
)
4747 int lowequal
= ((low0
== 0 && low1
== 0)
4748 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4749 low0
, 0, low1
, 0)));
4750 int highequal
= ((high0
== 0 && high1
== 0)
4751 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4752 high0
, 1, high1
, 1)));
4754 /* Make range 0 be the range that starts first, or ends last if they
4755 start at the same value. Swap them if it isn't. */
4756 if (integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4759 && integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4760 high1
, 1, high0
, 1))))
4762 temp
= in0_p
, in0_p
= in1_p
, in1_p
= temp
;
4763 tem
= low0
, low0
= low1
, low1
= tem
;
4764 tem
= high0
, high0
= high1
, high1
= tem
;
4767 /* Now flag two cases, whether the ranges are disjoint or whether the
4768 second range is totally subsumed in the first. Note that the tests
4769 below are simplified by the ones above. */
4770 no_overlap
= integer_onep (range_binop (LT_EXPR
, integer_type_node
,
4771 high0
, 1, low1
, 0));
4772 subset
= integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4773 high1
, 1, high0
, 1));
4775 /* We now have four cases, depending on whether we are including or
4776 excluding the two ranges. */
4779 /* If they don't overlap, the result is false. If the second range
4780 is a subset it is the result. Otherwise, the range is from the start
4781 of the second to the end of the first. */
4783 in_p
= 0, low
= high
= 0;
4785 in_p
= 1, low
= low1
, high
= high1
;
4787 in_p
= 1, low
= low1
, high
= high0
;
4790 else if (in0_p
&& ! in1_p
)
4792 /* If they don't overlap, the result is the first range. If they are
4793 equal, the result is false. If the second range is a subset of the
4794 first, and the ranges begin at the same place, we go from just after
4795 the end of the second range to the end of the first. If the second
4796 range is not a subset of the first, or if it is a subset and both
4797 ranges end at the same place, the range starts at the start of the
4798 first range and ends just before the second range.
4799 Otherwise, we can't describe this as a single range. */
4801 in_p
= 1, low
= low0
, high
= high0
;
4802 else if (lowequal
&& highequal
)
4803 in_p
= 0, low
= high
= 0;
4804 else if (subset
&& lowequal
)
4806 low
= range_successor (high1
);
4811 /* We are in the weird situation where high0 > high1 but
4812 high1 has no successor. Punt. */
4816 else if (! subset
|| highequal
)
4819 high
= range_predecessor (low1
);
4823 /* low0 < low1 but low1 has no predecessor. Punt. */
4831 else if (! in0_p
&& in1_p
)
4833 /* If they don't overlap, the result is the second range. If the second
4834 is a subset of the first, the result is false. Otherwise,
4835 the range starts just after the first range and ends at the
4836 end of the second. */
4838 in_p
= 1, low
= low1
, high
= high1
;
4839 else if (subset
|| highequal
)
4840 in_p
= 0, low
= high
= 0;
4843 low
= range_successor (high0
);
4848 /* high1 > high0 but high0 has no successor. Punt. */
4856 /* The case where we are excluding both ranges. Here the complex case
4857 is if they don't overlap. In that case, the only time we have a
4858 range is if they are adjacent. If the second is a subset of the
4859 first, the result is the first. Otherwise, the range to exclude
4860 starts at the beginning of the first range and ends at the end of the
4864 if (integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4865 range_successor (high0
),
4867 in_p
= 0, low
= low0
, high
= high1
;
4870 /* Canonicalize - [min, x] into - [-, x]. */
4871 if (low0
&& TREE_CODE (low0
) == INTEGER_CST
)
4872 switch (TREE_CODE (TREE_TYPE (low0
)))
4875 if (TYPE_PRECISION (TREE_TYPE (low0
))
4876 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0
))))
4880 if (tree_int_cst_equal (low0
,
4881 TYPE_MIN_VALUE (TREE_TYPE (low0
))))
4885 if (TYPE_UNSIGNED (TREE_TYPE (low0
))
4886 && integer_zerop (low0
))
4893 /* Canonicalize - [x, max] into - [x, -]. */
4894 if (high1
&& TREE_CODE (high1
) == INTEGER_CST
)
4895 switch (TREE_CODE (TREE_TYPE (high1
)))
4898 if (TYPE_PRECISION (TREE_TYPE (high1
))
4899 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1
))))
4903 if (tree_int_cst_equal (high1
,
4904 TYPE_MAX_VALUE (TREE_TYPE (high1
))))
4908 if (TYPE_UNSIGNED (TREE_TYPE (high1
))
4909 && integer_zerop (range_binop (PLUS_EXPR
, NULL_TREE
,
4911 build_int_cst (TREE_TYPE (high1
), 1),
4919 /* The ranges might be also adjacent between the maximum and
4920 minimum values of the given type. For
4921 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4922 return + [x + 1, y - 1]. */
4923 if (low0
== 0 && high1
== 0)
4925 low
= range_successor (high0
);
4926 high
= range_predecessor (low1
);
4927 if (low
== 0 || high
== 0)
4937 in_p
= 0, low
= low0
, high
= high0
;
4939 in_p
= 0, low
= low0
, high
= high1
;
4942 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4947 /* Subroutine of fold, looking inside expressions of the form
4948 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4949 of the COND_EXPR. This function is being used also to optimize
4950 A op B ? C : A, by reversing the comparison first.
4952 Return a folded expression whose code is not a COND_EXPR
4953 anymore, or NULL_TREE if no folding opportunity is found. */
4956 fold_cond_expr_with_comparison (location_t loc
, tree type
,
4957 tree arg0
, tree arg1
, tree arg2
)
4959 enum tree_code comp_code
= TREE_CODE (arg0
);
4960 tree arg00
= TREE_OPERAND (arg0
, 0);
4961 tree arg01
= TREE_OPERAND (arg0
, 1);
4962 tree arg1_type
= TREE_TYPE (arg1
);
4968 /* If we have A op 0 ? A : -A, consider applying the following
4971 A == 0? A : -A same as -A
4972 A != 0? A : -A same as A
4973 A >= 0? A : -A same as abs (A)
4974 A > 0? A : -A same as abs (A)
4975 A <= 0? A : -A same as -abs (A)
4976 A < 0? A : -A same as -abs (A)
4978 None of these transformations work for modes with signed
4979 zeros. If A is +/-0, the first two transformations will
4980 change the sign of the result (from +0 to -0, or vice
4981 versa). The last four will fix the sign of the result,
4982 even though the original expressions could be positive or
4983 negative, depending on the sign of A.
4985 Note that all these transformations are correct if A is
4986 NaN, since the two alternatives (A and -A) are also NaNs. */
4987 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
4988 && (FLOAT_TYPE_P (TREE_TYPE (arg01
))
4989 ? real_zerop (arg01
)
4990 : integer_zerop (arg01
))
4991 && ((TREE_CODE (arg2
) == NEGATE_EXPR
4992 && operand_equal_p (TREE_OPERAND (arg2
, 0), arg1
, 0))
4993 /* In the case that A is of the form X-Y, '-A' (arg2) may
4994 have already been folded to Y-X, check for that. */
4995 || (TREE_CODE (arg1
) == MINUS_EXPR
4996 && TREE_CODE (arg2
) == MINUS_EXPR
4997 && operand_equal_p (TREE_OPERAND (arg1
, 0),
4998 TREE_OPERAND (arg2
, 1), 0)
4999 && operand_equal_p (TREE_OPERAND (arg1
, 1),
5000 TREE_OPERAND (arg2
, 0), 0))))
5005 tem
= fold_convert_loc (loc
, arg1_type
, arg1
);
5006 return pedantic_non_lvalue_loc (loc
,
5007 fold_convert_loc (loc
, type
,
5008 negate_expr (tem
)));
5011 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
5014 if (flag_trapping_math
)
5019 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
5021 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
5022 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
5025 if (flag_trapping_math
)
5029 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
5031 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
5032 return negate_expr (fold_convert_loc (loc
, type
, tem
));
5034 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
5038 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5039 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5040 both transformations are correct when A is NaN: A != 0
5041 is then true, and A == 0 is false. */
5043 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
5044 && integer_zerop (arg01
) && integer_zerop (arg2
))
5046 if (comp_code
== NE_EXPR
)
5047 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
5048 else if (comp_code
== EQ_EXPR
)
5049 return build_zero_cst (type
);
5052 /* Try some transformations of A op B ? A : B.
5054 A == B? A : B same as B
5055 A != B? A : B same as A
5056 A >= B? A : B same as max (A, B)
5057 A > B? A : B same as max (B, A)
5058 A <= B? A : B same as min (A, B)
5059 A < B? A : B same as min (B, A)
5061 As above, these transformations don't work in the presence
5062 of signed zeros. For example, if A and B are zeros of
5063 opposite sign, the first two transformations will change
5064 the sign of the result. In the last four, the original
5065 expressions give different results for (A=+0, B=-0) and
5066 (A=-0, B=+0), but the transformed expressions do not.
5068 The first two transformations are correct if either A or B
5069 is a NaN. In the first transformation, the condition will
5070 be false, and B will indeed be chosen. In the case of the
5071 second transformation, the condition A != B will be true,
5072 and A will be chosen.
5074 The conversions to max() and min() are not correct if B is
5075 a number and A is not. The conditions in the original
5076 expressions will be false, so all four give B. The min()
5077 and max() versions would give a NaN instead. */
5078 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
5079 && operand_equal_for_comparison_p (arg01
, arg2
, arg00
)
5080 /* Avoid these transformations if the COND_EXPR may be used
5081 as an lvalue in the C++ front-end. PR c++/19199. */
5083 || VECTOR_TYPE_P (type
)
5084 || (! lang_GNU_CXX ()
5085 && strcmp (lang_hooks
.name
, "GNU Objective-C++") != 0)
5086 || ! maybe_lvalue_p (arg1
)
5087 || ! maybe_lvalue_p (arg2
)))
5089 tree comp_op0
= arg00
;
5090 tree comp_op1
= arg01
;
5091 tree comp_type
= TREE_TYPE (comp_op0
);
5093 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5094 if (TYPE_MAIN_VARIANT (comp_type
) == TYPE_MAIN_VARIANT (type
))
5104 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg2
));
5106 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
5111 /* In C++ a ?: expression can be an lvalue, so put the
5112 operand which will be used if they are equal first
5113 so that we can convert this back to the
5114 corresponding COND_EXPR. */
5115 if (!HONOR_NANS (arg1
))
5117 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
5118 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
5119 tem
= (comp_code
== LE_EXPR
|| comp_code
== UNLE_EXPR
)
5120 ? fold_build2_loc (loc
, MIN_EXPR
, comp_type
, comp_op0
, comp_op1
)
5121 : fold_build2_loc (loc
, MIN_EXPR
, comp_type
,
5122 comp_op1
, comp_op0
);
5123 return pedantic_non_lvalue_loc (loc
,
5124 fold_convert_loc (loc
, type
, tem
));
5131 if (!HONOR_NANS (arg1
))
5133 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
5134 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
5135 tem
= (comp_code
== GE_EXPR
|| comp_code
== UNGE_EXPR
)
5136 ? fold_build2_loc (loc
, MAX_EXPR
, comp_type
, comp_op0
, comp_op1
)
5137 : fold_build2_loc (loc
, MAX_EXPR
, comp_type
,
5138 comp_op1
, comp_op0
);
5139 return pedantic_non_lvalue_loc (loc
,
5140 fold_convert_loc (loc
, type
, tem
));
5144 if (!HONOR_NANS (arg1
))
5145 return pedantic_non_lvalue_loc (loc
,
5146 fold_convert_loc (loc
, type
, arg2
));
5149 if (!HONOR_NANS (arg1
))
5150 return pedantic_non_lvalue_loc (loc
,
5151 fold_convert_loc (loc
, type
, arg1
));
5154 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
5159 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5160 we might still be able to simplify this. For example,
5161 if C1 is one less or one more than C2, this might have started
5162 out as a MIN or MAX and been transformed by this function.
5163 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5165 if (INTEGRAL_TYPE_P (type
)
5166 && TREE_CODE (arg01
) == INTEGER_CST
5167 && TREE_CODE (arg2
) == INTEGER_CST
)
5171 if (TREE_CODE (arg1
) == INTEGER_CST
)
5173 /* We can replace A with C1 in this case. */
5174 arg1
= fold_convert_loc (loc
, type
, arg01
);
5175 return fold_build3_loc (loc
, COND_EXPR
, type
, arg0
, arg1
, arg2
);
5178 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5179 MIN_EXPR, to preserve the signedness of the comparison. */
5180 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
5182 && operand_equal_p (arg01
,
5183 const_binop (PLUS_EXPR
, arg2
,
5184 build_int_cst (type
, 1)),
5187 tem
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (arg00
), arg00
,
5188 fold_convert_loc (loc
, TREE_TYPE (arg00
),
5190 return pedantic_non_lvalue_loc (loc
,
5191 fold_convert_loc (loc
, type
, tem
));
5196 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5198 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
5200 && operand_equal_p (arg01
,
5201 const_binop (MINUS_EXPR
, arg2
,
5202 build_int_cst (type
, 1)),
5205 tem
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (arg00
), arg00
,
5206 fold_convert_loc (loc
, TREE_TYPE (arg00
),
5208 return pedantic_non_lvalue_loc (loc
,
5209 fold_convert_loc (loc
, type
, tem
));
5214 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5215 MAX_EXPR, to preserve the signedness of the comparison. */
5216 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
5218 && operand_equal_p (arg01
,
5219 const_binop (MINUS_EXPR
, arg2
,
5220 build_int_cst (type
, 1)),
5223 tem
= fold_build2_loc (loc
, MAX_EXPR
, TREE_TYPE (arg00
), arg00
,
5224 fold_convert_loc (loc
, TREE_TYPE (arg00
),
5226 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
5231 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5232 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
5234 && operand_equal_p (arg01
,
5235 const_binop (PLUS_EXPR
, arg2
,
5236 build_int_cst (type
, 1)),
5239 tem
= fold_build2_loc (loc
, MAX_EXPR
, TREE_TYPE (arg00
), arg00
,
5240 fold_convert_loc (loc
, TREE_TYPE (arg00
),
5242 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
5256 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5257 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5258 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5262 /* EXP is some logical combination of boolean tests. See if we can
5263 merge it into some range test. Return the new tree if so. */
5266 fold_range_test (location_t loc
, enum tree_code code
, tree type
,
5269 int or_op
= (code
== TRUTH_ORIF_EXPR
5270 || code
== TRUTH_OR_EXPR
);
5271 int in0_p
, in1_p
, in_p
;
5272 tree low0
, low1
, low
, high0
, high1
, high
;
5273 bool strict_overflow_p
= false;
5275 const char * const warnmsg
= G_("assuming signed overflow does not occur "
5276 "when simplifying range test");
5278 if (!INTEGRAL_TYPE_P (type
))
5281 lhs
= make_range (op0
, &in0_p
, &low0
, &high0
, &strict_overflow_p
);
5282 rhs
= make_range (op1
, &in1_p
, &low1
, &high1
, &strict_overflow_p
);
5284 /* If this is an OR operation, invert both sides; we will invert
5285 again at the end. */
5287 in0_p
= ! in0_p
, in1_p
= ! in1_p
;
5289 /* If both expressions are the same, if we can merge the ranges, and we
5290 can build the range test, return it or it inverted. If one of the
5291 ranges is always true or always false, consider it to be the same
5292 expression as the other. */
5293 if ((lhs
== 0 || rhs
== 0 || operand_equal_p (lhs
, rhs
, 0))
5294 && merge_ranges (&in_p
, &low
, &high
, in0_p
, low0
, high0
,
5296 && 0 != (tem
= (build_range_check (loc
, type
,
5298 : rhs
!= 0 ? rhs
: integer_zero_node
,
5301 if (strict_overflow_p
)
5302 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
5303 return or_op
? invert_truthvalue_loc (loc
, tem
) : tem
;
5306 /* On machines where the branch cost is expensive, if this is a
5307 short-circuited branch and the underlying object on both sides
5308 is the same, make a non-short-circuit operation. */
5309 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5310 && lhs
!= 0 && rhs
!= 0
5311 && (code
== TRUTH_ANDIF_EXPR
5312 || code
== TRUTH_ORIF_EXPR
)
5313 && operand_equal_p (lhs
, rhs
, 0))
5315 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5316 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5317 which cases we can't do this. */
5318 if (simple_operand_p (lhs
))
5319 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
5320 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
5323 else if (!lang_hooks
.decls
.global_bindings_p ()
5324 && !CONTAINS_PLACEHOLDER_P (lhs
))
5326 tree common
= save_expr (lhs
);
5328 if (0 != (lhs
= build_range_check (loc
, type
, common
,
5329 or_op
? ! in0_p
: in0_p
,
5331 && (0 != (rhs
= build_range_check (loc
, type
, common
,
5332 or_op
? ! in1_p
: in1_p
,
5335 if (strict_overflow_p
)
5336 fold_overflow_warning (warnmsg
,
5337 WARN_STRICT_OVERFLOW_COMPARISON
);
5338 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
5339 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
5348 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5349 bit value. Arrange things so the extra bits will be set to zero if and
5350 only if C is signed-extended to its full width. If MASK is nonzero,
5351 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5354 unextend (tree c
, int p
, int unsignedp
, tree mask
)
5356 tree type
= TREE_TYPE (c
);
5357 int modesize
= GET_MODE_BITSIZE (TYPE_MODE (type
));
5360 if (p
== modesize
|| unsignedp
)
5363 /* We work by getting just the sign bit into the low-order bit, then
5364 into the high-order bit, then sign-extend. We then XOR that value
5366 temp
= build_int_cst (TREE_TYPE (c
), wi::extract_uhwi (c
, p
- 1, 1));
5368 /* We must use a signed type in order to get an arithmetic right shift.
5369 However, we must also avoid introducing accidental overflows, so that
5370 a subsequent call to integer_zerop will work. Hence we must
5371 do the type conversion here. At this point, the constant is either
5372 zero or one, and the conversion to a signed type can never overflow.
5373 We could get an overflow if this conversion is done anywhere else. */
5374 if (TYPE_UNSIGNED (type
))
5375 temp
= fold_convert (signed_type_for (type
), temp
);
5377 temp
= const_binop (LSHIFT_EXPR
, temp
, size_int (modesize
- 1));
5378 temp
= const_binop (RSHIFT_EXPR
, temp
, size_int (modesize
- p
- 1));
5380 temp
= const_binop (BIT_AND_EXPR
, temp
,
5381 fold_convert (TREE_TYPE (c
), mask
));
5382 /* If necessary, convert the type back to match the type of C. */
5383 if (TYPE_UNSIGNED (type
))
5384 temp
= fold_convert (type
, temp
);
5386 return fold_convert (type
, const_binop (BIT_XOR_EXPR
, c
, temp
));
5389 /* For an expression that has the form
5393 we can drop one of the inner expressions and simplify to
5397 LOC is the location of the resulting expression. OP is the inner
5398 logical operation; the left-hand side in the examples above, while CMPOP
5399 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5400 removing a condition that guards another, as in
5401 (A != NULL && A->...) || A == NULL
5402 which we must not transform. If RHS_ONLY is true, only eliminate the
5403 right-most operand of the inner logical operation. */
5406 merge_truthop_with_opposite_arm (location_t loc
, tree op
, tree cmpop
,
5409 tree type
= TREE_TYPE (cmpop
);
5410 enum tree_code code
= TREE_CODE (cmpop
);
5411 enum tree_code truthop_code
= TREE_CODE (op
);
5412 tree lhs
= TREE_OPERAND (op
, 0);
5413 tree rhs
= TREE_OPERAND (op
, 1);
5414 tree orig_lhs
= lhs
, orig_rhs
= rhs
;
5415 enum tree_code rhs_code
= TREE_CODE (rhs
);
5416 enum tree_code lhs_code
= TREE_CODE (lhs
);
5417 enum tree_code inv_code
;
5419 if (TREE_SIDE_EFFECTS (op
) || TREE_SIDE_EFFECTS (cmpop
))
5422 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
5425 if (rhs_code
== truthop_code
)
5427 tree newrhs
= merge_truthop_with_opposite_arm (loc
, rhs
, cmpop
, rhs_only
);
5428 if (newrhs
!= NULL_TREE
)
5431 rhs_code
= TREE_CODE (rhs
);
5434 if (lhs_code
== truthop_code
&& !rhs_only
)
5436 tree newlhs
= merge_truthop_with_opposite_arm (loc
, lhs
, cmpop
, false);
5437 if (newlhs
!= NULL_TREE
)
5440 lhs_code
= TREE_CODE (lhs
);
5444 inv_code
= invert_tree_comparison (code
, HONOR_NANS (type
));
5445 if (inv_code
== rhs_code
5446 && operand_equal_p (TREE_OPERAND (rhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
5447 && operand_equal_p (TREE_OPERAND (rhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
5449 if (!rhs_only
&& inv_code
== lhs_code
5450 && operand_equal_p (TREE_OPERAND (lhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
5451 && operand_equal_p (TREE_OPERAND (lhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
5453 if (rhs
!= orig_rhs
|| lhs
!= orig_lhs
)
5454 return fold_build2_loc (loc
, truthop_code
, TREE_TYPE (cmpop
),
5459 /* Find ways of folding logical expressions of LHS and RHS:
5460 Try to merge two comparisons to the same innermost item.
5461 Look for range tests like "ch >= '0' && ch <= '9'".
5462 Look for combinations of simple terms on machines with expensive branches
5463 and evaluate the RHS unconditionally.
5465 For example, if we have p->a == 2 && p->b == 4 and we can make an
5466 object large enough to span both A and B, we can do this with a comparison
5467 against the object ANDed with the a mask.
5469 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5470 operations to do this with one comparison.
5472 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5473 function and the one above.
5475 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5476 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5478 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5481 We return the simplified tree or 0 if no optimization is possible. */
5484 fold_truth_andor_1 (location_t loc
, enum tree_code code
, tree truth_type
,
5487 /* If this is the "or" of two comparisons, we can do something if
5488 the comparisons are NE_EXPR. If this is the "and", we can do something
5489 if the comparisons are EQ_EXPR. I.e.,
5490 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5492 WANTED_CODE is this operation code. For single bit fields, we can
5493 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5494 comparison for one-bit fields. */
5496 enum tree_code wanted_code
;
5497 enum tree_code lcode
, rcode
;
5498 tree ll_arg
, lr_arg
, rl_arg
, rr_arg
;
5499 tree ll_inner
, lr_inner
, rl_inner
, rr_inner
;
5500 HOST_WIDE_INT ll_bitsize
, ll_bitpos
, lr_bitsize
, lr_bitpos
;
5501 HOST_WIDE_INT rl_bitsize
, rl_bitpos
, rr_bitsize
, rr_bitpos
;
5502 HOST_WIDE_INT xll_bitpos
, xlr_bitpos
, xrl_bitpos
, xrr_bitpos
;
5503 HOST_WIDE_INT lnbitsize
, lnbitpos
, rnbitsize
, rnbitpos
;
5504 int ll_unsignedp
, lr_unsignedp
, rl_unsignedp
, rr_unsignedp
;
5505 int ll_reversep
, lr_reversep
, rl_reversep
, rr_reversep
;
5506 machine_mode ll_mode
, lr_mode
, rl_mode
, rr_mode
;
5507 machine_mode lnmode
, rnmode
;
5508 tree ll_mask
, lr_mask
, rl_mask
, rr_mask
;
5509 tree ll_and_mask
, lr_and_mask
, rl_and_mask
, rr_and_mask
;
5510 tree l_const
, r_const
;
5511 tree lntype
, rntype
, result
;
5512 HOST_WIDE_INT first_bit
, end_bit
;
5515 /* Start by getting the comparison codes. Fail if anything is volatile.
5516 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5517 it were surrounded with a NE_EXPR. */
5519 if (TREE_SIDE_EFFECTS (lhs
) || TREE_SIDE_EFFECTS (rhs
))
5522 lcode
= TREE_CODE (lhs
);
5523 rcode
= TREE_CODE (rhs
);
5525 if (lcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (lhs
, 1)))
5527 lhs
= build2 (NE_EXPR
, truth_type
, lhs
,
5528 build_int_cst (TREE_TYPE (lhs
), 0));
5532 if (rcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (rhs
, 1)))
5534 rhs
= build2 (NE_EXPR
, truth_type
, rhs
,
5535 build_int_cst (TREE_TYPE (rhs
), 0));
5539 if (TREE_CODE_CLASS (lcode
) != tcc_comparison
5540 || TREE_CODE_CLASS (rcode
) != tcc_comparison
)
5543 ll_arg
= TREE_OPERAND (lhs
, 0);
5544 lr_arg
= TREE_OPERAND (lhs
, 1);
5545 rl_arg
= TREE_OPERAND (rhs
, 0);
5546 rr_arg
= TREE_OPERAND (rhs
, 1);
5548 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5549 if (simple_operand_p (ll_arg
)
5550 && simple_operand_p (lr_arg
))
5552 if (operand_equal_p (ll_arg
, rl_arg
, 0)
5553 && operand_equal_p (lr_arg
, rr_arg
, 0))
5555 result
= combine_comparisons (loc
, code
, lcode
, rcode
,
5556 truth_type
, ll_arg
, lr_arg
);
5560 else if (operand_equal_p (ll_arg
, rr_arg
, 0)
5561 && operand_equal_p (lr_arg
, rl_arg
, 0))
5563 result
= combine_comparisons (loc
, code
, lcode
,
5564 swap_tree_comparison (rcode
),
5565 truth_type
, ll_arg
, lr_arg
);
5571 code
= ((code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
)
5572 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
);
5574 /* If the RHS can be evaluated unconditionally and its operands are
5575 simple, it wins to evaluate the RHS unconditionally on machines
5576 with expensive branches. In this case, this isn't a comparison
5577 that can be merged. */
5579 if (BRANCH_COST (optimize_function_for_speed_p (cfun
),
5581 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg
))
5582 && simple_operand_p (rl_arg
)
5583 && simple_operand_p (rr_arg
))
5585 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5586 if (code
== TRUTH_OR_EXPR
5587 && lcode
== NE_EXPR
&& integer_zerop (lr_arg
)
5588 && rcode
== NE_EXPR
&& integer_zerop (rr_arg
)
5589 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5590 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5591 return build2_loc (loc
, NE_EXPR
, truth_type
,
5592 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5594 build_int_cst (TREE_TYPE (ll_arg
), 0));
5596 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5597 if (code
== TRUTH_AND_EXPR
5598 && lcode
== EQ_EXPR
&& integer_zerop (lr_arg
)
5599 && rcode
== EQ_EXPR
&& integer_zerop (rr_arg
)
5600 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5601 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5602 return build2_loc (loc
, EQ_EXPR
, truth_type
,
5603 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5605 build_int_cst (TREE_TYPE (ll_arg
), 0));
5608 /* See if the comparisons can be merged. Then get all the parameters for
5611 if ((lcode
!= EQ_EXPR
&& lcode
!= NE_EXPR
)
5612 || (rcode
!= EQ_EXPR
&& rcode
!= NE_EXPR
))
5615 ll_reversep
= lr_reversep
= rl_reversep
= rr_reversep
= 0;
5617 ll_inner
= decode_field_reference (loc
, ll_arg
,
5618 &ll_bitsize
, &ll_bitpos
, &ll_mode
,
5619 &ll_unsignedp
, &ll_reversep
, &volatilep
,
5620 &ll_mask
, &ll_and_mask
);
5621 lr_inner
= decode_field_reference (loc
, lr_arg
,
5622 &lr_bitsize
, &lr_bitpos
, &lr_mode
,
5623 &lr_unsignedp
, &lr_reversep
, &volatilep
,
5624 &lr_mask
, &lr_and_mask
);
5625 rl_inner
= decode_field_reference (loc
, rl_arg
,
5626 &rl_bitsize
, &rl_bitpos
, &rl_mode
,
5627 &rl_unsignedp
, &rl_reversep
, &volatilep
,
5628 &rl_mask
, &rl_and_mask
);
5629 rr_inner
= decode_field_reference (loc
, rr_arg
,
5630 &rr_bitsize
, &rr_bitpos
, &rr_mode
,
5631 &rr_unsignedp
, &rr_reversep
, &volatilep
,
5632 &rr_mask
, &rr_and_mask
);
5634 /* It must be true that the inner operation on the lhs of each
5635 comparison must be the same if we are to be able to do anything.
5636 Then see if we have constants. If not, the same must be true for
5639 || ll_reversep
!= rl_reversep
5640 || ll_inner
== 0 || rl_inner
== 0
5641 || ! operand_equal_p (ll_inner
, rl_inner
, 0))
5644 if (TREE_CODE (lr_arg
) == INTEGER_CST
5645 && TREE_CODE (rr_arg
) == INTEGER_CST
)
5647 l_const
= lr_arg
, r_const
= rr_arg
;
5648 lr_reversep
= ll_reversep
;
5650 else if (lr_reversep
!= rr_reversep
5651 || lr_inner
== 0 || rr_inner
== 0
5652 || ! operand_equal_p (lr_inner
, rr_inner
, 0))
5655 l_const
= r_const
= 0;
5657 /* If either comparison code is not correct for our logical operation,
5658 fail. However, we can convert a one-bit comparison against zero into
5659 the opposite comparison against that bit being set in the field. */
5661 wanted_code
= (code
== TRUTH_AND_EXPR
? EQ_EXPR
: NE_EXPR
);
5662 if (lcode
!= wanted_code
)
5664 if (l_const
&& integer_zerop (l_const
) && integer_pow2p (ll_mask
))
5666 /* Make the left operand unsigned, since we are only interested
5667 in the value of one bit. Otherwise we are doing the wrong
5676 /* This is analogous to the code for l_const above. */
5677 if (rcode
!= wanted_code
)
5679 if (r_const
&& integer_zerop (r_const
) && integer_pow2p (rl_mask
))
5688 /* See if we can find a mode that contains both fields being compared on
5689 the left. If we can't, fail. Otherwise, update all constants and masks
5690 to be relative to a field of that size. */
5691 first_bit
= MIN (ll_bitpos
, rl_bitpos
);
5692 end_bit
= MAX (ll_bitpos
+ ll_bitsize
, rl_bitpos
+ rl_bitsize
);
5693 lnmode
= get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
5694 TYPE_ALIGN (TREE_TYPE (ll_inner
)), word_mode
,
5696 if (lnmode
== VOIDmode
)
5699 lnbitsize
= GET_MODE_BITSIZE (lnmode
);
5700 lnbitpos
= first_bit
& ~ (lnbitsize
- 1);
5701 lntype
= lang_hooks
.types
.type_for_size (lnbitsize
, 1);
5702 xll_bitpos
= ll_bitpos
- lnbitpos
, xrl_bitpos
= rl_bitpos
- lnbitpos
;
5704 if (ll_reversep
? !BYTES_BIG_ENDIAN
: BYTES_BIG_ENDIAN
)
5706 xll_bitpos
= lnbitsize
- xll_bitpos
- ll_bitsize
;
5707 xrl_bitpos
= lnbitsize
- xrl_bitpos
- rl_bitsize
;
5710 ll_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, ll_mask
),
5711 size_int (xll_bitpos
));
5712 rl_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, rl_mask
),
5713 size_int (xrl_bitpos
));
5717 l_const
= fold_convert_loc (loc
, lntype
, l_const
);
5718 l_const
= unextend (l_const
, ll_bitsize
, ll_unsignedp
, ll_and_mask
);
5719 l_const
= const_binop (LSHIFT_EXPR
, l_const
, size_int (xll_bitpos
));
5720 if (! integer_zerop (const_binop (BIT_AND_EXPR
, l_const
,
5721 fold_build1_loc (loc
, BIT_NOT_EXPR
,
5724 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5726 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5731 r_const
= fold_convert_loc (loc
, lntype
, r_const
);
5732 r_const
= unextend (r_const
, rl_bitsize
, rl_unsignedp
, rl_and_mask
);
5733 r_const
= const_binop (LSHIFT_EXPR
, r_const
, size_int (xrl_bitpos
));
5734 if (! integer_zerop (const_binop (BIT_AND_EXPR
, r_const
,
5735 fold_build1_loc (loc
, BIT_NOT_EXPR
,
5738 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5740 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5744 /* If the right sides are not constant, do the same for it. Also,
5745 disallow this optimization if a size or signedness mismatch occurs
5746 between the left and right sides. */
5749 if (ll_bitsize
!= lr_bitsize
|| rl_bitsize
!= rr_bitsize
5750 || ll_unsignedp
!= lr_unsignedp
|| rl_unsignedp
!= rr_unsignedp
5751 /* Make sure the two fields on the right
5752 correspond to the left without being swapped. */
5753 || ll_bitpos
- rl_bitpos
!= lr_bitpos
- rr_bitpos
)
5756 first_bit
= MIN (lr_bitpos
, rr_bitpos
);
5757 end_bit
= MAX (lr_bitpos
+ lr_bitsize
, rr_bitpos
+ rr_bitsize
);
5758 rnmode
= get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
5759 TYPE_ALIGN (TREE_TYPE (lr_inner
)), word_mode
,
5761 if (rnmode
== VOIDmode
)
5764 rnbitsize
= GET_MODE_BITSIZE (rnmode
);
5765 rnbitpos
= first_bit
& ~ (rnbitsize
- 1);
5766 rntype
= lang_hooks
.types
.type_for_size (rnbitsize
, 1);
5767 xlr_bitpos
= lr_bitpos
- rnbitpos
, xrr_bitpos
= rr_bitpos
- rnbitpos
;
5769 if (lr_reversep
? !BYTES_BIG_ENDIAN
: BYTES_BIG_ENDIAN
)
5771 xlr_bitpos
= rnbitsize
- xlr_bitpos
- lr_bitsize
;
5772 xrr_bitpos
= rnbitsize
- xrr_bitpos
- rr_bitsize
;
5775 lr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
5777 size_int (xlr_bitpos
));
5778 rr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
5780 size_int (xrr_bitpos
));
5782 /* Make a mask that corresponds to both fields being compared.
5783 Do this for both items being compared. If the operands are the
5784 same size and the bits being compared are in the same position
5785 then we can do this by masking both and comparing the masked
5787 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
5788 lr_mask
= const_binop (BIT_IOR_EXPR
, lr_mask
, rr_mask
);
5789 if (lnbitsize
== rnbitsize
&& xll_bitpos
== xlr_bitpos
)
5791 lhs
= make_bit_field_ref (loc
, ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5792 ll_unsignedp
|| rl_unsignedp
, ll_reversep
);
5793 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5794 lhs
= build2 (BIT_AND_EXPR
, lntype
, lhs
, ll_mask
);
5796 rhs
= make_bit_field_ref (loc
, lr_inner
, rntype
, rnbitsize
, rnbitpos
,
5797 lr_unsignedp
|| rr_unsignedp
, lr_reversep
);
5798 if (! all_ones_mask_p (lr_mask
, rnbitsize
))
5799 rhs
= build2 (BIT_AND_EXPR
, rntype
, rhs
, lr_mask
);
5801 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
5804 /* There is still another way we can do something: If both pairs of
5805 fields being compared are adjacent, we may be able to make a wider
5806 field containing them both.
5808 Note that we still must mask the lhs/rhs expressions. Furthermore,
5809 the mask must be shifted to account for the shift done by
5810 make_bit_field_ref. */
5811 if ((ll_bitsize
+ ll_bitpos
== rl_bitpos
5812 && lr_bitsize
+ lr_bitpos
== rr_bitpos
)
5813 || (ll_bitpos
== rl_bitpos
+ rl_bitsize
5814 && lr_bitpos
== rr_bitpos
+ rr_bitsize
))
5818 lhs
= make_bit_field_ref (loc
, ll_inner
, lntype
,
5819 ll_bitsize
+ rl_bitsize
,
5820 MIN (ll_bitpos
, rl_bitpos
),
5821 ll_unsignedp
, ll_reversep
);
5822 rhs
= make_bit_field_ref (loc
, lr_inner
, rntype
,
5823 lr_bitsize
+ rr_bitsize
,
5824 MIN (lr_bitpos
, rr_bitpos
),
5825 lr_unsignedp
, lr_reversep
);
5827 ll_mask
= const_binop (RSHIFT_EXPR
, ll_mask
,
5828 size_int (MIN (xll_bitpos
, xrl_bitpos
)));
5829 lr_mask
= const_binop (RSHIFT_EXPR
, lr_mask
,
5830 size_int (MIN (xlr_bitpos
, xrr_bitpos
)));
5832 /* Convert to the smaller type before masking out unwanted bits. */
5834 if (lntype
!= rntype
)
5836 if (lnbitsize
> rnbitsize
)
5838 lhs
= fold_convert_loc (loc
, rntype
, lhs
);
5839 ll_mask
= fold_convert_loc (loc
, rntype
, ll_mask
);
5842 else if (lnbitsize
< rnbitsize
)
5844 rhs
= fold_convert_loc (loc
, lntype
, rhs
);
5845 lr_mask
= fold_convert_loc (loc
, lntype
, lr_mask
);
5850 if (! all_ones_mask_p (ll_mask
, ll_bitsize
+ rl_bitsize
))
5851 lhs
= build2 (BIT_AND_EXPR
, type
, lhs
, ll_mask
);
5853 if (! all_ones_mask_p (lr_mask
, lr_bitsize
+ rr_bitsize
))
5854 rhs
= build2 (BIT_AND_EXPR
, type
, rhs
, lr_mask
);
5856 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
5862 /* Handle the case of comparisons with constants. If there is something in
5863 common between the masks, those bits of the constants must be the same.
5864 If not, the condition is always false. Test for this to avoid generating
5865 incorrect code below. */
5866 result
= const_binop (BIT_AND_EXPR
, ll_mask
, rl_mask
);
5867 if (! integer_zerop (result
)
5868 && simple_cst_equal (const_binop (BIT_AND_EXPR
, result
, l_const
),
5869 const_binop (BIT_AND_EXPR
, result
, r_const
)) != 1)
5871 if (wanted_code
== NE_EXPR
)
5873 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5874 return constant_boolean_node (true, truth_type
);
5878 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5879 return constant_boolean_node (false, truth_type
);
5883 /* Construct the expression we will return. First get the component
5884 reference we will make. Unless the mask is all ones the width of
5885 that field, perform the mask operation. Then compare with the
5887 result
= make_bit_field_ref (loc
, ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5888 ll_unsignedp
|| rl_unsignedp
, ll_reversep
);
5890 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
5891 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5892 result
= build2_loc (loc
, BIT_AND_EXPR
, lntype
, result
, ll_mask
);
5894 return build2_loc (loc
, wanted_code
, truth_type
, result
,
5895 const_binop (BIT_IOR_EXPR
, l_const
, r_const
));
5898 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5902 optimize_minmax_comparison (location_t loc
, enum tree_code code
, tree type
,
5906 enum tree_code op_code
;
5909 int consts_equal
, consts_lt
;
5912 STRIP_SIGN_NOPS (arg0
);
5914 op_code
= TREE_CODE (arg0
);
5915 minmax_const
= TREE_OPERAND (arg0
, 1);
5916 comp_const
= fold_convert_loc (loc
, TREE_TYPE (arg0
), op1
);
5917 consts_equal
= tree_int_cst_equal (minmax_const
, comp_const
);
5918 consts_lt
= tree_int_cst_lt (minmax_const
, comp_const
);
5919 inner
= TREE_OPERAND (arg0
, 0);
5921 /* If something does not permit us to optimize, return the original tree. */
5922 if ((op_code
!= MIN_EXPR
&& op_code
!= MAX_EXPR
)
5923 || TREE_CODE (comp_const
) != INTEGER_CST
5924 || TREE_OVERFLOW (comp_const
)
5925 || TREE_CODE (minmax_const
) != INTEGER_CST
5926 || TREE_OVERFLOW (minmax_const
))
5929 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5930 and GT_EXPR, doing the rest with recursive calls using logical
5934 case NE_EXPR
: case LT_EXPR
: case LE_EXPR
:
5937 = optimize_minmax_comparison (loc
,
5938 invert_tree_comparison (code
, false),
5941 return invert_truthvalue_loc (loc
, tem
);
5947 fold_build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
5948 optimize_minmax_comparison
5949 (loc
, EQ_EXPR
, type
, arg0
, comp_const
),
5950 optimize_minmax_comparison
5951 (loc
, GT_EXPR
, type
, arg0
, comp_const
));
5954 if (op_code
== MAX_EXPR
&& consts_equal
)
5955 /* MAX (X, 0) == 0 -> X <= 0 */
5956 return fold_build2_loc (loc
, LE_EXPR
, type
, inner
, comp_const
);
5958 else if (op_code
== MAX_EXPR
&& consts_lt
)
5959 /* MAX (X, 0) == 5 -> X == 5 */
5960 return fold_build2_loc (loc
, EQ_EXPR
, type
, inner
, comp_const
);
5962 else if (op_code
== MAX_EXPR
)
5963 /* MAX (X, 0) == -1 -> false */
5964 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5966 else if (consts_equal
)
5967 /* MIN (X, 0) == 0 -> X >= 0 */
5968 return fold_build2_loc (loc
, GE_EXPR
, type
, inner
, comp_const
);
5971 /* MIN (X, 0) == 5 -> false */
5972 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5975 /* MIN (X, 0) == -1 -> X == -1 */
5976 return fold_build2_loc (loc
, EQ_EXPR
, type
, inner
, comp_const
);
5979 if (op_code
== MAX_EXPR
&& (consts_equal
|| consts_lt
))
5980 /* MAX (X, 0) > 0 -> X > 0
5981 MAX (X, 0) > 5 -> X > 5 */
5982 return fold_build2_loc (loc
, GT_EXPR
, type
, inner
, comp_const
);
5984 else if (op_code
== MAX_EXPR
)
5985 /* MAX (X, 0) > -1 -> true */
5986 return omit_one_operand_loc (loc
, type
, integer_one_node
, inner
);
5988 else if (op_code
== MIN_EXPR
&& (consts_equal
|| consts_lt
))
5989 /* MIN (X, 0) > 0 -> false
5990 MIN (X, 0) > 5 -> false */
5991 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5994 /* MIN (X, 0) > -1 -> X > -1 */
5995 return fold_build2_loc (loc
, GT_EXPR
, type
, inner
, comp_const
);
6002 /* T is an integer expression that is being multiplied, divided, or taken a
6003 modulus (CODE says which and what kind of divide or modulus) by a
6004 constant C. See if we can eliminate that operation by folding it with
6005 other operations already in T. WIDE_TYPE, if non-null, is a type that
6006 should be used for the computation if wider than our type.
6008 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6009 (X * 2) + (Y * 4). We must, however, be assured that either the original
6010 expression would not overflow or that overflow is undefined for the type
6011 in the language in question.
6013 If we return a non-null expression, it is an equivalent form of the
6014 original computation, but need not be in the original type.
6016 We set *STRICT_OVERFLOW_P to true if the return values depends on
6017 signed overflow being undefined. Otherwise we do not change
6018 *STRICT_OVERFLOW_P. */
6021 extract_muldiv (tree t
, tree c
, enum tree_code code
, tree wide_type
,
6022 bool *strict_overflow_p
)
6024 /* To avoid exponential search depth, refuse to allow recursion past
6025 three levels. Beyond that (1) it's highly unlikely that we'll find
6026 something interesting and (2) we've probably processed it before
6027 when we built the inner expression. */
6036 ret
= extract_muldiv_1 (t
, c
, code
, wide_type
, strict_overflow_p
);
6043 extract_muldiv_1 (tree t
, tree c
, enum tree_code code
, tree wide_type
,
6044 bool *strict_overflow_p
)
6046 tree type
= TREE_TYPE (t
);
6047 enum tree_code tcode
= TREE_CODE (t
);
6048 tree ctype
= (wide_type
!= 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type
))
6049 > GET_MODE_SIZE (TYPE_MODE (type
)))
6050 ? wide_type
: type
);
6052 int same_p
= tcode
== code
;
6053 tree op0
= NULL_TREE
, op1
= NULL_TREE
;
6054 bool sub_strict_overflow_p
;
6056 /* Don't deal with constants of zero here; they confuse the code below. */
6057 if (integer_zerop (c
))
6060 if (TREE_CODE_CLASS (tcode
) == tcc_unary
)
6061 op0
= TREE_OPERAND (t
, 0);
6063 if (TREE_CODE_CLASS (tcode
) == tcc_binary
)
6064 op0
= TREE_OPERAND (t
, 0), op1
= TREE_OPERAND (t
, 1);
6066 /* Note that we need not handle conditional operations here since fold
6067 already handles those cases. So just do arithmetic here. */
6071 /* For a constant, we can always simplify if we are a multiply
6072 or (for divide and modulus) if it is a multiple of our constant. */
6073 if (code
== MULT_EXPR
6074 || wi::multiple_of_p (t
, c
, TYPE_SIGN (type
)))
6076 tree tem
= const_binop (code
, fold_convert (ctype
, t
),
6077 fold_convert (ctype
, c
));
6078 /* If the multiplication overflowed to INT_MIN then we lost sign
6079 information on it and a subsequent multiplication might
6080 spuriously overflow. See PR68142. */
6081 if (TREE_OVERFLOW (tem
)
6082 && wi::eq_p (tem
, wi::min_value (TYPE_PRECISION (ctype
), SIGNED
)))
6088 CASE_CONVERT
: case NON_LVALUE_EXPR
:
6089 /* If op0 is an expression ... */
6090 if ((COMPARISON_CLASS_P (op0
)
6091 || UNARY_CLASS_P (op0
)
6092 || BINARY_CLASS_P (op0
)
6093 || VL_EXP_CLASS_P (op0
)
6094 || EXPRESSION_CLASS_P (op0
))
6095 /* ... and has wrapping overflow, and its type is smaller
6096 than ctype, then we cannot pass through as widening. */
6097 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0
))
6098 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0
)))
6099 && (TYPE_PRECISION (ctype
)
6100 > TYPE_PRECISION (TREE_TYPE (op0
))))
6101 /* ... or this is a truncation (t is narrower than op0),
6102 then we cannot pass through this narrowing. */
6103 || (TYPE_PRECISION (type
)
6104 < TYPE_PRECISION (TREE_TYPE (op0
)))
6105 /* ... or signedness changes for division or modulus,
6106 then we cannot pass through this conversion. */
6107 || (code
!= MULT_EXPR
6108 && (TYPE_UNSIGNED (ctype
)
6109 != TYPE_UNSIGNED (TREE_TYPE (op0
))))
6110 /* ... or has undefined overflow while the converted to
6111 type has not, we cannot do the operation in the inner type
6112 as that would introduce undefined overflow. */
6113 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0
))
6114 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0
)))
6115 && !TYPE_OVERFLOW_UNDEFINED (type
))))
6118 /* Pass the constant down and see if we can make a simplification. If
6119 we can, replace this expression with the inner simplification for
6120 possible later conversion to our or some other type. */
6121 if ((t2
= fold_convert (TREE_TYPE (op0
), c
)) != 0
6122 && TREE_CODE (t2
) == INTEGER_CST
6123 && !TREE_OVERFLOW (t2
)
6124 && (0 != (t1
= extract_muldiv (op0
, t2
, code
,
6126 ? ctype
: NULL_TREE
,
6127 strict_overflow_p
))))
6132 /* If widening the type changes it from signed to unsigned, then we
6133 must avoid building ABS_EXPR itself as unsigned. */
6134 if (TYPE_UNSIGNED (ctype
) && !TYPE_UNSIGNED (type
))
6136 tree cstype
= (*signed_type_for
) (ctype
);
6137 if ((t1
= extract_muldiv (op0
, c
, code
, cstype
, strict_overflow_p
))
6140 t1
= fold_build1 (tcode
, cstype
, fold_convert (cstype
, t1
));
6141 return fold_convert (ctype
, t1
);
6145 /* If the constant is negative, we cannot simplify this. */
6146 if (tree_int_cst_sgn (c
) == -1)
6150 /* For division and modulus, type can't be unsigned, as e.g.
6151 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6152 For signed types, even with wrapping overflow, this is fine. */
6153 if (code
!= MULT_EXPR
&& TYPE_UNSIGNED (type
))
6155 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
, strict_overflow_p
))
6157 return fold_build1 (tcode
, ctype
, fold_convert (ctype
, t1
));
6160 case MIN_EXPR
: case MAX_EXPR
:
6161 /* If widening the type changes the signedness, then we can't perform
6162 this optimization as that changes the result. */
6163 if (TYPE_UNSIGNED (ctype
) != TYPE_UNSIGNED (type
))
6166 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6167 sub_strict_overflow_p
= false;
6168 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
,
6169 &sub_strict_overflow_p
)) != 0
6170 && (t2
= extract_muldiv (op1
, c
, code
, wide_type
,
6171 &sub_strict_overflow_p
)) != 0)
6173 if (tree_int_cst_sgn (c
) < 0)
6174 tcode
= (tcode
== MIN_EXPR
? MAX_EXPR
: MIN_EXPR
);
6175 if (sub_strict_overflow_p
)
6176 *strict_overflow_p
= true;
6177 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6178 fold_convert (ctype
, t2
));
6182 case LSHIFT_EXPR
: case RSHIFT_EXPR
:
6183 /* If the second operand is constant, this is a multiplication
6184 or floor division, by a power of two, so we can treat it that
6185 way unless the multiplier or divisor overflows. Signed
6186 left-shift overflow is implementation-defined rather than
6187 undefined in C90, so do not convert signed left shift into
6189 if (TREE_CODE (op1
) == INTEGER_CST
6190 && (tcode
== RSHIFT_EXPR
|| TYPE_UNSIGNED (TREE_TYPE (op0
)))
6191 /* const_binop may not detect overflow correctly,
6192 so check for it explicitly here. */
6193 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node
)), op1
)
6194 && 0 != (t1
= fold_convert (ctype
,
6195 const_binop (LSHIFT_EXPR
,
6198 && !TREE_OVERFLOW (t1
))
6199 return extract_muldiv (build2 (tcode
== LSHIFT_EXPR
6200 ? MULT_EXPR
: FLOOR_DIV_EXPR
,
6202 fold_convert (ctype
, op0
),
6204 c
, code
, wide_type
, strict_overflow_p
);
6207 case PLUS_EXPR
: case MINUS_EXPR
:
6208 /* See if we can eliminate the operation on both sides. If we can, we
6209 can return a new PLUS or MINUS. If we can't, the only remaining
6210 cases where we can do anything are if the second operand is a
6212 sub_strict_overflow_p
= false;
6213 t1
= extract_muldiv (op0
, c
, code
, wide_type
, &sub_strict_overflow_p
);
6214 t2
= extract_muldiv (op1
, c
, code
, wide_type
, &sub_strict_overflow_p
);
6215 if (t1
!= 0 && t2
!= 0
6216 && (code
== MULT_EXPR
6217 /* If not multiplication, we can only do this if both operands
6218 are divisible by c. */
6219 || (multiple_of_p (ctype
, op0
, c
)
6220 && multiple_of_p (ctype
, op1
, c
))))
6222 if (sub_strict_overflow_p
)
6223 *strict_overflow_p
= true;
6224 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6225 fold_convert (ctype
, t2
));
6228 /* If this was a subtraction, negate OP1 and set it to be an addition.
6229 This simplifies the logic below. */
6230 if (tcode
== MINUS_EXPR
)
6232 tcode
= PLUS_EXPR
, op1
= negate_expr (op1
);
6233 /* If OP1 was not easily negatable, the constant may be OP0. */
6234 if (TREE_CODE (op0
) == INTEGER_CST
)
6236 std::swap (op0
, op1
);
6241 if (TREE_CODE (op1
) != INTEGER_CST
)
6244 /* If either OP1 or C are negative, this optimization is not safe for
6245 some of the division and remainder types while for others we need
6246 to change the code. */
6247 if (tree_int_cst_sgn (op1
) < 0 || tree_int_cst_sgn (c
) < 0)
6249 if (code
== CEIL_DIV_EXPR
)
6250 code
= FLOOR_DIV_EXPR
;
6251 else if (code
== FLOOR_DIV_EXPR
)
6252 code
= CEIL_DIV_EXPR
;
6253 else if (code
!= MULT_EXPR
6254 && code
!= CEIL_MOD_EXPR
&& code
!= FLOOR_MOD_EXPR
)
6258 /* If it's a multiply or a division/modulus operation of a multiple
6259 of our constant, do the operation and verify it doesn't overflow. */
6260 if (code
== MULT_EXPR
6261 || wi::multiple_of_p (op1
, c
, TYPE_SIGN (type
)))
6263 op1
= const_binop (code
, fold_convert (ctype
, op1
),
6264 fold_convert (ctype
, c
));
6265 /* We allow the constant to overflow with wrapping semantics. */
6267 || (TREE_OVERFLOW (op1
) && !TYPE_OVERFLOW_WRAPS (ctype
)))
6273 /* If we have an unsigned type, we cannot widen the operation since it
6274 will change the result if the original computation overflowed. */
6275 if (TYPE_UNSIGNED (ctype
) && ctype
!= type
)
6278 /* If we were able to eliminate our operation from the first side,
6279 apply our operation to the second side and reform the PLUS. */
6280 if (t1
!= 0 && (TREE_CODE (t1
) != code
|| code
== MULT_EXPR
))
6281 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
), op1
);
6283 /* The last case is if we are a multiply. In that case, we can
6284 apply the distributive law to commute the multiply and addition
6285 if the multiplication of the constants doesn't overflow
6286 and overflow is defined. With undefined overflow
6287 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6288 if (code
== MULT_EXPR
&& TYPE_OVERFLOW_WRAPS (ctype
))
6289 return fold_build2 (tcode
, ctype
,
6290 fold_build2 (code
, ctype
,
6291 fold_convert (ctype
, op0
),
6292 fold_convert (ctype
, c
)),
6298 /* We have a special case here if we are doing something like
6299 (C * 8) % 4 since we know that's zero. */
6300 if ((code
== TRUNC_MOD_EXPR
|| code
== CEIL_MOD_EXPR
6301 || code
== FLOOR_MOD_EXPR
|| code
== ROUND_MOD_EXPR
)
6302 /* If the multiplication can overflow we cannot optimize this. */
6303 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
))
6304 && TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
6305 && wi::multiple_of_p (op1
, c
, TYPE_SIGN (type
)))
6307 *strict_overflow_p
= true;
6308 return omit_one_operand (type
, integer_zero_node
, op0
);
6311 /* ... fall through ... */
6313 case TRUNC_DIV_EXPR
: case CEIL_DIV_EXPR
: case FLOOR_DIV_EXPR
:
6314 case ROUND_DIV_EXPR
: case EXACT_DIV_EXPR
:
6315 /* If we can extract our operation from the LHS, do so and return a
6316 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6317 do something only if the second operand is a constant. */
6319 && (t1
= extract_muldiv (op0
, c
, code
, wide_type
,
6320 strict_overflow_p
)) != 0)
6321 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6322 fold_convert (ctype
, op1
));
6323 else if (tcode
== MULT_EXPR
&& code
== MULT_EXPR
6324 && (t1
= extract_muldiv (op1
, c
, code
, wide_type
,
6325 strict_overflow_p
)) != 0)
6326 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6327 fold_convert (ctype
, t1
));
6328 else if (TREE_CODE (op1
) != INTEGER_CST
)
6331 /* If these are the same operation types, we can associate them
6332 assuming no overflow. */
6335 bool overflow_p
= false;
6336 bool overflow_mul_p
;
6337 signop sign
= TYPE_SIGN (ctype
);
6338 wide_int mul
= wi::mul (op1
, c
, sign
, &overflow_mul_p
);
6339 overflow_p
= TREE_OVERFLOW (c
) | TREE_OVERFLOW (op1
);
6341 && ((sign
== UNSIGNED
&& tcode
!= MULT_EXPR
) || sign
== SIGNED
))
6345 mul
= wide_int::from (mul
, TYPE_PRECISION (ctype
),
6346 TYPE_SIGN (TREE_TYPE (op1
)));
6347 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6348 wide_int_to_tree (ctype
, mul
));
6352 /* If these operations "cancel" each other, we have the main
6353 optimizations of this pass, which occur when either constant is a
6354 multiple of the other, in which case we replace this with either an
6355 operation or CODE or TCODE.
6357 If we have an unsigned type, we cannot do this since it will change
6358 the result if the original computation overflowed. */
6359 if (TYPE_OVERFLOW_UNDEFINED (ctype
)
6360 && ((code
== MULT_EXPR
&& tcode
== EXACT_DIV_EXPR
)
6361 || (tcode
== MULT_EXPR
6362 && code
!= TRUNC_MOD_EXPR
&& code
!= CEIL_MOD_EXPR
6363 && code
!= FLOOR_MOD_EXPR
&& code
!= ROUND_MOD_EXPR
6364 && code
!= MULT_EXPR
)))
6366 if (wi::multiple_of_p (op1
, c
, TYPE_SIGN (type
)))
6368 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6369 *strict_overflow_p
= true;
6370 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6371 fold_convert (ctype
,
6372 const_binop (TRUNC_DIV_EXPR
,
6375 else if (wi::multiple_of_p (c
, op1
, TYPE_SIGN (type
)))
6377 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6378 *strict_overflow_p
= true;
6379 return fold_build2 (code
, ctype
, fold_convert (ctype
, op0
),
6380 fold_convert (ctype
,
6381 const_binop (TRUNC_DIV_EXPR
,
6394 /* Return a node which has the indicated constant VALUE (either 0 or
6395 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6396 and is of the indicated TYPE. */
6399 constant_boolean_node (bool value
, tree type
)
6401 if (type
== integer_type_node
)
6402 return value
? integer_one_node
: integer_zero_node
;
6403 else if (type
== boolean_type_node
)
6404 return value
? boolean_true_node
: boolean_false_node
;
6405 else if (TREE_CODE (type
) == VECTOR_TYPE
)
6406 return build_vector_from_val (type
,
6407 build_int_cst (TREE_TYPE (type
),
6410 return fold_convert (type
, value
? integer_one_node
: integer_zero_node
);
6414 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6415 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6416 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6417 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6418 COND is the first argument to CODE; otherwise (as in the example
6419 given here), it is the second argument. TYPE is the type of the
6420 original expression. Return NULL_TREE if no simplification is
6424 fold_binary_op_with_conditional_arg (location_t loc
,
6425 enum tree_code code
,
6426 tree type
, tree op0
, tree op1
,
6427 tree cond
, tree arg
, int cond_first_p
)
6429 tree cond_type
= cond_first_p
? TREE_TYPE (op0
) : TREE_TYPE (op1
);
6430 tree arg_type
= cond_first_p
? TREE_TYPE (op1
) : TREE_TYPE (op0
);
6431 tree test
, true_value
, false_value
;
6432 tree lhs
= NULL_TREE
;
6433 tree rhs
= NULL_TREE
;
6434 enum tree_code cond_code
= COND_EXPR
;
6436 if (TREE_CODE (cond
) == COND_EXPR
6437 || TREE_CODE (cond
) == VEC_COND_EXPR
)
6439 test
= TREE_OPERAND (cond
, 0);
6440 true_value
= TREE_OPERAND (cond
, 1);
6441 false_value
= TREE_OPERAND (cond
, 2);
6442 /* If this operand throws an expression, then it does not make
6443 sense to try to perform a logical or arithmetic operation
6445 if (VOID_TYPE_P (TREE_TYPE (true_value
)))
6447 if (VOID_TYPE_P (TREE_TYPE (false_value
)))
6452 tree testtype
= TREE_TYPE (cond
);
6454 true_value
= constant_boolean_node (true, testtype
);
6455 false_value
= constant_boolean_node (false, testtype
);
6458 if (TREE_CODE (TREE_TYPE (test
)) == VECTOR_TYPE
)
6459 cond_code
= VEC_COND_EXPR
;
6461 /* This transformation is only worthwhile if we don't have to wrap ARG
6462 in a SAVE_EXPR and the operation can be simplified without recursing
6463 on at least one of the branches once its pushed inside the COND_EXPR. */
6464 if (!TREE_CONSTANT (arg
)
6465 && (TREE_SIDE_EFFECTS (arg
)
6466 || TREE_CODE (arg
) == COND_EXPR
|| TREE_CODE (arg
) == VEC_COND_EXPR
6467 || TREE_CONSTANT (true_value
) || TREE_CONSTANT (false_value
)))
6470 arg
= fold_convert_loc (loc
, arg_type
, arg
);
6473 true_value
= fold_convert_loc (loc
, cond_type
, true_value
);
6475 lhs
= fold_build2_loc (loc
, code
, type
, true_value
, arg
);
6477 lhs
= fold_build2_loc (loc
, code
, type
, arg
, true_value
);
6481 false_value
= fold_convert_loc (loc
, cond_type
, false_value
);
6483 rhs
= fold_build2_loc (loc
, code
, type
, false_value
, arg
);
6485 rhs
= fold_build2_loc (loc
, code
, type
, arg
, false_value
);
6488 /* Check that we have simplified at least one of the branches. */
6489 if (!TREE_CONSTANT (arg
) && !TREE_CONSTANT (lhs
) && !TREE_CONSTANT (rhs
))
6492 return fold_build3_loc (loc
, cond_code
, type
, test
, lhs
, rhs
);
6496 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6498 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6499 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6500 ADDEND is the same as X.
6502 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6503 and finite. The problematic cases are when X is zero, and its mode
6504 has signed zeros. In the case of rounding towards -infinity,
6505 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6506 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6509 fold_real_zero_addition_p (const_tree type
, const_tree addend
, int negate
)
6511 if (!real_zerop (addend
))
6514 /* Don't allow the fold with -fsignaling-nans. */
6515 if (HONOR_SNANS (element_mode (type
)))
6518 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6519 if (!HONOR_SIGNED_ZEROS (element_mode (type
)))
6522 /* In a vector or complex, we would need to check the sign of all zeros. */
6523 if (TREE_CODE (addend
) != REAL_CST
)
6526 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6527 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend
)))
6530 /* The mode has signed zeros, and we have to honor their sign.
6531 In this situation, there is only one case we can return true for.
6532 X - 0 is the same as X unless rounding towards -infinity is
6534 return negate
&& !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
));
6537 /* Subroutine of fold() that optimizes comparisons of a division by
6538 a nonzero integer constant against an integer constant, i.e.
6541 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6542 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6543 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6545 The function returns the constant folded tree if a simplification
6546 can be made, and NULL_TREE otherwise. */
6549 fold_div_compare (location_t loc
,
6550 enum tree_code code
, tree type
, tree arg0
, tree arg1
)
6552 tree prod
, tmp
, hi
, lo
;
6553 tree arg00
= TREE_OPERAND (arg0
, 0);
6554 tree arg01
= TREE_OPERAND (arg0
, 1);
6555 signop sign
= TYPE_SIGN (TREE_TYPE (arg0
));
6556 bool neg_overflow
= false;
6559 /* We have to do this the hard way to detect unsigned overflow.
6560 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6561 wide_int val
= wi::mul (arg01
, arg1
, sign
, &overflow
);
6562 prod
= force_fit_type (TREE_TYPE (arg00
), val
, -1, overflow
);
6563 neg_overflow
= false;
6565 if (sign
== UNSIGNED
)
6567 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6568 build_int_cst (TREE_TYPE (arg01
), 1));
6571 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6572 val
= wi::add (prod
, tmp
, sign
, &overflow
);
6573 hi
= force_fit_type (TREE_TYPE (arg00
), val
,
6574 -1, overflow
| TREE_OVERFLOW (prod
));
6576 else if (tree_int_cst_sgn (arg01
) >= 0)
6578 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6579 build_int_cst (TREE_TYPE (arg01
), 1));
6580 switch (tree_int_cst_sgn (arg1
))
6583 neg_overflow
= true;
6584 lo
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6589 lo
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6594 hi
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
6604 /* A negative divisor reverses the relational operators. */
6605 code
= swap_tree_comparison (code
);
6607 tmp
= int_const_binop (PLUS_EXPR
, arg01
,
6608 build_int_cst (TREE_TYPE (arg01
), 1));
6609 switch (tree_int_cst_sgn (arg1
))
6612 hi
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6617 hi
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6622 neg_overflow
= true;
6623 lo
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
6635 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6636 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg00
);
6637 if (TREE_OVERFLOW (hi
))
6638 return fold_build2_loc (loc
, GE_EXPR
, type
, arg00
, lo
);
6639 if (TREE_OVERFLOW (lo
))
6640 return fold_build2_loc (loc
, LE_EXPR
, type
, arg00
, hi
);
6641 return build_range_check (loc
, type
, arg00
, 1, lo
, hi
);
6644 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6645 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg00
);
6646 if (TREE_OVERFLOW (hi
))
6647 return fold_build2_loc (loc
, LT_EXPR
, type
, arg00
, lo
);
6648 if (TREE_OVERFLOW (lo
))
6649 return fold_build2_loc (loc
, GT_EXPR
, type
, arg00
, hi
);
6650 return build_range_check (loc
, type
, arg00
, 0, lo
, hi
);
6653 if (TREE_OVERFLOW (lo
))
6655 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6656 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6658 return fold_build2_loc (loc
, LT_EXPR
, type
, arg00
, lo
);
6661 if (TREE_OVERFLOW (hi
))
6663 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6664 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6666 return fold_build2_loc (loc
, LE_EXPR
, type
, arg00
, hi
);
6669 if (TREE_OVERFLOW (hi
))
6671 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6672 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6674 return fold_build2_loc (loc
, GT_EXPR
, type
, arg00
, hi
);
6677 if (TREE_OVERFLOW (lo
))
6679 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6680 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6682 return fold_build2_loc (loc
, GE_EXPR
, type
, arg00
, lo
);
6692 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6693 equality/inequality test, then return a simplified form of the test
6694 using a sign testing. Otherwise return NULL. TYPE is the desired
6698 fold_single_bit_test_into_sign_test (location_t loc
,
6699 enum tree_code code
, tree arg0
, tree arg1
,
6702 /* If this is testing a single bit, we can optimize the test. */
6703 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6704 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6705 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6707 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6708 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6709 tree arg00
= sign_bit_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1));
6711 if (arg00
!= NULL_TREE
6712 /* This is only a win if casting to a signed type is cheap,
6713 i.e. when arg00's type is not a partial mode. */
6714 && TYPE_PRECISION (TREE_TYPE (arg00
))
6715 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00
))))
6717 tree stype
= signed_type_for (TREE_TYPE (arg00
));
6718 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
6720 fold_convert_loc (loc
, stype
, arg00
),
6721 build_int_cst (stype
, 0));
6728 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6729 equality/inequality test, then return a simplified form of
6730 the test using shifts and logical operations. Otherwise return
6731 NULL. TYPE is the desired result type. */
6734 fold_single_bit_test (location_t loc
, enum tree_code code
,
6735 tree arg0
, tree arg1
, tree result_type
)
6737 /* If this is testing a single bit, we can optimize the test. */
6738 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6739 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6740 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6742 tree inner
= TREE_OPERAND (arg0
, 0);
6743 tree type
= TREE_TYPE (arg0
);
6744 int bitnum
= tree_log2 (TREE_OPERAND (arg0
, 1));
6745 machine_mode operand_mode
= TYPE_MODE (type
);
6747 tree signed_type
, unsigned_type
, intermediate_type
;
6750 /* First, see if we can fold the single bit test into a sign-bit
6752 tem
= fold_single_bit_test_into_sign_test (loc
, code
, arg0
, arg1
,
6757 /* Otherwise we have (A & C) != 0 where C is a single bit,
6758 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6759 Similarly for (A & C) == 0. */
6761 /* If INNER is a right shift of a constant and it plus BITNUM does
6762 not overflow, adjust BITNUM and INNER. */
6763 if (TREE_CODE (inner
) == RSHIFT_EXPR
6764 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
6765 && bitnum
< TYPE_PRECISION (type
)
6766 && wi::ltu_p (TREE_OPERAND (inner
, 1),
6767 TYPE_PRECISION (type
) - bitnum
))
6769 bitnum
+= tree_to_uhwi (TREE_OPERAND (inner
, 1));
6770 inner
= TREE_OPERAND (inner
, 0);
6773 /* If we are going to be able to omit the AND below, we must do our
6774 operations as unsigned. If we must use the AND, we have a choice.
6775 Normally unsigned is faster, but for some machines signed is. */
6776 ops_unsigned
= (LOAD_EXTEND_OP (operand_mode
) == SIGN_EXTEND
6777 && !flag_syntax_only
) ? 0 : 1;
6779 signed_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 0);
6780 unsigned_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 1);
6781 intermediate_type
= ops_unsigned
? unsigned_type
: signed_type
;
6782 inner
= fold_convert_loc (loc
, intermediate_type
, inner
);
6785 inner
= build2 (RSHIFT_EXPR
, intermediate_type
,
6786 inner
, size_int (bitnum
));
6788 one
= build_int_cst (intermediate_type
, 1);
6790 if (code
== EQ_EXPR
)
6791 inner
= fold_build2_loc (loc
, BIT_XOR_EXPR
, intermediate_type
, inner
, one
);
6793 /* Put the AND last so it can combine with more things. */
6794 inner
= build2 (BIT_AND_EXPR
, intermediate_type
, inner
, one
);
6796 /* Make sure to return the proper type. */
6797 inner
= fold_convert_loc (loc
, result_type
, inner
);
6804 /* Check whether we are allowed to reorder operands arg0 and arg1,
6805 such that the evaluation of arg1 occurs before arg0. */
6808 reorder_operands_p (const_tree arg0
, const_tree arg1
)
6810 if (! flag_evaluation_order
)
6812 if (TREE_CONSTANT (arg0
) || TREE_CONSTANT (arg1
))
6814 return ! TREE_SIDE_EFFECTS (arg0
)
6815 && ! TREE_SIDE_EFFECTS (arg1
);
6818 /* Test whether it is preferable two swap two operands, ARG0 and
6819 ARG1, for example because ARG0 is an integer constant and ARG1
6820 isn't. If REORDER is true, only recommend swapping if we can
6821 evaluate the operands in reverse order. */
6824 tree_swap_operands_p (const_tree arg0
, const_tree arg1
, bool reorder
)
6826 if (CONSTANT_CLASS_P (arg1
))
6828 if (CONSTANT_CLASS_P (arg0
))
6834 if (TREE_CONSTANT (arg1
))
6836 if (TREE_CONSTANT (arg0
))
6839 if (reorder
&& flag_evaluation_order
6840 && (TREE_SIDE_EFFECTS (arg0
) || TREE_SIDE_EFFECTS (arg1
)))
6843 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6844 for commutative and comparison operators. Ensuring a canonical
6845 form allows the optimizers to find additional redundancies without
6846 having to explicitly check for both orderings. */
6847 if (TREE_CODE (arg0
) == SSA_NAME
6848 && TREE_CODE (arg1
) == SSA_NAME
6849 && SSA_NAME_VERSION (arg0
) > SSA_NAME_VERSION (arg1
))
6852 /* Put SSA_NAMEs last. */
6853 if (TREE_CODE (arg1
) == SSA_NAME
)
6855 if (TREE_CODE (arg0
) == SSA_NAME
)
6858 /* Put variables last. */
6868 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6869 means A >= Y && A != MAX, but in this case we know that
6870 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6873 fold_to_nonsharp_ineq_using_bound (location_t loc
, tree ineq
, tree bound
)
6875 tree a
, typea
, type
= TREE_TYPE (ineq
), a1
, diff
, y
;
6877 if (TREE_CODE (bound
) == LT_EXPR
)
6878 a
= TREE_OPERAND (bound
, 0);
6879 else if (TREE_CODE (bound
) == GT_EXPR
)
6880 a
= TREE_OPERAND (bound
, 1);
6884 typea
= TREE_TYPE (a
);
6885 if (!INTEGRAL_TYPE_P (typea
)
6886 && !POINTER_TYPE_P (typea
))
6889 if (TREE_CODE (ineq
) == LT_EXPR
)
6891 a1
= TREE_OPERAND (ineq
, 1);
6892 y
= TREE_OPERAND (ineq
, 0);
6894 else if (TREE_CODE (ineq
) == GT_EXPR
)
6896 a1
= TREE_OPERAND (ineq
, 0);
6897 y
= TREE_OPERAND (ineq
, 1);
6902 if (TREE_TYPE (a1
) != typea
)
6905 if (POINTER_TYPE_P (typea
))
6907 /* Convert the pointer types into integer before taking the difference. */
6908 tree ta
= fold_convert_loc (loc
, ssizetype
, a
);
6909 tree ta1
= fold_convert_loc (loc
, ssizetype
, a1
);
6910 diff
= fold_binary_loc (loc
, MINUS_EXPR
, ssizetype
, ta1
, ta
);
6913 diff
= fold_binary_loc (loc
, MINUS_EXPR
, typea
, a1
, a
);
6915 if (!diff
|| !integer_onep (diff
))
6918 return fold_build2_loc (loc
, GE_EXPR
, type
, a
, y
);
6921 /* Fold a sum or difference of at least one multiplication.
6922 Returns the folded tree or NULL if no simplification could be made. */
6925 fold_plusminus_mult_expr (location_t loc
, enum tree_code code
, tree type
,
6926 tree arg0
, tree arg1
)
6928 tree arg00
, arg01
, arg10
, arg11
;
6929 tree alt0
= NULL_TREE
, alt1
= NULL_TREE
, same
;
6931 /* (A * C) +- (B * C) -> (A+-B) * C.
6932 (A * C) +- A -> A * (C+-1).
6933 We are most concerned about the case where C is a constant,
6934 but other combinations show up during loop reduction. Since
6935 it is not difficult, try all four possibilities. */
6937 if (TREE_CODE (arg0
) == MULT_EXPR
)
6939 arg00
= TREE_OPERAND (arg0
, 0);
6940 arg01
= TREE_OPERAND (arg0
, 1);
6942 else if (TREE_CODE (arg0
) == INTEGER_CST
)
6944 arg00
= build_one_cst (type
);
6949 /* We cannot generate constant 1 for fract. */
6950 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
6953 arg01
= build_one_cst (type
);
6955 if (TREE_CODE (arg1
) == MULT_EXPR
)
6957 arg10
= TREE_OPERAND (arg1
, 0);
6958 arg11
= TREE_OPERAND (arg1
, 1);
6960 else if (TREE_CODE (arg1
) == INTEGER_CST
)
6962 arg10
= build_one_cst (type
);
6963 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
6964 the purpose of this canonicalization. */
6965 if (wi::neg_p (arg1
, TYPE_SIGN (TREE_TYPE (arg1
)))
6966 && negate_expr_p (arg1
)
6967 && code
== PLUS_EXPR
)
6969 arg11
= negate_expr (arg1
);
6977 /* We cannot generate constant 1 for fract. */
6978 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
6981 arg11
= build_one_cst (type
);
6985 if (operand_equal_p (arg01
, arg11
, 0))
6986 same
= arg01
, alt0
= arg00
, alt1
= arg10
;
6987 else if (operand_equal_p (arg00
, arg10
, 0))
6988 same
= arg00
, alt0
= arg01
, alt1
= arg11
;
6989 else if (operand_equal_p (arg00
, arg11
, 0))
6990 same
= arg00
, alt0
= arg01
, alt1
= arg10
;
6991 else if (operand_equal_p (arg01
, arg10
, 0))
6992 same
= arg01
, alt0
= arg00
, alt1
= arg11
;
6994 /* No identical multiplicands; see if we can find a common
6995 power-of-two factor in non-power-of-two multiplies. This
6996 can help in multi-dimensional array access. */
6997 else if (tree_fits_shwi_p (arg01
)
6998 && tree_fits_shwi_p (arg11
))
7000 HOST_WIDE_INT int01
, int11
, tmp
;
7003 int01
= tree_to_shwi (arg01
);
7004 int11
= tree_to_shwi (arg11
);
7006 /* Move min of absolute values to int11. */
7007 if (absu_hwi (int01
) < absu_hwi (int11
))
7009 tmp
= int01
, int01
= int11
, int11
= tmp
;
7010 alt0
= arg00
, arg00
= arg10
, arg10
= alt0
;
7017 if (exact_log2 (absu_hwi (int11
)) > 0 && int01
% int11
== 0
7018 /* The remainder should not be a constant, otherwise we
7019 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7020 increased the number of multiplications necessary. */
7021 && TREE_CODE (arg10
) != INTEGER_CST
)
7023 alt0
= fold_build2_loc (loc
, MULT_EXPR
, TREE_TYPE (arg00
), arg00
,
7024 build_int_cst (TREE_TYPE (arg00
),
7029 maybe_same
= alt0
, alt0
= alt1
, alt1
= maybe_same
;
7034 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7035 fold_build2_loc (loc
, code
, type
,
7036 fold_convert_loc (loc
, type
, alt0
),
7037 fold_convert_loc (loc
, type
, alt1
)),
7038 fold_convert_loc (loc
, type
, same
));
7043 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7044 specified by EXPR into the buffer PTR of length LEN bytes.
7045 Return the number of bytes placed in the buffer, or zero
7049 native_encode_int (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7051 tree type
= TREE_TYPE (expr
);
7052 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7053 int byte
, offset
, word
, words
;
7054 unsigned char value
;
7056 if ((off
== -1 && total_bytes
> len
)
7057 || off
>= total_bytes
)
7061 words
= total_bytes
/ UNITS_PER_WORD
;
7063 for (byte
= 0; byte
< total_bytes
; byte
++)
7065 int bitpos
= byte
* BITS_PER_UNIT
;
7066 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7068 value
= wi::extract_uhwi (wi::to_widest (expr
), bitpos
, BITS_PER_UNIT
);
7070 if (total_bytes
> UNITS_PER_WORD
)
7072 word
= byte
/ UNITS_PER_WORD
;
7073 if (WORDS_BIG_ENDIAN
)
7074 word
= (words
- 1) - word
;
7075 offset
= word
* UNITS_PER_WORD
;
7076 if (BYTES_BIG_ENDIAN
)
7077 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7079 offset
+= byte
% UNITS_PER_WORD
;
7082 offset
= BYTES_BIG_ENDIAN
? (total_bytes
- 1) - byte
: byte
;
7084 && offset
- off
< len
)
7085 ptr
[offset
- off
] = value
;
7087 return MIN (len
, total_bytes
- off
);
7091 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7092 specified by EXPR into the buffer PTR of length LEN bytes.
7093 Return the number of bytes placed in the buffer, or zero
7097 native_encode_fixed (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7099 tree type
= TREE_TYPE (expr
);
7100 machine_mode mode
= TYPE_MODE (type
);
7101 int total_bytes
= GET_MODE_SIZE (mode
);
7102 FIXED_VALUE_TYPE value
;
7103 tree i_value
, i_type
;
7105 if (total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7108 i_type
= lang_hooks
.types
.type_for_size (GET_MODE_BITSIZE (mode
), 1);
7110 if (NULL_TREE
== i_type
7111 || TYPE_PRECISION (i_type
) != total_bytes
)
7114 value
= TREE_FIXED_CST (expr
);
7115 i_value
= double_int_to_tree (i_type
, value
.data
);
7117 return native_encode_int (i_value
, ptr
, len
, off
);
7121 /* Subroutine of native_encode_expr. Encode the REAL_CST
7122 specified by EXPR into the buffer PTR of length LEN bytes.
7123 Return the number of bytes placed in the buffer, or zero
7127 native_encode_real (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7129 tree type
= TREE_TYPE (expr
);
7130 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7131 int byte
, offset
, word
, words
, bitpos
;
7132 unsigned char value
;
7134 /* There are always 32 bits in each long, no matter the size of
7135 the hosts long. We handle floating point representations with
7139 if ((off
== -1 && total_bytes
> len
)
7140 || off
>= total_bytes
)
7144 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7146 real_to_target (tmp
, TREE_REAL_CST_PTR (expr
), TYPE_MODE (type
));
7148 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7149 bitpos
+= BITS_PER_UNIT
)
7151 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7152 value
= (unsigned char) (tmp
[bitpos
/ 32] >> (bitpos
& 31));
7154 if (UNITS_PER_WORD
< 4)
7156 word
= byte
/ UNITS_PER_WORD
;
7157 if (WORDS_BIG_ENDIAN
)
7158 word
= (words
- 1) - word
;
7159 offset
= word
* UNITS_PER_WORD
;
7160 if (BYTES_BIG_ENDIAN
)
7161 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7163 offset
+= byte
% UNITS_PER_WORD
;
7166 offset
= BYTES_BIG_ENDIAN
? 3 - byte
: byte
;
7167 offset
= offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3);
7169 && offset
- off
< len
)
7170 ptr
[offset
- off
] = value
;
7172 return MIN (len
, total_bytes
- off
);
7175 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7176 specified by EXPR into the buffer PTR of length LEN bytes.
7177 Return the number of bytes placed in the buffer, or zero
7181 native_encode_complex (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7186 part
= TREE_REALPART (expr
);
7187 rsize
= native_encode_expr (part
, ptr
, len
, off
);
7191 part
= TREE_IMAGPART (expr
);
7193 off
= MAX (0, off
- GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part
))));
7194 isize
= native_encode_expr (part
, ptr
+rsize
, len
-rsize
, off
);
7198 return rsize
+ isize
;
7202 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7203 specified by EXPR into the buffer PTR of length LEN bytes.
7204 Return the number of bytes placed in the buffer, or zero
7208 native_encode_vector (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7215 count
= VECTOR_CST_NELTS (expr
);
7216 itype
= TREE_TYPE (TREE_TYPE (expr
));
7217 size
= GET_MODE_SIZE (TYPE_MODE (itype
));
7218 for (i
= 0; i
< count
; i
++)
7225 elem
= VECTOR_CST_ELT (expr
, i
);
7226 int res
= native_encode_expr (elem
, ptr
+offset
, len
-offset
, off
);
7227 if ((off
== -1 && res
!= size
)
7240 /* Subroutine of native_encode_expr. Encode the STRING_CST
7241 specified by EXPR into the buffer PTR of length LEN bytes.
7242 Return the number of bytes placed in the buffer, or zero
7246 native_encode_string (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7248 tree type
= TREE_TYPE (expr
);
7249 HOST_WIDE_INT total_bytes
;
7251 if (TREE_CODE (type
) != ARRAY_TYPE
7252 || TREE_CODE (TREE_TYPE (type
)) != INTEGER_TYPE
7253 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) != BITS_PER_UNIT
7254 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type
)))
7256 total_bytes
= tree_to_shwi (TYPE_SIZE_UNIT (type
));
7257 if ((off
== -1 && total_bytes
> len
)
7258 || off
>= total_bytes
)
7262 if (TREE_STRING_LENGTH (expr
) - off
< MIN (total_bytes
, len
))
7265 if (off
< TREE_STRING_LENGTH (expr
))
7267 written
= MIN (len
, TREE_STRING_LENGTH (expr
) - off
);
7268 memcpy (ptr
, TREE_STRING_POINTER (expr
) + off
, written
);
7270 memset (ptr
+ written
, 0,
7271 MIN (total_bytes
- written
, len
- written
));
7274 memcpy (ptr
, TREE_STRING_POINTER (expr
) + off
, MIN (total_bytes
, len
));
7275 return MIN (total_bytes
- off
, len
);
7279 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7280 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7281 buffer PTR of length LEN bytes. If OFF is not -1 then start
7282 the encoding at byte offset OFF and encode at most LEN bytes.
7283 Return the number of bytes placed in the buffer, or zero upon failure. */
7286 native_encode_expr (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7288 /* We don't support starting at negative offset and -1 is special. */
7292 switch (TREE_CODE (expr
))
7295 return native_encode_int (expr
, ptr
, len
, off
);
7298 return native_encode_real (expr
, ptr
, len
, off
);
7301 return native_encode_fixed (expr
, ptr
, len
, off
);
7304 return native_encode_complex (expr
, ptr
, len
, off
);
7307 return native_encode_vector (expr
, ptr
, len
, off
);
7310 return native_encode_string (expr
, ptr
, len
, off
);
7318 /* Subroutine of native_interpret_expr. Interpret the contents of
7319 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7320 If the buffer cannot be interpreted, return NULL_TREE. */
7323 native_interpret_int (tree type
, const unsigned char *ptr
, int len
)
7325 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7327 if (total_bytes
> len
7328 || total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7331 wide_int result
= wi::from_buffer (ptr
, total_bytes
);
7333 return wide_int_to_tree (type
, result
);
7337 /* Subroutine of native_interpret_expr. Interpret the contents of
7338 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7339 If the buffer cannot be interpreted, return NULL_TREE. */
7342 native_interpret_fixed (tree type
, const unsigned char *ptr
, int len
)
7344 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7346 FIXED_VALUE_TYPE fixed_value
;
7348 if (total_bytes
> len
7349 || total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7352 result
= double_int::from_buffer (ptr
, total_bytes
);
7353 fixed_value
= fixed_from_double_int (result
, TYPE_MODE (type
));
7355 return build_fixed (type
, fixed_value
);
7359 /* Subroutine of native_interpret_expr. Interpret the contents of
7360 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7361 If the buffer cannot be interpreted, return NULL_TREE. */
7364 native_interpret_real (tree type
, const unsigned char *ptr
, int len
)
7366 machine_mode mode
= TYPE_MODE (type
);
7367 int total_bytes
= GET_MODE_SIZE (mode
);
7368 unsigned char value
;
7369 /* There are always 32 bits in each long, no matter the size of
7370 the hosts long. We handle floating point representations with
7375 total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7376 if (total_bytes
> len
|| total_bytes
> 24)
7378 int words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7380 memset (tmp
, 0, sizeof (tmp
));
7381 for (int bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7382 bitpos
+= BITS_PER_UNIT
)
7384 /* Both OFFSET and BYTE index within a long;
7385 bitpos indexes the whole float. */
7386 int offset
, byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7387 if (UNITS_PER_WORD
< 4)
7389 int word
= byte
/ UNITS_PER_WORD
;
7390 if (WORDS_BIG_ENDIAN
)
7391 word
= (words
- 1) - word
;
7392 offset
= word
* UNITS_PER_WORD
;
7393 if (BYTES_BIG_ENDIAN
)
7394 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7396 offset
+= byte
% UNITS_PER_WORD
;
7401 if (BYTES_BIG_ENDIAN
)
7403 /* Reverse bytes within each long, or within the entire float
7404 if it's smaller than a long (for HFmode). */
7405 offset
= MIN (3, total_bytes
- 1) - offset
;
7406 gcc_assert (offset
>= 0);
7409 value
= ptr
[offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3)];
7411 tmp
[bitpos
/ 32] |= (unsigned long)value
<< (bitpos
& 31);
7414 real_from_target (&r
, tmp
, mode
);
7415 return build_real (type
, r
);
7419 /* Subroutine of native_interpret_expr. Interpret the contents of
7420 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7421 If the buffer cannot be interpreted, return NULL_TREE. */
7424 native_interpret_complex (tree type
, const unsigned char *ptr
, int len
)
7426 tree etype
, rpart
, ipart
;
7429 etype
= TREE_TYPE (type
);
7430 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7433 rpart
= native_interpret_expr (etype
, ptr
, size
);
7436 ipart
= native_interpret_expr (etype
, ptr
+size
, size
);
7439 return build_complex (type
, rpart
, ipart
);
7443 /* Subroutine of native_interpret_expr. Interpret the contents of
7444 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7445 If the buffer cannot be interpreted, return NULL_TREE. */
7448 native_interpret_vector (tree type
, const unsigned char *ptr
, int len
)
7454 etype
= TREE_TYPE (type
);
7455 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7456 count
= TYPE_VECTOR_SUBPARTS (type
);
7457 if (size
* count
> len
)
7460 elements
= XALLOCAVEC (tree
, count
);
7461 for (i
= count
- 1; i
>= 0; i
--)
7463 elem
= native_interpret_expr (etype
, ptr
+(i
*size
), size
);
7468 return build_vector (type
, elements
);
7472 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7473 the buffer PTR of length LEN as a constant of type TYPE. For
7474 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7475 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7476 return NULL_TREE. */
7479 native_interpret_expr (tree type
, const unsigned char *ptr
, int len
)
7481 switch (TREE_CODE (type
))
7487 case REFERENCE_TYPE
:
7488 return native_interpret_int (type
, ptr
, len
);
7491 return native_interpret_real (type
, ptr
, len
);
7493 case FIXED_POINT_TYPE
:
7494 return native_interpret_fixed (type
, ptr
, len
);
7497 return native_interpret_complex (type
, ptr
, len
);
7500 return native_interpret_vector (type
, ptr
, len
);
7507 /* Returns true if we can interpret the contents of a native encoding
7511 can_native_interpret_type_p (tree type
)
7513 switch (TREE_CODE (type
))
7519 case REFERENCE_TYPE
:
7520 case FIXED_POINT_TYPE
:
7530 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7531 TYPE at compile-time. If we're unable to perform the conversion
7532 return NULL_TREE. */
7535 fold_view_convert_expr (tree type
, tree expr
)
7537 /* We support up to 512-bit values (for V8DFmode). */
7538 unsigned char buffer
[64];
7541 /* Check that the host and target are sane. */
7542 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8)
7545 len
= native_encode_expr (expr
, buffer
, sizeof (buffer
));
7549 return native_interpret_expr (type
, buffer
, len
);
7552 /* Build an expression for the address of T. Folds away INDIRECT_REF
7553 to avoid confusing the gimplify process. */
7556 build_fold_addr_expr_with_type_loc (location_t loc
, tree t
, tree ptrtype
)
7558 /* The size of the object is not relevant when talking about its address. */
7559 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
7560 t
= TREE_OPERAND (t
, 0);
7562 if (TREE_CODE (t
) == INDIRECT_REF
)
7564 t
= TREE_OPERAND (t
, 0);
7566 if (TREE_TYPE (t
) != ptrtype
)
7567 t
= build1_loc (loc
, NOP_EXPR
, ptrtype
, t
);
7569 else if (TREE_CODE (t
) == MEM_REF
7570 && integer_zerop (TREE_OPERAND (t
, 1)))
7571 return TREE_OPERAND (t
, 0);
7572 else if (TREE_CODE (t
) == MEM_REF
7573 && TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
)
7574 return fold_binary (POINTER_PLUS_EXPR
, ptrtype
,
7575 TREE_OPERAND (t
, 0),
7576 convert_to_ptrofftype (TREE_OPERAND (t
, 1)));
7577 else if (TREE_CODE (t
) == VIEW_CONVERT_EXPR
)
7579 t
= build_fold_addr_expr_loc (loc
, TREE_OPERAND (t
, 0));
7581 if (TREE_TYPE (t
) != ptrtype
)
7582 t
= fold_convert_loc (loc
, ptrtype
, t
);
7585 t
= build1_loc (loc
, ADDR_EXPR
, ptrtype
, t
);
7590 /* Build an expression for the address of T. */
7593 build_fold_addr_expr_loc (location_t loc
, tree t
)
7595 tree ptrtype
= build_pointer_type (TREE_TYPE (t
));
7597 return build_fold_addr_expr_with_type_loc (loc
, t
, ptrtype
);
7600 /* Fold a unary expression of code CODE and type TYPE with operand
7601 OP0. Return the folded expression if folding is successful.
7602 Otherwise, return NULL_TREE. */
7605 fold_unary_loc (location_t loc
, enum tree_code code
, tree type
, tree op0
)
7609 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
7611 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
7612 && TREE_CODE_LENGTH (code
) == 1);
7617 if (CONVERT_EXPR_CODE_P (code
)
7618 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
|| code
== NEGATE_EXPR
)
7620 /* Don't use STRIP_NOPS, because signedness of argument type
7622 STRIP_SIGN_NOPS (arg0
);
7626 /* Strip any conversions that don't change the mode. This
7627 is safe for every expression, except for a comparison
7628 expression because its signedness is derived from its
7631 Note that this is done as an internal manipulation within
7632 the constant folder, in order to find the simplest
7633 representation of the arguments so that their form can be
7634 studied. In any cases, the appropriate type conversions
7635 should be put back in the tree that will get out of the
7640 if (CONSTANT_CLASS_P (arg0
))
7642 tree tem
= const_unop (code
, type
, arg0
);
7645 if (TREE_TYPE (tem
) != type
)
7646 tem
= fold_convert_loc (loc
, type
, tem
);
7652 tem
= generic_simplify (loc
, code
, type
, op0
);
7656 if (TREE_CODE_CLASS (code
) == tcc_unary
)
7658 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7659 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7660 fold_build1_loc (loc
, code
, type
,
7661 fold_convert_loc (loc
, TREE_TYPE (op0
),
7662 TREE_OPERAND (arg0
, 1))));
7663 else if (TREE_CODE (arg0
) == COND_EXPR
)
7665 tree arg01
= TREE_OPERAND (arg0
, 1);
7666 tree arg02
= TREE_OPERAND (arg0
, 2);
7667 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
7668 arg01
= fold_build1_loc (loc
, code
, type
,
7669 fold_convert_loc (loc
,
7670 TREE_TYPE (op0
), arg01
));
7671 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
7672 arg02
= fold_build1_loc (loc
, code
, type
,
7673 fold_convert_loc (loc
,
7674 TREE_TYPE (op0
), arg02
));
7675 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7678 /* If this was a conversion, and all we did was to move into
7679 inside the COND_EXPR, bring it back out. But leave it if
7680 it is a conversion from integer to integer and the
7681 result precision is no wider than a word since such a
7682 conversion is cheap and may be optimized away by combine,
7683 while it couldn't if it were outside the COND_EXPR. Then return
7684 so we don't get into an infinite recursion loop taking the
7685 conversion out and then back in. */
7687 if ((CONVERT_EXPR_CODE_P (code
)
7688 || code
== NON_LVALUE_EXPR
)
7689 && TREE_CODE (tem
) == COND_EXPR
7690 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
7691 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
7692 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
7693 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
7694 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
7695 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
7696 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7698 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
7699 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
7700 || flag_syntax_only
))
7701 tem
= build1_loc (loc
, code
, type
,
7703 TREE_TYPE (TREE_OPERAND
7704 (TREE_OPERAND (tem
, 1), 0)),
7705 TREE_OPERAND (tem
, 0),
7706 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
7707 TREE_OPERAND (TREE_OPERAND (tem
, 2),
7715 case NON_LVALUE_EXPR
:
7716 if (!maybe_lvalue_p (op0
))
7717 return fold_convert_loc (loc
, type
, op0
);
7722 case FIX_TRUNC_EXPR
:
7723 if (COMPARISON_CLASS_P (op0
))
7725 /* If we have (type) (a CMP b) and type is an integral type, return
7726 new expression involving the new type. Canonicalize
7727 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7729 Do not fold the result as that would not simplify further, also
7730 folding again results in recursions. */
7731 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7732 return build2_loc (loc
, TREE_CODE (op0
), type
,
7733 TREE_OPERAND (op0
, 0),
7734 TREE_OPERAND (op0
, 1));
7735 else if (!INTEGRAL_TYPE_P (type
) && !VOID_TYPE_P (type
)
7736 && TREE_CODE (type
) != VECTOR_TYPE
)
7737 return build3_loc (loc
, COND_EXPR
, type
, op0
,
7738 constant_boolean_node (true, type
),
7739 constant_boolean_node (false, type
));
7742 /* Handle (T *)&A.B.C for A being of type T and B and C
7743 living at offset zero. This occurs frequently in
7744 C++ upcasting and then accessing the base. */
7745 if (TREE_CODE (op0
) == ADDR_EXPR
7746 && POINTER_TYPE_P (type
)
7747 && handled_component_p (TREE_OPERAND (op0
, 0)))
7749 HOST_WIDE_INT bitsize
, bitpos
;
7752 int unsignedp
, reversep
, volatilep
;
7754 = get_inner_reference (TREE_OPERAND (op0
, 0), &bitsize
, &bitpos
,
7755 &offset
, &mode
, &unsignedp
, &reversep
,
7757 /* If the reference was to a (constant) zero offset, we can use
7758 the address of the base if it has the same base type
7759 as the result type and the pointer type is unqualified. */
7760 if (! offset
&& bitpos
== 0
7761 && (TYPE_MAIN_VARIANT (TREE_TYPE (type
))
7762 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
7763 && TYPE_QUALS (type
) == TYPE_UNQUALIFIED
)
7764 return fold_convert_loc (loc
, type
,
7765 build_fold_addr_expr_loc (loc
, base
));
7768 if (TREE_CODE (op0
) == MODIFY_EXPR
7769 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
7770 /* Detect assigning a bitfield. */
7771 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
7773 (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
7775 /* Don't leave an assignment inside a conversion
7776 unless assigning a bitfield. */
7777 tem
= fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 1));
7778 /* First do the assignment, then return converted constant. */
7779 tem
= build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
7780 TREE_NO_WARNING (tem
) = 1;
7781 TREE_USED (tem
) = 1;
7785 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7786 constants (if x has signed type, the sign bit cannot be set
7787 in c). This folds extension into the BIT_AND_EXPR.
7788 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7789 very likely don't have maximal range for their precision and this
7790 transformation effectively doesn't preserve non-maximal ranges. */
7791 if (TREE_CODE (type
) == INTEGER_TYPE
7792 && TREE_CODE (op0
) == BIT_AND_EXPR
7793 && TREE_CODE (TREE_OPERAND (op0
, 1)) == INTEGER_CST
)
7795 tree and_expr
= op0
;
7796 tree and0
= TREE_OPERAND (and_expr
, 0);
7797 tree and1
= TREE_OPERAND (and_expr
, 1);
7800 if (TYPE_UNSIGNED (TREE_TYPE (and_expr
))
7801 || (TYPE_PRECISION (type
)
7802 <= TYPE_PRECISION (TREE_TYPE (and_expr
))))
7804 else if (TYPE_PRECISION (TREE_TYPE (and1
))
7805 <= HOST_BITS_PER_WIDE_INT
7806 && tree_fits_uhwi_p (and1
))
7808 unsigned HOST_WIDE_INT cst
;
7810 cst
= tree_to_uhwi (and1
);
7811 cst
&= HOST_WIDE_INT_M1U
7812 << (TYPE_PRECISION (TREE_TYPE (and1
)) - 1);
7813 change
= (cst
== 0);
7815 && !flag_syntax_only
7816 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0
)))
7819 tree uns
= unsigned_type_for (TREE_TYPE (and0
));
7820 and0
= fold_convert_loc (loc
, uns
, and0
);
7821 and1
= fold_convert_loc (loc
, uns
, and1
);
7826 tem
= force_fit_type (type
, wi::to_widest (and1
), 0,
7827 TREE_OVERFLOW (and1
));
7828 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
7829 fold_convert_loc (loc
, type
, and0
), tem
);
7833 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7834 cast (T1)X will fold away. We assume that this happens when X itself
7836 if (POINTER_TYPE_P (type
)
7837 && TREE_CODE (arg0
) == POINTER_PLUS_EXPR
7838 && CONVERT_EXPR_P (TREE_OPERAND (arg0
, 0)))
7840 tree arg00
= TREE_OPERAND (arg0
, 0);
7841 tree arg01
= TREE_OPERAND (arg0
, 1);
7843 return fold_build_pointer_plus_loc
7844 (loc
, fold_convert_loc (loc
, type
, arg00
), arg01
);
7847 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7848 of the same precision, and X is an integer type not narrower than
7849 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7850 if (INTEGRAL_TYPE_P (type
)
7851 && TREE_CODE (op0
) == BIT_NOT_EXPR
7852 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7853 && CONVERT_EXPR_P (TREE_OPERAND (op0
, 0))
7854 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
7856 tem
= TREE_OPERAND (TREE_OPERAND (op0
, 0), 0);
7857 if (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7858 && TYPE_PRECISION (type
) <= TYPE_PRECISION (TREE_TYPE (tem
)))
7859 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
7860 fold_convert_loc (loc
, type
, tem
));
7863 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7864 type of X and Y (integer types only). */
7865 if (INTEGRAL_TYPE_P (type
)
7866 && TREE_CODE (op0
) == MULT_EXPR
7867 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7868 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
7870 /* Be careful not to introduce new overflows. */
7872 if (TYPE_OVERFLOW_WRAPS (type
))
7875 mult_type
= unsigned_type_for (type
);
7877 if (TYPE_PRECISION (mult_type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
7879 tem
= fold_build2_loc (loc
, MULT_EXPR
, mult_type
,
7880 fold_convert_loc (loc
, mult_type
,
7881 TREE_OPERAND (op0
, 0)),
7882 fold_convert_loc (loc
, mult_type
,
7883 TREE_OPERAND (op0
, 1)));
7884 return fold_convert_loc (loc
, type
, tem
);
7890 case VIEW_CONVERT_EXPR
:
7891 if (TREE_CODE (op0
) == MEM_REF
)
7893 tem
= fold_build2_loc (loc
, MEM_REF
, type
,
7894 TREE_OPERAND (op0
, 0), TREE_OPERAND (op0
, 1));
7895 REF_REVERSE_STORAGE_ORDER (tem
) = REF_REVERSE_STORAGE_ORDER (op0
);
7902 tem
= fold_negate_expr (loc
, arg0
);
7904 return fold_convert_loc (loc
, type
, tem
);
7908 /* Convert fabs((double)float) into (double)fabsf(float). */
7909 if (TREE_CODE (arg0
) == NOP_EXPR
7910 && TREE_CODE (type
) == REAL_TYPE
)
7912 tree targ0
= strip_float_extensions (arg0
);
7914 return fold_convert_loc (loc
, type
,
7915 fold_build1_loc (loc
, ABS_EXPR
,
7922 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7923 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
7924 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
7925 fold_convert_loc (loc
, type
,
7926 TREE_OPERAND (arg0
, 0)))))
7927 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, tem
,
7928 fold_convert_loc (loc
, type
,
7929 TREE_OPERAND (arg0
, 1)));
7930 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
7931 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
7932 fold_convert_loc (loc
, type
,
7933 TREE_OPERAND (arg0
, 1)))))
7934 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
7935 fold_convert_loc (loc
, type
,
7936 TREE_OPERAND (arg0
, 0)), tem
);
7940 case TRUTH_NOT_EXPR
:
7941 /* Note that the operand of this must be an int
7942 and its values must be 0 or 1.
7943 ("true" is a fixed value perhaps depending on the language,
7944 but we don't handle values other than 1 correctly yet.) */
7945 tem
= fold_truth_not_expr (loc
, arg0
);
7948 return fold_convert_loc (loc
, type
, tem
);
7951 /* Fold *&X to X if X is an lvalue. */
7952 if (TREE_CODE (op0
) == ADDR_EXPR
)
7954 tree op00
= TREE_OPERAND (op0
, 0);
7955 if ((TREE_CODE (op00
) == VAR_DECL
7956 || TREE_CODE (op00
) == PARM_DECL
7957 || TREE_CODE (op00
) == RESULT_DECL
)
7958 && !TREE_READONLY (op00
))
7965 } /* switch (code) */
7969 /* If the operation was a conversion do _not_ mark a resulting constant
7970 with TREE_OVERFLOW if the original constant was not. These conversions
7971 have implementation defined behavior and retaining the TREE_OVERFLOW
7972 flag here would confuse later passes such as VRP. */
7974 fold_unary_ignore_overflow_loc (location_t loc
, enum tree_code code
,
7975 tree type
, tree op0
)
7977 tree res
= fold_unary_loc (loc
, code
, type
, op0
);
7979 && TREE_CODE (res
) == INTEGER_CST
7980 && TREE_CODE (op0
) == INTEGER_CST
7981 && CONVERT_EXPR_CODE_P (code
))
7982 TREE_OVERFLOW (res
) = TREE_OVERFLOW (op0
);
7987 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
7988 operands OP0 and OP1. LOC is the location of the resulting expression.
7989 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
7990 Return the folded expression if folding is successful. Otherwise,
7991 return NULL_TREE. */
7993 fold_truth_andor (location_t loc
, enum tree_code code
, tree type
,
7994 tree arg0
, tree arg1
, tree op0
, tree op1
)
7998 /* We only do these simplifications if we are optimizing. */
8002 /* Check for things like (A || B) && (A || C). We can convert this
8003 to A || (B && C). Note that either operator can be any of the four
8004 truth and/or operations and the transformation will still be
8005 valid. Also note that we only care about order for the
8006 ANDIF and ORIF operators. If B contains side effects, this
8007 might change the truth-value of A. */
8008 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8009 && (TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
8010 || TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
8011 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
8012 || TREE_CODE (arg0
) == TRUTH_OR_EXPR
)
8013 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0
, 1)))
8015 tree a00
= TREE_OPERAND (arg0
, 0);
8016 tree a01
= TREE_OPERAND (arg0
, 1);
8017 tree a10
= TREE_OPERAND (arg1
, 0);
8018 tree a11
= TREE_OPERAND (arg1
, 1);
8019 int commutative
= ((TREE_CODE (arg0
) == TRUTH_OR_EXPR
8020 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
)
8021 && (code
== TRUTH_AND_EXPR
8022 || code
== TRUTH_OR_EXPR
));
8024 if (operand_equal_p (a00
, a10
, 0))
8025 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
8026 fold_build2_loc (loc
, code
, type
, a01
, a11
));
8027 else if (commutative
&& operand_equal_p (a00
, a11
, 0))
8028 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
8029 fold_build2_loc (loc
, code
, type
, a01
, a10
));
8030 else if (commutative
&& operand_equal_p (a01
, a10
, 0))
8031 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a01
,
8032 fold_build2_loc (loc
, code
, type
, a00
, a11
));
8034 /* This case if tricky because we must either have commutative
8035 operators or else A10 must not have side-effects. */
8037 else if ((commutative
|| ! TREE_SIDE_EFFECTS (a10
))
8038 && operand_equal_p (a01
, a11
, 0))
8039 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
8040 fold_build2_loc (loc
, code
, type
, a00
, a10
),
8044 /* See if we can build a range comparison. */
8045 if (0 != (tem
= fold_range_test (loc
, code
, type
, op0
, op1
)))
8048 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
)
8049 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
))
8051 tem
= merge_truthop_with_opposite_arm (loc
, arg0
, arg1
, true);
8053 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
8056 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ORIF_EXPR
)
8057 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ANDIF_EXPR
))
8059 tem
= merge_truthop_with_opposite_arm (loc
, arg1
, arg0
, false);
8061 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
8064 /* Check for the possibility of merging component references. If our
8065 lhs is another similar operation, try to merge its rhs with our
8066 rhs. Then try to merge our lhs and rhs. */
8067 if (TREE_CODE (arg0
) == code
8068 && 0 != (tem
= fold_truth_andor_1 (loc
, code
, type
,
8069 TREE_OPERAND (arg0
, 1), arg1
)))
8070 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
8072 if ((tem
= fold_truth_andor_1 (loc
, code
, type
, arg0
, arg1
)) != 0)
8075 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8076 && (code
== TRUTH_AND_EXPR
8077 || code
== TRUTH_ANDIF_EXPR
8078 || code
== TRUTH_OR_EXPR
8079 || code
== TRUTH_ORIF_EXPR
))
8081 enum tree_code ncode
, icode
;
8083 ncode
= (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_AND_EXPR
)
8084 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
;
8085 icode
= ncode
== TRUTH_AND_EXPR
? TRUTH_ANDIF_EXPR
: TRUTH_ORIF_EXPR
;
8087 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8088 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8089 We don't want to pack more than two leafs to a non-IF AND/OR
8091 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8092 equal to IF-CODE, then we don't want to add right-hand operand.
8093 If the inner right-hand side of left-hand operand has
8094 side-effects, or isn't simple, then we can't add to it,
8095 as otherwise we might destroy if-sequence. */
8096 if (TREE_CODE (arg0
) == icode
8097 && simple_operand_p_2 (arg1
)
8098 /* Needed for sequence points to handle trappings, and
8100 && simple_operand_p_2 (TREE_OPERAND (arg0
, 1)))
8102 tem
= fold_build2_loc (loc
, ncode
, type
, TREE_OPERAND (arg0
, 1),
8104 return fold_build2_loc (loc
, icode
, type
, TREE_OPERAND (arg0
, 0),
8107 /* Same as abouve but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8108 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8109 else if (TREE_CODE (arg1
) == icode
8110 && simple_operand_p_2 (arg0
)
8111 /* Needed for sequence points to handle trappings, and
8113 && simple_operand_p_2 (TREE_OPERAND (arg1
, 0)))
8115 tem
= fold_build2_loc (loc
, ncode
, type
,
8116 arg0
, TREE_OPERAND (arg1
, 0));
8117 return fold_build2_loc (loc
, icode
, type
, tem
,
8118 TREE_OPERAND (arg1
, 1));
8120 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8122 For sequence point consistancy, we need to check for trapping,
8123 and side-effects. */
8124 else if (code
== icode
&& simple_operand_p_2 (arg0
)
8125 && simple_operand_p_2 (arg1
))
8126 return fold_build2_loc (loc
, ncode
, type
, arg0
, arg1
);
8132 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8133 by changing CODE to reduce the magnitude of constants involved in
8134 ARG0 of the comparison.
8135 Returns a canonicalized comparison tree if a simplification was
8136 possible, otherwise returns NULL_TREE.
8137 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8138 valid if signed overflow is undefined. */
8141 maybe_canonicalize_comparison_1 (location_t loc
, enum tree_code code
, tree type
,
8142 tree arg0
, tree arg1
,
8143 bool *strict_overflow_p
)
8145 enum tree_code code0
= TREE_CODE (arg0
);
8146 tree t
, cst0
= NULL_TREE
;
8149 /* Match A +- CST code arg1. We can change this only if overflow
8151 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
8152 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
)))
8153 /* In principle pointers also have undefined overflow behavior,
8154 but that causes problems elsewhere. */
8155 && !POINTER_TYPE_P (TREE_TYPE (arg0
))
8156 && (code0
== MINUS_EXPR
8157 || code0
== PLUS_EXPR
)
8158 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
))
8161 /* Identify the constant in arg0 and its sign. */
8162 cst0
= TREE_OPERAND (arg0
, 1);
8163 sgn0
= tree_int_cst_sgn (cst0
);
8165 /* Overflowed constants and zero will cause problems. */
8166 if (integer_zerop (cst0
)
8167 || TREE_OVERFLOW (cst0
))
8170 /* See if we can reduce the magnitude of the constant in
8171 arg0 by changing the comparison code. */
8172 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8174 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
8176 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8177 else if (code
== GT_EXPR
8178 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
8180 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8181 else if (code
== LE_EXPR
8182 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
8184 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8185 else if (code
== GE_EXPR
8186 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
8190 *strict_overflow_p
= true;
8192 /* Now build the constant reduced in magnitude. But not if that
8193 would produce one outside of its types range. */
8194 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0
))
8196 && TYPE_MIN_VALUE (TREE_TYPE (cst0
))
8197 && tree_int_cst_equal (cst0
, TYPE_MIN_VALUE (TREE_TYPE (cst0
))))
8199 && TYPE_MAX_VALUE (TREE_TYPE (cst0
))
8200 && tree_int_cst_equal (cst0
, TYPE_MAX_VALUE (TREE_TYPE (cst0
))))))
8203 t
= int_const_binop (sgn0
== -1 ? PLUS_EXPR
: MINUS_EXPR
,
8204 cst0
, build_int_cst (TREE_TYPE (cst0
), 1));
8205 t
= fold_build2_loc (loc
, code0
, TREE_TYPE (arg0
), TREE_OPERAND (arg0
, 0), t
);
8206 t
= fold_convert (TREE_TYPE (arg1
), t
);
8208 return fold_build2_loc (loc
, code
, type
, t
, arg1
);
8211 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8212 overflow further. Try to decrease the magnitude of constants involved
8213 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8214 and put sole constants at the second argument position.
8215 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8218 maybe_canonicalize_comparison (location_t loc
, enum tree_code code
, tree type
,
8219 tree arg0
, tree arg1
)
8222 bool strict_overflow_p
;
8223 const char * const warnmsg
= G_("assuming signed overflow does not occur "
8224 "when reducing constant in comparison");
8226 /* Try canonicalization by simplifying arg0. */
8227 strict_overflow_p
= false;
8228 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg0
, arg1
,
8229 &strict_overflow_p
);
8232 if (strict_overflow_p
)
8233 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
8237 /* Try canonicalization by simplifying arg1 using the swapped
8239 code
= swap_tree_comparison (code
);
8240 strict_overflow_p
= false;
8241 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg1
, arg0
,
8242 &strict_overflow_p
);
8243 if (t
&& strict_overflow_p
)
8244 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
8248 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8249 space. This is used to avoid issuing overflow warnings for
8250 expressions like &p->x which can not wrap. */
8253 pointer_may_wrap_p (tree base
, tree offset
, HOST_WIDE_INT bitpos
)
8255 if (!POINTER_TYPE_P (TREE_TYPE (base
)))
8262 int precision
= TYPE_PRECISION (TREE_TYPE (base
));
8263 if (offset
== NULL_TREE
)
8264 wi_offset
= wi::zero (precision
);
8265 else if (TREE_CODE (offset
) != INTEGER_CST
|| TREE_OVERFLOW (offset
))
8271 wide_int units
= wi::shwi (bitpos
/ BITS_PER_UNIT
, precision
);
8272 wide_int total
= wi::add (wi_offset
, units
, UNSIGNED
, &overflow
);
8276 if (!wi::fits_uhwi_p (total
))
8279 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (TREE_TYPE (base
)));
8283 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8285 if (TREE_CODE (base
) == ADDR_EXPR
)
8287 HOST_WIDE_INT base_size
;
8289 base_size
= int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base
, 0)));
8290 if (base_size
> 0 && size
< base_size
)
8294 return total
.to_uhwi () > (unsigned HOST_WIDE_INT
) size
;
8297 /* Subroutine of fold_binary. This routine performs all of the
8298 transformations that are common to the equality/inequality
8299 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8300 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8301 fold_binary should call fold_binary. Fold a comparison with
8302 tree code CODE and type TYPE with operands OP0 and OP1. Return
8303 the folded comparison or NULL_TREE. */
8306 fold_comparison (location_t loc
, enum tree_code code
, tree type
,
8309 const bool equality_code
= (code
== EQ_EXPR
|| code
== NE_EXPR
);
8310 tree arg0
, arg1
, tem
;
8315 STRIP_SIGN_NOPS (arg0
);
8316 STRIP_SIGN_NOPS (arg1
);
8318 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8319 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8321 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
8322 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))))
8323 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8324 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
8325 && TREE_CODE (arg1
) == INTEGER_CST
8326 && !TREE_OVERFLOW (arg1
))
8328 const enum tree_code
8329 reverse_op
= TREE_CODE (arg0
) == PLUS_EXPR
? MINUS_EXPR
: PLUS_EXPR
;
8330 tree const1
= TREE_OPERAND (arg0
, 1);
8331 tree const2
= fold_convert_loc (loc
, TREE_TYPE (const1
), arg1
);
8332 tree variable
= TREE_OPERAND (arg0
, 0);
8333 tree new_const
= int_const_binop (reverse_op
, const2
, const1
);
8335 /* If the constant operation overflowed this can be
8336 simplified as a comparison against INT_MAX/INT_MIN. */
8337 if (TREE_OVERFLOW (new_const
)
8338 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
8340 int const1_sgn
= tree_int_cst_sgn (const1
);
8341 enum tree_code code2
= code
;
8343 /* Get the sign of the constant on the lhs if the
8344 operation were VARIABLE + CONST1. */
8345 if (TREE_CODE (arg0
) == MINUS_EXPR
)
8346 const1_sgn
= -const1_sgn
;
8348 /* The sign of the constant determines if we overflowed
8349 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8350 Canonicalize to the INT_MIN overflow by swapping the comparison
8352 if (const1_sgn
== -1)
8353 code2
= swap_tree_comparison (code
);
8355 /* We now can look at the canonicalized case
8356 VARIABLE + 1 CODE2 INT_MIN
8357 and decide on the result. */
8364 omit_one_operand_loc (loc
, type
, boolean_false_node
, variable
);
8370 omit_one_operand_loc (loc
, type
, boolean_true_node
, variable
);
8379 fold_overflow_warning ("assuming signed overflow does not occur "
8380 "when changing X +- C1 cmp C2 to "
8382 WARN_STRICT_OVERFLOW_COMPARISON
);
8383 return fold_build2_loc (loc
, code
, type
, variable
, new_const
);
8387 /* For comparisons of pointers we can decompose it to a compile time
8388 comparison of the base objects and the offsets into the object.
8389 This requires at least one operand being an ADDR_EXPR or a
8390 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8391 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
8392 && (TREE_CODE (arg0
) == ADDR_EXPR
8393 || TREE_CODE (arg1
) == ADDR_EXPR
8394 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
8395 || TREE_CODE (arg1
) == POINTER_PLUS_EXPR
))
8397 tree base0
, base1
, offset0
= NULL_TREE
, offset1
= NULL_TREE
;
8398 HOST_WIDE_INT bitsize
, bitpos0
= 0, bitpos1
= 0;
8400 int volatilep
, reversep
, unsignedp
;
8401 bool indirect_base0
= false, indirect_base1
= false;
8403 /* Get base and offset for the access. Strip ADDR_EXPR for
8404 get_inner_reference, but put it back by stripping INDIRECT_REF
8405 off the base object if possible. indirect_baseN will be true
8406 if baseN is not an address but refers to the object itself. */
8408 if (TREE_CODE (arg0
) == ADDR_EXPR
)
8411 = get_inner_reference (TREE_OPERAND (arg0
, 0),
8412 &bitsize
, &bitpos0
, &offset0
, &mode
,
8413 &unsignedp
, &reversep
, &volatilep
, false);
8414 if (TREE_CODE (base0
) == INDIRECT_REF
)
8415 base0
= TREE_OPERAND (base0
, 0);
8417 indirect_base0
= true;
8419 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
8421 base0
= TREE_OPERAND (arg0
, 0);
8422 STRIP_SIGN_NOPS (base0
);
8423 if (TREE_CODE (base0
) == ADDR_EXPR
)
8426 = get_inner_reference (TREE_OPERAND (base0
, 0),
8427 &bitsize
, &bitpos0
, &offset0
, &mode
,
8428 &unsignedp
, &reversep
, &volatilep
,
8430 if (TREE_CODE (base0
) == INDIRECT_REF
)
8431 base0
= TREE_OPERAND (base0
, 0);
8433 indirect_base0
= true;
8435 if (offset0
== NULL_TREE
|| integer_zerop (offset0
))
8436 offset0
= TREE_OPERAND (arg0
, 1);
8438 offset0
= size_binop (PLUS_EXPR
, offset0
,
8439 TREE_OPERAND (arg0
, 1));
8440 if (TREE_CODE (offset0
) == INTEGER_CST
)
8442 offset_int tem
= wi::sext (wi::to_offset (offset0
),
8443 TYPE_PRECISION (sizetype
));
8444 tem
= wi::lshift (tem
, LOG2_BITS_PER_UNIT
);
8446 if (wi::fits_shwi_p (tem
))
8448 bitpos0
= tem
.to_shwi ();
8449 offset0
= NULL_TREE
;
8455 if (TREE_CODE (arg1
) == ADDR_EXPR
)
8458 = get_inner_reference (TREE_OPERAND (arg1
, 0),
8459 &bitsize
, &bitpos1
, &offset1
, &mode
,
8460 &unsignedp
, &reversep
, &volatilep
, false);
8461 if (TREE_CODE (base1
) == INDIRECT_REF
)
8462 base1
= TREE_OPERAND (base1
, 0);
8464 indirect_base1
= true;
8466 else if (TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
8468 base1
= TREE_OPERAND (arg1
, 0);
8469 STRIP_SIGN_NOPS (base1
);
8470 if (TREE_CODE (base1
) == ADDR_EXPR
)
8473 = get_inner_reference (TREE_OPERAND (base1
, 0),
8474 &bitsize
, &bitpos1
, &offset1
, &mode
,
8475 &unsignedp
, &reversep
, &volatilep
,
8477 if (TREE_CODE (base1
) == INDIRECT_REF
)
8478 base1
= TREE_OPERAND (base1
, 0);
8480 indirect_base1
= true;
8482 if (offset1
== NULL_TREE
|| integer_zerop (offset1
))
8483 offset1
= TREE_OPERAND (arg1
, 1);
8485 offset1
= size_binop (PLUS_EXPR
, offset1
,
8486 TREE_OPERAND (arg1
, 1));
8487 if (TREE_CODE (offset1
) == INTEGER_CST
)
8489 offset_int tem
= wi::sext (wi::to_offset (offset1
),
8490 TYPE_PRECISION (sizetype
));
8491 tem
= wi::lshift (tem
, LOG2_BITS_PER_UNIT
);
8493 if (wi::fits_shwi_p (tem
))
8495 bitpos1
= tem
.to_shwi ();
8496 offset1
= NULL_TREE
;
8501 /* If we have equivalent bases we might be able to simplify. */
8502 if (indirect_base0
== indirect_base1
8503 && operand_equal_p (base0
, base1
,
8504 indirect_base0
? OEP_ADDRESS_OF
: 0))
8506 /* We can fold this expression to a constant if the non-constant
8507 offset parts are equal. */
8508 if ((offset0
== offset1
8509 || (offset0
&& offset1
8510 && operand_equal_p (offset0
, offset1
, 0)))
8513 || (indirect_base0
&& DECL_P (base0
))
8514 || POINTER_TYPE_OVERFLOW_UNDEFINED
))
8518 && bitpos0
!= bitpos1
8519 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
8520 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
8521 fold_overflow_warning (("assuming pointer wraparound does not "
8522 "occur when comparing P +- C1 with "
8524 WARN_STRICT_OVERFLOW_CONDITIONAL
);
8529 return constant_boolean_node (bitpos0
== bitpos1
, type
);
8531 return constant_boolean_node (bitpos0
!= bitpos1
, type
);
8533 return constant_boolean_node (bitpos0
< bitpos1
, type
);
8535 return constant_boolean_node (bitpos0
<= bitpos1
, type
);
8537 return constant_boolean_node (bitpos0
>= bitpos1
, type
);
8539 return constant_boolean_node (bitpos0
> bitpos1
, type
);
8543 /* We can simplify the comparison to a comparison of the variable
8544 offset parts if the constant offset parts are equal.
8545 Be careful to use signed sizetype here because otherwise we
8546 mess with array offsets in the wrong way. This is possible
8547 because pointer arithmetic is restricted to retain within an
8548 object and overflow on pointer differences is undefined as of
8549 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8550 else if (bitpos0
== bitpos1
8552 || (indirect_base0
&& DECL_P (base0
))
8553 || POINTER_TYPE_OVERFLOW_UNDEFINED
))
8555 /* By converting to signed sizetype we cover middle-end pointer
8556 arithmetic which operates on unsigned pointer types of size
8557 type size and ARRAY_REF offsets which are properly sign or
8558 zero extended from their type in case it is narrower than
8560 if (offset0
== NULL_TREE
)
8561 offset0
= build_int_cst (ssizetype
, 0);
8563 offset0
= fold_convert_loc (loc
, ssizetype
, offset0
);
8564 if (offset1
== NULL_TREE
)
8565 offset1
= build_int_cst (ssizetype
, 0);
8567 offset1
= fold_convert_loc (loc
, ssizetype
, offset1
);
8570 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
8571 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
8572 fold_overflow_warning (("assuming pointer wraparound does not "
8573 "occur when comparing P +- C1 with "
8575 WARN_STRICT_OVERFLOW_COMPARISON
);
8577 return fold_build2_loc (loc
, code
, type
, offset0
, offset1
);
8580 /* For equal offsets we can simplify to a comparison of the
8582 else if (bitpos0
== bitpos1
8584 ? base0
!= TREE_OPERAND (arg0
, 0) : base0
!= arg0
)
8586 ? base1
!= TREE_OPERAND (arg1
, 0) : base1
!= arg1
)
8587 && ((offset0
== offset1
)
8588 || (offset0
&& offset1
8589 && operand_equal_p (offset0
, offset1
, 0))))
8592 base0
= build_fold_addr_expr_loc (loc
, base0
);
8594 base1
= build_fold_addr_expr_loc (loc
, base1
);
8595 return fold_build2_loc (loc
, code
, type
, base0
, base1
);
8599 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8600 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8601 the resulting offset is smaller in absolute value than the
8602 original one and has the same sign. */
8603 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
8604 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
8605 && (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8606 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8607 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
8608 && (TREE_CODE (arg1
) == PLUS_EXPR
|| TREE_CODE (arg1
) == MINUS_EXPR
)
8609 && (TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
8610 && !TREE_OVERFLOW (TREE_OPERAND (arg1
, 1))))
8612 tree const1
= TREE_OPERAND (arg0
, 1);
8613 tree const2
= TREE_OPERAND (arg1
, 1);
8614 tree variable1
= TREE_OPERAND (arg0
, 0);
8615 tree variable2
= TREE_OPERAND (arg1
, 0);
8617 const char * const warnmsg
= G_("assuming signed overflow does not "
8618 "occur when combining constants around "
8621 /* Put the constant on the side where it doesn't overflow and is
8622 of lower absolute value and of same sign than before. */
8623 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8624 ? MINUS_EXPR
: PLUS_EXPR
,
8626 if (!TREE_OVERFLOW (cst
)
8627 && tree_int_cst_compare (const2
, cst
) == tree_int_cst_sgn (const2
)
8628 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const2
))
8630 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
8631 return fold_build2_loc (loc
, code
, type
,
8633 fold_build2_loc (loc
, TREE_CODE (arg1
),
8638 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8639 ? MINUS_EXPR
: PLUS_EXPR
,
8641 if (!TREE_OVERFLOW (cst
)
8642 && tree_int_cst_compare (const1
, cst
) == tree_int_cst_sgn (const1
)
8643 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const1
))
8645 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
8646 return fold_build2_loc (loc
, code
, type
,
8647 fold_build2_loc (loc
, TREE_CODE (arg0
),
8654 tem
= maybe_canonicalize_comparison (loc
, code
, type
, arg0
, arg1
);
8658 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8659 constant, we can simplify it. */
8660 if (TREE_CODE (arg1
) == INTEGER_CST
8661 && (TREE_CODE (arg0
) == MIN_EXPR
8662 || TREE_CODE (arg0
) == MAX_EXPR
)
8663 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
8665 tem
= optimize_minmax_comparison (loc
, code
, type
, op0
, op1
);
8670 /* If we are comparing an expression that just has comparisons
8671 of two integer values, arithmetic expressions of those comparisons,
8672 and constants, we can simplify it. There are only three cases
8673 to check: the two values can either be equal, the first can be
8674 greater, or the second can be greater. Fold the expression for
8675 those three values. Since each value must be 0 or 1, we have
8676 eight possibilities, each of which corresponds to the constant 0
8677 or 1 or one of the six possible comparisons.
8679 This handles common cases like (a > b) == 0 but also handles
8680 expressions like ((x > y) - (y > x)) > 0, which supposedly
8681 occur in macroized code. */
8683 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) != INTEGER_CST
)
8685 tree cval1
= 0, cval2
= 0;
8688 if (twoval_comparison_p (arg0
, &cval1
, &cval2
, &save_p
)
8689 /* Don't handle degenerate cases here; they should already
8690 have been handled anyway. */
8691 && cval1
!= 0 && cval2
!= 0
8692 && ! (TREE_CONSTANT (cval1
) && TREE_CONSTANT (cval2
))
8693 && TREE_TYPE (cval1
) == TREE_TYPE (cval2
)
8694 && INTEGRAL_TYPE_P (TREE_TYPE (cval1
))
8695 && TYPE_MAX_VALUE (TREE_TYPE (cval1
))
8696 && TYPE_MAX_VALUE (TREE_TYPE (cval2
))
8697 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1
)),
8698 TYPE_MAX_VALUE (TREE_TYPE (cval2
)), 0))
8700 tree maxval
= TYPE_MAX_VALUE (TREE_TYPE (cval1
));
8701 tree minval
= TYPE_MIN_VALUE (TREE_TYPE (cval1
));
8703 /* We can't just pass T to eval_subst in case cval1 or cval2
8704 was the same as ARG1. */
8707 = fold_build2_loc (loc
, code
, type
,
8708 eval_subst (loc
, arg0
, cval1
, maxval
,
8712 = fold_build2_loc (loc
, code
, type
,
8713 eval_subst (loc
, arg0
, cval1
, maxval
,
8717 = fold_build2_loc (loc
, code
, type
,
8718 eval_subst (loc
, arg0
, cval1
, minval
,
8722 /* All three of these results should be 0 or 1. Confirm they are.
8723 Then use those values to select the proper code to use. */
8725 if (TREE_CODE (high_result
) == INTEGER_CST
8726 && TREE_CODE (equal_result
) == INTEGER_CST
8727 && TREE_CODE (low_result
) == INTEGER_CST
)
8729 /* Make a 3-bit mask with the high-order bit being the
8730 value for `>', the next for '=', and the low for '<'. */
8731 switch ((integer_onep (high_result
) * 4)
8732 + (integer_onep (equal_result
) * 2)
8733 + integer_onep (low_result
))
8737 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
8758 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
8763 tem
= save_expr (build2 (code
, type
, cval1
, cval2
));
8764 SET_EXPR_LOCATION (tem
, loc
);
8767 return fold_build2_loc (loc
, code
, type
, cval1
, cval2
);
8772 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8773 into a single range test. */
8774 if ((TREE_CODE (arg0
) == TRUNC_DIV_EXPR
8775 || TREE_CODE (arg0
) == EXACT_DIV_EXPR
)
8776 && TREE_CODE (arg1
) == INTEGER_CST
8777 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8778 && !integer_zerop (TREE_OPERAND (arg0
, 1))
8779 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
8780 && !TREE_OVERFLOW (arg1
))
8782 tem
= fold_div_compare (loc
, code
, type
, arg0
, arg1
);
8783 if (tem
!= NULL_TREE
)
8791 /* Subroutine of fold_binary. Optimize complex multiplications of the
8792 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8793 argument EXPR represents the expression "z" of type TYPE. */
8796 fold_mult_zconjz (location_t loc
, tree type
, tree expr
)
8798 tree itype
= TREE_TYPE (type
);
8799 tree rpart
, ipart
, tem
;
8801 if (TREE_CODE (expr
) == COMPLEX_EXPR
)
8803 rpart
= TREE_OPERAND (expr
, 0);
8804 ipart
= TREE_OPERAND (expr
, 1);
8806 else if (TREE_CODE (expr
) == COMPLEX_CST
)
8808 rpart
= TREE_REALPART (expr
);
8809 ipart
= TREE_IMAGPART (expr
);
8813 expr
= save_expr (expr
);
8814 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, itype
, expr
);
8815 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, itype
, expr
);
8818 rpart
= save_expr (rpart
);
8819 ipart
= save_expr (ipart
);
8820 tem
= fold_build2_loc (loc
, PLUS_EXPR
, itype
,
8821 fold_build2_loc (loc
, MULT_EXPR
, itype
, rpart
, rpart
),
8822 fold_build2_loc (loc
, MULT_EXPR
, itype
, ipart
, ipart
));
8823 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, tem
,
8824 build_zero_cst (itype
));
8828 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8829 CONSTRUCTOR ARG into array ELTS and return true if successful. */
8832 vec_cst_ctor_to_array (tree arg
, tree
*elts
)
8834 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg
)), i
;
8836 if (TREE_CODE (arg
) == VECTOR_CST
)
8838 for (i
= 0; i
< VECTOR_CST_NELTS (arg
); ++i
)
8839 elts
[i
] = VECTOR_CST_ELT (arg
, i
);
8841 else if (TREE_CODE (arg
) == CONSTRUCTOR
)
8843 constructor_elt
*elt
;
8845 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg
), i
, elt
)
8846 if (i
>= nelts
|| TREE_CODE (TREE_TYPE (elt
->value
)) == VECTOR_TYPE
)
8849 elts
[i
] = elt
->value
;
8853 for (; i
< nelts
; i
++)
8855 = fold_convert (TREE_TYPE (TREE_TYPE (arg
)), integer_zero_node
);
8859 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8860 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8861 NULL_TREE otherwise. */
8864 fold_vec_perm (tree type
, tree arg0
, tree arg1
, const unsigned char *sel
)
8866 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
8868 bool need_ctor
= false;
8870 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
8871 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
);
8872 if (TREE_TYPE (TREE_TYPE (arg0
)) != TREE_TYPE (type
)
8873 || TREE_TYPE (TREE_TYPE (arg1
)) != TREE_TYPE (type
))
8876 elts
= XALLOCAVEC (tree
, nelts
* 3);
8877 if (!vec_cst_ctor_to_array (arg0
, elts
)
8878 || !vec_cst_ctor_to_array (arg1
, elts
+ nelts
))
8881 for (i
= 0; i
< nelts
; i
++)
8883 if (!CONSTANT_CLASS_P (elts
[sel
[i
]]))
8885 elts
[i
+ 2 * nelts
] = unshare_expr (elts
[sel
[i
]]);
8890 vec
<constructor_elt
, va_gc
> *v
;
8891 vec_alloc (v
, nelts
);
8892 for (i
= 0; i
< nelts
; i
++)
8893 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, elts
[2 * nelts
+ i
]);
8894 return build_constructor (type
, v
);
8897 return build_vector (type
, &elts
[2 * nelts
]);
8900 /* Try to fold a pointer difference of type TYPE two address expressions of
8901 array references AREF0 and AREF1 using location LOC. Return a
8902 simplified expression for the difference or NULL_TREE. */
8905 fold_addr_of_array_ref_difference (location_t loc
, tree type
,
8906 tree aref0
, tree aref1
)
8908 tree base0
= TREE_OPERAND (aref0
, 0);
8909 tree base1
= TREE_OPERAND (aref1
, 0);
8910 tree base_offset
= build_int_cst (type
, 0);
8912 /* If the bases are array references as well, recurse. If the bases
8913 are pointer indirections compute the difference of the pointers.
8914 If the bases are equal, we are set. */
8915 if ((TREE_CODE (base0
) == ARRAY_REF
8916 && TREE_CODE (base1
) == ARRAY_REF
8918 = fold_addr_of_array_ref_difference (loc
, type
, base0
, base1
)))
8919 || (INDIRECT_REF_P (base0
)
8920 && INDIRECT_REF_P (base1
)
8922 = fold_binary_loc (loc
, MINUS_EXPR
, type
,
8923 fold_convert (type
, TREE_OPERAND (base0
, 0)),
8925 TREE_OPERAND (base1
, 0)))))
8926 || operand_equal_p (base0
, base1
, OEP_ADDRESS_OF
))
8928 tree op0
= fold_convert_loc (loc
, type
, TREE_OPERAND (aref0
, 1));
8929 tree op1
= fold_convert_loc (loc
, type
, TREE_OPERAND (aref1
, 1));
8930 tree esz
= fold_convert_loc (loc
, type
, array_ref_element_size (aref0
));
8931 tree diff
= build2 (MINUS_EXPR
, type
, op0
, op1
);
8932 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
8934 fold_build2_loc (loc
, MULT_EXPR
, type
,
8940 /* If the real or vector real constant CST of type TYPE has an exact
8941 inverse, return it, else return NULL. */
8944 exact_inverse (tree type
, tree cst
)
8947 tree unit_type
, *elts
;
8949 unsigned vec_nelts
, i
;
8951 switch (TREE_CODE (cst
))
8954 r
= TREE_REAL_CST (cst
);
8956 if (exact_real_inverse (TYPE_MODE (type
), &r
))
8957 return build_real (type
, r
);
8962 vec_nelts
= VECTOR_CST_NELTS (cst
);
8963 elts
= XALLOCAVEC (tree
, vec_nelts
);
8964 unit_type
= TREE_TYPE (type
);
8965 mode
= TYPE_MODE (unit_type
);
8967 for (i
= 0; i
< vec_nelts
; i
++)
8969 r
= TREE_REAL_CST (VECTOR_CST_ELT (cst
, i
));
8970 if (!exact_real_inverse (mode
, &r
))
8972 elts
[i
] = build_real (unit_type
, r
);
8975 return build_vector (type
, elts
);
8982 /* Mask out the tz least significant bits of X of type TYPE where
8983 tz is the number of trailing zeroes in Y. */
8985 mask_with_tz (tree type
, const wide_int
&x
, const wide_int
&y
)
8987 int tz
= wi::ctz (y
);
8989 return wi::mask (tz
, true, TYPE_PRECISION (type
)) & x
;
8993 /* Return true when T is an address and is known to be nonzero.
8994 For floating point we further ensure that T is not denormal.
8995 Similar logic is present in nonzero_address in rtlanal.h.
8997 If the return value is based on the assumption that signed overflow
8998 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
8999 change *STRICT_OVERFLOW_P. */
9002 tree_expr_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
9004 tree type
= TREE_TYPE (t
);
9005 enum tree_code code
;
9007 /* Doing something useful for floating point would need more work. */
9008 if (!INTEGRAL_TYPE_P (type
) && !POINTER_TYPE_P (type
))
9011 code
= TREE_CODE (t
);
9012 switch (TREE_CODE_CLASS (code
))
9015 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
9018 case tcc_comparison
:
9019 return tree_binary_nonzero_warnv_p (code
, type
,
9020 TREE_OPERAND (t
, 0),
9021 TREE_OPERAND (t
, 1),
9024 case tcc_declaration
:
9026 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
9034 case TRUTH_NOT_EXPR
:
9035 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
9038 case TRUTH_AND_EXPR
:
9040 case TRUTH_XOR_EXPR
:
9041 return tree_binary_nonzero_warnv_p (code
, type
,
9042 TREE_OPERAND (t
, 0),
9043 TREE_OPERAND (t
, 1),
9051 case WITH_SIZE_EXPR
:
9053 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
9058 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
9062 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 0),
9067 tree fndecl
= get_callee_fndecl (t
);
9068 if (!fndecl
) return false;
9069 if (flag_delete_null_pointer_checks
&& !flag_check_new
9070 && DECL_IS_OPERATOR_NEW (fndecl
)
9071 && !TREE_NOTHROW (fndecl
))
9073 if (flag_delete_null_pointer_checks
9074 && lookup_attribute ("returns_nonnull",
9075 TYPE_ATTRIBUTES (TREE_TYPE (fndecl
))))
9077 return alloca_call_p (t
);
9086 /* Return true when T is an address and is known to be nonzero.
9087 Handle warnings about undefined signed overflow. */
9090 tree_expr_nonzero_p (tree t
)
9092 bool ret
, strict_overflow_p
;
9094 strict_overflow_p
= false;
9095 ret
= tree_expr_nonzero_warnv_p (t
, &strict_overflow_p
);
9096 if (strict_overflow_p
)
9097 fold_overflow_warning (("assuming signed overflow does not occur when "
9098 "determining that expression is always "
9100 WARN_STRICT_OVERFLOW_MISC
);
9104 /* Fold a binary expression of code CODE and type TYPE with operands
9105 OP0 and OP1. LOC is the location of the resulting expression.
9106 Return the folded expression if folding is successful. Otherwise,
9107 return NULL_TREE. */
9110 fold_binary_loc (location_t loc
,
9111 enum tree_code code
, tree type
, tree op0
, tree op1
)
9113 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
9114 tree arg0
, arg1
, tem
;
9115 tree t1
= NULL_TREE
;
9116 bool strict_overflow_p
;
9119 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
9120 && TREE_CODE_LENGTH (code
) == 2
9122 && op1
!= NULL_TREE
);
9127 /* Strip any conversions that don't change the mode. This is
9128 safe for every expression, except for a comparison expression
9129 because its signedness is derived from its operands. So, in
9130 the latter case, only strip conversions that don't change the
9131 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9134 Note that this is done as an internal manipulation within the
9135 constant folder, in order to find the simplest representation
9136 of the arguments so that their form can be studied. In any
9137 cases, the appropriate type conversions should be put back in
9138 the tree that will get out of the constant folder. */
9140 if (kind
== tcc_comparison
|| code
== MIN_EXPR
|| code
== MAX_EXPR
)
9142 STRIP_SIGN_NOPS (arg0
);
9143 STRIP_SIGN_NOPS (arg1
);
9151 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9152 constant but we can't do arithmetic on them. */
9153 if (CONSTANT_CLASS_P (arg0
) && CONSTANT_CLASS_P (arg1
))
9155 tem
= const_binop (code
, type
, arg0
, arg1
);
9156 if (tem
!= NULL_TREE
)
9158 if (TREE_TYPE (tem
) != type
)
9159 tem
= fold_convert_loc (loc
, type
, tem
);
9164 /* If this is a commutative operation, and ARG0 is a constant, move it
9165 to ARG1 to reduce the number of tests below. */
9166 if (commutative_tree_code (code
)
9167 && tree_swap_operands_p (arg0
, arg1
, true))
9168 return fold_build2_loc (loc
, code
, type
, op1
, op0
);
9170 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9171 to ARG1 to reduce the number of tests below. */
9172 if (kind
== tcc_comparison
9173 && tree_swap_operands_p (arg0
, arg1
, true))
9174 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, op1
, op0
);
9176 tem
= generic_simplify (loc
, code
, type
, op0
, op1
);
9180 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9182 First check for cases where an arithmetic operation is applied to a
9183 compound, conditional, or comparison operation. Push the arithmetic
9184 operation inside the compound or conditional to see if any folding
9185 can then be done. Convert comparison to conditional for this purpose.
9186 The also optimizes non-constant cases that used to be done in
9189 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9190 one of the operands is a comparison and the other is a comparison, a
9191 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9192 code below would make the expression more complex. Change it to a
9193 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9194 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9196 if ((code
== BIT_AND_EXPR
|| code
== BIT_IOR_EXPR
9197 || code
== EQ_EXPR
|| code
== NE_EXPR
)
9198 && TREE_CODE (type
) != VECTOR_TYPE
9199 && ((truth_value_p (TREE_CODE (arg0
))
9200 && (truth_value_p (TREE_CODE (arg1
))
9201 || (TREE_CODE (arg1
) == BIT_AND_EXPR
9202 && integer_onep (TREE_OPERAND (arg1
, 1)))))
9203 || (truth_value_p (TREE_CODE (arg1
))
9204 && (truth_value_p (TREE_CODE (arg0
))
9205 || (TREE_CODE (arg0
) == BIT_AND_EXPR
9206 && integer_onep (TREE_OPERAND (arg0
, 1)))))))
9208 tem
= fold_build2_loc (loc
, code
== BIT_AND_EXPR
? TRUTH_AND_EXPR
9209 : code
== BIT_IOR_EXPR
? TRUTH_OR_EXPR
9212 fold_convert_loc (loc
, boolean_type_node
, arg0
),
9213 fold_convert_loc (loc
, boolean_type_node
, arg1
));
9215 if (code
== EQ_EXPR
)
9216 tem
= invert_truthvalue_loc (loc
, tem
);
9218 return fold_convert_loc (loc
, type
, tem
);
9221 if (TREE_CODE_CLASS (code
) == tcc_binary
9222 || TREE_CODE_CLASS (code
) == tcc_comparison
)
9224 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
9226 tem
= fold_build2_loc (loc
, code
, type
,
9227 fold_convert_loc (loc
, TREE_TYPE (op0
),
9228 TREE_OPERAND (arg0
, 1)), op1
);
9229 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
9232 if (TREE_CODE (arg1
) == COMPOUND_EXPR
9233 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
9235 tem
= fold_build2_loc (loc
, code
, type
, op0
,
9236 fold_convert_loc (loc
, TREE_TYPE (op1
),
9237 TREE_OPERAND (arg1
, 1)));
9238 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
9242 if (TREE_CODE (arg0
) == COND_EXPR
9243 || TREE_CODE (arg0
) == VEC_COND_EXPR
9244 || COMPARISON_CLASS_P (arg0
))
9246 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
9248 /*cond_first_p=*/1);
9249 if (tem
!= NULL_TREE
)
9253 if (TREE_CODE (arg1
) == COND_EXPR
9254 || TREE_CODE (arg1
) == VEC_COND_EXPR
9255 || COMPARISON_CLASS_P (arg1
))
9257 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
9259 /*cond_first_p=*/0);
9260 if (tem
!= NULL_TREE
)
9268 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9269 if (TREE_CODE (arg0
) == ADDR_EXPR
9270 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == MEM_REF
)
9272 tree iref
= TREE_OPERAND (arg0
, 0);
9273 return fold_build2 (MEM_REF
, type
,
9274 TREE_OPERAND (iref
, 0),
9275 int_const_binop (PLUS_EXPR
, arg1
,
9276 TREE_OPERAND (iref
, 1)));
9279 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9280 if (TREE_CODE (arg0
) == ADDR_EXPR
9281 && handled_component_p (TREE_OPERAND (arg0
, 0)))
9284 HOST_WIDE_INT coffset
;
9285 base
= get_addr_base_and_unit_offset (TREE_OPERAND (arg0
, 0),
9289 return fold_build2 (MEM_REF
, type
,
9290 build_fold_addr_expr (base
),
9291 int_const_binop (PLUS_EXPR
, arg1
,
9292 size_int (coffset
)));
9297 case POINTER_PLUS_EXPR
:
9298 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9299 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
9300 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
9301 return fold_convert_loc (loc
, type
,
9302 fold_build2_loc (loc
, PLUS_EXPR
, sizetype
,
9303 fold_convert_loc (loc
, sizetype
,
9305 fold_convert_loc (loc
, sizetype
,
9311 if (INTEGRAL_TYPE_P (type
) || VECTOR_INTEGER_TYPE_P (type
))
9313 /* X + (X / CST) * -CST is X % CST. */
9314 if (TREE_CODE (arg1
) == MULT_EXPR
9315 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == TRUNC_DIV_EXPR
9316 && operand_equal_p (arg0
,
9317 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0), 0))
9319 tree cst0
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1);
9320 tree cst1
= TREE_OPERAND (arg1
, 1);
9321 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (cst1
),
9323 if (sum
&& integer_zerop (sum
))
9324 return fold_convert_loc (loc
, type
,
9325 fold_build2_loc (loc
, TRUNC_MOD_EXPR
,
9326 TREE_TYPE (arg0
), arg0
,
9331 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9332 one. Make sure the type is not saturating and has the signedness of
9333 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9334 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9335 if ((TREE_CODE (arg0
) == MULT_EXPR
9336 || TREE_CODE (arg1
) == MULT_EXPR
)
9337 && !TYPE_SATURATING (type
)
9338 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
9339 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
9340 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
9342 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
9347 if (! FLOAT_TYPE_P (type
))
9349 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9350 (plus (plus (mult) (mult)) (foo)) so that we can
9351 take advantage of the factoring cases below. */
9352 if (ANY_INTEGRAL_TYPE_P (type
)
9353 && TYPE_OVERFLOW_WRAPS (type
)
9354 && (((TREE_CODE (arg0
) == PLUS_EXPR
9355 || TREE_CODE (arg0
) == MINUS_EXPR
)
9356 && TREE_CODE (arg1
) == MULT_EXPR
)
9357 || ((TREE_CODE (arg1
) == PLUS_EXPR
9358 || TREE_CODE (arg1
) == MINUS_EXPR
)
9359 && TREE_CODE (arg0
) == MULT_EXPR
)))
9361 tree parg0
, parg1
, parg
, marg
;
9362 enum tree_code pcode
;
9364 if (TREE_CODE (arg1
) == MULT_EXPR
)
9365 parg
= arg0
, marg
= arg1
;
9367 parg
= arg1
, marg
= arg0
;
9368 pcode
= TREE_CODE (parg
);
9369 parg0
= TREE_OPERAND (parg
, 0);
9370 parg1
= TREE_OPERAND (parg
, 1);
9374 if (TREE_CODE (parg0
) == MULT_EXPR
9375 && TREE_CODE (parg1
) != MULT_EXPR
)
9376 return fold_build2_loc (loc
, pcode
, type
,
9377 fold_build2_loc (loc
, PLUS_EXPR
, type
,
9378 fold_convert_loc (loc
, type
,
9380 fold_convert_loc (loc
, type
,
9382 fold_convert_loc (loc
, type
, parg1
));
9383 if (TREE_CODE (parg0
) != MULT_EXPR
9384 && TREE_CODE (parg1
) == MULT_EXPR
)
9386 fold_build2_loc (loc
, PLUS_EXPR
, type
,
9387 fold_convert_loc (loc
, type
, parg0
),
9388 fold_build2_loc (loc
, pcode
, type
,
9389 fold_convert_loc (loc
, type
, marg
),
9390 fold_convert_loc (loc
, type
,
9396 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9397 to __complex__ ( x, y ). This is not the same for SNaNs or
9398 if signed zeros are involved. */
9399 if (!HONOR_SNANS (element_mode (arg0
))
9400 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
9401 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
9403 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
9404 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
9405 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
9406 bool arg0rz
= false, arg0iz
= false;
9407 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
9408 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
9410 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
9411 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
9412 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
9414 tree rp
= arg1r
? arg1r
9415 : build1 (REALPART_EXPR
, rtype
, arg1
);
9416 tree ip
= arg0i
? arg0i
9417 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
9418 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
9420 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
9422 tree rp
= arg0r
? arg0r
9423 : build1 (REALPART_EXPR
, rtype
, arg0
);
9424 tree ip
= arg1i
? arg1i
9425 : build1 (IMAGPART_EXPR
, rtype
, arg1
);
9426 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
9431 if (flag_unsafe_math_optimizations
9432 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
9433 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
9434 && (tem
= distribute_real_division (loc
, code
, type
, arg0
, arg1
)))
9437 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9438 We associate floats only if the user has specified
9439 -fassociative-math. */
9440 if (flag_associative_math
9441 && TREE_CODE (arg1
) == PLUS_EXPR
9442 && TREE_CODE (arg0
) != MULT_EXPR
)
9444 tree tree10
= TREE_OPERAND (arg1
, 0);
9445 tree tree11
= TREE_OPERAND (arg1
, 1);
9446 if (TREE_CODE (tree11
) == MULT_EXPR
9447 && TREE_CODE (tree10
) == MULT_EXPR
)
9450 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, arg0
, tree10
);
9451 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree0
, tree11
);
9454 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9455 We associate floats only if the user has specified
9456 -fassociative-math. */
9457 if (flag_associative_math
9458 && TREE_CODE (arg0
) == PLUS_EXPR
9459 && TREE_CODE (arg1
) != MULT_EXPR
)
9461 tree tree00
= TREE_OPERAND (arg0
, 0);
9462 tree tree01
= TREE_OPERAND (arg0
, 1);
9463 if (TREE_CODE (tree01
) == MULT_EXPR
9464 && TREE_CODE (tree00
) == MULT_EXPR
)
9467 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, tree01
, arg1
);
9468 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree00
, tree0
);
9474 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9475 is a rotate of A by C1 bits. */
9476 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9477 is a rotate of A by B bits. */
9479 enum tree_code code0
, code1
;
9481 code0
= TREE_CODE (arg0
);
9482 code1
= TREE_CODE (arg1
);
9483 if (((code0
== RSHIFT_EXPR
&& code1
== LSHIFT_EXPR
)
9484 || (code1
== RSHIFT_EXPR
&& code0
== LSHIFT_EXPR
))
9485 && operand_equal_p (TREE_OPERAND (arg0
, 0),
9486 TREE_OPERAND (arg1
, 0), 0)
9487 && (rtype
= TREE_TYPE (TREE_OPERAND (arg0
, 0)),
9488 TYPE_UNSIGNED (rtype
))
9489 /* Only create rotates in complete modes. Other cases are not
9490 expanded properly. */
9491 && (element_precision (rtype
)
9492 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype
))))
9494 tree tree01
, tree11
;
9495 enum tree_code code01
, code11
;
9497 tree01
= TREE_OPERAND (arg0
, 1);
9498 tree11
= TREE_OPERAND (arg1
, 1);
9499 STRIP_NOPS (tree01
);
9500 STRIP_NOPS (tree11
);
9501 code01
= TREE_CODE (tree01
);
9502 code11
= TREE_CODE (tree11
);
9503 if (code01
== INTEGER_CST
9504 && code11
== INTEGER_CST
9505 && (wi::to_widest (tree01
) + wi::to_widest (tree11
)
9506 == element_precision (TREE_TYPE (TREE_OPERAND (arg0
, 0)))))
9508 tem
= build2_loc (loc
, LROTATE_EXPR
,
9509 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
9510 TREE_OPERAND (arg0
, 0),
9511 code0
== LSHIFT_EXPR
9512 ? TREE_OPERAND (arg0
, 1)
9513 : TREE_OPERAND (arg1
, 1));
9514 return fold_convert_loc (loc
, type
, tem
);
9516 else if (code11
== MINUS_EXPR
)
9518 tree tree110
, tree111
;
9519 tree110
= TREE_OPERAND (tree11
, 0);
9520 tree111
= TREE_OPERAND (tree11
, 1);
9521 STRIP_NOPS (tree110
);
9522 STRIP_NOPS (tree111
);
9523 if (TREE_CODE (tree110
) == INTEGER_CST
9524 && 0 == compare_tree_int (tree110
,
9526 (TREE_TYPE (TREE_OPERAND
9528 && operand_equal_p (tree01
, tree111
, 0))
9530 fold_convert_loc (loc
, type
,
9531 build2 ((code0
== LSHIFT_EXPR
9534 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
9535 TREE_OPERAND (arg0
, 0),
9536 TREE_OPERAND (arg0
, 1)));
9538 else if (code01
== MINUS_EXPR
)
9540 tree tree010
, tree011
;
9541 tree010
= TREE_OPERAND (tree01
, 0);
9542 tree011
= TREE_OPERAND (tree01
, 1);
9543 STRIP_NOPS (tree010
);
9544 STRIP_NOPS (tree011
);
9545 if (TREE_CODE (tree010
) == INTEGER_CST
9546 && 0 == compare_tree_int (tree010
,
9548 (TREE_TYPE (TREE_OPERAND
9550 && operand_equal_p (tree11
, tree011
, 0))
9551 return fold_convert_loc
9553 build2 ((code0
!= LSHIFT_EXPR
9556 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
9557 TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 1)));
9563 /* In most languages, can't associate operations on floats through
9564 parentheses. Rather than remember where the parentheses were, we
9565 don't associate floats at all, unless the user has specified
9567 And, we need to make sure type is not saturating. */
9569 if ((! FLOAT_TYPE_P (type
) || flag_associative_math
)
9570 && !TYPE_SATURATING (type
))
9572 tree var0
, con0
, lit0
, minus_lit0
;
9573 tree var1
, con1
, lit1
, minus_lit1
;
9577 /* Split both trees into variables, constants, and literals. Then
9578 associate each group together, the constants with literals,
9579 then the result with variables. This increases the chances of
9580 literals being recombined later and of generating relocatable
9581 expressions for the sum of a constant and literal. */
9582 var0
= split_tree (arg0
, code
, &con0
, &lit0
, &minus_lit0
, 0);
9583 var1
= split_tree (arg1
, code
, &con1
, &lit1
, &minus_lit1
,
9584 code
== MINUS_EXPR
);
9586 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9587 if (code
== MINUS_EXPR
)
9590 /* With undefined overflow prefer doing association in a type
9591 which wraps on overflow, if that is one of the operand types. */
9592 if ((POINTER_TYPE_P (type
) && POINTER_TYPE_OVERFLOW_UNDEFINED
)
9593 || (INTEGRAL_TYPE_P (type
) && !TYPE_OVERFLOW_WRAPS (type
)))
9595 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
9596 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
9597 atype
= TREE_TYPE (arg0
);
9598 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
9599 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1
)))
9600 atype
= TREE_TYPE (arg1
);
9601 gcc_assert (TYPE_PRECISION (atype
) == TYPE_PRECISION (type
));
9604 /* With undefined overflow we can only associate constants with one
9605 variable, and constants whose association doesn't overflow. */
9606 if ((POINTER_TYPE_P (atype
) && POINTER_TYPE_OVERFLOW_UNDEFINED
)
9607 || (INTEGRAL_TYPE_P (atype
) && !TYPE_OVERFLOW_WRAPS (atype
)))
9613 bool one_neg
= false;
9615 if (TREE_CODE (tmp0
) == NEGATE_EXPR
)
9617 tmp0
= TREE_OPERAND (tmp0
, 0);
9620 if (CONVERT_EXPR_P (tmp0
)
9621 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
9622 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
9623 <= TYPE_PRECISION (atype
)))
9624 tmp0
= TREE_OPERAND (tmp0
, 0);
9625 if (TREE_CODE (tmp1
) == NEGATE_EXPR
)
9627 tmp1
= TREE_OPERAND (tmp1
, 0);
9630 if (CONVERT_EXPR_P (tmp1
)
9631 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
9632 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
9633 <= TYPE_PRECISION (atype
)))
9634 tmp1
= TREE_OPERAND (tmp1
, 0);
9635 /* The only case we can still associate with two variables
9636 is if they cancel out. */
9638 || !operand_equal_p (tmp0
, tmp1
, 0))
9643 /* Only do something if we found more than two objects. Otherwise,
9644 nothing has changed and we risk infinite recursion. */
9646 && (2 < ((var0
!= 0) + (var1
!= 0)
9647 + (con0
!= 0) + (con1
!= 0)
9648 + (lit0
!= 0) + (lit1
!= 0)
9649 + (minus_lit0
!= 0) + (minus_lit1
!= 0))))
9651 bool any_overflows
= false;
9652 if (lit0
) any_overflows
|= TREE_OVERFLOW (lit0
);
9653 if (lit1
) any_overflows
|= TREE_OVERFLOW (lit1
);
9654 if (minus_lit0
) any_overflows
|= TREE_OVERFLOW (minus_lit0
);
9655 if (minus_lit1
) any_overflows
|= TREE_OVERFLOW (minus_lit1
);
9656 var0
= associate_trees (loc
, var0
, var1
, code
, atype
);
9657 con0
= associate_trees (loc
, con0
, con1
, code
, atype
);
9658 lit0
= associate_trees (loc
, lit0
, lit1
, code
, atype
);
9659 minus_lit0
= associate_trees (loc
, minus_lit0
, minus_lit1
,
9662 /* Preserve the MINUS_EXPR if the negative part of the literal is
9663 greater than the positive part. Otherwise, the multiplicative
9664 folding code (i.e extract_muldiv) may be fooled in case
9665 unsigned constants are subtracted, like in the following
9666 example: ((X*2 + 4) - 8U)/2. */
9667 if (minus_lit0
&& lit0
)
9669 if (TREE_CODE (lit0
) == INTEGER_CST
9670 && TREE_CODE (minus_lit0
) == INTEGER_CST
9671 && tree_int_cst_lt (lit0
, minus_lit0
))
9673 minus_lit0
= associate_trees (loc
, minus_lit0
, lit0
,
9679 lit0
= associate_trees (loc
, lit0
, minus_lit0
,
9685 /* Don't introduce overflows through reassociation. */
9687 && ((lit0
&& TREE_OVERFLOW_P (lit0
))
9688 || (minus_lit0
&& TREE_OVERFLOW_P (minus_lit0
))))
9695 fold_convert_loc (loc
, type
,
9696 associate_trees (loc
, var0
, minus_lit0
,
9697 MINUS_EXPR
, atype
));
9700 con0
= associate_trees (loc
, con0
, minus_lit0
,
9703 fold_convert_loc (loc
, type
,
9704 associate_trees (loc
, var0
, con0
,
9709 con0
= associate_trees (loc
, con0
, lit0
, code
, atype
);
9711 fold_convert_loc (loc
, type
, associate_trees (loc
, var0
, con0
,
9719 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9720 if (TREE_CODE (arg0
) == NEGATE_EXPR
9721 && negate_expr_p (op1
)
9722 && reorder_operands_p (arg0
, arg1
))
9723 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
9725 fold_convert_loc (loc
, type
,
9726 TREE_OPERAND (arg0
, 0)));
9728 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9729 __complex__ ( x, -y ). This is not the same for SNaNs or if
9730 signed zeros are involved. */
9731 if (!HONOR_SNANS (element_mode (arg0
))
9732 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
9733 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
9735 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
9736 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
9737 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
9738 bool arg0rz
= false, arg0iz
= false;
9739 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
9740 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
9742 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
9743 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
9744 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
9746 tree rp
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
9748 : build1 (REALPART_EXPR
, rtype
, arg1
));
9749 tree ip
= arg0i
? arg0i
9750 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
9751 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
9753 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
9755 tree rp
= arg0r
? arg0r
9756 : build1 (REALPART_EXPR
, rtype
, arg0
);
9757 tree ip
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
9759 : build1 (IMAGPART_EXPR
, rtype
, arg1
));
9760 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
9765 /* A - B -> A + (-B) if B is easily negatable. */
9766 if (negate_expr_p (op1
)
9767 && ! TYPE_OVERFLOW_SANITIZED (type
)
9768 && ((FLOAT_TYPE_P (type
)
9769 /* Avoid this transformation if B is a positive REAL_CST. */
9770 && (TREE_CODE (op1
) != REAL_CST
9771 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1
))))
9772 || INTEGRAL_TYPE_P (type
)))
9773 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
9774 fold_convert_loc (loc
, type
, arg0
),
9777 /* Fold &a[i] - &a[j] to i-j. */
9778 if (TREE_CODE (arg0
) == ADDR_EXPR
9779 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ARRAY_REF
9780 && TREE_CODE (arg1
) == ADDR_EXPR
9781 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ARRAY_REF
)
9783 tree tem
= fold_addr_of_array_ref_difference (loc
, type
,
9784 TREE_OPERAND (arg0
, 0),
9785 TREE_OPERAND (arg1
, 0));
9790 if (FLOAT_TYPE_P (type
)
9791 && flag_unsafe_math_optimizations
9792 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
9793 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
9794 && (tem
= distribute_real_division (loc
, code
, type
, arg0
, arg1
)))
9797 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9798 one. Make sure the type is not saturating and has the signedness of
9799 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9800 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9801 if ((TREE_CODE (arg0
) == MULT_EXPR
9802 || TREE_CODE (arg1
) == MULT_EXPR
)
9803 && !TYPE_SATURATING (type
)
9804 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
9805 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
9806 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
9808 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
9816 if (! FLOAT_TYPE_P (type
))
9818 /* Transform x * -C into -x * C if x is easily negatable. */
9819 if (TREE_CODE (op1
) == INTEGER_CST
9820 && tree_int_cst_sgn (op1
) == -1
9821 && negate_expr_p (op0
)
9822 && (tem
= negate_expr (op1
)) != op1
9823 && ! TREE_OVERFLOW (tem
))
9824 return fold_build2_loc (loc
, MULT_EXPR
, type
,
9825 fold_convert_loc (loc
, type
,
9826 negate_expr (op0
)), tem
);
9828 /* (A + A) * C -> A * 2 * C */
9829 if (TREE_CODE (arg0
) == PLUS_EXPR
9830 && TREE_CODE (arg1
) == INTEGER_CST
9831 && operand_equal_p (TREE_OPERAND (arg0
, 0),
9832 TREE_OPERAND (arg0
, 1), 0))
9833 return fold_build2_loc (loc
, MULT_EXPR
, type
,
9834 omit_one_operand_loc (loc
, type
,
9835 TREE_OPERAND (arg0
, 0),
9836 TREE_OPERAND (arg0
, 1)),
9837 fold_build2_loc (loc
, MULT_EXPR
, type
,
9838 build_int_cst (type
, 2) , arg1
));
9840 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
9841 sign-changing only. */
9842 if (TREE_CODE (arg1
) == INTEGER_CST
9843 && TREE_CODE (arg0
) == EXACT_DIV_EXPR
9844 && operand_equal_p (arg1
, TREE_OPERAND (arg0
, 1), 0))
9845 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
9847 strict_overflow_p
= false;
9848 if (TREE_CODE (arg1
) == INTEGER_CST
9849 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
9850 &strict_overflow_p
)))
9852 if (strict_overflow_p
)
9853 fold_overflow_warning (("assuming signed overflow does not "
9854 "occur when simplifying "
9856 WARN_STRICT_OVERFLOW_MISC
);
9857 return fold_convert_loc (loc
, type
, tem
);
9860 /* Optimize z * conj(z) for integer complex numbers. */
9861 if (TREE_CODE (arg0
) == CONJ_EXPR
9862 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
9863 return fold_mult_zconjz (loc
, type
, arg1
);
9864 if (TREE_CODE (arg1
) == CONJ_EXPR
9865 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
9866 return fold_mult_zconjz (loc
, type
, arg0
);
9870 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9871 This is not the same for NaNs or if signed zeros are
9873 if (!HONOR_NANS (arg0
)
9874 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
9875 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
))
9876 && TREE_CODE (arg1
) == COMPLEX_CST
9877 && real_zerop (TREE_REALPART (arg1
)))
9879 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
9880 if (real_onep (TREE_IMAGPART (arg1
)))
9882 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
9883 negate_expr (fold_build1_loc (loc
, IMAGPART_EXPR
,
9885 fold_build1_loc (loc
, REALPART_EXPR
, rtype
, arg0
));
9886 else if (real_minus_onep (TREE_IMAGPART (arg1
)))
9888 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
9889 fold_build1_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
),
9890 negate_expr (fold_build1_loc (loc
, REALPART_EXPR
,
9894 /* Optimize z * conj(z) for floating point complex numbers.
9895 Guarded by flag_unsafe_math_optimizations as non-finite
9896 imaginary components don't produce scalar results. */
9897 if (flag_unsafe_math_optimizations
9898 && TREE_CODE (arg0
) == CONJ_EXPR
9899 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
9900 return fold_mult_zconjz (loc
, type
, arg1
);
9901 if (flag_unsafe_math_optimizations
9902 && TREE_CODE (arg1
) == CONJ_EXPR
9903 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
9904 return fold_mult_zconjz (loc
, type
, arg0
);
9906 if (flag_unsafe_math_optimizations
)
9909 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
9912 && operand_equal_p (arg0
, arg1
, 0))
9914 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
9918 tree arg
= build_real (type
, dconst2
);
9919 return build_call_expr_loc (loc
, powfn
, 2, arg0
, arg
);
9927 /* Canonicalize (X & C1) | C2. */
9928 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9929 && TREE_CODE (arg1
) == INTEGER_CST
9930 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
9932 int width
= TYPE_PRECISION (type
), w
;
9933 wide_int c1
= TREE_OPERAND (arg0
, 1);
9936 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9937 if ((c1
& c2
) == c1
)
9938 return omit_one_operand_loc (loc
, type
, arg1
,
9939 TREE_OPERAND (arg0
, 0));
9941 wide_int msk
= wi::mask (width
, false,
9942 TYPE_PRECISION (TREE_TYPE (arg1
)));
9944 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9945 if (msk
.and_not (c1
| c2
) == 0)
9946 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
,
9947 TREE_OPERAND (arg0
, 0), arg1
);
9949 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
9950 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
9951 mode which allows further optimizations. */
9954 wide_int c3
= c1
.and_not (c2
);
9955 for (w
= BITS_PER_UNIT
; w
<= width
; w
<<= 1)
9957 wide_int mask
= wi::mask (w
, false,
9958 TYPE_PRECISION (type
));
9959 if (((c1
| c2
) & mask
) == mask
&& c1
.and_not (mask
) == 0)
9967 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
,
9968 fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
9969 TREE_OPERAND (arg0
, 0),
9970 wide_int_to_tree (type
,
9975 /* See if this can be simplified into a rotate first. If that
9976 is unsuccessful continue in the association code. */
9980 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9981 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9982 && INTEGRAL_TYPE_P (type
)
9983 && integer_onep (TREE_OPERAND (arg0
, 1))
9984 && integer_onep (arg1
))
9985 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
,
9986 build_zero_cst (TREE_TYPE (arg0
)));
9988 /* See if this can be simplified into a rotate first. If that
9989 is unsuccessful continue in the association code. */
9993 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9994 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
9995 && INTEGRAL_TYPE_P (type
)
9996 && integer_onep (TREE_OPERAND (arg0
, 1))
9997 && integer_onep (arg1
))
10000 tem
= TREE_OPERAND (arg0
, 0);
10001 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
10002 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
10004 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
10005 build_zero_cst (TREE_TYPE (tem
)));
10007 /* Fold ~X & 1 as (X & 1) == 0. */
10008 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
10009 && INTEGRAL_TYPE_P (type
)
10010 && integer_onep (arg1
))
10013 tem
= TREE_OPERAND (arg0
, 0);
10014 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
10015 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
10017 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
10018 build_zero_cst (TREE_TYPE (tem
)));
10020 /* Fold !X & 1 as X == 0. */
10021 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
10022 && integer_onep (arg1
))
10024 tem
= TREE_OPERAND (arg0
, 0);
10025 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem
,
10026 build_zero_cst (TREE_TYPE (tem
)));
10029 /* Fold (X ^ Y) & Y as ~X & Y. */
10030 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
10031 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
10033 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10034 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10035 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
10036 fold_convert_loc (loc
, type
, arg1
));
10038 /* Fold (X ^ Y) & X as ~Y & X. */
10039 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
10040 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
10041 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
10043 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
10044 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10045 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
10046 fold_convert_loc (loc
, type
, arg1
));
10048 /* Fold X & (X ^ Y) as X & ~Y. */
10049 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
10050 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10052 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
10053 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10054 fold_convert_loc (loc
, type
, arg0
),
10055 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
));
10057 /* Fold X & (Y ^ X) as ~Y & X. */
10058 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
10059 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
10060 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
10062 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
10063 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10064 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
10065 fold_convert_loc (loc
, type
, arg0
));
10068 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10069 multiple of 1 << CST. */
10070 if (TREE_CODE (arg1
) == INTEGER_CST
)
10072 wide_int cst1
= arg1
;
10073 wide_int ncst1
= -cst1
;
10074 if ((cst1
& ncst1
) == ncst1
10075 && multiple_of_p (type
, arg0
,
10076 wide_int_to_tree (TREE_TYPE (arg1
), ncst1
)))
10077 return fold_convert_loc (loc
, type
, arg0
);
10080 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10082 if (TREE_CODE (arg1
) == INTEGER_CST
10083 && TREE_CODE (arg0
) == MULT_EXPR
10084 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
10086 wide_int warg1
= arg1
;
10087 wide_int masked
= mask_with_tz (type
, warg1
, TREE_OPERAND (arg0
, 1));
10090 return omit_two_operands_loc (loc
, type
, build_zero_cst (type
),
10092 else if (masked
!= warg1
)
10094 /* Avoid the transform if arg1 is a mask of some
10095 mode which allows further optimizations. */
10096 int pop
= wi::popcount (warg1
);
10097 if (!(pop
>= BITS_PER_UNIT
10098 && exact_log2 (pop
) != -1
10099 && wi::mask (pop
, false, warg1
.get_precision ()) == warg1
))
10100 return fold_build2_loc (loc
, code
, type
, op0
,
10101 wide_int_to_tree (type
, masked
));
10105 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10106 ((A & N) + B) & M -> (A + B) & M
10107 Similarly if (N & M) == 0,
10108 ((A | N) + B) & M -> (A + B) & M
10109 and for - instead of + (or unary - instead of +)
10110 and/or ^ instead of |.
10111 If B is constant and (B & M) == 0, fold into A & M. */
10112 if (TREE_CODE (arg1
) == INTEGER_CST
)
10114 wide_int cst1
= arg1
;
10115 if ((~cst1
!= 0) && (cst1
& (cst1
+ 1)) == 0
10116 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
10117 && (TREE_CODE (arg0
) == PLUS_EXPR
10118 || TREE_CODE (arg0
) == MINUS_EXPR
10119 || TREE_CODE (arg0
) == NEGATE_EXPR
)
10120 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
))
10121 || TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
))
10127 /* Now we know that arg0 is (C + D) or (C - D) or
10128 -C and arg1 (M) is == (1LL << cst) - 1.
10129 Store C into PMOP[0] and D into PMOP[1]. */
10130 pmop
[0] = TREE_OPERAND (arg0
, 0);
10132 if (TREE_CODE (arg0
) != NEGATE_EXPR
)
10134 pmop
[1] = TREE_OPERAND (arg0
, 1);
10138 if ((wi::max_value (TREE_TYPE (arg0
)) & cst1
) != cst1
)
10141 for (; which
>= 0; which
--)
10142 switch (TREE_CODE (pmop
[which
]))
10147 if (TREE_CODE (TREE_OPERAND (pmop
[which
], 1))
10150 cst0
= TREE_OPERAND (pmop
[which
], 1);
10152 if (TREE_CODE (pmop
[which
]) == BIT_AND_EXPR
)
10157 else if (cst0
!= 0)
10159 /* If C or D is of the form (A & N) where
10160 (N & M) == M, or of the form (A | N) or
10161 (A ^ N) where (N & M) == 0, replace it with A. */
10162 pmop
[which
] = TREE_OPERAND (pmop
[which
], 0);
10165 /* If C or D is a N where (N & M) == 0, it can be
10166 omitted (assumed 0). */
10167 if ((TREE_CODE (arg0
) == PLUS_EXPR
10168 || (TREE_CODE (arg0
) == MINUS_EXPR
&& which
== 0))
10169 && (cst1
& pmop
[which
]) == 0)
10170 pmop
[which
] = NULL
;
10176 /* Only build anything new if we optimized one or both arguments
10178 if (pmop
[0] != TREE_OPERAND (arg0
, 0)
10179 || (TREE_CODE (arg0
) != NEGATE_EXPR
10180 && pmop
[1] != TREE_OPERAND (arg0
, 1)))
10182 tree utype
= TREE_TYPE (arg0
);
10183 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
10185 /* Perform the operations in a type that has defined
10186 overflow behavior. */
10187 utype
= unsigned_type_for (TREE_TYPE (arg0
));
10188 if (pmop
[0] != NULL
)
10189 pmop
[0] = fold_convert_loc (loc
, utype
, pmop
[0]);
10190 if (pmop
[1] != NULL
)
10191 pmop
[1] = fold_convert_loc (loc
, utype
, pmop
[1]);
10194 if (TREE_CODE (arg0
) == NEGATE_EXPR
)
10195 tem
= fold_build1_loc (loc
, NEGATE_EXPR
, utype
, pmop
[0]);
10196 else if (TREE_CODE (arg0
) == PLUS_EXPR
)
10198 if (pmop
[0] != NULL
&& pmop
[1] != NULL
)
10199 tem
= fold_build2_loc (loc
, PLUS_EXPR
, utype
,
10201 else if (pmop
[0] != NULL
)
10203 else if (pmop
[1] != NULL
)
10206 return build_int_cst (type
, 0);
10208 else if (pmop
[0] == NULL
)
10209 tem
= fold_build1_loc (loc
, NEGATE_EXPR
, utype
, pmop
[1]);
10211 tem
= fold_build2_loc (loc
, MINUS_EXPR
, utype
,
10213 /* TEM is now the new binary +, - or unary - replacement. */
10214 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, utype
, tem
,
10215 fold_convert_loc (loc
, utype
, arg1
));
10216 return fold_convert_loc (loc
, type
, tem
);
10221 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10222 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) == NOP_EXPR
10223 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
10225 prec
= element_precision (TREE_TYPE (TREE_OPERAND (arg0
, 0)));
10227 wide_int mask
= wide_int::from (arg1
, prec
, UNSIGNED
);
10230 fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10236 /* Don't touch a floating-point divide by zero unless the mode
10237 of the constant can represent infinity. */
10238 if (TREE_CODE (arg1
) == REAL_CST
10239 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
)))
10240 && real_zerop (arg1
))
10243 /* (-A) / (-B) -> A / B */
10244 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
10245 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
10246 TREE_OPERAND (arg0
, 0),
10247 negate_expr (arg1
));
10248 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
10249 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
10250 negate_expr (arg0
),
10251 TREE_OPERAND (arg1
, 0));
10254 case TRUNC_DIV_EXPR
:
10257 case FLOOR_DIV_EXPR
:
10258 /* Simplify A / (B << N) where A and B are positive and B is
10259 a power of 2, to A >> (N + log2(B)). */
10260 strict_overflow_p
= false;
10261 if (TREE_CODE (arg1
) == LSHIFT_EXPR
10262 && (TYPE_UNSIGNED (type
)
10263 || tree_expr_nonnegative_warnv_p (op0
, &strict_overflow_p
)))
10265 tree sval
= TREE_OPERAND (arg1
, 0);
10266 if (integer_pow2p (sval
) && tree_int_cst_sgn (sval
) > 0)
10268 tree sh_cnt
= TREE_OPERAND (arg1
, 1);
10269 tree pow2
= build_int_cst (TREE_TYPE (sh_cnt
),
10270 wi::exact_log2 (sval
));
10272 if (strict_overflow_p
)
10273 fold_overflow_warning (("assuming signed overflow does not "
10274 "occur when simplifying A / (B << N)"),
10275 WARN_STRICT_OVERFLOW_MISC
);
10277 sh_cnt
= fold_build2_loc (loc
, PLUS_EXPR
, TREE_TYPE (sh_cnt
),
10279 return fold_build2_loc (loc
, RSHIFT_EXPR
, type
,
10280 fold_convert_loc (loc
, type
, arg0
), sh_cnt
);
10286 case ROUND_DIV_EXPR
:
10287 case CEIL_DIV_EXPR
:
10288 case EXACT_DIV_EXPR
:
10289 if (integer_zerop (arg1
))
10292 /* Convert -A / -B to A / B when the type is signed and overflow is
10294 if ((!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
10295 && TREE_CODE (arg0
) == NEGATE_EXPR
10296 && negate_expr_p (op1
))
10298 if (INTEGRAL_TYPE_P (type
))
10299 fold_overflow_warning (("assuming signed overflow does not occur "
10300 "when distributing negation across "
10302 WARN_STRICT_OVERFLOW_MISC
);
10303 return fold_build2_loc (loc
, code
, type
,
10304 fold_convert_loc (loc
, type
,
10305 TREE_OPERAND (arg0
, 0)),
10306 negate_expr (op1
));
10308 if ((!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
10309 && TREE_CODE (arg1
) == NEGATE_EXPR
10310 && negate_expr_p (op0
))
10312 if (INTEGRAL_TYPE_P (type
))
10313 fold_overflow_warning (("assuming signed overflow does not occur "
10314 "when distributing negation across "
10316 WARN_STRICT_OVERFLOW_MISC
);
10317 return fold_build2_loc (loc
, code
, type
,
10319 fold_convert_loc (loc
, type
,
10320 TREE_OPERAND (arg1
, 0)));
10323 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10324 operation, EXACT_DIV_EXPR.
10326 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10327 At one time others generated faster code, it's not clear if they do
10328 after the last round to changes to the DIV code in expmed.c. */
10329 if ((code
== CEIL_DIV_EXPR
|| code
== FLOOR_DIV_EXPR
)
10330 && multiple_of_p (type
, arg0
, arg1
))
10331 return fold_build2_loc (loc
, EXACT_DIV_EXPR
, type
,
10332 fold_convert (type
, arg0
),
10333 fold_convert (type
, arg1
));
10335 strict_overflow_p
= false;
10336 if (TREE_CODE (arg1
) == INTEGER_CST
10337 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
10338 &strict_overflow_p
)))
10340 if (strict_overflow_p
)
10341 fold_overflow_warning (("assuming signed overflow does not occur "
10342 "when simplifying division"),
10343 WARN_STRICT_OVERFLOW_MISC
);
10344 return fold_convert_loc (loc
, type
, tem
);
10349 case CEIL_MOD_EXPR
:
10350 case FLOOR_MOD_EXPR
:
10351 case ROUND_MOD_EXPR
:
10352 case TRUNC_MOD_EXPR
:
10353 strict_overflow_p
= false;
10354 if (TREE_CODE (arg1
) == INTEGER_CST
10355 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
10356 &strict_overflow_p
)))
10358 if (strict_overflow_p
)
10359 fold_overflow_warning (("assuming signed overflow does not occur "
10360 "when simplifying modulus"),
10361 WARN_STRICT_OVERFLOW_MISC
);
10362 return fold_convert_loc (loc
, type
, tem
);
10371 /* Since negative shift count is not well-defined,
10372 don't try to compute it in the compiler. */
10373 if (TREE_CODE (arg1
) == INTEGER_CST
&& tree_int_cst_sgn (arg1
) < 0)
10376 prec
= element_precision (type
);
10378 /* If we have a rotate of a bit operation with the rotate count and
10379 the second operand of the bit operation both constant,
10380 permute the two operations. */
10381 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
10382 && (TREE_CODE (arg0
) == BIT_AND_EXPR
10383 || TREE_CODE (arg0
) == BIT_IOR_EXPR
10384 || TREE_CODE (arg0
) == BIT_XOR_EXPR
)
10385 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
10386 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
10387 fold_build2_loc (loc
, code
, type
,
10388 TREE_OPERAND (arg0
, 0), arg1
),
10389 fold_build2_loc (loc
, code
, type
,
10390 TREE_OPERAND (arg0
, 1), arg1
));
10392 /* Two consecutive rotates adding up to the some integer
10393 multiple of the precision of the type can be ignored. */
10394 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
10395 && TREE_CODE (arg0
) == RROTATE_EXPR
10396 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
10397 && wi::umod_trunc (wi::add (arg1
, TREE_OPERAND (arg0
, 1)),
10399 return TREE_OPERAND (arg0
, 0);
10407 case TRUTH_ANDIF_EXPR
:
10408 /* Note that the operands of this must be ints
10409 and their values must be 0 or 1.
10410 ("true" is a fixed value perhaps depending on the language.) */
10411 /* If first arg is constant zero, return it. */
10412 if (integer_zerop (arg0
))
10413 return fold_convert_loc (loc
, type
, arg0
);
10414 case TRUTH_AND_EXPR
:
10415 /* If either arg is constant true, drop it. */
10416 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
10417 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
10418 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
)
10419 /* Preserve sequence points. */
10420 && (code
!= TRUTH_ANDIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
10421 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10422 /* If second arg is constant zero, result is zero, but first arg
10423 must be evaluated. */
10424 if (integer_zerop (arg1
))
10425 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
10426 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10427 case will be handled here. */
10428 if (integer_zerop (arg0
))
10429 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
10431 /* !X && X is always false. */
10432 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
10433 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10434 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
10435 /* X && !X is always false. */
10436 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
10437 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10438 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
10440 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10441 means A >= Y && A != MAX, but in this case we know that
10444 if (!TREE_SIDE_EFFECTS (arg0
)
10445 && !TREE_SIDE_EFFECTS (arg1
))
10447 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg0
, arg1
);
10448 if (tem
&& !operand_equal_p (tem
, arg0
, 0))
10449 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
10451 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg1
, arg0
);
10452 if (tem
&& !operand_equal_p (tem
, arg1
, 0))
10453 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
10456 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
10462 case TRUTH_ORIF_EXPR
:
10463 /* Note that the operands of this must be ints
10464 and their values must be 0 or true.
10465 ("true" is a fixed value perhaps depending on the language.) */
10466 /* If first arg is constant true, return it. */
10467 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
10468 return fold_convert_loc (loc
, type
, arg0
);
10469 case TRUTH_OR_EXPR
:
10470 /* If either arg is constant zero, drop it. */
10471 if (TREE_CODE (arg0
) == INTEGER_CST
&& integer_zerop (arg0
))
10472 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
10473 if (TREE_CODE (arg1
) == INTEGER_CST
&& integer_zerop (arg1
)
10474 /* Preserve sequence points. */
10475 && (code
!= TRUTH_ORIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
10476 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10477 /* If second arg is constant true, result is true, but we must
10478 evaluate first arg. */
10479 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
))
10480 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
10481 /* Likewise for first arg, but note this only occurs here for
10483 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
10484 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
10486 /* !X || X is always true. */
10487 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
10488 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10489 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
10490 /* X || !X is always true. */
10491 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
10492 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10493 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
10495 /* (X && !Y) || (!X && Y) is X ^ Y */
10496 if (TREE_CODE (arg0
) == TRUTH_AND_EXPR
10497 && TREE_CODE (arg1
) == TRUTH_AND_EXPR
)
10499 tree a0
, a1
, l0
, l1
, n0
, n1
;
10501 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
10502 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
10504 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10505 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
10507 n0
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l0
);
10508 n1
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l1
);
10510 if ((operand_equal_p (n0
, a0
, 0)
10511 && operand_equal_p (n1
, a1
, 0))
10512 || (operand_equal_p (n0
, a1
, 0)
10513 && operand_equal_p (n1
, a0
, 0)))
10514 return fold_build2_loc (loc
, TRUTH_XOR_EXPR
, type
, l0
, n1
);
10517 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
10523 case TRUTH_XOR_EXPR
:
10524 /* If the second arg is constant zero, drop it. */
10525 if (integer_zerop (arg1
))
10526 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10527 /* If the second arg is constant true, this is a logical inversion. */
10528 if (integer_onep (arg1
))
10530 tem
= invert_truthvalue_loc (loc
, arg0
);
10531 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
10533 /* Identical arguments cancel to zero. */
10534 if (operand_equal_p (arg0
, arg1
, 0))
10535 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
10537 /* !X ^ X is always true. */
10538 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
10539 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10540 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
10542 /* X ^ !X is always true. */
10543 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
10544 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10545 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
10554 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
10555 if (tem
!= NULL_TREE
)
10558 /* bool_var != 1 becomes !bool_var. */
10559 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
10560 && code
== NE_EXPR
)
10561 return fold_convert_loc (loc
, type
,
10562 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
10563 TREE_TYPE (arg0
), arg0
));
10565 /* bool_var == 0 becomes !bool_var. */
10566 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
10567 && code
== EQ_EXPR
)
10568 return fold_convert_loc (loc
, type
,
10569 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
10570 TREE_TYPE (arg0
), arg0
));
10572 /* !exp != 0 becomes !exp */
10573 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
&& integer_zerop (arg1
)
10574 && code
== NE_EXPR
)
10575 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10577 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
10578 if ((TREE_CODE (arg0
) == PLUS_EXPR
10579 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
10580 || TREE_CODE (arg0
) == MINUS_EXPR
)
10581 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0
,
10584 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
10585 || POINTER_TYPE_P (TREE_TYPE (arg0
))))
10587 tree val
= TREE_OPERAND (arg0
, 1);
10588 val
= fold_build2_loc (loc
, code
, type
, val
,
10589 build_int_cst (TREE_TYPE (val
), 0));
10590 return omit_two_operands_loc (loc
, type
, val
,
10591 TREE_OPERAND (arg0
, 0), arg1
);
10594 /* Transform comparisons of the form X CMP X +- Y to Y CMP 0. */
10595 if ((TREE_CODE (arg1
) == PLUS_EXPR
10596 || TREE_CODE (arg1
) == POINTER_PLUS_EXPR
10597 || TREE_CODE (arg1
) == MINUS_EXPR
)
10598 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1
,
10601 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
10602 || POINTER_TYPE_P (TREE_TYPE (arg1
))))
10604 tree val
= TREE_OPERAND (arg1
, 1);
10605 val
= fold_build2_loc (loc
, code
, type
, val
,
10606 build_int_cst (TREE_TYPE (val
), 0));
10607 return omit_two_operands_loc (loc
, type
, val
,
10608 TREE_OPERAND (arg1
, 0), arg0
);
10611 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
10612 if (TREE_CODE (arg0
) == MINUS_EXPR
10613 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == INTEGER_CST
10614 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0
,
10617 && wi::extract_uhwi (TREE_OPERAND (arg0
, 0), 0, 1) == 1)
10618 return omit_two_operands_loc (loc
, type
,
10620 ? boolean_true_node
: boolean_false_node
,
10621 TREE_OPERAND (arg0
, 1), arg1
);
10623 /* Transform comparisons of the form X CMP C - X if C % 2 == 1. */
10624 if (TREE_CODE (arg1
) == MINUS_EXPR
10625 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == INTEGER_CST
10626 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1
,
10629 && wi::extract_uhwi (TREE_OPERAND (arg1
, 0), 0, 1) == 1)
10630 return omit_two_operands_loc (loc
, type
,
10632 ? boolean_true_node
: boolean_false_node
,
10633 TREE_OPERAND (arg1
, 1), arg0
);
10635 /* If this is an EQ or NE comparison with zero and ARG0 is
10636 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10637 two operations, but the latter can be done in one less insn
10638 on machines that have only two-operand insns or on which a
10639 constant cannot be the first operand. */
10640 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10641 && integer_zerop (arg1
))
10643 tree arg00
= TREE_OPERAND (arg0
, 0);
10644 tree arg01
= TREE_OPERAND (arg0
, 1);
10645 if (TREE_CODE (arg00
) == LSHIFT_EXPR
10646 && integer_onep (TREE_OPERAND (arg00
, 0)))
10648 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg00
),
10649 arg01
, TREE_OPERAND (arg00
, 1));
10650 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
10651 build_int_cst (TREE_TYPE (arg0
), 1));
10652 return fold_build2_loc (loc
, code
, type
,
10653 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
10656 else if (TREE_CODE (arg01
) == LSHIFT_EXPR
10657 && integer_onep (TREE_OPERAND (arg01
, 0)))
10659 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg01
),
10660 arg00
, TREE_OPERAND (arg01
, 1));
10661 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
10662 build_int_cst (TREE_TYPE (arg0
), 1));
10663 return fold_build2_loc (loc
, code
, type
,
10664 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
10669 /* If this is an NE or EQ comparison of zero against the result of a
10670 signed MOD operation whose second operand is a power of 2, make
10671 the MOD operation unsigned since it is simpler and equivalent. */
10672 if (integer_zerop (arg1
)
10673 && !TYPE_UNSIGNED (TREE_TYPE (arg0
))
10674 && (TREE_CODE (arg0
) == TRUNC_MOD_EXPR
10675 || TREE_CODE (arg0
) == CEIL_MOD_EXPR
10676 || TREE_CODE (arg0
) == FLOOR_MOD_EXPR
10677 || TREE_CODE (arg0
) == ROUND_MOD_EXPR
)
10678 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
10680 tree newtype
= unsigned_type_for (TREE_TYPE (arg0
));
10681 tree newmod
= fold_build2_loc (loc
, TREE_CODE (arg0
), newtype
,
10682 fold_convert_loc (loc
, newtype
,
10683 TREE_OPERAND (arg0
, 0)),
10684 fold_convert_loc (loc
, newtype
,
10685 TREE_OPERAND (arg0
, 1)));
10687 return fold_build2_loc (loc
, code
, type
, newmod
,
10688 fold_convert_loc (loc
, newtype
, arg1
));
10691 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10692 C1 is a valid shift constant, and C2 is a power of two, i.e.
10694 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10695 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == RSHIFT_EXPR
10696 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1))
10698 && integer_pow2p (TREE_OPERAND (arg0
, 1))
10699 && integer_zerop (arg1
))
10701 tree itype
= TREE_TYPE (arg0
);
10702 tree arg001
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1);
10703 prec
= TYPE_PRECISION (itype
);
10705 /* Check for a valid shift count. */
10706 if (wi::ltu_p (arg001
, prec
))
10708 tree arg01
= TREE_OPERAND (arg0
, 1);
10709 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
10710 unsigned HOST_WIDE_INT log2
= tree_log2 (arg01
);
10711 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10712 can be rewritten as (X & (C2 << C1)) != 0. */
10713 if ((log2
+ TREE_INT_CST_LOW (arg001
)) < prec
)
10715 tem
= fold_build2_loc (loc
, LSHIFT_EXPR
, itype
, arg01
, arg001
);
10716 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, itype
, arg000
, tem
);
10717 return fold_build2_loc (loc
, code
, type
, tem
,
10718 fold_convert_loc (loc
, itype
, arg1
));
10720 /* Otherwise, for signed (arithmetic) shifts,
10721 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10722 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10723 else if (!TYPE_UNSIGNED (itype
))
10724 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
, type
,
10725 arg000
, build_int_cst (itype
, 0));
10726 /* Otherwise, of unsigned (logical) shifts,
10727 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10728 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10730 return omit_one_operand_loc (loc
, type
,
10731 code
== EQ_EXPR
? integer_one_node
10732 : integer_zero_node
,
10737 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10738 Similarly for NE_EXPR. */
10739 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10740 && TREE_CODE (arg1
) == INTEGER_CST
10741 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
10743 tree notc
= fold_build1_loc (loc
, BIT_NOT_EXPR
,
10744 TREE_TYPE (TREE_OPERAND (arg0
, 1)),
10745 TREE_OPERAND (arg0
, 1));
10747 = fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
10748 fold_convert_loc (loc
, TREE_TYPE (arg0
), arg1
),
10750 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
10751 if (integer_nonzerop (dandnotc
))
10752 return omit_one_operand_loc (loc
, type
, rslt
, arg0
);
10755 /* If this is a comparison of a field, we may be able to simplify it. */
10756 if ((TREE_CODE (arg0
) == COMPONENT_REF
10757 || TREE_CODE (arg0
) == BIT_FIELD_REF
)
10758 /* Handle the constant case even without -O
10759 to make sure the warnings are given. */
10760 && (optimize
|| TREE_CODE (arg1
) == INTEGER_CST
))
10762 t1
= optimize_bit_field_compare (loc
, code
, type
, arg0
, arg1
);
10767 /* Optimize comparisons of strlen vs zero to a compare of the
10768 first character of the string vs zero. To wit,
10769 strlen(ptr) == 0 => *ptr == 0
10770 strlen(ptr) != 0 => *ptr != 0
10771 Other cases should reduce to one of these two (or a constant)
10772 due to the return value of strlen being unsigned. */
10773 if (TREE_CODE (arg0
) == CALL_EXPR
10774 && integer_zerop (arg1
))
10776 tree fndecl
= get_callee_fndecl (arg0
);
10779 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
10780 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STRLEN
10781 && call_expr_nargs (arg0
) == 1
10782 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0
, 0))) == POINTER_TYPE
)
10784 tree iref
= build_fold_indirect_ref_loc (loc
,
10785 CALL_EXPR_ARG (arg0
, 0));
10786 return fold_build2_loc (loc
, code
, type
, iref
,
10787 build_int_cst (TREE_TYPE (iref
), 0));
10791 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10792 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10793 if (TREE_CODE (arg0
) == RSHIFT_EXPR
10794 && integer_zerop (arg1
)
10795 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
10797 tree arg00
= TREE_OPERAND (arg0
, 0);
10798 tree arg01
= TREE_OPERAND (arg0
, 1);
10799 tree itype
= TREE_TYPE (arg00
);
10800 if (wi::eq_p (arg01
, element_precision (itype
) - 1))
10802 if (TYPE_UNSIGNED (itype
))
10804 itype
= signed_type_for (itype
);
10805 arg00
= fold_convert_loc (loc
, itype
, arg00
);
10807 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
10808 type
, arg00
, build_zero_cst (itype
));
10812 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10813 (X & C) == 0 when C is a single bit. */
10814 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10815 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_NOT_EXPR
10816 && integer_zerop (arg1
)
10817 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
10819 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
10820 TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0),
10821 TREE_OPERAND (arg0
, 1));
10822 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
,
10824 fold_convert_loc (loc
, TREE_TYPE (arg0
),
10828 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10829 constant C is a power of two, i.e. a single bit. */
10830 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
10831 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
10832 && integer_zerop (arg1
)
10833 && integer_pow2p (TREE_OPERAND (arg0
, 1))
10834 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
10835 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
10837 tree arg00
= TREE_OPERAND (arg0
, 0);
10838 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
10839 arg00
, build_int_cst (TREE_TYPE (arg00
), 0));
10842 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10843 when is C is a power of two, i.e. a single bit. */
10844 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10845 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_XOR_EXPR
10846 && integer_zerop (arg1
)
10847 && integer_pow2p (TREE_OPERAND (arg0
, 1))
10848 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
10849 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
10851 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
10852 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg000
),
10853 arg000
, TREE_OPERAND (arg0
, 1));
10854 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
10855 tem
, build_int_cst (TREE_TYPE (tem
), 0));
10858 if (integer_zerop (arg1
)
10859 && tree_expr_nonzero_p (arg0
))
10861 tree res
= constant_boolean_node (code
==NE_EXPR
, type
);
10862 return omit_one_operand_loc (loc
, type
, res
, arg0
);
10865 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
10866 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10867 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
10869 tree arg00
= TREE_OPERAND (arg0
, 0);
10870 tree arg01
= TREE_OPERAND (arg0
, 1);
10871 tree arg10
= TREE_OPERAND (arg1
, 0);
10872 tree arg11
= TREE_OPERAND (arg1
, 1);
10873 tree itype
= TREE_TYPE (arg0
);
10875 if (operand_equal_p (arg01
, arg11
, 0))
10876 return fold_build2_loc (loc
, code
, type
,
10877 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
10878 fold_build2_loc (loc
,
10879 BIT_XOR_EXPR
, itype
,
10882 build_zero_cst (itype
));
10884 if (operand_equal_p (arg01
, arg10
, 0))
10885 return fold_build2_loc (loc
, code
, type
,
10886 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
10887 fold_build2_loc (loc
,
10888 BIT_XOR_EXPR
, itype
,
10891 build_zero_cst (itype
));
10893 if (operand_equal_p (arg00
, arg11
, 0))
10894 return fold_build2_loc (loc
, code
, type
,
10895 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
10896 fold_build2_loc (loc
,
10897 BIT_XOR_EXPR
, itype
,
10900 build_zero_cst (itype
));
10902 if (operand_equal_p (arg00
, arg10
, 0))
10903 return fold_build2_loc (loc
, code
, type
,
10904 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
10905 fold_build2_loc (loc
,
10906 BIT_XOR_EXPR
, itype
,
10909 build_zero_cst (itype
));
10912 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
10913 && TREE_CODE (arg1
) == BIT_XOR_EXPR
)
10915 tree arg00
= TREE_OPERAND (arg0
, 0);
10916 tree arg01
= TREE_OPERAND (arg0
, 1);
10917 tree arg10
= TREE_OPERAND (arg1
, 0);
10918 tree arg11
= TREE_OPERAND (arg1
, 1);
10919 tree itype
= TREE_TYPE (arg0
);
10921 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10922 operand_equal_p guarantees no side-effects so we don't need
10923 to use omit_one_operand on Z. */
10924 if (operand_equal_p (arg01
, arg11
, 0))
10925 return fold_build2_loc (loc
, code
, type
, arg00
,
10926 fold_convert_loc (loc
, TREE_TYPE (arg00
),
10928 if (operand_equal_p (arg01
, arg10
, 0))
10929 return fold_build2_loc (loc
, code
, type
, arg00
,
10930 fold_convert_loc (loc
, TREE_TYPE (arg00
),
10932 if (operand_equal_p (arg00
, arg11
, 0))
10933 return fold_build2_loc (loc
, code
, type
, arg01
,
10934 fold_convert_loc (loc
, TREE_TYPE (arg01
),
10936 if (operand_equal_p (arg00
, arg10
, 0))
10937 return fold_build2_loc (loc
, code
, type
, arg01
,
10938 fold_convert_loc (loc
, TREE_TYPE (arg01
),
10941 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
10942 if (TREE_CODE (arg01
) == INTEGER_CST
10943 && TREE_CODE (arg11
) == INTEGER_CST
)
10945 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg01
,
10946 fold_convert_loc (loc
, itype
, arg11
));
10947 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg00
, tem
);
10948 return fold_build2_loc (loc
, code
, type
, tem
,
10949 fold_convert_loc (loc
, itype
, arg10
));
10953 /* Attempt to simplify equality/inequality comparisons of complex
10954 values. Only lower the comparison if the result is known or
10955 can be simplified to a single scalar comparison. */
10956 if ((TREE_CODE (arg0
) == COMPLEX_EXPR
10957 || TREE_CODE (arg0
) == COMPLEX_CST
)
10958 && (TREE_CODE (arg1
) == COMPLEX_EXPR
10959 || TREE_CODE (arg1
) == COMPLEX_CST
))
10961 tree real0
, imag0
, real1
, imag1
;
10964 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
10966 real0
= TREE_OPERAND (arg0
, 0);
10967 imag0
= TREE_OPERAND (arg0
, 1);
10971 real0
= TREE_REALPART (arg0
);
10972 imag0
= TREE_IMAGPART (arg0
);
10975 if (TREE_CODE (arg1
) == COMPLEX_EXPR
)
10977 real1
= TREE_OPERAND (arg1
, 0);
10978 imag1
= TREE_OPERAND (arg1
, 1);
10982 real1
= TREE_REALPART (arg1
);
10983 imag1
= TREE_IMAGPART (arg1
);
10986 rcond
= fold_binary_loc (loc
, code
, type
, real0
, real1
);
10987 if (rcond
&& TREE_CODE (rcond
) == INTEGER_CST
)
10989 if (integer_zerop (rcond
))
10991 if (code
== EQ_EXPR
)
10992 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
10994 return fold_build2_loc (loc
, NE_EXPR
, type
, imag0
, imag1
);
10998 if (code
== NE_EXPR
)
10999 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
11001 return fold_build2_loc (loc
, EQ_EXPR
, type
, imag0
, imag1
);
11005 icond
= fold_binary_loc (loc
, code
, type
, imag0
, imag1
);
11006 if (icond
&& TREE_CODE (icond
) == INTEGER_CST
)
11008 if (integer_zerop (icond
))
11010 if (code
== EQ_EXPR
)
11011 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
11013 return fold_build2_loc (loc
, NE_EXPR
, type
, real0
, real1
);
11017 if (code
== NE_EXPR
)
11018 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
11020 return fold_build2_loc (loc
, EQ_EXPR
, type
, real0
, real1
);
11031 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
11032 if (tem
!= NULL_TREE
)
11035 /* Transform comparisons of the form X +- C CMP X. */
11036 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
11037 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11038 && ((TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
11039 && !HONOR_SNANS (arg0
))
11040 || (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
11041 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))))
11043 tree arg01
= TREE_OPERAND (arg0
, 1);
11044 enum tree_code code0
= TREE_CODE (arg0
);
11047 if (TREE_CODE (arg01
) == REAL_CST
)
11048 is_positive
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01
)) ? -1 : 1;
11050 is_positive
= tree_int_cst_sgn (arg01
);
11052 /* (X - c) > X becomes false. */
11053 if (code
== GT_EXPR
11054 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
11055 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
11057 if (TREE_CODE (arg01
) == INTEGER_CST
11058 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11059 fold_overflow_warning (("assuming signed overflow does not "
11060 "occur when assuming that (X - c) > X "
11061 "is always false"),
11062 WARN_STRICT_OVERFLOW_ALL
);
11063 return constant_boolean_node (0, type
);
11066 /* Likewise (X + c) < X becomes false. */
11067 if (code
== LT_EXPR
11068 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
11069 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
11071 if (TREE_CODE (arg01
) == INTEGER_CST
11072 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11073 fold_overflow_warning (("assuming signed overflow does not "
11074 "occur when assuming that "
11075 "(X + c) < X is always false"),
11076 WARN_STRICT_OVERFLOW_ALL
);
11077 return constant_boolean_node (0, type
);
11080 /* Convert (X - c) <= X to true. */
11081 if (!HONOR_NANS (arg1
)
11083 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
11084 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
11086 if (TREE_CODE (arg01
) == INTEGER_CST
11087 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11088 fold_overflow_warning (("assuming signed overflow does not "
11089 "occur when assuming that "
11090 "(X - c) <= X is always true"),
11091 WARN_STRICT_OVERFLOW_ALL
);
11092 return constant_boolean_node (1, type
);
11095 /* Convert (X + c) >= X to true. */
11096 if (!HONOR_NANS (arg1
)
11098 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
11099 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
11101 if (TREE_CODE (arg01
) == INTEGER_CST
11102 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11103 fold_overflow_warning (("assuming signed overflow does not "
11104 "occur when assuming that "
11105 "(X + c) >= X is always true"),
11106 WARN_STRICT_OVERFLOW_ALL
);
11107 return constant_boolean_node (1, type
);
11110 if (TREE_CODE (arg01
) == INTEGER_CST
)
11112 /* Convert X + c > X and X - c < X to true for integers. */
11113 if (code
== GT_EXPR
11114 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
11115 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
11117 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11118 fold_overflow_warning (("assuming signed overflow does "
11119 "not occur when assuming that "
11120 "(X + c) > X is always true"),
11121 WARN_STRICT_OVERFLOW_ALL
);
11122 return constant_boolean_node (1, type
);
11125 if (code
== LT_EXPR
11126 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
11127 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
11129 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11130 fold_overflow_warning (("assuming signed overflow does "
11131 "not occur when assuming that "
11132 "(X - c) < X is always true"),
11133 WARN_STRICT_OVERFLOW_ALL
);
11134 return constant_boolean_node (1, type
);
11137 /* Convert X + c <= X and X - c >= X to false for integers. */
11138 if (code
== LE_EXPR
11139 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
11140 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
11142 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11143 fold_overflow_warning (("assuming signed overflow does "
11144 "not occur when assuming that "
11145 "(X + c) <= X is always false"),
11146 WARN_STRICT_OVERFLOW_ALL
);
11147 return constant_boolean_node (0, type
);
11150 if (code
== GE_EXPR
11151 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
11152 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
11154 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11155 fold_overflow_warning (("assuming signed overflow does "
11156 "not occur when assuming that "
11157 "(X - c) >= X is always false"),
11158 WARN_STRICT_OVERFLOW_ALL
);
11159 return constant_boolean_node (0, type
);
11164 /* If we are comparing an ABS_EXPR with a constant, we can
11165 convert all the cases into explicit comparisons, but they may
11166 well not be faster than doing the ABS and one comparison.
11167 But ABS (X) <= C is a range comparison, which becomes a subtraction
11168 and a comparison, and is probably faster. */
11169 if (code
== LE_EXPR
11170 && TREE_CODE (arg1
) == INTEGER_CST
11171 && TREE_CODE (arg0
) == ABS_EXPR
11172 && ! TREE_SIDE_EFFECTS (arg0
)
11173 && (0 != (tem
= negate_expr (arg1
)))
11174 && TREE_CODE (tem
) == INTEGER_CST
11175 && !TREE_OVERFLOW (tem
))
11176 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
11177 build2 (GE_EXPR
, type
,
11178 TREE_OPERAND (arg0
, 0), tem
),
11179 build2 (LE_EXPR
, type
,
11180 TREE_OPERAND (arg0
, 0), arg1
));
11182 /* Convert ABS_EXPR<x> >= 0 to true. */
11183 strict_overflow_p
= false;
11184 if (code
== GE_EXPR
11185 && (integer_zerop (arg1
)
11186 || (! HONOR_NANS (arg0
)
11187 && real_zerop (arg1
)))
11188 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
11190 if (strict_overflow_p
)
11191 fold_overflow_warning (("assuming signed overflow does not occur "
11192 "when simplifying comparison of "
11193 "absolute value and zero"),
11194 WARN_STRICT_OVERFLOW_CONDITIONAL
);
11195 return omit_one_operand_loc (loc
, type
,
11196 constant_boolean_node (true, type
),
11200 /* Convert ABS_EXPR<x> < 0 to false. */
11201 strict_overflow_p
= false;
11202 if (code
== LT_EXPR
11203 && (integer_zerop (arg1
) || real_zerop (arg1
))
11204 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
11206 if (strict_overflow_p
)
11207 fold_overflow_warning (("assuming signed overflow does not occur "
11208 "when simplifying comparison of "
11209 "absolute value and zero"),
11210 WARN_STRICT_OVERFLOW_CONDITIONAL
);
11211 return omit_one_operand_loc (loc
, type
,
11212 constant_boolean_node (false, type
),
11216 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11217 and similarly for >= into !=. */
11218 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
11219 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
11220 && TREE_CODE (arg1
) == LSHIFT_EXPR
11221 && integer_onep (TREE_OPERAND (arg1
, 0)))
11222 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
11223 build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
11224 TREE_OPERAND (arg1
, 1)),
11225 build_zero_cst (TREE_TYPE (arg0
)));
11227 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
11228 otherwise Y might be >= # of bits in X's type and thus e.g.
11229 (unsigned char) (1 << Y) for Y 15 might be 0.
11230 If the cast is widening, then 1 << Y should have unsigned type,
11231 otherwise if Y is number of bits in the signed shift type minus 1,
11232 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
11233 31 might be 0xffffffff80000000. */
11234 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
11235 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
11236 && CONVERT_EXPR_P (arg1
)
11237 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == LSHIFT_EXPR
11238 && (element_precision (TREE_TYPE (arg1
))
11239 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1
, 0))))
11240 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1
, 0)))
11241 || (element_precision (TREE_TYPE (arg1
))
11242 == element_precision (TREE_TYPE (TREE_OPERAND (arg1
, 0)))))
11243 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0)))
11245 tem
= build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
11246 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1));
11247 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
11248 fold_convert_loc (loc
, TREE_TYPE (arg0
), tem
),
11249 build_zero_cst (TREE_TYPE (arg0
)));
11254 case UNORDERED_EXPR
:
11262 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11264 tree targ0
= strip_float_extensions (arg0
);
11265 tree targ1
= strip_float_extensions (arg1
);
11266 tree newtype
= TREE_TYPE (targ0
);
11268 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
11269 newtype
= TREE_TYPE (targ1
);
11271 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
11272 return fold_build2_loc (loc
, code
, type
,
11273 fold_convert_loc (loc
, newtype
, targ0
),
11274 fold_convert_loc (loc
, newtype
, targ1
));
11279 case COMPOUND_EXPR
:
11280 /* When pedantic, a compound expression can be neither an lvalue
11281 nor an integer constant expression. */
11282 if (TREE_SIDE_EFFECTS (arg0
) || TREE_CONSTANT (arg1
))
11284 /* Don't let (0, 0) be null pointer constant. */
11285 tem
= integer_zerop (arg1
) ? build1 (NOP_EXPR
, type
, arg1
)
11286 : fold_convert_loc (loc
, type
, arg1
);
11287 return pedantic_non_lvalue_loc (loc
, tem
);
11290 /* An ASSERT_EXPR should never be passed to fold_binary. */
11291 gcc_unreachable ();
11295 } /* switch (code) */
11298 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11299 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11303 contains_label_1 (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
11305 switch (TREE_CODE (*tp
))
11311 *walk_subtrees
= 0;
11313 /* ... fall through ... */
11320 /* Return whether the sub-tree ST contains a label which is accessible from
11321 outside the sub-tree. */
11324 contains_label_p (tree st
)
11327 (walk_tree_without_duplicates (&st
, contains_label_1
, NULL
) != NULL_TREE
);
11330 /* Fold a ternary expression of code CODE and type TYPE with operands
11331 OP0, OP1, and OP2. Return the folded expression if folding is
11332 successful. Otherwise, return NULL_TREE. */
11335 fold_ternary_loc (location_t loc
, enum tree_code code
, tree type
,
11336 tree op0
, tree op1
, tree op2
)
11339 tree arg0
= NULL_TREE
, arg1
= NULL_TREE
, arg2
= NULL_TREE
;
11340 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
11342 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
11343 && TREE_CODE_LENGTH (code
) == 3);
11345 /* If this is a commutative operation, and OP0 is a constant, move it
11346 to OP1 to reduce the number of tests below. */
11347 if (commutative_ternary_tree_code (code
)
11348 && tree_swap_operands_p (op0
, op1
, true))
11349 return fold_build3_loc (loc
, code
, type
, op1
, op0
, op2
);
11351 tem
= generic_simplify (loc
, code
, type
, op0
, op1
, op2
);
11355 /* Strip any conversions that don't change the mode. This is safe
11356 for every expression, except for a comparison expression because
11357 its signedness is derived from its operands. So, in the latter
11358 case, only strip conversions that don't change the signedness.
11360 Note that this is done as an internal manipulation within the
11361 constant folder, in order to find the simplest representation of
11362 the arguments so that their form can be studied. In any cases,
11363 the appropriate type conversions should be put back in the tree
11364 that will get out of the constant folder. */
11385 case COMPONENT_REF
:
11386 if (TREE_CODE (arg0
) == CONSTRUCTOR
11387 && ! type_contains_placeholder_p (TREE_TYPE (arg0
)))
11389 unsigned HOST_WIDE_INT idx
;
11391 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0
), idx
, field
, value
)
11398 case VEC_COND_EXPR
:
11399 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11400 so all simple results must be passed through pedantic_non_lvalue. */
11401 if (TREE_CODE (arg0
) == INTEGER_CST
)
11403 tree unused_op
= integer_zerop (arg0
) ? op1
: op2
;
11404 tem
= integer_zerop (arg0
) ? op2
: op1
;
11405 /* Only optimize constant conditions when the selected branch
11406 has the same type as the COND_EXPR. This avoids optimizing
11407 away "c ? x : throw", where the throw has a void type.
11408 Avoid throwing away that operand which contains label. */
11409 if ((!TREE_SIDE_EFFECTS (unused_op
)
11410 || !contains_label_p (unused_op
))
11411 && (! VOID_TYPE_P (TREE_TYPE (tem
))
11412 || VOID_TYPE_P (type
)))
11413 return pedantic_non_lvalue_loc (loc
, tem
);
11416 else if (TREE_CODE (arg0
) == VECTOR_CST
)
11418 if ((TREE_CODE (arg1
) == VECTOR_CST
11419 || TREE_CODE (arg1
) == CONSTRUCTOR
)
11420 && (TREE_CODE (arg2
) == VECTOR_CST
11421 || TREE_CODE (arg2
) == CONSTRUCTOR
))
11423 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
11424 unsigned char *sel
= XALLOCAVEC (unsigned char, nelts
);
11425 gcc_assert (nelts
== VECTOR_CST_NELTS (arg0
));
11426 for (i
= 0; i
< nelts
; i
++)
11428 tree val
= VECTOR_CST_ELT (arg0
, i
);
11429 if (integer_all_onesp (val
))
11431 else if (integer_zerop (val
))
11432 sel
[i
] = nelts
+ i
;
11433 else /* Currently unreachable. */
11436 tree t
= fold_vec_perm (type
, arg1
, arg2
, sel
);
11437 if (t
!= NULL_TREE
)
11442 /* If we have A op B ? A : C, we may be able to convert this to a
11443 simpler expression, depending on the operation and the values
11444 of B and C. Signed zeros prevent all of these transformations,
11445 for reasons given above each one.
11447 Also try swapping the arguments and inverting the conditional. */
11448 if (COMPARISON_CLASS_P (arg0
)
11449 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
11450 arg1
, TREE_OPERAND (arg0
, 1))
11451 && !HONOR_SIGNED_ZEROS (element_mode (arg1
)))
11453 tem
= fold_cond_expr_with_comparison (loc
, type
, arg0
, op1
, op2
);
11458 if (COMPARISON_CLASS_P (arg0
)
11459 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
11461 TREE_OPERAND (arg0
, 1))
11462 && !HONOR_SIGNED_ZEROS (element_mode (op2
)))
11464 location_t loc0
= expr_location_or (arg0
, loc
);
11465 tem
= fold_invert_truthvalue (loc0
, arg0
);
11466 if (tem
&& COMPARISON_CLASS_P (tem
))
11468 tem
= fold_cond_expr_with_comparison (loc
, type
, tem
, op2
, op1
);
11474 /* If the second operand is simpler than the third, swap them
11475 since that produces better jump optimization results. */
11476 if (truth_value_p (TREE_CODE (arg0
))
11477 && tree_swap_operands_p (op1
, op2
, false))
11479 location_t loc0
= expr_location_or (arg0
, loc
);
11480 /* See if this can be inverted. If it can't, possibly because
11481 it was a floating-point inequality comparison, don't do
11483 tem
= fold_invert_truthvalue (loc0
, arg0
);
11485 return fold_build3_loc (loc
, code
, type
, tem
, op2
, op1
);
11488 /* Convert A ? 1 : 0 to simply A. */
11489 if ((code
== VEC_COND_EXPR
? integer_all_onesp (op1
)
11490 : (integer_onep (op1
)
11491 && !VECTOR_TYPE_P (type
)))
11492 && integer_zerop (op2
)
11493 /* If we try to convert OP0 to our type, the
11494 call to fold will try to move the conversion inside
11495 a COND, which will recurse. In that case, the COND_EXPR
11496 is probably the best choice, so leave it alone. */
11497 && type
== TREE_TYPE (arg0
))
11498 return pedantic_non_lvalue_loc (loc
, arg0
);
11500 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11501 over COND_EXPR in cases such as floating point comparisons. */
11502 if (integer_zerop (op1
)
11503 && (code
== VEC_COND_EXPR
? integer_all_onesp (op2
)
11504 : (integer_onep (op2
)
11505 && !VECTOR_TYPE_P (type
)))
11506 && truth_value_p (TREE_CODE (arg0
)))
11507 return pedantic_non_lvalue_loc (loc
,
11508 fold_convert_loc (loc
, type
,
11509 invert_truthvalue_loc (loc
,
11512 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11513 if (TREE_CODE (arg0
) == LT_EXPR
11514 && integer_zerop (TREE_OPERAND (arg0
, 1))
11515 && integer_zerop (op2
)
11516 && (tem
= sign_bit_p (TREE_OPERAND (arg0
, 0), arg1
)))
11518 /* sign_bit_p looks through both zero and sign extensions,
11519 but for this optimization only sign extensions are
11521 tree tem2
= TREE_OPERAND (arg0
, 0);
11522 while (tem
!= tem2
)
11524 if (TREE_CODE (tem2
) != NOP_EXPR
11525 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2
, 0))))
11530 tem2
= TREE_OPERAND (tem2
, 0);
11532 /* sign_bit_p only checks ARG1 bits within A's precision.
11533 If <sign bit of A> has wider type than A, bits outside
11534 of A's precision in <sign bit of A> need to be checked.
11535 If they are all 0, this optimization needs to be done
11536 in unsigned A's type, if they are all 1 in signed A's type,
11537 otherwise this can't be done. */
11539 && TYPE_PRECISION (TREE_TYPE (tem
))
11540 < TYPE_PRECISION (TREE_TYPE (arg1
))
11541 && TYPE_PRECISION (TREE_TYPE (tem
))
11542 < TYPE_PRECISION (type
))
11544 int inner_width
, outer_width
;
11547 inner_width
= TYPE_PRECISION (TREE_TYPE (tem
));
11548 outer_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
11549 if (outer_width
> TYPE_PRECISION (type
))
11550 outer_width
= TYPE_PRECISION (type
);
11552 wide_int mask
= wi::shifted_mask
11553 (inner_width
, outer_width
- inner_width
, false,
11554 TYPE_PRECISION (TREE_TYPE (arg1
)));
11556 wide_int common
= mask
& arg1
;
11557 if (common
== mask
)
11559 tem_type
= signed_type_for (TREE_TYPE (tem
));
11560 tem
= fold_convert_loc (loc
, tem_type
, tem
);
11562 else if (common
== 0)
11564 tem_type
= unsigned_type_for (TREE_TYPE (tem
));
11565 tem
= fold_convert_loc (loc
, tem_type
, tem
);
11573 fold_convert_loc (loc
, type
,
11574 fold_build2_loc (loc
, BIT_AND_EXPR
,
11575 TREE_TYPE (tem
), tem
,
11576 fold_convert_loc (loc
,
11581 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11582 already handled above. */
11583 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11584 && integer_onep (TREE_OPERAND (arg0
, 1))
11585 && integer_zerop (op2
)
11586 && integer_pow2p (arg1
))
11588 tree tem
= TREE_OPERAND (arg0
, 0);
11590 if (TREE_CODE (tem
) == RSHIFT_EXPR
11591 && tree_fits_uhwi_p (TREE_OPERAND (tem
, 1))
11592 && (unsigned HOST_WIDE_INT
) tree_log2 (arg1
) ==
11593 tree_to_uhwi (TREE_OPERAND (tem
, 1)))
11594 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11595 TREE_OPERAND (tem
, 0), arg1
);
11598 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11599 is probably obsolete because the first operand should be a
11600 truth value (that's why we have the two cases above), but let's
11601 leave it in until we can confirm this for all front-ends. */
11602 if (integer_zerop (op2
)
11603 && TREE_CODE (arg0
) == NE_EXPR
11604 && integer_zerop (TREE_OPERAND (arg0
, 1))
11605 && integer_pow2p (arg1
)
11606 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
11607 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
11608 arg1
, OEP_ONLY_CONST
))
11609 return pedantic_non_lvalue_loc (loc
,
11610 fold_convert_loc (loc
, type
,
11611 TREE_OPERAND (arg0
, 0)));
11613 /* Disable the transformations below for vectors, since
11614 fold_binary_op_with_conditional_arg may undo them immediately,
11615 yielding an infinite loop. */
11616 if (code
== VEC_COND_EXPR
)
11619 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11620 if (integer_zerop (op2
)
11621 && truth_value_p (TREE_CODE (arg0
))
11622 && truth_value_p (TREE_CODE (arg1
))
11623 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
11624 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
? BIT_AND_EXPR
11625 : TRUTH_ANDIF_EXPR
,
11626 type
, fold_convert_loc (loc
, type
, arg0
), arg1
);
11628 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11629 if (code
== VEC_COND_EXPR
? integer_all_onesp (op2
) : integer_onep (op2
)
11630 && truth_value_p (TREE_CODE (arg0
))
11631 && truth_value_p (TREE_CODE (arg1
))
11632 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
11634 location_t loc0
= expr_location_or (arg0
, loc
);
11635 /* Only perform transformation if ARG0 is easily inverted. */
11636 tem
= fold_invert_truthvalue (loc0
, arg0
);
11638 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
11641 type
, fold_convert_loc (loc
, type
, tem
),
11645 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11646 if (integer_zerop (arg1
)
11647 && truth_value_p (TREE_CODE (arg0
))
11648 && truth_value_p (TREE_CODE (op2
))
11649 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
11651 location_t loc0
= expr_location_or (arg0
, loc
);
11652 /* Only perform transformation if ARG0 is easily inverted. */
11653 tem
= fold_invert_truthvalue (loc0
, arg0
);
11655 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
11656 ? BIT_AND_EXPR
: TRUTH_ANDIF_EXPR
,
11657 type
, fold_convert_loc (loc
, type
, tem
),
11661 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11662 if (code
== VEC_COND_EXPR
? integer_all_onesp (arg1
) : integer_onep (arg1
)
11663 && truth_value_p (TREE_CODE (arg0
))
11664 && truth_value_p (TREE_CODE (op2
))
11665 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
11666 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
11667 ? BIT_IOR_EXPR
: TRUTH_ORIF_EXPR
,
11668 type
, fold_convert_loc (loc
, type
, arg0
), op2
);
11673 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11674 of fold_ternary on them. */
11675 gcc_unreachable ();
11677 case BIT_FIELD_REF
:
11678 if ((TREE_CODE (arg0
) == VECTOR_CST
11679 || (TREE_CODE (arg0
) == CONSTRUCTOR
11680 && TREE_CODE (TREE_TYPE (arg0
)) == VECTOR_TYPE
))
11681 && (type
== TREE_TYPE (TREE_TYPE (arg0
))
11682 || (TREE_CODE (type
) == VECTOR_TYPE
11683 && TREE_TYPE (type
) == TREE_TYPE (TREE_TYPE (arg0
)))))
11685 tree eltype
= TREE_TYPE (TREE_TYPE (arg0
));
11686 unsigned HOST_WIDE_INT width
= tree_to_uhwi (TYPE_SIZE (eltype
));
11687 unsigned HOST_WIDE_INT n
= tree_to_uhwi (arg1
);
11688 unsigned HOST_WIDE_INT idx
= tree_to_uhwi (op2
);
11691 && (idx
% width
) == 0
11692 && (n
% width
) == 0
11693 && ((idx
+ n
) / width
) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)))
11698 if (TREE_CODE (arg0
) == VECTOR_CST
)
11701 return VECTOR_CST_ELT (arg0
, idx
);
11703 tree
*vals
= XALLOCAVEC (tree
, n
);
11704 for (unsigned i
= 0; i
< n
; ++i
)
11705 vals
[i
] = VECTOR_CST_ELT (arg0
, idx
+ i
);
11706 return build_vector (type
, vals
);
11709 /* Constructor elements can be subvectors. */
11710 unsigned HOST_WIDE_INT k
= 1;
11711 if (CONSTRUCTOR_NELTS (arg0
) != 0)
11713 tree cons_elem
= TREE_TYPE (CONSTRUCTOR_ELT (arg0
, 0)->value
);
11714 if (TREE_CODE (cons_elem
) == VECTOR_TYPE
)
11715 k
= TYPE_VECTOR_SUBPARTS (cons_elem
);
11718 /* We keep an exact subset of the constructor elements. */
11719 if ((idx
% k
) == 0 && (n
% k
) == 0)
11721 if (CONSTRUCTOR_NELTS (arg0
) == 0)
11722 return build_constructor (type
, NULL
);
11727 if (idx
< CONSTRUCTOR_NELTS (arg0
))
11728 return CONSTRUCTOR_ELT (arg0
, idx
)->value
;
11729 return build_zero_cst (type
);
11732 vec
<constructor_elt
, va_gc
> *vals
;
11733 vec_alloc (vals
, n
);
11734 for (unsigned i
= 0;
11735 i
< n
&& idx
+ i
< CONSTRUCTOR_NELTS (arg0
);
11737 CONSTRUCTOR_APPEND_ELT (vals
, NULL_TREE
,
11739 (arg0
, idx
+ i
)->value
);
11740 return build_constructor (type
, vals
);
11742 /* The bitfield references a single constructor element. */
11743 else if (idx
+ n
<= (idx
/ k
+ 1) * k
)
11745 if (CONSTRUCTOR_NELTS (arg0
) <= idx
/ k
)
11746 return build_zero_cst (type
);
11748 return CONSTRUCTOR_ELT (arg0
, idx
/ k
)->value
;
11750 return fold_build3_loc (loc
, code
, type
,
11751 CONSTRUCTOR_ELT (arg0
, idx
/ k
)->value
, op1
,
11752 build_int_cst (TREE_TYPE (op2
), (idx
% k
) * width
));
11757 /* A bit-field-ref that referenced the full argument can be stripped. */
11758 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
11759 && TYPE_PRECISION (TREE_TYPE (arg0
)) == tree_to_uhwi (arg1
)
11760 && integer_zerop (op2
))
11761 return fold_convert_loc (loc
, type
, arg0
);
11763 /* On constants we can use native encode/interpret to constant
11764 fold (nearly) all BIT_FIELD_REFs. */
11765 if (CONSTANT_CLASS_P (arg0
)
11766 && can_native_interpret_type_p (type
)
11767 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0
)))
11768 /* This limitation should not be necessary, we just need to
11769 round this up to mode size. */
11770 && tree_to_uhwi (op1
) % BITS_PER_UNIT
== 0
11771 /* Need bit-shifting of the buffer to relax the following. */
11772 && tree_to_uhwi (op2
) % BITS_PER_UNIT
== 0)
11774 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (op2
);
11775 unsigned HOST_WIDE_INT bitsize
= tree_to_uhwi (op1
);
11776 unsigned HOST_WIDE_INT clen
;
11777 clen
= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0
)));
11778 /* ??? We cannot tell native_encode_expr to start at
11779 some random byte only. So limit us to a reasonable amount
11783 unsigned char *b
= XALLOCAVEC (unsigned char, clen
);
11784 unsigned HOST_WIDE_INT len
= native_encode_expr (arg0
, b
, clen
);
11786 && len
* BITS_PER_UNIT
>= bitpos
+ bitsize
)
11788 tree v
= native_interpret_expr (type
,
11789 b
+ bitpos
/ BITS_PER_UNIT
,
11790 bitsize
/ BITS_PER_UNIT
);
11800 /* For integers we can decompose the FMA if possible. */
11801 if (TREE_CODE (arg0
) == INTEGER_CST
11802 && TREE_CODE (arg1
) == INTEGER_CST
)
11803 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
11804 const_binop (MULT_EXPR
, arg0
, arg1
), arg2
);
11805 if (integer_zerop (arg2
))
11806 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
11808 return fold_fma (loc
, type
, arg0
, arg1
, arg2
);
11810 case VEC_PERM_EXPR
:
11811 if (TREE_CODE (arg2
) == VECTOR_CST
)
11813 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
, mask
, mask2
;
11814 unsigned char *sel
= XALLOCAVEC (unsigned char, 2 * nelts
);
11815 unsigned char *sel2
= sel
+ nelts
;
11816 bool need_mask_canon
= false;
11817 bool need_mask_canon2
= false;
11818 bool all_in_vec0
= true;
11819 bool all_in_vec1
= true;
11820 bool maybe_identity
= true;
11821 bool single_arg
= (op0
== op1
);
11822 bool changed
= false;
11824 mask2
= 2 * nelts
- 1;
11825 mask
= single_arg
? (nelts
- 1) : mask2
;
11826 gcc_assert (nelts
== VECTOR_CST_NELTS (arg2
));
11827 for (i
= 0; i
< nelts
; i
++)
11829 tree val
= VECTOR_CST_ELT (arg2
, i
);
11830 if (TREE_CODE (val
) != INTEGER_CST
)
11833 /* Make sure that the perm value is in an acceptable
11836 need_mask_canon
|= wi::gtu_p (t
, mask
);
11837 need_mask_canon2
|= wi::gtu_p (t
, mask2
);
11838 sel
[i
] = t
.to_uhwi () & mask
;
11839 sel2
[i
] = t
.to_uhwi () & mask2
;
11841 if (sel
[i
] < nelts
)
11842 all_in_vec1
= false;
11844 all_in_vec0
= false;
11846 if ((sel
[i
] & (nelts
-1)) != i
)
11847 maybe_identity
= false;
11850 if (maybe_identity
)
11860 else if (all_in_vec1
)
11863 for (i
= 0; i
< nelts
; i
++)
11865 need_mask_canon
= true;
11868 if ((TREE_CODE (op0
) == VECTOR_CST
11869 || TREE_CODE (op0
) == CONSTRUCTOR
)
11870 && (TREE_CODE (op1
) == VECTOR_CST
11871 || TREE_CODE (op1
) == CONSTRUCTOR
))
11873 tree t
= fold_vec_perm (type
, op0
, op1
, sel
);
11874 if (t
!= NULL_TREE
)
11878 if (op0
== op1
&& !single_arg
)
11881 /* Some targets are deficient and fail to expand a single
11882 argument permutation while still allowing an equivalent
11883 2-argument version. */
11884 if (need_mask_canon
&& arg2
== op2
11885 && !can_vec_perm_p (TYPE_MODE (type
), false, sel
)
11886 && can_vec_perm_p (TYPE_MODE (type
), false, sel2
))
11888 need_mask_canon
= need_mask_canon2
;
11892 if (need_mask_canon
&& arg2
== op2
)
11894 tree
*tsel
= XALLOCAVEC (tree
, nelts
);
11895 tree eltype
= TREE_TYPE (TREE_TYPE (arg2
));
11896 for (i
= 0; i
< nelts
; i
++)
11897 tsel
[i
] = build_int_cst (eltype
, sel
[i
]);
11898 op2
= build_vector (TREE_TYPE (arg2
), tsel
);
11903 return build3_loc (loc
, VEC_PERM_EXPR
, type
, op0
, op1
, op2
);
11909 } /* switch (code) */
11912 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11913 of an array (or vector). */
11916 get_array_ctor_element_at_index (tree ctor
, offset_int access_index
)
11918 tree index_type
= NULL_TREE
;
11919 offset_int low_bound
= 0;
11921 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
)
11923 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (ctor
));
11924 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
11926 /* Static constructors for variably sized objects makes no sense. */
11927 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type
)) == INTEGER_CST
);
11928 index_type
= TREE_TYPE (TYPE_MIN_VALUE (domain_type
));
11929 low_bound
= wi::to_offset (TYPE_MIN_VALUE (domain_type
));
11934 access_index
= wi::ext (access_index
, TYPE_PRECISION (index_type
),
11935 TYPE_SIGN (index_type
));
11937 offset_int index
= low_bound
- 1;
11939 index
= wi::ext (index
, TYPE_PRECISION (index_type
),
11940 TYPE_SIGN (index_type
));
11942 offset_int max_index
;
11943 unsigned HOST_WIDE_INT cnt
;
11946 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
, cval
)
11948 /* Array constructor might explicitly set index, or specify a range,
11949 or leave index NULL meaning that it is next index after previous
11953 if (TREE_CODE (cfield
) == INTEGER_CST
)
11954 max_index
= index
= wi::to_offset (cfield
);
11957 gcc_assert (TREE_CODE (cfield
) == RANGE_EXPR
);
11958 index
= wi::to_offset (TREE_OPERAND (cfield
, 0));
11959 max_index
= wi::to_offset (TREE_OPERAND (cfield
, 1));
11966 index
= wi::ext (index
, TYPE_PRECISION (index_type
),
11967 TYPE_SIGN (index_type
));
11971 /* Do we have match? */
11972 if (wi::cmpu (access_index
, index
) >= 0
11973 && wi::cmpu (access_index
, max_index
) <= 0)
11979 /* Perform constant folding and related simplification of EXPR.
11980 The related simplifications include x*1 => x, x*0 => 0, etc.,
11981 and application of the associative law.
11982 NOP_EXPR conversions may be removed freely (as long as we
11983 are careful not to change the type of the overall expression).
11984 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11985 but we can constant-fold them if they have constant operands. */
11987 #ifdef ENABLE_FOLD_CHECKING
11988 # define fold(x) fold_1 (x)
11989 static tree
fold_1 (tree
);
11995 const tree t
= expr
;
11996 enum tree_code code
= TREE_CODE (t
);
11997 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
11999 location_t loc
= EXPR_LOCATION (expr
);
12001 /* Return right away if a constant. */
12002 if (kind
== tcc_constant
)
12005 /* CALL_EXPR-like objects with variable numbers of operands are
12006 treated specially. */
12007 if (kind
== tcc_vl_exp
)
12009 if (code
== CALL_EXPR
)
12011 tem
= fold_call_expr (loc
, expr
, false);
12012 return tem
? tem
: expr
;
12017 if (IS_EXPR_CODE_CLASS (kind
))
12019 tree type
= TREE_TYPE (t
);
12020 tree op0
, op1
, op2
;
12022 switch (TREE_CODE_LENGTH (code
))
12025 op0
= TREE_OPERAND (t
, 0);
12026 tem
= fold_unary_loc (loc
, code
, type
, op0
);
12027 return tem
? tem
: expr
;
12029 op0
= TREE_OPERAND (t
, 0);
12030 op1
= TREE_OPERAND (t
, 1);
12031 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
12032 return tem
? tem
: expr
;
12034 op0
= TREE_OPERAND (t
, 0);
12035 op1
= TREE_OPERAND (t
, 1);
12036 op2
= TREE_OPERAND (t
, 2);
12037 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
12038 return tem
? tem
: expr
;
12048 tree op0
= TREE_OPERAND (t
, 0);
12049 tree op1
= TREE_OPERAND (t
, 1);
12051 if (TREE_CODE (op1
) == INTEGER_CST
12052 && TREE_CODE (op0
) == CONSTRUCTOR
12053 && ! type_contains_placeholder_p (TREE_TYPE (op0
)))
12055 tree val
= get_array_ctor_element_at_index (op0
,
12056 wi::to_offset (op1
));
12064 /* Return a VECTOR_CST if possible. */
12067 tree type
= TREE_TYPE (t
);
12068 if (TREE_CODE (type
) != VECTOR_TYPE
)
12073 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t
), i
, val
)
12074 if (! CONSTANT_CLASS_P (val
))
12077 return build_vector_from_ctor (type
, CONSTRUCTOR_ELTS (t
));
12081 return fold (DECL_INITIAL (t
));
12085 } /* switch (code) */
12088 #ifdef ENABLE_FOLD_CHECKING
12091 static void fold_checksum_tree (const_tree
, struct md5_ctx
*,
12092 hash_table
<nofree_ptr_hash
<const tree_node
> > *);
12093 static void fold_check_failed (const_tree
, const_tree
);
12094 void print_fold_checksum (const_tree
);
12096 /* When --enable-checking=fold, compute a digest of expr before
12097 and after actual fold call to see if fold did not accidentally
12098 change original expr. */
12104 struct md5_ctx ctx
;
12105 unsigned char checksum_before
[16], checksum_after
[16];
12106 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
12108 md5_init_ctx (&ctx
);
12109 fold_checksum_tree (expr
, &ctx
, &ht
);
12110 md5_finish_ctx (&ctx
, checksum_before
);
12113 ret
= fold_1 (expr
);
12115 md5_init_ctx (&ctx
);
12116 fold_checksum_tree (expr
, &ctx
, &ht
);
12117 md5_finish_ctx (&ctx
, checksum_after
);
12119 if (memcmp (checksum_before
, checksum_after
, 16))
12120 fold_check_failed (expr
, ret
);
12126 print_fold_checksum (const_tree expr
)
12128 struct md5_ctx ctx
;
12129 unsigned char checksum
[16], cnt
;
12130 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
12132 md5_init_ctx (&ctx
);
12133 fold_checksum_tree (expr
, &ctx
, &ht
);
12134 md5_finish_ctx (&ctx
, checksum
);
12135 for (cnt
= 0; cnt
< 16; ++cnt
)
12136 fprintf (stderr
, "%02x", checksum
[cnt
]);
12137 putc ('\n', stderr
);
12141 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED
, const_tree ret ATTRIBUTE_UNUSED
)
12143 internal_error ("fold check: original tree changed by fold");
12147 fold_checksum_tree (const_tree expr
, struct md5_ctx
*ctx
,
12148 hash_table
<nofree_ptr_hash
<const tree_node
> > *ht
)
12150 const tree_node
**slot
;
12151 enum tree_code code
;
12152 union tree_node buf
;
12158 slot
= ht
->find_slot (expr
, INSERT
);
12162 code
= TREE_CODE (expr
);
12163 if (TREE_CODE_CLASS (code
) == tcc_declaration
12164 && HAS_DECL_ASSEMBLER_NAME_P (expr
))
12166 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12167 memcpy ((char *) &buf
, expr
, tree_size (expr
));
12168 SET_DECL_ASSEMBLER_NAME ((tree
)&buf
, NULL
);
12169 buf
.decl_with_vis
.symtab_node
= NULL
;
12170 expr
= (tree
) &buf
;
12172 else if (TREE_CODE_CLASS (code
) == tcc_type
12173 && (TYPE_POINTER_TO (expr
)
12174 || TYPE_REFERENCE_TO (expr
)
12175 || TYPE_CACHED_VALUES_P (expr
)
12176 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr
)
12177 || TYPE_NEXT_VARIANT (expr
)))
12179 /* Allow these fields to be modified. */
12181 memcpy ((char *) &buf
, expr
, tree_size (expr
));
12182 expr
= tmp
= (tree
) &buf
;
12183 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp
) = 0;
12184 TYPE_POINTER_TO (tmp
) = NULL
;
12185 TYPE_REFERENCE_TO (tmp
) = NULL
;
12186 TYPE_NEXT_VARIANT (tmp
) = NULL
;
12187 if (TYPE_CACHED_VALUES_P (tmp
))
12189 TYPE_CACHED_VALUES_P (tmp
) = 0;
12190 TYPE_CACHED_VALUES (tmp
) = NULL
;
12193 md5_process_bytes (expr
, tree_size (expr
), ctx
);
12194 if (CODE_CONTAINS_STRUCT (code
, TS_TYPED
))
12195 fold_checksum_tree (TREE_TYPE (expr
), ctx
, ht
);
12196 if (TREE_CODE_CLASS (code
) != tcc_type
12197 && TREE_CODE_CLASS (code
) != tcc_declaration
12198 && code
!= TREE_LIST
12199 && code
!= SSA_NAME
12200 && CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
12201 fold_checksum_tree (TREE_CHAIN (expr
), ctx
, ht
);
12202 switch (TREE_CODE_CLASS (code
))
12208 md5_process_bytes (TREE_STRING_POINTER (expr
),
12209 TREE_STRING_LENGTH (expr
), ctx
);
12212 fold_checksum_tree (TREE_REALPART (expr
), ctx
, ht
);
12213 fold_checksum_tree (TREE_IMAGPART (expr
), ctx
, ht
);
12216 for (i
= 0; i
< (int) VECTOR_CST_NELTS (expr
); ++i
)
12217 fold_checksum_tree (VECTOR_CST_ELT (expr
, i
), ctx
, ht
);
12223 case tcc_exceptional
:
12227 fold_checksum_tree (TREE_PURPOSE (expr
), ctx
, ht
);
12228 fold_checksum_tree (TREE_VALUE (expr
), ctx
, ht
);
12229 expr
= TREE_CHAIN (expr
);
12230 goto recursive_label
;
12233 for (i
= 0; i
< TREE_VEC_LENGTH (expr
); ++i
)
12234 fold_checksum_tree (TREE_VEC_ELT (expr
, i
), ctx
, ht
);
12240 case tcc_expression
:
12241 case tcc_reference
:
12242 case tcc_comparison
:
12245 case tcc_statement
:
12247 len
= TREE_OPERAND_LENGTH (expr
);
12248 for (i
= 0; i
< len
; ++i
)
12249 fold_checksum_tree (TREE_OPERAND (expr
, i
), ctx
, ht
);
12251 case tcc_declaration
:
12252 fold_checksum_tree (DECL_NAME (expr
), ctx
, ht
);
12253 fold_checksum_tree (DECL_CONTEXT (expr
), ctx
, ht
);
12254 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_COMMON
))
12256 fold_checksum_tree (DECL_SIZE (expr
), ctx
, ht
);
12257 fold_checksum_tree (DECL_SIZE_UNIT (expr
), ctx
, ht
);
12258 fold_checksum_tree (DECL_INITIAL (expr
), ctx
, ht
);
12259 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr
), ctx
, ht
);
12260 fold_checksum_tree (DECL_ATTRIBUTES (expr
), ctx
, ht
);
12263 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_NON_COMMON
))
12265 if (TREE_CODE (expr
) == FUNCTION_DECL
)
12267 fold_checksum_tree (DECL_VINDEX (expr
), ctx
, ht
);
12268 fold_checksum_tree (DECL_ARGUMENTS (expr
), ctx
, ht
);
12270 fold_checksum_tree (DECL_RESULT_FLD (expr
), ctx
, ht
);
12274 if (TREE_CODE (expr
) == ENUMERAL_TYPE
)
12275 fold_checksum_tree (TYPE_VALUES (expr
), ctx
, ht
);
12276 fold_checksum_tree (TYPE_SIZE (expr
), ctx
, ht
);
12277 fold_checksum_tree (TYPE_SIZE_UNIT (expr
), ctx
, ht
);
12278 fold_checksum_tree (TYPE_ATTRIBUTES (expr
), ctx
, ht
);
12279 fold_checksum_tree (TYPE_NAME (expr
), ctx
, ht
);
12280 if (INTEGRAL_TYPE_P (expr
)
12281 || SCALAR_FLOAT_TYPE_P (expr
))
12283 fold_checksum_tree (TYPE_MIN_VALUE (expr
), ctx
, ht
);
12284 fold_checksum_tree (TYPE_MAX_VALUE (expr
), ctx
, ht
);
12286 fold_checksum_tree (TYPE_MAIN_VARIANT (expr
), ctx
, ht
);
12287 if (TREE_CODE (expr
) == RECORD_TYPE
12288 || TREE_CODE (expr
) == UNION_TYPE
12289 || TREE_CODE (expr
) == QUAL_UNION_TYPE
)
12290 fold_checksum_tree (TYPE_BINFO (expr
), ctx
, ht
);
12291 fold_checksum_tree (TYPE_CONTEXT (expr
), ctx
, ht
);
12298 /* Helper function for outputting the checksum of a tree T. When
12299 debugging with gdb, you can "define mynext" to be "next" followed
12300 by "call debug_fold_checksum (op0)", then just trace down till the
12303 DEBUG_FUNCTION
void
12304 debug_fold_checksum (const_tree t
)
12307 unsigned char checksum
[16];
12308 struct md5_ctx ctx
;
12309 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
12311 md5_init_ctx (&ctx
);
12312 fold_checksum_tree (t
, &ctx
, &ht
);
12313 md5_finish_ctx (&ctx
, checksum
);
12316 for (i
= 0; i
< 16; i
++)
12317 fprintf (stderr
, "%d ", checksum
[i
]);
12319 fprintf (stderr
, "\n");
12324 /* Fold a unary tree expression with code CODE of type TYPE with an
12325 operand OP0. LOC is the location of the resulting expression.
12326 Return a folded expression if successful. Otherwise, return a tree
12327 expression with code CODE of type TYPE with an operand OP0. */
12330 fold_build1_stat_loc (location_t loc
,
12331 enum tree_code code
, tree type
, tree op0 MEM_STAT_DECL
)
12334 #ifdef ENABLE_FOLD_CHECKING
12335 unsigned char checksum_before
[16], checksum_after
[16];
12336 struct md5_ctx ctx
;
12337 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
12339 md5_init_ctx (&ctx
);
12340 fold_checksum_tree (op0
, &ctx
, &ht
);
12341 md5_finish_ctx (&ctx
, checksum_before
);
12345 tem
= fold_unary_loc (loc
, code
, type
, op0
);
12347 tem
= build1_stat_loc (loc
, code
, type
, op0 PASS_MEM_STAT
);
12349 #ifdef ENABLE_FOLD_CHECKING
12350 md5_init_ctx (&ctx
);
12351 fold_checksum_tree (op0
, &ctx
, &ht
);
12352 md5_finish_ctx (&ctx
, checksum_after
);
12354 if (memcmp (checksum_before
, checksum_after
, 16))
12355 fold_check_failed (op0
, tem
);
12360 /* Fold a binary tree expression with code CODE of type TYPE with
12361 operands OP0 and OP1. LOC is the location of the resulting
12362 expression. Return a folded expression if successful. Otherwise,
12363 return a tree expression with code CODE of type TYPE with operands
12367 fold_build2_stat_loc (location_t loc
,
12368 enum tree_code code
, tree type
, tree op0
, tree op1
12372 #ifdef ENABLE_FOLD_CHECKING
12373 unsigned char checksum_before_op0
[16],
12374 checksum_before_op1
[16],
12375 checksum_after_op0
[16],
12376 checksum_after_op1
[16];
12377 struct md5_ctx ctx
;
12378 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
12380 md5_init_ctx (&ctx
);
12381 fold_checksum_tree (op0
, &ctx
, &ht
);
12382 md5_finish_ctx (&ctx
, checksum_before_op0
);
12385 md5_init_ctx (&ctx
);
12386 fold_checksum_tree (op1
, &ctx
, &ht
);
12387 md5_finish_ctx (&ctx
, checksum_before_op1
);
12391 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
12393 tem
= build2_stat_loc (loc
, code
, type
, op0
, op1 PASS_MEM_STAT
);
12395 #ifdef ENABLE_FOLD_CHECKING
12396 md5_init_ctx (&ctx
);
12397 fold_checksum_tree (op0
, &ctx
, &ht
);
12398 md5_finish_ctx (&ctx
, checksum_after_op0
);
12401 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
12402 fold_check_failed (op0
, tem
);
12404 md5_init_ctx (&ctx
);
12405 fold_checksum_tree (op1
, &ctx
, &ht
);
12406 md5_finish_ctx (&ctx
, checksum_after_op1
);
12408 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
12409 fold_check_failed (op1
, tem
);
12414 /* Fold a ternary tree expression with code CODE of type TYPE with
12415 operands OP0, OP1, and OP2. Return a folded expression if
12416 successful. Otherwise, return a tree expression with code CODE of
12417 type TYPE with operands OP0, OP1, and OP2. */
12420 fold_build3_stat_loc (location_t loc
, enum tree_code code
, tree type
,
12421 tree op0
, tree op1
, tree op2 MEM_STAT_DECL
)
12424 #ifdef ENABLE_FOLD_CHECKING
12425 unsigned char checksum_before_op0
[16],
12426 checksum_before_op1
[16],
12427 checksum_before_op2
[16],
12428 checksum_after_op0
[16],
12429 checksum_after_op1
[16],
12430 checksum_after_op2
[16];
12431 struct md5_ctx ctx
;
12432 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
12434 md5_init_ctx (&ctx
);
12435 fold_checksum_tree (op0
, &ctx
, &ht
);
12436 md5_finish_ctx (&ctx
, checksum_before_op0
);
12439 md5_init_ctx (&ctx
);
12440 fold_checksum_tree (op1
, &ctx
, &ht
);
12441 md5_finish_ctx (&ctx
, checksum_before_op1
);
12444 md5_init_ctx (&ctx
);
12445 fold_checksum_tree (op2
, &ctx
, &ht
);
12446 md5_finish_ctx (&ctx
, checksum_before_op2
);
12450 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
12451 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
12453 tem
= build3_stat_loc (loc
, code
, type
, op0
, op1
, op2 PASS_MEM_STAT
);
12455 #ifdef ENABLE_FOLD_CHECKING
12456 md5_init_ctx (&ctx
);
12457 fold_checksum_tree (op0
, &ctx
, &ht
);
12458 md5_finish_ctx (&ctx
, checksum_after_op0
);
12461 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
12462 fold_check_failed (op0
, tem
);
12464 md5_init_ctx (&ctx
);
12465 fold_checksum_tree (op1
, &ctx
, &ht
);
12466 md5_finish_ctx (&ctx
, checksum_after_op1
);
12469 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
12470 fold_check_failed (op1
, tem
);
12472 md5_init_ctx (&ctx
);
12473 fold_checksum_tree (op2
, &ctx
, &ht
);
12474 md5_finish_ctx (&ctx
, checksum_after_op2
);
12476 if (memcmp (checksum_before_op2
, checksum_after_op2
, 16))
12477 fold_check_failed (op2
, tem
);
12482 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
12483 arguments in ARGARRAY, and a null static chain.
12484 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12485 of type TYPE from the given operands as constructed by build_call_array. */
12488 fold_build_call_array_loc (location_t loc
, tree type
, tree fn
,
12489 int nargs
, tree
*argarray
)
12492 #ifdef ENABLE_FOLD_CHECKING
12493 unsigned char checksum_before_fn
[16],
12494 checksum_before_arglist
[16],
12495 checksum_after_fn
[16],
12496 checksum_after_arglist
[16];
12497 struct md5_ctx ctx
;
12498 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
12501 md5_init_ctx (&ctx
);
12502 fold_checksum_tree (fn
, &ctx
, &ht
);
12503 md5_finish_ctx (&ctx
, checksum_before_fn
);
12506 md5_init_ctx (&ctx
);
12507 for (i
= 0; i
< nargs
; i
++)
12508 fold_checksum_tree (argarray
[i
], &ctx
, &ht
);
12509 md5_finish_ctx (&ctx
, checksum_before_arglist
);
12513 tem
= fold_builtin_call_array (loc
, type
, fn
, nargs
, argarray
);
12515 tem
= build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
12517 #ifdef ENABLE_FOLD_CHECKING
12518 md5_init_ctx (&ctx
);
12519 fold_checksum_tree (fn
, &ctx
, &ht
);
12520 md5_finish_ctx (&ctx
, checksum_after_fn
);
12523 if (memcmp (checksum_before_fn
, checksum_after_fn
, 16))
12524 fold_check_failed (fn
, tem
);
12526 md5_init_ctx (&ctx
);
12527 for (i
= 0; i
< nargs
; i
++)
12528 fold_checksum_tree (argarray
[i
], &ctx
, &ht
);
12529 md5_finish_ctx (&ctx
, checksum_after_arglist
);
12531 if (memcmp (checksum_before_arglist
, checksum_after_arglist
, 16))
12532 fold_check_failed (NULL_TREE
, tem
);
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;
12561 fold_build1_initializer_loc (location_t loc
, enum tree_code code
,
12562 tree type
, tree op
)
12567 result
= fold_build1_loc (loc
, code
, type
, op
);
12574 fold_build2_initializer_loc (location_t loc
, enum tree_code code
,
12575 tree type
, tree op0
, tree op1
)
12580 result
= fold_build2_loc (loc
, code
, type
, op0
, op1
);
12587 fold_build_call_array_initializer_loc (location_t loc
, tree type
, tree fn
,
12588 int nargs
, tree
*argarray
)
12593 result
= fold_build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
12599 #undef START_FOLD_INIT
12600 #undef END_FOLD_INIT
12602 /* Determine if first argument is a multiple of second argument. Return 0 if
12603 it is not, or we cannot easily determined it to be.
12605 An example of the sort of thing we care about (at this point; this routine
12606 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12607 fold cases do now) is discovering that
12609 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12615 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12617 This code also handles discovering that
12619 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12621 is a multiple of 8 so we don't have to worry about dealing with a
12622 possible remainder.
12624 Note that we *look* inside a SAVE_EXPR only to determine how it was
12625 calculated; it is not safe for fold to do much of anything else with the
12626 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12627 at run time. For example, the latter example above *cannot* be implemented
12628 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12629 evaluation time of the original SAVE_EXPR is not necessarily the same at
12630 the time the new expression is evaluated. The only optimization of this
12631 sort that would be valid is changing
12633 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12637 SAVE_EXPR (I) * SAVE_EXPR (J)
12639 (where the same SAVE_EXPR (J) is used in the original and the
12640 transformed version). */
12643 multiple_of_p (tree type
, const_tree top
, const_tree bottom
)
12645 if (operand_equal_p (top
, bottom
, 0))
12648 if (TREE_CODE (type
) != INTEGER_TYPE
)
12651 switch (TREE_CODE (top
))
12654 /* Bitwise and provides a power of two multiple. If the mask is
12655 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12656 if (!integer_pow2p (bottom
))
12661 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
12662 || multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
12666 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
12667 && multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
12670 if (TREE_CODE (TREE_OPERAND (top
, 1)) == INTEGER_CST
)
12674 op1
= TREE_OPERAND (top
, 1);
12675 /* const_binop may not detect overflow correctly,
12676 so check for it explicitly here. */
12677 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node
)), op1
)
12678 && 0 != (t1
= fold_convert (type
,
12679 const_binop (LSHIFT_EXPR
,
12682 && !TREE_OVERFLOW (t1
))
12683 return multiple_of_p (type
, t1
, bottom
);
12688 /* Can't handle conversions from non-integral or wider integral type. */
12689 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top
, 0))) != INTEGER_TYPE
)
12690 || (TYPE_PRECISION (type
)
12691 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top
, 0)))))
12694 /* .. fall through ... */
12697 return multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
);
12700 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
)
12701 && multiple_of_p (type
, TREE_OPERAND (top
, 2), bottom
));
12704 if (TREE_CODE (bottom
) != INTEGER_CST
12705 || integer_zerop (bottom
)
12706 || (TYPE_UNSIGNED (type
)
12707 && (tree_int_cst_sgn (top
) < 0
12708 || tree_int_cst_sgn (bottom
) < 0)))
12710 return wi::multiple_of_p (wi::to_widest (top
), wi::to_widest (bottom
),
12718 #define tree_expr_nonnegative_warnv_p(X, Y) \
12719 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12721 #define RECURSE(X) \
12722 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
12724 /* Return true if CODE or TYPE is known to be non-negative. */
12727 tree_simple_nonnegative_warnv_p (enum tree_code code
, tree type
)
12729 if ((TYPE_PRECISION (type
) != 1 || TYPE_UNSIGNED (type
))
12730 && truth_value_p (code
))
12731 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12732 have a signed:1 type (where the value is -1 and 0). */
12737 /* Return true if (CODE OP0) is known to be non-negative. If the return
12738 value is based on the assumption that signed overflow is undefined,
12739 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12740 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12743 tree_unary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
12744 bool *strict_overflow_p
, int depth
)
12746 if (TYPE_UNSIGNED (type
))
12752 /* We can't return 1 if flag_wrapv is set because
12753 ABS_EXPR<INT_MIN> = INT_MIN. */
12754 if (!ANY_INTEGRAL_TYPE_P (type
))
12756 if (TYPE_OVERFLOW_UNDEFINED (type
))
12758 *strict_overflow_p
= true;
12763 case NON_LVALUE_EXPR
:
12765 case FIX_TRUNC_EXPR
:
12766 return RECURSE (op0
);
12770 tree inner_type
= TREE_TYPE (op0
);
12771 tree outer_type
= type
;
12773 if (TREE_CODE (outer_type
) == REAL_TYPE
)
12775 if (TREE_CODE (inner_type
) == REAL_TYPE
)
12776 return RECURSE (op0
);
12777 if (INTEGRAL_TYPE_P (inner_type
))
12779 if (TYPE_UNSIGNED (inner_type
))
12781 return RECURSE (op0
);
12784 else if (INTEGRAL_TYPE_P (outer_type
))
12786 if (TREE_CODE (inner_type
) == REAL_TYPE
)
12787 return RECURSE (op0
);
12788 if (INTEGRAL_TYPE_P (inner_type
))
12789 return TYPE_PRECISION (inner_type
) < TYPE_PRECISION (outer_type
)
12790 && TYPE_UNSIGNED (inner_type
);
12796 return tree_simple_nonnegative_warnv_p (code
, type
);
12799 /* We don't know sign of `t', so be conservative and return false. */
12803 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12804 value is based on the assumption that signed overflow is undefined,
12805 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12806 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12809 tree_binary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
12810 tree op1
, bool *strict_overflow_p
,
12813 if (TYPE_UNSIGNED (type
))
12818 case POINTER_PLUS_EXPR
:
12820 if (FLOAT_TYPE_P (type
))
12821 return RECURSE (op0
) && RECURSE (op1
);
12823 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12824 both unsigned and at least 2 bits shorter than the result. */
12825 if (TREE_CODE (type
) == INTEGER_TYPE
12826 && TREE_CODE (op0
) == NOP_EXPR
12827 && TREE_CODE (op1
) == NOP_EXPR
)
12829 tree inner1
= TREE_TYPE (TREE_OPERAND (op0
, 0));
12830 tree inner2
= TREE_TYPE (TREE_OPERAND (op1
, 0));
12831 if (TREE_CODE (inner1
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner1
)
12832 && TREE_CODE (inner2
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner2
))
12834 unsigned int prec
= MAX (TYPE_PRECISION (inner1
),
12835 TYPE_PRECISION (inner2
)) + 1;
12836 return prec
< TYPE_PRECISION (type
);
12842 if (FLOAT_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
12844 /* x * x is always non-negative for floating point x
12845 or without overflow. */
12846 if (operand_equal_p (op0
, op1
, 0)
12847 || (RECURSE (op0
) && RECURSE (op1
)))
12849 if (ANY_INTEGRAL_TYPE_P (type
)
12850 && TYPE_OVERFLOW_UNDEFINED (type
))
12851 *strict_overflow_p
= true;
12856 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12857 both unsigned and their total bits is shorter than the result. */
12858 if (TREE_CODE (type
) == INTEGER_TYPE
12859 && (TREE_CODE (op0
) == NOP_EXPR
|| TREE_CODE (op0
) == INTEGER_CST
)
12860 && (TREE_CODE (op1
) == NOP_EXPR
|| TREE_CODE (op1
) == INTEGER_CST
))
12862 tree inner0
= (TREE_CODE (op0
) == NOP_EXPR
)
12863 ? TREE_TYPE (TREE_OPERAND (op0
, 0))
12865 tree inner1
= (TREE_CODE (op1
) == NOP_EXPR
)
12866 ? TREE_TYPE (TREE_OPERAND (op1
, 0))
12869 bool unsigned0
= TYPE_UNSIGNED (inner0
);
12870 bool unsigned1
= TYPE_UNSIGNED (inner1
);
12872 if (TREE_CODE (op0
) == INTEGER_CST
)
12873 unsigned0
= unsigned0
|| tree_int_cst_sgn (op0
) >= 0;
12875 if (TREE_CODE (op1
) == INTEGER_CST
)
12876 unsigned1
= unsigned1
|| tree_int_cst_sgn (op1
) >= 0;
12878 if (TREE_CODE (inner0
) == INTEGER_TYPE
&& unsigned0
12879 && TREE_CODE (inner1
) == INTEGER_TYPE
&& unsigned1
)
12881 unsigned int precision0
= (TREE_CODE (op0
) == INTEGER_CST
)
12882 ? tree_int_cst_min_precision (op0
, UNSIGNED
)
12883 : TYPE_PRECISION (inner0
);
12885 unsigned int precision1
= (TREE_CODE (op1
) == INTEGER_CST
)
12886 ? tree_int_cst_min_precision (op1
, UNSIGNED
)
12887 : TYPE_PRECISION (inner1
);
12889 return precision0
+ precision1
< TYPE_PRECISION (type
);
12896 return RECURSE (op0
) || RECURSE (op1
);
12902 case TRUNC_DIV_EXPR
:
12903 case CEIL_DIV_EXPR
:
12904 case FLOOR_DIV_EXPR
:
12905 case ROUND_DIV_EXPR
:
12906 return RECURSE (op0
) && RECURSE (op1
);
12908 case TRUNC_MOD_EXPR
:
12909 return RECURSE (op0
);
12911 case FLOOR_MOD_EXPR
:
12912 return RECURSE (op1
);
12914 case CEIL_MOD_EXPR
:
12915 case ROUND_MOD_EXPR
:
12917 return tree_simple_nonnegative_warnv_p (code
, type
);
12920 /* We don't know sign of `t', so be conservative and return false. */
12924 /* Return true if T is known to be non-negative. If the return
12925 value is based on the assumption that signed overflow is undefined,
12926 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12927 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12930 tree_single_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
, int depth
)
12932 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
12935 switch (TREE_CODE (t
))
12938 return tree_int_cst_sgn (t
) >= 0;
12941 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
12944 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t
));
12947 return RECURSE (TREE_OPERAND (t
, 1)) && RECURSE (TREE_OPERAND (t
, 2));
12950 /* Limit the depth of recursion to avoid quadratic behavior.
12951 This is expected to catch almost all occurrences in practice.
12952 If this code misses important cases that unbounded recursion
12953 would not, passes that need this information could be revised
12954 to provide it through dataflow propagation. */
12955 return (!name_registered_for_update_p (t
)
12956 && depth
< PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH
)
12957 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t
),
12958 strict_overflow_p
, depth
));
12961 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
), TREE_TYPE (t
));
12965 /* Return true if T is known to be non-negative. If the return
12966 value is based on the assumption that signed overflow is undefined,
12967 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12968 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12971 tree_call_nonnegative_warnv_p (tree type
, combined_fn fn
, tree arg0
, tree arg1
,
12972 bool *strict_overflow_p
, int depth
)
12993 case CFN_BUILT_IN_BSWAP32
:
12994 case CFN_BUILT_IN_BSWAP64
:
12999 /* sqrt(-0.0) is -0.0. */
13000 if (!HONOR_SIGNED_ZEROS (element_mode (type
)))
13002 return RECURSE (arg0
);
13028 CASE_CFN_NEARBYINT
:
13035 CASE_CFN_SIGNIFICAND
:
13039 /* True if the 1st argument is nonnegative. */
13040 return RECURSE (arg0
);
13043 /* True if the 1st OR 2nd arguments are nonnegative. */
13044 return RECURSE (arg0
) || RECURSE (arg1
);
13047 /* True if the 1st AND 2nd arguments are nonnegative. */
13048 return RECURSE (arg0
) && RECURSE (arg1
);
13051 /* True if the 2nd argument is nonnegative. */
13052 return RECURSE (arg1
);
13055 /* True if the 1st argument is nonnegative or the second
13056 argument is an even integer. */
13057 if (TREE_CODE (arg1
) == INTEGER_CST
13058 && (TREE_INT_CST_LOW (arg1
) & 1) == 0)
13060 return RECURSE (arg0
);
13063 /* True if the 1st argument is nonnegative or the second
13064 argument is an even integer valued real. */
13065 if (TREE_CODE (arg1
) == REAL_CST
)
13070 c
= TREE_REAL_CST (arg1
);
13071 n
= real_to_integer (&c
);
13074 REAL_VALUE_TYPE cint
;
13075 real_from_integer (&cint
, VOIDmode
, n
, SIGNED
);
13076 if (real_identical (&c
, &cint
))
13080 return RECURSE (arg0
);
13085 return tree_simple_nonnegative_warnv_p (CALL_EXPR
, type
);
13088 /* Return true if T is known to be non-negative. If the return
13089 value is based on the assumption that signed overflow is undefined,
13090 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13091 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13094 tree_invalid_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
, int depth
)
13096 enum tree_code code
= TREE_CODE (t
);
13097 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
13104 tree temp
= TARGET_EXPR_SLOT (t
);
13105 t
= TARGET_EXPR_INITIAL (t
);
13107 /* If the initializer is non-void, then it's a normal expression
13108 that will be assigned to the slot. */
13109 if (!VOID_TYPE_P (t
))
13110 return RECURSE (t
);
13112 /* Otherwise, the initializer sets the slot in some way. One common
13113 way is an assignment statement at the end of the initializer. */
13116 if (TREE_CODE (t
) == BIND_EXPR
)
13117 t
= expr_last (BIND_EXPR_BODY (t
));
13118 else if (TREE_CODE (t
) == TRY_FINALLY_EXPR
13119 || TREE_CODE (t
) == TRY_CATCH_EXPR
)
13120 t
= expr_last (TREE_OPERAND (t
, 0));
13121 else if (TREE_CODE (t
) == STATEMENT_LIST
)
13126 if (TREE_CODE (t
) == MODIFY_EXPR
13127 && TREE_OPERAND (t
, 0) == temp
)
13128 return RECURSE (TREE_OPERAND (t
, 1));
13135 tree arg0
= call_expr_nargs (t
) > 0 ? CALL_EXPR_ARG (t
, 0) : NULL_TREE
;
13136 tree arg1
= call_expr_nargs (t
) > 1 ? CALL_EXPR_ARG (t
, 1) : NULL_TREE
;
13138 return tree_call_nonnegative_warnv_p (TREE_TYPE (t
),
13139 get_call_combined_fn (t
),
13142 strict_overflow_p
, depth
);
13144 case COMPOUND_EXPR
:
13146 return RECURSE (TREE_OPERAND (t
, 1));
13149 return RECURSE (expr_last (TREE_OPERAND (t
, 1)));
13152 return RECURSE (TREE_OPERAND (t
, 0));
13155 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
), TREE_TYPE (t
));
13160 #undef tree_expr_nonnegative_warnv_p
13162 /* Return true if T is known to be non-negative. If the return
13163 value is based on the assumption that signed overflow is undefined,
13164 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13165 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13168 tree_expr_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
, int depth
)
13170 enum tree_code code
;
13171 if (t
== error_mark_node
)
13174 code
= TREE_CODE (t
);
13175 switch (TREE_CODE_CLASS (code
))
13178 case tcc_comparison
:
13179 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
13181 TREE_OPERAND (t
, 0),
13182 TREE_OPERAND (t
, 1),
13183 strict_overflow_p
, depth
);
13186 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
13188 TREE_OPERAND (t
, 0),
13189 strict_overflow_p
, depth
);
13192 case tcc_declaration
:
13193 case tcc_reference
:
13194 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
, depth
);
13202 case TRUTH_AND_EXPR
:
13203 case TRUTH_OR_EXPR
:
13204 case TRUTH_XOR_EXPR
:
13205 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
13207 TREE_OPERAND (t
, 0),
13208 TREE_OPERAND (t
, 1),
13209 strict_overflow_p
, depth
);
13210 case TRUTH_NOT_EXPR
:
13211 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
13213 TREE_OPERAND (t
, 0),
13214 strict_overflow_p
, depth
);
13221 case WITH_SIZE_EXPR
:
13223 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
, depth
);
13226 return tree_invalid_nonnegative_warnv_p (t
, strict_overflow_p
, depth
);
13230 /* Return true if `t' is known to be non-negative. Handle warnings
13231 about undefined signed overflow. */
13234 tree_expr_nonnegative_p (tree t
)
13236 bool ret
, strict_overflow_p
;
13238 strict_overflow_p
= false;
13239 ret
= tree_expr_nonnegative_warnv_p (t
, &strict_overflow_p
);
13240 if (strict_overflow_p
)
13241 fold_overflow_warning (("assuming signed overflow does not occur when "
13242 "determining that expression is always "
13244 WARN_STRICT_OVERFLOW_MISC
);
13249 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13250 For floating point we further ensure that T is not denormal.
13251 Similar logic is present in nonzero_address in rtlanal.h.
13253 If the return value is based on the assumption that signed overflow
13254 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13255 change *STRICT_OVERFLOW_P. */
13258 tree_unary_nonzero_warnv_p (enum tree_code code
, tree type
, tree op0
,
13259 bool *strict_overflow_p
)
13264 return tree_expr_nonzero_warnv_p (op0
,
13265 strict_overflow_p
);
13269 tree inner_type
= TREE_TYPE (op0
);
13270 tree outer_type
= type
;
13272 return (TYPE_PRECISION (outer_type
) >= TYPE_PRECISION (inner_type
)
13273 && tree_expr_nonzero_warnv_p (op0
,
13274 strict_overflow_p
));
13278 case NON_LVALUE_EXPR
:
13279 return tree_expr_nonzero_warnv_p (op0
,
13280 strict_overflow_p
);
13289 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13290 For floating point we further ensure that T is not denormal.
13291 Similar logic is present in nonzero_address in rtlanal.h.
13293 If the return value is based on the assumption that signed overflow
13294 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13295 change *STRICT_OVERFLOW_P. */
13298 tree_binary_nonzero_warnv_p (enum tree_code code
,
13301 tree op1
, bool *strict_overflow_p
)
13303 bool sub_strict_overflow_p
;
13306 case POINTER_PLUS_EXPR
:
13308 if (ANY_INTEGRAL_TYPE_P (type
) && TYPE_OVERFLOW_UNDEFINED (type
))
13310 /* With the presence of negative values it is hard
13311 to say something. */
13312 sub_strict_overflow_p
= false;
13313 if (!tree_expr_nonnegative_warnv_p (op0
,
13314 &sub_strict_overflow_p
)
13315 || !tree_expr_nonnegative_warnv_p (op1
,
13316 &sub_strict_overflow_p
))
13318 /* One of operands must be positive and the other non-negative. */
13319 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13320 overflows, on a twos-complement machine the sum of two
13321 nonnegative numbers can never be zero. */
13322 return (tree_expr_nonzero_warnv_p (op0
,
13324 || tree_expr_nonzero_warnv_p (op1
,
13325 strict_overflow_p
));
13330 if (TYPE_OVERFLOW_UNDEFINED (type
))
13332 if (tree_expr_nonzero_warnv_p (op0
,
13334 && tree_expr_nonzero_warnv_p (op1
,
13335 strict_overflow_p
))
13337 *strict_overflow_p
= true;
13344 sub_strict_overflow_p
= false;
13345 if (tree_expr_nonzero_warnv_p (op0
,
13346 &sub_strict_overflow_p
)
13347 && tree_expr_nonzero_warnv_p (op1
,
13348 &sub_strict_overflow_p
))
13350 if (sub_strict_overflow_p
)
13351 *strict_overflow_p
= true;
13356 sub_strict_overflow_p
= false;
13357 if (tree_expr_nonzero_warnv_p (op0
,
13358 &sub_strict_overflow_p
))
13360 if (sub_strict_overflow_p
)
13361 *strict_overflow_p
= true;
13363 /* When both operands are nonzero, then MAX must be too. */
13364 if (tree_expr_nonzero_warnv_p (op1
,
13365 strict_overflow_p
))
13368 /* MAX where operand 0 is positive is positive. */
13369 return tree_expr_nonnegative_warnv_p (op0
,
13370 strict_overflow_p
);
13372 /* MAX where operand 1 is positive is positive. */
13373 else if (tree_expr_nonzero_warnv_p (op1
,
13374 &sub_strict_overflow_p
)
13375 && tree_expr_nonnegative_warnv_p (op1
,
13376 &sub_strict_overflow_p
))
13378 if (sub_strict_overflow_p
)
13379 *strict_overflow_p
= true;
13385 return (tree_expr_nonzero_warnv_p (op1
,
13387 || tree_expr_nonzero_warnv_p (op0
,
13388 strict_overflow_p
));
13397 /* Return true when T is an address and is known to be nonzero.
13398 For floating point we further ensure that T is not denormal.
13399 Similar logic is present in nonzero_address in rtlanal.h.
13401 If the return value is based on the assumption that signed overflow
13402 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13403 change *STRICT_OVERFLOW_P. */
13406 tree_single_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
13408 bool sub_strict_overflow_p
;
13409 switch (TREE_CODE (t
))
13412 return !integer_zerop (t
);
13416 tree base
= TREE_OPERAND (t
, 0);
13418 if (!DECL_P (base
))
13419 base
= get_base_address (base
);
13424 /* For objects in symbol table check if we know they are non-zero.
13425 Don't do anything for variables and functions before symtab is built;
13426 it is quite possible that they will be declared weak later. */
13427 if (DECL_P (base
) && decl_in_symtab_p (base
))
13429 struct symtab_node
*symbol
;
13431 symbol
= symtab_node::get_create (base
);
13433 return symbol
->nonzero_address ();
13438 /* Function local objects are never NULL. */
13440 && (DECL_CONTEXT (base
)
13441 && TREE_CODE (DECL_CONTEXT (base
)) == FUNCTION_DECL
13442 && auto_var_in_fn_p (base
, DECL_CONTEXT (base
))))
13445 /* Constants are never weak. */
13446 if (CONSTANT_CLASS_P (base
))
13453 sub_strict_overflow_p
= false;
13454 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
13455 &sub_strict_overflow_p
)
13456 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 2),
13457 &sub_strict_overflow_p
))
13459 if (sub_strict_overflow_p
)
13460 *strict_overflow_p
= true;
/* RECURSE is the only sanctioned way for the integer_valued_real_*
   helpers below to recurse: it threads the DEPTH counter through so
   the recursion stays bounded.  Calling integer_valued_real_p directly
   inside those helpers is turned into a hard compile error by the
   first macro; the parenthesized call in RECURSE bypasses the
   function-like macro and reaches the real function.  */

#define integer_valued_real_p(X) \
  _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0

#define RECURSE(X) \
  ((integer_valued_real_p) (X, depth + 1))
13477 /* Return true if the floating point result of (CODE OP0) has an
13478 integer value. We also allow +Inf, -Inf and NaN to be considered
13479 integer values. Return false for signaling NaN.
13481 DEPTH is the current nesting depth of the query. */
13484 integer_valued_real_unary_p (tree_code code
, tree op0
, int depth
)
13492 return RECURSE (op0
);
13496 tree type
= TREE_TYPE (op0
);
13497 if (TREE_CODE (type
) == INTEGER_TYPE
)
13499 if (TREE_CODE (type
) == REAL_TYPE
)
13500 return RECURSE (op0
);
13510 /* Return true if the floating point result of (CODE OP0 OP1) has an
13511 integer value. We also allow +Inf, -Inf and NaN to be considered
13512 integer values. Return false for signaling NaN.
13514 DEPTH is the current nesting depth of the query. */
13517 integer_valued_real_binary_p (tree_code code
, tree op0
, tree op1
, int depth
)
13526 return RECURSE (op0
) && RECURSE (op1
);
13534 /* Return true if the floating point result of calling FNDECL with arguments
13535 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13536 considered integer values. Return false for signaling NaN. If FNDECL
13537 takes fewer than 2 arguments, the remaining ARGn are null.
13539 DEPTH is the current nesting depth of the query. */
13542 integer_valued_real_call_p (combined_fn fn
, tree arg0
, tree arg1
, int depth
)
13548 CASE_CFN_NEARBYINT
:
13556 return RECURSE (arg0
) && RECURSE (arg1
);
13564 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13565 has an integer value. We also allow +Inf, -Inf and NaN to be
13566 considered integer values. Return false for signaling NaN.
13568 DEPTH is the current nesting depth of the query. */
13571 integer_valued_real_single_p (tree t
, int depth
)
13573 switch (TREE_CODE (t
))
13576 return real_isinteger (TREE_REAL_CST_PTR (t
), TYPE_MODE (TREE_TYPE (t
)));
13579 return RECURSE (TREE_OPERAND (t
, 1)) && RECURSE (TREE_OPERAND (t
, 2));
13582 /* Limit the depth of recursion to avoid quadratic behavior.
13583 This is expected to catch almost all occurrences in practice.
13584 If this code misses important cases that unbounded recursion
13585 would not, passes that need this information could be revised
13586 to provide it through dataflow propagation. */
13587 return (!name_registered_for_update_p (t
)
13588 && depth
< PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH
)
13589 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t
),
13598 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13599 has an integer value. We also allow +Inf, -Inf and NaN to be
13600 considered integer values. Return false for signaling NaN.
13602 DEPTH is the current nesting depth of the query. */
13605 integer_valued_real_invalid_p (tree t
, int depth
)
13607 switch (TREE_CODE (t
))
13609 case COMPOUND_EXPR
:
13612 return RECURSE (TREE_OPERAND (t
, 1));
13615 return RECURSE (TREE_OPERAND (t
, 0));
13624 #undef integer_valued_real_p
13626 /* Return true if the floating point expression T has an integer value.
13627 We also allow +Inf, -Inf and NaN to be considered integer values.
13628 Return false for signaling NaN.
13630 DEPTH is the current nesting depth of the query. */
13633 integer_valued_real_p (tree t
, int depth
)
13635 if (t
== error_mark_node
)
13638 tree_code code
= TREE_CODE (t
);
13639 switch (TREE_CODE_CLASS (code
))
13642 case tcc_comparison
:
13643 return integer_valued_real_binary_p (code
, TREE_OPERAND (t
, 0),
13644 TREE_OPERAND (t
, 1), depth
);
13647 return integer_valued_real_unary_p (code
, TREE_OPERAND (t
, 0), depth
);
13650 case tcc_declaration
:
13651 case tcc_reference
:
13652 return integer_valued_real_single_p (t
, depth
);
13662 return integer_valued_real_single_p (t
, depth
);
13666 tree arg0
= (call_expr_nargs (t
) > 0
13667 ? CALL_EXPR_ARG (t
, 0)
13669 tree arg1
= (call_expr_nargs (t
) > 1
13670 ? CALL_EXPR_ARG (t
, 1)
13672 return integer_valued_real_call_p (get_call_combined_fn (t
),
13673 arg0
, arg1
, depth
);
13677 return integer_valued_real_invalid_p (t
, depth
);
13681 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13682 attempt to fold the expression to a constant without modifying TYPE,
13685 If the expression could be simplified to a constant, then return
13686 the constant. If the expression would not be simplified to a
13687 constant, then return NULL_TREE. */
13690 fold_binary_to_constant (enum tree_code code
, tree type
, tree op0
, tree op1
)
13692 tree tem
= fold_binary (code
, type
, op0
, op1
);
13693 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
13696 /* Given the components of a unary expression CODE, TYPE and OP0,
13697 attempt to fold the expression to a constant without modifying
13700 If the expression could be simplified to a constant, then return
13701 the constant. If the expression would not be simplified to a
13702 constant, then return NULL_TREE. */
13705 fold_unary_to_constant (enum tree_code code
, tree type
, tree op0
)
13707 tree tem
= fold_unary (code
, type
, op0
);
13708 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
13711 /* If EXP represents referencing an element in a constant string
13712 (either via pointer arithmetic or array indexing), return the
13713 tree representing the value accessed, otherwise return NULL. */
13716 fold_read_from_constant_string (tree exp
)
13718 if ((TREE_CODE (exp
) == INDIRECT_REF
13719 || TREE_CODE (exp
) == ARRAY_REF
)
13720 && TREE_CODE (TREE_TYPE (exp
)) == INTEGER_TYPE
)
13722 tree exp1
= TREE_OPERAND (exp
, 0);
13725 location_t loc
= EXPR_LOCATION (exp
);
13727 if (TREE_CODE (exp
) == INDIRECT_REF
)
13728 string
= string_constant (exp1
, &index
);
13731 tree low_bound
= array_ref_low_bound (exp
);
13732 index
= fold_convert_loc (loc
, sizetype
, TREE_OPERAND (exp
, 1));
13734 /* Optimize the special-case of a zero lower bound.
13736 We convert the low_bound to sizetype to avoid some problems
13737 with constant folding. (E.g. suppose the lower bound is 1,
13738 and its mode is QI. Without the conversion,l (ARRAY
13739 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13740 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13741 if (! integer_zerop (low_bound
))
13742 index
= size_diffop_loc (loc
, index
,
13743 fold_convert_loc (loc
, sizetype
, low_bound
));
13749 && TYPE_MODE (TREE_TYPE (exp
)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))
13750 && TREE_CODE (string
) == STRING_CST
13751 && TREE_CODE (index
) == INTEGER_CST
13752 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
13753 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
))))
13755 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))) == 1))
13756 return build_int_cst_type (TREE_TYPE (exp
),
13757 (TREE_STRING_POINTER (string
)
13758 [TREE_INT_CST_LOW (index
)]));
13763 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13764 an integer constant, real, or fixed-point constant.
13766 TYPE is the type of the result. */
13769 fold_negate_const (tree arg0
, tree type
)
13771 tree t
= NULL_TREE
;
13773 switch (TREE_CODE (arg0
))
13778 wide_int val
= wi::neg (arg0
, &overflow
);
13779 t
= force_fit_type (type
, val
, 1,
13780 (overflow
| TREE_OVERFLOW (arg0
))
13781 && !TYPE_UNSIGNED (type
));
13786 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
13791 FIXED_VALUE_TYPE f
;
13792 bool overflow_p
= fixed_arithmetic (&f
, NEGATE_EXPR
,
13793 &(TREE_FIXED_CST (arg0
)), NULL
,
13794 TYPE_SATURATING (type
));
13795 t
= build_fixed (type
, f
);
13796 /* Propagate overflow flags. */
13797 if (overflow_p
| TREE_OVERFLOW (arg0
))
13798 TREE_OVERFLOW (t
) = 1;
13803 gcc_unreachable ();
13809 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13810 an integer constant or real constant.
13812 TYPE is the type of the result. */
13815 fold_abs_const (tree arg0
, tree type
)
13817 tree t
= NULL_TREE
;
13819 switch (TREE_CODE (arg0
))
13823 /* If the value is unsigned or non-negative, then the absolute value
13824 is the same as the ordinary value. */
13825 if (!wi::neg_p (arg0
, TYPE_SIGN (type
)))
13828 /* If the value is negative, then the absolute value is
13833 wide_int val
= wi::neg (arg0
, &overflow
);
13834 t
= force_fit_type (type
, val
, -1,
13835 overflow
| TREE_OVERFLOW (arg0
));
13841 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0
)))
13842 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
13848 gcc_unreachable ();
13854 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13855 constant. TYPE is the type of the result. */
13858 fold_not_const (const_tree arg0
, tree type
)
13860 gcc_assert (TREE_CODE (arg0
) == INTEGER_CST
);
13862 return force_fit_type (type
, wi::bit_not (arg0
), 0, TREE_OVERFLOW (arg0
));
13865 /* Given CODE, a relational operator, the target type, TYPE and two
13866 constant operands OP0 and OP1, return the result of the
13867 relational operation. If the result is not a compile time
13868 constant, then return NULL_TREE. */
13871 fold_relational_const (enum tree_code code
, tree type
, tree op0
, tree op1
)
13873 int result
, invert
;
13875 /* From here on, the only cases we handle are when the result is
13876 known to be a constant. */
13878 if (TREE_CODE (op0
) == REAL_CST
&& TREE_CODE (op1
) == REAL_CST
)
13880 const REAL_VALUE_TYPE
*c0
= TREE_REAL_CST_PTR (op0
);
13881 const REAL_VALUE_TYPE
*c1
= TREE_REAL_CST_PTR (op1
);
13883 /* Handle the cases where either operand is a NaN. */
13884 if (real_isnan (c0
) || real_isnan (c1
))
13894 case UNORDERED_EXPR
:
13908 if (flag_trapping_math
)
13914 gcc_unreachable ();
13917 return constant_boolean_node (result
, type
);
13920 return constant_boolean_node (real_compare (code
, c0
, c1
), type
);
13923 if (TREE_CODE (op0
) == FIXED_CST
&& TREE_CODE (op1
) == FIXED_CST
)
13925 const FIXED_VALUE_TYPE
*c0
= TREE_FIXED_CST_PTR (op0
);
13926 const FIXED_VALUE_TYPE
*c1
= TREE_FIXED_CST_PTR (op1
);
13927 return constant_boolean_node (fixed_compare (code
, c0
, c1
), type
);
13930 /* Handle equality/inequality of complex constants. */
13931 if (TREE_CODE (op0
) == COMPLEX_CST
&& TREE_CODE (op1
) == COMPLEX_CST
)
13933 tree rcond
= fold_relational_const (code
, type
,
13934 TREE_REALPART (op0
),
13935 TREE_REALPART (op1
));
13936 tree icond
= fold_relational_const (code
, type
,
13937 TREE_IMAGPART (op0
),
13938 TREE_IMAGPART (op1
));
13939 if (code
== EQ_EXPR
)
13940 return fold_build2 (TRUTH_ANDIF_EXPR
, type
, rcond
, icond
);
13941 else if (code
== NE_EXPR
)
13942 return fold_build2 (TRUTH_ORIF_EXPR
, type
, rcond
, icond
);
13947 if (TREE_CODE (op0
) == VECTOR_CST
&& TREE_CODE (op1
) == VECTOR_CST
)
13949 unsigned count
= VECTOR_CST_NELTS (op0
);
13950 tree
*elts
= XALLOCAVEC (tree
, count
);
13951 gcc_assert (VECTOR_CST_NELTS (op1
) == count
13952 && TYPE_VECTOR_SUBPARTS (type
) == count
);
13954 for (unsigned i
= 0; i
< count
; i
++)
13956 tree elem_type
= TREE_TYPE (type
);
13957 tree elem0
= VECTOR_CST_ELT (op0
, i
);
13958 tree elem1
= VECTOR_CST_ELT (op1
, i
);
13960 tree tem
= fold_relational_const (code
, elem_type
,
13963 if (tem
== NULL_TREE
)
13966 elts
[i
] = build_int_cst (elem_type
, integer_zerop (tem
) ? 0 : -1);
13969 return build_vector (type
, elts
);
13972 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13974 To compute GT, swap the arguments and do LT.
13975 To compute GE, do LT and invert the result.
13976 To compute LE, swap the arguments, do LT and invert the result.
13977 To compute NE, do EQ and invert the result.
13979 Therefore, the code below must handle only EQ and LT. */
13981 if (code
== LE_EXPR
|| code
== GT_EXPR
)
13983 std::swap (op0
, op1
);
13984 code
= swap_tree_comparison (code
);
13987 /* Note that it is safe to invert for real values here because we
13988 have already handled the one case that it matters. */
13991 if (code
== NE_EXPR
|| code
== GE_EXPR
)
13994 code
= invert_tree_comparison (code
, false);
13997 /* Compute a result for LT or EQ if args permit;
13998 Otherwise return T. */
13999 if (TREE_CODE (op0
) == INTEGER_CST
&& TREE_CODE (op1
) == INTEGER_CST
)
14001 if (code
== EQ_EXPR
)
14002 result
= tree_int_cst_equal (op0
, op1
);
14004 result
= tree_int_cst_lt (op0
, op1
);
14011 return constant_boolean_node (result
, type
);
14014 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14015 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14019 fold_build_cleanup_point_expr (tree type
, tree expr
)
14021 /* If the expression does not have side effects then we don't have to wrap
14022 it with a cleanup point expression. */
14023 if (!TREE_SIDE_EFFECTS (expr
))
14026 /* If the expression is a return, check to see if the expression inside the
14027 return has no side effects or the right hand side of the modify expression
14028 inside the return. If either don't have side effects set we don't need to
14029 wrap the expression in a cleanup point expression. Note we don't check the
14030 left hand side of the modify because it should always be a return decl. */
14031 if (TREE_CODE (expr
) == RETURN_EXPR
)
14033 tree op
= TREE_OPERAND (expr
, 0);
14034 if (!op
|| !TREE_SIDE_EFFECTS (op
))
14036 op
= TREE_OPERAND (op
, 1);
14037 if (!TREE_SIDE_EFFECTS (op
))
14041 return build1 (CLEANUP_POINT_EXPR
, type
, expr
);
14044 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14045 of an indirection through OP0, or NULL_TREE if no simplification is
14049 fold_indirect_ref_1 (location_t loc
, tree type
, tree op0
)
14055 subtype
= TREE_TYPE (sub
);
14056 if (!POINTER_TYPE_P (subtype
))
14059 if (TREE_CODE (sub
) == ADDR_EXPR
)
14061 tree op
= TREE_OPERAND (sub
, 0);
14062 tree optype
= TREE_TYPE (op
);
14063 /* *&CONST_DECL -> to the value of the const decl. */
14064 if (TREE_CODE (op
) == CONST_DECL
)
14065 return DECL_INITIAL (op
);
14066 /* *&p => p; make sure to handle *&"str"[cst] here. */
14067 if (type
== optype
)
14069 tree fop
= fold_read_from_constant_string (op
);
14075 /* *(foo *)&fooarray => fooarray[0] */
14076 else if (TREE_CODE (optype
) == ARRAY_TYPE
14077 && type
== TREE_TYPE (optype
)
14078 && (!in_gimple_form
14079 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
14081 tree type_domain
= TYPE_DOMAIN (optype
);
14082 tree min_val
= size_zero_node
;
14083 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
14084 min_val
= TYPE_MIN_VALUE (type_domain
);
14086 && TREE_CODE (min_val
) != INTEGER_CST
)
14088 return build4_loc (loc
, ARRAY_REF
, type
, op
, min_val
,
14089 NULL_TREE
, NULL_TREE
);
14091 /* *(foo *)&complexfoo => __real__ complexfoo */
14092 else if (TREE_CODE (optype
) == COMPLEX_TYPE
14093 && type
== TREE_TYPE (optype
))
14094 return fold_build1_loc (loc
, REALPART_EXPR
, type
, op
);
14095 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14096 else if (TREE_CODE (optype
) == VECTOR_TYPE
14097 && type
== TREE_TYPE (optype
))
14099 tree part_width
= TYPE_SIZE (type
);
14100 tree index
= bitsize_int (0);
14101 return fold_build3_loc (loc
, BIT_FIELD_REF
, type
, op
, part_width
, index
);
14105 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
14106 && TREE_CODE (TREE_OPERAND (sub
, 1)) == INTEGER_CST
)
14108 tree op00
= TREE_OPERAND (sub
, 0);
14109 tree op01
= TREE_OPERAND (sub
, 1);
14112 if (TREE_CODE (op00
) == ADDR_EXPR
)
14115 op00
= TREE_OPERAND (op00
, 0);
14116 op00type
= TREE_TYPE (op00
);
14118 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14119 if (TREE_CODE (op00type
) == VECTOR_TYPE
14120 && type
== TREE_TYPE (op00type
))
14122 HOST_WIDE_INT offset
= tree_to_shwi (op01
);
14123 tree part_width
= TYPE_SIZE (type
);
14124 unsigned HOST_WIDE_INT part_widthi
= tree_to_shwi (part_width
)/BITS_PER_UNIT
;
14125 unsigned HOST_WIDE_INT indexi
= offset
* BITS_PER_UNIT
;
14126 tree index
= bitsize_int (indexi
);
14128 if (offset
/ part_widthi
< TYPE_VECTOR_SUBPARTS (op00type
))
14129 return fold_build3_loc (loc
,
14130 BIT_FIELD_REF
, type
, op00
,
14131 part_width
, index
);
14134 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14135 else if (TREE_CODE (op00type
) == COMPLEX_TYPE
14136 && type
== TREE_TYPE (op00type
))
14138 tree size
= TYPE_SIZE_UNIT (type
);
14139 if (tree_int_cst_equal (size
, op01
))
14140 return fold_build1_loc (loc
, IMAGPART_EXPR
, type
, op00
);
14142 /* ((foo *)&fooarray)[1] => fooarray[1] */
14143 else if (TREE_CODE (op00type
) == ARRAY_TYPE
14144 && type
== TREE_TYPE (op00type
))
14146 tree type_domain
= TYPE_DOMAIN (op00type
);
14147 tree min_val
= size_zero_node
;
14148 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
14149 min_val
= TYPE_MIN_VALUE (type_domain
);
14150 op01
= size_binop_loc (loc
, EXACT_DIV_EXPR
, op01
,
14151 TYPE_SIZE_UNIT (type
));
14152 op01
= size_binop_loc (loc
, PLUS_EXPR
, op01
, min_val
);
14153 return build4_loc (loc
, ARRAY_REF
, type
, op00
, op01
,
14154 NULL_TREE
, NULL_TREE
);
14159 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14160 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
14161 && type
== TREE_TYPE (TREE_TYPE (subtype
))
14162 && (!in_gimple_form
14163 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
14166 tree min_val
= size_zero_node
;
14167 sub
= build_fold_indirect_ref_loc (loc
, sub
);
14168 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
14169 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
14170 min_val
= TYPE_MIN_VALUE (type_domain
);
14172 && TREE_CODE (min_val
) != INTEGER_CST
)
14174 return build4_loc (loc
, ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
,
14181 /* Builds an expression for an indirection through T, simplifying some
14185 build_fold_indirect_ref_loc (location_t loc
, tree t
)
14187 tree type
= TREE_TYPE (TREE_TYPE (t
));
14188 tree sub
= fold_indirect_ref_1 (loc
, type
, t
);
14193 return build1_loc (loc
, INDIRECT_REF
, type
, t
);
14196 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14199 fold_indirect_ref_loc (location_t loc
, tree t
)
14201 tree sub
= fold_indirect_ref_1 (loc
, TREE_TYPE (t
), TREE_OPERAND (t
, 0));
14209 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14210 whose result is ignored. The type of the returned tree need not be
14211 the same as the original expression. */
14214 fold_ignored_result (tree t
)
14216 if (!TREE_SIDE_EFFECTS (t
))
14217 return integer_zero_node
;
14220 switch (TREE_CODE_CLASS (TREE_CODE (t
)))
14223 t
= TREE_OPERAND (t
, 0);
14227 case tcc_comparison
:
14228 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
14229 t
= TREE_OPERAND (t
, 0);
14230 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 0)))
14231 t
= TREE_OPERAND (t
, 1);
14236 case tcc_expression
:
14237 switch (TREE_CODE (t
))
14239 case COMPOUND_EXPR
:
14240 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
14242 t
= TREE_OPERAND (t
, 0);
14246 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1))
14247 || TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 2)))
14249 t
= TREE_OPERAND (t
, 0);
14262 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14265 round_up_loc (location_t loc
, tree value
, unsigned int divisor
)
14267 tree div
= NULL_TREE
;
14272 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14273 have to do anything. Only do this when we are not given a const,
14274 because in that case, this check is more expensive than just
14276 if (TREE_CODE (value
) != INTEGER_CST
)
14278 div
= build_int_cst (TREE_TYPE (value
), divisor
);
14280 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
14284 /* If divisor is a power of two, simplify this to bit manipulation. */
14285 if (divisor
== (divisor
& -divisor
))
14287 if (TREE_CODE (value
) == INTEGER_CST
)
14289 wide_int val
= value
;
14292 if ((val
& (divisor
- 1)) == 0)
14295 overflow_p
= TREE_OVERFLOW (value
);
14296 val
+= divisor
- 1;
14297 val
&= - (int) divisor
;
14301 return force_fit_type (TREE_TYPE (value
), val
, -1, overflow_p
);
14307 t
= build_int_cst (TREE_TYPE (value
), divisor
- 1);
14308 value
= size_binop_loc (loc
, PLUS_EXPR
, value
, t
);
14309 t
= build_int_cst (TREE_TYPE (value
), - (int) divisor
);
14310 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
14316 div
= build_int_cst (TREE_TYPE (value
), divisor
);
14317 value
= size_binop_loc (loc
, CEIL_DIV_EXPR
, value
, div
);
14318 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
14324 /* Likewise, but round down. */
14327 round_down_loc (location_t loc
, tree value
, int divisor
)
14329 tree div
= NULL_TREE
;
14331 gcc_assert (divisor
> 0);
14335 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14336 have to do anything. Only do this when we are not given a const,
14337 because in that case, this check is more expensive than just
14339 if (TREE_CODE (value
) != INTEGER_CST
)
14341 div
= build_int_cst (TREE_TYPE (value
), divisor
);
14343 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
14347 /* If divisor is a power of two, simplify this to bit manipulation. */
14348 if (divisor
== (divisor
& -divisor
))
14352 t
= build_int_cst (TREE_TYPE (value
), -divisor
);
14353 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
14358 div
= build_int_cst (TREE_TYPE (value
), divisor
);
14359 value
= size_binop_loc (loc
, FLOOR_DIV_EXPR
, value
, div
);
14360 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
14366 /* Returns the pointer to the base of the object addressed by EXP and
14367 extracts the information about the offset of the access, storing it
14368 to PBITPOS and POFFSET. */
14371 split_address_to_core_and_offset (tree exp
,
14372 HOST_WIDE_INT
*pbitpos
, tree
*poffset
)
14376 int unsignedp
, reversep
, volatilep
;
14377 HOST_WIDE_INT bitsize
;
14378 location_t loc
= EXPR_LOCATION (exp
);
14380 if (TREE_CODE (exp
) == ADDR_EXPR
)
14382 core
= get_inner_reference (TREE_OPERAND (exp
, 0), &bitsize
, pbitpos
,
14383 poffset
, &mode
, &unsignedp
, &reversep
,
14384 &volatilep
, false);
14385 core
= build_fold_addr_expr_loc (loc
, core
);
14391 *poffset
= NULL_TREE
;
14397 /* Returns true if addresses of E1 and E2 differ by a constant, false
14398 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14401 ptr_difference_const (tree e1
, tree e2
, HOST_WIDE_INT
*diff
)
14404 HOST_WIDE_INT bitpos1
, bitpos2
;
14405 tree toffset1
, toffset2
, tdiff
, type
;
14407 core1
= split_address_to_core_and_offset (e1
, &bitpos1
, &toffset1
);
14408 core2
= split_address_to_core_and_offset (e2
, &bitpos2
, &toffset2
);
14410 if (bitpos1
% BITS_PER_UNIT
!= 0
14411 || bitpos2
% BITS_PER_UNIT
!= 0
14412 || !operand_equal_p (core1
, core2
, 0))
14415 if (toffset1
&& toffset2
)
14417 type
= TREE_TYPE (toffset1
);
14418 if (type
!= TREE_TYPE (toffset2
))
14419 toffset2
= fold_convert (type
, toffset2
);
14421 tdiff
= fold_build2 (MINUS_EXPR
, type
, toffset1
, toffset2
);
14422 if (!cst_and_fits_in_hwi (tdiff
))
14425 *diff
= int_cst_value (tdiff
);
14427 else if (toffset1
|| toffset2
)
14429 /* If only one of the offsets is non-constant, the difference cannot
14436 *diff
+= (bitpos1
- bitpos2
) / BITS_PER_UNIT
;
14440 /* Return OFF converted to a pointer offset type suitable as offset for
14441 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14443 convert_to_ptrofftype_loc (location_t loc
, tree off
)
14445 return fold_convert_loc (loc
, sizetype
, off
);
14448 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14450 fold_build_pointer_plus_loc (location_t loc
, tree ptr
, tree off
)
14452 return fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (ptr
),
14453 ptr
, convert_to_ptrofftype_loc (loc
, off
));
14456 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14458 fold_build_pointer_plus_hwi_loc (location_t loc
, tree ptr
, HOST_WIDE_INT off
)
14460 return fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (ptr
),
14461 ptr
, size_int (off
));
14464 /* Return a char pointer for a C string if it is a string constant
14465 or sum of string constant and integer constant. */
14468 c_getstr (tree src
)
14472 src
= string_constant (src
, &offset_node
);
14476 if (offset_node
== 0)
14477 return TREE_STRING_POINTER (src
);
14478 else if (!tree_fits_uhwi_p (offset_node
)
14479 || compare_tree_int (offset_node
, TREE_STRING_LENGTH (src
) - 1) > 0)
14482 return TREE_STRING_POINTER (src
) + tree_to_uhwi (offset_node
);