1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2017 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not, see
18 <http://www.gnu.org/licenses/>. */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
45 #include "coretypes.h"
54 #include "tree-ssa-operands.h"
55 #include "optabs-query.h"
57 #include "diagnostic-core.h"
60 #include "fold-const.h"
61 #include "fold-const-call.h"
62 #include "stor-layout.h"
64 #include "tree-iterator.h"
67 #include "langhooks.h"
72 #include "generic-match.h"
73 #include "gimple-fold.h"
75 #include "tree-into-ssa.h"
77 #include "case-cfn-macros.h"
78 #include "stringpool.h"
80 #include "tree-ssanames.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  Initializer folding is allowed to be more aggressive
   (e.g. evaluate non-finite values) than folding of ordinary
   expressions.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.
   Bit 0 = "less than", bit 1 = "equal", bit 2 = "greater than",
   bit 3 = "unordered", so e.g. LE == LT|EQ and TRUE has all bits set.
   NOTE(review): the enumerator list was dropped in extraction and is
   reconstructed here from the documented bit encoding — confirm
   against the full source.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
109 static bool negate_expr_p (tree
);
110 static tree
negate_expr (tree
);
111 static tree
associate_trees (location_t
, tree
, tree
, enum tree_code
, tree
);
112 static enum comparison_code
comparison_to_compcode (enum tree_code
);
113 static enum tree_code
compcode_to_comparison (enum comparison_code
);
114 static int operand_equal_for_comparison_p (tree
, tree
, tree
);
115 static int twoval_comparison_p (tree
, tree
*, tree
*, int *);
116 static tree
eval_subst (location_t
, tree
, tree
, tree
, tree
, tree
);
117 static tree
optimize_bit_field_compare (location_t
, enum tree_code
,
119 static int simple_operand_p (const_tree
);
120 static bool simple_operand_p_2 (tree
);
121 static tree
range_binop (enum tree_code
, tree
, tree
, int, tree
, int);
122 static tree
range_predecessor (tree
);
123 static tree
range_successor (tree
);
124 static tree
fold_range_test (location_t
, enum tree_code
, tree
, tree
, tree
);
125 static tree
fold_cond_expr_with_comparison (location_t
, tree
, tree
, tree
, tree
);
126 static tree
unextend (tree
, int, int, tree
);
127 static tree
extract_muldiv (tree
, tree
, enum tree_code
, tree
, bool *);
128 static tree
extract_muldiv_1 (tree
, tree
, enum tree_code
, tree
, bool *);
129 static tree
fold_binary_op_with_conditional_arg (location_t
,
130 enum tree_code
, tree
,
133 static tree
fold_negate_const (tree
, tree
);
134 static tree
fold_not_const (const_tree
, tree
);
135 static tree
fold_relational_const (enum tree_code
, tree
, tree
, tree
);
136 static tree
fold_convert_const (enum tree_code
, tree
, tree
);
137 static tree
fold_view_convert_expr (tree
, tree
);
138 static bool vec_cst_ctor_to_array (tree
, tree
*);
139 static tree
fold_negate_expr (location_t
, tree
);
142 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
143 Otherwise, return LOC. */
146 expr_location_or (tree t
, location_t loc
)
148 location_t tloc
= EXPR_LOCATION (t
);
149 return tloc
== UNKNOWN_LOCATION
? loc
: tloc
;
152 /* Similar to protected_set_expr_location, but never modify x in place,
153 if location can and needs to be set, unshare it. */
156 protected_set_expr_location_unshare (tree x
, location_t loc
)
158 if (CAN_HAVE_LOCATION_P (x
)
159 && EXPR_LOCATION (x
) != loc
160 && !(TREE_CODE (x
) == SAVE_EXPR
161 || TREE_CODE (x
) == TARGET_EXPR
162 || TREE_CODE (x
) == BIND_EXPR
))
165 SET_EXPR_LOCATION (x
, loc
);
170 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
171 division and returns the quotient. Otherwise returns
175 div_if_zero_remainder (const_tree arg1
, const_tree arg2
)
179 if (wi::multiple_of_p (wi::to_widest (arg1
), wi::to_widest (arg2
),
181 return wide_int_to_tree (TREE_TYPE (arg1
), quo
);
186 /* This is nonzero if we should defer warnings about undefined
187 overflow. This facility exists because these warnings are a
188 special case. The code to estimate loop iterations does not want
189 to issue any warnings, since it works with expressions which do not
190 occur in user code. Various bits of cleanup code call fold(), but
191 only use the result if it has certain characteristics (e.g., is a
192 constant); that code only wants to issue a warning if the result is
195 static int fold_deferring_overflow_warnings
;
197 /* If a warning about undefined overflow is deferred, this is the
198 warning. Note that this may cause us to turn two warnings into
199 one, but that is fine since it is sufficient to only give one
200 warning per expression. */
202 static const char* fold_deferred_overflow_warning
;
204 /* If a warning about undefined overflow is deferred, this is the
205 level at which the warning should be emitted. */
207 static enum warn_strict_overflow_code fold_deferred_overflow_code
;
209 /* Start deferring overflow warnings. We could use a stack here to
210 permit nested calls, but at present it is not necessary. */
213 fold_defer_overflow_warnings (void)
215 ++fold_deferring_overflow_warnings
;
218 /* Stop deferring overflow warnings. If there is a pending warning,
219 and ISSUE is true, then issue the warning if appropriate. STMT is
220 the statement with which the warning should be associated (used for
221 location information); STMT may be NULL. CODE is the level of the
222 warning--a warn_strict_overflow_code value. This function will use
223 the smaller of CODE and the deferred code when deciding whether to
224 issue the warning. CODE may be zero to mean to always use the
228 fold_undefer_overflow_warnings (bool issue
, const gimple
*stmt
, int code
)
233 gcc_assert (fold_deferring_overflow_warnings
> 0);
234 --fold_deferring_overflow_warnings
;
235 if (fold_deferring_overflow_warnings
> 0)
237 if (fold_deferred_overflow_warning
!= NULL
239 && code
< (int) fold_deferred_overflow_code
)
240 fold_deferred_overflow_code
= (enum warn_strict_overflow_code
) code
;
244 warnmsg
= fold_deferred_overflow_warning
;
245 fold_deferred_overflow_warning
= NULL
;
247 if (!issue
|| warnmsg
== NULL
)
250 if (gimple_no_warning_p (stmt
))
253 /* Use the smallest code level when deciding to issue the
255 if (code
== 0 || code
> (int) fold_deferred_overflow_code
)
256 code
= fold_deferred_overflow_code
;
258 if (!issue_strict_overflow_warning (code
))
262 locus
= input_location
;
264 locus
= gimple_location (stmt
);
265 warning_at (locus
, OPT_Wstrict_overflow
, "%s", warnmsg
);
268 /* Stop deferring overflow warnings, ignoring any deferred
272 fold_undefer_and_ignore_overflow_warnings (void)
274 fold_undefer_overflow_warnings (false, NULL
, 0);
277 /* Whether we are deferring overflow warnings. */
280 fold_deferring_overflow_warnings_p (void)
282 return fold_deferring_overflow_warnings
> 0;
285 /* This is called when we fold something based on the fact that signed
286 overflow is undefined. */
289 fold_overflow_warning (const char* gmsgid
, enum warn_strict_overflow_code wc
)
291 if (fold_deferring_overflow_warnings
> 0)
293 if (fold_deferred_overflow_warning
== NULL
294 || wc
< fold_deferred_overflow_code
)
296 fold_deferred_overflow_warning
= gmsgid
;
297 fold_deferred_overflow_code
= wc
;
300 else if (issue_strict_overflow_warning (wc
))
301 warning (OPT_Wstrict_overflow
, gmsgid
);
304 /* Return true if the built-in mathematical function specified by CODE
305 is odd, i.e. -f(x) == f(-x). */
/* NOTE(review): the body of this function — a switch over combined_fn
   values listing the odd math builtins — was lost in extraction; only
   the signature and one return statement survive.  Restore from the
   full source before relying on this definition.  */
308 negate_mathfn_p (combined_fn fn
)
/* Visible tail: for some group of functions (presumably the
   round-to-integer ones such as RINT/LRINT) oddness only holds when
   -frounding-math is not in effect — TODO confirm which cases reach
   this return.  */
341 return !flag_rounding_math
;
349 /* Check whether we may negate an integer constant T without causing
353 may_negate_without_overflow_p (const_tree t
)
357 gcc_assert (TREE_CODE (t
) == INTEGER_CST
);
359 type
= TREE_TYPE (t
);
360 if (TYPE_UNSIGNED (type
))
363 return !wi::only_sign_bit_p (t
);
366 /* Determine whether an expression T can be cheaply negated using
367 the function negate_expr without introducing undefined overflow. */
370 negate_expr_p (tree t
)
377 type
= TREE_TYPE (t
);
380 switch (TREE_CODE (t
))
383 if (INTEGRAL_TYPE_P (type
) && TYPE_UNSIGNED (type
))
386 /* Check that -CST will not overflow type. */
387 return may_negate_without_overflow_p (t
);
389 return (INTEGRAL_TYPE_P (type
)
390 && TYPE_OVERFLOW_WRAPS (type
));
396 return !TYPE_OVERFLOW_SANITIZED (type
);
399 /* We want to canonicalize to positive real constants. Pretend
400 that only negative ones can be easily negated. */
401 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
404 return negate_expr_p (TREE_REALPART (t
))
405 && negate_expr_p (TREE_IMAGPART (t
));
409 if (FLOAT_TYPE_P (TREE_TYPE (type
)) || TYPE_OVERFLOW_WRAPS (type
))
412 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
414 for (i
= 0; i
< count
; i
++)
415 if (!negate_expr_p (VECTOR_CST_ELT (t
, i
)))
422 return negate_expr_p (TREE_OPERAND (t
, 0))
423 && negate_expr_p (TREE_OPERAND (t
, 1));
426 return negate_expr_p (TREE_OPERAND (t
, 0));
429 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
430 || HONOR_SIGNED_ZEROS (element_mode (type
))
431 || (INTEGRAL_TYPE_P (type
)
432 && ! TYPE_OVERFLOW_WRAPS (type
)))
434 /* -(A + B) -> (-B) - A. */
435 if (negate_expr_p (TREE_OPERAND (t
, 1)))
437 /* -(A + B) -> (-A) - B. */
438 return negate_expr_p (TREE_OPERAND (t
, 0));
441 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
442 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
443 && !HONOR_SIGNED_ZEROS (element_mode (type
))
444 && (! INTEGRAL_TYPE_P (type
)
445 || TYPE_OVERFLOW_WRAPS (type
));
448 if (TYPE_UNSIGNED (type
))
450 /* INT_MIN/n * n doesn't overflow while negating one operand it does
451 if n is a (negative) power of two. */
452 if (INTEGRAL_TYPE_P (TREE_TYPE (t
))
453 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t
))
454 && ! ((TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
455 && wi::popcount (wi::abs (TREE_OPERAND (t
, 0))) != 1)
456 || (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
457 && wi::popcount (wi::abs (TREE_OPERAND (t
, 1))) != 1)))
463 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t
))))
464 return negate_expr_p (TREE_OPERAND (t
, 1))
465 || negate_expr_p (TREE_OPERAND (t
, 0));
471 if (TYPE_UNSIGNED (type
))
473 if (negate_expr_p (TREE_OPERAND (t
, 0)))
475 /* In general we can't negate B in A / B, because if A is INT_MIN and
476 B is 1, we may turn this into INT_MIN / -1 which is undefined
477 and actually traps on some architectures. */
478 if (! INTEGRAL_TYPE_P (TREE_TYPE (t
))
479 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t
))
480 || (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
481 && ! integer_onep (TREE_OPERAND (t
, 1))))
482 return negate_expr_p (TREE_OPERAND (t
, 1));
486 /* Negate -((double)float) as (double)(-float). */
487 if (TREE_CODE (type
) == REAL_TYPE
)
489 tree tem
= strip_float_extensions (t
);
491 return negate_expr_p (tem
);
496 /* Negate -f(x) as f(-x). */
497 if (negate_mathfn_p (get_call_combined_fn (t
)))
498 return negate_expr_p (CALL_EXPR_ARG (t
, 0));
502 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
503 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
505 tree op1
= TREE_OPERAND (t
, 1);
506 if (wi::eq_p (op1
, TYPE_PRECISION (type
) - 1))
517 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
518 simplification is possible.
519 If negate_expr_p would return true for T, NULL_TREE will never be
523 fold_negate_expr_1 (location_t loc
, tree t
)
525 tree type
= TREE_TYPE (t
);
528 switch (TREE_CODE (t
))
530 /* Convert - (~A) to A + 1. */
532 if (INTEGRAL_TYPE_P (type
))
533 return fold_build2_loc (loc
, PLUS_EXPR
, type
, TREE_OPERAND (t
, 0),
534 build_one_cst (type
));
538 tem
= fold_negate_const (t
, type
);
539 if (TREE_OVERFLOW (tem
) == TREE_OVERFLOW (t
)
540 || (ANY_INTEGRAL_TYPE_P (type
)
541 && !TYPE_OVERFLOW_TRAPS (type
)
542 && TYPE_OVERFLOW_WRAPS (type
))
543 || (flag_sanitize
& SANITIZE_SI_OVERFLOW
) == 0)
548 tem
= fold_negate_const (t
, type
);
552 tem
= fold_negate_const (t
, type
);
557 tree rpart
= fold_negate_expr (loc
, TREE_REALPART (t
));
558 tree ipart
= fold_negate_expr (loc
, TREE_IMAGPART (t
));
560 return build_complex (type
, rpart
, ipart
);
566 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
567 tree
*elts
= XALLOCAVEC (tree
, count
);
569 for (i
= 0; i
< count
; i
++)
571 elts
[i
] = fold_negate_expr (loc
, VECTOR_CST_ELT (t
, i
));
572 if (elts
[i
] == NULL_TREE
)
576 return build_vector (type
, elts
);
580 if (negate_expr_p (t
))
581 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
582 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)),
583 fold_negate_expr (loc
, TREE_OPERAND (t
, 1)));
587 if (negate_expr_p (t
))
588 return fold_build1_loc (loc
, CONJ_EXPR
, type
,
589 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)));
593 if (!TYPE_OVERFLOW_SANITIZED (type
))
594 return TREE_OPERAND (t
, 0);
598 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
599 && !HONOR_SIGNED_ZEROS (element_mode (type
)))
601 /* -(A + B) -> (-B) - A. */
602 if (negate_expr_p (TREE_OPERAND (t
, 1)))
604 tem
= negate_expr (TREE_OPERAND (t
, 1));
605 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
606 tem
, TREE_OPERAND (t
, 0));
609 /* -(A + B) -> (-A) - B. */
610 if (negate_expr_p (TREE_OPERAND (t
, 0)))
612 tem
= negate_expr (TREE_OPERAND (t
, 0));
613 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
614 tem
, TREE_OPERAND (t
, 1));
620 /* - (A - B) -> B - A */
621 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
622 && !HONOR_SIGNED_ZEROS (element_mode (type
)))
623 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
624 TREE_OPERAND (t
, 1), TREE_OPERAND (t
, 0));
628 if (TYPE_UNSIGNED (type
))
634 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
)))
636 tem
= TREE_OPERAND (t
, 1);
637 if (negate_expr_p (tem
))
638 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
639 TREE_OPERAND (t
, 0), negate_expr (tem
));
640 tem
= TREE_OPERAND (t
, 0);
641 if (negate_expr_p (tem
))
642 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
643 negate_expr (tem
), TREE_OPERAND (t
, 1));
650 if (TYPE_UNSIGNED (type
))
652 if (negate_expr_p (TREE_OPERAND (t
, 0)))
653 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
654 negate_expr (TREE_OPERAND (t
, 0)),
655 TREE_OPERAND (t
, 1));
656 /* In general we can't negate B in A / B, because if A is INT_MIN and
657 B is 1, we may turn this into INT_MIN / -1 which is undefined
658 and actually traps on some architectures. */
659 if ((! INTEGRAL_TYPE_P (TREE_TYPE (t
))
660 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t
))
661 || (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
662 && ! integer_onep (TREE_OPERAND (t
, 1))))
663 && negate_expr_p (TREE_OPERAND (t
, 1)))
664 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
666 negate_expr (TREE_OPERAND (t
, 1)));
670 /* Convert -((double)float) into (double)(-float). */
671 if (TREE_CODE (type
) == REAL_TYPE
)
673 tem
= strip_float_extensions (t
);
674 if (tem
!= t
&& negate_expr_p (tem
))
675 return fold_convert_loc (loc
, type
, negate_expr (tem
));
680 /* Negate -f(x) as f(-x). */
681 if (negate_mathfn_p (get_call_combined_fn (t
))
682 && negate_expr_p (CALL_EXPR_ARG (t
, 0)))
686 fndecl
= get_callee_fndecl (t
);
687 arg
= negate_expr (CALL_EXPR_ARG (t
, 0));
688 return build_call_expr_loc (loc
, fndecl
, 1, arg
);
693 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
694 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
696 tree op1
= TREE_OPERAND (t
, 1);
697 if (wi::eq_p (op1
, TYPE_PRECISION (type
) - 1))
699 tree ntype
= TYPE_UNSIGNED (type
)
700 ? signed_type_for (type
)
701 : unsigned_type_for (type
);
702 tree temp
= fold_convert_loc (loc
, ntype
, TREE_OPERAND (t
, 0));
703 temp
= fold_build2_loc (loc
, RSHIFT_EXPR
, ntype
, temp
, op1
);
704 return fold_convert_loc (loc
, type
, temp
);
716 /* A wrapper for fold_negate_expr_1. */
719 fold_negate_expr (location_t loc
, tree t
)
721 tree type
= TREE_TYPE (t
);
723 tree tem
= fold_negate_expr_1 (loc
, t
);
724 if (tem
== NULL_TREE
)
726 return fold_convert_loc (loc
, type
, tem
);
729 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
730 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
742 loc
= EXPR_LOCATION (t
);
743 type
= TREE_TYPE (t
);
746 tem
= fold_negate_expr (loc
, t
);
748 tem
= build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (t
), t
);
749 return fold_convert_loc (loc
, type
, tem
);
752 /* Split a tree IN into a constant, literal and variable parts that could be
753 combined with CODE to make IN. "constant" means an expression with
754 TREE_CONSTANT but that isn't an actual constant. CODE must be a
755 commutative arithmetic operation. Store the constant part into *CONP,
756 the literal in *LITP and return the variable part. If a part isn't
757 present, set it to null. If the tree does not decompose in this way,
758 return the entire tree as the variable part and the other parts as null.
760 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
761 case, we negate an operand that was subtracted. Except if it is a
762 literal for which we use *MINUS_LITP instead.
764 If NEGATE_P is true, we are negating all of IN, again except a literal
765 for which we use *MINUS_LITP instead. If a variable part is of pointer
766 type, it is negated after converting to TYPE. This prevents us from
767 generating illegal MINUS pointer expression. LOC is the location of
768 the converted variable part.
770 If IN is itself a literal or constant, return it as appropriate.
772 Note that we do not guarantee that any of the three values will be the
773 same type as IN, but they will have the same signedness and mode. */
776 split_tree (tree in
, tree type
, enum tree_code code
,
777 tree
*minus_varp
, tree
*conp
, tree
*minus_conp
,
778 tree
*litp
, tree
*minus_litp
, int negate_p
)
787 /* Strip any conversions that don't change the machine mode or signedness. */
788 STRIP_SIGN_NOPS (in
);
790 if (TREE_CODE (in
) == INTEGER_CST
|| TREE_CODE (in
) == REAL_CST
791 || TREE_CODE (in
) == FIXED_CST
)
793 else if (TREE_CODE (in
) == code
794 || ((! FLOAT_TYPE_P (TREE_TYPE (in
)) || flag_associative_math
)
795 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in
))
796 /* We can associate addition and subtraction together (even
797 though the C standard doesn't say so) for integers because
798 the value is not affected. For reals, the value might be
799 affected, so we can't. */
800 && ((code
== PLUS_EXPR
&& TREE_CODE (in
) == POINTER_PLUS_EXPR
)
801 || (code
== PLUS_EXPR
&& TREE_CODE (in
) == MINUS_EXPR
)
802 || (code
== MINUS_EXPR
803 && (TREE_CODE (in
) == PLUS_EXPR
804 || TREE_CODE (in
) == POINTER_PLUS_EXPR
)))))
806 tree op0
= TREE_OPERAND (in
, 0);
807 tree op1
= TREE_OPERAND (in
, 1);
808 int neg1_p
= TREE_CODE (in
) == MINUS_EXPR
;
809 int neg_litp_p
= 0, neg_conp_p
= 0, neg_var_p
= 0;
811 /* First see if either of the operands is a literal, then a constant. */
812 if (TREE_CODE (op0
) == INTEGER_CST
|| TREE_CODE (op0
) == REAL_CST
813 || TREE_CODE (op0
) == FIXED_CST
)
814 *litp
= op0
, op0
= 0;
815 else if (TREE_CODE (op1
) == INTEGER_CST
|| TREE_CODE (op1
) == REAL_CST
816 || TREE_CODE (op1
) == FIXED_CST
)
817 *litp
= op1
, neg_litp_p
= neg1_p
, op1
= 0;
819 if (op0
!= 0 && TREE_CONSTANT (op0
))
820 *conp
= op0
, op0
= 0;
821 else if (op1
!= 0 && TREE_CONSTANT (op1
))
822 *conp
= op1
, neg_conp_p
= neg1_p
, op1
= 0;
824 /* If we haven't dealt with either operand, this is not a case we can
825 decompose. Otherwise, VAR is either of the ones remaining, if any. */
826 if (op0
!= 0 && op1
!= 0)
831 var
= op1
, neg_var_p
= neg1_p
;
833 /* Now do any needed negations. */
835 *minus_litp
= *litp
, *litp
= 0;
836 if (neg_conp_p
&& *conp
)
837 *minus_conp
= *conp
, *conp
= 0;
838 if (neg_var_p
&& var
)
839 *minus_varp
= var
, var
= 0;
841 else if (TREE_CONSTANT (in
))
843 else if (TREE_CODE (in
) == BIT_NOT_EXPR
844 && code
== PLUS_EXPR
)
846 /* -1 - X is folded to ~X, undo that here. Do _not_ do this
847 when IN is constant. */
848 *litp
= build_minus_one_cst (type
);
849 *minus_varp
= TREE_OPERAND (in
, 0);
857 *minus_litp
= *litp
, *litp
= 0;
858 else if (*minus_litp
)
859 *litp
= *minus_litp
, *minus_litp
= 0;
861 *minus_conp
= *conp
, *conp
= 0;
862 else if (*minus_conp
)
863 *conp
= *minus_conp
, *minus_conp
= 0;
865 *minus_varp
= var
, var
= 0;
866 else if (*minus_varp
)
867 var
= *minus_varp
, *minus_varp
= 0;
871 && TREE_OVERFLOW_P (*litp
))
872 *litp
= drop_tree_overflow (*litp
);
874 && TREE_OVERFLOW_P (*minus_litp
))
875 *minus_litp
= drop_tree_overflow (*minus_litp
);
880 /* Re-associate trees split by the above function. T1 and T2 are
881 either expressions to associate or null. Return the new
882 expression, if any. LOC is the location of the new expression. If
883 we build an operation, do it in TYPE and with CODE. */
886 associate_trees (location_t loc
, tree t1
, tree t2
, enum tree_code code
, tree type
)
890 gcc_assert (t2
== 0 || code
!= MINUS_EXPR
);
896 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
897 try to fold this since we will have infinite recursion. But do
898 deal with any NEGATE_EXPRs. */
899 if (TREE_CODE (t1
) == code
|| TREE_CODE (t2
) == code
900 || TREE_CODE (t1
) == PLUS_EXPR
|| TREE_CODE (t2
) == PLUS_EXPR
901 || TREE_CODE (t1
) == MINUS_EXPR
|| TREE_CODE (t2
) == MINUS_EXPR
)
903 if (code
== PLUS_EXPR
)
905 if (TREE_CODE (t1
) == NEGATE_EXPR
)
906 return build2_loc (loc
, MINUS_EXPR
, type
,
907 fold_convert_loc (loc
, type
, t2
),
908 fold_convert_loc (loc
, type
,
909 TREE_OPERAND (t1
, 0)));
910 else if (TREE_CODE (t2
) == NEGATE_EXPR
)
911 return build2_loc (loc
, MINUS_EXPR
, type
,
912 fold_convert_loc (loc
, type
, t1
),
913 fold_convert_loc (loc
, type
,
914 TREE_OPERAND (t2
, 0)));
915 else if (integer_zerop (t2
))
916 return fold_convert_loc (loc
, type
, t1
);
918 else if (code
== MINUS_EXPR
)
920 if (integer_zerop (t2
))
921 return fold_convert_loc (loc
, type
, t1
);
924 return build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
925 fold_convert_loc (loc
, type
, t2
));
928 return fold_build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
929 fold_convert_loc (loc
, type
, t2
));
932 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
933 for use in int_const_binop, size_binop and size_diffop. */
936 int_binop_types_match_p (enum tree_code code
, const_tree type1
, const_tree type2
)
938 if (!INTEGRAL_TYPE_P (type1
) && !POINTER_TYPE_P (type1
))
940 if (!INTEGRAL_TYPE_P (type2
) && !POINTER_TYPE_P (type2
))
955 return TYPE_UNSIGNED (type1
) == TYPE_UNSIGNED (type2
)
956 && TYPE_PRECISION (type1
) == TYPE_PRECISION (type2
)
957 && TYPE_MODE (type1
) == TYPE_MODE (type2
);
961 /* Combine two integer constants ARG1 and ARG2 under operation CODE
962 to produce a new constant. Return NULL_TREE if we don't know how
963 to evaluate CODE at compile-time. */
966 int_const_binop_1 (enum tree_code code
, const_tree arg1
, const_tree parg2
,
971 tree type
= TREE_TYPE (arg1
);
972 signop sign
= TYPE_SIGN (type
);
973 bool overflow
= false;
975 wide_int arg2
= wi::to_wide (parg2
, TYPE_PRECISION (type
));
980 res
= wi::bit_or (arg1
, arg2
);
984 res
= wi::bit_xor (arg1
, arg2
);
988 res
= wi::bit_and (arg1
, arg2
);
993 if (wi::neg_p (arg2
))
996 if (code
== RSHIFT_EXPR
)
1002 if (code
== RSHIFT_EXPR
)
1003 /* It's unclear from the C standard whether shifts can overflow.
1004 The following code ignores overflow; perhaps a C standard
1005 interpretation ruling is needed. */
1006 res
= wi::rshift (arg1
, arg2
, sign
);
1008 res
= wi::lshift (arg1
, arg2
);
1013 if (wi::neg_p (arg2
))
1016 if (code
== RROTATE_EXPR
)
1017 code
= LROTATE_EXPR
;
1019 code
= RROTATE_EXPR
;
1022 if (code
== RROTATE_EXPR
)
1023 res
= wi::rrotate (arg1
, arg2
);
1025 res
= wi::lrotate (arg1
, arg2
);
1029 res
= wi::add (arg1
, arg2
, sign
, &overflow
);
1033 res
= wi::sub (arg1
, arg2
, sign
, &overflow
);
1037 res
= wi::mul (arg1
, arg2
, sign
, &overflow
);
1040 case MULT_HIGHPART_EXPR
:
1041 res
= wi::mul_high (arg1
, arg2
, sign
);
1044 case TRUNC_DIV_EXPR
:
1045 case EXACT_DIV_EXPR
:
1048 res
= wi::div_trunc (arg1
, arg2
, sign
, &overflow
);
1051 case FLOOR_DIV_EXPR
:
1054 res
= wi::div_floor (arg1
, arg2
, sign
, &overflow
);
1060 res
= wi::div_ceil (arg1
, arg2
, sign
, &overflow
);
1063 case ROUND_DIV_EXPR
:
1066 res
= wi::div_round (arg1
, arg2
, sign
, &overflow
);
1069 case TRUNC_MOD_EXPR
:
1072 res
= wi::mod_trunc (arg1
, arg2
, sign
, &overflow
);
1075 case FLOOR_MOD_EXPR
:
1078 res
= wi::mod_floor (arg1
, arg2
, sign
, &overflow
);
1084 res
= wi::mod_ceil (arg1
, arg2
, sign
, &overflow
);
1087 case ROUND_MOD_EXPR
:
1090 res
= wi::mod_round (arg1
, arg2
, sign
, &overflow
);
1094 res
= wi::min (arg1
, arg2
, sign
);
1098 res
= wi::max (arg1
, arg2
, sign
);
1105 t
= force_fit_type (type
, res
, overflowable
,
1106 (((sign
== SIGNED
|| overflowable
== -1)
1108 | TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (parg2
)));
1114 int_const_binop (enum tree_code code
, const_tree arg1
, const_tree arg2
)
1116 return int_const_binop_1 (code
, arg1
, arg2
, 1);
1119 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1120 constant. We assume ARG1 and ARG2 have the same data type, or at least
1121 are the same kind of constant and the same machine mode. Return zero if
1122 combining the constants is not allowed in the current operating mode. */
1125 const_binop (enum tree_code code
, tree arg1
, tree arg2
)
1127 /* Sanity check for the recursive cases. */
1134 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg2
) == INTEGER_CST
)
1136 if (code
== POINTER_PLUS_EXPR
)
1137 return int_const_binop (PLUS_EXPR
,
1138 arg1
, fold_convert (TREE_TYPE (arg1
), arg2
));
1140 return int_const_binop (code
, arg1
, arg2
);
1143 if (TREE_CODE (arg1
) == REAL_CST
&& TREE_CODE (arg2
) == REAL_CST
)
1148 REAL_VALUE_TYPE value
;
1149 REAL_VALUE_TYPE result
;
1153 /* The following codes are handled by real_arithmetic. */
1168 d1
= TREE_REAL_CST (arg1
);
1169 d2
= TREE_REAL_CST (arg2
);
1171 type
= TREE_TYPE (arg1
);
1172 mode
= TYPE_MODE (type
);
1174 /* Don't perform operation if we honor signaling NaNs and
1175 either operand is a signaling NaN. */
1176 if (HONOR_SNANS (mode
)
1177 && (REAL_VALUE_ISSIGNALING_NAN (d1
)
1178 || REAL_VALUE_ISSIGNALING_NAN (d2
)))
1181 /* Don't perform operation if it would raise a division
1182 by zero exception. */
1183 if (code
== RDIV_EXPR
1184 && real_equal (&d2
, &dconst0
)
1185 && (flag_trapping_math
|| ! MODE_HAS_INFINITIES (mode
)))
1188 /* If either operand is a NaN, just return it. Otherwise, set up
1189 for floating-point trap; we return an overflow. */
1190 if (REAL_VALUE_ISNAN (d1
))
1192 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1195 t
= build_real (type
, d1
);
1198 else if (REAL_VALUE_ISNAN (d2
))
1200 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1203 t
= build_real (type
, d2
);
1207 inexact
= real_arithmetic (&value
, code
, &d1
, &d2
);
1208 real_convert (&result
, mode
, &value
);
1210 /* Don't constant fold this floating point operation if
1211 the result has overflowed and flag_trapping_math. */
1212 if (flag_trapping_math
1213 && MODE_HAS_INFINITIES (mode
)
1214 && REAL_VALUE_ISINF (result
)
1215 && !REAL_VALUE_ISINF (d1
)
1216 && !REAL_VALUE_ISINF (d2
))
1219 /* Don't constant fold this floating point operation if the
1220 result may dependent upon the run-time rounding mode and
1221 flag_rounding_math is set, or if GCC's software emulation
1222 is unable to accurately represent the result. */
1223 if ((flag_rounding_math
1224 || (MODE_COMPOSITE_P (mode
) && !flag_unsafe_math_optimizations
))
1225 && (inexact
|| !real_identical (&result
, &value
)))
1228 t
= build_real (type
, result
);
1230 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
);
1234 if (TREE_CODE (arg1
) == FIXED_CST
)
1236 FIXED_VALUE_TYPE f1
;
1237 FIXED_VALUE_TYPE f2
;
1238 FIXED_VALUE_TYPE result
;
1243 /* The following codes are handled by fixed_arithmetic. */
1249 case TRUNC_DIV_EXPR
:
1250 if (TREE_CODE (arg2
) != FIXED_CST
)
1252 f2
= TREE_FIXED_CST (arg2
);
1258 if (TREE_CODE (arg2
) != INTEGER_CST
)
1261 f2
.data
.high
= w2
.elt (1);
1262 f2
.data
.low
= w2
.ulow ();
1271 f1
= TREE_FIXED_CST (arg1
);
1272 type
= TREE_TYPE (arg1
);
1273 sat_p
= TYPE_SATURATING (type
);
1274 overflow_p
= fixed_arithmetic (&result
, code
, &f1
, &f2
, sat_p
);
1275 t
= build_fixed (type
, result
);
1276 /* Propagate overflow flags. */
1277 if (overflow_p
| TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
))
1278 TREE_OVERFLOW (t
) = 1;
1282 if (TREE_CODE (arg1
) == COMPLEX_CST
&& TREE_CODE (arg2
) == COMPLEX_CST
)
1284 tree type
= TREE_TYPE (arg1
);
1285 tree r1
= TREE_REALPART (arg1
);
1286 tree i1
= TREE_IMAGPART (arg1
);
1287 tree r2
= TREE_REALPART (arg2
);
1288 tree i2
= TREE_IMAGPART (arg2
);
1295 real
= const_binop (code
, r1
, r2
);
1296 imag
= const_binop (code
, i1
, i2
);
1300 if (COMPLEX_FLOAT_TYPE_P (type
))
1301 return do_mpc_arg2 (arg1
, arg2
, type
,
1302 /* do_nonfinite= */ folding_initializer
,
1305 real
= const_binop (MINUS_EXPR
,
1306 const_binop (MULT_EXPR
, r1
, r2
),
1307 const_binop (MULT_EXPR
, i1
, i2
));
1308 imag
= const_binop (PLUS_EXPR
,
1309 const_binop (MULT_EXPR
, r1
, i2
),
1310 const_binop (MULT_EXPR
, i1
, r2
));
1314 if (COMPLEX_FLOAT_TYPE_P (type
))
1315 return do_mpc_arg2 (arg1
, arg2
, type
,
1316 /* do_nonfinite= */ folding_initializer
,
1319 case TRUNC_DIV_EXPR
:
1321 case FLOOR_DIV_EXPR
:
1322 case ROUND_DIV_EXPR
:
1323 if (flag_complex_method
== 0)
1325 /* Keep this algorithm in sync with
1326 tree-complex.c:expand_complex_div_straight().
1328 Expand complex division to scalars, straightforward algorithm.
1329 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1333 = const_binop (PLUS_EXPR
,
1334 const_binop (MULT_EXPR
, r2
, r2
),
1335 const_binop (MULT_EXPR
, i2
, i2
));
1337 = const_binop (PLUS_EXPR
,
1338 const_binop (MULT_EXPR
, r1
, r2
),
1339 const_binop (MULT_EXPR
, i1
, i2
));
1341 = const_binop (MINUS_EXPR
,
1342 const_binop (MULT_EXPR
, i1
, r2
),
1343 const_binop (MULT_EXPR
, r1
, i2
));
1345 real
= const_binop (code
, t1
, magsquared
);
1346 imag
= const_binop (code
, t2
, magsquared
);
1350 /* Keep this algorithm in sync with
1351 tree-complex.c:expand_complex_div_wide().
1353 Expand complex division to scalars, modified algorithm to minimize
1354 overflow with wide input ranges. */
1355 tree compare
= fold_build2 (LT_EXPR
, boolean_type_node
,
1356 fold_abs_const (r2
, TREE_TYPE (type
)),
1357 fold_abs_const (i2
, TREE_TYPE (type
)));
1359 if (integer_nonzerop (compare
))
1361 /* In the TRUE branch, we compute
1363 div = (br * ratio) + bi;
1364 tr = (ar * ratio) + ai;
1365 ti = (ai * ratio) - ar;
1368 tree ratio
= const_binop (code
, r2
, i2
);
1369 tree div
= const_binop (PLUS_EXPR
, i2
,
1370 const_binop (MULT_EXPR
, r2
, ratio
));
1371 real
= const_binop (MULT_EXPR
, r1
, ratio
);
1372 real
= const_binop (PLUS_EXPR
, real
, i1
);
1373 real
= const_binop (code
, real
, div
);
1375 imag
= const_binop (MULT_EXPR
, i1
, ratio
);
1376 imag
= const_binop (MINUS_EXPR
, imag
, r1
);
1377 imag
= const_binop (code
, imag
, div
);
1381 /* In the FALSE branch, we compute
1383 divisor = (d * ratio) + c;
1384 tr = (b * ratio) + a;
1385 ti = b - (a * ratio);
1388 tree ratio
= const_binop (code
, i2
, r2
);
1389 tree div
= const_binop (PLUS_EXPR
, r2
,
1390 const_binop (MULT_EXPR
, i2
, ratio
));
1392 real
= const_binop (MULT_EXPR
, i1
, ratio
);
1393 real
= const_binop (PLUS_EXPR
, real
, r1
);
1394 real
= const_binop (code
, real
, div
);
1396 imag
= const_binop (MULT_EXPR
, r1
, ratio
);
1397 imag
= const_binop (MINUS_EXPR
, i1
, imag
);
1398 imag
= const_binop (code
, imag
, div
);
1408 return build_complex (type
, real
, imag
);
1411 if (TREE_CODE (arg1
) == VECTOR_CST
1412 && TREE_CODE (arg2
) == VECTOR_CST
)
1414 tree type
= TREE_TYPE (arg1
);
1415 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
1416 tree
*elts
= XALLOCAVEC (tree
, count
);
1418 for (i
= 0; i
< count
; i
++)
1420 tree elem1
= VECTOR_CST_ELT (arg1
, i
);
1421 tree elem2
= VECTOR_CST_ELT (arg2
, i
);
1423 elts
[i
] = const_binop (code
, elem1
, elem2
);
1425 /* It is possible that const_binop cannot handle the given
1426 code and return NULL_TREE */
1427 if (elts
[i
] == NULL_TREE
)
1431 return build_vector (type
, elts
);
1434 /* Shifts allow a scalar offset for a vector. */
1435 if (TREE_CODE (arg1
) == VECTOR_CST
1436 && TREE_CODE (arg2
) == INTEGER_CST
)
1438 tree type
= TREE_TYPE (arg1
);
1439 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
1440 tree
*elts
= XALLOCAVEC (tree
, count
);
1442 for (i
= 0; i
< count
; i
++)
1444 tree elem1
= VECTOR_CST_ELT (arg1
, i
);
1446 elts
[i
] = const_binop (code
, elem1
, arg2
);
1448 /* It is possible that const_binop cannot handle the given
1449 code and return NULL_TREE. */
1450 if (elts
[i
] == NULL_TREE
)
1454 return build_vector (type
, elts
);
1459 /* Overload that adds a TYPE parameter to be able to dispatch
1460 to fold_relational_const. */
1463 const_binop (enum tree_code code
, tree type
, tree arg1
, tree arg2
)
1465 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
1466 return fold_relational_const (code
, type
, arg1
, arg2
);
1468 /* ??? Until we make the const_binop worker take the type of the
1469 result as argument put those cases that need it here. */
1473 if ((TREE_CODE (arg1
) == REAL_CST
1474 && TREE_CODE (arg2
) == REAL_CST
)
1475 || (TREE_CODE (arg1
) == INTEGER_CST
1476 && TREE_CODE (arg2
) == INTEGER_CST
))
1477 return build_complex (type
, arg1
, arg2
);
1480 case VEC_PACK_TRUNC_EXPR
:
1481 case VEC_PACK_FIX_TRUNC_EXPR
:
1483 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
1486 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
/ 2
1487 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2
)) == nelts
/ 2);
1488 if (TREE_CODE (arg1
) != VECTOR_CST
1489 || TREE_CODE (arg2
) != VECTOR_CST
)
1492 elts
= XALLOCAVEC (tree
, nelts
);
1493 if (!vec_cst_ctor_to_array (arg1
, elts
)
1494 || !vec_cst_ctor_to_array (arg2
, elts
+ nelts
/ 2))
1497 for (i
= 0; i
< nelts
; i
++)
1499 elts
[i
] = fold_convert_const (code
== VEC_PACK_TRUNC_EXPR
1500 ? NOP_EXPR
: FIX_TRUNC_EXPR
,
1501 TREE_TYPE (type
), elts
[i
]);
1502 if (elts
[i
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[i
]))
1506 return build_vector (type
, elts
);
1509 case VEC_WIDEN_MULT_LO_EXPR
:
1510 case VEC_WIDEN_MULT_HI_EXPR
:
1511 case VEC_WIDEN_MULT_EVEN_EXPR
:
1512 case VEC_WIDEN_MULT_ODD_EXPR
:
1514 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
);
1515 unsigned int out
, ofs
, scale
;
1518 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
* 2
1519 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2
)) == nelts
* 2);
1520 if (TREE_CODE (arg1
) != VECTOR_CST
|| TREE_CODE (arg2
) != VECTOR_CST
)
1523 elts
= XALLOCAVEC (tree
, nelts
* 4);
1524 if (!vec_cst_ctor_to_array (arg1
, elts
)
1525 || !vec_cst_ctor_to_array (arg2
, elts
+ nelts
* 2))
1528 if (code
== VEC_WIDEN_MULT_LO_EXPR
)
1529 scale
= 0, ofs
= BYTES_BIG_ENDIAN
? nelts
: 0;
1530 else if (code
== VEC_WIDEN_MULT_HI_EXPR
)
1531 scale
= 0, ofs
= BYTES_BIG_ENDIAN
? 0 : nelts
;
1532 else if (code
== VEC_WIDEN_MULT_EVEN_EXPR
)
1534 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1537 for (out
= 0; out
< nelts
; out
++)
1539 unsigned int in1
= (out
<< scale
) + ofs
;
1540 unsigned int in2
= in1
+ nelts
* 2;
1543 t1
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), elts
[in1
]);
1544 t2
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), elts
[in2
]);
1546 if (t1
== NULL_TREE
|| t2
== NULL_TREE
)
1548 elts
[out
] = const_binop (MULT_EXPR
, t1
, t2
);
1549 if (elts
[out
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[out
]))
1553 return build_vector (type
, elts
);
1559 if (TREE_CODE_CLASS (code
) != tcc_binary
)
1562 /* Make sure type and arg0 have the same saturating flag. */
1563 gcc_checking_assert (TYPE_SATURATING (type
)
1564 == TYPE_SATURATING (TREE_TYPE (arg1
)));
1566 return const_binop (code
, arg1
, arg2
);
1569 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1570 Return zero if computing the constants is not possible. */
1573 const_unop (enum tree_code code
, tree type
, tree arg0
)
1575 /* Don't perform the operation, other than NEGATE and ABS, if
1576 flag_signaling_nans is on and the operand is a signaling NaN. */
1577 if (TREE_CODE (arg0
) == REAL_CST
1578 && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
1579 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0
))
1580 && code
!= NEGATE_EXPR
1581 && code
!= ABS_EXPR
)
1588 case FIX_TRUNC_EXPR
:
1589 case FIXED_CONVERT_EXPR
:
1590 return fold_convert_const (code
, type
, arg0
);
1592 case ADDR_SPACE_CONVERT_EXPR
:
1593 /* If the source address is 0, and the source address space
1594 cannot have a valid object at 0, fold to dest type null. */
1595 if (integer_zerop (arg0
)
1596 && !(targetm
.addr_space
.zero_address_valid
1597 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0
))))))
1598 return fold_convert_const (code
, type
, arg0
);
1601 case VIEW_CONVERT_EXPR
:
1602 return fold_view_convert_expr (type
, arg0
);
1606 /* Can't call fold_negate_const directly here as that doesn't
1607 handle all cases and we might not be able to negate some
1609 tree tem
= fold_negate_expr (UNKNOWN_LOCATION
, arg0
);
1610 if (tem
&& CONSTANT_CLASS_P (tem
))
1616 if (TREE_CODE (arg0
) == INTEGER_CST
|| TREE_CODE (arg0
) == REAL_CST
)
1617 return fold_abs_const (arg0
, type
);
1621 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1623 tree ipart
= fold_negate_const (TREE_IMAGPART (arg0
),
1625 return build_complex (type
, TREE_REALPART (arg0
), ipart
);
1630 if (TREE_CODE (arg0
) == INTEGER_CST
)
1631 return fold_not_const (arg0
, type
);
1632 /* Perform BIT_NOT_EXPR on each element individually. */
1633 else if (TREE_CODE (arg0
) == VECTOR_CST
)
1637 unsigned count
= VECTOR_CST_NELTS (arg0
), i
;
1639 elements
= XALLOCAVEC (tree
, count
);
1640 for (i
= 0; i
< count
; i
++)
1642 elem
= VECTOR_CST_ELT (arg0
, i
);
1643 elem
= const_unop (BIT_NOT_EXPR
, TREE_TYPE (type
), elem
);
1644 if (elem
== NULL_TREE
)
1649 return build_vector (type
, elements
);
1653 case TRUTH_NOT_EXPR
:
1654 if (TREE_CODE (arg0
) == INTEGER_CST
)
1655 return constant_boolean_node (integer_zerop (arg0
), type
);
1659 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1660 return fold_convert (type
, TREE_REALPART (arg0
));
1664 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1665 return fold_convert (type
, TREE_IMAGPART (arg0
));
1668 case VEC_UNPACK_LO_EXPR
:
1669 case VEC_UNPACK_HI_EXPR
:
1670 case VEC_UNPACK_FLOAT_LO_EXPR
:
1671 case VEC_UNPACK_FLOAT_HI_EXPR
:
1673 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
1675 enum tree_code subcode
;
1677 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
* 2);
1678 if (TREE_CODE (arg0
) != VECTOR_CST
)
1681 elts
= XALLOCAVEC (tree
, nelts
* 2);
1682 if (!vec_cst_ctor_to_array (arg0
, elts
))
1685 if ((!BYTES_BIG_ENDIAN
) ^ (code
== VEC_UNPACK_LO_EXPR
1686 || code
== VEC_UNPACK_FLOAT_LO_EXPR
))
1689 if (code
== VEC_UNPACK_LO_EXPR
|| code
== VEC_UNPACK_HI_EXPR
)
1692 subcode
= FLOAT_EXPR
;
1694 for (i
= 0; i
< nelts
; i
++)
1696 elts
[i
] = fold_convert_const (subcode
, TREE_TYPE (type
), elts
[i
]);
1697 if (elts
[i
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[i
]))
1701 return build_vector (type
, elts
);
1704 case REDUC_MIN_EXPR
:
1705 case REDUC_MAX_EXPR
:
1706 case REDUC_PLUS_EXPR
:
1708 unsigned int nelts
, i
;
1710 enum tree_code subcode
;
1712 if (TREE_CODE (arg0
) != VECTOR_CST
)
1714 nelts
= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
));
1716 elts
= XALLOCAVEC (tree
, nelts
);
1717 if (!vec_cst_ctor_to_array (arg0
, elts
))
1722 case REDUC_MIN_EXPR
: subcode
= MIN_EXPR
; break;
1723 case REDUC_MAX_EXPR
: subcode
= MAX_EXPR
; break;
1724 case REDUC_PLUS_EXPR
: subcode
= PLUS_EXPR
; break;
1725 default: gcc_unreachable ();
1728 for (i
= 1; i
< nelts
; i
++)
1730 elts
[0] = const_binop (subcode
, elts
[0], elts
[i
]);
1731 if (elts
[0] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[0]))
1745 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1746 indicates which particular sizetype to create. */
1749 size_int_kind (HOST_WIDE_INT number
, enum size_type_kind kind
)
1751 return build_int_cst (sizetype_tab
[(int) kind
], number
);
1754 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1755 is a tree code. The type of the result is taken from the operands.
1756 Both must be equivalent integer types, ala int_binop_types_match_p.
1757 If the operands are constant, so is the result. */
1760 size_binop_loc (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
)
1762 tree type
= TREE_TYPE (arg0
);
1764 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
1765 return error_mark_node
;
1767 gcc_assert (int_binop_types_match_p (code
, TREE_TYPE (arg0
),
1770 /* Handle the special case of two integer constants faster. */
1771 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
1773 /* And some specific cases even faster than that. */
1774 if (code
== PLUS_EXPR
)
1776 if (integer_zerop (arg0
) && !TREE_OVERFLOW (arg0
))
1778 if (integer_zerop (arg1
) && !TREE_OVERFLOW (arg1
))
1781 else if (code
== MINUS_EXPR
)
1783 if (integer_zerop (arg1
) && !TREE_OVERFLOW (arg1
))
1786 else if (code
== MULT_EXPR
)
1788 if (integer_onep (arg0
) && !TREE_OVERFLOW (arg0
))
1792 /* Handle general case of two integer constants. For sizetype
1793 constant calculations we always want to know about overflow,
1794 even in the unsigned case. */
1795 return int_const_binop_1 (code
, arg0
, arg1
, -1);
1798 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
1801 /* Given two values, either both of sizetype or both of bitsizetype,
1802 compute the difference between the two values. Return the value
1803 in signed type corresponding to the type of the operands. */
1806 size_diffop_loc (location_t loc
, tree arg0
, tree arg1
)
1808 tree type
= TREE_TYPE (arg0
);
1811 gcc_assert (int_binop_types_match_p (MINUS_EXPR
, TREE_TYPE (arg0
),
1814 /* If the type is already signed, just do the simple thing. */
1815 if (!TYPE_UNSIGNED (type
))
1816 return size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
);
1818 if (type
== sizetype
)
1820 else if (type
== bitsizetype
)
1821 ctype
= sbitsizetype
;
1823 ctype
= signed_type_for (type
);
1825 /* If either operand is not a constant, do the conversions to the signed
1826 type and subtract. The hardware will do the right thing with any
1827 overflow in the subtraction. */
1828 if (TREE_CODE (arg0
) != INTEGER_CST
|| TREE_CODE (arg1
) != INTEGER_CST
)
1829 return size_binop_loc (loc
, MINUS_EXPR
,
1830 fold_convert_loc (loc
, ctype
, arg0
),
1831 fold_convert_loc (loc
, ctype
, arg1
));
1833 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1834 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1835 overflow) and negate (which can't either). Special-case a result
1836 of zero while we're here. */
1837 if (tree_int_cst_equal (arg0
, arg1
))
1838 return build_int_cst (ctype
, 0);
1839 else if (tree_int_cst_lt (arg1
, arg0
))
1840 return fold_convert_loc (loc
, ctype
,
1841 size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
));
1843 return size_binop_loc (loc
, MINUS_EXPR
, build_int_cst (ctype
, 0),
1844 fold_convert_loc (loc
, ctype
,
1845 size_binop_loc (loc
,
1850 /* A subroutine of fold_convert_const handling conversions of an
1851 INTEGER_CST to another integer type. */
1854 fold_convert_const_int_from_int (tree type
, const_tree arg1
)
1856 /* Given an integer constant, make new constant with new type,
1857 appropriately sign-extended or truncated. Use widest_int
1858 so that any extension is done according ARG1's type. */
1859 return force_fit_type (type
, wi::to_widest (arg1
),
1860 !POINTER_TYPE_P (TREE_TYPE (arg1
)),
1861 TREE_OVERFLOW (arg1
));
1864 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1865 to an integer type. */
1868 fold_convert_const_int_from_real (enum tree_code code
, tree type
, const_tree arg1
)
1870 bool overflow
= false;
1873 /* The following code implements the floating point to integer
1874 conversion rules required by the Java Language Specification,
1875 that IEEE NaNs are mapped to zero and values that overflow
1876 the target precision saturate, i.e. values greater than
1877 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1878 are mapped to INT_MIN. These semantics are allowed by the
1879 C and C++ standards that simply state that the behavior of
1880 FP-to-integer conversion is unspecified upon overflow. */
1884 REAL_VALUE_TYPE x
= TREE_REAL_CST (arg1
);
1888 case FIX_TRUNC_EXPR
:
1889 real_trunc (&r
, VOIDmode
, &x
);
1896 /* If R is NaN, return zero and show we have an overflow. */
1897 if (REAL_VALUE_ISNAN (r
))
1900 val
= wi::zero (TYPE_PRECISION (type
));
1903 /* See if R is less than the lower bound or greater than the
1908 tree lt
= TYPE_MIN_VALUE (type
);
1909 REAL_VALUE_TYPE l
= real_value_from_int_cst (NULL_TREE
, lt
);
1910 if (real_less (&r
, &l
))
1919 tree ut
= TYPE_MAX_VALUE (type
);
1922 REAL_VALUE_TYPE u
= real_value_from_int_cst (NULL_TREE
, ut
);
1923 if (real_less (&u
, &r
))
1932 val
= real_to_integer (&r
, &overflow
, TYPE_PRECISION (type
));
1934 t
= force_fit_type (type
, val
, -1, overflow
| TREE_OVERFLOW (arg1
));
1938 /* A subroutine of fold_convert_const handling conversions of a
1939 FIXED_CST to an integer type. */
1942 fold_convert_const_int_from_fixed (tree type
, const_tree arg1
)
1945 double_int temp
, temp_trunc
;
1948 /* Right shift FIXED_CST to temp by fbit. */
1949 temp
= TREE_FIXED_CST (arg1
).data
;
1950 mode
= TREE_FIXED_CST (arg1
).mode
;
1951 if (GET_MODE_FBIT (mode
) < HOST_BITS_PER_DOUBLE_INT
)
1953 temp
= temp
.rshift (GET_MODE_FBIT (mode
),
1954 HOST_BITS_PER_DOUBLE_INT
,
1955 SIGNED_FIXED_POINT_MODE_P (mode
));
1957 /* Left shift temp to temp_trunc by fbit. */
1958 temp_trunc
= temp
.lshift (GET_MODE_FBIT (mode
),
1959 HOST_BITS_PER_DOUBLE_INT
,
1960 SIGNED_FIXED_POINT_MODE_P (mode
));
1964 temp
= double_int_zero
;
1965 temp_trunc
= double_int_zero
;
1968 /* If FIXED_CST is negative, we need to round the value toward 0.
1969 By checking if the fractional bits are not zero to add 1 to temp. */
1970 if (SIGNED_FIXED_POINT_MODE_P (mode
)
1971 && temp_trunc
.is_negative ()
1972 && TREE_FIXED_CST (arg1
).data
!= temp_trunc
)
1973 temp
+= double_int_one
;
1975 /* Given a fixed-point constant, make new constant with new type,
1976 appropriately sign-extended or truncated. */
1977 t
= force_fit_type (type
, temp
, -1,
1978 (temp
.is_negative ()
1979 && (TYPE_UNSIGNED (type
)
1980 < TYPE_UNSIGNED (TREE_TYPE (arg1
))))
1981 | TREE_OVERFLOW (arg1
));
1986 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1987 to another floating point type. */
1990 fold_convert_const_real_from_real (tree type
, const_tree arg1
)
1992 REAL_VALUE_TYPE value
;
1995 /* Don't perform the operation if flag_signaling_nans is on
1996 and the operand is a signaling NaN. */
1997 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
)))
1998 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1
)))
2001 real_convert (&value
, TYPE_MODE (type
), &TREE_REAL_CST (arg1
));
2002 t
= build_real (type
, value
);
2004 /* If converting an infinity or NAN to a representation that doesn't
2005 have one, set the overflow bit so that we can produce some kind of
2006 error message at the appropriate point if necessary. It's not the
2007 most user-friendly message, but it's better than nothing. */
2008 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1
))
2009 && !MODE_HAS_INFINITIES (TYPE_MODE (type
)))
2010 TREE_OVERFLOW (t
) = 1;
2011 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
2012 && !MODE_HAS_NANS (TYPE_MODE (type
)))
2013 TREE_OVERFLOW (t
) = 1;
2014 /* Regular overflow, conversion produced an infinity in a mode that
2015 can't represent them. */
2016 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type
))
2017 && REAL_VALUE_ISINF (value
)
2018 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1
)))
2019 TREE_OVERFLOW (t
) = 1;
2021 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
2025 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2026 to a floating point type. */
2029 fold_convert_const_real_from_fixed (tree type
, const_tree arg1
)
2031 REAL_VALUE_TYPE value
;
2034 real_convert_from_fixed (&value
, TYPE_MODE (type
), &TREE_FIXED_CST (arg1
));
2035 t
= build_real (type
, value
);
2037 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
2041 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2042 to another fixed-point type. */
2045 fold_convert_const_fixed_from_fixed (tree type
, const_tree arg1
)
2047 FIXED_VALUE_TYPE value
;
2051 overflow_p
= fixed_convert (&value
, TYPE_MODE (type
), &TREE_FIXED_CST (arg1
),
2052 TYPE_SATURATING (type
));
2053 t
= build_fixed (type
, value
);
2055 /* Propagate overflow flags. */
2056 if (overflow_p
| TREE_OVERFLOW (arg1
))
2057 TREE_OVERFLOW (t
) = 1;
2061 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2062 to a fixed-point type. */
2065 fold_convert_const_fixed_from_int (tree type
, const_tree arg1
)
2067 FIXED_VALUE_TYPE value
;
2072 gcc_assert (TREE_INT_CST_NUNITS (arg1
) <= 2);
2074 di
.low
= TREE_INT_CST_ELT (arg1
, 0);
2075 if (TREE_INT_CST_NUNITS (arg1
) == 1)
2076 di
.high
= (HOST_WIDE_INT
) di
.low
< 0 ? HOST_WIDE_INT_M1
: 0;
2078 di
.high
= TREE_INT_CST_ELT (arg1
, 1);
2080 overflow_p
= fixed_convert_from_int (&value
, TYPE_MODE (type
), di
,
2081 TYPE_UNSIGNED (TREE_TYPE (arg1
)),
2082 TYPE_SATURATING (type
));
2083 t
= build_fixed (type
, value
);
2085 /* Propagate overflow flags. */
2086 if (overflow_p
| TREE_OVERFLOW (arg1
))
2087 TREE_OVERFLOW (t
) = 1;
2091 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2092 to a fixed-point type. */
2095 fold_convert_const_fixed_from_real (tree type
, const_tree arg1
)
2097 FIXED_VALUE_TYPE value
;
2101 overflow_p
= fixed_convert_from_real (&value
, TYPE_MODE (type
),
2102 &TREE_REAL_CST (arg1
),
2103 TYPE_SATURATING (type
));
2104 t
= build_fixed (type
, value
);
2106 /* Propagate overflow flags. */
2107 if (overflow_p
| TREE_OVERFLOW (arg1
))
2108 TREE_OVERFLOW (t
) = 1;
2112 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2113 type TYPE. If no simplification can be done return NULL_TREE. */
2116 fold_convert_const (enum tree_code code
, tree type
, tree arg1
)
2118 if (TREE_TYPE (arg1
) == type
)
2121 if (POINTER_TYPE_P (type
) || INTEGRAL_TYPE_P (type
)
2122 || TREE_CODE (type
) == OFFSET_TYPE
)
2124 if (TREE_CODE (arg1
) == INTEGER_CST
)
2125 return fold_convert_const_int_from_int (type
, arg1
);
2126 else if (TREE_CODE (arg1
) == REAL_CST
)
2127 return fold_convert_const_int_from_real (code
, type
, arg1
);
2128 else if (TREE_CODE (arg1
) == FIXED_CST
)
2129 return fold_convert_const_int_from_fixed (type
, arg1
);
2131 else if (TREE_CODE (type
) == REAL_TYPE
)
2133 if (TREE_CODE (arg1
) == INTEGER_CST
)
2134 return build_real_from_int_cst (type
, arg1
);
2135 else if (TREE_CODE (arg1
) == REAL_CST
)
2136 return fold_convert_const_real_from_real (type
, arg1
);
2137 else if (TREE_CODE (arg1
) == FIXED_CST
)
2138 return fold_convert_const_real_from_fixed (type
, arg1
);
2140 else if (TREE_CODE (type
) == FIXED_POINT_TYPE
)
2142 if (TREE_CODE (arg1
) == FIXED_CST
)
2143 return fold_convert_const_fixed_from_fixed (type
, arg1
);
2144 else if (TREE_CODE (arg1
) == INTEGER_CST
)
2145 return fold_convert_const_fixed_from_int (type
, arg1
);
2146 else if (TREE_CODE (arg1
) == REAL_CST
)
2147 return fold_convert_const_fixed_from_real (type
, arg1
);
2149 else if (TREE_CODE (type
) == VECTOR_TYPE
)
2151 if (TREE_CODE (arg1
) == VECTOR_CST
2152 && TYPE_VECTOR_SUBPARTS (type
) == VECTOR_CST_NELTS (arg1
))
2154 int len
= TYPE_VECTOR_SUBPARTS (type
);
2155 tree elttype
= TREE_TYPE (type
);
2156 tree
*v
= XALLOCAVEC (tree
, len
);
2157 for (int i
= 0; i
< len
; ++i
)
2159 tree elt
= VECTOR_CST_ELT (arg1
, i
);
2160 tree cvt
= fold_convert_const (code
, elttype
, elt
);
2161 if (cvt
== NULL_TREE
)
2165 return build_vector (type
, v
);
2171 /* Construct a vector of zero elements of vector type TYPE. */
2174 build_zero_vector (tree type
)
2178 t
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), integer_zero_node
);
2179 return build_vector_from_val (type
, t
);
2182 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2185 fold_convertible_p (const_tree type
, const_tree arg
)
2187 tree orig
= TREE_TYPE (arg
);
2192 if (TREE_CODE (arg
) == ERROR_MARK
2193 || TREE_CODE (type
) == ERROR_MARK
2194 || TREE_CODE (orig
) == ERROR_MARK
)
2197 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
2200 switch (TREE_CODE (type
))
2202 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2203 case POINTER_TYPE
: case REFERENCE_TYPE
:
2205 return (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2206 || TREE_CODE (orig
) == OFFSET_TYPE
);
2209 case FIXED_POINT_TYPE
:
2212 return TREE_CODE (type
) == TREE_CODE (orig
);
2219 /* Convert expression ARG to type TYPE. Used by the middle-end for
2220 simple conversions in preference to calling the front-end's convert. */
2223 fold_convert_loc (location_t loc
, tree type
, tree arg
)
2225 tree orig
= TREE_TYPE (arg
);
2231 if (TREE_CODE (arg
) == ERROR_MARK
2232 || TREE_CODE (type
) == ERROR_MARK
2233 || TREE_CODE (orig
) == ERROR_MARK
)
2234 return error_mark_node
;
2236 switch (TREE_CODE (type
))
2239 case REFERENCE_TYPE
:
2240 /* Handle conversions between pointers to different address spaces. */
2241 if (POINTER_TYPE_P (orig
)
2242 && (TYPE_ADDR_SPACE (TREE_TYPE (type
))
2243 != TYPE_ADDR_SPACE (TREE_TYPE (orig
))))
2244 return fold_build1_loc (loc
, ADDR_SPACE_CONVERT_EXPR
, type
, arg
);
2247 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2249 if (TREE_CODE (arg
) == INTEGER_CST
)
2251 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
2252 if (tem
!= NULL_TREE
)
2255 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2256 || TREE_CODE (orig
) == OFFSET_TYPE
)
2257 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2258 if (TREE_CODE (orig
) == COMPLEX_TYPE
)
2259 return fold_convert_loc (loc
, type
,
2260 fold_build1_loc (loc
, REALPART_EXPR
,
2261 TREE_TYPE (orig
), arg
));
2262 gcc_assert (TREE_CODE (orig
) == VECTOR_TYPE
2263 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2264 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, type
, arg
);
2267 if (TREE_CODE (arg
) == INTEGER_CST
)
2269 tem
= fold_convert_const (FLOAT_EXPR
, type
, arg
);
2270 if (tem
!= NULL_TREE
)
2273 else if (TREE_CODE (arg
) == REAL_CST
)
2275 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
2276 if (tem
!= NULL_TREE
)
2279 else if (TREE_CODE (arg
) == FIXED_CST
)
2281 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
2282 if (tem
!= NULL_TREE
)
2286 switch (TREE_CODE (orig
))
2289 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2290 case POINTER_TYPE
: case REFERENCE_TYPE
:
2291 return fold_build1_loc (loc
, FLOAT_EXPR
, type
, arg
);
2294 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2296 case FIXED_POINT_TYPE
:
2297 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
2300 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2301 return fold_convert_loc (loc
, type
, tem
);
2307 case FIXED_POINT_TYPE
:
2308 if (TREE_CODE (arg
) == FIXED_CST
|| TREE_CODE (arg
) == INTEGER_CST
2309 || TREE_CODE (arg
) == REAL_CST
)
2311 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
2312 if (tem
!= NULL_TREE
)
2313 goto fold_convert_exit
;
2316 switch (TREE_CODE (orig
))
2318 case FIXED_POINT_TYPE
:
2323 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
2326 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2327 return fold_convert_loc (loc
, type
, tem
);
2334 switch (TREE_CODE (orig
))
2337 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2338 case POINTER_TYPE
: case REFERENCE_TYPE
:
2340 case FIXED_POINT_TYPE
:
2341 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
2342 fold_convert_loc (loc
, TREE_TYPE (type
), arg
),
2343 fold_convert_loc (loc
, TREE_TYPE (type
),
2344 integer_zero_node
));
2349 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
2351 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
),
2352 TREE_OPERAND (arg
, 0));
2353 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
),
2354 TREE_OPERAND (arg
, 1));
2355 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
2358 arg
= save_expr (arg
);
2359 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2360 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, TREE_TYPE (orig
), arg
);
2361 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
), rpart
);
2362 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
), ipart
);
2363 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
2371 if (integer_zerop (arg
))
2372 return build_zero_vector (type
);
2373 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2374 gcc_assert (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2375 || TREE_CODE (orig
) == VECTOR_TYPE
);
2376 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, type
, arg
);
2379 tem
= fold_ignored_result (arg
);
2380 return fold_build1_loc (loc
, NOP_EXPR
, type
, tem
);
2383 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
2384 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2388 protected_set_expr_location_unshare (tem
, loc
);
2392 /* Return false if expr can be assumed not to be an lvalue, true
2396 maybe_lvalue_p (const_tree x
)
2398 /* We only need to wrap lvalue tree codes. */
2399 switch (TREE_CODE (x
))
2412 case ARRAY_RANGE_REF
:
2418 case PREINCREMENT_EXPR
:
2419 case PREDECREMENT_EXPR
:
2421 case TRY_CATCH_EXPR
:
2422 case WITH_CLEANUP_EXPR
:
2431 /* Assume the worst for front-end tree codes. */
2432 if ((int)TREE_CODE (x
) >= NUM_TREE_CODES
)
2440 /* Return an expr equal to X but certainly not valid as an lvalue. */
2443 non_lvalue_loc (location_t loc
, tree x
)
2445 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2450 if (! maybe_lvalue_p (x
))
2452 return build1_loc (loc
, NON_LVALUE_EXPR
, TREE_TYPE (x
), x
);
2455 /* When pedantic, return an expr equal to X but certainly not valid as a
2456 pedantic lvalue. Otherwise, return X. */
2459 pedantic_non_lvalue_loc (location_t loc
, tree x
)
2461 return protected_set_expr_location_unshare (x
, loc
);
2464 /* Given a tree comparison code, return the code that is the logical inverse.
2465 It is generally not safe to do this for floating-point comparisons, except
2466 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2467 ERROR_MARK in this case. */
2470 invert_tree_comparison (enum tree_code code
, bool honor_nans
)
2472 if (honor_nans
&& flag_trapping_math
&& code
!= EQ_EXPR
&& code
!= NE_EXPR
2473 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
)
2483 return honor_nans
? UNLE_EXPR
: LE_EXPR
;
2485 return honor_nans
? UNLT_EXPR
: LT_EXPR
;
2487 return honor_nans
? UNGE_EXPR
: GE_EXPR
;
2489 return honor_nans
? UNGT_EXPR
: GT_EXPR
;
2503 return UNORDERED_EXPR
;
2504 case UNORDERED_EXPR
:
2505 return ORDERED_EXPR
;
2511 /* Similar, but return the comparison that results if the operands are
2512 swapped. This is safe for floating-point. */
2515 swap_tree_comparison (enum tree_code code
)
2522 case UNORDERED_EXPR
:
2548 /* Convert a comparison tree code from an enum tree_code representation
2549 into a compcode bit-based encoding. This function is the inverse of
2550 compcode_to_comparison. */
2552 static enum comparison_code
2553 comparison_to_compcode (enum tree_code code
)
2570 return COMPCODE_ORD
;
2571 case UNORDERED_EXPR
:
2572 return COMPCODE_UNORD
;
2574 return COMPCODE_UNLT
;
2576 return COMPCODE_UNEQ
;
2578 return COMPCODE_UNLE
;
2580 return COMPCODE_UNGT
;
2582 return COMPCODE_LTGT
;
2584 return COMPCODE_UNGE
;
2590 /* Convert a compcode bit-based encoding of a comparison operator back
2591 to GCC's enum tree_code representation. This function is the
2592 inverse of comparison_to_compcode. */
2594 static enum tree_code
2595 compcode_to_comparison (enum comparison_code code
)
2612 return ORDERED_EXPR
;
2613 case COMPCODE_UNORD
:
2614 return UNORDERED_EXPR
;
2632 /* Return a tree for the comparison which is the combination of
2633 doing the AND or OR (depending on CODE) of the two operations LCODE
2634 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2635 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2636 if this makes the transformation invalid. */
2639 combine_comparisons (location_t loc
,
2640 enum tree_code code
, enum tree_code lcode
,
2641 enum tree_code rcode
, tree truth_type
,
2642 tree ll_arg
, tree lr_arg
)
2644 bool honor_nans
= HONOR_NANS (ll_arg
);
2645 enum comparison_code lcompcode
= comparison_to_compcode (lcode
);
2646 enum comparison_code rcompcode
= comparison_to_compcode (rcode
);
2651 case TRUTH_AND_EXPR
: case TRUTH_ANDIF_EXPR
:
2652 compcode
= lcompcode
& rcompcode
;
2655 case TRUTH_OR_EXPR
: case TRUTH_ORIF_EXPR
:
2656 compcode
= lcompcode
| rcompcode
;
2665 /* Eliminate unordered comparisons, as well as LTGT and ORD
2666 which are not used unless the mode has NaNs. */
2667 compcode
&= ~COMPCODE_UNORD
;
2668 if (compcode
== COMPCODE_LTGT
)
2669 compcode
= COMPCODE_NE
;
2670 else if (compcode
== COMPCODE_ORD
)
2671 compcode
= COMPCODE_TRUE
;
2673 else if (flag_trapping_math
)
2675 /* Check that the original operation and the optimized ones will trap
2676 under the same condition. */
2677 bool ltrap
= (lcompcode
& COMPCODE_UNORD
) == 0
2678 && (lcompcode
!= COMPCODE_EQ
)
2679 && (lcompcode
!= COMPCODE_ORD
);
2680 bool rtrap
= (rcompcode
& COMPCODE_UNORD
) == 0
2681 && (rcompcode
!= COMPCODE_EQ
)
2682 && (rcompcode
!= COMPCODE_ORD
);
2683 bool trap
= (compcode
& COMPCODE_UNORD
) == 0
2684 && (compcode
!= COMPCODE_EQ
)
2685 && (compcode
!= COMPCODE_ORD
);
2687 /* In a short-circuited boolean expression the LHS might be
2688 such that the RHS, if evaluated, will never trap. For
2689 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2690 if neither x nor y is NaN. (This is a mixed blessing: for
2691 example, the expression above will never trap, hence
2692 optimizing it to x < y would be invalid). */
2693 if ((code
== TRUTH_ORIF_EXPR
&& (lcompcode
& COMPCODE_UNORD
))
2694 || (code
== TRUTH_ANDIF_EXPR
&& !(lcompcode
& COMPCODE_UNORD
)))
2697 /* If the comparison was short-circuited, and only the RHS
2698 trapped, we may now generate a spurious trap. */
2700 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2703 /* If we changed the conditions that cause a trap, we lose. */
2704 if ((ltrap
|| rtrap
) != trap
)
2708 if (compcode
== COMPCODE_TRUE
)
2709 return constant_boolean_node (true, truth_type
);
2710 else if (compcode
== COMPCODE_FALSE
)
2711 return constant_boolean_node (false, truth_type
);
2714 enum tree_code tcode
;
2716 tcode
= compcode_to_comparison ((enum comparison_code
) compcode
);
2717 return fold_build2_loc (loc
, tcode
, truth_type
, ll_arg
, lr_arg
);
2721 /* Return nonzero if two operands (typically of the same tree node)
2722 are necessarily equal. FLAGS modifies behavior as follows:
2724 If OEP_ONLY_CONST is set, only return nonzero for constants.
2725 This function tests whether the operands are indistinguishable;
2726 it does not test whether they are equal using C's == operation.
2727 The distinction is important for IEEE floating point, because
2728 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2729 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2731 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2732 even though it may hold multiple values during a function.
2733 This is because a GCC tree node guarantees that nothing else is
2734 executed between the evaluation of its "operands" (which may often
2735 be evaluated in arbitrary order). Hence if the operands themselves
2736 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2737 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2738 unset means assuming isochronic (or instantaneous) tree equivalence.
2739 Unless comparing arbitrary expression trees, such as from different
2740 statements, this flag can usually be left unset.
2742 If OEP_PURE_SAME is set, then pure functions with identical arguments
2743 are considered the same. It is used when the caller has other ways
2744 to ensure that global memory is unchanged in between.
2746 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2747 not values of expressions.
2749 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2750 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2752 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2753 any operand with side effects. This is unnecessarily conservative in the
2754 case we know that arg0 and arg1 are in disjoint code paths (such as in
2755 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2756 addresses with TREE_CONSTANT flag set so we know that &var == &var
2757 even if var is volatile. */
2760 operand_equal_p (const_tree arg0
, const_tree arg1
, unsigned int flags
)
2762 /* When checking, verify at the outermost operand_equal_p call that
2763 if operand_equal_p returns non-zero then ARG0 and ARG1 has the same
2765 if (flag_checking
&& !(flags
& OEP_NO_HASH_CHECK
))
2767 if (operand_equal_p (arg0
, arg1
, flags
| OEP_NO_HASH_CHECK
))
2771 inchash::hash
hstate0 (0), hstate1 (0);
2772 inchash::add_expr (arg0
, hstate0
, flags
| OEP_HASH_CHECK
);
2773 inchash::add_expr (arg1
, hstate1
, flags
| OEP_HASH_CHECK
);
2774 hashval_t h0
= hstate0
.end ();
2775 hashval_t h1
= hstate1
.end ();
2776 gcc_assert (h0
== h1
);
2784 /* If either is ERROR_MARK, they aren't equal. */
2785 if (TREE_CODE (arg0
) == ERROR_MARK
|| TREE_CODE (arg1
) == ERROR_MARK
2786 || TREE_TYPE (arg0
) == error_mark_node
2787 || TREE_TYPE (arg1
) == error_mark_node
)
2790 /* Similar, if either does not have a type (like a released SSA name),
2791 they aren't equal. */
2792 if (!TREE_TYPE (arg0
) || !TREE_TYPE (arg1
))
2795 /* We cannot consider pointers to different address space equal. */
2796 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
2797 && POINTER_TYPE_P (TREE_TYPE (arg1
))
2798 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0
)))
2799 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1
)))))
2802 /* Check equality of integer constants before bailing out due to
2803 precision differences. */
2804 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
2806 /* Address of INTEGER_CST is not defined; check that we did not forget
2807 to drop the OEP_ADDRESS_OF flags. */
2808 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
2809 return tree_int_cst_equal (arg0
, arg1
);
2812 if (!(flags
& OEP_ADDRESS_OF
))
2814 /* If both types don't have the same signedness, then we can't consider
2815 them equal. We must check this before the STRIP_NOPS calls
2816 because they may change the signedness of the arguments. As pointers
2817 strictly don't have a signedness, require either two pointers or
2818 two non-pointers as well. */
2819 if (TYPE_UNSIGNED (TREE_TYPE (arg0
)) != TYPE_UNSIGNED (TREE_TYPE (arg1
))
2820 || POINTER_TYPE_P (TREE_TYPE (arg0
))
2821 != POINTER_TYPE_P (TREE_TYPE (arg1
)))
2824 /* If both types don't have the same precision, then it is not safe
2826 if (element_precision (TREE_TYPE (arg0
))
2827 != element_precision (TREE_TYPE (arg1
)))
2834 /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
2835 sanity check once the issue is solved. */
2837 /* Addresses of conversions and SSA_NAMEs (and many other things)
2838 are not defined. Check that we did not forget to drop the
2839 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2840 gcc_checking_assert (!CONVERT_EXPR_P (arg0
) && !CONVERT_EXPR_P (arg1
)
2841 && TREE_CODE (arg0
) != SSA_NAME
);
2844 /* In case both args are comparisons but with different comparison
2845 code, try to swap the comparison operands of one arg to produce
2846 a match and compare that variant. */
2847 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2848 && COMPARISON_CLASS_P (arg0
)
2849 && COMPARISON_CLASS_P (arg1
))
2851 enum tree_code swap_code
= swap_tree_comparison (TREE_CODE (arg1
));
2853 if (TREE_CODE (arg0
) == swap_code
)
2854 return operand_equal_p (TREE_OPERAND (arg0
, 0),
2855 TREE_OPERAND (arg1
, 1), flags
)
2856 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2857 TREE_OPERAND (arg1
, 0), flags
);
2860 if (TREE_CODE (arg0
) != TREE_CODE (arg1
))
2862 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2863 if (CONVERT_EXPR_P (arg0
) && CONVERT_EXPR_P (arg1
))
2865 else if (flags
& OEP_ADDRESS_OF
)
2867 /* If we are interested in comparing addresses ignore
2868 MEM_REF wrappings of the base that can appear just for
2870 if (TREE_CODE (arg0
) == MEM_REF
2872 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ADDR_EXPR
2873 && TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0) == arg1
2874 && integer_zerop (TREE_OPERAND (arg0
, 1)))
2876 else if (TREE_CODE (arg1
) == MEM_REF
2878 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ADDR_EXPR
2879 && TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0) == arg0
2880 && integer_zerop (TREE_OPERAND (arg1
, 1)))
2888 /* When not checking addresses, this is needed for conversions and for
2889 COMPONENT_REF. Might as well play it safe and always test this. */
2890 if (TREE_CODE (TREE_TYPE (arg0
)) == ERROR_MARK
2891 || TREE_CODE (TREE_TYPE (arg1
)) == ERROR_MARK
2892 || (TYPE_MODE (TREE_TYPE (arg0
)) != TYPE_MODE (TREE_TYPE (arg1
))
2893 && !(flags
& OEP_ADDRESS_OF
)))
2896 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2897 We don't care about side effects in that case because the SAVE_EXPR
2898 takes care of that for us. In all other cases, two expressions are
2899 equal if they have no side effects. If we have two identical
2900 expressions with side effects that should be treated the same due
2901 to the only side effects being identical SAVE_EXPR's, that will
2902 be detected in the recursive calls below.
2903 If we are taking an invariant address of two identical objects
2904 they are necessarily equal as well. */
2905 if (arg0
== arg1
&& ! (flags
& OEP_ONLY_CONST
)
2906 && (TREE_CODE (arg0
) == SAVE_EXPR
2907 || (flags
& OEP_MATCH_SIDE_EFFECTS
)
2908 || (! TREE_SIDE_EFFECTS (arg0
) && ! TREE_SIDE_EFFECTS (arg1
))))
2911 /* Next handle constant cases, those for which we can return 1 even
2912 if ONLY_CONST is set. */
2913 if (TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
))
2914 switch (TREE_CODE (arg0
))
2917 return tree_int_cst_equal (arg0
, arg1
);
2920 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0
),
2921 TREE_FIXED_CST (arg1
));
2924 if (real_identical (&TREE_REAL_CST (arg0
), &TREE_REAL_CST (arg1
)))
2928 if (!HONOR_SIGNED_ZEROS (arg0
))
2930 /* If we do not distinguish between signed and unsigned zero,
2931 consider them equal. */
2932 if (real_zerop (arg0
) && real_zerop (arg1
))
2941 if (VECTOR_CST_NELTS (arg0
) != VECTOR_CST_NELTS (arg1
))
2944 for (i
= 0; i
< VECTOR_CST_NELTS (arg0
); ++i
)
2946 if (!operand_equal_p (VECTOR_CST_ELT (arg0
, i
),
2947 VECTOR_CST_ELT (arg1
, i
), flags
))
2954 return (operand_equal_p (TREE_REALPART (arg0
), TREE_REALPART (arg1
),
2956 && operand_equal_p (TREE_IMAGPART (arg0
), TREE_IMAGPART (arg1
),
2960 return (TREE_STRING_LENGTH (arg0
) == TREE_STRING_LENGTH (arg1
)
2961 && ! memcmp (TREE_STRING_POINTER (arg0
),
2962 TREE_STRING_POINTER (arg1
),
2963 TREE_STRING_LENGTH (arg0
)));
2966 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
2967 return operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0),
2968 flags
| OEP_ADDRESS_OF
2969 | OEP_MATCH_SIDE_EFFECTS
);
2971 /* In GIMPLE empty constructors are allowed in initializers of
2973 return !CONSTRUCTOR_NELTS (arg0
) && !CONSTRUCTOR_NELTS (arg1
);
2978 if (flags
& OEP_ONLY_CONST
)
2981 /* Define macros to test an operand from arg0 and arg1 for equality and a
2982 variant that allows null and views null as being different from any
2983 non-null value. In the latter case, if either is null, the both
2984 must be; otherwise, do the normal comparison. */
2985 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2986 TREE_OPERAND (arg1, N), flags)
2988 #define OP_SAME_WITH_NULL(N) \
2989 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2990 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2992 switch (TREE_CODE_CLASS (TREE_CODE (arg0
)))
2995 /* Two conversions are equal only if signedness and modes match. */
2996 switch (TREE_CODE (arg0
))
2999 case FIX_TRUNC_EXPR
:
3000 if (TYPE_UNSIGNED (TREE_TYPE (arg0
))
3001 != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
3011 case tcc_comparison
:
3013 if (OP_SAME (0) && OP_SAME (1))
3016 /* For commutative ops, allow the other order. */
3017 return (commutative_tree_code (TREE_CODE (arg0
))
3018 && operand_equal_p (TREE_OPERAND (arg0
, 0),
3019 TREE_OPERAND (arg1
, 1), flags
)
3020 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3021 TREE_OPERAND (arg1
, 0), flags
));
3024 /* If either of the pointer (or reference) expressions we are
3025 dereferencing contain a side effect, these cannot be equal,
3026 but their addresses can be. */
3027 if ((flags
& OEP_MATCH_SIDE_EFFECTS
) == 0
3028 && (TREE_SIDE_EFFECTS (arg0
)
3029 || TREE_SIDE_EFFECTS (arg1
)))
3032 switch (TREE_CODE (arg0
))
3035 if (!(flags
& OEP_ADDRESS_OF
)
3036 && (TYPE_ALIGN (TREE_TYPE (arg0
))
3037 != TYPE_ALIGN (TREE_TYPE (arg1
))))
3039 flags
&= ~OEP_ADDRESS_OF
;
3043 /* Require the same offset. */
3044 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0
)),
3045 TYPE_SIZE (TREE_TYPE (arg1
)),
3046 flags
& ~OEP_ADDRESS_OF
))
3051 case VIEW_CONVERT_EXPR
:
3054 case TARGET_MEM_REF
:
3056 if (!(flags
& OEP_ADDRESS_OF
))
3058 /* Require equal access sizes */
3059 if (TYPE_SIZE (TREE_TYPE (arg0
)) != TYPE_SIZE (TREE_TYPE (arg1
))
3060 && (!TYPE_SIZE (TREE_TYPE (arg0
))
3061 || !TYPE_SIZE (TREE_TYPE (arg1
))
3062 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0
)),
3063 TYPE_SIZE (TREE_TYPE (arg1
)),
3066 /* Verify that access happens in similar types. */
3067 if (!types_compatible_p (TREE_TYPE (arg0
), TREE_TYPE (arg1
)))
3069 /* Verify that accesses are TBAA compatible. */
3070 if (!alias_ptr_types_compatible_p
3071 (TREE_TYPE (TREE_OPERAND (arg0
, 1)),
3072 TREE_TYPE (TREE_OPERAND (arg1
, 1)))
3073 || (MR_DEPENDENCE_CLIQUE (arg0
)
3074 != MR_DEPENDENCE_CLIQUE (arg1
))
3075 || (MR_DEPENDENCE_BASE (arg0
)
3076 != MR_DEPENDENCE_BASE (arg1
)))
3078 /* Verify that alignment is compatible. */
3079 if (TYPE_ALIGN (TREE_TYPE (arg0
))
3080 != TYPE_ALIGN (TREE_TYPE (arg1
)))
3083 flags
&= ~OEP_ADDRESS_OF
;
3084 return (OP_SAME (0) && OP_SAME (1)
3085 /* TARGET_MEM_REF require equal extra operands. */
3086 && (TREE_CODE (arg0
) != TARGET_MEM_REF
3087 || (OP_SAME_WITH_NULL (2)
3088 && OP_SAME_WITH_NULL (3)
3089 && OP_SAME_WITH_NULL (4))));
3092 case ARRAY_RANGE_REF
:
3095 flags
&= ~OEP_ADDRESS_OF
;
3096 /* Compare the array index by value if it is constant first as we
3097 may have different types but same value here. */
3098 return ((tree_int_cst_equal (TREE_OPERAND (arg0
, 1),
3099 TREE_OPERAND (arg1
, 1))
3101 && OP_SAME_WITH_NULL (2)
3102 && OP_SAME_WITH_NULL (3)
3103 /* Compare low bound and element size as with OEP_ADDRESS_OF
3104 we have to account for the offset of the ref. */
3105 && (TREE_TYPE (TREE_OPERAND (arg0
, 0))
3106 == TREE_TYPE (TREE_OPERAND (arg1
, 0))
3107 || (operand_equal_p (array_ref_low_bound
3108 (CONST_CAST_TREE (arg0
)),
3110 (CONST_CAST_TREE (arg1
)), flags
)
3111 && operand_equal_p (array_ref_element_size
3112 (CONST_CAST_TREE (arg0
)),
3113 array_ref_element_size
3114 (CONST_CAST_TREE (arg1
)),
3118 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3119 may be NULL when we're called to compare MEM_EXPRs. */
3120 if (!OP_SAME_WITH_NULL (0)
3123 flags
&= ~OEP_ADDRESS_OF
;
3124 return OP_SAME_WITH_NULL (2);
3129 flags
&= ~OEP_ADDRESS_OF
;
3130 return OP_SAME (1) && OP_SAME (2);
3136 case tcc_expression
:
3137 switch (TREE_CODE (arg0
))
3140 /* Be sure we pass right ADDRESS_OF flag. */
3141 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
3142 return operand_equal_p (TREE_OPERAND (arg0
, 0),
3143 TREE_OPERAND (arg1
, 0),
3144 flags
| OEP_ADDRESS_OF
);
3146 case TRUTH_NOT_EXPR
:
3149 case TRUTH_ANDIF_EXPR
:
3150 case TRUTH_ORIF_EXPR
:
3151 return OP_SAME (0) && OP_SAME (1);
3154 case WIDEN_MULT_PLUS_EXPR
:
3155 case WIDEN_MULT_MINUS_EXPR
:
3158 /* The multiplication operands are commutative. */
3161 case TRUTH_AND_EXPR
:
3163 case TRUTH_XOR_EXPR
:
3164 if (OP_SAME (0) && OP_SAME (1))
3167 /* Otherwise take into account this is a commutative operation. */
3168 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
3169 TREE_OPERAND (arg1
, 1), flags
)
3170 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3171 TREE_OPERAND (arg1
, 0), flags
));
3174 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3176 flags
&= ~OEP_ADDRESS_OF
;
3179 case BIT_INSERT_EXPR
:
3180 /* BIT_INSERT_EXPR has an implicit operand as the type precision
3181 of op1. Need to check to make sure they are the same. */
3182 if (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
3183 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
3184 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 1)))
3185 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1
, 1))))
3191 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3196 case PREDECREMENT_EXPR
:
3197 case PREINCREMENT_EXPR
:
3198 case POSTDECREMENT_EXPR
:
3199 case POSTINCREMENT_EXPR
:
3200 if (flags
& OEP_LEXICOGRAPHIC
)
3201 return OP_SAME (0) && OP_SAME (1);
3204 case CLEANUP_POINT_EXPR
:
3206 if (flags
& OEP_LEXICOGRAPHIC
)
3215 switch (TREE_CODE (arg0
))
3218 if ((CALL_EXPR_FN (arg0
) == NULL_TREE
)
3219 != (CALL_EXPR_FN (arg1
) == NULL_TREE
))
3220 /* If not both CALL_EXPRs are either internal or normal function
3221 functions, then they are not equal. */
3223 else if (CALL_EXPR_FN (arg0
) == NULL_TREE
)
3225 /* If the CALL_EXPRs call different internal functions, then they
3227 if (CALL_EXPR_IFN (arg0
) != CALL_EXPR_IFN (arg1
))
3232 /* If the CALL_EXPRs call different functions, then they are not
3234 if (! operand_equal_p (CALL_EXPR_FN (arg0
), CALL_EXPR_FN (arg1
),
3239 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3241 unsigned int cef
= call_expr_flags (arg0
);
3242 if (flags
& OEP_PURE_SAME
)
3243 cef
&= ECF_CONST
| ECF_PURE
;
3246 if (!cef
&& !(flags
& OEP_LEXICOGRAPHIC
))
3250 /* Now see if all the arguments are the same. */
3252 const_call_expr_arg_iterator iter0
, iter1
;
3254 for (a0
= first_const_call_expr_arg (arg0
, &iter0
),
3255 a1
= first_const_call_expr_arg (arg1
, &iter1
);
3257 a0
= next_const_call_expr_arg (&iter0
),
3258 a1
= next_const_call_expr_arg (&iter1
))
3259 if (! operand_equal_p (a0
, a1
, flags
))
3262 /* If we get here and both argument lists are exhausted
3263 then the CALL_EXPRs are equal. */
3264 return ! (a0
|| a1
);
3270 case tcc_declaration
:
3271 /* Consider __builtin_sqrt equal to sqrt. */
3272 return (TREE_CODE (arg0
) == FUNCTION_DECL
3273 && DECL_BUILT_IN (arg0
) && DECL_BUILT_IN (arg1
)
3274 && DECL_BUILT_IN_CLASS (arg0
) == DECL_BUILT_IN_CLASS (arg1
)
3275 && DECL_FUNCTION_CODE (arg0
) == DECL_FUNCTION_CODE (arg1
));
3277 case tcc_exceptional
:
3278 if (TREE_CODE (arg0
) == CONSTRUCTOR
)
3280 /* In GIMPLE constructors are used only to build vectors from
3281 elements. Individual elements in the constructor must be
3282 indexed in increasing order and form an initial sequence.
3284 We make no effort to compare constructors in generic.
3285 (see sem_variable::equals in ipa-icf which can do so for
3287 if (!VECTOR_TYPE_P (TREE_TYPE (arg0
))
3288 || !VECTOR_TYPE_P (TREE_TYPE (arg1
)))
3291 /* Be sure that vectors constructed have the same representation.
3292 We only tested element precision and modes to match.
3293 Vectors may be BLKmode and thus also check that the number of
3295 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
))
3296 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)))
3299 vec
<constructor_elt
, va_gc
> *v0
= CONSTRUCTOR_ELTS (arg0
);
3300 vec
<constructor_elt
, va_gc
> *v1
= CONSTRUCTOR_ELTS (arg1
);
3301 unsigned int len
= vec_safe_length (v0
);
3303 if (len
!= vec_safe_length (v1
))
3306 for (unsigned int i
= 0; i
< len
; i
++)
3308 constructor_elt
*c0
= &(*v0
)[i
];
3309 constructor_elt
*c1
= &(*v1
)[i
];
3311 if (!operand_equal_p (c0
->value
, c1
->value
, flags
)
3312 /* In GIMPLE the indexes can be either NULL or matching i.
3313 Double check this so we won't get false
3314 positives for GENERIC. */
3316 && (TREE_CODE (c0
->index
) != INTEGER_CST
3317 || !compare_tree_int (c0
->index
, i
)))
3319 && (TREE_CODE (c1
->index
) != INTEGER_CST
3320 || !compare_tree_int (c1
->index
, i
))))
3325 else if (TREE_CODE (arg0
) == STATEMENT_LIST
3326 && (flags
& OEP_LEXICOGRAPHIC
))
3328 /* Compare the STATEMENT_LISTs. */
3329 tree_stmt_iterator tsi1
, tsi2
;
3330 tree body1
= CONST_CAST_TREE (arg0
);
3331 tree body2
= CONST_CAST_TREE (arg1
);
3332 for (tsi1
= tsi_start (body1
), tsi2
= tsi_start (body2
); ;
3333 tsi_next (&tsi1
), tsi_next (&tsi2
))
3335 /* The lists don't have the same number of statements. */
3336 if (tsi_end_p (tsi1
) ^ tsi_end_p (tsi2
))
3338 if (tsi_end_p (tsi1
) && tsi_end_p (tsi2
))
3340 if (!operand_equal_p (tsi_stmt (tsi1
), tsi_stmt (tsi2
),
3348 switch (TREE_CODE (arg0
))
3351 if (flags
& OEP_LEXICOGRAPHIC
)
3352 return OP_SAME_WITH_NULL (0);
3363 #undef OP_SAME_WITH_NULL
3366 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3367 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3369 When in doubt, return 0. */
3372 operand_equal_for_comparison_p (tree arg0
, tree arg1
, tree other
)
3374 int unsignedp1
, unsignedpo
;
3375 tree primarg0
, primarg1
, primother
;
3376 unsigned int correct_width
;
3378 if (operand_equal_p (arg0
, arg1
, 0))
3381 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
3382 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
3385 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3386 and see if the inner values are the same. This removes any
3387 signedness comparison, which doesn't matter here. */
3388 primarg0
= arg0
, primarg1
= arg1
;
3389 STRIP_NOPS (primarg0
);
3390 STRIP_NOPS (primarg1
);
3391 if (operand_equal_p (primarg0
, primarg1
, 0))
3394 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3395 actual comparison operand, ARG0.
3397 First throw away any conversions to wider types
3398 already present in the operands. */
3400 primarg1
= get_narrower (arg1
, &unsignedp1
);
3401 primother
= get_narrower (other
, &unsignedpo
);
3403 correct_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
3404 if (unsignedp1
== unsignedpo
3405 && TYPE_PRECISION (TREE_TYPE (primarg1
)) < correct_width
3406 && TYPE_PRECISION (TREE_TYPE (primother
)) < correct_width
)
3408 tree type
= TREE_TYPE (arg0
);
3410 /* Make sure shorter operand is extended the right way
3411 to match the longer operand. */
3412 primarg1
= fold_convert (signed_or_unsigned_type_for
3413 (unsignedp1
, TREE_TYPE (primarg1
)), primarg1
);
3415 if (operand_equal_p (arg0
, fold_convert (type
, primarg1
), 0))
3422 /* See if ARG is an expression that is either a comparison or is performing
3423 arithmetic on comparisons. The comparisons must only be comparing
3424 two different values, which will be stored in *CVAL1 and *CVAL2; if
3425 they are nonzero it means that some operands have already been found.
3426 No variables may be used anywhere else in the expression except in the
3427 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3428 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3430 If this is true, return 1. Otherwise, return zero. */
3433 twoval_comparison_p (tree arg
, tree
*cval1
, tree
*cval2
, int *save_p
)
3435 enum tree_code code
= TREE_CODE (arg
);
3436 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
3438 /* We can handle some of the tcc_expression cases here. */
3439 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
3441 else if (tclass
== tcc_expression
3442 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
3443 || code
== COMPOUND_EXPR
))
3444 tclass
= tcc_binary
;
3446 else if (tclass
== tcc_expression
&& code
== SAVE_EXPR
3447 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg
, 0)))
3449 /* If we've already found a CVAL1 or CVAL2, this expression is
3450 two complex to handle. */
3451 if (*cval1
|| *cval2
)
3461 return twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
);
3464 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
)
3465 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
3466 cval1
, cval2
, save_p
));
3471 case tcc_expression
:
3472 if (code
== COND_EXPR
)
3473 return (twoval_comparison_p (TREE_OPERAND (arg
, 0),
3474 cval1
, cval2
, save_p
)
3475 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
3476 cval1
, cval2
, save_p
)
3477 && twoval_comparison_p (TREE_OPERAND (arg
, 2),
3478 cval1
, cval2
, save_p
));
3481 case tcc_comparison
:
3482 /* First see if we can handle the first operand, then the second. For
3483 the second operand, we know *CVAL1 can't be zero. It must be that
3484 one side of the comparison is each of the values; test for the
3485 case where this isn't true by failing if the two operands
3488 if (operand_equal_p (TREE_OPERAND (arg
, 0),
3489 TREE_OPERAND (arg
, 1), 0))
3493 *cval1
= TREE_OPERAND (arg
, 0);
3494 else if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 0), 0))
3496 else if (*cval2
== 0)
3497 *cval2
= TREE_OPERAND (arg
, 0);
3498 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 0), 0))
3503 if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 1), 0))
3505 else if (*cval2
== 0)
3506 *cval2
= TREE_OPERAND (arg
, 1);
3507 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 1), 0))
3519 /* ARG is a tree that is known to contain just arithmetic operations and
3520 comparisons. Evaluate the operations in the tree substituting NEW0 for
3521 any occurrence of OLD0 as an operand of a comparison and likewise for
3525 eval_subst (location_t loc
, tree arg
, tree old0
, tree new0
,
3526 tree old1
, tree new1
)
3528 tree type
= TREE_TYPE (arg
);
3529 enum tree_code code
= TREE_CODE (arg
);
3530 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
3532 /* We can handle some of the tcc_expression cases here. */
3533 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
3535 else if (tclass
== tcc_expression
3536 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
3537 tclass
= tcc_binary
;
3542 return fold_build1_loc (loc
, code
, type
,
3543 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3544 old0
, new0
, old1
, new1
));
3547 return fold_build2_loc (loc
, code
, type
,
3548 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3549 old0
, new0
, old1
, new1
),
3550 eval_subst (loc
, TREE_OPERAND (arg
, 1),
3551 old0
, new0
, old1
, new1
));
3553 case tcc_expression
:
3557 return eval_subst (loc
, TREE_OPERAND (arg
, 0), old0
, new0
,
3561 return eval_subst (loc
, TREE_OPERAND (arg
, 1), old0
, new0
,
3565 return fold_build3_loc (loc
, code
, type
,
3566 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3567 old0
, new0
, old1
, new1
),
3568 eval_subst (loc
, TREE_OPERAND (arg
, 1),
3569 old0
, new0
, old1
, new1
),
3570 eval_subst (loc
, TREE_OPERAND (arg
, 2),
3571 old0
, new0
, old1
, new1
));
3575 /* Fall through - ??? */
3577 case tcc_comparison
:
3579 tree arg0
= TREE_OPERAND (arg
, 0);
3580 tree arg1
= TREE_OPERAND (arg
, 1);
3582 /* We need to check both for exact equality and tree equality. The
3583 former will be true if the operand has a side-effect. In that
3584 case, we know the operand occurred exactly once. */
3586 if (arg0
== old0
|| operand_equal_p (arg0
, old0
, 0))
3588 else if (arg0
== old1
|| operand_equal_p (arg0
, old1
, 0))
3591 if (arg1
== old0
|| operand_equal_p (arg1
, old0
, 0))
3593 else if (arg1
== old1
|| operand_equal_p (arg1
, old1
, 0))
3596 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
3604 /* Return a tree for the case when the result of an expression is RESULT
3605 converted to TYPE and OMITTED was previously an operand of the expression
3606 but is now not needed (e.g., we folded OMITTED * 0).
3608 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3609 the conversion of RESULT to TYPE. */
3612 omit_one_operand_loc (location_t loc
, tree type
, tree result
, tree omitted
)
3614 tree t
= fold_convert_loc (loc
, type
, result
);
3616 /* If the resulting operand is an empty statement, just return the omitted
3617 statement casted to void. */
3618 if (IS_EMPTY_STMT (t
) && TREE_SIDE_EFFECTS (omitted
))
3619 return build1_loc (loc
, NOP_EXPR
, void_type_node
,
3620 fold_ignored_result (omitted
));
3622 if (TREE_SIDE_EFFECTS (omitted
))
3623 return build2_loc (loc
, COMPOUND_EXPR
, type
,
3624 fold_ignored_result (omitted
), t
);
3626 return non_lvalue_loc (loc
, t
);
3629 /* Return a tree for the case when the result of an expression is RESULT
3630 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3631 of the expression but are now not needed.
3633 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3634 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3635 evaluated before OMITTED2. Otherwise, if neither has side effects,
3636 just do the conversion of RESULT to TYPE. */
3639 omit_two_operands_loc (location_t loc
, tree type
, tree result
,
3640 tree omitted1
, tree omitted2
)
3642 tree t
= fold_convert_loc (loc
, type
, result
);
3644 if (TREE_SIDE_EFFECTS (omitted2
))
3645 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted2
, t
);
3646 if (TREE_SIDE_EFFECTS (omitted1
))
3647 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted1
, t
);
3649 return TREE_CODE (t
) != COMPOUND_EXPR
? non_lvalue_loc (loc
, t
) : t
;
3653 /* Return a simplified tree node for the truth-negation of ARG. This
3654 never alters ARG itself. We assume that ARG is an operation that
3655 returns a truth value (0 or 1).
3657 FIXME: one would think we would fold the result, but it causes
3658 problems with the dominator optimizer. */
3661 fold_truth_not_expr (location_t loc
, tree arg
)
3663 tree type
= TREE_TYPE (arg
);
3664 enum tree_code code
= TREE_CODE (arg
);
3665 location_t loc1
, loc2
;
3667 /* If this is a comparison, we can simply invert it, except for
3668 floating-point non-equality comparisons, in which case we just
3669 enclose a TRUTH_NOT_EXPR around what we have. */
3671 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
3673 tree op_type
= TREE_TYPE (TREE_OPERAND (arg
, 0));
3674 if (FLOAT_TYPE_P (op_type
)
3675 && flag_trapping_math
3676 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
3677 && code
!= NE_EXPR
&& code
!= EQ_EXPR
)
3680 code
= invert_tree_comparison (code
, HONOR_NANS (op_type
));
3681 if (code
== ERROR_MARK
)
3684 tree ret
= build2_loc (loc
, code
, type
, TREE_OPERAND (arg
, 0),
3685 TREE_OPERAND (arg
, 1));
3686 if (TREE_NO_WARNING (arg
))
3687 TREE_NO_WARNING (ret
) = 1;
3694 return constant_boolean_node (integer_zerop (arg
), type
);
3696 case TRUTH_AND_EXPR
:
3697 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3698 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3699 return build2_loc (loc
, TRUTH_OR_EXPR
, type
,
3700 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3701 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3704 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3705 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3706 return build2_loc (loc
, TRUTH_AND_EXPR
, type
,
3707 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3708 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3710 case TRUTH_XOR_EXPR
:
3711 /* Here we can invert either operand. We invert the first operand
3712 unless the second operand is a TRUTH_NOT_EXPR in which case our
3713 result is the XOR of the first operand with the inside of the
3714 negation of the second operand. */
3716 if (TREE_CODE (TREE_OPERAND (arg
, 1)) == TRUTH_NOT_EXPR
)
3717 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
, TREE_OPERAND (arg
, 0),
3718 TREE_OPERAND (TREE_OPERAND (arg
, 1), 0));
3720 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
,
3721 invert_truthvalue_loc (loc
, TREE_OPERAND (arg
, 0)),
3722 TREE_OPERAND (arg
, 1));
3724 case TRUTH_ANDIF_EXPR
:
3725 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3726 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3727 return build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
3728 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3729 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3731 case TRUTH_ORIF_EXPR
:
3732 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3733 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3734 return build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
3735 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3736 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3738 case TRUTH_NOT_EXPR
:
3739 return TREE_OPERAND (arg
, 0);
3743 tree arg1
= TREE_OPERAND (arg
, 1);
3744 tree arg2
= TREE_OPERAND (arg
, 2);
3746 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3747 loc2
= expr_location_or (TREE_OPERAND (arg
, 2), loc
);
3749 /* A COND_EXPR may have a throw as one operand, which
3750 then has void type. Just leave void operands
3752 return build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg
, 0),
3753 VOID_TYPE_P (TREE_TYPE (arg1
))
3754 ? arg1
: invert_truthvalue_loc (loc1
, arg1
),
3755 VOID_TYPE_P (TREE_TYPE (arg2
))
3756 ? arg2
: invert_truthvalue_loc (loc2
, arg2
));
3760 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3761 return build2_loc (loc
, COMPOUND_EXPR
, type
,
3762 TREE_OPERAND (arg
, 0),
3763 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 1)));
3765 case NON_LVALUE_EXPR
:
3766 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3767 return invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0));
3770 if (TREE_CODE (TREE_TYPE (arg
)) == BOOLEAN_TYPE
)
3771 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3776 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3777 return build1_loc (loc
, TREE_CODE (arg
), type
,
3778 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
3781 if (!integer_onep (TREE_OPERAND (arg
, 1)))
3783 return build2_loc (loc
, EQ_EXPR
, type
, arg
, build_int_cst (type
, 0));
3786 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3788 case CLEANUP_POINT_EXPR
:
3789 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3790 return build1_loc (loc
, CLEANUP_POINT_EXPR
, type
,
3791 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
3798 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3799 assume that ARG is an operation that returns a truth value (0 or 1
3800 for scalars, 0 or -1 for vectors). Return the folded expression if
3801 folding is successful. Otherwise, return NULL_TREE. */
3804 fold_invert_truthvalue (location_t loc
, tree arg
)
3806 tree type
= TREE_TYPE (arg
);
3807 return fold_unary_loc (loc
, VECTOR_TYPE_P (type
)
3813 /* Return a simplified tree node for the truth-negation of ARG. This
3814 never alters ARG itself. We assume that ARG is an operation that
3815 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3818 invert_truthvalue_loc (location_t loc
, tree arg
)
3820 if (TREE_CODE (arg
) == ERROR_MARK
)
3823 tree type
= TREE_TYPE (arg
);
3824 return fold_build1_loc (loc
, VECTOR_TYPE_P (type
)
3830 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3831 with code CODE. This optimization is unsafe. */
3833 distribute_real_division (location_t loc
, enum tree_code code
, tree type
,
3834 tree arg0
, tree arg1
)
3836 bool mul0
= TREE_CODE (arg0
) == MULT_EXPR
;
3837 bool mul1
= TREE_CODE (arg1
) == MULT_EXPR
;
3839 /* (A / C) +- (B / C) -> (A +- B) / C. */
3841 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3842 TREE_OPERAND (arg1
, 1), 0))
3843 return fold_build2_loc (loc
, mul0
? MULT_EXPR
: RDIV_EXPR
, type
,
3844 fold_build2_loc (loc
, code
, type
,
3845 TREE_OPERAND (arg0
, 0),
3846 TREE_OPERAND (arg1
, 0)),
3847 TREE_OPERAND (arg0
, 1));
3849 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3850 if (operand_equal_p (TREE_OPERAND (arg0
, 0),
3851 TREE_OPERAND (arg1
, 0), 0)
3852 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
3853 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
3855 REAL_VALUE_TYPE r0
, r1
;
3856 r0
= TREE_REAL_CST (TREE_OPERAND (arg0
, 1));
3857 r1
= TREE_REAL_CST (TREE_OPERAND (arg1
, 1));
3859 real_arithmetic (&r0
, RDIV_EXPR
, &dconst1
, &r0
);
3861 real_arithmetic (&r1
, RDIV_EXPR
, &dconst1
, &r1
);
3862 real_arithmetic (&r0
, code
, &r0
, &r1
);
3863 return fold_build2_loc (loc
, MULT_EXPR
, type
,
3864 TREE_OPERAND (arg0
, 0),
3865 build_real (type
, r0
));
3871 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3872 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3873 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
3874 is the original memory reference used to preserve the alias set of
3878 make_bit_field_ref (location_t loc
, tree inner
, tree orig_inner
, tree type
,
3879 HOST_WIDE_INT bitsize
, HOST_WIDE_INT bitpos
,
3880 int unsignedp
, int reversep
)
3882 tree result
, bftype
;
3884 /* Attempt not to lose the access path if possible. */
3885 if (TREE_CODE (orig_inner
) == COMPONENT_REF
)
3887 tree ninner
= TREE_OPERAND (orig_inner
, 0);
3889 HOST_WIDE_INT nbitsize
, nbitpos
;
3891 int nunsignedp
, nreversep
, nvolatilep
= 0;
3892 tree base
= get_inner_reference (ninner
, &nbitsize
, &nbitpos
,
3893 &noffset
, &nmode
, &nunsignedp
,
3894 &nreversep
, &nvolatilep
);
3896 && noffset
== NULL_TREE
3897 && nbitsize
>= bitsize
3898 && nbitpos
<= bitpos
3899 && bitpos
+ bitsize
<= nbitpos
+ nbitsize
3909 alias_set_type iset
= get_alias_set (orig_inner
);
3910 if (iset
== 0 && get_alias_set (inner
) != iset
)
3911 inner
= fold_build2 (MEM_REF
, TREE_TYPE (inner
),
3912 build_fold_addr_expr (inner
),
3913 build_int_cst (ptr_type_node
, 0));
3915 if (bitpos
== 0 && !reversep
)
3917 tree size
= TYPE_SIZE (TREE_TYPE (inner
));
3918 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner
))
3919 || POINTER_TYPE_P (TREE_TYPE (inner
)))
3920 && tree_fits_shwi_p (size
)
3921 && tree_to_shwi (size
) == bitsize
)
3922 return fold_convert_loc (loc
, type
, inner
);
3926 if (TYPE_PRECISION (bftype
) != bitsize
3927 || TYPE_UNSIGNED (bftype
) == !unsignedp
)
3928 bftype
= build_nonstandard_integer_type (bitsize
, 0);
3930 result
= build3_loc (loc
, BIT_FIELD_REF
, bftype
, inner
,
3931 bitsize_int (bitsize
), bitsize_int (bitpos
));
3932 REF_REVERSE_STORAGE_ORDER (result
) = reversep
;
3935 result
= fold_convert_loc (loc
, type
, result
);
3940 /* Optimize a bit-field compare.
3942 There are two cases: First is a compare against a constant and the
3943 second is a comparison of two items where the fields are at the same
3944 bit position relative to the start of a chunk (byte, halfword, word)
3945 large enough to contain it. In these cases we can avoid the shift
3946 implicit in bitfield extractions.
3948 For constants, we emit a compare of the shifted constant with the
3949 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3950 compared. For two fields at the same position, we do the ANDs with the
3951 similar mask and compare the result of the ANDs.
3953 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3954 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3955 are the left and right operands of the comparison, respectively.
3957 If the optimization described above can be done, we return the resulting
3958 tree. Otherwise we return zero. */
3961 optimize_bit_field_compare (location_t loc
, enum tree_code code
,
3962 tree compare_type
, tree lhs
, tree rhs
)
3964 HOST_WIDE_INT lbitpos
, lbitsize
, rbitpos
, rbitsize
, nbitpos
, nbitsize
;
3965 tree type
= TREE_TYPE (lhs
);
3967 int const_p
= TREE_CODE (rhs
) == INTEGER_CST
;
3968 machine_mode lmode
, rmode
, nmode
;
3969 int lunsignedp
, runsignedp
;
3970 int lreversep
, rreversep
;
3971 int lvolatilep
= 0, rvolatilep
= 0;
3972 tree linner
, rinner
= NULL_TREE
;
3976 /* Get all the information about the extractions being done. If the bit size
3977 if the same as the size of the underlying object, we aren't doing an
3978 extraction at all and so can do nothing. We also don't want to
3979 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3980 then will no longer be able to replace it. */
3981 linner
= get_inner_reference (lhs
, &lbitsize
, &lbitpos
, &offset
, &lmode
,
3982 &lunsignedp
, &lreversep
, &lvolatilep
);
3983 if (linner
== lhs
|| lbitsize
== GET_MODE_BITSIZE (lmode
) || lbitsize
< 0
3984 || offset
!= 0 || TREE_CODE (linner
) == PLACEHOLDER_EXPR
|| lvolatilep
)
3988 rreversep
= lreversep
;
3991 /* If this is not a constant, we can only do something if bit positions,
3992 sizes, signedness and storage order are the same. */
3994 = get_inner_reference (rhs
, &rbitsize
, &rbitpos
, &offset
, &rmode
,
3995 &runsignedp
, &rreversep
, &rvolatilep
);
3997 if (rinner
== rhs
|| lbitpos
!= rbitpos
|| lbitsize
!= rbitsize
3998 || lunsignedp
!= runsignedp
|| lreversep
!= rreversep
|| offset
!= 0
3999 || TREE_CODE (rinner
) == PLACEHOLDER_EXPR
|| rvolatilep
)
4003 /* Honor the C++ memory model and mimic what RTL expansion does. */
4004 unsigned HOST_WIDE_INT bitstart
= 0;
4005 unsigned HOST_WIDE_INT bitend
= 0;
4006 if (TREE_CODE (lhs
) == COMPONENT_REF
)
4008 get_bit_range (&bitstart
, &bitend
, lhs
, &lbitpos
, &offset
);
4009 if (offset
!= NULL_TREE
)
4013 /* See if we can find a mode to refer to this field. We should be able to,
4014 but fail if we can't. */
4015 nmode
= get_best_mode (lbitsize
, lbitpos
, bitstart
, bitend
,
4016 const_p
? TYPE_ALIGN (TREE_TYPE (linner
))
4017 : MIN (TYPE_ALIGN (TREE_TYPE (linner
)),
4018 TYPE_ALIGN (TREE_TYPE (rinner
))),
4020 if (nmode
== VOIDmode
)
4023 /* Set signed and unsigned types of the precision of this mode for the
4025 unsigned_type
= lang_hooks
.types
.type_for_mode (nmode
, 1);
4027 /* Compute the bit position and size for the new reference and our offset
4028 within it. If the new reference is the same size as the original, we
4029 won't optimize anything, so return zero. */
4030 nbitsize
= GET_MODE_BITSIZE (nmode
);
4031 nbitpos
= lbitpos
& ~ (nbitsize
- 1);
4033 if (nbitsize
== lbitsize
)
4036 if (lreversep
? !BYTES_BIG_ENDIAN
: BYTES_BIG_ENDIAN
)
4037 lbitpos
= nbitsize
- lbitsize
- lbitpos
;
4039 /* Make the mask to be used against the extracted field. */
4040 mask
= build_int_cst_type (unsigned_type
, -1);
4041 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (nbitsize
- lbitsize
));
4042 mask
= const_binop (RSHIFT_EXPR
, mask
,
4043 size_int (nbitsize
- lbitsize
- lbitpos
));
4046 /* If not comparing with constant, just rework the comparison
4048 return fold_build2_loc (loc
, code
, compare_type
,
4049 fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
4050 make_bit_field_ref (loc
, linner
, lhs
,
4055 fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
4056 make_bit_field_ref (loc
, rinner
, rhs
,
4062 /* Otherwise, we are handling the constant case. See if the constant is too
4063 big for the field. Warn and return a tree for 0 (false) if so. We do
4064 this not only for its own sake, but to avoid having to test for this
4065 error case below. If we didn't, we might generate wrong code.
4067 For unsigned fields, the constant shifted right by the field length should
4068 be all zero. For signed fields, the high-order bits should agree with
4073 if (wi::lrshift (rhs
, lbitsize
) != 0)
4075 warning (0, "comparison is always %d due to width of bit-field",
4077 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
4082 wide_int tem
= wi::arshift (rhs
, lbitsize
- 1);
4083 if (tem
!= 0 && tem
!= -1)
4085 warning (0, "comparison is always %d due to width of bit-field",
4087 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
4091 /* Single-bit compares should always be against zero. */
4092 if (lbitsize
== 1 && ! integer_zerop (rhs
))
4094 code
= code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
;
4095 rhs
= build_int_cst (type
, 0);
4098 /* Make a new bitfield reference, shift the constant over the
4099 appropriate number of bits and mask it with the computed mask
4100 (in case this was a signed field). If we changed it, make a new one. */
4101 lhs
= make_bit_field_ref (loc
, linner
, lhs
, unsigned_type
,
4102 nbitsize
, nbitpos
, 1, lreversep
);
4104 rhs
= const_binop (BIT_AND_EXPR
,
4105 const_binop (LSHIFT_EXPR
,
4106 fold_convert_loc (loc
, unsigned_type
, rhs
),
4107 size_int (lbitpos
)),
4110 lhs
= build2_loc (loc
, code
, compare_type
,
4111 build2 (BIT_AND_EXPR
, unsigned_type
, lhs
, mask
), rhs
);
4115 /* Subroutine for fold_truth_andor_1: decode a field reference.
4117 If EXP is a comparison reference, we return the innermost reference.
4119 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4120 set to the starting bit number.
4122 If the innermost field can be completely contained in a mode-sized
4123 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4125 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
4126 otherwise it is not changed.
4128 *PUNSIGNEDP is set to the signedness of the field.
4130 *PREVERSEP is set to the storage order of the field.
4132 *PMASK is set to the mask used. This is either contained in a
4133 BIT_AND_EXPR or derived from the width of the field.
4135 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4137 Return 0 if this is not a component reference or is one that we can't
4138 do anything with. */
4141 decode_field_reference (location_t loc
, tree
*exp_
, HOST_WIDE_INT
*pbitsize
,
4142 HOST_WIDE_INT
*pbitpos
, machine_mode
*pmode
,
4143 int *punsignedp
, int *preversep
, int *pvolatilep
,
4144 tree
*pmask
, tree
*pand_mask
)
4147 tree outer_type
= 0;
4149 tree mask
, inner
, offset
;
4151 unsigned int precision
;
4153 /* All the optimizations using this function assume integer fields.
4154 There are problems with FP fields since the type_for_size call
4155 below can fail for, e.g., XFmode. */
4156 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp
)))
4159 /* We are interested in the bare arrangement of bits, so strip everything
4160 that doesn't affect the machine mode. However, record the type of the
4161 outermost expression if it may matter below. */
4162 if (CONVERT_EXPR_P (exp
)
4163 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
4164 outer_type
= TREE_TYPE (exp
);
4167 if (TREE_CODE (exp
) == BIT_AND_EXPR
)
4169 and_mask
= TREE_OPERAND (exp
, 1);
4170 exp
= TREE_OPERAND (exp
, 0);
4171 STRIP_NOPS (exp
); STRIP_NOPS (and_mask
);
4172 if (TREE_CODE (and_mask
) != INTEGER_CST
)
4176 inner
= get_inner_reference (exp
, pbitsize
, pbitpos
, &offset
, pmode
,
4177 punsignedp
, preversep
, pvolatilep
);
4178 if ((inner
== exp
&& and_mask
== 0)
4179 || *pbitsize
< 0 || offset
!= 0
4180 || TREE_CODE (inner
) == PLACEHOLDER_EXPR
4181 /* Reject out-of-bound accesses (PR79731). */
4182 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner
))
4183 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner
)),
4184 *pbitpos
+ *pbitsize
) < 0))
4189 /* If the number of bits in the reference is the same as the bitsize of
4190 the outer type, then the outer type gives the signedness. Otherwise
4191 (in case of a small bitfield) the signedness is unchanged. */
4192 if (outer_type
&& *pbitsize
== TYPE_PRECISION (outer_type
))
4193 *punsignedp
= TYPE_UNSIGNED (outer_type
);
4195 /* Compute the mask to access the bitfield. */
4196 unsigned_type
= lang_hooks
.types
.type_for_size (*pbitsize
, 1);
4197 precision
= TYPE_PRECISION (unsigned_type
);
4199 mask
= build_int_cst_type (unsigned_type
, -1);
4201 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
4202 mask
= const_binop (RSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
4204 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4206 mask
= fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
4207 fold_convert_loc (loc
, unsigned_type
, and_mask
), mask
);
4210 *pand_mask
= and_mask
;
4214 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4215 bit positions and MASK is SIGNED. */
4218 all_ones_mask_p (const_tree mask
, unsigned int size
)
4220 tree type
= TREE_TYPE (mask
);
4221 unsigned int precision
= TYPE_PRECISION (type
);
4223 /* If this function returns true when the type of the mask is
4224 UNSIGNED, then there will be errors. In particular see
4225 gcc.c-torture/execute/990326-1.c. There does not appear to be
4226 any documentation paper trail as to why this is so. But the pre
4227 wide-int worked with that restriction and it has been preserved
4229 if (size
> precision
|| TYPE_SIGN (type
) == UNSIGNED
)
4232 return wi::mask (size
, false, precision
) == mask
;
4235 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4236 represents the sign bit of EXP's type. If EXP represents a sign
4237 or zero extension, also test VAL against the unextended type.
4238 The return value is the (sub)expression whose sign bit is VAL,
4239 or NULL_TREE otherwise. */
4242 sign_bit_p (tree exp
, const_tree val
)
4247 /* Tree EXP must have an integral type. */
4248 t
= TREE_TYPE (exp
);
4249 if (! INTEGRAL_TYPE_P (t
))
4252 /* Tree VAL must be an integer constant. */
4253 if (TREE_CODE (val
) != INTEGER_CST
4254 || TREE_OVERFLOW (val
))
4257 width
= TYPE_PRECISION (t
);
4258 if (wi::only_sign_bit_p (val
, width
))
4261 /* Handle extension from a narrower type. */
4262 if (TREE_CODE (exp
) == NOP_EXPR
4263 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))) < width
)
4264 return sign_bit_p (TREE_OPERAND (exp
, 0), val
);
4269 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4270 to be evaluated unconditionally. */
4273 simple_operand_p (const_tree exp
)
4275 /* Strip any conversions that don't change the machine mode. */
4278 return (CONSTANT_CLASS_P (exp
)
4279 || TREE_CODE (exp
) == SSA_NAME
4281 && ! TREE_ADDRESSABLE (exp
)
4282 && ! TREE_THIS_VOLATILE (exp
)
4283 && ! DECL_NONLOCAL (exp
)
4284 /* Don't regard global variables as simple. They may be
4285 allocated in ways unknown to the compiler (shared memory,
4286 #pragma weak, etc). */
4287 && ! TREE_PUBLIC (exp
)
4288 && ! DECL_EXTERNAL (exp
)
4289 /* Weakrefs are not safe to be read, since they can be NULL.
4290 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4291 have DECL_WEAK flag set. */
4292 && (! VAR_OR_FUNCTION_DECL_P (exp
) || ! DECL_WEAK (exp
))
4293 /* Loading a static variable is unduly expensive, but global
4294 registers aren't expensive. */
4295 && (! TREE_STATIC (exp
) || DECL_REGISTER (exp
))));
4298 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4299 to be evaluated unconditionally.
4300 I addition to simple_operand_p, we assume that comparisons, conversions,
4301 and logic-not operations are simple, if their operands are simple, too. */
4304 simple_operand_p_2 (tree exp
)
4306 enum tree_code code
;
4308 if (TREE_SIDE_EFFECTS (exp
)
4309 || tree_could_trap_p (exp
))
4312 while (CONVERT_EXPR_P (exp
))
4313 exp
= TREE_OPERAND (exp
, 0);
4315 code
= TREE_CODE (exp
);
4317 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
4318 return (simple_operand_p (TREE_OPERAND (exp
, 0))
4319 && simple_operand_p (TREE_OPERAND (exp
, 1)));
4321 if (code
== TRUTH_NOT_EXPR
)
4322 return simple_operand_p_2 (TREE_OPERAND (exp
, 0));
4324 return simple_operand_p (exp
);
4328 /* The following functions are subroutines to fold_range_test and allow it to
4329 try to change a logical combination of comparisons into a range test.
4332 X == 2 || X == 3 || X == 4 || X == 5
4336 (unsigned) (X - 2) <= 3
4338 We describe each set of comparisons as being either inside or outside
4339 a range, using a variable named like IN_P, and then describe the
4340 range with a lower and upper bound. If one of the bounds is omitted,
4341 it represents either the highest or lowest value of the type.
4343 In the comments below, we represent a range by two numbers in brackets
4344 preceded by a "+" to designate being inside that range, or a "-" to
4345 designate being outside that range, so the condition can be inverted by
4346 flipping the prefix. An omitted bound is represented by a "-". For
4347 example, "- [-, 10]" means being outside the range starting at the lowest
4348 possible value and ending at 10, in other words, being greater than 10.
4349 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4352 We set up things so that the missing bounds are handled in a consistent
4353 manner so neither a missing bound nor "true" and "false" need to be
4354 handled using a special case. */
4356 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4357 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4358 and UPPER1_P are nonzero if the respective argument is an upper bound
4359 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4360 must be specified for a comparison. ARG1 will be converted to ARG0's
4361 type if both are specified. */
4364 range_binop (enum tree_code code
, tree type
, tree arg0
, int upper0_p
,
4365 tree arg1
, int upper1_p
)
4371 /* If neither arg represents infinity, do the normal operation.
4372 Else, if not a comparison, return infinity. Else handle the special
4373 comparison rules. Note that most of the cases below won't occur, but
4374 are handled for consistency. */
4376 if (arg0
!= 0 && arg1
!= 0)
4378 tem
= fold_build2 (code
, type
!= 0 ? type
: TREE_TYPE (arg0
),
4379 arg0
, fold_convert (TREE_TYPE (arg0
), arg1
));
4381 return TREE_CODE (tem
) == INTEGER_CST
? tem
: 0;
4384 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
4387 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4388 for neither. In real maths, we cannot assume open ended ranges are
4389 the same. But, this is computer arithmetic, where numbers are finite.
4390 We can therefore make the transformation of any unbounded range with
4391 the value Z, Z being greater than any representable number. This permits
4392 us to treat unbounded ranges as equal. */
4393 sgn0
= arg0
!= 0 ? 0 : (upper0_p
? 1 : -1);
4394 sgn1
= arg1
!= 0 ? 0 : (upper1_p
? 1 : -1);
4398 result
= sgn0
== sgn1
;
4401 result
= sgn0
!= sgn1
;
4404 result
= sgn0
< sgn1
;
4407 result
= sgn0
<= sgn1
;
4410 result
= sgn0
> sgn1
;
4413 result
= sgn0
>= sgn1
;
4419 return constant_boolean_node (result
, type
);
4422 /* Helper routine for make_range. Perform one step for it, return
4423 new expression if the loop should continue or NULL_TREE if it should
4427 make_range_step (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
,
4428 tree exp_type
, tree
*p_low
, tree
*p_high
, int *p_in_p
,
4429 bool *strict_overflow_p
)
4431 tree arg0_type
= TREE_TYPE (arg0
);
4432 tree n_low
, n_high
, low
= *p_low
, high
= *p_high
;
4433 int in_p
= *p_in_p
, n_in_p
;
4437 case TRUTH_NOT_EXPR
:
4438 /* We can only do something if the range is testing for zero. */
4439 if (low
== NULL_TREE
|| high
== NULL_TREE
4440 || ! integer_zerop (low
) || ! integer_zerop (high
))
4445 case EQ_EXPR
: case NE_EXPR
:
4446 case LT_EXPR
: case LE_EXPR
: case GE_EXPR
: case GT_EXPR
:
4447 /* We can only do something if the range is testing for zero
4448 and if the second operand is an integer constant. Note that
4449 saying something is "in" the range we make is done by
4450 complementing IN_P since it will set in the initial case of
4451 being not equal to zero; "out" is leaving it alone. */
4452 if (low
== NULL_TREE
|| high
== NULL_TREE
4453 || ! integer_zerop (low
) || ! integer_zerop (high
)
4454 || TREE_CODE (arg1
) != INTEGER_CST
)
4459 case NE_EXPR
: /* - [c, c] */
4462 case EQ_EXPR
: /* + [c, c] */
4463 in_p
= ! in_p
, low
= high
= arg1
;
4465 case GT_EXPR
: /* - [-, c] */
4466 low
= 0, high
= arg1
;
4468 case GE_EXPR
: /* + [c, -] */
4469 in_p
= ! in_p
, low
= arg1
, high
= 0;
4471 case LT_EXPR
: /* - [c, -] */
4472 low
= arg1
, high
= 0;
4474 case LE_EXPR
: /* + [-, c] */
4475 in_p
= ! in_p
, low
= 0, high
= arg1
;
4481 /* If this is an unsigned comparison, we also know that EXP is
4482 greater than or equal to zero. We base the range tests we make
4483 on that fact, so we record it here so we can parse existing
4484 range tests. We test arg0_type since often the return type
4485 of, e.g. EQ_EXPR, is boolean. */
4486 if (TYPE_UNSIGNED (arg0_type
) && (low
== 0 || high
== 0))
4488 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
4490 build_int_cst (arg0_type
, 0),
4494 in_p
= n_in_p
, low
= n_low
, high
= n_high
;
4496 /* If the high bound is missing, but we have a nonzero low
4497 bound, reverse the range so it goes from zero to the low bound
4499 if (high
== 0 && low
&& ! integer_zerop (low
))
4502 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low
, 0,
4503 build_int_cst (TREE_TYPE (low
), 1), 0);
4504 low
= build_int_cst (arg0_type
, 0);
4514 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4515 low and high are non-NULL, then normalize will DTRT. */
4516 if (!TYPE_UNSIGNED (arg0_type
)
4517 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4519 if (low
== NULL_TREE
)
4520 low
= TYPE_MIN_VALUE (arg0_type
);
4521 if (high
== NULL_TREE
)
4522 high
= TYPE_MAX_VALUE (arg0_type
);
4525 /* (-x) IN [a,b] -> x in [-b, -a] */
4526 n_low
= range_binop (MINUS_EXPR
, exp_type
,
4527 build_int_cst (exp_type
, 0),
4529 n_high
= range_binop (MINUS_EXPR
, exp_type
,
4530 build_int_cst (exp_type
, 0),
4532 if (n_high
!= 0 && TREE_OVERFLOW (n_high
))
4538 return build2_loc (loc
, MINUS_EXPR
, exp_type
, negate_expr (arg0
),
4539 build_int_cst (exp_type
, 1));
4543 if (TREE_CODE (arg1
) != INTEGER_CST
)
4546 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4547 move a constant to the other side. */
4548 if (!TYPE_UNSIGNED (arg0_type
)
4549 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4552 /* If EXP is signed, any overflow in the computation is undefined,
4553 so we don't worry about it so long as our computations on
4554 the bounds don't overflow. For unsigned, overflow is defined
4555 and this is exactly the right thing. */
4556 n_low
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
4557 arg0_type
, low
, 0, arg1
, 0);
4558 n_high
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
4559 arg0_type
, high
, 1, arg1
, 0);
4560 if ((n_low
!= 0 && TREE_OVERFLOW (n_low
))
4561 || (n_high
!= 0 && TREE_OVERFLOW (n_high
)))
4564 if (TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4565 *strict_overflow_p
= true;
4568 /* Check for an unsigned range which has wrapped around the maximum
4569 value thus making n_high < n_low, and normalize it. */
4570 if (n_low
&& n_high
&& tree_int_cst_lt (n_high
, n_low
))
4572 low
= range_binop (PLUS_EXPR
, arg0_type
, n_high
, 0,
4573 build_int_cst (TREE_TYPE (n_high
), 1), 0);
4574 high
= range_binop (MINUS_EXPR
, arg0_type
, n_low
, 0,
4575 build_int_cst (TREE_TYPE (n_low
), 1), 0);
4577 /* If the range is of the form +/- [ x+1, x ], we won't
4578 be able to normalize it. But then, it represents the
4579 whole range or the empty set, so make it
4581 if (tree_int_cst_equal (n_low
, low
)
4582 && tree_int_cst_equal (n_high
, high
))
4588 low
= n_low
, high
= n_high
;
4596 case NON_LVALUE_EXPR
:
4597 if (TYPE_PRECISION (arg0_type
) > TYPE_PRECISION (exp_type
))
4600 if (! INTEGRAL_TYPE_P (arg0_type
)
4601 || (low
!= 0 && ! int_fits_type_p (low
, arg0_type
))
4602 || (high
!= 0 && ! int_fits_type_p (high
, arg0_type
)))
4605 n_low
= low
, n_high
= high
;
4608 n_low
= fold_convert_loc (loc
, arg0_type
, n_low
);
4611 n_high
= fold_convert_loc (loc
, arg0_type
, n_high
);
4613 /* If we're converting arg0 from an unsigned type, to exp,
4614 a signed type, we will be doing the comparison as unsigned.
4615 The tests above have already verified that LOW and HIGH
4618 So we have to ensure that we will handle large unsigned
4619 values the same way that the current signed bounds treat
4622 if (!TYPE_UNSIGNED (exp_type
) && TYPE_UNSIGNED (arg0_type
))
4626 /* For fixed-point modes, we need to pass the saturating flag
4627 as the 2nd parameter. */
4628 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type
)))
4630 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
),
4631 TYPE_SATURATING (arg0_type
));
4634 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
), 1);
4636 /* A range without an upper bound is, naturally, unbounded.
4637 Since convert would have cropped a very large value, use
4638 the max value for the destination type. */
4640 = TYPE_MAX_VALUE (equiv_type
) ? TYPE_MAX_VALUE (equiv_type
)
4641 : TYPE_MAX_VALUE (arg0_type
);
4643 if (TYPE_PRECISION (exp_type
) == TYPE_PRECISION (arg0_type
))
4644 high_positive
= fold_build2_loc (loc
, RSHIFT_EXPR
, arg0_type
,
4645 fold_convert_loc (loc
, arg0_type
,
4647 build_int_cst (arg0_type
, 1));
4649 /* If the low bound is specified, "and" the range with the
4650 range for which the original unsigned value will be
4654 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 1, n_low
, n_high
,
4655 1, fold_convert_loc (loc
, arg0_type
,
4660 in_p
= (n_in_p
== in_p
);
4664 /* Otherwise, "or" the range with the range of the input
4665 that will be interpreted as negative. */
4666 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 0, n_low
, n_high
,
4667 1, fold_convert_loc (loc
, arg0_type
,
4672 in_p
= (in_p
!= n_in_p
);
4686 /* Given EXP, a logical expression, set the range it is testing into
4687 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4688 actually being tested. *PLOW and *PHIGH will be made of the same
4689 type as the returned expression. If EXP is not a comparison, we
4690 will most likely not be returning a useful value and range. Set
4691 *STRICT_OVERFLOW_P to true if the return value is only valid
4692 because signed overflow is undefined; otherwise, do not change
4693 *STRICT_OVERFLOW_P. */
4696 make_range (tree exp
, int *pin_p
, tree
*plow
, tree
*phigh
,
4697 bool *strict_overflow_p
)
4699 enum tree_code code
;
4700 tree arg0
, arg1
= NULL_TREE
;
4701 tree exp_type
, nexp
;
4704 location_t loc
= EXPR_LOCATION (exp
);
4706 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4707 and see if we can refine the range. Some of the cases below may not
4708 happen, but it doesn't seem worth worrying about this. We "continue"
4709 the outer loop when we've changed something; otherwise we "break"
4710 the switch, which will "break" the while. */
4713 low
= high
= build_int_cst (TREE_TYPE (exp
), 0);
4717 code
= TREE_CODE (exp
);
4718 exp_type
= TREE_TYPE (exp
);
4721 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
4723 if (TREE_OPERAND_LENGTH (exp
) > 0)
4724 arg0
= TREE_OPERAND (exp
, 0);
4725 if (TREE_CODE_CLASS (code
) == tcc_binary
4726 || TREE_CODE_CLASS (code
) == tcc_comparison
4727 || (TREE_CODE_CLASS (code
) == tcc_expression
4728 && TREE_OPERAND_LENGTH (exp
) > 1))
4729 arg1
= TREE_OPERAND (exp
, 1);
4731 if (arg0
== NULL_TREE
)
4734 nexp
= make_range_step (loc
, code
, arg0
, arg1
, exp_type
, &low
,
4735 &high
, &in_p
, strict_overflow_p
);
4736 if (nexp
== NULL_TREE
)
4741 /* If EXP is a constant, we can evaluate whether this is true or false. */
4742 if (TREE_CODE (exp
) == INTEGER_CST
)
4744 in_p
= in_p
== (integer_onep (range_binop (GE_EXPR
, integer_type_node
,
4746 && integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4752 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4756 /* Returns TRUE if [LOW, HIGH] range check can be optimized to
4757 a bitwise check i.e. when
4758 LOW == 0xXX...X00...0
4759 HIGH == 0xXX...X11...1
4760 Return corresponding mask in MASK and stem in VALUE. */
4763 maskable_range_p (const_tree low
, const_tree high
, tree type
, tree
*mask
,
4766 if (TREE_CODE (low
) != INTEGER_CST
4767 || TREE_CODE (high
) != INTEGER_CST
)
4770 unsigned prec
= TYPE_PRECISION (type
);
4771 wide_int lo
= wi::to_wide (low
, prec
);
4772 wide_int hi
= wi::to_wide (high
, prec
);
4774 wide_int end_mask
= lo
^ hi
;
4775 if ((end_mask
& (end_mask
+ 1)) != 0
4776 || (lo
& end_mask
) != 0)
4779 wide_int stem_mask
= ~end_mask
;
4780 wide_int stem
= lo
& stem_mask
;
4781 if (stem
!= (hi
& stem_mask
))
4784 *mask
= wide_int_to_tree (type
, stem_mask
);
4785 *value
= wide_int_to_tree (type
, stem
);
4790 /* Helper routine for build_range_check and match.pd. Return the type to
4791 perform the check or NULL if it shouldn't be optimized. */
4794 range_check_type (tree etype
)
4796 /* First make sure that arithmetics in this type is valid, then make sure
4797 that it wraps around. */
4798 if (TREE_CODE (etype
) == ENUMERAL_TYPE
|| TREE_CODE (etype
) == BOOLEAN_TYPE
)
4799 etype
= lang_hooks
.types
.type_for_size (TYPE_PRECISION (etype
),
4800 TYPE_UNSIGNED (etype
));
4802 if (TREE_CODE (etype
) == INTEGER_TYPE
&& !TYPE_OVERFLOW_WRAPS (etype
))
4804 tree utype
, minv
, maxv
;
4806 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4807 for the type in question, as we rely on this here. */
4808 utype
= unsigned_type_for (etype
);
4809 maxv
= fold_convert (utype
, TYPE_MAX_VALUE (etype
));
4810 maxv
= range_binop (PLUS_EXPR
, NULL_TREE
, maxv
, 1,
4811 build_int_cst (TREE_TYPE (maxv
), 1), 1);
4812 minv
= fold_convert (utype
, TYPE_MIN_VALUE (etype
));
4814 if (integer_zerop (range_binop (NE_EXPR
, integer_type_node
,
4823 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4824 type, TYPE, return an expression to test if EXP is in (or out of, depending
4825 on IN_P) the range. Return 0 if the test couldn't be created. */
4828 build_range_check (location_t loc
, tree type
, tree exp
, int in_p
,
4829 tree low
, tree high
)
4831 tree etype
= TREE_TYPE (exp
), mask
, value
;
4833 /* Disable this optimization for function pointer expressions
4834 on targets that require function pointer canonicalization. */
4835 if (targetm
.have_canonicalize_funcptr_for_compare ()
4836 && TREE_CODE (etype
) == POINTER_TYPE
4837 && TREE_CODE (TREE_TYPE (etype
)) == FUNCTION_TYPE
)
4842 value
= build_range_check (loc
, type
, exp
, 1, low
, high
);
4844 return invert_truthvalue_loc (loc
, value
);
4849 if (low
== 0 && high
== 0)
4850 return omit_one_operand_loc (loc
, type
, build_int_cst (type
, 1), exp
);
4853 return fold_build2_loc (loc
, LE_EXPR
, type
, exp
,
4854 fold_convert_loc (loc
, etype
, high
));
4857 return fold_build2_loc (loc
, GE_EXPR
, type
, exp
,
4858 fold_convert_loc (loc
, etype
, low
));
4860 if (operand_equal_p (low
, high
, 0))
4861 return fold_build2_loc (loc
, EQ_EXPR
, type
, exp
,
4862 fold_convert_loc (loc
, etype
, low
));
4864 if (TREE_CODE (exp
) == BIT_AND_EXPR
4865 && maskable_range_p (low
, high
, etype
, &mask
, &value
))
4866 return fold_build2_loc (loc
, EQ_EXPR
, type
,
4867 fold_build2_loc (loc
, BIT_AND_EXPR
, etype
,
4871 if (integer_zerop (low
))
4873 if (! TYPE_UNSIGNED (etype
))
4875 etype
= unsigned_type_for (etype
);
4876 high
= fold_convert_loc (loc
, etype
, high
);
4877 exp
= fold_convert_loc (loc
, etype
, exp
);
4879 return build_range_check (loc
, type
, exp
, 1, 0, high
);
4882 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4883 if (integer_onep (low
) && TREE_CODE (high
) == INTEGER_CST
)
4885 int prec
= TYPE_PRECISION (etype
);
4887 if (wi::mask (prec
- 1, false, prec
) == high
)
4889 if (TYPE_UNSIGNED (etype
))
4891 tree signed_etype
= signed_type_for (etype
);
4892 if (TYPE_PRECISION (signed_etype
) != TYPE_PRECISION (etype
))
4894 = build_nonstandard_integer_type (TYPE_PRECISION (etype
), 0);
4896 etype
= signed_etype
;
4897 exp
= fold_convert_loc (loc
, etype
, exp
);
4899 return fold_build2_loc (loc
, GT_EXPR
, type
, exp
,
4900 build_int_cst (etype
, 0));
4904 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4905 This requires wrap-around arithmetics for the type of the expression. */
4906 etype
= range_check_type (etype
);
4907 if (etype
== NULL_TREE
)
4910 if (POINTER_TYPE_P (etype
))
4911 etype
= unsigned_type_for (etype
);
4913 high
= fold_convert_loc (loc
, etype
, high
);
4914 low
= fold_convert_loc (loc
, etype
, low
);
4915 exp
= fold_convert_loc (loc
, etype
, exp
);
4917 value
= const_binop (MINUS_EXPR
, high
, low
);
4919 if (value
!= 0 && !TREE_OVERFLOW (value
))
4920 return build_range_check (loc
, type
,
4921 fold_build2_loc (loc
, MINUS_EXPR
, etype
, exp
, low
),
4922 1, build_int_cst (etype
, 0), value
);
4927 /* Return the predecessor of VAL in its type, handling the infinite case. */
4930 range_predecessor (tree val
)
4932 tree type
= TREE_TYPE (val
);
4934 if (INTEGRAL_TYPE_P (type
)
4935 && operand_equal_p (val
, TYPE_MIN_VALUE (type
), 0))
4938 return range_binop (MINUS_EXPR
, NULL_TREE
, val
, 0,
4939 build_int_cst (TREE_TYPE (val
), 1), 0);
4942 /* Return the successor of VAL in its type, handling the infinite case. */
4945 range_successor (tree val
)
4947 tree type
= TREE_TYPE (val
);
4949 if (INTEGRAL_TYPE_P (type
)
4950 && operand_equal_p (val
, TYPE_MAX_VALUE (type
), 0))
4953 return range_binop (PLUS_EXPR
, NULL_TREE
, val
, 0,
4954 build_int_cst (TREE_TYPE (val
), 1), 0);
4957 /* Given two ranges, see if we can merge them into one. Return 1 if we
4958 can, 0 if we can't. Set the output range into the specified parameters. */
4961 merge_ranges (int *pin_p
, tree
*plow
, tree
*phigh
, int in0_p
, tree low0
,
4962 tree high0
, int in1_p
, tree low1
, tree high1
)
4970 int lowequal
= ((low0
== 0 && low1
== 0)
4971 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4972 low0
, 0, low1
, 0)));
4973 int highequal
= ((high0
== 0 && high1
== 0)
4974 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4975 high0
, 1, high1
, 1)));
4977 /* Make range 0 be the range that starts first, or ends last if they
4978 start at the same value. Swap them if it isn't. */
4979 if (integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4982 && integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4983 high1
, 1, high0
, 1))))
4985 temp
= in0_p
, in0_p
= in1_p
, in1_p
= temp
;
4986 tem
= low0
, low0
= low1
, low1
= tem
;
4987 tem
= high0
, high0
= high1
, high1
= tem
;
4990 /* Now flag two cases, whether the ranges are disjoint or whether the
4991 second range is totally subsumed in the first. Note that the tests
4992 below are simplified by the ones above. */
4993 no_overlap
= integer_onep (range_binop (LT_EXPR
, integer_type_node
,
4994 high0
, 1, low1
, 0));
4995 subset
= integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4996 high1
, 1, high0
, 1));
4998 /* We now have four cases, depending on whether we are including or
4999 excluding the two ranges. */
5002 /* If they don't overlap, the result is false. If the second range
5003 is a subset it is the result. Otherwise, the range is from the start
5004 of the second to the end of the first. */
5006 in_p
= 0, low
= high
= 0;
5008 in_p
= 1, low
= low1
, high
= high1
;
5010 in_p
= 1, low
= low1
, high
= high0
;
5013 else if (in0_p
&& ! in1_p
)
5015 /* If they don't overlap, the result is the first range. If they are
5016 equal, the result is false. If the second range is a subset of the
5017 first, and the ranges begin at the same place, we go from just after
5018 the end of the second range to the end of the first. If the second
5019 range is not a subset of the first, or if it is a subset and both
5020 ranges end at the same place, the range starts at the start of the
5021 first range and ends just before the second range.
5022 Otherwise, we can't describe this as a single range. */
5024 in_p
= 1, low
= low0
, high
= high0
;
5025 else if (lowequal
&& highequal
)
5026 in_p
= 0, low
= high
= 0;
5027 else if (subset
&& lowequal
)
5029 low
= range_successor (high1
);
5034 /* We are in the weird situation where high0 > high1 but
5035 high1 has no successor. Punt. */
5039 else if (! subset
|| highequal
)
5042 high
= range_predecessor (low1
);
5046 /* low0 < low1 but low1 has no predecessor. Punt. */
5054 else if (! in0_p
&& in1_p
)
5056 /* If they don't overlap, the result is the second range. If the second
5057 is a subset of the first, the result is false. Otherwise,
5058 the range starts just after the first range and ends at the
5059 end of the second. */
5061 in_p
= 1, low
= low1
, high
= high1
;
5062 else if (subset
|| highequal
)
5063 in_p
= 0, low
= high
= 0;
5066 low
= range_successor (high0
);
5071 /* high1 > high0 but high0 has no successor. Punt. */
5079 /* The case where we are excluding both ranges. Here the complex case
5080 is if they don't overlap. In that case, the only time we have a
5081 range is if they are adjacent. If the second is a subset of the
5082 first, the result is the first. Otherwise, the range to exclude
5083 starts at the beginning of the first range and ends at the end of the
5087 if (integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
5088 range_successor (high0
),
5090 in_p
= 0, low
= low0
, high
= high1
;
5093 /* Canonicalize - [min, x] into - [-, x]. */
5094 if (low0
&& TREE_CODE (low0
) == INTEGER_CST
)
5095 switch (TREE_CODE (TREE_TYPE (low0
)))
5098 if (TYPE_PRECISION (TREE_TYPE (low0
))
5099 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0
))))
5103 if (tree_int_cst_equal (low0
,
5104 TYPE_MIN_VALUE (TREE_TYPE (low0
))))
5108 if (TYPE_UNSIGNED (TREE_TYPE (low0
))
5109 && integer_zerop (low0
))
5116 /* Canonicalize - [x, max] into - [x, -]. */
5117 if (high1
&& TREE_CODE (high1
) == INTEGER_CST
)
5118 switch (TREE_CODE (TREE_TYPE (high1
)))
5121 if (TYPE_PRECISION (TREE_TYPE (high1
))
5122 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1
))))
5126 if (tree_int_cst_equal (high1
,
5127 TYPE_MAX_VALUE (TREE_TYPE (high1
))))
5131 if (TYPE_UNSIGNED (TREE_TYPE (high1
))
5132 && integer_zerop (range_binop (PLUS_EXPR
, NULL_TREE
,
5134 build_int_cst (TREE_TYPE (high1
), 1),
5142 /* The ranges might be also adjacent between the maximum and
5143 minimum values of the given type. For
5144 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5145 return + [x + 1, y - 1]. */
5146 if (low0
== 0 && high1
== 0)
5148 low
= range_successor (high0
);
5149 high
= range_predecessor (low1
);
5150 if (low
== 0 || high
== 0)
5160 in_p
= 0, low
= low0
, high
= high0
;
5162 in_p
= 0, low
= low0
, high
= high1
;
5165 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
5170 /* Subroutine of fold, looking inside expressions of the form
5171 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5172 of the COND_EXPR. This function is being used also to optimize
5173 A op B ? C : A, by reversing the comparison first.
5175 Return a folded expression whose code is not a COND_EXPR
5176 anymore, or NULL_TREE if no folding opportunity is found. */
5179 fold_cond_expr_with_comparison (location_t loc
, tree type
,
5180 tree arg0
, tree arg1
, tree arg2
)
5182 enum tree_code comp_code
= TREE_CODE (arg0
);
5183 tree arg00
= TREE_OPERAND (arg0
, 0);
5184 tree arg01
= TREE_OPERAND (arg0
, 1);
5185 tree arg1_type
= TREE_TYPE (arg1
);
5191 /* If we have A op 0 ? A : -A, consider applying the following
5194 A == 0? A : -A same as -A
5195 A != 0? A : -A same as A
5196 A >= 0? A : -A same as abs (A)
5197 A > 0? A : -A same as abs (A)
5198 A <= 0? A : -A same as -abs (A)
5199 A < 0? A : -A same as -abs (A)
5201 None of these transformations work for modes with signed
5202 zeros. If A is +/-0, the first two transformations will
5203 change the sign of the result (from +0 to -0, or vice
5204 versa). The last four will fix the sign of the result,
5205 even though the original expressions could be positive or
5206 negative, depending on the sign of A.
5208 Note that all these transformations are correct if A is
5209 NaN, since the two alternatives (A and -A) are also NaNs. */
5210 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
5211 && (FLOAT_TYPE_P (TREE_TYPE (arg01
))
5212 ? real_zerop (arg01
)
5213 : integer_zerop (arg01
))
5214 && ((TREE_CODE (arg2
) == NEGATE_EXPR
5215 && operand_equal_p (TREE_OPERAND (arg2
, 0), arg1
, 0))
5216 /* In the case that A is of the form X-Y, '-A' (arg2) may
5217 have already been folded to Y-X, check for that. */
5218 || (TREE_CODE (arg1
) == MINUS_EXPR
5219 && TREE_CODE (arg2
) == MINUS_EXPR
5220 && operand_equal_p (TREE_OPERAND (arg1
, 0),
5221 TREE_OPERAND (arg2
, 1), 0)
5222 && operand_equal_p (TREE_OPERAND (arg1
, 1),
5223 TREE_OPERAND (arg2
, 0), 0))))
5228 tem
= fold_convert_loc (loc
, arg1_type
, arg1
);
5229 return fold_convert_loc (loc
, type
, negate_expr (tem
));
5232 return fold_convert_loc (loc
, type
, arg1
);
5235 if (flag_trapping_math
)
5240 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
5242 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
5243 return fold_convert_loc (loc
, type
, tem
);
5246 if (flag_trapping_math
)
5251 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
5253 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
5254 return negate_expr (fold_convert_loc (loc
, type
, tem
));
5256 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
5260 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5261 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5262 both transformations are correct when A is NaN: A != 0
5263 is then true, and A == 0 is false. */
5265 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
5266 && integer_zerop (arg01
) && integer_zerop (arg2
))
5268 if (comp_code
== NE_EXPR
)
5269 return fold_convert_loc (loc
, type
, arg1
);
5270 else if (comp_code
== EQ_EXPR
)
5271 return build_zero_cst (type
);
5274 /* Try some transformations of A op B ? A : B.
5276 A == B? A : B same as B
5277 A != B? A : B same as A
5278 A >= B? A : B same as max (A, B)
5279 A > B? A : B same as max (B, A)
5280 A <= B? A : B same as min (A, B)
5281 A < B? A : B same as min (B, A)
5283 As above, these transformations don't work in the presence
5284 of signed zeros. For example, if A and B are zeros of
5285 opposite sign, the first two transformations will change
5286 the sign of the result. In the last four, the original
5287 expressions give different results for (A=+0, B=-0) and
5288 (A=-0, B=+0), but the transformed expressions do not.
5290 The first two transformations are correct if either A or B
5291 is a NaN. In the first transformation, the condition will
5292 be false, and B will indeed be chosen. In the case of the
5293 second transformation, the condition A != B will be true,
5294 and A will be chosen.
5296 The conversions to max() and min() are not correct if B is
5297 a number and A is not. The conditions in the original
5298 expressions will be false, so all four give B. The min()
5299 and max() versions would give a NaN instead. */
5300 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
5301 && operand_equal_for_comparison_p (arg01
, arg2
, arg00
)
5302 /* Avoid these transformations if the COND_EXPR may be used
5303 as an lvalue in the C++ front-end. PR c++/19199. */
5305 || VECTOR_TYPE_P (type
)
5306 || (! lang_GNU_CXX ()
5307 && strcmp (lang_hooks
.name
, "GNU Objective-C++") != 0)
5308 || ! maybe_lvalue_p (arg1
)
5309 || ! maybe_lvalue_p (arg2
)))
5311 tree comp_op0
= arg00
;
5312 tree comp_op1
= arg01
;
5313 tree comp_type
= TREE_TYPE (comp_op0
);
5318 return fold_convert_loc (loc
, type
, arg2
);
5320 return fold_convert_loc (loc
, type
, arg1
);
5325 /* In C++ a ?: expression can be an lvalue, so put the
5326 operand which will be used if they are equal first
5327 so that we can convert this back to the
5328 corresponding COND_EXPR. */
5329 if (!HONOR_NANS (arg1
))
5331 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
5332 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
5333 tem
= (comp_code
== LE_EXPR
|| comp_code
== UNLE_EXPR
)
5334 ? fold_build2_loc (loc
, MIN_EXPR
, comp_type
, comp_op0
, comp_op1
)
5335 : fold_build2_loc (loc
, MIN_EXPR
, comp_type
,
5336 comp_op1
, comp_op0
);
5337 return fold_convert_loc (loc
, type
, tem
);
5344 if (!HONOR_NANS (arg1
))
5346 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
5347 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
5348 tem
= (comp_code
== GE_EXPR
|| comp_code
== UNGE_EXPR
)
5349 ? fold_build2_loc (loc
, MAX_EXPR
, comp_type
, comp_op0
, comp_op1
)
5350 : fold_build2_loc (loc
, MAX_EXPR
, comp_type
,
5351 comp_op1
, comp_op0
);
5352 return fold_convert_loc (loc
, type
, tem
);
5356 if (!HONOR_NANS (arg1
))
5357 return fold_convert_loc (loc
, type
, arg2
);
5360 if (!HONOR_NANS (arg1
))
5361 return fold_convert_loc (loc
, type
, arg1
);
5364 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
5374 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5375 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5376 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5380 /* EXP is some logical combination of boolean tests. See if we can
5381 merge it into some range test. Return the new tree if so. */
5384 fold_range_test (location_t loc
, enum tree_code code
, tree type
,
5387 int or_op
= (code
== TRUTH_ORIF_EXPR
5388 || code
== TRUTH_OR_EXPR
);
5389 int in0_p
, in1_p
, in_p
;
5390 tree low0
, low1
, low
, high0
, high1
, high
;
5391 bool strict_overflow_p
= false;
5393 const char * const warnmsg
= G_("assuming signed overflow does not occur "
5394 "when simplifying range test");
5396 if (!INTEGRAL_TYPE_P (type
))
5399 lhs
= make_range (op0
, &in0_p
, &low0
, &high0
, &strict_overflow_p
);
5400 rhs
= make_range (op1
, &in1_p
, &low1
, &high1
, &strict_overflow_p
);
5402 /* If this is an OR operation, invert both sides; we will invert
5403 again at the end. */
5405 in0_p
= ! in0_p
, in1_p
= ! in1_p
;
5407 /* If both expressions are the same, if we can merge the ranges, and we
5408 can build the range test, return it or it inverted. If one of the
5409 ranges is always true or always false, consider it to be the same
5410 expression as the other. */
5411 if ((lhs
== 0 || rhs
== 0 || operand_equal_p (lhs
, rhs
, 0))
5412 && merge_ranges (&in_p
, &low
, &high
, in0_p
, low0
, high0
,
5414 && 0 != (tem
= (build_range_check (loc
, type
,
5416 : rhs
!= 0 ? rhs
: integer_zero_node
,
5419 if (strict_overflow_p
)
5420 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
5421 return or_op
? invert_truthvalue_loc (loc
, tem
) : tem
;
5424 /* On machines where the branch cost is expensive, if this is a
5425 short-circuited branch and the underlying object on both sides
5426 is the same, make a non-short-circuit operation. */
5427 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5428 && lhs
!= 0 && rhs
!= 0
5429 && (code
== TRUTH_ANDIF_EXPR
5430 || code
== TRUTH_ORIF_EXPR
)
5431 && operand_equal_p (lhs
, rhs
, 0))
5433 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5434 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5435 which cases we can't do this. */
5436 if (simple_operand_p (lhs
))
5437 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
5438 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
5441 else if (!lang_hooks
.decls
.global_bindings_p ()
5442 && !CONTAINS_PLACEHOLDER_P (lhs
))
5444 tree common
= save_expr (lhs
);
5446 if (0 != (lhs
= build_range_check (loc
, type
, common
,
5447 or_op
? ! in0_p
: in0_p
,
5449 && (0 != (rhs
= build_range_check (loc
, type
, common
,
5450 or_op
? ! in1_p
: in1_p
,
5453 if (strict_overflow_p
)
5454 fold_overflow_warning (warnmsg
,
5455 WARN_STRICT_OVERFLOW_COMPARISON
);
5456 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
5457 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
5466 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5467 bit value. Arrange things so the extra bits will be set to zero if and
5468 only if C is signed-extended to its full width. If MASK is nonzero,
5469 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5472 unextend (tree c
, int p
, int unsignedp
, tree mask
)
5474 tree type
= TREE_TYPE (c
);
5475 int modesize
= GET_MODE_BITSIZE (TYPE_MODE (type
));
5478 if (p
== modesize
|| unsignedp
)
5481 /* We work by getting just the sign bit into the low-order bit, then
5482 into the high-order bit, then sign-extend. We then XOR that value
5484 temp
= build_int_cst (TREE_TYPE (c
), wi::extract_uhwi (c
, p
- 1, 1));
5486 /* We must use a signed type in order to get an arithmetic right shift.
5487 However, we must also avoid introducing accidental overflows, so that
5488 a subsequent call to integer_zerop will work. Hence we must
5489 do the type conversion here. At this point, the constant is either
5490 zero or one, and the conversion to a signed type can never overflow.
5491 We could get an overflow if this conversion is done anywhere else. */
5492 if (TYPE_UNSIGNED (type
))
5493 temp
= fold_convert (signed_type_for (type
), temp
);
5495 temp
= const_binop (LSHIFT_EXPR
, temp
, size_int (modesize
- 1));
5496 temp
= const_binop (RSHIFT_EXPR
, temp
, size_int (modesize
- p
- 1));
5498 temp
= const_binop (BIT_AND_EXPR
, temp
,
5499 fold_convert (TREE_TYPE (c
), mask
));
5500 /* If necessary, convert the type back to match the type of C. */
5501 if (TYPE_UNSIGNED (type
))
5502 temp
= fold_convert (type
, temp
);
5504 return fold_convert (type
, const_binop (BIT_XOR_EXPR
, c
, temp
));
5507 /* For an expression that has the form
5511 we can drop one of the inner expressions and simplify to
5515 LOC is the location of the resulting expression. OP is the inner
5516 logical operation; the left-hand side in the examples above, while CMPOP
5517 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5518 removing a condition that guards another, as in
5519 (A != NULL && A->...) || A == NULL
5520 which we must not transform. If RHS_ONLY is true, only eliminate the
5521 right-most operand of the inner logical operation. */
5524 merge_truthop_with_opposite_arm (location_t loc
, tree op
, tree cmpop
,
5527 tree type
= TREE_TYPE (cmpop
);
5528 enum tree_code code
= TREE_CODE (cmpop
);
5529 enum tree_code truthop_code
= TREE_CODE (op
);
5530 tree lhs
= TREE_OPERAND (op
, 0);
5531 tree rhs
= TREE_OPERAND (op
, 1);
5532 tree orig_lhs
= lhs
, orig_rhs
= rhs
;
5533 enum tree_code rhs_code
= TREE_CODE (rhs
);
5534 enum tree_code lhs_code
= TREE_CODE (lhs
);
5535 enum tree_code inv_code
;
5537 if (TREE_SIDE_EFFECTS (op
) || TREE_SIDE_EFFECTS (cmpop
))
5540 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
5543 if (rhs_code
== truthop_code
)
5545 tree newrhs
= merge_truthop_with_opposite_arm (loc
, rhs
, cmpop
, rhs_only
);
5546 if (newrhs
!= NULL_TREE
)
5549 rhs_code
= TREE_CODE (rhs
);
5552 if (lhs_code
== truthop_code
&& !rhs_only
)
5554 tree newlhs
= merge_truthop_with_opposite_arm (loc
, lhs
, cmpop
, false);
5555 if (newlhs
!= NULL_TREE
)
5558 lhs_code
= TREE_CODE (lhs
);
5562 inv_code
= invert_tree_comparison (code
, HONOR_NANS (type
));
5563 if (inv_code
== rhs_code
5564 && operand_equal_p (TREE_OPERAND (rhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
5565 && operand_equal_p (TREE_OPERAND (rhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
5567 if (!rhs_only
&& inv_code
== lhs_code
5568 && operand_equal_p (TREE_OPERAND (lhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
5569 && operand_equal_p (TREE_OPERAND (lhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
5571 if (rhs
!= orig_rhs
|| lhs
!= orig_lhs
)
5572 return fold_build2_loc (loc
, truthop_code
, TREE_TYPE (cmpop
),
5577 /* Find ways of folding logical expressions of LHS and RHS:
5578 Try to merge two comparisons to the same innermost item.
5579 Look for range tests like "ch >= '0' && ch <= '9'".
5580 Look for combinations of simple terms on machines with expensive branches
5581 and evaluate the RHS unconditionally.
5583 For example, if we have p->a == 2 && p->b == 4 and we can make an
5584 object large enough to span both A and B, we can do this with a comparison
5585 against the object ANDed with the a mask.
5587 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5588 operations to do this with one comparison.
5590 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5591 function and the one above.
5593 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5594 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5596 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5599 We return the simplified tree or 0 if no optimization is possible. */
5602 fold_truth_andor_1 (location_t loc
, enum tree_code code
, tree truth_type
,
5605 /* If this is the "or" of two comparisons, we can do something if
5606 the comparisons are NE_EXPR. If this is the "and", we can do something
5607 if the comparisons are EQ_EXPR. I.e.,
5608 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5610 WANTED_CODE is this operation code. For single bit fields, we can
5611 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5612 comparison for one-bit fields. */
5614 enum tree_code wanted_code
;
5615 enum tree_code lcode
, rcode
;
5616 tree ll_arg
, lr_arg
, rl_arg
, rr_arg
;
5617 tree ll_inner
, lr_inner
, rl_inner
, rr_inner
;
5618 HOST_WIDE_INT ll_bitsize
, ll_bitpos
, lr_bitsize
, lr_bitpos
;
5619 HOST_WIDE_INT rl_bitsize
, rl_bitpos
, rr_bitsize
, rr_bitpos
;
5620 HOST_WIDE_INT xll_bitpos
, xlr_bitpos
, xrl_bitpos
, xrr_bitpos
;
5621 HOST_WIDE_INT lnbitsize
, lnbitpos
, rnbitsize
, rnbitpos
;
5622 int ll_unsignedp
, lr_unsignedp
, rl_unsignedp
, rr_unsignedp
;
5623 int ll_reversep
, lr_reversep
, rl_reversep
, rr_reversep
;
5624 machine_mode ll_mode
, lr_mode
, rl_mode
, rr_mode
;
5625 machine_mode lnmode
, rnmode
;
5626 tree ll_mask
, lr_mask
, rl_mask
, rr_mask
;
5627 tree ll_and_mask
, lr_and_mask
, rl_and_mask
, rr_and_mask
;
5628 tree l_const
, r_const
;
5629 tree lntype
, rntype
, result
;
5630 HOST_WIDE_INT first_bit
, end_bit
;
5633 /* Start by getting the comparison codes. Fail if anything is volatile.
5634 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5635 it were surrounded with a NE_EXPR. */
5637 if (TREE_SIDE_EFFECTS (lhs
) || TREE_SIDE_EFFECTS (rhs
))
5640 lcode
= TREE_CODE (lhs
);
5641 rcode
= TREE_CODE (rhs
);
5643 if (lcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (lhs
, 1)))
5645 lhs
= build2 (NE_EXPR
, truth_type
, lhs
,
5646 build_int_cst (TREE_TYPE (lhs
), 0));
5650 if (rcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (rhs
, 1)))
5652 rhs
= build2 (NE_EXPR
, truth_type
, rhs
,
5653 build_int_cst (TREE_TYPE (rhs
), 0));
5657 if (TREE_CODE_CLASS (lcode
) != tcc_comparison
5658 || TREE_CODE_CLASS (rcode
) != tcc_comparison
)
5661 ll_arg
= TREE_OPERAND (lhs
, 0);
5662 lr_arg
= TREE_OPERAND (lhs
, 1);
5663 rl_arg
= TREE_OPERAND (rhs
, 0);
5664 rr_arg
= TREE_OPERAND (rhs
, 1);
5666 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5667 if (simple_operand_p (ll_arg
)
5668 && simple_operand_p (lr_arg
))
5670 if (operand_equal_p (ll_arg
, rl_arg
, 0)
5671 && operand_equal_p (lr_arg
, rr_arg
, 0))
5673 result
= combine_comparisons (loc
, code
, lcode
, rcode
,
5674 truth_type
, ll_arg
, lr_arg
);
5678 else if (operand_equal_p (ll_arg
, rr_arg
, 0)
5679 && operand_equal_p (lr_arg
, rl_arg
, 0))
5681 result
= combine_comparisons (loc
, code
, lcode
,
5682 swap_tree_comparison (rcode
),
5683 truth_type
, ll_arg
, lr_arg
);
5689 code
= ((code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
)
5690 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
);
5692 /* If the RHS can be evaluated unconditionally and its operands are
5693 simple, it wins to evaluate the RHS unconditionally on machines
5694 with expensive branches. In this case, this isn't a comparison
5695 that can be merged. */
5697 if (BRANCH_COST (optimize_function_for_speed_p (cfun
),
5699 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg
))
5700 && simple_operand_p (rl_arg
)
5701 && simple_operand_p (rr_arg
))
5703 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5704 if (code
== TRUTH_OR_EXPR
5705 && lcode
== NE_EXPR
&& integer_zerop (lr_arg
)
5706 && rcode
== NE_EXPR
&& integer_zerop (rr_arg
)
5707 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5708 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5709 return build2_loc (loc
, NE_EXPR
, truth_type
,
5710 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5712 build_int_cst (TREE_TYPE (ll_arg
), 0));
5714 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5715 if (code
== TRUTH_AND_EXPR
5716 && lcode
== EQ_EXPR
&& integer_zerop (lr_arg
)
5717 && rcode
== EQ_EXPR
&& integer_zerop (rr_arg
)
5718 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5719 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5720 return build2_loc (loc
, EQ_EXPR
, truth_type
,
5721 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5723 build_int_cst (TREE_TYPE (ll_arg
), 0));
5726 /* See if the comparisons can be merged. Then get all the parameters for
5729 if ((lcode
!= EQ_EXPR
&& lcode
!= NE_EXPR
)
5730 || (rcode
!= EQ_EXPR
&& rcode
!= NE_EXPR
))
5733 ll_reversep
= lr_reversep
= rl_reversep
= rr_reversep
= 0;
5735 ll_inner
= decode_field_reference (loc
, &ll_arg
,
5736 &ll_bitsize
, &ll_bitpos
, &ll_mode
,
5737 &ll_unsignedp
, &ll_reversep
, &volatilep
,
5738 &ll_mask
, &ll_and_mask
);
5739 lr_inner
= decode_field_reference (loc
, &lr_arg
,
5740 &lr_bitsize
, &lr_bitpos
, &lr_mode
,
5741 &lr_unsignedp
, &lr_reversep
, &volatilep
,
5742 &lr_mask
, &lr_and_mask
);
5743 rl_inner
= decode_field_reference (loc
, &rl_arg
,
5744 &rl_bitsize
, &rl_bitpos
, &rl_mode
,
5745 &rl_unsignedp
, &rl_reversep
, &volatilep
,
5746 &rl_mask
, &rl_and_mask
);
5747 rr_inner
= decode_field_reference (loc
, &rr_arg
,
5748 &rr_bitsize
, &rr_bitpos
, &rr_mode
,
5749 &rr_unsignedp
, &rr_reversep
, &volatilep
,
5750 &rr_mask
, &rr_and_mask
);
5752 /* It must be true that the inner operation on the lhs of each
5753 comparison must be the same if we are to be able to do anything.
5754 Then see if we have constants. If not, the same must be true for
5757 || ll_reversep
!= rl_reversep
5758 || ll_inner
== 0 || rl_inner
== 0
5759 || ! operand_equal_p (ll_inner
, rl_inner
, 0))
5762 if (TREE_CODE (lr_arg
) == INTEGER_CST
5763 && TREE_CODE (rr_arg
) == INTEGER_CST
)
5765 l_const
= lr_arg
, r_const
= rr_arg
;
5766 lr_reversep
= ll_reversep
;
5768 else if (lr_reversep
!= rr_reversep
5769 || lr_inner
== 0 || rr_inner
== 0
5770 || ! operand_equal_p (lr_inner
, rr_inner
, 0))
5773 l_const
= r_const
= 0;
5775 /* If either comparison code is not correct for our logical operation,
5776 fail. However, we can convert a one-bit comparison against zero into
5777 the opposite comparison against that bit being set in the field. */
5779 wanted_code
= (code
== TRUTH_AND_EXPR
? EQ_EXPR
: NE_EXPR
);
5780 if (lcode
!= wanted_code
)
5782 if (l_const
&& integer_zerop (l_const
) && integer_pow2p (ll_mask
))
5784 /* Make the left operand unsigned, since we are only interested
5785 in the value of one bit. Otherwise we are doing the wrong
5794 /* This is analogous to the code for l_const above. */
5795 if (rcode
!= wanted_code
)
5797 if (r_const
&& integer_zerop (r_const
) && integer_pow2p (rl_mask
))
5806 /* See if we can find a mode that contains both fields being compared on
5807 the left. If we can't, fail. Otherwise, update all constants and masks
5808 to be relative to a field of that size. */
5809 first_bit
= MIN (ll_bitpos
, rl_bitpos
);
5810 end_bit
= MAX (ll_bitpos
+ ll_bitsize
, rl_bitpos
+ rl_bitsize
);
5811 lnmode
= get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
5812 TYPE_ALIGN (TREE_TYPE (ll_inner
)), word_mode
,
5814 if (lnmode
== VOIDmode
)
5817 lnbitsize
= GET_MODE_BITSIZE (lnmode
);
5818 lnbitpos
= first_bit
& ~ (lnbitsize
- 1);
5819 lntype
= lang_hooks
.types
.type_for_size (lnbitsize
, 1);
5820 xll_bitpos
= ll_bitpos
- lnbitpos
, xrl_bitpos
= rl_bitpos
- lnbitpos
;
5822 if (ll_reversep
? !BYTES_BIG_ENDIAN
: BYTES_BIG_ENDIAN
)
5824 xll_bitpos
= lnbitsize
- xll_bitpos
- ll_bitsize
;
5825 xrl_bitpos
= lnbitsize
- xrl_bitpos
- rl_bitsize
;
5828 ll_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, ll_mask
),
5829 size_int (xll_bitpos
));
5830 rl_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, rl_mask
),
5831 size_int (xrl_bitpos
));
5835 l_const
= fold_convert_loc (loc
, lntype
, l_const
);
5836 l_const
= unextend (l_const
, ll_bitsize
, ll_unsignedp
, ll_and_mask
);
5837 l_const
= const_binop (LSHIFT_EXPR
, l_const
, size_int (xll_bitpos
));
5838 if (! integer_zerop (const_binop (BIT_AND_EXPR
, l_const
,
5839 fold_build1_loc (loc
, BIT_NOT_EXPR
,
5842 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5844 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5849 r_const
= fold_convert_loc (loc
, lntype
, r_const
);
5850 r_const
= unextend (r_const
, rl_bitsize
, rl_unsignedp
, rl_and_mask
);
5851 r_const
= const_binop (LSHIFT_EXPR
, r_const
, size_int (xrl_bitpos
));
5852 if (! integer_zerop (const_binop (BIT_AND_EXPR
, r_const
,
5853 fold_build1_loc (loc
, BIT_NOT_EXPR
,
5856 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5858 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5862 /* If the right sides are not constant, do the same for it. Also,
5863 disallow this optimization if a size or signedness mismatch occurs
5864 between the left and right sides. */
5867 if (ll_bitsize
!= lr_bitsize
|| rl_bitsize
!= rr_bitsize
5868 || ll_unsignedp
!= lr_unsignedp
|| rl_unsignedp
!= rr_unsignedp
5869 /* Make sure the two fields on the right
5870 correspond to the left without being swapped. */
5871 || ll_bitpos
- rl_bitpos
!= lr_bitpos
- rr_bitpos
)
5874 first_bit
= MIN (lr_bitpos
, rr_bitpos
);
5875 end_bit
= MAX (lr_bitpos
+ lr_bitsize
, rr_bitpos
+ rr_bitsize
);
5876 rnmode
= get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
5877 TYPE_ALIGN (TREE_TYPE (lr_inner
)), word_mode
,
5879 if (rnmode
== VOIDmode
)
5882 rnbitsize
= GET_MODE_BITSIZE (rnmode
);
5883 rnbitpos
= first_bit
& ~ (rnbitsize
- 1);
5884 rntype
= lang_hooks
.types
.type_for_size (rnbitsize
, 1);
5885 xlr_bitpos
= lr_bitpos
- rnbitpos
, xrr_bitpos
= rr_bitpos
- rnbitpos
;
5887 if (lr_reversep
? !BYTES_BIG_ENDIAN
: BYTES_BIG_ENDIAN
)
5889 xlr_bitpos
= rnbitsize
- xlr_bitpos
- lr_bitsize
;
5890 xrr_bitpos
= rnbitsize
- xrr_bitpos
- rr_bitsize
;
5893 lr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
5895 size_int (xlr_bitpos
));
5896 rr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
5898 size_int (xrr_bitpos
));
5900 /* Make a mask that corresponds to both fields being compared.
5901 Do this for both items being compared. If the operands are the
5902 same size and the bits being compared are in the same position
5903 then we can do this by masking both and comparing the masked
5905 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
5906 lr_mask
= const_binop (BIT_IOR_EXPR
, lr_mask
, rr_mask
);
5907 if (lnbitsize
== rnbitsize
&& xll_bitpos
== xlr_bitpos
)
5909 lhs
= make_bit_field_ref (loc
, ll_inner
, ll_arg
,
5910 lntype
, lnbitsize
, lnbitpos
,
5911 ll_unsignedp
|| rl_unsignedp
, ll_reversep
);
5912 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5913 lhs
= build2 (BIT_AND_EXPR
, lntype
, lhs
, ll_mask
);
5915 rhs
= make_bit_field_ref (loc
, lr_inner
, lr_arg
,
5916 rntype
, rnbitsize
, rnbitpos
,
5917 lr_unsignedp
|| rr_unsignedp
, lr_reversep
);
5918 if (! all_ones_mask_p (lr_mask
, rnbitsize
))
5919 rhs
= build2 (BIT_AND_EXPR
, rntype
, rhs
, lr_mask
);
5921 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
5924 /* There is still another way we can do something: If both pairs of
5925 fields being compared are adjacent, we may be able to make a wider
5926 field containing them both.
5928 Note that we still must mask the lhs/rhs expressions. Furthermore,
5929 the mask must be shifted to account for the shift done by
5930 make_bit_field_ref. */
5931 if ((ll_bitsize
+ ll_bitpos
== rl_bitpos
5932 && lr_bitsize
+ lr_bitpos
== rr_bitpos
)
5933 || (ll_bitpos
== rl_bitpos
+ rl_bitsize
5934 && lr_bitpos
== rr_bitpos
+ rr_bitsize
))
5938 lhs
= make_bit_field_ref (loc
, ll_inner
, ll_arg
, lntype
,
5939 ll_bitsize
+ rl_bitsize
,
5940 MIN (ll_bitpos
, rl_bitpos
),
5941 ll_unsignedp
, ll_reversep
);
5942 rhs
= make_bit_field_ref (loc
, lr_inner
, lr_arg
, rntype
,
5943 lr_bitsize
+ rr_bitsize
,
5944 MIN (lr_bitpos
, rr_bitpos
),
5945 lr_unsignedp
, lr_reversep
);
5947 ll_mask
= const_binop (RSHIFT_EXPR
, ll_mask
,
5948 size_int (MIN (xll_bitpos
, xrl_bitpos
)));
5949 lr_mask
= const_binop (RSHIFT_EXPR
, lr_mask
,
5950 size_int (MIN (xlr_bitpos
, xrr_bitpos
)));
5952 /* Convert to the smaller type before masking out unwanted bits. */
5954 if (lntype
!= rntype
)
5956 if (lnbitsize
> rnbitsize
)
5958 lhs
= fold_convert_loc (loc
, rntype
, lhs
);
5959 ll_mask
= fold_convert_loc (loc
, rntype
, ll_mask
);
5962 else if (lnbitsize
< rnbitsize
)
5964 rhs
= fold_convert_loc (loc
, lntype
, rhs
);
5965 lr_mask
= fold_convert_loc (loc
, lntype
, lr_mask
);
5970 if (! all_ones_mask_p (ll_mask
, ll_bitsize
+ rl_bitsize
))
5971 lhs
= build2 (BIT_AND_EXPR
, type
, lhs
, ll_mask
);
5973 if (! all_ones_mask_p (lr_mask
, lr_bitsize
+ rr_bitsize
))
5974 rhs
= build2 (BIT_AND_EXPR
, type
, rhs
, lr_mask
);
5976 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
5982 /* Handle the case of comparisons with constants. If there is something in
5983 common between the masks, those bits of the constants must be the same.
5984 If not, the condition is always false. Test for this to avoid generating
5985 incorrect code below. */
5986 result
= const_binop (BIT_AND_EXPR
, ll_mask
, rl_mask
);
5987 if (! integer_zerop (result
)
5988 && simple_cst_equal (const_binop (BIT_AND_EXPR
, result
, l_const
),
5989 const_binop (BIT_AND_EXPR
, result
, r_const
)) != 1)
5991 if (wanted_code
== NE_EXPR
)
5993 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5994 return constant_boolean_node (true, truth_type
);
5998 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5999 return constant_boolean_node (false, truth_type
);
6003 /* Construct the expression we will return. First get the component
6004 reference we will make. Unless the mask is all ones the width of
6005 that field, perform the mask operation. Then compare with the
6007 result
= make_bit_field_ref (loc
, ll_inner
, ll_arg
,
6008 lntype
, lnbitsize
, lnbitpos
,
6009 ll_unsignedp
|| rl_unsignedp
, ll_reversep
);
6011 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
6012 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
6013 result
= build2_loc (loc
, BIT_AND_EXPR
, lntype
, result
, ll_mask
);
6015 return build2_loc (loc
, wanted_code
, truth_type
, result
,
6016 const_binop (BIT_IOR_EXPR
, l_const
, r_const
));
6019 /* T is an integer expression that is being multiplied, divided, or taken a
6020 modulus (CODE says which and what kind of divide or modulus) by a
6021 constant C. See if we can eliminate that operation by folding it with
6022 other operations already in T. WIDE_TYPE, if non-null, is a type that
6023 should be used for the computation if wider than our type.
6025 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6026 (X * 2) + (Y * 4). We must, however, be assured that either the original
6027 expression would not overflow or that overflow is undefined for the type
6028 in the language in question.
6030 If we return a non-null expression, it is an equivalent form of the
6031 original computation, but need not be in the original type.
6033 We set *STRICT_OVERFLOW_P to true if the return values depends on
6034 signed overflow being undefined. Otherwise we do not change
6035 *STRICT_OVERFLOW_P. */
/* Public entry point: wraps extract_muldiv_1 with a recursion-depth guard
   (see the comment below).  NOTE(review): this extract appears to be missing
   the original lines that declare/increment/decrement the depth counter and
   that return RET -- confirm against upstream fold-const.c.  */
6038 extract_muldiv (tree t
, tree c
, enum tree_code code
, tree wide_type
,
6039 bool *strict_overflow_p
)
6041 /* To avoid exponential search depth, refuse to allow recursion past
6042 three levels. Beyond that (1) it's highly unlikely that we'll find
6043 something interesting and (2) we've probably processed it before
6044 when we built the inner expression. */
/* Delegate the real work; RET carries the simplified tree or NULL_TREE.  */
6053 ret
= extract_muldiv_1 (t
, c
, code
, wide_type
, strict_overflow_p
);
/* Worker for extract_muldiv: try to fold the multiply/divide/modulus of T by
   constant C into T's own subexpressions, dispatching on TREE_CODE (t).
   Returns the simplified tree (possibly in CTYPE rather than TYPE) or a null
   result when no simplification applies.
   NOTE(review): this extract is missing several original lines -- the
   "static tree" return-type line, the enclosing switch statement, braces,
   some case labels and the bare "return NULL_TREE;" / "break;" statements.
   The section comments below identify regions by the surviving case labels;
   confirm details against upstream fold-const.c.  */
6060 extract_muldiv_1 (tree t
, tree c
, enum tree_code code
, tree wide_type
,
6061 bool *strict_overflow_p
)
/* TYPE is T's own type; CTYPE is the (possibly wider) computation type:
   WIDE_TYPE when it is strictly wider than TYPE, else TYPE itself.  */
6063 tree type
= TREE_TYPE (t
);
6064 enum tree_code tcode
= TREE_CODE (t
);
6065 tree ctype
= (wide_type
!= 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type
))
6066 > GET_MODE_SIZE (TYPE_MODE (type
)))
6067 ? wide_type
: type
);
/* same_p: T's own operation is the same kind as the one being distributed.  */
6069 int same_p
= tcode
== code
;
6070 tree op0
= NULL_TREE
, op1
= NULL_TREE
;
6071 bool sub_strict_overflow_p
;
6073 /* Don't deal with constants of zero here; they confuse the code below. */
6074 if (integer_zerop (c
))
/* Grab T's operand(s) for the unary/binary cases handled below.  */
6077 if (TREE_CODE_CLASS (tcode
) == tcc_unary
)
6078 op0
= TREE_OPERAND (t
, 0);
6080 if (TREE_CODE_CLASS (tcode
) == tcc_binary
)
6081 op0
= TREE_OPERAND (t
, 0), op1
= TREE_OPERAND (t
, 1);
6083 /* Note that we need not handle conditional operations here since fold
6084 already handles those cases. So just do arithmetic here. */
/* Section: T is itself a constant (INTEGER_CST case, label not visible in
   this extract).  */
6088 /* For a constant, we can always simplify if we are a multiply
6089 or (for divide and modulus) if it is a multiple of our constant. */
6090 if (code
== MULT_EXPR
6091 || wi::multiple_of_p (t
, c
, TYPE_SIGN (type
)))
6093 tree tem
= const_binop (code
, fold_convert (ctype
, t
),
6094 fold_convert (ctype
, c
));
6095 /* If the multiplication overflowed, we lost information on it.
6096 See PR68142 and PR69845. */
6097 if (TREE_OVERFLOW (tem
))
/* Section: conversions.  Decide whether the multiply/divide may be pushed
   through the (possibly widening or narrowing) conversion of OP0.  */
6103 CASE_CONVERT
: case NON_LVALUE_EXPR
:
6104 /* If op0 is an expression ... */
6105 if ((COMPARISON_CLASS_P (op0
)
6106 || UNARY_CLASS_P (op0
)
6107 || BINARY_CLASS_P (op0
)
6108 || VL_EXP_CLASS_P (op0
)
6109 || EXPRESSION_CLASS_P (op0
))
6110 /* ... and has wrapping overflow, and its type is smaller
6111 than ctype, then we cannot pass through as widening. */
6112 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0
))
6113 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0
)))
6114 && (TYPE_PRECISION (ctype
)
6115 > TYPE_PRECISION (TREE_TYPE (op0
))))
6116 /* ... or this is a truncation (t is narrower than op0),
6117 then we cannot pass through this narrowing. */
6118 || (TYPE_PRECISION (type
)
6119 < TYPE_PRECISION (TREE_TYPE (op0
)))
6120 /* ... or signedness changes for division or modulus,
6121 then we cannot pass through this conversion. */
6122 || (code
!= MULT_EXPR
6123 && (TYPE_UNSIGNED (ctype
)
6124 != TYPE_UNSIGNED (TREE_TYPE (op0
))))
6125 /* ... or has undefined overflow while the converted to
6126 type has not, we cannot do the operation in the inner type
6127 as that would introduce undefined overflow. */
6128 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0
))
6129 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0
)))
6130 && !TYPE_OVERFLOW_UNDEFINED (type
))))
6133 /* Pass the constant down and see if we can make a simplification. If
6134 we can, replace this expression with the inner simplification for
6135 possible later conversion to our or some other type. */
6136 if ((t2
= fold_convert (TREE_TYPE (op0
), c
)) != 0
6137 && TREE_CODE (t2
) == INTEGER_CST
6138 && !TREE_OVERFLOW (t2
)
6139 && (0 != (t1
= extract_muldiv (op0
, t2
, code
,
6141 ? ctype
: NULL_TREE
,
6142 strict_overflow_p
))))
/* Section: absolute value (ABS_EXPR in upstream; case label not visible in
   this extract -- confirm).  */
6147 /* If widening the type changes it from signed to unsigned, then we
6148 must avoid building ABS_EXPR itself as unsigned. */
6149 if (TYPE_UNSIGNED (ctype
) && !TYPE_UNSIGNED (type
))
6151 tree cstype
= (*signed_type_for
) (ctype
);
6152 if ((t1
= extract_muldiv (op0
, c
, code
, cstype
, strict_overflow_p
))
6155 t1
= fold_build1 (tcode
, cstype
, fold_convert (cstype
, t1
));
6156 return fold_convert (ctype
, t1
);
/* Section: unary negation (NEGATE_EXPR in upstream; case label not visible
   in this extract -- confirm).  */
6160 /* If the constant is negative, we cannot simplify this. */
6161 if (tree_int_cst_sgn (c
) == -1)
6165 /* For division and modulus, type can't be unsigned, as e.g.
6166 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6167 For signed types, even with wrapping overflow, this is fine. */
6168 if (code
!= MULT_EXPR
&& TYPE_UNSIGNED (type
))
6170 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
, strict_overflow_p
))
6172 return fold_build1 (tcode
, ctype
, fold_convert (ctype
, t1
));
/* Section: MIN/MAX distribute over the operation when both operands fold.  */
6175 case MIN_EXPR
: case MAX_EXPR
:
6176 /* If widening the type changes the signedness, then we can't perform
6177 this optimization as that changes the result. */
6178 if (TYPE_UNSIGNED (ctype
) != TYPE_UNSIGNED (type
))
6181 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6182 sub_strict_overflow_p
= false;
6183 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
,
6184 &sub_strict_overflow_p
)) != 0
6185 && (t2
= extract_muldiv (op1
, c
, code
, wide_type
,
6186 &sub_strict_overflow_p
)) != 0)
/* A negative C flips the sense of MIN/MAX.  */
6188 if (tree_int_cst_sgn (c
) < 0)
6189 tcode
= (tcode
== MIN_EXPR
? MAX_EXPR
: MIN_EXPR
);
6190 if (sub_strict_overflow_p
)
6191 *strict_overflow_p
= true;
6192 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6193 fold_convert (ctype
, t2
));
/* Section: shifts by a constant are rewritten as multiply / floor-divide by
   a power of two and retried.  */
6197 case LSHIFT_EXPR
: case RSHIFT_EXPR
:
6198 /* If the second operand is constant, this is a multiplication
6199 or floor division, by a power of two, so we can treat it that
6200 way unless the multiplier or divisor overflows. Signed
6201 left-shift overflow is implementation-defined rather than
6202 undefined in C90, so do not convert signed left shift into
6204 if (TREE_CODE (op1
) == INTEGER_CST
6205 && (tcode
== RSHIFT_EXPR
|| TYPE_UNSIGNED (TREE_TYPE (op0
)))
6206 /* const_binop may not detect overflow correctly,
6207 so check for it explicitly here. */
6208 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node
)), op1
)
6209 && 0 != (t1
= fold_convert (ctype
,
6210 const_binop (LSHIFT_EXPR
,
6213 && !TREE_OVERFLOW (t1
))
6214 return extract_muldiv (build2 (tcode
== LSHIFT_EXPR
6215 ? MULT_EXPR
: FLOOR_DIV_EXPR
,
6217 fold_convert (ctype
, op0
),
6219 c
, code
, wide_type
, strict_overflow_p
);
/* Section: addition/subtraction -- distribute when both sides fold, else
   normalize MINUS to PLUS and handle a constant OP1.  */
6222 case PLUS_EXPR
: case MINUS_EXPR
:
6223 /* See if we can eliminate the operation on both sides. If we can, we
6224 can return a new PLUS or MINUS. If we can't, the only remaining
6225 cases where we can do anything are if the second operand is a
6227 sub_strict_overflow_p
= false;
6228 t1
= extract_muldiv (op0
, c
, code
, wide_type
, &sub_strict_overflow_p
);
6229 t2
= extract_muldiv (op1
, c
, code
, wide_type
, &sub_strict_overflow_p
);
6230 if (t1
!= 0 && t2
!= 0
6231 && TYPE_OVERFLOW_WRAPS (ctype
)
6232 && (code
== MULT_EXPR
6233 /* If not multiplication, we can only do this if both operands
6234 are divisible by c. */
6235 || (multiple_of_p (ctype
, op0
, c
)
6236 && multiple_of_p (ctype
, op1
, c
))))
6238 if (sub_strict_overflow_p
)
6239 *strict_overflow_p
= true;
6240 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6241 fold_convert (ctype
, t2
));
6244 /* If this was a subtraction, negate OP1 and set it to be an addition.
6245 This simplifies the logic below. */
6246 if (tcode
== MINUS_EXPR
)
6248 tcode
= PLUS_EXPR
, op1
= negate_expr (op1
);
6249 /* If OP1 was not easily negatable, the constant may be OP0. */
6250 if (TREE_CODE (op0
) == INTEGER_CST
)
6252 std::swap (op0
, op1
);
/* From here on the PLUS path needs a constant OP1.  */
6257 if (TREE_CODE (op1
) != INTEGER_CST
)
6260 /* If either OP1 or C are negative, this optimization is not safe for
6261 some of the division and remainder types while for others we need
6262 to change the code. */
6263 if (tree_int_cst_sgn (op1
) < 0 || tree_int_cst_sgn (c
) < 0)
6265 if (code
== CEIL_DIV_EXPR
)
6266 code
= FLOOR_DIV_EXPR
;
6267 else if (code
== FLOOR_DIV_EXPR
)
6268 code
= CEIL_DIV_EXPR
;
6269 else if (code
!= MULT_EXPR
6270 && code
!= CEIL_MOD_EXPR
&& code
!= FLOOR_MOD_EXPR
)
6274 /* If it's a multiply or a division/modulus operation of a multiple
6275 of our constant, do the operation and verify it doesn't overflow. */
6276 if (code
== MULT_EXPR
6277 || wi::multiple_of_p (op1
, c
, TYPE_SIGN (type
)))
6279 op1
= const_binop (code
, fold_convert (ctype
, op1
),
6280 fold_convert (ctype
, c
));
6281 /* We allow the constant to overflow with wrapping semantics. */
6283 || (TREE_OVERFLOW (op1
) && !TYPE_OVERFLOW_WRAPS (ctype
)))
6289 /* If we have an unsigned type, we cannot widen the operation since it
6290 will change the result if the original computation overflowed. */
6291 if (TYPE_UNSIGNED (ctype
) && ctype
!= type
)
6294 /* The last case is if we are a multiply. In that case, we can
6295 apply the distributive law to commute the multiply and addition
6296 if the multiplication of the constants doesn't overflow
6297 and overflow is defined. With undefined overflow
6298 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6299 if (code
== MULT_EXPR
&& TYPE_OVERFLOW_WRAPS (ctype
))
6300 return fold_build2 (tcode
, ctype
,
6301 fold_build2 (code
, ctype
,
6302 fold_convert (ctype
, op0
),
6303 fold_convert (ctype
, c
)),
/* Section: T is a MULT_EXPR feeding a modulus/division (case label for
   MULT_EXPR not visible in this extract; it falls through into the
   division cases below).  */
6309 /* We have a special case here if we are doing something like
6310 (C * 8) % 4 since we know that's zero. */
6311 if ((code
== TRUNC_MOD_EXPR
|| code
== CEIL_MOD_EXPR
6312 || code
== FLOOR_MOD_EXPR
|| code
== ROUND_MOD_EXPR
)
6313 /* If the multiplication can overflow we cannot optimize this. */
6314 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
))
6315 && TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
6316 && wi::multiple_of_p (op1
, c
, TYPE_SIGN (type
)))
6318 *strict_overflow_p
= true;
6319 return omit_one_operand (type
, integer_zero_node
, op0
);
6322 /* ... fall through ... */
6324 case TRUNC_DIV_EXPR
: case CEIL_DIV_EXPR
: case FLOOR_DIV_EXPR
:
6325 case ROUND_DIV_EXPR
: case EXACT_DIV_EXPR
:
6326 /* If we can extract our operation from the LHS, do so and return a
6327 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6328 do something only if the second operand is a constant. */
6330 && TYPE_OVERFLOW_WRAPS (ctype
)
6331 && (t1
= extract_muldiv (op0
, c
, code
, wide_type
,
6332 strict_overflow_p
)) != 0)
6333 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6334 fold_convert (ctype
, op1
));
6335 else if (tcode
== MULT_EXPR
&& code
== MULT_EXPR
6336 && TYPE_OVERFLOW_WRAPS (ctype
)
6337 && (t1
= extract_muldiv (op1
, c
, code
, wide_type
,
6338 strict_overflow_p
)) != 0)
6339 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6340 fold_convert (ctype
, t1
));
6341 else if (TREE_CODE (op1
) != INTEGER_CST
)
6344 /* If these are the same operation types, we can associate them
6345 assuming no overflow. */
/* Associate OP1 * C in wide_int arithmetic, tracking overflow explicitly.  */
6348 bool overflow_p
= false;
6349 bool overflow_mul_p
;
6350 signop sign
= TYPE_SIGN (ctype
);
6351 unsigned prec
= TYPE_PRECISION (ctype
);
6352 wide_int mul
= wi::mul (wi::to_wide (op1
, prec
),
6353 wi::to_wide (c
, prec
),
6354 sign
, &overflow_mul_p
);
6355 overflow_p
= TREE_OVERFLOW (c
) | TREE_OVERFLOW (op1
);
6357 && ((sign
== UNSIGNED
&& tcode
!= MULT_EXPR
) || sign
== SIGNED
))
6360 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6361 wide_int_to_tree (ctype
, mul
));
6364 /* If these operations "cancel" each other, we have the main
6365 optimizations of this pass, which occur when either constant is a
6366 multiple of the other, in which case we replace this with either an
6367 operation or CODE or TCODE.
6369 If we have an unsigned type, we cannot do this since it will change
6370 the result if the original computation overflowed. */
6371 if (TYPE_OVERFLOW_UNDEFINED (ctype
)
6372 && ((code
== MULT_EXPR
&& tcode
== EXACT_DIV_EXPR
)
6373 || (tcode
== MULT_EXPR
6374 && code
!= TRUNC_MOD_EXPR
&& code
!= CEIL_MOD_EXPR
6375 && code
!= FLOOR_MOD_EXPR
&& code
!= ROUND_MOD_EXPR
6376 && code
!= MULT_EXPR
)))
/* OP1 a multiple of C: keep TCODE, dividing the constants.  */
6378 if (wi::multiple_of_p (op1
, c
, TYPE_SIGN (type
)))
6380 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6381 *strict_overflow_p
= true;
6382 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6383 fold_convert (ctype
,
6384 const_binop (TRUNC_DIV_EXPR
,
/* C a multiple of OP1: keep CODE, dividing the constants the other way.  */
6387 else if (wi::multiple_of_p (c
, op1
, TYPE_SIGN (type
)))
6389 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6390 *strict_overflow_p
= true;
6391 return fold_build2 (code
, ctype
, fold_convert (ctype
, op0
),
6392 fold_convert (ctype
,
6393 const_binop (TRUNC_DIV_EXPR
,
6406 /* Return a node which has the indicated constant VALUE (either 0 or
6407 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6408 and is of the indicated TYPE. */
/* Maps VALUE onto the canonical shared true/false node for common TYPEs,
   builds a splat for vector types, and falls back to fold_convert of the
   generic integer 0/1 nodes otherwise.  NOTE(review): the argument lines
   feeding build_int_cst in the vector branch are missing from this
   extract -- confirm against upstream fold-const.c.  */
6411 constant_boolean_node (bool value
, tree type
)
6413 if (type
== integer_type_node
)
6414 return value
? integer_one_node
: integer_zero_node
;
6415 else if (type
== boolean_type_node
)
6416 return value
? boolean_true_node
: boolean_false_node
;
6417 else if (TREE_CODE (type
) == VECTOR_TYPE
)
6418 return build_vector_from_val (type
,
6419 build_int_cst (TREE_TYPE (type
),
6422 return fold_convert (type
, value
? integer_one_node
: integer_zero_node
);
6426 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6427 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6428 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6429 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6430 COND is the first argument to CODE; otherwise (as in the example
6431 given here), it is the second argument. TYPE is the type of the
6432 original expression. Return NULL_TREE if no simplification is
/* NOTE(review): several original lines (return type, braces, early
   "return NULL_TREE;" statements and the else-branches guarded by
   COND_FIRST_P) are missing from this extract -- confirm against upstream
   fold-const.c.  */
6436 fold_binary_op_with_conditional_arg (location_t loc
,
6437 enum tree_code code
,
6438 tree type
, tree op0
, tree op1
,
6439 tree cond
, tree arg
, int cond_first_p
)
/* cond_type/arg_type: the operand types of CODE on the COND and ARG sides,
   selected by COND_FIRST_P.  */
6441 tree cond_type
= cond_first_p
? TREE_TYPE (op0
) : TREE_TYPE (op1
);
6442 tree arg_type
= cond_first_p
? TREE_TYPE (op1
) : TREE_TYPE (op0
);
6443 tree test
, true_value
, false_value
;
6444 tree lhs
= NULL_TREE
;
6445 tree rhs
= NULL_TREE
;
6446 enum tree_code cond_code
= COND_EXPR
;
/* Case 1: COND is an explicit (vector) conditional -- pick apart its three
   operands.  */
6448 if (TREE_CODE (cond
) == COND_EXPR
6449 || TREE_CODE (cond
) == VEC_COND_EXPR
)
6451 test
= TREE_OPERAND (cond
, 0);
6452 true_value
= TREE_OPERAND (cond
, 1);
6453 false_value
= TREE_OPERAND (cond
, 2);
6454 /* If this operand throws an expression, then it does not make
6455 sense to try to perform a logical or arithmetic operation
6457 if (VOID_TYPE_P (TREE_TYPE (true_value
)))
6459 if (VOID_TYPE_P (TREE_TYPE (false_value
)))
/* Case 2: COND is a boolean-valued expression (e.g. a comparison); treat it
   as COND ? true : false with canonical boolean constants.  */
6462 else if (!(TREE_CODE (type
) != VECTOR_TYPE
6463 && TREE_CODE (TREE_TYPE (cond
)) == VECTOR_TYPE
))
6465 tree testtype
= TREE_TYPE (cond
);
6467 true_value
= constant_boolean_node (true, testtype
);
6468 false_value
= constant_boolean_node (false, testtype
);
6471 /* Detect the case of mixing vector and scalar types - bail out. */
6474 if (TREE_CODE (TREE_TYPE (test
)) == VECTOR_TYPE
)
6475 cond_code
= VEC_COND_EXPR
;
6477 /* This transformation is only worthwhile if we don't have to wrap ARG
6478 in a SAVE_EXPR and the operation can be simplified without recursing
6479 on at least one of the branches once its pushed inside the COND_EXPR. */
6480 if (!TREE_CONSTANT (arg
)
6481 && (TREE_SIDE_EFFECTS (arg
)
6482 || TREE_CODE (arg
) == COND_EXPR
|| TREE_CODE (arg
) == VEC_COND_EXPR
6483 || TREE_CONSTANT (true_value
) || TREE_CONSTANT (false_value
)))
/* Push CODE into each arm, keeping operand order per COND_FIRST_P.  */
6486 arg
= fold_convert_loc (loc
, arg_type
, arg
);
6489 true_value
= fold_convert_loc (loc
, cond_type
, true_value
);
6491 lhs
= fold_build2_loc (loc
, code
, type
, true_value
, arg
);
6493 lhs
= fold_build2_loc (loc
, code
, type
, arg
, true_value
);
6497 false_value
= fold_convert_loc (loc
, cond_type
, false_value
);
6499 rhs
= fold_build2_loc (loc
, code
, type
, false_value
, arg
);
6501 rhs
= fold_build2_loc (loc
, code
, type
, arg
, false_value
);
6504 /* Check that we have simplified at least one of the branches. */
6505 if (!TREE_CONSTANT (arg
) && !TREE_CONSTANT (lhs
) && !TREE_CONSTANT (rhs
))
6508 return fold_build3_loc (loc
, cond_code
, type
, test
, lhs
, rhs
);
6512 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6514 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6515 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6516 ADDEND is the same as X.
6518 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6519 and finite. The problematic cases are when X is zero, and its mode
6520 has signed zeros. In the case of rounding towards -infinity,
6521 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6522 modes, X + 0 is not the same as X because -0 + 0 is 0. */
/* NOTE(review): the early "return false;"/"return true;" lines that
   terminate each guard below are missing from this extract -- confirm
   against upstream fold-const.c.  */
6525 fold_real_zero_addition_p (const_tree type
, const_tree addend
, int negate
)
/* Only zero addends are candidates at all.  */
6527 if (!real_zerop (addend
))
6530 /* Don't allow the fold with -fsignaling-nans. */
6531 if (HONOR_SNANS (element_mode (type
)))
6534 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6535 if (!HONOR_SIGNED_ZEROS (element_mode (type
)))
6538 /* In a vector or complex, we would need to check the sign of all zeros. */
6539 if (TREE_CODE (addend
) != REAL_CST
)
6542 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6543 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend
)))
6546 /* The mode has signed zeros, and we have to honor their sign.
6547 In this situation, there is only one case we can return true for.
6548 X - 0 is the same as X unless rounding towards -infinity is
6550 return negate
&& !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
));
6553 /* Subroutine of match.pd that optimizes comparisons of a division by
6554 a nonzero integer constant against an integer constant, i.e.
6557 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6558 GE_EXPR or LE_EXPR. ARG01 and ARG1 must be a INTEGER_CST. */
/* Computes *LO and *HI: the inclusive range of dividend values whose
   truncated quotient by C1 equals C2 (i.e. prod = C1*C2 plus/minus C1-1
   adjustments), with *NEG_OVERFLOW flagging overflow in the negative
   direction.  NOTE(review): the return-type line, the switch case labels,
   braces, and the lines declaring `overflow` and returning the resulting
   tree code are missing from this extract -- confirm against upstream
   fold-const.c.  */
6561 fold_div_compare (enum tree_code code
, tree c1
, tree c2
, tree
*lo
,
6562 tree
*hi
, bool *neg_overflow
)
6564 tree prod
, tmp
, type
= TREE_TYPE (c1
);
6565 signop sign
= TYPE_SIGN (type
);
6568 /* We have to do this the hard way to detect unsigned overflow.
6569 prod = int_const_binop (MULT_EXPR, c1, c2); */
6570 wide_int val
= wi::mul (c1
, c2
, sign
, &overflow
);
6571 prod
= force_fit_type (type
, val
, -1, overflow
);
6572 *neg_overflow
= false;
/* Regime 1: unsigned division -- range is [prod, prod + (c1-1)].  */
6574 if (sign
== UNSIGNED
)
6576 tmp
= int_const_binop (MINUS_EXPR
, c1
, build_int_cst (type
, 1));
6579 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6580 val
= wi::add (prod
, tmp
, sign
, &overflow
);
6581 *hi
= force_fit_type (type
, val
, -1, overflow
| TREE_OVERFLOW (prod
));
/* Regime 2: signed division by a non-negative C1, split on sign of C2.  */
6583 else if (tree_int_cst_sgn (c1
) >= 0)
6585 tmp
= int_const_binop (MINUS_EXPR
, c1
, build_int_cst (type
, 1));
6586 switch (tree_int_cst_sgn (c2
))
6589 *neg_overflow
= true;
6590 *lo
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6595 *lo
= fold_negate_const (tmp
, type
);
6600 *hi
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
/* Regime 3: signed division by a negative C1.  */
6610 /* A negative divisor reverses the relational operators. */
6611 code
= swap_tree_comparison (code
);
6613 tmp
= int_const_binop (PLUS_EXPR
, c1
, build_int_cst (type
, 1));
6614 switch (tree_int_cst_sgn (c2
))
6617 *hi
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6622 *hi
= fold_negate_const (tmp
, type
);
6627 *neg_overflow
= true;
6628 *lo
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
/* Saturate overflowed bounds against the type's extremes for the
   non-equality comparisons.  */
6637 if (code
!= EQ_EXPR
&& code
!= NE_EXPR
)
6640 if (TREE_OVERFLOW (*lo
)
6641 || operand_equal_p (*lo
, TYPE_MIN_VALUE (type
), 0))
6643 if (TREE_OVERFLOW (*hi
)
6644 || operand_equal_p (*hi
, TYPE_MAX_VALUE (type
), 0))
6651 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6652 equality/inequality test, then return a simplified form of the test
6653 using a sign testing. Otherwise return NULL. TYPE is the desired
/* NOTE(review): the return-type line, a result-type parameter, braces and
   the trailing "return NULL_TREE;" are missing from this extract --
   confirm against upstream fold-const.c.  */
6657 fold_single_bit_test_into_sign_test (location_t loc
,
6658 enum tree_code code
, tree arg0
, tree arg1
,
6661 /* If this is testing a single bit, we can optimize the test. */
6662 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6663 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6664 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6666 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6667 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6668 tree arg00
= sign_bit_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1));
6670 if (arg00
!= NULL_TREE
6671 /* This is only a win if casting to a signed type is cheap,
6672 i.e. when arg00's type is not a partial mode. */
6673 && TYPE_PRECISION (TREE_TYPE (arg00
))
6674 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00
))))
/* Rebuild as a signed comparison against zero: ==0 becomes >=0, !=0
   becomes <0.  */
6676 tree stype
= signed_type_for (TREE_TYPE (arg00
));
6677 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
6679 fold_convert_loc (loc
, stype
, arg00
),
6680 build_int_cst (stype
, 0));
6687 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6688 equality/inequality test, then return a simplified form of
6689 the test using shifts and logical operations. Otherwise return
6690 NULL. TYPE is the desired result type. */
/* NOTE(review): the return-type line, braces, the declarations of `tem`,
   `one` and `ops_unsigned`, the early return of TEM, and the final
   "return inner;" / "return NULL_TREE;" lines are missing from this
   extract -- confirm against upstream fold-const.c.  */
6693 fold_single_bit_test (location_t loc
, enum tree_code code
,
6694 tree arg0
, tree arg1
, tree result_type
)
6696 /* If this is testing a single bit, we can optimize the test. */
6697 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6698 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6699 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6701 tree inner
= TREE_OPERAND (arg0
, 0);
6702 tree type
= TREE_TYPE (arg0
);
/* bitnum: index of the single set bit in the mask operand.  */
6703 int bitnum
= tree_log2 (TREE_OPERAND (arg0
, 1));
6704 machine_mode operand_mode
= TYPE_MODE (type
);
6706 tree signed_type
, unsigned_type
, intermediate_type
;
6709 /* First, see if we can fold the single bit test into a sign-bit
6711 tem
= fold_single_bit_test_into_sign_test (loc
, code
, arg0
, arg1
,
6716 /* Otherwise we have (A & C) != 0 where C is a single bit,
6717 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6718 Similarly for (A & C) == 0. */
6720 /* If INNER is a right shift of a constant and it plus BITNUM does
6721 not overflow, adjust BITNUM and INNER. */
6722 if (TREE_CODE (inner
) == RSHIFT_EXPR
6723 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
6724 && bitnum
< TYPE_PRECISION (type
)
6725 && wi::ltu_p (TREE_OPERAND (inner
, 1),
6726 TYPE_PRECISION (type
) - bitnum
))
6728 bitnum
+= tree_to_uhwi (TREE_OPERAND (inner
, 1));
6729 inner
= TREE_OPERAND (inner
, 0);
6732 /* If we are going to be able to omit the AND below, we must do our
6733 operations as unsigned. If we must use the AND, we have a choice.
6734 Normally unsigned is faster, but for some machines signed is. */
6735 ops_unsigned
= (load_extend_op (operand_mode
) == SIGN_EXTEND
6736 && !flag_syntax_only
) ? 0 : 1;
/* Pick the signed or unsigned flavor of the mode's type per the choice
   above.  */
6738 signed_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 0);
6739 unsigned_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 1);
6740 intermediate_type
= ops_unsigned
? unsigned_type
: signed_type
;
6741 inner
= fold_convert_loc (loc
, intermediate_type
, inner
);
/* Shift the tested bit down to position 0 ...  */
6744 inner
= build2 (RSHIFT_EXPR
, intermediate_type
,
6745 inner
, size_int (bitnum
));
6747 one
= build_int_cst (intermediate_type
, 1);
/* ... invert for the == 0 form ...  */
6749 if (code
== EQ_EXPR
)
6750 inner
= fold_build2_loc (loc
, BIT_XOR_EXPR
, intermediate_type
, inner
, one
);
6752 /* Put the AND last so it can combine with more things. */
6753 inner
= build2 (BIT_AND_EXPR
, intermediate_type
, inner
, one
);
6755 /* Make sure to return the proper type. */
6756 inner
= fold_convert_loc (loc
, result_type
, inner
);
6763 /* Test whether it is preferable two swap two operands, ARG0 and
6764 ARG1, for example because ARG0 is an integer constant and ARG1
/* Canonical-ordering predicate: constants first in preference, then
   SSA_NAMEs ordered by version, SSA_NAMEs and variables pushed last.
   NOTE(review): the return-type line and the "return 0;"/"return 1;"
   statements terminating each guard are missing from this extract --
   confirm against upstream fold-const.c.  */
6768 tree_swap_operands_p (const_tree arg0
, const_tree arg1
)
6770 if (CONSTANT_CLASS_P (arg1
))
6772 if (CONSTANT_CLASS_P (arg0
))
6778 if (TREE_CONSTANT (arg1
))
6780 if (TREE_CONSTANT (arg0
))
6783 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6784 for commutative and comparison operators. Ensuring a canonical
6785 form allows the optimizers to find additional redundancies without
6786 having to explicitly check for both orderings. */
6787 if (TREE_CODE (arg0
) == SSA_NAME
6788 && TREE_CODE (arg1
) == SSA_NAME
6789 && SSA_NAME_VERSION (arg0
) > SSA_NAME_VERSION (arg1
))
6792 /* Put SSA_NAMEs last. */
6793 if (TREE_CODE (arg1
) == SSA_NAME
)
6795 if (TREE_CODE (arg0
) == SSA_NAME
)
6798 /* Put variables last. */
6808 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6809 means A >= Y && A != MAX, but in this case we know that
6810 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
/* NOTE(review): the return-type line, braces and the "return NULL_TREE;"
   fall-through statements for the unmatched shapes are missing from this
   extract -- confirm against upstream fold-const.c.  */
6813 fold_to_nonsharp_ineq_using_bound (location_t loc
, tree ineq
, tree bound
)
6815 tree a
, typea
, type
= TREE_TYPE (ineq
), a1
, diff
, y
;
/* Extract A from BOUND, which must be A < X or X > A.  */
6817 if (TREE_CODE (bound
) == LT_EXPR
)
6818 a
= TREE_OPERAND (bound
, 0);
6819 else if (TREE_CODE (bound
) == GT_EXPR
)
6820 a
= TREE_OPERAND (bound
, 1);
6824 typea
= TREE_TYPE (a
);
6825 if (!INTEGRAL_TYPE_P (typea
)
6826 && !POINTER_TYPE_P (typea
))
/* Extract A1 (expected to be A + 1) and Y from INEQ.  */
6829 if (TREE_CODE (ineq
) == LT_EXPR
)
6831 a1
= TREE_OPERAND (ineq
, 1);
6832 y
= TREE_OPERAND (ineq
, 0);
6834 else if (TREE_CODE (ineq
) == GT_EXPR
)
6836 a1
= TREE_OPERAND (ineq
, 0);
6837 y
= TREE_OPERAND (ineq
, 1);
6842 if (TREE_TYPE (a1
) != typea
)
6845 if (POINTER_TYPE_P (typea
))
6847 /* Convert the pointer types into integer before taking the difference. */
6848 tree ta
= fold_convert_loc (loc
, ssizetype
, a
);
6849 tree ta1
= fold_convert_loc (loc
, ssizetype
, a1
);
6850 diff
= fold_binary_loc (loc
, MINUS_EXPR
, ssizetype
, ta1
, ta
);
6853 diff
= fold_binary_loc (loc
, MINUS_EXPR
, typea
, a1
, a
);
/* The transform is only valid when A1 - A folds to exactly 1.  */
6855 if (!diff
|| !integer_onep (diff
))
6858 return fold_build2_loc (loc
, GE_EXPR
, type
, a
, y
);
6861 /* Fold a sum or difference of at least one multiplication.
6862 Returns the folded tree or NULL if no simplification could be made. */
6865 fold_plusminus_mult_expr (location_t loc
, enum tree_code code
, tree type
,
6866 tree arg0
, tree arg1
)
6868 tree arg00
, arg01
, arg10
, arg11
;
6869 tree alt0
= NULL_TREE
, alt1
= NULL_TREE
, same
;
6871 /* (A * C) +- (B * C) -> (A+-B) * C.
6872 (A * C) +- A -> A * (C+-1).
6873 We are most concerned about the case where C is a constant,
6874 but other combinations show up during loop reduction. Since
6875 it is not difficult, try all four possibilities. */
6877 if (TREE_CODE (arg0
) == MULT_EXPR
)
6879 arg00
= TREE_OPERAND (arg0
, 0);
6880 arg01
= TREE_OPERAND (arg0
, 1);
6882 else if (TREE_CODE (arg0
) == INTEGER_CST
)
6884 arg00
= build_one_cst (type
);
6889 /* We cannot generate constant 1 for fract. */
6890 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
6893 arg01
= build_one_cst (type
);
6895 if (TREE_CODE (arg1
) == MULT_EXPR
)
6897 arg10
= TREE_OPERAND (arg1
, 0);
6898 arg11
= TREE_OPERAND (arg1
, 1);
6900 else if (TREE_CODE (arg1
) == INTEGER_CST
)
6902 arg10
= build_one_cst (type
);
6903 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
6904 the purpose of this canonicalization. */
6905 if (wi::neg_p (arg1
, TYPE_SIGN (TREE_TYPE (arg1
)))
6906 && negate_expr_p (arg1
)
6907 && code
== PLUS_EXPR
)
6909 arg11
= negate_expr (arg1
);
6917 /* We cannot generate constant 1 for fract. */
6918 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
6921 arg11
= build_one_cst (type
);
6925 /* Prefer factoring a common non-constant. */
6926 if (operand_equal_p (arg00
, arg10
, 0))
6927 same
= arg00
, alt0
= arg01
, alt1
= arg11
;
6928 else if (operand_equal_p (arg01
, arg11
, 0))
6929 same
= arg01
, alt0
= arg00
, alt1
= arg10
;
6930 else if (operand_equal_p (arg00
, arg11
, 0))
6931 same
= arg00
, alt0
= arg01
, alt1
= arg10
;
6932 else if (operand_equal_p (arg01
, arg10
, 0))
6933 same
= arg01
, alt0
= arg00
, alt1
= arg11
;
6935 /* No identical multiplicands; see if we can find a common
6936 power-of-two factor in non-power-of-two multiplies. This
6937 can help in multi-dimensional array access. */
6938 else if (tree_fits_shwi_p (arg01
)
6939 && tree_fits_shwi_p (arg11
))
6941 HOST_WIDE_INT int01
, int11
, tmp
;
6944 int01
= tree_to_shwi (arg01
);
6945 int11
= tree_to_shwi (arg11
);
6947 /* Move min of absolute values to int11. */
6948 if (absu_hwi (int01
) < absu_hwi (int11
))
6950 tmp
= int01
, int01
= int11
, int11
= tmp
;
6951 alt0
= arg00
, arg00
= arg10
, arg10
= alt0
;
6958 if (exact_log2 (absu_hwi (int11
)) > 0 && int01
% int11
== 0
6959 /* The remainder should not be a constant, otherwise we
6960 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
6961 increased the number of multiplications necessary. */
6962 && TREE_CODE (arg10
) != INTEGER_CST
)
6964 alt0
= fold_build2_loc (loc
, MULT_EXPR
, TREE_TYPE (arg00
), arg00
,
6965 build_int_cst (TREE_TYPE (arg00
),
6970 maybe_same
= alt0
, alt0
= alt1
, alt1
= maybe_same
;
6977 if (! INTEGRAL_TYPE_P (type
)
6978 || TYPE_OVERFLOW_WRAPS (type
)
6979 /* We are neither factoring zero nor minus one. */
6980 || TREE_CODE (same
) == INTEGER_CST
)
6981 return fold_build2_loc (loc
, MULT_EXPR
, type
,
6982 fold_build2_loc (loc
, code
, type
,
6983 fold_convert_loc (loc
, type
, alt0
),
6984 fold_convert_loc (loc
, type
, alt1
)),
6985 fold_convert_loc (loc
, type
, same
));
6987 /* Same may be zero and thus the operation 'code' may overflow. Likewise
6988 same may be minus one and thus the multiplication may overflow. Perform
6989 the operations in an unsigned type. */
6990 tree utype
= unsigned_type_for (type
);
6991 tree tem
= fold_build2_loc (loc
, code
, utype
,
6992 fold_convert_loc (loc
, utype
, alt0
),
6993 fold_convert_loc (loc
, utype
, alt1
));
6994 /* If the sum evaluated to a constant that is not -INF the multiplication
6996 if (TREE_CODE (tem
) == INTEGER_CST
6997 && ! wi::eq_p (tem
, wi::min_value (TYPE_PRECISION (utype
), SIGNED
)))
6998 return fold_build2_loc (loc
, MULT_EXPR
, type
,
6999 fold_convert (type
, tem
), same
);
7001 return fold_convert_loc (loc
, type
,
7002 fold_build2_loc (loc
, MULT_EXPR
, utype
, tem
,
7003 fold_convert_loc (loc
, utype
, same
)));
7006 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7007 specified by EXPR into the buffer PTR of length LEN bytes.
7008 Return the number of bytes placed in the buffer, or zero
7012 native_encode_int (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7014 tree type
= TREE_TYPE (expr
);
7015 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7016 int byte
, offset
, word
, words
;
7017 unsigned char value
;
7019 if ((off
== -1 && total_bytes
> len
)
7020 || off
>= total_bytes
)
7024 words
= total_bytes
/ UNITS_PER_WORD
;
7026 for (byte
= 0; byte
< total_bytes
; byte
++)
7028 int bitpos
= byte
* BITS_PER_UNIT
;
7029 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7031 value
= wi::extract_uhwi (wi::to_widest (expr
), bitpos
, BITS_PER_UNIT
);
7033 if (total_bytes
> UNITS_PER_WORD
)
7035 word
= byte
/ UNITS_PER_WORD
;
7036 if (WORDS_BIG_ENDIAN
)
7037 word
= (words
- 1) - word
;
7038 offset
= word
* UNITS_PER_WORD
;
7039 if (BYTES_BIG_ENDIAN
)
7040 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7042 offset
+= byte
% UNITS_PER_WORD
;
7045 offset
= BYTES_BIG_ENDIAN
? (total_bytes
- 1) - byte
: byte
;
7047 && offset
- off
< len
)
7048 ptr
[offset
- off
] = value
;
7050 return MIN (len
, total_bytes
- off
);
7054 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7055 specified by EXPR into the buffer PTR of length LEN bytes.
7056 Return the number of bytes placed in the buffer, or zero
7060 native_encode_fixed (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7062 tree type
= TREE_TYPE (expr
);
7063 machine_mode mode
= TYPE_MODE (type
);
7064 int total_bytes
= GET_MODE_SIZE (mode
);
7065 FIXED_VALUE_TYPE value
;
7066 tree i_value
, i_type
;
7068 if (total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7071 i_type
= lang_hooks
.types
.type_for_size (GET_MODE_BITSIZE (mode
), 1);
7073 if (NULL_TREE
== i_type
7074 || TYPE_PRECISION (i_type
) != total_bytes
)
7077 value
= TREE_FIXED_CST (expr
);
7078 i_value
= double_int_to_tree (i_type
, value
.data
);
7080 return native_encode_int (i_value
, ptr
, len
, off
);
7084 /* Subroutine of native_encode_expr. Encode the REAL_CST
7085 specified by EXPR into the buffer PTR of length LEN bytes.
7086 Return the number of bytes placed in the buffer, or zero
7090 native_encode_real (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7092 tree type
= TREE_TYPE (expr
);
7093 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7094 int byte
, offset
, word
, words
, bitpos
;
7095 unsigned char value
;
7097 /* There are always 32 bits in each long, no matter the size of
7098 the hosts long. We handle floating point representations with
7102 if ((off
== -1 && total_bytes
> len
)
7103 || off
>= total_bytes
)
7107 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7109 real_to_target (tmp
, TREE_REAL_CST_PTR (expr
), TYPE_MODE (type
));
7111 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7112 bitpos
+= BITS_PER_UNIT
)
7114 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7115 value
= (unsigned char) (tmp
[bitpos
/ 32] >> (bitpos
& 31));
7117 if (UNITS_PER_WORD
< 4)
7119 word
= byte
/ UNITS_PER_WORD
;
7120 if (WORDS_BIG_ENDIAN
)
7121 word
= (words
- 1) - word
;
7122 offset
= word
* UNITS_PER_WORD
;
7123 if (BYTES_BIG_ENDIAN
)
7124 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7126 offset
+= byte
% UNITS_PER_WORD
;
7131 if (BYTES_BIG_ENDIAN
)
7133 /* Reverse bytes within each long, or within the entire float
7134 if it's smaller than a long (for HFmode). */
7135 offset
= MIN (3, total_bytes
- 1) - offset
;
7136 gcc_assert (offset
>= 0);
7139 offset
= offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3);
7141 && offset
- off
< len
)
7142 ptr
[offset
- off
] = value
;
7144 return MIN (len
, total_bytes
- off
);
7147 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7148 specified by EXPR into the buffer PTR of length LEN bytes.
7149 Return the number of bytes placed in the buffer, or zero
7153 native_encode_complex (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7158 part
= TREE_REALPART (expr
);
7159 rsize
= native_encode_expr (part
, ptr
, len
, off
);
7163 part
= TREE_IMAGPART (expr
);
7165 off
= MAX (0, off
- GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part
))));
7166 isize
= native_encode_expr (part
, ptr
+rsize
, len
-rsize
, off
);
7170 return rsize
+ isize
;
7174 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7175 specified by EXPR into the buffer PTR of length LEN bytes.
7176 Return the number of bytes placed in the buffer, or zero
7180 native_encode_vector (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7187 count
= VECTOR_CST_NELTS (expr
);
7188 itype
= TREE_TYPE (TREE_TYPE (expr
));
7189 size
= GET_MODE_SIZE (TYPE_MODE (itype
));
7190 for (i
= 0; i
< count
; i
++)
7197 elem
= VECTOR_CST_ELT (expr
, i
);
7198 int res
= native_encode_expr (elem
, ptr
+offset
, len
-offset
, off
);
7199 if ((off
== -1 && res
!= size
)
7212 /* Subroutine of native_encode_expr. Encode the STRING_CST
7213 specified by EXPR into the buffer PTR of length LEN bytes.
7214 Return the number of bytes placed in the buffer, or zero
7218 native_encode_string (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7220 tree type
= TREE_TYPE (expr
);
7221 HOST_WIDE_INT total_bytes
;
7223 if (TREE_CODE (type
) != ARRAY_TYPE
7224 || TREE_CODE (TREE_TYPE (type
)) != INTEGER_TYPE
7225 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) != BITS_PER_UNIT
7226 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type
)))
7228 total_bytes
= tree_to_shwi (TYPE_SIZE_UNIT (type
));
7229 if ((off
== -1 && total_bytes
> len
)
7230 || off
>= total_bytes
)
7234 if (TREE_STRING_LENGTH (expr
) - off
< MIN (total_bytes
, len
))
7237 if (off
< TREE_STRING_LENGTH (expr
))
7239 written
= MIN (len
, TREE_STRING_LENGTH (expr
) - off
);
7240 memcpy (ptr
, TREE_STRING_POINTER (expr
) + off
, written
);
7242 memset (ptr
+ written
, 0,
7243 MIN (total_bytes
- written
, len
- written
));
7246 memcpy (ptr
, TREE_STRING_POINTER (expr
) + off
, MIN (total_bytes
, len
));
7247 return MIN (total_bytes
- off
, len
);
7251 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7252 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7253 buffer PTR of length LEN bytes. If OFF is not -1 then start
7254 the encoding at byte offset OFF and encode at most LEN bytes.
7255 Return the number of bytes placed in the buffer, or zero upon failure. */
7258 native_encode_expr (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7260 /* We don't support starting at negative offset and -1 is special. */
7264 switch (TREE_CODE (expr
))
7267 return native_encode_int (expr
, ptr
, len
, off
);
7270 return native_encode_real (expr
, ptr
, len
, off
);
7273 return native_encode_fixed (expr
, ptr
, len
, off
);
7276 return native_encode_complex (expr
, ptr
, len
, off
);
7279 return native_encode_vector (expr
, ptr
, len
, off
);
7282 return native_encode_string (expr
, ptr
, len
, off
);
7290 /* Subroutine of native_interpret_expr. Interpret the contents of
7291 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7292 If the buffer cannot be interpreted, return NULL_TREE. */
7295 native_interpret_int (tree type
, const unsigned char *ptr
, int len
)
7297 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7299 if (total_bytes
> len
7300 || total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7303 wide_int result
= wi::from_buffer (ptr
, total_bytes
);
7305 return wide_int_to_tree (type
, result
);
7309 /* Subroutine of native_interpret_expr. Interpret the contents of
7310 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7311 If the buffer cannot be interpreted, return NULL_TREE. */
7314 native_interpret_fixed (tree type
, const unsigned char *ptr
, int len
)
7316 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7318 FIXED_VALUE_TYPE fixed_value
;
7320 if (total_bytes
> len
7321 || total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7324 result
= double_int::from_buffer (ptr
, total_bytes
);
7325 fixed_value
= fixed_from_double_int (result
, TYPE_MODE (type
));
7327 return build_fixed (type
, fixed_value
);
7331 /* Subroutine of native_interpret_expr. Interpret the contents of
7332 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7333 If the buffer cannot be interpreted, return NULL_TREE. */
7336 native_interpret_real (tree type
, const unsigned char *ptr
, int len
)
7338 machine_mode mode
= TYPE_MODE (type
);
7339 int total_bytes
= GET_MODE_SIZE (mode
);
7340 unsigned char value
;
7341 /* There are always 32 bits in each long, no matter the size of
7342 the hosts long. We handle floating point representations with
7347 total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7348 if (total_bytes
> len
|| total_bytes
> 24)
7350 int words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7352 memset (tmp
, 0, sizeof (tmp
));
7353 for (int bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7354 bitpos
+= BITS_PER_UNIT
)
7356 /* Both OFFSET and BYTE index within a long;
7357 bitpos indexes the whole float. */
7358 int offset
, byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7359 if (UNITS_PER_WORD
< 4)
7361 int word
= byte
/ UNITS_PER_WORD
;
7362 if (WORDS_BIG_ENDIAN
)
7363 word
= (words
- 1) - word
;
7364 offset
= word
* UNITS_PER_WORD
;
7365 if (BYTES_BIG_ENDIAN
)
7366 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7368 offset
+= byte
% UNITS_PER_WORD
;
7373 if (BYTES_BIG_ENDIAN
)
7375 /* Reverse bytes within each long, or within the entire float
7376 if it's smaller than a long (for HFmode). */
7377 offset
= MIN (3, total_bytes
- 1) - offset
;
7378 gcc_assert (offset
>= 0);
7381 value
= ptr
[offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3)];
7383 tmp
[bitpos
/ 32] |= (unsigned long)value
<< (bitpos
& 31);
7386 real_from_target (&r
, tmp
, mode
);
7387 return build_real (type
, r
);
7391 /* Subroutine of native_interpret_expr. Interpret the contents of
7392 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7393 If the buffer cannot be interpreted, return NULL_TREE. */
7396 native_interpret_complex (tree type
, const unsigned char *ptr
, int len
)
7398 tree etype
, rpart
, ipart
;
7401 etype
= TREE_TYPE (type
);
7402 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7405 rpart
= native_interpret_expr (etype
, ptr
, size
);
7408 ipart
= native_interpret_expr (etype
, ptr
+size
, size
);
7411 return build_complex (type
, rpart
, ipart
);
7415 /* Subroutine of native_interpret_expr. Interpret the contents of
7416 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7417 If the buffer cannot be interpreted, return NULL_TREE. */
7420 native_interpret_vector (tree type
, const unsigned char *ptr
, int len
)
7426 etype
= TREE_TYPE (type
);
7427 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7428 count
= TYPE_VECTOR_SUBPARTS (type
);
7429 if (size
* count
> len
)
7432 elements
= XALLOCAVEC (tree
, count
);
7433 for (i
= count
- 1; i
>= 0; i
--)
7435 elem
= native_interpret_expr (etype
, ptr
+(i
*size
), size
);
7440 return build_vector (type
, elements
);
7444 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7445 the buffer PTR of length LEN as a constant of type TYPE. For
7446 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7447 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7448 return NULL_TREE. */
7451 native_interpret_expr (tree type
, const unsigned char *ptr
, int len
)
7453 switch (TREE_CODE (type
))
7459 case REFERENCE_TYPE
:
7460 return native_interpret_int (type
, ptr
, len
);
7463 return native_interpret_real (type
, ptr
, len
);
7465 case FIXED_POINT_TYPE
:
7466 return native_interpret_fixed (type
, ptr
, len
);
7469 return native_interpret_complex (type
, ptr
, len
);
7472 return native_interpret_vector (type
, ptr
, len
);
7479 /* Returns true if we can interpret the contents of a native encoding
7483 can_native_interpret_type_p (tree type
)
7485 switch (TREE_CODE (type
))
7491 case REFERENCE_TYPE
:
7492 case FIXED_POINT_TYPE
:
7502 /* Return true iff a constant of type TYPE is accepted by
7503 native_encode_expr. */
7506 can_native_encode_type_p (tree type
)
7508 switch (TREE_CODE (type
))
7512 case FIXED_POINT_TYPE
:
7522 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7523 TYPE at compile-time. If we're unable to perform the conversion
7524 return NULL_TREE. */
7527 fold_view_convert_expr (tree type
, tree expr
)
7529 /* We support up to 512-bit values (for V8DFmode). */
7530 unsigned char buffer
[64];
7533 /* Check that the host and target are sane. */
7534 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8)
7537 len
= native_encode_expr (expr
, buffer
, sizeof (buffer
));
7541 return native_interpret_expr (type
, buffer
, len
);
7544 /* Build an expression for the address of T. Folds away INDIRECT_REF
7545 to avoid confusing the gimplify process. */
7548 build_fold_addr_expr_with_type_loc (location_t loc
, tree t
, tree ptrtype
)
7550 /* The size of the object is not relevant when talking about its address. */
7551 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
7552 t
= TREE_OPERAND (t
, 0);
7554 if (TREE_CODE (t
) == INDIRECT_REF
)
7556 t
= TREE_OPERAND (t
, 0);
7558 if (TREE_TYPE (t
) != ptrtype
)
7559 t
= build1_loc (loc
, NOP_EXPR
, ptrtype
, t
);
7561 else if (TREE_CODE (t
) == MEM_REF
7562 && integer_zerop (TREE_OPERAND (t
, 1)))
7563 return TREE_OPERAND (t
, 0);
7564 else if (TREE_CODE (t
) == MEM_REF
7565 && TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
)
7566 return fold_binary (POINTER_PLUS_EXPR
, ptrtype
,
7567 TREE_OPERAND (t
, 0),
7568 convert_to_ptrofftype (TREE_OPERAND (t
, 1)));
7569 else if (TREE_CODE (t
) == VIEW_CONVERT_EXPR
)
7571 t
= build_fold_addr_expr_loc (loc
, TREE_OPERAND (t
, 0));
7573 if (TREE_TYPE (t
) != ptrtype
)
7574 t
= fold_convert_loc (loc
, ptrtype
, t
);
7577 t
= build1_loc (loc
, ADDR_EXPR
, ptrtype
, t
);
7582 /* Build an expression for the address of T. */
7585 build_fold_addr_expr_loc (location_t loc
, tree t
)
7587 tree ptrtype
= build_pointer_type (TREE_TYPE (t
));
7589 return build_fold_addr_expr_with_type_loc (loc
, t
, ptrtype
);
7592 /* Fold a unary expression of code CODE and type TYPE with operand
7593 OP0. Return the folded expression if folding is successful.
7594 Otherwise, return NULL_TREE. */
7597 fold_unary_loc (location_t loc
, enum tree_code code
, tree type
, tree op0
)
7601 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
7603 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
7604 && TREE_CODE_LENGTH (code
) == 1);
7609 if (CONVERT_EXPR_CODE_P (code
)
7610 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
|| code
== NEGATE_EXPR
)
7612 /* Don't use STRIP_NOPS, because signedness of argument type
7614 STRIP_SIGN_NOPS (arg0
);
7618 /* Strip any conversions that don't change the mode. This
7619 is safe for every expression, except for a comparison
7620 expression because its signedness is derived from its
7623 Note that this is done as an internal manipulation within
7624 the constant folder, in order to find the simplest
7625 representation of the arguments so that their form can be
7626 studied. In any cases, the appropriate type conversions
7627 should be put back in the tree that will get out of the
7632 if (CONSTANT_CLASS_P (arg0
))
7634 tree tem
= const_unop (code
, type
, arg0
);
7637 if (TREE_TYPE (tem
) != type
)
7638 tem
= fold_convert_loc (loc
, type
, tem
);
7644 tem
= generic_simplify (loc
, code
, type
, op0
);
7648 if (TREE_CODE_CLASS (code
) == tcc_unary
)
7650 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7651 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7652 fold_build1_loc (loc
, code
, type
,
7653 fold_convert_loc (loc
, TREE_TYPE (op0
),
7654 TREE_OPERAND (arg0
, 1))));
7655 else if (TREE_CODE (arg0
) == COND_EXPR
)
7657 tree arg01
= TREE_OPERAND (arg0
, 1);
7658 tree arg02
= TREE_OPERAND (arg0
, 2);
7659 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
7660 arg01
= fold_build1_loc (loc
, code
, type
,
7661 fold_convert_loc (loc
,
7662 TREE_TYPE (op0
), arg01
));
7663 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
7664 arg02
= fold_build1_loc (loc
, code
, type
,
7665 fold_convert_loc (loc
,
7666 TREE_TYPE (op0
), arg02
));
7667 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7670 /* If this was a conversion, and all we did was to move into
7671 inside the COND_EXPR, bring it back out. But leave it if
7672 it is a conversion from integer to integer and the
7673 result precision is no wider than a word since such a
7674 conversion is cheap and may be optimized away by combine,
7675 while it couldn't if it were outside the COND_EXPR. Then return
7676 so we don't get into an infinite recursion loop taking the
7677 conversion out and then back in. */
7679 if ((CONVERT_EXPR_CODE_P (code
)
7680 || code
== NON_LVALUE_EXPR
)
7681 && TREE_CODE (tem
) == COND_EXPR
7682 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
7683 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
7684 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
7685 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
7686 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
7687 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
7688 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7690 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
7691 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
7692 || flag_syntax_only
))
7693 tem
= build1_loc (loc
, code
, type
,
7695 TREE_TYPE (TREE_OPERAND
7696 (TREE_OPERAND (tem
, 1), 0)),
7697 TREE_OPERAND (tem
, 0),
7698 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
7699 TREE_OPERAND (TREE_OPERAND (tem
, 2),
7707 case NON_LVALUE_EXPR
:
7708 if (!maybe_lvalue_p (op0
))
7709 return fold_convert_loc (loc
, type
, op0
);
7714 case FIX_TRUNC_EXPR
:
7715 if (COMPARISON_CLASS_P (op0
))
7717 /* If we have (type) (a CMP b) and type is an integral type, return
7718 new expression involving the new type. Canonicalize
7719 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7721 Do not fold the result as that would not simplify further, also
7722 folding again results in recursions. */
7723 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7724 return build2_loc (loc
, TREE_CODE (op0
), type
,
7725 TREE_OPERAND (op0
, 0),
7726 TREE_OPERAND (op0
, 1));
7727 else if (!INTEGRAL_TYPE_P (type
) && !VOID_TYPE_P (type
)
7728 && TREE_CODE (type
) != VECTOR_TYPE
)
7729 return build3_loc (loc
, COND_EXPR
, type
, op0
,
7730 constant_boolean_node (true, type
),
7731 constant_boolean_node (false, type
));
7734 /* Handle (T *)&A.B.C for A being of type T and B and C
7735 living at offset zero. This occurs frequently in
7736 C++ upcasting and then accessing the base. */
7737 if (TREE_CODE (op0
) == ADDR_EXPR
7738 && POINTER_TYPE_P (type
)
7739 && handled_component_p (TREE_OPERAND (op0
, 0)))
7741 HOST_WIDE_INT bitsize
, bitpos
;
7744 int unsignedp
, reversep
, volatilep
;
7746 = get_inner_reference (TREE_OPERAND (op0
, 0), &bitsize
, &bitpos
,
7747 &offset
, &mode
, &unsignedp
, &reversep
,
7749 /* If the reference was to a (constant) zero offset, we can use
7750 the address of the base if it has the same base type
7751 as the result type and the pointer type is unqualified. */
7752 if (! offset
&& bitpos
== 0
7753 && (TYPE_MAIN_VARIANT (TREE_TYPE (type
))
7754 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
7755 && TYPE_QUALS (type
) == TYPE_UNQUALIFIED
)
7756 return fold_convert_loc (loc
, type
,
7757 build_fold_addr_expr_loc (loc
, base
));
7760 if (TREE_CODE (op0
) == MODIFY_EXPR
7761 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
7762 /* Detect assigning a bitfield. */
7763 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
7765 (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
7767 /* Don't leave an assignment inside a conversion
7768 unless assigning a bitfield. */
7769 tem
= fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 1));
7770 /* First do the assignment, then return converted constant. */
7771 tem
= build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
7772 TREE_NO_WARNING (tem
) = 1;
7773 TREE_USED (tem
) = 1;
7777 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7778 constants (if x has signed type, the sign bit cannot be set
7779 in c). This folds extension into the BIT_AND_EXPR.
7780 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7781 very likely don't have maximal range for their precision and this
7782 transformation effectively doesn't preserve non-maximal ranges. */
7783 if (TREE_CODE (type
) == INTEGER_TYPE
7784 && TREE_CODE (op0
) == BIT_AND_EXPR
7785 && TREE_CODE (TREE_OPERAND (op0
, 1)) == INTEGER_CST
)
7787 tree and_expr
= op0
;
7788 tree and0
= TREE_OPERAND (and_expr
, 0);
7789 tree and1
= TREE_OPERAND (and_expr
, 1);
7792 if (TYPE_UNSIGNED (TREE_TYPE (and_expr
))
7793 || (TYPE_PRECISION (type
)
7794 <= TYPE_PRECISION (TREE_TYPE (and_expr
))))
7796 else if (TYPE_PRECISION (TREE_TYPE (and1
))
7797 <= HOST_BITS_PER_WIDE_INT
7798 && tree_fits_uhwi_p (and1
))
7800 unsigned HOST_WIDE_INT cst
;
7802 cst
= tree_to_uhwi (and1
);
7803 cst
&= HOST_WIDE_INT_M1U
7804 << (TYPE_PRECISION (TREE_TYPE (and1
)) - 1);
7805 change
= (cst
== 0);
7807 && !flag_syntax_only
7808 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0
)))
7811 tree uns
= unsigned_type_for (TREE_TYPE (and0
));
7812 and0
= fold_convert_loc (loc
, uns
, and0
);
7813 and1
= fold_convert_loc (loc
, uns
, and1
);
7818 tem
= force_fit_type (type
, wi::to_widest (and1
), 0,
7819 TREE_OVERFLOW (and1
));
7820 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
7821 fold_convert_loc (loc
, type
, and0
), tem
);
7825 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7826 cast (T1)X will fold away. We assume that this happens when X itself
7828 if (POINTER_TYPE_P (type
)
7829 && TREE_CODE (arg0
) == POINTER_PLUS_EXPR
7830 && CONVERT_EXPR_P (TREE_OPERAND (arg0
, 0)))
7832 tree arg00
= TREE_OPERAND (arg0
, 0);
7833 tree arg01
= TREE_OPERAND (arg0
, 1);
7835 return fold_build_pointer_plus_loc
7836 (loc
, fold_convert_loc (loc
, type
, arg00
), arg01
);
7839 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7840 of the same precision, and X is an integer type not narrower than
7841 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7842 if (INTEGRAL_TYPE_P (type
)
7843 && TREE_CODE (op0
) == BIT_NOT_EXPR
7844 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7845 && CONVERT_EXPR_P (TREE_OPERAND (op0
, 0))
7846 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
7848 tem
= TREE_OPERAND (TREE_OPERAND (op0
, 0), 0);
7849 if (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7850 && TYPE_PRECISION (type
) <= TYPE_PRECISION (TREE_TYPE (tem
)))
7851 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
7852 fold_convert_loc (loc
, type
, tem
));
7855 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7856 type of X and Y (integer types only). */
7857 if (INTEGRAL_TYPE_P (type
)
7858 && TREE_CODE (op0
) == MULT_EXPR
7859 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7860 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
7862 /* Be careful not to introduce new overflows. */
7864 if (TYPE_OVERFLOW_WRAPS (type
))
7867 mult_type
= unsigned_type_for (type
);
7869 if (TYPE_PRECISION (mult_type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
7871 tem
= fold_build2_loc (loc
, MULT_EXPR
, mult_type
,
7872 fold_convert_loc (loc
, mult_type
,
7873 TREE_OPERAND (op0
, 0)),
7874 fold_convert_loc (loc
, mult_type
,
7875 TREE_OPERAND (op0
, 1)));
7876 return fold_convert_loc (loc
, type
, tem
);
7882 case VIEW_CONVERT_EXPR
:
7883 if (TREE_CODE (op0
) == MEM_REF
)
7885 if (TYPE_ALIGN (TREE_TYPE (op0
)) != TYPE_ALIGN (type
))
7886 type
= build_aligned_type (type
, TYPE_ALIGN (TREE_TYPE (op0
)));
7887 tem
= fold_build2_loc (loc
, MEM_REF
, type
,
7888 TREE_OPERAND (op0
, 0), TREE_OPERAND (op0
, 1));
7889 REF_REVERSE_STORAGE_ORDER (tem
) = REF_REVERSE_STORAGE_ORDER (op0
);
7896 tem
= fold_negate_expr (loc
, arg0
);
7898 return fold_convert_loc (loc
, type
, tem
);
7902 /* Convert fabs((double)float) into (double)fabsf(float). */
7903 if (TREE_CODE (arg0
) == NOP_EXPR
7904 && TREE_CODE (type
) == REAL_TYPE
)
7906 tree targ0
= strip_float_extensions (arg0
);
7908 return fold_convert_loc (loc
, type
,
7909 fold_build1_loc (loc
, ABS_EXPR
,
7916 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7917 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
7918 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
7919 fold_convert_loc (loc
, type
,
7920 TREE_OPERAND (arg0
, 0)))))
7921 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, tem
,
7922 fold_convert_loc (loc
, type
,
7923 TREE_OPERAND (arg0
, 1)));
7924 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
7925 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
7926 fold_convert_loc (loc
, type
,
7927 TREE_OPERAND (arg0
, 1)))))
7928 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
7929 fold_convert_loc (loc
, type
,
7930 TREE_OPERAND (arg0
, 0)), tem
);
7934 case TRUTH_NOT_EXPR
:
7935 /* Note that the operand of this must be an int
7936 and its values must be 0 or 1.
7937 ("true" is a fixed value perhaps depending on the language,
7938 but we don't handle values other than 1 correctly yet.) */
7939 tem
= fold_truth_not_expr (loc
, arg0
);
7942 return fold_convert_loc (loc
, type
, tem
);
7945 /* Fold *&X to X if X is an lvalue. */
7946 if (TREE_CODE (op0
) == ADDR_EXPR
)
7948 tree op00
= TREE_OPERAND (op0
, 0);
7950 || TREE_CODE (op00
) == PARM_DECL
7951 || TREE_CODE (op00
) == RESULT_DECL
)
7952 && !TREE_READONLY (op00
))
7959 } /* switch (code) */
7963 /* If the operation was a conversion do _not_ mark a resulting constant
7964 with TREE_OVERFLOW if the original constant was not. These conversions
7965 have implementation defined behavior and retaining the TREE_OVERFLOW
7966 flag here would confuse later passes such as VRP. */
7968 fold_unary_ignore_overflow_loc (location_t loc
, enum tree_code code
,
7969 tree type
, tree op0
)
7971 tree res
= fold_unary_loc (loc
, code
, type
, op0
);
7973 && TREE_CODE (res
) == INTEGER_CST
7974 && TREE_CODE (op0
) == INTEGER_CST
7975 && CONVERT_EXPR_CODE_P (code
))
7976 TREE_OVERFLOW (res
) = TREE_OVERFLOW (op0
);
7981 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
7982 operands OP0 and OP1. LOC is the location of the resulting expression.
7983 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
7984 Return the folded expression if folding is successful. Otherwise,
7985 return NULL_TREE. */
7987 fold_truth_andor (location_t loc
, enum tree_code code
, tree type
,
7988 tree arg0
, tree arg1
, tree op0
, tree op1
)
7992 /* We only do these simplifications if we are optimizing. */
7996 /* Check for things like (A || B) && (A || C). We can convert this
7997 to A || (B && C). Note that either operator can be any of the four
7998 truth and/or operations and the transformation will still be
7999 valid. Also note that we only care about order for the
8000 ANDIF and ORIF operators. If B contains side effects, this
8001 might change the truth-value of A. */
8002 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8003 && (TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
8004 || TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
8005 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
8006 || TREE_CODE (arg0
) == TRUTH_OR_EXPR
)
8007 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0
, 1)))
8009 tree a00
= TREE_OPERAND (arg0
, 0);
8010 tree a01
= TREE_OPERAND (arg0
, 1);
8011 tree a10
= TREE_OPERAND (arg1
, 0);
8012 tree a11
= TREE_OPERAND (arg1
, 1);
8013 int commutative
= ((TREE_CODE (arg0
) == TRUTH_OR_EXPR
8014 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
)
8015 && (code
== TRUTH_AND_EXPR
8016 || code
== TRUTH_OR_EXPR
));
8018 if (operand_equal_p (a00
, a10
, 0))
8019 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
8020 fold_build2_loc (loc
, code
, type
, a01
, a11
));
8021 else if (commutative
&& operand_equal_p (a00
, a11
, 0))
8022 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
8023 fold_build2_loc (loc
, code
, type
, a01
, a10
));
8024 else if (commutative
&& operand_equal_p (a01
, a10
, 0))
8025 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a01
,
8026 fold_build2_loc (loc
, code
, type
, a00
, a11
));
8028 /* This case if tricky because we must either have commutative
8029 operators or else A10 must not have side-effects. */
8031 else if ((commutative
|| ! TREE_SIDE_EFFECTS (a10
))
8032 && operand_equal_p (a01
, a11
, 0))
8033 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
8034 fold_build2_loc (loc
, code
, type
, a00
, a10
),
8038 /* See if we can build a range comparison. */
8039 if (0 != (tem
= fold_range_test (loc
, code
, type
, op0
, op1
)))
8042 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
)
8043 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
))
8045 tem
= merge_truthop_with_opposite_arm (loc
, arg0
, arg1
, true);
8047 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
8050 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ORIF_EXPR
)
8051 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ANDIF_EXPR
))
8053 tem
= merge_truthop_with_opposite_arm (loc
, arg1
, arg0
, false);
8055 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
8058 /* Check for the possibility of merging component references. If our
8059 lhs is another similar operation, try to merge its rhs with our
8060 rhs. Then try to merge our lhs and rhs. */
8061 if (TREE_CODE (arg0
) == code
8062 && 0 != (tem
= fold_truth_andor_1 (loc
, code
, type
,
8063 TREE_OPERAND (arg0
, 1), arg1
)))
8064 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
8066 if ((tem
= fold_truth_andor_1 (loc
, code
, type
, arg0
, arg1
)) != 0)
8069 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8070 && (code
== TRUTH_AND_EXPR
8071 || code
== TRUTH_ANDIF_EXPR
8072 || code
== TRUTH_OR_EXPR
8073 || code
== TRUTH_ORIF_EXPR
))
8075 enum tree_code ncode
, icode
;
8077 ncode
= (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_AND_EXPR
)
8078 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
;
8079 icode
= ncode
== TRUTH_AND_EXPR
? TRUTH_ANDIF_EXPR
: TRUTH_ORIF_EXPR
;
8081 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8082 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8083 We don't want to pack more than two leafs to a non-IF AND/OR
8085 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8086 equal to IF-CODE, then we don't want to add right-hand operand.
8087 If the inner right-hand side of left-hand operand has
8088 side-effects, or isn't simple, then we can't add to it,
8089 as otherwise we might destroy if-sequence. */
8090 if (TREE_CODE (arg0
) == icode
8091 && simple_operand_p_2 (arg1
)
8092 /* Needed for sequence points to handle trappings, and
8094 && simple_operand_p_2 (TREE_OPERAND (arg0
, 1)))
8096 tem
= fold_build2_loc (loc
, ncode
, type
, TREE_OPERAND (arg0
, 1),
8098 return fold_build2_loc (loc
, icode
, type
, TREE_OPERAND (arg0
, 0),
8101 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8102 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8103 else if (TREE_CODE (arg1
) == icode
8104 && simple_operand_p_2 (arg0
)
8105 /* Needed for sequence points to handle trappings, and
8107 && simple_operand_p_2 (TREE_OPERAND (arg1
, 0)))
8109 tem
= fold_build2_loc (loc
, ncode
, type
,
8110 arg0
, TREE_OPERAND (arg1
, 0));
8111 return fold_build2_loc (loc
, icode
, type
, tem
,
8112 TREE_OPERAND (arg1
, 1));
8114 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8116 For sequence point consistency, we need to check for trapping,
8117 and side-effects. */
8118 else if (code
== icode
&& simple_operand_p_2 (arg0
)
8119 && simple_operand_p_2 (arg1
))
8120 return fold_build2_loc (loc
, ncode
, type
, arg0
, arg1
);
8126 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8127 by changing CODE to reduce the magnitude of constants involved in
8128 ARG0 of the comparison.
8129 Returns a canonicalized comparison tree if a simplification was
8130 possible, otherwise returns NULL_TREE.
8131 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8132 valid if signed overflow is undefined. */
8135 maybe_canonicalize_comparison_1 (location_t loc
, enum tree_code code
, tree type
,
8136 tree arg0
, tree arg1
,
8137 bool *strict_overflow_p
)
8139 enum tree_code code0
= TREE_CODE (arg0
);
8140 tree t
, cst0
= NULL_TREE
;
8143 /* Match A +- CST code arg1. We can change this only if overflow
8145 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
8146 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
)))
8147 /* In principle pointers also have undefined overflow behavior,
8148 but that causes problems elsewhere. */
8149 && !POINTER_TYPE_P (TREE_TYPE (arg0
))
8150 && (code0
== MINUS_EXPR
8151 || code0
== PLUS_EXPR
)
8152 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
))
8155 /* Identify the constant in arg0 and its sign. */
8156 cst0
= TREE_OPERAND (arg0
, 1);
8157 sgn0
= tree_int_cst_sgn (cst0
);
8159 /* Overflowed constants and zero will cause problems. */
8160 if (integer_zerop (cst0
)
8161 || TREE_OVERFLOW (cst0
))
8164 /* See if we can reduce the magnitude of the constant in
8165 arg0 by changing the comparison code. */
8166 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8168 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
8170 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8171 else if (code
== GT_EXPR
8172 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
8174 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8175 else if (code
== LE_EXPR
8176 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
8178 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8179 else if (code
== GE_EXPR
8180 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
8184 *strict_overflow_p
= true;
8186 /* Now build the constant reduced in magnitude. But not if that
8187 would produce one outside of its types range. */
8188 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0
))
8190 && TYPE_MIN_VALUE (TREE_TYPE (cst0
))
8191 && tree_int_cst_equal (cst0
, TYPE_MIN_VALUE (TREE_TYPE (cst0
))))
8193 && TYPE_MAX_VALUE (TREE_TYPE (cst0
))
8194 && tree_int_cst_equal (cst0
, TYPE_MAX_VALUE (TREE_TYPE (cst0
))))))
8197 t
= int_const_binop (sgn0
== -1 ? PLUS_EXPR
: MINUS_EXPR
,
8198 cst0
, build_int_cst (TREE_TYPE (cst0
), 1));
8199 t
= fold_build2_loc (loc
, code0
, TREE_TYPE (arg0
), TREE_OPERAND (arg0
, 0), t
);
8200 t
= fold_convert (TREE_TYPE (arg1
), t
);
8202 return fold_build2_loc (loc
, code
, type
, t
, arg1
);
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
			       tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
				       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
				       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which can not wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
  /* Non-pointer bases carry no object-extent guarantee; be conservative.  */
  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (bitpos < 0)
    return true;

  wide_int wi_offset;
  int precision = TYPE_PRECISION (TREE_TYPE (base));
  if (offset == NULL_TREE)
    wi_offset = wi::zero (precision);
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
    return true;
  else
    wi_offset = offset;

  /* Fold BITPOS (converted to bytes) into the byte offset; any overflow
     in this addition already means the pointer may wrap.  */
  bool overflow;
  wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
  wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
  if (overflow)
    return true;

  if (!wi::fits_uhwi_p (total))
    return true;

  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
  if (size <= 0)
    return true;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      HOST_WIDE_INT base_size;

      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
      if (base_size > 0 && size < base_size)
	size = base_size;
    }

  /* An offset beyond the object size could leave the object and wrap.  */
  return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
}
/* Return a positive integer when the symbol DECL is known to have
   a nonzero address, zero when it's known not to (e.g., it's a weak
   symbol), and a negative integer when the symbol is not yet in the
   symbol table and so whether or not its address is zero is unknown.
   For function local objects always return positive integer.  */

static int
maybe_nonzero_address (tree decl)
{
  /* Global symbols: defer to the symbol table, which knows about
     weak symbols whose address may legitimately be null.  */
  if (DECL_P (decl) && decl_in_symtab_p (decl))
    if (struct symtab_node *symbol = symtab_node::get_create (decl))
      return symbol->nonzero_address ();

  /* Function local objects are never NULL.  */
  if (DECL_P (decl)
      && (DECL_CONTEXT (decl)
	  && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
	  && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
    return 1;

  /* Otherwise we cannot tell.  */
  return -1;
}
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
	  || TREE_CODE (arg1) == ADDR_EXPR
	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      machine_mode mode;
      int volatilep, reversep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
	 get_inner_reference, but put it back by stripping INDIRECT_REF
	 off the base object if possible.  indirect_baseN will be true
	 if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  base0
	    = get_inner_reference (TREE_OPERAND (arg0, 0),
				   &bitsize, &bitpos0, &offset0, &mode,
				   &unsignedp, &reversep, &volatilep);
	  if (TREE_CODE (base0) == INDIRECT_REF)
	    base0 = TREE_OPERAND (base0, 0);
	  else
	    indirect_base0 = true;
	}
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  base0 = TREE_OPERAND (arg0, 0);
	  STRIP_SIGN_NOPS (base0);
	  if (TREE_CODE (base0) == ADDR_EXPR)
	    {
	      base0
		= get_inner_reference (TREE_OPERAND (base0, 0),
				       &bitsize, &bitpos0, &offset0, &mode,
				       &unsignedp, &reversep, &volatilep);
	      if (TREE_CODE (base0) == INDIRECT_REF)
		base0 = TREE_OPERAND (base0, 0);
	      else
		indirect_base0 = true;
	    }
	  /* Accumulate the POINTER_PLUS offset on top of whatever
	     get_inner_reference found.  */
	  if (offset0 == NULL_TREE || integer_zerop (offset0))
	    offset0 = TREE_OPERAND (arg0, 1);
	  else
	    offset0 = size_binop (PLUS_EXPR, offset0,
				  TREE_OPERAND (arg0, 1));
	  if (TREE_CODE (offset0) == INTEGER_CST)
	    {
	      /* Fold a constant offset into the bit position when it
		 fits a signed HOST_WIDE_INT.  */
	      offset_int tem = wi::sext (wi::to_offset (offset0),
					 TYPE_PRECISION (sizetype));
	      tem <<= LOG2_BITS_PER_UNIT;
	      tem += bitpos0;
	      if (wi::fits_shwi_p (tem))
		{
		  bitpos0 = tem.to_shwi ();
		  offset0 = NULL_TREE;
		}
	    }
	}

      /* Decompose arg1 the same way.  */
      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
	{
	  base1
	    = get_inner_reference (TREE_OPERAND (arg1, 0),
				   &bitsize, &bitpos1, &offset1, &mode,
				   &unsignedp, &reversep, &volatilep);
	  if (TREE_CODE (base1) == INDIRECT_REF)
	    base1 = TREE_OPERAND (base1, 0);
	  else
	    indirect_base1 = true;
	}
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	{
	  base1 = TREE_OPERAND (arg1, 0);
	  STRIP_SIGN_NOPS (base1);
	  if (TREE_CODE (base1) == ADDR_EXPR)
	    {
	      base1
		= get_inner_reference (TREE_OPERAND (base1, 0),
				       &bitsize, &bitpos1, &offset1, &mode,
				       &unsignedp, &reversep, &volatilep);
	      if (TREE_CODE (base1) == INDIRECT_REF)
		base1 = TREE_OPERAND (base1, 0);
	      else
		indirect_base1 = true;
	    }
	  if (offset1 == NULL_TREE || integer_zerop (offset1))
	    offset1 = TREE_OPERAND (arg1, 1);
	  else
	    offset1 = size_binop (PLUS_EXPR, offset1,
				  TREE_OPERAND (arg1, 1));
	  if (TREE_CODE (offset1) == INTEGER_CST)
	    {
	      offset_int tem = wi::sext (wi::to_offset (offset1),
					 TYPE_PRECISION (sizetype));
	      tem <<= LOG2_BITS_PER_UNIT;
	      tem += bitpos1;
	      if (wi::fits_shwi_p (tem))
		{
		  bitpos1 = tem.to_shwi ();
		  offset1 = NULL_TREE;
		}
	    }
	}

      /* If we have equivalent bases we might be able to simplify.  */
      if (indirect_base0 == indirect_base1
	  && operand_equal_p (base0, base1,
			      indirect_base0 ? OEP_ADDRESS_OF : 0))
	{
	  /* We can fold this expression to a constant if the non-constant
	     offset parts are equal.  */
	  if (offset0 == offset1
	      || (offset0 && offset1
		  && operand_equal_p (offset0, offset1, 0)))
	    {
	      if (!equality_code
		  && bitpos0 != bitpos1
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_CONDITIONAL);

	      switch (code)
		{
		case EQ_EXPR:
		  return constant_boolean_node (bitpos0 == bitpos1, type);
		case NE_EXPR:
		  return constant_boolean_node (bitpos0 != bitpos1, type);
		case LT_EXPR:
		  return constant_boolean_node (bitpos0 < bitpos1, type);
		case LE_EXPR:
		  return constant_boolean_node (bitpos0 <= bitpos1, type);
		case GE_EXPR:
		  return constant_boolean_node (bitpos0 >= bitpos1, type);
		case GT_EXPR:
		  return constant_boolean_node (bitpos0 > bitpos1, type);
		default:;
		}
	    }
	  /* We can simplify the comparison to a comparison of the variable
	     offset parts if the constant offset parts are equal.
	     Be careful to use signed sizetype here because otherwise we
	     mess with array offsets in the wrong way.  This is possible
	     because pointer arithmetic is restricted to retain within an
	     object and overflow on pointer differences is undefined as of
	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
	  else if (bitpos0 == bitpos1)
	    {
	      /* By converting to signed sizetype we cover middle-end pointer
		 arithmetic which operates on unsigned pointer types of size
		 type size and ARRAY_REF offsets which are properly sign or
		 zero extended from their type in case it is narrower than
		 sizetype.  */
	      if (offset0 == NULL_TREE)
		offset0 = build_int_cst (ssizetype, 0);
	      else
		offset0 = fold_convert_loc (loc, ssizetype, offset0);
	      if (offset1 == NULL_TREE)
		offset1 = build_int_cst (ssizetype, 0);
	      else
		offset1 = fold_convert_loc (loc, ssizetype, offset1);

	      if (!equality_code
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_COMPARISON);

	      return fold_build2_loc (loc, code, type, offset0, offset1);
	    }
	}
      /* For equal offsets we can simplify to a comparison of the
	 base addresses.  */
      else if (bitpos0 == bitpos1
	       && (indirect_base0
		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
	       && (indirect_base1
		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
	       && ((offset0 == offset1)
		   || (offset0 && offset1
		       && operand_equal_p (offset0, offset1, 0))))
	{
	  if (indirect_base0)
	    base0 = build_fold_addr_expr_loc (loc, base0);
	  if (indirect_base1)
	    base1 = build_fold_addr_expr_loc (loc, base1);
	  return fold_build2_loc (loc, code, type, base0, base1);
	}
      /* Comparison between an ordinary (non-weak) symbol and a null
	 pointer can be eliminated since such symbols must have a non
	 null address.  In C, relational expressions between pointers
	 to objects and null pointers are undefined.  The results
	 below follow the C++ rules with the additional property that
	 every object pointer compares greater than a null pointer.
      */
      else if (((DECL_P (base0)
		 && maybe_nonzero_address (base0) > 0
		 /* Avoid folding references to struct members at offset 0 to
		    prevent tests like '&ptr->firstmember == 0' from getting
		    eliminated.  When ptr is null, although the -> expression
		    is strictly speaking invalid, GCC retains it as a matter
		    of QoI.  See PR c/44555. */
		 && (offset0 == NULL_TREE && bitpos0 != 0))
		|| CONSTANT_CLASS_P (base0))
	       && indirect_base0
	       /* The caller guarantees that when one of the arguments is
		  constant (i.e., null in this case) it is second.  */
	       && integer_zerop (arg1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case LE_EXPR:
	    case LT_EXPR:
	      return constant_boolean_node (false, type);
	    case GE_EXPR:
	    case GT_EXPR:
	    case NE_EXPR:
	      return constant_boolean_node (true, type);
	    default:
	      gcc_unreachable ();
	    }
	}
    }

  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one and has the same sign.  */
  if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
				      "occur when combining constants around "
				      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
	 of lower absolute value and of same sign than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const2, const1);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  variable1,
				  fold_build2_loc (loc, TREE_CODE (arg1),
						   TREE_TYPE (arg1),
						   variable2, cst));
	}

      /* Try the other direction: move the combined constant to arg0.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const1, const2);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  fold_build2_loc (loc, TREE_CODE (arg0),
						   TREE_TYPE (arg0),
						   variable1, cst),
				  variable2);
	}
    }

  tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
  if (tem)
    return tem;

  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like  ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	  /* Don't handle degenerate cases here; they should already
	     have been handled anyway.  */
	  && cval1 != 0 && cval2 != 0
	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	{
	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	  /* We can't just pass T to eval_subst in case cval1 or cval2
	     was the same as ARG1.  */

	  tree high_result
		= fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, maxval,
					   cval2, minval),
			       arg1);
	  tree equal_result
		= fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, maxval,
					   cval2, maxval),
			       arg1);
	  tree low_result
		= fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, minval,
					   cval2, maxval),
			       arg1);

	  /* All three of these results should be 0 or 1.  Confirm they are.
	     Then use those values to select the proper code to use.  */

	  if (TREE_CODE (high_result) == INTEGER_CST
	      && TREE_CODE (equal_result) == INTEGER_CST
	      && TREE_CODE (low_result) == INTEGER_CST)
	    {
	      /* Make a 3-bit mask with the high-order bit being the
		 value for `>', the next for '=', and the low for '<'.  */
	      switch ((integer_onep (high_result) * 4)
		      + (integer_onep (equal_result) * 2)
		      + integer_onep (low_result))
		{
		case 0:
		  /* Always false.  */
		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
		case 1:
		  code = LT_EXPR;
		  break;
		case 2:
		  code = EQ_EXPR;
		  break;
		case 3:
		  code = LE_EXPR;
		  break;
		case 4:
		  code = GT_EXPR;
		  break;
		case 5:
		  code = NE_EXPR;
		  break;
		case 6:
		  code = GE_EXPR;
		  break;
		case 7:
		  /* Always true.  */
		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
		}

	      if (save_p)
		{
		  tem = save_expr (build2 (code, type, cval1, cval2));
		  protected_set_expr_location (tem, loc);
		  return tem;
		}
	      return fold_build2_loc (loc, code, type, cval1, cval2);
	    }
	}
    }

  return NULL_TREE;
}
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      /* Wrap EXPR in a SAVE_EXPR so it is evaluated only once before
	 extracting both parts.  */
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  /* Each part is used twice (squared); protect against re-evaluation.  */
  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
		     fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
		     fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
  /* The result of z * conj(z) is purely real.  */
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
			  build_zero_cst (itype));
}
/* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
   CONSTRUCTOR ARG into array ELTS and return true if successful.  */

static bool
vec_cst_ctor_to_array (tree arg, tree *elts)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;

  if (TREE_CODE (arg) == VECTOR_CST)
    {
      for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
	elts[i] = VECTOR_CST_ELT (arg, i);
    }
  else if (TREE_CODE (arg) == CONSTRUCTOR)
    {
      constructor_elt *elt;

      /* Reject constructors with too many elements or with nested
	 vector-typed elements.  */
      FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
	if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
	  return false;
	else
	  elts[i] = elt->value;
    }
  else
    return false;
  /* Pad any trailing elements not supplied by the constructor with
     zeros of the element type.  */
  for (; i < nelts; i++)
    elts[i]
      = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
  return true;
}
/* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
   selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
   NULL_TREE otherwise.  */

static tree
fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
  tree *elts;
  bool need_ctor = false;

  gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
	      && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
  if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
      || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
    return NULL_TREE;

  /* Layout: elts[0..nelts) = arg0, elts[nelts..2n) = arg1,
     elts[2n..3n) = the selected result.  */
  elts = XALLOCAVEC (tree, nelts * 3);
  if (!vec_cst_ctor_to_array (arg0, elts)
      || !vec_cst_ctor_to_array (arg1, elts + nelts))
    return NULL_TREE;

  for (i = 0; i < nelts; i++)
    {
      /* A non-constant element forces a CONSTRUCTOR result rather
	 than a VECTOR_CST.  */
      if (!CONSTANT_CLASS_P (elts[sel[i]]))
	need_ctor = true;
      elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
    }

  if (need_ctor)
    {
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nelts);
      for (i = 0; i < nelts; i++)
	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
      return build_constructor (type, v);
    }

  return build_vector (type, &elts[2 * nelts]);
}
/* Try to fold a pointer difference of type TYPE two address expressions of
   array references AREF0 and AREF1 using location LOC.  Return a
   simplified expression for the difference or NULL_TREE.  */

static tree
fold_addr_of_array_ref_difference (location_t loc, tree type,
				   tree aref0, tree aref1)
{
  tree base0 = TREE_OPERAND (aref0, 0);
  tree base1 = TREE_OPERAND (aref1, 0);
  tree base_offset = build_int_cst (type, 0);

  /* If the bases are array references as well, recurse.  If the bases
     are pointer indirections compute the difference of the pointers.
     If the bases are equal, we are set.  */
  if ((TREE_CODE (base0) == ARRAY_REF
       && TREE_CODE (base1) == ARRAY_REF
       && (base_offset
	   = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
      || (INDIRECT_REF_P (base0)
	  && INDIRECT_REF_P (base1)
	  && (base_offset
	        = fold_binary_loc (loc, MINUS_EXPR, type,
				   fold_convert (type, TREE_OPERAND (base0, 0)),
				   fold_convert (type,
						 TREE_OPERAND (base1, 0)))))
      || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
    {
      /* Result is base_offset + (index0 - index1) * element_size.  */
      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
      tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
      tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
      return fold_build2_loc (loc, PLUS_EXPR, type,
			      base_offset,
			      fold_build2_loc (loc, MULT_EXPR, type,
					       diff, esz));
    }
  return NULL_TREE;
}
/* If the real or vector real constant CST of type TYPE has an exact
   inverse, return it, else return NULL.  */

static tree
exact_inverse (tree type, tree cst)
{
  REAL_VALUE_TYPE r;
  tree unit_type, *elts;
  machine_mode mode;
  unsigned vec_nelts, i;

  switch (TREE_CODE (cst))
    {
    case REAL_CST:
      r = TREE_REAL_CST (cst);

      if (exact_real_inverse (TYPE_MODE (type), &r))
	return build_real (type, r);

      return NULL_TREE;

    case VECTOR_CST:
      /* Invert element-wise; fail if any element lacks an exact
	 inverse.  */
      vec_nelts = VECTOR_CST_NELTS (cst);
      elts = XALLOCAVEC (tree, vec_nelts);
      unit_type = TREE_TYPE (type);
      mode = TYPE_MODE (unit_type);

      for (i = 0; i < vec_nelts; i++)
	{
	  r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
	  if (!exact_real_inverse (mode, &r))
	    return NULL_TREE;
	  elts[i] = build_real (unit_type, r);
	}

      return build_vector (type, elts);

    default:
      return NULL_TREE;
    }
}
/* Mask out the tz least significant bits of X of type TYPE where
   tz is the number of trailing zeroes in Y.  */

static wide_int
mask_with_tz (tree type, const wide_int &x, const wide_int &y)
{
  int tz = wi::ctz (y);
  if (tz > 0)
    /* Clear the low TZ bits of X; the "true" flag selects the inverted
       (high-bits) mask.  */
    return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
  return x;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

static bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  /* First dispatch on the code class, then on specific codes.  */
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      /* The value of these is the value of the second operand.  */
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	if (!fndecl) return false;
	/* A throwing operator new never returns a null pointer when
	   null-pointer checks may be deleted.  */
	if (flag_delete_null_pointer_checks && !flag_check_new
	    && DECL_IS_OPERATOR_NEW (fndecl)
	    && !TREE_NOTHROW (fndecl))
	  return true;
	if (flag_delete_null_pointer_checks
	    && lookup_attribute ("returns_nonnull",
		 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
	  return true;
	return alloca_call_p (t);
      }

    default:
      break;
    }
  return false;
}
/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  /* Only warn when the nonzero-ness actually relied on undefined
     signed overflow.  */
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-zero"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
/* Return true if T is known not to be equal to an integer W.  */

bool
expr_not_equal_to (tree t, const wide_int &w)
{
  wide_int min, max, nz;
  value_range_type rtype;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return wi::ne_p (t, w);

    case SSA_NAME:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
	return false;
      /* Consult recorded value-range information for the SSA name.  */
      rtype = get_range_info (t, &min, &max);
      if (rtype == VR_RANGE)
	{
	  /* W outside [min, max] means T != W.  */
	  if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
	    return true;
	  if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
	    return true;
	}
      else if (rtype == VR_ANTI_RANGE
	       && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
	       && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
	/* W inside the excluded range means T != W.  */
	return true;
      /* If T has some known zero bits and W has any of those bits set,
	 then T is known not to be equal to W.  */
      if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
			      TYPE_PRECISION (TREE_TYPE (t))), 0))
	return true;
      return false;

    default:
      return false;
    }
}
9088 /* Fold a binary expression of code CODE and type TYPE with operands
9089 OP0 and OP1. LOC is the location of the resulting expression.
9090 Return the folded expression if folding is successful. Otherwise,
9091 return NULL_TREE. */
9094 fold_binary_loc (location_t loc
,
9095 enum tree_code code
, tree type
, tree op0
, tree op1
)
9097 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
9098 tree arg0
, arg1
, tem
;
9099 tree t1
= NULL_TREE
;
9100 bool strict_overflow_p
;
9103 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
9104 && TREE_CODE_LENGTH (code
) == 2
9106 && op1
!= NULL_TREE
);
9111 /* Strip any conversions that don't change the mode. This is
9112 safe for every expression, except for a comparison expression
9113 because its signedness is derived from its operands. So, in
9114 the latter case, only strip conversions that don't change the
9115 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9118 Note that this is done as an internal manipulation within the
9119 constant folder, in order to find the simplest representation
9120 of the arguments so that their form can be studied. In any
9121 cases, the appropriate type conversions should be put back in
9122 the tree that will get out of the constant folder. */
9124 if (kind
== tcc_comparison
|| code
== MIN_EXPR
|| code
== MAX_EXPR
)
9126 STRIP_SIGN_NOPS (arg0
);
9127 STRIP_SIGN_NOPS (arg1
);
9135 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9136 constant but we can't do arithmetic on them. */
9137 if (CONSTANT_CLASS_P (arg0
) && CONSTANT_CLASS_P (arg1
))
9139 tem
= const_binop (code
, type
, arg0
, arg1
);
9140 if (tem
!= NULL_TREE
)
9142 if (TREE_TYPE (tem
) != type
)
9143 tem
= fold_convert_loc (loc
, type
, tem
);
9148 /* If this is a commutative operation, and ARG0 is a constant, move it
9149 to ARG1 to reduce the number of tests below. */
9150 if (commutative_tree_code (code
)
9151 && tree_swap_operands_p (arg0
, arg1
))
9152 return fold_build2_loc (loc
, code
, type
, op1
, op0
);
9154 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9155 to ARG1 to reduce the number of tests below. */
9156 if (kind
== tcc_comparison
9157 && tree_swap_operands_p (arg0
, arg1
))
9158 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, op1
, op0
);
9160 tem
= generic_simplify (loc
, code
, type
, op0
, op1
);
9164 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9166 First check for cases where an arithmetic operation is applied to a
9167 compound, conditional, or comparison operation. Push the arithmetic
9168 operation inside the compound or conditional to see if any folding
9169 can then be done. Convert comparison to conditional for this purpose.
9170 The also optimizes non-constant cases that used to be done in
9173 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9174 one of the operands is a comparison and the other is a comparison, a
9175 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9176 code below would make the expression more complex. Change it to a
9177 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9178 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9180 if ((code
== BIT_AND_EXPR
|| code
== BIT_IOR_EXPR
9181 || code
== EQ_EXPR
|| code
== NE_EXPR
)
9182 && TREE_CODE (type
) != VECTOR_TYPE
9183 && ((truth_value_p (TREE_CODE (arg0
))
9184 && (truth_value_p (TREE_CODE (arg1
))
9185 || (TREE_CODE (arg1
) == BIT_AND_EXPR
9186 && integer_onep (TREE_OPERAND (arg1
, 1)))))
9187 || (truth_value_p (TREE_CODE (arg1
))
9188 && (truth_value_p (TREE_CODE (arg0
))
9189 || (TREE_CODE (arg0
) == BIT_AND_EXPR
9190 && integer_onep (TREE_OPERAND (arg0
, 1)))))))
9192 tem
= fold_build2_loc (loc
, code
== BIT_AND_EXPR
? TRUTH_AND_EXPR
9193 : code
== BIT_IOR_EXPR
? TRUTH_OR_EXPR
9196 fold_convert_loc (loc
, boolean_type_node
, arg0
),
9197 fold_convert_loc (loc
, boolean_type_node
, arg1
));
9199 if (code
== EQ_EXPR
)
9200 tem
= invert_truthvalue_loc (loc
, tem
);
9202 return fold_convert_loc (loc
, type
, tem
);
9205 if (TREE_CODE_CLASS (code
) == tcc_binary
9206 || TREE_CODE_CLASS (code
) == tcc_comparison
)
9208 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
9210 tem
= fold_build2_loc (loc
, code
, type
,
9211 fold_convert_loc (loc
, TREE_TYPE (op0
),
9212 TREE_OPERAND (arg0
, 1)), op1
);
9213 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
9216 if (TREE_CODE (arg1
) == COMPOUND_EXPR
)
9218 tem
= fold_build2_loc (loc
, code
, type
, op0
,
9219 fold_convert_loc (loc
, TREE_TYPE (op1
),
9220 TREE_OPERAND (arg1
, 1)));
9221 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
9225 if (TREE_CODE (arg0
) == COND_EXPR
9226 || TREE_CODE (arg0
) == VEC_COND_EXPR
9227 || COMPARISON_CLASS_P (arg0
))
9229 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
9231 /*cond_first_p=*/1);
9232 if (tem
!= NULL_TREE
)
9236 if (TREE_CODE (arg1
) == COND_EXPR
9237 || TREE_CODE (arg1
) == VEC_COND_EXPR
9238 || COMPARISON_CLASS_P (arg1
))
9240 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
9242 /*cond_first_p=*/0);
9243 if (tem
!= NULL_TREE
)
9251 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9252 if (TREE_CODE (arg0
) == ADDR_EXPR
9253 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == MEM_REF
)
9255 tree iref
= TREE_OPERAND (arg0
, 0);
9256 return fold_build2 (MEM_REF
, type
,
9257 TREE_OPERAND (iref
, 0),
9258 int_const_binop (PLUS_EXPR
, arg1
,
9259 TREE_OPERAND (iref
, 1)));
9262 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9263 if (TREE_CODE (arg0
) == ADDR_EXPR
9264 && handled_component_p (TREE_OPERAND (arg0
, 0)))
9267 HOST_WIDE_INT coffset
;
9268 base
= get_addr_base_and_unit_offset (TREE_OPERAND (arg0
, 0),
9272 return fold_build2 (MEM_REF
, type
,
9273 build_fold_addr_expr (base
),
9274 int_const_binop (PLUS_EXPR
, arg1
,
9275 size_int (coffset
)));
9280 case POINTER_PLUS_EXPR
:
9281 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9282 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
9283 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
9284 return fold_convert_loc (loc
, type
,
9285 fold_build2_loc (loc
, PLUS_EXPR
, sizetype
,
9286 fold_convert_loc (loc
, sizetype
,
9288 fold_convert_loc (loc
, sizetype
,
9294 if (INTEGRAL_TYPE_P (type
) || VECTOR_INTEGER_TYPE_P (type
))
9296 /* X + (X / CST) * -CST is X % CST. */
9297 if (TREE_CODE (arg1
) == MULT_EXPR
9298 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == TRUNC_DIV_EXPR
9299 && operand_equal_p (arg0
,
9300 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0), 0))
9302 tree cst0
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1);
9303 tree cst1
= TREE_OPERAND (arg1
, 1);
9304 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (cst1
),
9306 if (sum
&& integer_zerop (sum
))
9307 return fold_convert_loc (loc
, type
,
9308 fold_build2_loc (loc
, TRUNC_MOD_EXPR
,
9309 TREE_TYPE (arg0
), arg0
,
9314 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9315 one. Make sure the type is not saturating and has the signedness of
9316 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9317 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9318 if ((TREE_CODE (arg0
) == MULT_EXPR
9319 || TREE_CODE (arg1
) == MULT_EXPR
)
9320 && !TYPE_SATURATING (type
)
9321 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
9322 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
9323 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
9325 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
9330 if (! FLOAT_TYPE_P (type
))
9332 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9333 (plus (plus (mult) (mult)) (foo)) so that we can
9334 take advantage of the factoring cases below. */
9335 if (ANY_INTEGRAL_TYPE_P (type
)
9336 && TYPE_OVERFLOW_WRAPS (type
)
9337 && (((TREE_CODE (arg0
) == PLUS_EXPR
9338 || TREE_CODE (arg0
) == MINUS_EXPR
)
9339 && TREE_CODE (arg1
) == MULT_EXPR
)
9340 || ((TREE_CODE (arg1
) == PLUS_EXPR
9341 || TREE_CODE (arg1
) == MINUS_EXPR
)
9342 && TREE_CODE (arg0
) == MULT_EXPR
)))
9344 tree parg0
, parg1
, parg
, marg
;
9345 enum tree_code pcode
;
9347 if (TREE_CODE (arg1
) == MULT_EXPR
)
9348 parg
= arg0
, marg
= arg1
;
9350 parg
= arg1
, marg
= arg0
;
9351 pcode
= TREE_CODE (parg
);
9352 parg0
= TREE_OPERAND (parg
, 0);
9353 parg1
= TREE_OPERAND (parg
, 1);
9357 if (TREE_CODE (parg0
) == MULT_EXPR
9358 && TREE_CODE (parg1
) != MULT_EXPR
)
9359 return fold_build2_loc (loc
, pcode
, type
,
9360 fold_build2_loc (loc
, PLUS_EXPR
, type
,
9361 fold_convert_loc (loc
, type
,
9363 fold_convert_loc (loc
, type
,
9365 fold_convert_loc (loc
, type
, parg1
));
9366 if (TREE_CODE (parg0
) != MULT_EXPR
9367 && TREE_CODE (parg1
) == MULT_EXPR
)
9369 fold_build2_loc (loc
, PLUS_EXPR
, type
,
9370 fold_convert_loc (loc
, type
, parg0
),
9371 fold_build2_loc (loc
, pcode
, type
,
9372 fold_convert_loc (loc
, type
, marg
),
9373 fold_convert_loc (loc
, type
,
9379 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9380 to __complex__ ( x, y ). This is not the same for SNaNs or
9381 if signed zeros are involved. */
9382 if (!HONOR_SNANS (element_mode (arg0
))
9383 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
9384 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
9386 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
9387 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
9388 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
9389 bool arg0rz
= false, arg0iz
= false;
9390 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
9391 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
9393 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
9394 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
9395 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
9397 tree rp
= arg1r
? arg1r
9398 : build1 (REALPART_EXPR
, rtype
, arg1
);
9399 tree ip
= arg0i
? arg0i
9400 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
9401 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
9403 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
9405 tree rp
= arg0r
? arg0r
9406 : build1 (REALPART_EXPR
, rtype
, arg0
);
9407 tree ip
= arg1i
? arg1i
9408 : build1 (IMAGPART_EXPR
, rtype
, arg1
);
9409 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
9414 if (flag_unsafe_math_optimizations
9415 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
9416 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
9417 && (tem
= distribute_real_division (loc
, code
, type
, arg0
, arg1
)))
9420 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9421 We associate floats only if the user has specified
9422 -fassociative-math. */
9423 if (flag_associative_math
9424 && TREE_CODE (arg1
) == PLUS_EXPR
9425 && TREE_CODE (arg0
) != MULT_EXPR
)
9427 tree tree10
= TREE_OPERAND (arg1
, 0);
9428 tree tree11
= TREE_OPERAND (arg1
, 1);
9429 if (TREE_CODE (tree11
) == MULT_EXPR
9430 && TREE_CODE (tree10
) == MULT_EXPR
)
9433 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, arg0
, tree10
);
9434 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree0
, tree11
);
9437 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9438 We associate floats only if the user has specified
9439 -fassociative-math. */
9440 if (flag_associative_math
9441 && TREE_CODE (arg0
) == PLUS_EXPR
9442 && TREE_CODE (arg1
) != MULT_EXPR
)
9444 tree tree00
= TREE_OPERAND (arg0
, 0);
9445 tree tree01
= TREE_OPERAND (arg0
, 1);
9446 if (TREE_CODE (tree01
) == MULT_EXPR
9447 && TREE_CODE (tree00
) == MULT_EXPR
)
9450 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, tree01
, arg1
);
9451 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree00
, tree0
);
9457 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9458 is a rotate of A by C1 bits. */
9459 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9460 is a rotate of A by B bits. */
9462 enum tree_code code0
, code1
;
9464 code0
= TREE_CODE (arg0
);
9465 code1
= TREE_CODE (arg1
);
9466 if (((code0
== RSHIFT_EXPR
&& code1
== LSHIFT_EXPR
)
9467 || (code1
== RSHIFT_EXPR
&& code0
== LSHIFT_EXPR
))
9468 && operand_equal_p (TREE_OPERAND (arg0
, 0),
9469 TREE_OPERAND (arg1
, 0), 0)
9470 && (rtype
= TREE_TYPE (TREE_OPERAND (arg0
, 0)),
9471 TYPE_UNSIGNED (rtype
))
9472 /* Only create rotates in complete modes. Other cases are not
9473 expanded properly. */
9474 && (element_precision (rtype
)
9475 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype
))))
9477 tree tree01
, tree11
;
9478 enum tree_code code01
, code11
;
9480 tree01
= TREE_OPERAND (arg0
, 1);
9481 tree11
= TREE_OPERAND (arg1
, 1);
9482 STRIP_NOPS (tree01
);
9483 STRIP_NOPS (tree11
);
9484 code01
= TREE_CODE (tree01
);
9485 code11
= TREE_CODE (tree11
);
9486 if (code01
== INTEGER_CST
9487 && code11
== INTEGER_CST
9488 && (wi::to_widest (tree01
) + wi::to_widest (tree11
)
9489 == element_precision (TREE_TYPE (TREE_OPERAND (arg0
, 0)))))
9491 tem
= build2_loc (loc
, LROTATE_EXPR
,
9492 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
9493 TREE_OPERAND (arg0
, 0),
9494 code0
== LSHIFT_EXPR
9495 ? TREE_OPERAND (arg0
, 1)
9496 : TREE_OPERAND (arg1
, 1));
9497 return fold_convert_loc (loc
, type
, tem
);
9499 else if (code11
== MINUS_EXPR
)
9501 tree tree110
, tree111
;
9502 tree110
= TREE_OPERAND (tree11
, 0);
9503 tree111
= TREE_OPERAND (tree11
, 1);
9504 STRIP_NOPS (tree110
);
9505 STRIP_NOPS (tree111
);
9506 if (TREE_CODE (tree110
) == INTEGER_CST
9507 && 0 == compare_tree_int (tree110
,
9509 (TREE_TYPE (TREE_OPERAND
9511 && operand_equal_p (tree01
, tree111
, 0))
9513 fold_convert_loc (loc
, type
,
9514 build2 ((code0
== LSHIFT_EXPR
9517 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
9518 TREE_OPERAND (arg0
, 0),
9519 TREE_OPERAND (arg0
, 1)));
9521 else if (code01
== MINUS_EXPR
)
9523 tree tree010
, tree011
;
9524 tree010
= TREE_OPERAND (tree01
, 0);
9525 tree011
= TREE_OPERAND (tree01
, 1);
9526 STRIP_NOPS (tree010
);
9527 STRIP_NOPS (tree011
);
9528 if (TREE_CODE (tree010
) == INTEGER_CST
9529 && 0 == compare_tree_int (tree010
,
9531 (TREE_TYPE (TREE_OPERAND
9533 && operand_equal_p (tree11
, tree011
, 0))
9534 return fold_convert_loc
9536 build2 ((code0
!= LSHIFT_EXPR
9539 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
9540 TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 1)));
9546 /* In most languages, can't associate operations on floats through
9547 parentheses. Rather than remember where the parentheses were, we
9548 don't associate floats at all, unless the user has specified
9550 And, we need to make sure type is not saturating. */
9552 if ((! FLOAT_TYPE_P (type
) || flag_associative_math
)
9553 && !TYPE_SATURATING (type
))
9555 tree var0
, minus_var0
, con0
, minus_con0
, lit0
, minus_lit0
;
9556 tree var1
, minus_var1
, con1
, minus_con1
, lit1
, minus_lit1
;
9560 /* Split both trees into variables, constants, and literals. Then
9561 associate each group together, the constants with literals,
9562 then the result with variables. This increases the chances of
9563 literals being recombined later and of generating relocatable
9564 expressions for the sum of a constant and literal. */
9565 var0
= split_tree (arg0
, type
, code
,
9566 &minus_var0
, &con0
, &minus_con0
,
9567 &lit0
, &minus_lit0
, 0);
9568 var1
= split_tree (arg1
, type
, code
,
9569 &minus_var1
, &con1
, &minus_con1
,
9570 &lit1
, &minus_lit1
, code
== MINUS_EXPR
);
9572 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9573 if (code
== MINUS_EXPR
)
9576 /* With undefined overflow prefer doing association in a type
9577 which wraps on overflow, if that is one of the operand types. */
9578 if (POINTER_TYPE_P (type
)
9579 || (INTEGRAL_TYPE_P (type
) && !TYPE_OVERFLOW_WRAPS (type
)))
9581 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
9582 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
9583 atype
= TREE_TYPE (arg0
);
9584 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
9585 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1
)))
9586 atype
= TREE_TYPE (arg1
);
9587 gcc_assert (TYPE_PRECISION (atype
) == TYPE_PRECISION (type
));
9590 /* With undefined overflow we can only associate constants with one
9591 variable, and constants whose association doesn't overflow. */
9592 if (POINTER_TYPE_P (atype
)
9593 || (INTEGRAL_TYPE_P (atype
) && !TYPE_OVERFLOW_WRAPS (atype
)))
9597 /* ??? If split_tree would handle NEGATE_EXPR we could
9598 simplify this down to the var0/minus_var1 cases. */
9601 bool one_neg
= false;
9603 if (TREE_CODE (tmp0
) == NEGATE_EXPR
)
9605 tmp0
= TREE_OPERAND (tmp0
, 0);
9608 if (CONVERT_EXPR_P (tmp0
)
9609 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
9610 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
9611 <= TYPE_PRECISION (atype
)))
9612 tmp0
= TREE_OPERAND (tmp0
, 0);
9613 if (TREE_CODE (tmp1
) == NEGATE_EXPR
)
9615 tmp1
= TREE_OPERAND (tmp1
, 0);
9618 if (CONVERT_EXPR_P (tmp1
)
9619 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
9620 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
9621 <= TYPE_PRECISION (atype
)))
9622 tmp1
= TREE_OPERAND (tmp1
, 0);
9623 /* The only case we can still associate with two variables
9624 is if they cancel out. */
9626 || !operand_equal_p (tmp0
, tmp1
, 0))
9629 else if ((var0
&& minus_var1
9630 && ! operand_equal_p (var0
, minus_var1
, 0))
9631 || (minus_var0
&& var1
9632 && ! operand_equal_p (minus_var0
, var1
, 0)))
9636 /* Only do something if we found more than two objects. Otherwise,
9637 nothing has changed and we risk infinite recursion. */
9639 && (2 < ((var0
!= 0) + (var1
!= 0)
9640 + (minus_var0
!= 0) + (minus_var1
!= 0)
9641 + (con0
!= 0) + (con1
!= 0)
9642 + (minus_con0
!= 0) + (minus_con1
!= 0)
9643 + (lit0
!= 0) + (lit1
!= 0)
9644 + (minus_lit0
!= 0) + (minus_lit1
!= 0))))
9646 var0
= associate_trees (loc
, var0
, var1
, code
, atype
);
9647 minus_var0
= associate_trees (loc
, minus_var0
, minus_var1
,
9649 con0
= associate_trees (loc
, con0
, con1
, code
, atype
);
9650 minus_con0
= associate_trees (loc
, minus_con0
, minus_con1
,
9652 lit0
= associate_trees (loc
, lit0
, lit1
, code
, atype
);
9653 minus_lit0
= associate_trees (loc
, minus_lit0
, minus_lit1
,
9656 if (minus_var0
&& var0
)
9658 var0
= associate_trees (loc
, var0
, minus_var0
,
9662 if (minus_con0
&& con0
)
9664 con0
= associate_trees (loc
, con0
, minus_con0
,
9669 /* Preserve the MINUS_EXPR if the negative part of the literal is
9670 greater than the positive part. Otherwise, the multiplicative
9671 folding code (i.e extract_muldiv) may be fooled in case
9672 unsigned constants are subtracted, like in the following
9673 example: ((X*2 + 4) - 8U)/2. */
9674 if (minus_lit0
&& lit0
)
9676 if (TREE_CODE (lit0
) == INTEGER_CST
9677 && TREE_CODE (minus_lit0
) == INTEGER_CST
9678 && tree_int_cst_lt (lit0
, minus_lit0
)
9679 /* But avoid ending up with only negated parts. */
9682 minus_lit0
= associate_trees (loc
, minus_lit0
, lit0
,
9688 lit0
= associate_trees (loc
, lit0
, minus_lit0
,
9694 /* Don't introduce overflows through reassociation. */
9695 if ((lit0
&& TREE_OVERFLOW_P (lit0
))
9696 || (minus_lit0
&& TREE_OVERFLOW_P (minus_lit0
)))
9699 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
9700 con0
= associate_trees (loc
, con0
, lit0
, code
, atype
);
9702 minus_con0
= associate_trees (loc
, minus_con0
, minus_lit0
,
9706 /* Eliminate minus_con0. */
9710 con0
= associate_trees (loc
, con0
, minus_con0
,
9713 var0
= associate_trees (loc
, var0
, minus_con0
,
9720 /* Eliminate minus_var0. */
9724 con0
= associate_trees (loc
, con0
, minus_var0
,
9732 fold_convert_loc (loc
, type
, associate_trees (loc
, var0
, con0
,
9740 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9741 if (TREE_CODE (arg0
) == NEGATE_EXPR
9742 && negate_expr_p (op1
))
9743 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
9745 fold_convert_loc (loc
, type
,
9746 TREE_OPERAND (arg0
, 0)));
9748 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9749 __complex__ ( x, -y ). This is not the same for SNaNs or if
9750 signed zeros are involved. */
9751 if (!HONOR_SNANS (element_mode (arg0
))
9752 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
9753 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
9755 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
9756 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
9757 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
9758 bool arg0rz
= false, arg0iz
= false;
9759 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
9760 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
9762 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
9763 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
9764 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
9766 tree rp
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
9768 : build1 (REALPART_EXPR
, rtype
, arg1
));
9769 tree ip
= arg0i
? arg0i
9770 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
9771 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
9773 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
9775 tree rp
= arg0r
? arg0r
9776 : build1 (REALPART_EXPR
, rtype
, arg0
);
9777 tree ip
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
9779 : build1 (IMAGPART_EXPR
, rtype
, arg1
));
9780 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
9785 /* A - B -> A + (-B) if B is easily negatable. */
9786 if (negate_expr_p (op1
)
9787 && ! TYPE_OVERFLOW_SANITIZED (type
)
9788 && ((FLOAT_TYPE_P (type
)
9789 /* Avoid this transformation if B is a positive REAL_CST. */
9790 && (TREE_CODE (op1
) != REAL_CST
9791 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1
))))
9792 || INTEGRAL_TYPE_P (type
)))
9793 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
9794 fold_convert_loc (loc
, type
, arg0
),
9797 /* Fold &a[i] - &a[j] to i-j. */
9798 if (TREE_CODE (arg0
) == ADDR_EXPR
9799 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ARRAY_REF
9800 && TREE_CODE (arg1
) == ADDR_EXPR
9801 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ARRAY_REF
)
9803 tree tem
= fold_addr_of_array_ref_difference (loc
, type
,
9804 TREE_OPERAND (arg0
, 0),
9805 TREE_OPERAND (arg1
, 0));
9810 if (FLOAT_TYPE_P (type
)
9811 && flag_unsafe_math_optimizations
9812 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
9813 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
9814 && (tem
= distribute_real_division (loc
, code
, type
, arg0
, arg1
)))
9817 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9818 one. Make sure the type is not saturating and has the signedness of
9819 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9820 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9821 if ((TREE_CODE (arg0
) == MULT_EXPR
9822 || TREE_CODE (arg1
) == MULT_EXPR
)
9823 && !TYPE_SATURATING (type
)
9824 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
9825 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
9826 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
9828 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
9836 if (! FLOAT_TYPE_P (type
))
9838 /* Transform x * -C into -x * C if x is easily negatable. */
9839 if (TREE_CODE (op1
) == INTEGER_CST
9840 && tree_int_cst_sgn (op1
) == -1
9841 && negate_expr_p (op0
)
9842 && negate_expr_p (op1
)
9843 && (tem
= negate_expr (op1
)) != op1
9844 && ! TREE_OVERFLOW (tem
))
9845 return fold_build2_loc (loc
, MULT_EXPR
, type
,
9846 fold_convert_loc (loc
, type
,
9847 negate_expr (op0
)), tem
);
9849 strict_overflow_p
= false;
9850 if (TREE_CODE (arg1
) == INTEGER_CST
9851 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
9852 &strict_overflow_p
)))
9854 if (strict_overflow_p
)
9855 fold_overflow_warning (("assuming signed overflow does not "
9856 "occur when simplifying "
9858 WARN_STRICT_OVERFLOW_MISC
);
9859 return fold_convert_loc (loc
, type
, tem
);
9862 /* Optimize z * conj(z) for integer complex numbers. */
9863 if (TREE_CODE (arg0
) == CONJ_EXPR
9864 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
9865 return fold_mult_zconjz (loc
, type
, arg1
);
9866 if (TREE_CODE (arg1
) == CONJ_EXPR
9867 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
9868 return fold_mult_zconjz (loc
, type
, arg0
);
9872 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9873 This is not the same for NaNs or if signed zeros are
9875 if (!HONOR_NANS (arg0
)
9876 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
9877 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
))
9878 && TREE_CODE (arg1
) == COMPLEX_CST
9879 && real_zerop (TREE_REALPART (arg1
)))
9881 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
9882 if (real_onep (TREE_IMAGPART (arg1
)))
9884 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
9885 negate_expr (fold_build1_loc (loc
, IMAGPART_EXPR
,
9887 fold_build1_loc (loc
, REALPART_EXPR
, rtype
, arg0
));
9888 else if (real_minus_onep (TREE_IMAGPART (arg1
)))
9890 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
9891 fold_build1_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
),
9892 negate_expr (fold_build1_loc (loc
, REALPART_EXPR
,
9896 /* Optimize z * conj(z) for floating point complex numbers.
9897 Guarded by flag_unsafe_math_optimizations as non-finite
9898 imaginary components don't produce scalar results. */
9899 if (flag_unsafe_math_optimizations
9900 && TREE_CODE (arg0
) == CONJ_EXPR
9901 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
9902 return fold_mult_zconjz (loc
, type
, arg1
);
9903 if (flag_unsafe_math_optimizations
9904 && TREE_CODE (arg1
) == CONJ_EXPR
9905 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
9906 return fold_mult_zconjz (loc
, type
, arg0
);
9911 /* Canonicalize (X & C1) | C2. */
9912 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9913 && TREE_CODE (arg1
) == INTEGER_CST
9914 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
9916 int width
= TYPE_PRECISION (type
), w
;
9917 wide_int c1
= TREE_OPERAND (arg0
, 1);
9920 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9921 if ((c1
& c2
) == c1
)
9922 return omit_one_operand_loc (loc
, type
, arg1
,
9923 TREE_OPERAND (arg0
, 0));
9925 wide_int msk
= wi::mask (width
, false,
9926 TYPE_PRECISION (TREE_TYPE (arg1
)));
9928 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9929 if (msk
.and_not (c1
| c2
) == 0)
9931 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
9932 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
, tem
, arg1
);
9935 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
9936 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
9937 mode which allows further optimizations. */
9940 wide_int c3
= c1
.and_not (c2
);
9941 for (w
= BITS_PER_UNIT
; w
<= width
; w
<<= 1)
9943 wide_int mask
= wi::mask (w
, false,
9944 TYPE_PRECISION (type
));
9945 if (((c1
| c2
) & mask
) == mask
&& c1
.and_not (mask
) == 0)
9954 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
9955 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tem
,
9956 wide_int_to_tree (type
, c3
));
9957 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
, tem
, arg1
);
9961 /* See if this can be simplified into a rotate first. If that
9962 is unsuccessful continue in the association code. */
9966 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9967 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9968 && INTEGRAL_TYPE_P (type
)
9969 && integer_onep (TREE_OPERAND (arg0
, 1))
9970 && integer_onep (arg1
))
9971 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
,
9972 build_zero_cst (TREE_TYPE (arg0
)));
9974 /* See if this can be simplified into a rotate first. If that
9975 is unsuccessful continue in the association code. */
9979 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9980 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
9981 && INTEGRAL_TYPE_P (type
)
9982 && integer_onep (TREE_OPERAND (arg0
, 1))
9983 && integer_onep (arg1
))
9986 tem
= TREE_OPERAND (arg0
, 0);
9987 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
9988 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
9990 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
9991 build_zero_cst (TREE_TYPE (tem
)));
9993 /* Fold ~X & 1 as (X & 1) == 0. */
9994 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9995 && INTEGRAL_TYPE_P (type
)
9996 && integer_onep (arg1
))
9999 tem
= TREE_OPERAND (arg0
, 0);
10000 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
10001 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
10003 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
10004 build_zero_cst (TREE_TYPE (tem
)));
10006 /* Fold !X & 1 as X == 0. */
10007 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
10008 && integer_onep (arg1
))
10010 tem
= TREE_OPERAND (arg0
, 0);
10011 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem
,
10012 build_zero_cst (TREE_TYPE (tem
)));
10015 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10016 multiple of 1 << CST. */
10017 if (TREE_CODE (arg1
) == INTEGER_CST
)
10019 wide_int cst1
= arg1
;
10020 wide_int ncst1
= -cst1
;
10021 if ((cst1
& ncst1
) == ncst1
10022 && multiple_of_p (type
, arg0
,
10023 wide_int_to_tree (TREE_TYPE (arg1
), ncst1
)))
10024 return fold_convert_loc (loc
, type
, arg0
);
10027 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10029 if (TREE_CODE (arg1
) == INTEGER_CST
10030 && TREE_CODE (arg0
) == MULT_EXPR
10031 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
10033 wide_int warg1
= arg1
;
10034 wide_int masked
= mask_with_tz (type
, warg1
, TREE_OPERAND (arg0
, 1));
10037 return omit_two_operands_loc (loc
, type
, build_zero_cst (type
),
10039 else if (masked
!= warg1
)
10041 /* Avoid the transform if arg1 is a mask of some
10042 mode which allows further optimizations. */
10043 int pop
= wi::popcount (warg1
);
10044 if (!(pop
>= BITS_PER_UNIT
10046 && wi::mask (pop
, false, warg1
.get_precision ()) == warg1
))
10047 return fold_build2_loc (loc
, code
, type
, op0
,
10048 wide_int_to_tree (type
, masked
));
10052 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10053 ((A & N) + B) & M -> (A + B) & M
10054 Similarly if (N & M) == 0,
10055 ((A | N) + B) & M -> (A + B) & M
10056 and for - instead of + (or unary - instead of +)
10057 and/or ^ instead of |.
10058 If B is constant and (B & M) == 0, fold into A & M. */
10059 if (TREE_CODE (arg1
) == INTEGER_CST
)
10061 wide_int cst1
= arg1
;
10062 if ((~cst1
!= 0) && (cst1
& (cst1
+ 1)) == 0
10063 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
10064 && (TREE_CODE (arg0
) == PLUS_EXPR
10065 || TREE_CODE (arg0
) == MINUS_EXPR
10066 || TREE_CODE (arg0
) == NEGATE_EXPR
)
10067 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
))
10068 || TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
))
10074 /* Now we know that arg0 is (C + D) or (C - D) or
10075 -C and arg1 (M) is == (1LL << cst) - 1.
10076 Store C into PMOP[0] and D into PMOP[1]. */
10077 pmop
[0] = TREE_OPERAND (arg0
, 0);
10079 if (TREE_CODE (arg0
) != NEGATE_EXPR
)
10081 pmop
[1] = TREE_OPERAND (arg0
, 1);
10085 if ((wi::max_value (TREE_TYPE (arg0
)) & cst1
) != cst1
)
10088 for (; which
>= 0; which
--)
10089 switch (TREE_CODE (pmop
[which
]))
10094 if (TREE_CODE (TREE_OPERAND (pmop
[which
], 1))
10097 cst0
= TREE_OPERAND (pmop
[which
], 1);
10099 if (TREE_CODE (pmop
[which
]) == BIT_AND_EXPR
)
10104 else if (cst0
!= 0)
10106 /* If C or D is of the form (A & N) where
10107 (N & M) == M, or of the form (A | N) or
10108 (A ^ N) where (N & M) == 0, replace it with A. */
10109 pmop
[which
] = TREE_OPERAND (pmop
[which
], 0);
10112 /* If C or D is a N where (N & M) == 0, it can be
10113 omitted (assumed 0). */
10114 if ((TREE_CODE (arg0
) == PLUS_EXPR
10115 || (TREE_CODE (arg0
) == MINUS_EXPR
&& which
== 0))
10116 && (cst1
& pmop
[which
]) == 0)
10117 pmop
[which
] = NULL
;
10123 /* Only build anything new if we optimized one or both arguments
10125 if (pmop
[0] != TREE_OPERAND (arg0
, 0)
10126 || (TREE_CODE (arg0
) != NEGATE_EXPR
10127 && pmop
[1] != TREE_OPERAND (arg0
, 1)))
10129 tree utype
= TREE_TYPE (arg0
);
10130 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
10132 /* Perform the operations in a type that has defined
10133 overflow behavior. */
10134 utype
= unsigned_type_for (TREE_TYPE (arg0
));
10135 if (pmop
[0] != NULL
)
10136 pmop
[0] = fold_convert_loc (loc
, utype
, pmop
[0]);
10137 if (pmop
[1] != NULL
)
10138 pmop
[1] = fold_convert_loc (loc
, utype
, pmop
[1]);
10141 if (TREE_CODE (arg0
) == NEGATE_EXPR
)
10142 tem
= fold_build1_loc (loc
, NEGATE_EXPR
, utype
, pmop
[0]);
10143 else if (TREE_CODE (arg0
) == PLUS_EXPR
)
10145 if (pmop
[0] != NULL
&& pmop
[1] != NULL
)
10146 tem
= fold_build2_loc (loc
, PLUS_EXPR
, utype
,
10148 else if (pmop
[0] != NULL
)
10150 else if (pmop
[1] != NULL
)
10153 return build_int_cst (type
, 0);
10155 else if (pmop
[0] == NULL
)
10156 tem
= fold_build1_loc (loc
, NEGATE_EXPR
, utype
, pmop
[1]);
10158 tem
= fold_build2_loc (loc
, MINUS_EXPR
, utype
,
10160 /* TEM is now the new binary +, - or unary - replacement. */
10161 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, utype
, tem
,
10162 fold_convert_loc (loc
, utype
, arg1
));
10163 return fold_convert_loc (loc
, type
, tem
);
10168 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10169 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) == NOP_EXPR
10170 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
10172 prec
= element_precision (TREE_TYPE (TREE_OPERAND (arg0
, 0)));
10174 wide_int mask
= wide_int::from (arg1
, prec
, UNSIGNED
);
10177 fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10183 /* Don't touch a floating-point divide by zero unless the mode
10184 of the constant can represent infinity. */
10185 if (TREE_CODE (arg1
) == REAL_CST
10186 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
)))
10187 && real_zerop (arg1
))
10190 /* (-A) / (-B) -> A / B */
10191 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
10192 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
10193 TREE_OPERAND (arg0
, 0),
10194 negate_expr (arg1
));
10195 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
10196 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
10197 negate_expr (arg0
),
10198 TREE_OPERAND (arg1
, 0));
10201 case TRUNC_DIV_EXPR
:
10204 case FLOOR_DIV_EXPR
:
10205 /* Simplify A / (B << N) where A and B are positive and B is
10206 a power of 2, to A >> (N + log2(B)). */
10207 strict_overflow_p
= false;
10208 if (TREE_CODE (arg1
) == LSHIFT_EXPR
10209 && (TYPE_UNSIGNED (type
)
10210 || tree_expr_nonnegative_warnv_p (op0
, &strict_overflow_p
)))
10212 tree sval
= TREE_OPERAND (arg1
, 0);
10213 if (integer_pow2p (sval
) && tree_int_cst_sgn (sval
) > 0)
10215 tree sh_cnt
= TREE_OPERAND (arg1
, 1);
10216 tree pow2
= build_int_cst (TREE_TYPE (sh_cnt
),
10217 wi::exact_log2 (sval
));
10219 if (strict_overflow_p
)
10220 fold_overflow_warning (("assuming signed overflow does not "
10221 "occur when simplifying A / (B << N)"),
10222 WARN_STRICT_OVERFLOW_MISC
);
10224 sh_cnt
= fold_build2_loc (loc
, PLUS_EXPR
, TREE_TYPE (sh_cnt
),
10226 return fold_build2_loc (loc
, RSHIFT_EXPR
, type
,
10227 fold_convert_loc (loc
, type
, arg0
), sh_cnt
);
10233 case ROUND_DIV_EXPR
:
10234 case CEIL_DIV_EXPR
:
10235 case EXACT_DIV_EXPR
:
10236 if (integer_zerop (arg1
))
10239 /* Convert -A / -B to A / B when the type is signed and overflow is
10241 if ((!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
10242 && TREE_CODE (op0
) == NEGATE_EXPR
10243 && negate_expr_p (op1
))
10245 if (INTEGRAL_TYPE_P (type
))
10246 fold_overflow_warning (("assuming signed overflow does not occur "
10247 "when distributing negation across "
10249 WARN_STRICT_OVERFLOW_MISC
);
10250 return fold_build2_loc (loc
, code
, type
,
10251 fold_convert_loc (loc
, type
,
10252 TREE_OPERAND (arg0
, 0)),
10253 negate_expr (op1
));
10255 if ((!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
10256 && TREE_CODE (arg1
) == NEGATE_EXPR
10257 && negate_expr_p (op0
))
10259 if (INTEGRAL_TYPE_P (type
))
10260 fold_overflow_warning (("assuming signed overflow does not occur "
10261 "when distributing negation across "
10263 WARN_STRICT_OVERFLOW_MISC
);
10264 return fold_build2_loc (loc
, code
, type
,
10266 fold_convert_loc (loc
, type
,
10267 TREE_OPERAND (arg1
, 0)));
10270 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10271 operation, EXACT_DIV_EXPR.
10273 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10274 At one time others generated faster code, it's not clear if they do
10275 after the last round to changes to the DIV code in expmed.c. */
10276 if ((code
== CEIL_DIV_EXPR
|| code
== FLOOR_DIV_EXPR
)
10277 && multiple_of_p (type
, arg0
, arg1
))
10278 return fold_build2_loc (loc
, EXACT_DIV_EXPR
, type
,
10279 fold_convert (type
, arg0
),
10280 fold_convert (type
, arg1
));
10282 strict_overflow_p
= false;
10283 if (TREE_CODE (arg1
) == INTEGER_CST
10284 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
10285 &strict_overflow_p
)))
10287 if (strict_overflow_p
)
10288 fold_overflow_warning (("assuming signed overflow does not occur "
10289 "when simplifying division"),
10290 WARN_STRICT_OVERFLOW_MISC
);
10291 return fold_convert_loc (loc
, type
, tem
);
10296 case CEIL_MOD_EXPR
:
10297 case FLOOR_MOD_EXPR
:
10298 case ROUND_MOD_EXPR
:
10299 case TRUNC_MOD_EXPR
:
10300 strict_overflow_p
= false;
10301 if (TREE_CODE (arg1
) == INTEGER_CST
10302 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
10303 &strict_overflow_p
)))
10305 if (strict_overflow_p
)
10306 fold_overflow_warning (("assuming signed overflow does not occur "
10307 "when simplifying modulus"),
10308 WARN_STRICT_OVERFLOW_MISC
);
10309 return fold_convert_loc (loc
, type
, tem
);
10318 /* Since negative shift count is not well-defined,
10319 don't try to compute it in the compiler. */
10320 if (TREE_CODE (arg1
) == INTEGER_CST
&& tree_int_cst_sgn (arg1
) < 0)
10323 prec
= element_precision (type
);
10325 /* If we have a rotate of a bit operation with the rotate count and
10326 the second operand of the bit operation both constant,
10327 permute the two operations. */
10328 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
10329 && (TREE_CODE (arg0
) == BIT_AND_EXPR
10330 || TREE_CODE (arg0
) == BIT_IOR_EXPR
10331 || TREE_CODE (arg0
) == BIT_XOR_EXPR
)
10332 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
10334 tree arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10335 tree arg01
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
10336 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
10337 fold_build2_loc (loc
, code
, type
,
10339 fold_build2_loc (loc
, code
, type
,
10343 /* Two consecutive rotates adding up to the some integer
10344 multiple of the precision of the type can be ignored. */
10345 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
10346 && TREE_CODE (arg0
) == RROTATE_EXPR
10347 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
10348 && wi::umod_trunc (wi::add (arg1
, TREE_OPERAND (arg0
, 1)),
10350 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10358 case TRUTH_ANDIF_EXPR
:
10359 /* Note that the operands of this must be ints
10360 and their values must be 0 or 1.
10361 ("true" is a fixed value perhaps depending on the language.) */
10362 /* If first arg is constant zero, return it. */
10363 if (integer_zerop (arg0
))
10364 return fold_convert_loc (loc
, type
, arg0
);
10366 case TRUTH_AND_EXPR
:
10367 /* If either arg is constant true, drop it. */
10368 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
10369 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
10370 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
)
10371 /* Preserve sequence points. */
10372 && (code
!= TRUTH_ANDIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
10373 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10374 /* If second arg is constant zero, result is zero, but first arg
10375 must be evaluated. */
10376 if (integer_zerop (arg1
))
10377 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
10378 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10379 case will be handled here. */
10380 if (integer_zerop (arg0
))
10381 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
10383 /* !X && X is always false. */
10384 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
10385 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10386 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
10387 /* X && !X is always false. */
10388 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
10389 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10390 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
10392 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10393 means A >= Y && A != MAX, but in this case we know that
10396 if (!TREE_SIDE_EFFECTS (arg0
)
10397 && !TREE_SIDE_EFFECTS (arg1
))
10399 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg0
, arg1
);
10400 if (tem
&& !operand_equal_p (tem
, arg0
, 0))
10401 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
10403 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg1
, arg0
);
10404 if (tem
&& !operand_equal_p (tem
, arg1
, 0))
10405 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
10408 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
10414 case TRUTH_ORIF_EXPR
:
10415 /* Note that the operands of this must be ints
10416 and their values must be 0 or true.
10417 ("true" is a fixed value perhaps depending on the language.) */
10418 /* If first arg is constant true, return it. */
10419 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
10420 return fold_convert_loc (loc
, type
, arg0
);
10422 case TRUTH_OR_EXPR
:
10423 /* If either arg is constant zero, drop it. */
10424 if (TREE_CODE (arg0
) == INTEGER_CST
&& integer_zerop (arg0
))
10425 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
10426 if (TREE_CODE (arg1
) == INTEGER_CST
&& integer_zerop (arg1
)
10427 /* Preserve sequence points. */
10428 && (code
!= TRUTH_ORIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
10429 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10430 /* If second arg is constant true, result is true, but we must
10431 evaluate first arg. */
10432 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
))
10433 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
10434 /* Likewise for first arg, but note this only occurs here for
10436 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
10437 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
10439 /* !X || X is always true. */
10440 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
10441 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10442 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
10443 /* X || !X is always true. */
10444 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
10445 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10446 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
10448 /* (X && !Y) || (!X && Y) is X ^ Y */
10449 if (TREE_CODE (arg0
) == TRUTH_AND_EXPR
10450 && TREE_CODE (arg1
) == TRUTH_AND_EXPR
)
10452 tree a0
, a1
, l0
, l1
, n0
, n1
;
10454 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
10455 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
10457 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10458 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
10460 n0
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l0
);
10461 n1
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l1
);
10463 if ((operand_equal_p (n0
, a0
, 0)
10464 && operand_equal_p (n1
, a1
, 0))
10465 || (operand_equal_p (n0
, a1
, 0)
10466 && operand_equal_p (n1
, a0
, 0)))
10467 return fold_build2_loc (loc
, TRUTH_XOR_EXPR
, type
, l0
, n1
);
10470 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
10476 case TRUTH_XOR_EXPR
:
10477 /* If the second arg is constant zero, drop it. */
10478 if (integer_zerop (arg1
))
10479 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10480 /* If the second arg is constant true, this is a logical inversion. */
10481 if (integer_onep (arg1
))
10483 tem
= invert_truthvalue_loc (loc
, arg0
);
10484 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
10486 /* Identical arguments cancel to zero. */
10487 if (operand_equal_p (arg0
, arg1
, 0))
10488 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
10490 /* !X ^ X is always true. */
10491 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
10492 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10493 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
10495 /* X ^ !X is always true. */
10496 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
10497 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10498 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
10507 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
10508 if (tem
!= NULL_TREE
)
10511 /* bool_var != 1 becomes !bool_var. */
10512 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
10513 && code
== NE_EXPR
)
10514 return fold_convert_loc (loc
, type
,
10515 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
10516 TREE_TYPE (arg0
), arg0
));
10518 /* bool_var == 0 becomes !bool_var. */
10519 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
10520 && code
== EQ_EXPR
)
10521 return fold_convert_loc (loc
, type
,
10522 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
10523 TREE_TYPE (arg0
), arg0
));
10525 /* !exp != 0 becomes !exp */
10526 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
&& integer_zerop (arg1
)
10527 && code
== NE_EXPR
)
10528 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10530 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
10531 if ((TREE_CODE (arg0
) == PLUS_EXPR
10532 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
10533 || TREE_CODE (arg0
) == MINUS_EXPR
)
10534 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0
,
10537 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
10538 || POINTER_TYPE_P (TREE_TYPE (arg0
))))
10540 tree val
= TREE_OPERAND (arg0
, 1);
10541 val
= fold_build2_loc (loc
, code
, type
, val
,
10542 build_int_cst (TREE_TYPE (val
), 0));
10543 return omit_two_operands_loc (loc
, type
, val
,
10544 TREE_OPERAND (arg0
, 0), arg1
);
10547 /* Transform comparisons of the form X CMP X +- Y to Y CMP 0. */
10548 if ((TREE_CODE (arg1
) == PLUS_EXPR
10549 || TREE_CODE (arg1
) == POINTER_PLUS_EXPR
10550 || TREE_CODE (arg1
) == MINUS_EXPR
)
10551 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1
,
10554 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
10555 || POINTER_TYPE_P (TREE_TYPE (arg1
))))
10557 tree val
= TREE_OPERAND (arg1
, 1);
10558 val
= fold_build2_loc (loc
, code
, type
, val
,
10559 build_int_cst (TREE_TYPE (val
), 0));
10560 return omit_two_operands_loc (loc
, type
, val
,
10561 TREE_OPERAND (arg1
, 0), arg0
);
10564 /* If this is an EQ or NE comparison with zero and ARG0 is
10565 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10566 two operations, but the latter can be done in one less insn
10567 on machines that have only two-operand insns or on which a
10568 constant cannot be the first operand. */
10569 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10570 && integer_zerop (arg1
))
10572 tree arg00
= TREE_OPERAND (arg0
, 0);
10573 tree arg01
= TREE_OPERAND (arg0
, 1);
10574 if (TREE_CODE (arg00
) == LSHIFT_EXPR
10575 && integer_onep (TREE_OPERAND (arg00
, 0)))
10577 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg00
),
10578 arg01
, TREE_OPERAND (arg00
, 1));
10579 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
10580 build_int_cst (TREE_TYPE (arg0
), 1));
10581 return fold_build2_loc (loc
, code
, type
,
10582 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
10585 else if (TREE_CODE (arg01
) == LSHIFT_EXPR
10586 && integer_onep (TREE_OPERAND (arg01
, 0)))
10588 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg01
),
10589 arg00
, TREE_OPERAND (arg01
, 1));
10590 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
10591 build_int_cst (TREE_TYPE (arg0
), 1));
10592 return fold_build2_loc (loc
, code
, type
,
10593 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
10598 /* If this is an NE or EQ comparison of zero against the result of a
10599 signed MOD operation whose second operand is a power of 2, make
10600 the MOD operation unsigned since it is simpler and equivalent. */
10601 if (integer_zerop (arg1
)
10602 && !TYPE_UNSIGNED (TREE_TYPE (arg0
))
10603 && (TREE_CODE (arg0
) == TRUNC_MOD_EXPR
10604 || TREE_CODE (arg0
) == CEIL_MOD_EXPR
10605 || TREE_CODE (arg0
) == FLOOR_MOD_EXPR
10606 || TREE_CODE (arg0
) == ROUND_MOD_EXPR
)
10607 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
10609 tree newtype
= unsigned_type_for (TREE_TYPE (arg0
));
10610 tree newmod
= fold_build2_loc (loc
, TREE_CODE (arg0
), newtype
,
10611 fold_convert_loc (loc
, newtype
,
10612 TREE_OPERAND (arg0
, 0)),
10613 fold_convert_loc (loc
, newtype
,
10614 TREE_OPERAND (arg0
, 1)));
10616 return fold_build2_loc (loc
, code
, type
, newmod
,
10617 fold_convert_loc (loc
, newtype
, arg1
));
10620 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10621 C1 is a valid shift constant, and C2 is a power of two, i.e.
10623 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10624 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == RSHIFT_EXPR
10625 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1))
10627 && integer_pow2p (TREE_OPERAND (arg0
, 1))
10628 && integer_zerop (arg1
))
10630 tree itype
= TREE_TYPE (arg0
);
10631 tree arg001
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1);
10632 prec
= TYPE_PRECISION (itype
);
10634 /* Check for a valid shift count. */
10635 if (wi::ltu_p (arg001
, prec
))
10637 tree arg01
= TREE_OPERAND (arg0
, 1);
10638 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
10639 unsigned HOST_WIDE_INT log2
= tree_log2 (arg01
);
10640 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10641 can be rewritten as (X & (C2 << C1)) != 0. */
10642 if ((log2
+ TREE_INT_CST_LOW (arg001
)) < prec
)
10644 tem
= fold_build2_loc (loc
, LSHIFT_EXPR
, itype
, arg01
, arg001
);
10645 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, itype
, arg000
, tem
);
10646 return fold_build2_loc (loc
, code
, type
, tem
,
10647 fold_convert_loc (loc
, itype
, arg1
));
10649 /* Otherwise, for signed (arithmetic) shifts,
10650 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10651 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10652 else if (!TYPE_UNSIGNED (itype
))
10653 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
, type
,
10654 arg000
, build_int_cst (itype
, 0));
10655 /* Otherwise, of unsigned (logical) shifts,
10656 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10657 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10659 return omit_one_operand_loc (loc
, type
,
10660 code
== EQ_EXPR
? integer_one_node
10661 : integer_zero_node
,
10666 /* If this is a comparison of a field, we may be able to simplify it. */
10667 if ((TREE_CODE (arg0
) == COMPONENT_REF
10668 || TREE_CODE (arg0
) == BIT_FIELD_REF
)
10669 /* Handle the constant case even without -O
10670 to make sure the warnings are given. */
10671 && (optimize
|| TREE_CODE (arg1
) == INTEGER_CST
))
10673 t1
= optimize_bit_field_compare (loc
, code
, type
, arg0
, arg1
);
10678 /* Optimize comparisons of strlen vs zero to a compare of the
10679 first character of the string vs zero. To wit,
10680 strlen(ptr) == 0 => *ptr == 0
10681 strlen(ptr) != 0 => *ptr != 0
10682 Other cases should reduce to one of these two (or a constant)
10683 due to the return value of strlen being unsigned. */
10684 if (TREE_CODE (arg0
) == CALL_EXPR
10685 && integer_zerop (arg1
))
10687 tree fndecl
= get_callee_fndecl (arg0
);
10690 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
10691 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STRLEN
10692 && call_expr_nargs (arg0
) == 1
10693 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0
, 0))) == POINTER_TYPE
)
10695 tree iref
= build_fold_indirect_ref_loc (loc
,
10696 CALL_EXPR_ARG (arg0
, 0));
10697 return fold_build2_loc (loc
, code
, type
, iref
,
10698 build_int_cst (TREE_TYPE (iref
), 0));
10702 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10703 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10704 if (TREE_CODE (arg0
) == RSHIFT_EXPR
10705 && integer_zerop (arg1
)
10706 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
10708 tree arg00
= TREE_OPERAND (arg0
, 0);
10709 tree arg01
= TREE_OPERAND (arg0
, 1);
10710 tree itype
= TREE_TYPE (arg00
);
10711 if (wi::eq_p (arg01
, element_precision (itype
) - 1))
10713 if (TYPE_UNSIGNED (itype
))
10715 itype
= signed_type_for (itype
);
10716 arg00
= fold_convert_loc (loc
, itype
, arg00
);
10718 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
10719 type
, arg00
, build_zero_cst (itype
));
10723 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10724 (X & C) == 0 when C is a single bit. */
10725 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10726 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_NOT_EXPR
10727 && integer_zerop (arg1
)
10728 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
10730 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
10731 TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0),
10732 TREE_OPERAND (arg0
, 1));
10733 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
,
10735 fold_convert_loc (loc
, TREE_TYPE (arg0
),
10739 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10740 constant C is a power of two, i.e. a single bit. */
10741 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
10742 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
10743 && integer_zerop (arg1
)
10744 && integer_pow2p (TREE_OPERAND (arg0
, 1))
10745 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
10746 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
10748 tree arg00
= TREE_OPERAND (arg0
, 0);
10749 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
10750 arg00
, build_int_cst (TREE_TYPE (arg00
), 0));
10753 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10754 when is C is a power of two, i.e. a single bit. */
10755 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10756 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_XOR_EXPR
10757 && integer_zerop (arg1
)
10758 && integer_pow2p (TREE_OPERAND (arg0
, 1))
10759 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
10760 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
10762 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
10763 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg000
),
10764 arg000
, TREE_OPERAND (arg0
, 1));
10765 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
10766 tem
, build_int_cst (TREE_TYPE (tem
), 0));
10769 if (integer_zerop (arg1
)
10770 && tree_expr_nonzero_p (arg0
))
10772 tree res
= constant_boolean_node (code
==NE_EXPR
, type
);
10773 return omit_one_operand_loc (loc
, type
, res
, arg0
);
10776 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
10777 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10778 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
10780 tree arg00
= TREE_OPERAND (arg0
, 0);
10781 tree arg01
= TREE_OPERAND (arg0
, 1);
10782 tree arg10
= TREE_OPERAND (arg1
, 0);
10783 tree arg11
= TREE_OPERAND (arg1
, 1);
10784 tree itype
= TREE_TYPE (arg0
);
10786 if (operand_equal_p (arg01
, arg11
, 0))
10788 tem
= fold_convert_loc (loc
, itype
, arg10
);
10789 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg00
, tem
);
10790 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, itype
, tem
, arg01
);
10791 return fold_build2_loc (loc
, code
, type
, tem
,
10792 build_zero_cst (itype
));
10794 if (operand_equal_p (arg01
, arg10
, 0))
10796 tem
= fold_convert_loc (loc
, itype
, arg11
);
10797 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg00
, tem
);
10798 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, itype
, tem
, arg01
);
10799 return fold_build2_loc (loc
, code
, type
, tem
,
10800 build_zero_cst (itype
));
10802 if (operand_equal_p (arg00
, arg11
, 0))
10804 tem
= fold_convert_loc (loc
, itype
, arg10
);
10805 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg01
, tem
);
10806 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, itype
, tem
, arg00
);
10807 return fold_build2_loc (loc
, code
, type
, tem
,
10808 build_zero_cst (itype
));
10810 if (operand_equal_p (arg00
, arg10
, 0))
10812 tem
= fold_convert_loc (loc
, itype
, arg11
);
10813 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg01
, tem
);
10814 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, itype
, tem
, arg00
);
10815 return fold_build2_loc (loc
, code
, type
, tem
,
10816 build_zero_cst (itype
));
10820 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
10821 && TREE_CODE (arg1
) == BIT_XOR_EXPR
)
10823 tree arg00
= TREE_OPERAND (arg0
, 0);
10824 tree arg01
= TREE_OPERAND (arg0
, 1);
10825 tree arg10
= TREE_OPERAND (arg1
, 0);
10826 tree arg11
= TREE_OPERAND (arg1
, 1);
10827 tree itype
= TREE_TYPE (arg0
);
10829 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10830 operand_equal_p guarantees no side-effects so we don't need
10831 to use omit_one_operand on Z. */
10832 if (operand_equal_p (arg01
, arg11
, 0))
10833 return fold_build2_loc (loc
, code
, type
, arg00
,
10834 fold_convert_loc (loc
, TREE_TYPE (arg00
),
10836 if (operand_equal_p (arg01
, arg10
, 0))
10837 return fold_build2_loc (loc
, code
, type
, arg00
,
10838 fold_convert_loc (loc
, TREE_TYPE (arg00
),
10840 if (operand_equal_p (arg00
, arg11
, 0))
10841 return fold_build2_loc (loc
, code
, type
, arg01
,
10842 fold_convert_loc (loc
, TREE_TYPE (arg01
),
10844 if (operand_equal_p (arg00
, arg10
, 0))
10845 return fold_build2_loc (loc
, code
, type
, arg01
,
10846 fold_convert_loc (loc
, TREE_TYPE (arg01
),
10849 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
10850 if (TREE_CODE (arg01
) == INTEGER_CST
10851 && TREE_CODE (arg11
) == INTEGER_CST
)
10853 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg01
,
10854 fold_convert_loc (loc
, itype
, arg11
));
10855 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg00
, tem
);
10856 return fold_build2_loc (loc
, code
, type
, tem
,
10857 fold_convert_loc (loc
, itype
, arg10
));
10861 /* Attempt to simplify equality/inequality comparisons of complex
10862 values. Only lower the comparison if the result is known or
10863 can be simplified to a single scalar comparison. */
10864 if ((TREE_CODE (arg0
) == COMPLEX_EXPR
10865 || TREE_CODE (arg0
) == COMPLEX_CST
)
10866 && (TREE_CODE (arg1
) == COMPLEX_EXPR
10867 || TREE_CODE (arg1
) == COMPLEX_CST
))
10869 tree real0
, imag0
, real1
, imag1
;
10872 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
10874 real0
= TREE_OPERAND (arg0
, 0);
10875 imag0
= TREE_OPERAND (arg0
, 1);
10879 real0
= TREE_REALPART (arg0
);
10880 imag0
= TREE_IMAGPART (arg0
);
10883 if (TREE_CODE (arg1
) == COMPLEX_EXPR
)
10885 real1
= TREE_OPERAND (arg1
, 0);
10886 imag1
= TREE_OPERAND (arg1
, 1);
10890 real1
= TREE_REALPART (arg1
);
10891 imag1
= TREE_IMAGPART (arg1
);
10894 rcond
= fold_binary_loc (loc
, code
, type
, real0
, real1
);
10895 if (rcond
&& TREE_CODE (rcond
) == INTEGER_CST
)
10897 if (integer_zerop (rcond
))
10899 if (code
== EQ_EXPR
)
10900 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
10902 return fold_build2_loc (loc
, NE_EXPR
, type
, imag0
, imag1
);
10906 if (code
== NE_EXPR
)
10907 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
10909 return fold_build2_loc (loc
, EQ_EXPR
, type
, imag0
, imag1
);
10913 icond
= fold_binary_loc (loc
, code
, type
, imag0
, imag1
);
10914 if (icond
&& TREE_CODE (icond
) == INTEGER_CST
)
10916 if (integer_zerop (icond
))
10918 if (code
== EQ_EXPR
)
10919 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
10921 return fold_build2_loc (loc
, NE_EXPR
, type
, real0
, real1
);
10925 if (code
== NE_EXPR
)
10926 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
10928 return fold_build2_loc (loc
, EQ_EXPR
, type
, real0
, real1
);
10939 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
10940 if (tem
!= NULL_TREE
)
10943 /* Transform comparisons of the form X +- C CMP X. */
10944 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
10945 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
10946 && ((TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
10947 && !HONOR_SNANS (arg0
))
10948 || (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
10949 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))))
10951 tree arg01
= TREE_OPERAND (arg0
, 1);
10952 enum tree_code code0
= TREE_CODE (arg0
);
10955 if (TREE_CODE (arg01
) == REAL_CST
)
10956 is_positive
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01
)) ? -1 : 1;
10958 is_positive
= tree_int_cst_sgn (arg01
);
10960 /* (X - c) > X becomes false. */
10961 if (code
== GT_EXPR
10962 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
10963 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
10965 if (TREE_CODE (arg01
) == INTEGER_CST
10966 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
10967 fold_overflow_warning (("assuming signed overflow does not "
10968 "occur when assuming that (X - c) > X "
10969 "is always false"),
10970 WARN_STRICT_OVERFLOW_ALL
);
10971 return constant_boolean_node (0, type
);
10974 /* Likewise (X + c) < X becomes false. */
10975 if (code
== LT_EXPR
10976 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
10977 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
10979 if (TREE_CODE (arg01
) == INTEGER_CST
10980 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
10981 fold_overflow_warning (("assuming signed overflow does not "
10982 "occur when assuming that "
10983 "(X + c) < X is always false"),
10984 WARN_STRICT_OVERFLOW_ALL
);
10985 return constant_boolean_node (0, type
);
10988 /* Convert (X - c) <= X to true. */
10989 if (!HONOR_NANS (arg1
)
10991 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
10992 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
10994 if (TREE_CODE (arg01
) == INTEGER_CST
10995 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
10996 fold_overflow_warning (("assuming signed overflow does not "
10997 "occur when assuming that "
10998 "(X - c) <= X is always true"),
10999 WARN_STRICT_OVERFLOW_ALL
);
11000 return constant_boolean_node (1, type
);
11003 /* Convert (X + c) >= X to true. */
11004 if (!HONOR_NANS (arg1
)
11006 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
11007 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
11009 if (TREE_CODE (arg01
) == INTEGER_CST
11010 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11011 fold_overflow_warning (("assuming signed overflow does not "
11012 "occur when assuming that "
11013 "(X + c) >= X is always true"),
11014 WARN_STRICT_OVERFLOW_ALL
);
11015 return constant_boolean_node (1, type
);
11018 if (TREE_CODE (arg01
) == INTEGER_CST
)
11020 /* Convert X + c > X and X - c < X to true for integers. */
11021 if (code
== GT_EXPR
11022 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
11023 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
11025 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11026 fold_overflow_warning (("assuming signed overflow does "
11027 "not occur when assuming that "
11028 "(X + c) > X is always true"),
11029 WARN_STRICT_OVERFLOW_ALL
);
11030 return constant_boolean_node (1, type
);
11033 if (code
== LT_EXPR
11034 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
11035 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
11037 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11038 fold_overflow_warning (("assuming signed overflow does "
11039 "not occur when assuming that "
11040 "(X - c) < X is always true"),
11041 WARN_STRICT_OVERFLOW_ALL
);
11042 return constant_boolean_node (1, type
);
11045 /* Convert X + c <= X and X - c >= X to false for integers. */
11046 if (code
== LE_EXPR
11047 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
11048 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
11050 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11051 fold_overflow_warning (("assuming signed overflow does "
11052 "not occur when assuming that "
11053 "(X + c) <= X is always false"),
11054 WARN_STRICT_OVERFLOW_ALL
);
11055 return constant_boolean_node (0, type
);
11058 if (code
== GE_EXPR
11059 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
11060 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
11062 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11063 fold_overflow_warning (("assuming signed overflow does "
11064 "not occur when assuming that "
11065 "(X - c) >= X is always false"),
11066 WARN_STRICT_OVERFLOW_ALL
);
11067 return constant_boolean_node (0, type
);
11072 /* If we are comparing an ABS_EXPR with a constant, we can
11073 convert all the cases into explicit comparisons, but they may
11074 well not be faster than doing the ABS and one comparison.
11075 But ABS (X) <= C is a range comparison, which becomes a subtraction
11076 and a comparison, and is probably faster. */
11077 if (code
== LE_EXPR
11078 && TREE_CODE (arg1
) == INTEGER_CST
11079 && TREE_CODE (arg0
) == ABS_EXPR
11080 && ! TREE_SIDE_EFFECTS (arg0
)
11081 && (0 != (tem
= negate_expr (arg1
)))
11082 && TREE_CODE (tem
) == INTEGER_CST
11083 && !TREE_OVERFLOW (tem
))
11084 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
11085 build2 (GE_EXPR
, type
,
11086 TREE_OPERAND (arg0
, 0), tem
),
11087 build2 (LE_EXPR
, type
,
11088 TREE_OPERAND (arg0
, 0), arg1
));
11090 /* Convert ABS_EXPR<x> >= 0 to true. */
11091 strict_overflow_p
= false;
11092 if (code
== GE_EXPR
11093 && (integer_zerop (arg1
)
11094 || (! HONOR_NANS (arg0
)
11095 && real_zerop (arg1
)))
11096 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
11098 if (strict_overflow_p
)
11099 fold_overflow_warning (("assuming signed overflow does not occur "
11100 "when simplifying comparison of "
11101 "absolute value and zero"),
11102 WARN_STRICT_OVERFLOW_CONDITIONAL
);
11103 return omit_one_operand_loc (loc
, type
,
11104 constant_boolean_node (true, type
),
11108 /* Convert ABS_EXPR<x> < 0 to false. */
11109 strict_overflow_p
= false;
11110 if (code
== LT_EXPR
11111 && (integer_zerop (arg1
) || real_zerop (arg1
))
11112 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
11114 if (strict_overflow_p
)
11115 fold_overflow_warning (("assuming signed overflow does not occur "
11116 "when simplifying comparison of "
11117 "absolute value and zero"),
11118 WARN_STRICT_OVERFLOW_CONDITIONAL
);
11119 return omit_one_operand_loc (loc
, type
,
11120 constant_boolean_node (false, type
),
11124 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11125 and similarly for >= into !=. */
11126 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
11127 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
11128 && TREE_CODE (arg1
) == LSHIFT_EXPR
11129 && integer_onep (TREE_OPERAND (arg1
, 0)))
11130 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
11131 build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
11132 TREE_OPERAND (arg1
, 1)),
11133 build_zero_cst (TREE_TYPE (arg0
)));
11135 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
11136 otherwise Y might be >= # of bits in X's type and thus e.g.
11137 (unsigned char) (1 << Y) for Y 15 might be 0.
11138 If the cast is widening, then 1 << Y should have unsigned type,
11139 otherwise if Y is number of bits in the signed shift type minus 1,
11140 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
11141 31 might be 0xffffffff80000000. */
11142 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
11143 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
11144 && CONVERT_EXPR_P (arg1
)
11145 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == LSHIFT_EXPR
11146 && (element_precision (TREE_TYPE (arg1
))
11147 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1
, 0))))
11148 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1
, 0)))
11149 || (element_precision (TREE_TYPE (arg1
))
11150 == element_precision (TREE_TYPE (TREE_OPERAND (arg1
, 0)))))
11151 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0)))
11153 tem
= build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
11154 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1));
11155 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
11156 fold_convert_loc (loc
, TREE_TYPE (arg0
), tem
),
11157 build_zero_cst (TREE_TYPE (arg0
)));
11162 case UNORDERED_EXPR
:
11170 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11172 tree targ0
= strip_float_extensions (arg0
);
11173 tree targ1
= strip_float_extensions (arg1
);
11174 tree newtype
= TREE_TYPE (targ0
);
11176 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
11177 newtype
= TREE_TYPE (targ1
);
11179 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
11180 return fold_build2_loc (loc
, code
, type
,
11181 fold_convert_loc (loc
, newtype
, targ0
),
11182 fold_convert_loc (loc
, newtype
, targ1
));
11187 case COMPOUND_EXPR
:
11188 /* When pedantic, a compound expression can be neither an lvalue
11189 nor an integer constant expression. */
11190 if (TREE_SIDE_EFFECTS (arg0
) || TREE_CONSTANT (arg1
))
11192 /* Don't let (0, 0) be null pointer constant. */
11193 tem
= integer_zerop (arg1
) ? build1 (NOP_EXPR
, type
, arg1
)
11194 : fold_convert_loc (loc
, type
, arg1
);
11195 return pedantic_non_lvalue_loc (loc
, tem
);
11198 /* An ASSERT_EXPR should never be passed to fold_binary. */
11199 gcc_unreachable ();
11203 } /* switch (code) */
11206 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11207 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11211 contains_label_1 (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
11213 switch (TREE_CODE (*tp
))
11219 *walk_subtrees
= 0;
11228 /* Return whether the sub-tree ST contains a label which is accessible from
11229 outside the sub-tree. */
11232 contains_label_p (tree st
)
11235 (walk_tree_without_duplicates (&st
, contains_label_1
, NULL
) != NULL_TREE
);
11238 /* Fold a ternary expression of code CODE and type TYPE with operands
11239 OP0, OP1, and OP2. Return the folded expression if folding is
11240 successful. Otherwise, return NULL_TREE. */
11243 fold_ternary_loc (location_t loc
, enum tree_code code
, tree type
,
11244 tree op0
, tree op1
, tree op2
)
11247 tree arg0
= NULL_TREE
, arg1
= NULL_TREE
, arg2
= NULL_TREE
;
11248 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
11250 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
11251 && TREE_CODE_LENGTH (code
) == 3);
11253 /* If this is a commutative operation, and OP0 is a constant, move it
11254 to OP1 to reduce the number of tests below. */
11255 if (commutative_ternary_tree_code (code
)
11256 && tree_swap_operands_p (op0
, op1
))
11257 return fold_build3_loc (loc
, code
, type
, op1
, op0
, op2
);
11259 tem
= generic_simplify (loc
, code
, type
, op0
, op1
, op2
);
11263 /* Strip any conversions that don't change the mode. This is safe
11264 for every expression, except for a comparison expression because
11265 its signedness is derived from its operands. So, in the latter
11266 case, only strip conversions that don't change the signedness.
11268 Note that this is done as an internal manipulation within the
11269 constant folder, in order to find the simplest representation of
11270 the arguments so that their form can be studied. In any cases,
11271 the appropriate type conversions should be put back in the tree
11272 that will get out of the constant folder. */
11293 case COMPONENT_REF
:
11294 if (TREE_CODE (arg0
) == CONSTRUCTOR
11295 && ! type_contains_placeholder_p (TREE_TYPE (arg0
)))
11297 unsigned HOST_WIDE_INT idx
;
11299 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0
), idx
, field
, value
)
11306 case VEC_COND_EXPR
:
11307 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11308 so all simple results must be passed through pedantic_non_lvalue. */
11309 if (TREE_CODE (arg0
) == INTEGER_CST
)
11311 tree unused_op
= integer_zerop (arg0
) ? op1
: op2
;
11312 tem
= integer_zerop (arg0
) ? op2
: op1
;
11313 /* Only optimize constant conditions when the selected branch
11314 has the same type as the COND_EXPR. This avoids optimizing
11315 away "c ? x : throw", where the throw has a void type.
11316 Avoid throwing away that operand which contains label. */
11317 if ((!TREE_SIDE_EFFECTS (unused_op
)
11318 || !contains_label_p (unused_op
))
11319 && (! VOID_TYPE_P (TREE_TYPE (tem
))
11320 || VOID_TYPE_P (type
)))
11321 return pedantic_non_lvalue_loc (loc
, tem
);
11324 else if (TREE_CODE (arg0
) == VECTOR_CST
)
11326 if ((TREE_CODE (arg1
) == VECTOR_CST
11327 || TREE_CODE (arg1
) == CONSTRUCTOR
)
11328 && (TREE_CODE (arg2
) == VECTOR_CST
11329 || TREE_CODE (arg2
) == CONSTRUCTOR
))
11331 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
11332 unsigned char *sel
= XALLOCAVEC (unsigned char, nelts
);
11333 gcc_assert (nelts
== VECTOR_CST_NELTS (arg0
));
11334 for (i
= 0; i
< nelts
; i
++)
11336 tree val
= VECTOR_CST_ELT (arg0
, i
);
11337 if (integer_all_onesp (val
))
11339 else if (integer_zerop (val
))
11340 sel
[i
] = nelts
+ i
;
11341 else /* Currently unreachable. */
11344 tree t
= fold_vec_perm (type
, arg1
, arg2
, sel
);
11345 if (t
!= NULL_TREE
)
11350 /* If we have A op B ? A : C, we may be able to convert this to a
11351 simpler expression, depending on the operation and the values
11352 of B and C. Signed zeros prevent all of these transformations,
11353 for reasons given above each one.
11355 Also try swapping the arguments and inverting the conditional. */
11356 if (COMPARISON_CLASS_P (arg0
)
11357 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
11358 arg1
, TREE_OPERAND (arg0
, 1))
11359 && !HONOR_SIGNED_ZEROS (element_mode (arg1
)))
11361 tem
= fold_cond_expr_with_comparison (loc
, type
, arg0
, op1
, op2
);
11366 if (COMPARISON_CLASS_P (arg0
)
11367 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
11369 TREE_OPERAND (arg0
, 1))
11370 && !HONOR_SIGNED_ZEROS (element_mode (op2
)))
11372 location_t loc0
= expr_location_or (arg0
, loc
);
11373 tem
= fold_invert_truthvalue (loc0
, arg0
);
11374 if (tem
&& COMPARISON_CLASS_P (tem
))
11376 tem
= fold_cond_expr_with_comparison (loc
, type
, tem
, op2
, op1
);
11382 /* If the second operand is simpler than the third, swap them
11383 since that produces better jump optimization results. */
11384 if (truth_value_p (TREE_CODE (arg0
))
11385 && tree_swap_operands_p (op1
, op2
))
11387 location_t loc0
= expr_location_or (arg0
, loc
);
11388 /* See if this can be inverted. If it can't, possibly because
11389 it was a floating-point inequality comparison, don't do
11391 tem
= fold_invert_truthvalue (loc0
, arg0
);
11393 return fold_build3_loc (loc
, code
, type
, tem
, op2
, op1
);
11396 /* Convert A ? 1 : 0 to simply A. */
11397 if ((code
== VEC_COND_EXPR
? integer_all_onesp (op1
)
11398 : (integer_onep (op1
)
11399 && !VECTOR_TYPE_P (type
)))
11400 && integer_zerop (op2
)
11401 /* If we try to convert OP0 to our type, the
11402 call to fold will try to move the conversion inside
11403 a COND, which will recurse. In that case, the COND_EXPR
11404 is probably the best choice, so leave it alone. */
11405 && type
== TREE_TYPE (arg0
))
11406 return pedantic_non_lvalue_loc (loc
, arg0
);
11408 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11409 over COND_EXPR in cases such as floating point comparisons. */
11410 if (integer_zerop (op1
)
11411 && code
== COND_EXPR
11412 && integer_onep (op2
)
11413 && !VECTOR_TYPE_P (type
)
11414 && truth_value_p (TREE_CODE (arg0
)))
11415 return pedantic_non_lvalue_loc (loc
,
11416 fold_convert_loc (loc
, type
,
11417 invert_truthvalue_loc (loc
,
11420 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11421 if (TREE_CODE (arg0
) == LT_EXPR
11422 && integer_zerop (TREE_OPERAND (arg0
, 1))
11423 && integer_zerop (op2
)
11424 && (tem
= sign_bit_p (TREE_OPERAND (arg0
, 0), arg1
)))
11426 /* sign_bit_p looks through both zero and sign extensions,
11427 but for this optimization only sign extensions are
11429 tree tem2
= TREE_OPERAND (arg0
, 0);
11430 while (tem
!= tem2
)
11432 if (TREE_CODE (tem2
) != NOP_EXPR
11433 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2
, 0))))
11438 tem2
= TREE_OPERAND (tem2
, 0);
11440 /* sign_bit_p only checks ARG1 bits within A's precision.
11441 If <sign bit of A> has wider type than A, bits outside
11442 of A's precision in <sign bit of A> need to be checked.
11443 If they are all 0, this optimization needs to be done
11444 in unsigned A's type, if they are all 1 in signed A's type,
11445 otherwise this can't be done. */
11447 && TYPE_PRECISION (TREE_TYPE (tem
))
11448 < TYPE_PRECISION (TREE_TYPE (arg1
))
11449 && TYPE_PRECISION (TREE_TYPE (tem
))
11450 < TYPE_PRECISION (type
))
11452 int inner_width
, outer_width
;
11455 inner_width
= TYPE_PRECISION (TREE_TYPE (tem
));
11456 outer_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
11457 if (outer_width
> TYPE_PRECISION (type
))
11458 outer_width
= TYPE_PRECISION (type
);
11460 wide_int mask
= wi::shifted_mask
11461 (inner_width
, outer_width
- inner_width
, false,
11462 TYPE_PRECISION (TREE_TYPE (arg1
)));
11464 wide_int common
= mask
& arg1
;
11465 if (common
== mask
)
11467 tem_type
= signed_type_for (TREE_TYPE (tem
));
11468 tem
= fold_convert_loc (loc
, tem_type
, tem
);
11470 else if (common
== 0)
11472 tem_type
= unsigned_type_for (TREE_TYPE (tem
));
11473 tem
= fold_convert_loc (loc
, tem_type
, tem
);
11481 fold_convert_loc (loc
, type
,
11482 fold_build2_loc (loc
, BIT_AND_EXPR
,
11483 TREE_TYPE (tem
), tem
,
11484 fold_convert_loc (loc
,
11489 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11490 already handled above. */
11491 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11492 && integer_onep (TREE_OPERAND (arg0
, 1))
11493 && integer_zerop (op2
)
11494 && integer_pow2p (arg1
))
11496 tree tem
= TREE_OPERAND (arg0
, 0);
11498 if (TREE_CODE (tem
) == RSHIFT_EXPR
11499 && tree_fits_uhwi_p (TREE_OPERAND (tem
, 1))
11500 && (unsigned HOST_WIDE_INT
) tree_log2 (arg1
)
11501 == tree_to_uhwi (TREE_OPERAND (tem
, 1)))
11502 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11503 fold_convert_loc (loc
, type
,
11504 TREE_OPERAND (tem
, 0)),
11508 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11509 is probably obsolete because the first operand should be a
11510 truth value (that's why we have the two cases above), but let's
11511 leave it in until we can confirm this for all front-ends. */
11512 if (integer_zerop (op2
)
11513 && TREE_CODE (arg0
) == NE_EXPR
11514 && integer_zerop (TREE_OPERAND (arg0
, 1))
11515 && integer_pow2p (arg1
)
11516 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
11517 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
11518 arg1
, OEP_ONLY_CONST
))
11519 return pedantic_non_lvalue_loc (loc
,
11520 fold_convert_loc (loc
, type
,
11521 TREE_OPERAND (arg0
, 0)));
11523 /* Disable the transformations below for vectors, since
11524 fold_binary_op_with_conditional_arg may undo them immediately,
11525 yielding an infinite loop. */
11526 if (code
== VEC_COND_EXPR
)
11529 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11530 if (integer_zerop (op2
)
11531 && truth_value_p (TREE_CODE (arg0
))
11532 && truth_value_p (TREE_CODE (arg1
))
11533 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
11534 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
? BIT_AND_EXPR
11535 : TRUTH_ANDIF_EXPR
,
11536 type
, fold_convert_loc (loc
, type
, arg0
), op1
);
11538 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11539 if (code
== VEC_COND_EXPR
? integer_all_onesp (op2
) : integer_onep (op2
)
11540 && truth_value_p (TREE_CODE (arg0
))
11541 && truth_value_p (TREE_CODE (arg1
))
11542 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
11544 location_t loc0
= expr_location_or (arg0
, loc
);
11545 /* Only perform transformation if ARG0 is easily inverted. */
11546 tem
= fold_invert_truthvalue (loc0
, arg0
);
11548 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
11551 type
, fold_convert_loc (loc
, type
, tem
),
11555 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11556 if (integer_zerop (arg1
)
11557 && truth_value_p (TREE_CODE (arg0
))
11558 && truth_value_p (TREE_CODE (op2
))
11559 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
11561 location_t loc0
= expr_location_or (arg0
, loc
);
11562 /* Only perform transformation if ARG0 is easily inverted. */
11563 tem
= fold_invert_truthvalue (loc0
, arg0
);
11565 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
11566 ? BIT_AND_EXPR
: TRUTH_ANDIF_EXPR
,
11567 type
, fold_convert_loc (loc
, type
, tem
),
11571 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11572 if (code
== VEC_COND_EXPR
? integer_all_onesp (arg1
) : integer_onep (arg1
)
11573 && truth_value_p (TREE_CODE (arg0
))
11574 && truth_value_p (TREE_CODE (op2
))
11575 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
11576 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
11577 ? BIT_IOR_EXPR
: TRUTH_ORIF_EXPR
,
11578 type
, fold_convert_loc (loc
, type
, arg0
), op2
);
11583 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11584 of fold_ternary on them. */
11585 gcc_unreachable ();
11587 case BIT_FIELD_REF
:
11588 if (TREE_CODE (arg0
) == VECTOR_CST
11589 && (type
== TREE_TYPE (TREE_TYPE (arg0
))
11590 || (TREE_CODE (type
) == VECTOR_TYPE
11591 && TREE_TYPE (type
) == TREE_TYPE (TREE_TYPE (arg0
)))))
11593 tree eltype
= TREE_TYPE (TREE_TYPE (arg0
));
11594 unsigned HOST_WIDE_INT width
= tree_to_uhwi (TYPE_SIZE (eltype
));
11595 unsigned HOST_WIDE_INT n
= tree_to_uhwi (arg1
);
11596 unsigned HOST_WIDE_INT idx
= tree_to_uhwi (op2
);
11599 && (idx
% width
) == 0
11600 && (n
% width
) == 0
11601 && ((idx
+ n
) / width
) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)))
11606 if (TREE_CODE (arg0
) == VECTOR_CST
)
11609 return VECTOR_CST_ELT (arg0
, idx
);
11611 tree
*vals
= XALLOCAVEC (tree
, n
);
11612 for (unsigned i
= 0; i
< n
; ++i
)
11613 vals
[i
] = VECTOR_CST_ELT (arg0
, idx
+ i
);
11614 return build_vector (type
, vals
);
11619 /* On constants we can use native encode/interpret to constant
11620 fold (nearly) all BIT_FIELD_REFs. */
11621 if (CONSTANT_CLASS_P (arg0
)
11622 && can_native_interpret_type_p (type
)
11623 && BITS_PER_UNIT
== 8)
11625 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (op2
);
11626 unsigned HOST_WIDE_INT bitsize
= tree_to_uhwi (op1
);
11627 /* Limit us to a reasonable amount of work. To relax the
11628 other limitations we need bit-shifting of the buffer
11629 and rounding up the size. */
11630 if (bitpos
% BITS_PER_UNIT
== 0
11631 && bitsize
% BITS_PER_UNIT
== 0
11632 && bitsize
<= MAX_BITSIZE_MODE_ANY_MODE
)
11634 unsigned char b
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
11635 unsigned HOST_WIDE_INT len
11636 = native_encode_expr (arg0
, b
, bitsize
/ BITS_PER_UNIT
,
11637 bitpos
/ BITS_PER_UNIT
);
11639 && len
* BITS_PER_UNIT
>= bitsize
)
11641 tree v
= native_interpret_expr (type
, b
,
11642 bitsize
/ BITS_PER_UNIT
);
11652 /* For integers we can decompose the FMA if possible. */
11653 if (TREE_CODE (arg0
) == INTEGER_CST
11654 && TREE_CODE (arg1
) == INTEGER_CST
)
11655 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
11656 const_binop (MULT_EXPR
, arg0
, arg1
), arg2
);
11657 if (integer_zerop (arg2
))
11658 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
11660 return fold_fma (loc
, type
, arg0
, arg1
, arg2
);
11662 case VEC_PERM_EXPR
:
11663 if (TREE_CODE (arg2
) == VECTOR_CST
)
11665 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
, mask
, mask2
;
11666 unsigned char *sel
= XALLOCAVEC (unsigned char, 2 * nelts
);
11667 unsigned char *sel2
= sel
+ nelts
;
11668 bool need_mask_canon
= false;
11669 bool need_mask_canon2
= false;
11670 bool all_in_vec0
= true;
11671 bool all_in_vec1
= true;
11672 bool maybe_identity
= true;
11673 bool single_arg
= (op0
== op1
);
11674 bool changed
= false;
11676 mask2
= 2 * nelts
- 1;
11677 mask
= single_arg
? (nelts
- 1) : mask2
;
11678 gcc_assert (nelts
== VECTOR_CST_NELTS (arg2
));
11679 for (i
= 0; i
< nelts
; i
++)
11681 tree val
= VECTOR_CST_ELT (arg2
, i
);
11682 if (TREE_CODE (val
) != INTEGER_CST
)
11685 /* Make sure that the perm value is in an acceptable
11688 need_mask_canon
|= wi::gtu_p (t
, mask
);
11689 need_mask_canon2
|= wi::gtu_p (t
, mask2
);
11690 sel
[i
] = t
.to_uhwi () & mask
;
11691 sel2
[i
] = t
.to_uhwi () & mask2
;
11693 if (sel
[i
] < nelts
)
11694 all_in_vec1
= false;
11696 all_in_vec0
= false;
11698 if ((sel
[i
] & (nelts
-1)) != i
)
11699 maybe_identity
= false;
11702 if (maybe_identity
)
11712 else if (all_in_vec1
)
11715 for (i
= 0; i
< nelts
; i
++)
11717 need_mask_canon
= true;
11720 if ((TREE_CODE (op0
) == VECTOR_CST
11721 || TREE_CODE (op0
) == CONSTRUCTOR
)
11722 && (TREE_CODE (op1
) == VECTOR_CST
11723 || TREE_CODE (op1
) == CONSTRUCTOR
))
11725 tree t
= fold_vec_perm (type
, op0
, op1
, sel
);
11726 if (t
!= NULL_TREE
)
11730 if (op0
== op1
&& !single_arg
)
11733 /* Some targets are deficient and fail to expand a single
11734 argument permutation while still allowing an equivalent
11735 2-argument version. */
11736 if (need_mask_canon
&& arg2
== op2
11737 && !can_vec_perm_p (TYPE_MODE (type
), false, sel
)
11738 && can_vec_perm_p (TYPE_MODE (type
), false, sel2
))
11740 need_mask_canon
= need_mask_canon2
;
11744 if (need_mask_canon
&& arg2
== op2
)
11746 tree
*tsel
= XALLOCAVEC (tree
, nelts
);
11747 tree eltype
= TREE_TYPE (TREE_TYPE (arg2
));
11748 for (i
= 0; i
< nelts
; i
++)
11749 tsel
[i
] = build_int_cst (eltype
, sel
[i
]);
11750 op2
= build_vector (TREE_TYPE (arg2
), tsel
);
11755 return build3_loc (loc
, VEC_PERM_EXPR
, type
, op0
, op1
, op2
);
11759 case BIT_INSERT_EXPR
:
11760 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
11761 if (TREE_CODE (arg0
) == INTEGER_CST
11762 && TREE_CODE (arg1
) == INTEGER_CST
)
11764 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (op2
);
11765 unsigned bitsize
= TYPE_PRECISION (TREE_TYPE (arg1
));
11766 wide_int tem
= wi::bit_and (arg0
,
11767 wi::shifted_mask (bitpos
, bitsize
, true,
11768 TYPE_PRECISION (type
)));
11770 = wi::lshift (wi::zext (wi::to_wide (arg1
, TYPE_PRECISION (type
)),
11772 return wide_int_to_tree (type
, wi::bit_or (tem
, tem2
));
11774 else if (TREE_CODE (arg0
) == VECTOR_CST
11775 && CONSTANT_CLASS_P (arg1
)
11776 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0
)),
11779 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (op2
);
11780 unsigned HOST_WIDE_INT elsize
11781 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1
)));
11782 if (bitpos
% elsize
== 0)
11784 unsigned k
= bitpos
/ elsize
;
11785 if (operand_equal_p (VECTOR_CST_ELT (arg0
, k
), arg1
, 0))
11789 tree
*elts
= XALLOCAVEC (tree
, TYPE_VECTOR_SUBPARTS (type
));
11790 memcpy (elts
, VECTOR_CST_ELTS (arg0
),
11791 sizeof (tree
) * TYPE_VECTOR_SUBPARTS (type
));
11793 return build_vector (type
, elts
);
11801 } /* switch (code) */
11804 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11805 of an array (or vector). */
11808 get_array_ctor_element_at_index (tree ctor
, offset_int access_index
)
11810 tree index_type
= NULL_TREE
;
11811 offset_int low_bound
= 0;
11813 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
)
11815 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (ctor
));
11816 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
11818 /* Static constructors for variably sized objects makes no sense. */
11819 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type
)) == INTEGER_CST
);
11820 index_type
= TREE_TYPE (TYPE_MIN_VALUE (domain_type
));
11821 low_bound
= wi::to_offset (TYPE_MIN_VALUE (domain_type
));
11826 access_index
= wi::ext (access_index
, TYPE_PRECISION (index_type
),
11827 TYPE_SIGN (index_type
));
11829 offset_int index
= low_bound
- 1;
11831 index
= wi::ext (index
, TYPE_PRECISION (index_type
),
11832 TYPE_SIGN (index_type
));
11834 offset_int max_index
;
11835 unsigned HOST_WIDE_INT cnt
;
11838 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
, cval
)
11840 /* Array constructor might explicitly set index, or specify a range,
11841 or leave index NULL meaning that it is next index after previous
11845 if (TREE_CODE (cfield
) == INTEGER_CST
)
11846 max_index
= index
= wi::to_offset (cfield
);
11849 gcc_assert (TREE_CODE (cfield
) == RANGE_EXPR
);
11850 index
= wi::to_offset (TREE_OPERAND (cfield
, 0));
11851 max_index
= wi::to_offset (TREE_OPERAND (cfield
, 1));
11858 index
= wi::ext (index
, TYPE_PRECISION (index_type
),
11859 TYPE_SIGN (index_type
));
11863 /* Do we have match? */
11864 if (wi::cmpu (access_index
, index
) >= 0
11865 && wi::cmpu (access_index
, max_index
) <= 0)
11871 /* Perform constant folding and related simplification of EXPR.
11872 The related simplifications include x*1 => x, x*0 => 0, etc.,
11873 and application of the associative law.
11874 NOP_EXPR conversions may be removed freely (as long as we
11875 are careful not to change the type of the overall expression).
11876 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11877 but we can constant-fold them if they have constant operands. */
11879 #ifdef ENABLE_FOLD_CHECKING
11880 # define fold(x) fold_1 (x)
11881 static tree
fold_1 (tree
);
11887 const tree t
= expr
;
11888 enum tree_code code
= TREE_CODE (t
);
11889 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
11891 location_t loc
= EXPR_LOCATION (expr
);
11893 /* Return right away if a constant. */
11894 if (kind
== tcc_constant
)
11897 /* CALL_EXPR-like objects with variable numbers of operands are
11898 treated specially. */
11899 if (kind
== tcc_vl_exp
)
11901 if (code
== CALL_EXPR
)
11903 tem
= fold_call_expr (loc
, expr
, false);
11904 return tem
? tem
: expr
;
11909 if (IS_EXPR_CODE_CLASS (kind
))
11911 tree type
= TREE_TYPE (t
);
11912 tree op0
, op1
, op2
;
11914 switch (TREE_CODE_LENGTH (code
))
11917 op0
= TREE_OPERAND (t
, 0);
11918 tem
= fold_unary_loc (loc
, code
, type
, op0
);
11919 return tem
? tem
: expr
;
11921 op0
= TREE_OPERAND (t
, 0);
11922 op1
= TREE_OPERAND (t
, 1);
11923 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
11924 return tem
? tem
: expr
;
11926 op0
= TREE_OPERAND (t
, 0);
11927 op1
= TREE_OPERAND (t
, 1);
11928 op2
= TREE_OPERAND (t
, 2);
11929 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
11930 return tem
? tem
: expr
;
11940 tree op0
= TREE_OPERAND (t
, 0);
11941 tree op1
= TREE_OPERAND (t
, 1);
11943 if (TREE_CODE (op1
) == INTEGER_CST
11944 && TREE_CODE (op0
) == CONSTRUCTOR
11945 && ! type_contains_placeholder_p (TREE_TYPE (op0
)))
11947 tree val
= get_array_ctor_element_at_index (op0
,
11948 wi::to_offset (op1
));
11956 /* Return a VECTOR_CST if possible. */
11959 tree type
= TREE_TYPE (t
);
11960 if (TREE_CODE (type
) != VECTOR_TYPE
)
11965 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t
), i
, val
)
11966 if (! CONSTANT_CLASS_P (val
))
11969 return build_vector_from_ctor (type
, CONSTRUCTOR_ELTS (t
));
11973 return fold (DECL_INITIAL (t
));
11977 } /* switch (code) */
11980 #ifdef ENABLE_FOLD_CHECKING
11983 static void fold_checksum_tree (const_tree
, struct md5_ctx
*,
11984 hash_table
<nofree_ptr_hash
<const tree_node
> > *);
11985 static void fold_check_failed (const_tree
, const_tree
);
11986 void print_fold_checksum (const_tree
);
11988 /* When --enable-checking=fold, compute a digest of expr before
11989 and after actual fold call to see if fold did not accidentally
11990 change original expr. */
11996 struct md5_ctx ctx
;
11997 unsigned char checksum_before
[16], checksum_after
[16];
11998 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
12000 md5_init_ctx (&ctx
);
12001 fold_checksum_tree (expr
, &ctx
, &ht
);
12002 md5_finish_ctx (&ctx
, checksum_before
);
12005 ret
= fold_1 (expr
);
12007 md5_init_ctx (&ctx
);
12008 fold_checksum_tree (expr
, &ctx
, &ht
);
12009 md5_finish_ctx (&ctx
, checksum_after
);
12011 if (memcmp (checksum_before
, checksum_after
, 16))
12012 fold_check_failed (expr
, ret
);
12018 print_fold_checksum (const_tree expr
)
12020 struct md5_ctx ctx
;
12021 unsigned char checksum
[16], cnt
;
12022 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
12024 md5_init_ctx (&ctx
);
12025 fold_checksum_tree (expr
, &ctx
, &ht
);
12026 md5_finish_ctx (&ctx
, checksum
);
12027 for (cnt
= 0; cnt
< 16; ++cnt
)
12028 fprintf (stderr
, "%02x", checksum
[cnt
]);
12029 putc ('\n', stderr
);
12033 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED
, const_tree ret ATTRIBUTE_UNUSED
)
12035 internal_error ("fold check: original tree changed by fold");
12039 fold_checksum_tree (const_tree expr
, struct md5_ctx
*ctx
,
12040 hash_table
<nofree_ptr_hash
<const tree_node
> > *ht
)
12042 const tree_node
**slot
;
12043 enum tree_code code
;
12044 union tree_node buf
;
12050 slot
= ht
->find_slot (expr
, INSERT
);
12054 code
= TREE_CODE (expr
);
12055 if (TREE_CODE_CLASS (code
) == tcc_declaration
12056 && HAS_DECL_ASSEMBLER_NAME_P (expr
))
12058 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12059 memcpy ((char *) &buf
, expr
, tree_size (expr
));
12060 SET_DECL_ASSEMBLER_NAME ((tree
)&buf
, NULL
);
12061 buf
.decl_with_vis
.symtab_node
= NULL
;
12062 expr
= (tree
) &buf
;
12064 else if (TREE_CODE_CLASS (code
) == tcc_type
12065 && (TYPE_POINTER_TO (expr
)
12066 || TYPE_REFERENCE_TO (expr
)
12067 || TYPE_CACHED_VALUES_P (expr
)
12068 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr
)
12069 || TYPE_NEXT_VARIANT (expr
)
12070 || TYPE_ALIAS_SET_KNOWN_P (expr
)))
12072 /* Allow these fields to be modified. */
12074 memcpy ((char *) &buf
, expr
, tree_size (expr
));
12075 expr
= tmp
= (tree
) &buf
;
12076 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp
) = 0;
12077 TYPE_POINTER_TO (tmp
) = NULL
;
12078 TYPE_REFERENCE_TO (tmp
) = NULL
;
12079 TYPE_NEXT_VARIANT (tmp
) = NULL
;
12080 TYPE_ALIAS_SET (tmp
) = -1;
12081 if (TYPE_CACHED_VALUES_P (tmp
))
12083 TYPE_CACHED_VALUES_P (tmp
) = 0;
12084 TYPE_CACHED_VALUES (tmp
) = NULL
;
12087 md5_process_bytes (expr
, tree_size (expr
), ctx
);
12088 if (CODE_CONTAINS_STRUCT (code
, TS_TYPED
))
12089 fold_checksum_tree (TREE_TYPE (expr
), ctx
, ht
);
12090 if (TREE_CODE_CLASS (code
) != tcc_type
12091 && TREE_CODE_CLASS (code
) != tcc_declaration
12092 && code
!= TREE_LIST
12093 && code
!= SSA_NAME
12094 && CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
12095 fold_checksum_tree (TREE_CHAIN (expr
), ctx
, ht
);
12096 switch (TREE_CODE_CLASS (code
))
12102 md5_process_bytes (TREE_STRING_POINTER (expr
),
12103 TREE_STRING_LENGTH (expr
), ctx
);
12106 fold_checksum_tree (TREE_REALPART (expr
), ctx
, ht
);
12107 fold_checksum_tree (TREE_IMAGPART (expr
), ctx
, ht
);
12110 for (i
= 0; i
< (int) VECTOR_CST_NELTS (expr
); ++i
)
12111 fold_checksum_tree (VECTOR_CST_ELT (expr
, i
), ctx
, ht
);
12117 case tcc_exceptional
:
12121 fold_checksum_tree (TREE_PURPOSE (expr
), ctx
, ht
);
12122 fold_checksum_tree (TREE_VALUE (expr
), ctx
, ht
);
12123 expr
= TREE_CHAIN (expr
);
12124 goto recursive_label
;
12127 for (i
= 0; i
< TREE_VEC_LENGTH (expr
); ++i
)
12128 fold_checksum_tree (TREE_VEC_ELT (expr
, i
), ctx
, ht
);
12134 case tcc_expression
:
12135 case tcc_reference
:
12136 case tcc_comparison
:
12139 case tcc_statement
:
12141 len
= TREE_OPERAND_LENGTH (expr
);
12142 for (i
= 0; i
< len
; ++i
)
12143 fold_checksum_tree (TREE_OPERAND (expr
, i
), ctx
, ht
);
12145 case tcc_declaration
:
12146 fold_checksum_tree (DECL_NAME (expr
), ctx
, ht
);
12147 fold_checksum_tree (DECL_CONTEXT (expr
), ctx
, ht
);
12148 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_COMMON
))
12150 fold_checksum_tree (DECL_SIZE (expr
), ctx
, ht
);
12151 fold_checksum_tree (DECL_SIZE_UNIT (expr
), ctx
, ht
);
12152 fold_checksum_tree (DECL_INITIAL (expr
), ctx
, ht
);
12153 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr
), ctx
, ht
);
12154 fold_checksum_tree (DECL_ATTRIBUTES (expr
), ctx
, ht
);
12157 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_NON_COMMON
))
12159 if (TREE_CODE (expr
) == FUNCTION_DECL
)
12161 fold_checksum_tree (DECL_VINDEX (expr
), ctx
, ht
);
12162 fold_checksum_tree (DECL_ARGUMENTS (expr
), ctx
, ht
);
12164 fold_checksum_tree (DECL_RESULT_FLD (expr
), ctx
, ht
);
12168 if (TREE_CODE (expr
) == ENUMERAL_TYPE
)
12169 fold_checksum_tree (TYPE_VALUES (expr
), ctx
, ht
);
12170 fold_checksum_tree (TYPE_SIZE (expr
), ctx
, ht
);
12171 fold_checksum_tree (TYPE_SIZE_UNIT (expr
), ctx
, ht
);
12172 fold_checksum_tree (TYPE_ATTRIBUTES (expr
), ctx
, ht
);
12173 fold_checksum_tree (TYPE_NAME (expr
), ctx
, ht
);
12174 if (INTEGRAL_TYPE_P (expr
)
12175 || SCALAR_FLOAT_TYPE_P (expr
))
12177 fold_checksum_tree (TYPE_MIN_VALUE (expr
), ctx
, ht
);
12178 fold_checksum_tree (TYPE_MAX_VALUE (expr
), ctx
, ht
);
12180 fold_checksum_tree (TYPE_MAIN_VARIANT (expr
), ctx
, ht
);
12181 if (TREE_CODE (expr
) == RECORD_TYPE
12182 || TREE_CODE (expr
) == UNION_TYPE
12183 || TREE_CODE (expr
) == QUAL_UNION_TYPE
)
12184 fold_checksum_tree (TYPE_BINFO (expr
), ctx
, ht
);
12185 fold_checksum_tree (TYPE_CONTEXT (expr
), ctx
, ht
);
12192 /* Helper function for outputting the checksum of a tree T. When
12193 debugging with gdb, you can "define mynext" to be "next" followed
12194 by "call debug_fold_checksum (op0)", then just trace down till the
12197 DEBUG_FUNCTION
void
12198 debug_fold_checksum (const_tree t
)
12201 unsigned char checksum
[16];
12202 struct md5_ctx ctx
;
12203 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
12205 md5_init_ctx (&ctx
);
12206 fold_checksum_tree (t
, &ctx
, &ht
);
12207 md5_finish_ctx (&ctx
, checksum
);
12210 for (i
= 0; i
< 16; i
++)
12211 fprintf (stderr
, "%d ", checksum
[i
]);
12213 fprintf (stderr
, "\n");
12218 /* Fold a unary tree expression with code CODE of type TYPE with an
12219 operand OP0. LOC is the location of the resulting expression.
12220 Return a folded expression if successful. Otherwise, return a tree
12221 expression with code CODE of type TYPE with an operand OP0. */
/* NOTE(review): extraction-garbled text below (statements split across
   lines, original line numbers fused in); code kept byte-identical.  */
12224 fold_build1_loc (location_t loc
,
12225 enum tree_code code
, tree type
, tree op0 MEM_STAT_DECL
)
12228 #ifdef ENABLE_FOLD_CHECKING
12229 unsigned char checksum_before
[16], checksum_after
[16];
12230 struct md5_ctx ctx
;
12231 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
/* Record OP0's checksum before folding so we can detect in-place
   modification of the operand.  */
12233 md5_init_ctx (&ctx
);
12234 fold_checksum_tree (op0
, &ctx
, &ht
);
12235 md5_finish_ctx (&ctx
, checksum_before
);
/* Try to fold; line 12241 is presumably the fallback that builds the
   raw expression when folding failed (its guard is elided here --
   TODO confirm against the full source).  */
12239 tem
= fold_unary_loc (loc
, code
, type
, op0
);
12241 tem
= build1_loc (loc
, code
, type
, op0 PASS_MEM_STAT
);
12243 #ifdef ENABLE_FOLD_CHECKING
/* Re-checksum OP0 and report if folding mutated it.  */
12244 md5_init_ctx (&ctx
);
12245 fold_checksum_tree (op0
, &ctx
, &ht
);
12246 md5_finish_ctx (&ctx
, checksum_after
);
12248 if (memcmp (checksum_before
, checksum_after
, 16))
12249 fold_check_failed (op0
, tem
);
12254 /* Fold a binary tree expression with code CODE of type TYPE with
12255 operands OP0 and OP1. LOC is the location of the resulting
12256 expression. Return a folded expression if successful. Otherwise,
12257 return a tree expression with code CODE of type TYPE with operands
12261 fold_build2_loc (location_t loc
,
12262 enum tree_code code
, tree type
, tree op0
, tree op1
12266 #ifdef ENABLE_FOLD_CHECKING
12267 unsigned char checksum_before_op0
[16],
12268 checksum_before_op1
[16],
12269 checksum_after_op0
[16],
12270 checksum_after_op1
[16];
12271 struct md5_ctx ctx
;
12272 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
/* Checksum both operands before folding.  */
12274 md5_init_ctx (&ctx
);
12275 fold_checksum_tree (op0
, &ctx
, &ht
);
12276 md5_finish_ctx (&ctx
, checksum_before_op0
);
12279 md5_init_ctx (&ctx
);
12280 fold_checksum_tree (op1
, &ctx
, &ht
);
12281 md5_finish_ctx (&ctx
, checksum_before_op1
);
/* Fold; line 12287 is presumably the fallback when folding failed
   (guard elided from this view -- TODO confirm).  */
12285 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
12287 tem
= build2_loc (loc
, code
, type
, op0
, op1 PASS_MEM_STAT
);
12289 #ifdef ENABLE_FOLD_CHECKING
/* Verify neither operand was modified in place by folding.  */
12290 md5_init_ctx (&ctx
);
12291 fold_checksum_tree (op0
, &ctx
, &ht
);
12292 md5_finish_ctx (&ctx
, checksum_after_op0
);
12295 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
12296 fold_check_failed (op0
, tem
);
12298 md5_init_ctx (&ctx
);
12299 fold_checksum_tree (op1
, &ctx
, &ht
);
12300 md5_finish_ctx (&ctx
, checksum_after_op1
);
12302 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
12303 fold_check_failed (op1
, tem
);
12308 /* Fold a ternary tree expression with code CODE of type TYPE with
12309 operands OP0, OP1, and OP2. Return a folded expression if
12310 successful. Otherwise, return a tree expression with code CODE of
12311 type TYPE with operands OP0, OP1, and OP2. */
12314 fold_build3_loc (location_t loc
, enum tree_code code
, tree type
,
12315 tree op0
, tree op1
, tree op2 MEM_STAT_DECL
)
12318 #ifdef ENABLE_FOLD_CHECKING
12319 unsigned char checksum_before_op0
[16],
12320 checksum_before_op1
[16],
12321 checksum_before_op2
[16],
12322 checksum_after_op0
[16],
12323 checksum_after_op1
[16],
12324 checksum_after_op2
[16];
12325 struct md5_ctx ctx
;
12326 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
/* Checksum the three operands before folding.  */
12328 md5_init_ctx (&ctx
);
12329 fold_checksum_tree (op0
, &ctx
, &ht
);
12330 md5_finish_ctx (&ctx
, checksum_before_op0
);
12333 md5_init_ctx (&ctx
);
12334 fold_checksum_tree (op1
, &ctx
, &ht
);
12335 md5_finish_ctx (&ctx
, checksum_before_op1
);
12338 md5_init_ctx (&ctx
);
12339 fold_checksum_tree (op2
, &ctx
, &ht
);
12340 md5_finish_ctx (&ctx
, checksum_before_op2
);
/* CALL_EXPRs (tcc_vl_exp) take a variable operand count and must not
   come through the fixed 3-operand entry point.  */
12344 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
12345 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
12347 tem
= build3_loc (loc
, code
, type
, op0
, op1
, op2 PASS_MEM_STAT
);
12349 #ifdef ENABLE_FOLD_CHECKING
/* Verify none of the operands was modified in place by folding.  */
12350 md5_init_ctx (&ctx
);
12351 fold_checksum_tree (op0
, &ctx
, &ht
);
12352 md5_finish_ctx (&ctx
, checksum_after_op0
);
12355 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
12356 fold_check_failed (op0
, tem
);
12358 md5_init_ctx (&ctx
);
12359 fold_checksum_tree (op1
, &ctx
, &ht
);
12360 md5_finish_ctx (&ctx
, checksum_after_op1
);
12363 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
12364 fold_check_failed (op1
, tem
);
12366 md5_init_ctx (&ctx
);
12367 fold_checksum_tree (op2
, &ctx
, &ht
);
12368 md5_finish_ctx (&ctx
, checksum_after_op2
);
12370 if (memcmp (checksum_before_op2
, checksum_after_op2
, 16))
12371 fold_check_failed (op2
, tem
);
12376 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
12377 arguments in ARGARRAY, and a null static chain.
12378 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12379 of type TYPE from the given operands as constructed by build_call_array. */
12382 fold_build_call_array_loc (location_t loc
, tree type
, tree fn
,
12383 int nargs
, tree
*argarray
)
12386 #ifdef ENABLE_FOLD_CHECKING
12387 unsigned char checksum_before_fn
[16],
12388 checksum_before_arglist
[16],
12389 checksum_after_fn
[16],
12390 checksum_after_arglist
[16];
12391 struct md5_ctx ctx
;
12392 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
/* Checksum the callee and the whole argument list before folding.  */
12395 md5_init_ctx (&ctx
);
12396 fold_checksum_tree (fn
, &ctx
, &ht
);
12397 md5_finish_ctx (&ctx
, checksum_before_fn
);
12400 md5_init_ctx (&ctx
);
12401 for (i
= 0; i
< nargs
; i
++)
12402 fold_checksum_tree (argarray
[i
], &ctx
, &ht
);
12403 md5_finish_ctx (&ctx
, checksum_before_arglist
);
/* Fold the builtin call; line 12409 is presumably the fallback that
   builds a plain CALL_EXPR (guard elided -- TODO confirm).  */
12407 tem
= fold_builtin_call_array (loc
, type
, fn
, nargs
, argarray
);
12409 tem
= build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
12411 #ifdef ENABLE_FOLD_CHECKING
/* Verify neither the callee nor any argument was modified in place.  */
12412 md5_init_ctx (&ctx
);
12413 fold_checksum_tree (fn
, &ctx
, &ht
);
12414 md5_finish_ctx (&ctx
, checksum_after_fn
);
12417 if (memcmp (checksum_before_fn
, checksum_after_fn
, 16))
12418 fold_check_failed (fn
, tem
);
12420 md5_init_ctx (&ctx
);
12421 for (i
= 0; i
< nargs
; i
++)
12422 fold_checksum_tree (argarray
[i
], &ctx
, &ht
);
12423 md5_finish_ctx (&ctx
, checksum_after_arglist
);
12425 if (memcmp (checksum_before_arglist
, checksum_after_arglist
, 16))
12426 fold_check_failed (NULL_TREE
, tem
);
12431 /* Perform constant folding and related simplification of initializer
12432 expression EXPR. These behave identically to "fold_buildN" but ignore
12433 potential run-time traps and exceptions that fold must preserve. */
/* START_FOLD_INIT saves the trap/rounding-related flags, clears them,
   and sets folding_initializer; END_FOLD_INIT restores everything.
   NOTE(review): the line clearing flag_trapv (original line 12444) is
   elided from this view -- verify against the full source.  */
12435 #define START_FOLD_INIT \
12436 int saved_signaling_nans = flag_signaling_nans;\
12437 int saved_trapping_math = flag_trapping_math;\
12438 int saved_rounding_math = flag_rounding_math;\
12439 int saved_trapv = flag_trapv;\
12440 int saved_folding_initializer = folding_initializer;\
12441 flag_signaling_nans = 0;\
12442 flag_trapping_math = 0;\
12443 flag_rounding_math = 0;\
12445 folding_initializer = 1;
/* Restore the flags saved by START_FOLD_INIT.  */
12447 #define END_FOLD_INIT \
12448 flag_signaling_nans = saved_signaling_nans;\
12449 flag_trapping_math = saved_trapping_math;\
12450 flag_rounding_math = saved_rounding_math;\
12451 flag_trapv = saved_trapv;\
12452 folding_initializer = saved_folding_initializer;
/* Initializer-context wrapper around fold_build1_loc: runs the fold
   between START_FOLD_INIT/END_FOLD_INIT (those statements are elided
   from this view) so run-time traps need not be preserved.  */
12455 fold_build1_initializer_loc (location_t loc
, enum tree_code code
,
12456 tree type
, tree op
)
12461 result
= fold_build1_loc (loc
, code
, type
, op
);
/* Initializer-context wrapper around fold_build2_loc; see
   fold_build1_initializer_loc.  START/END_FOLD_INIT statements are
   elided from this view.  */
12468 fold_build2_initializer_loc (location_t loc
, enum tree_code code
,
12469 tree type
, tree op0
, tree op1
)
12474 result
= fold_build2_loc (loc
, code
, type
, op0
, op1
);
/* Initializer-context wrapper around fold_build_call_array_loc; see
   fold_build1_initializer_loc.  START/END_FOLD_INIT statements are
   elided from this view.  */
12481 fold_build_call_array_initializer_loc (location_t loc
, tree type
, tree fn
,
12482 int nargs
, tree
*argarray
)
12487 result
= fold_build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
/* The helper macros are local to the initializer wrappers above.  */
12493 #undef START_FOLD_INIT
12494 #undef END_FOLD_INIT
12496 /* Determine if first argument is a multiple of second argument. Return 0 if
12497 it is not, or we cannot easily determined it to be.
12499 An example of the sort of thing we care about (at this point; this routine
12500 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12501 fold cases do now) is discovering that
12503 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12509 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12511 This code also handles discovering that
12513 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12515 is a multiple of 8 so we don't have to worry about dealing with a
12516 possible remainder.
12518 Note that we *look* inside a SAVE_EXPR only to determine how it was
12519 calculated; it is not safe for fold to do much of anything else with the
12520 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12521 at run time. For example, the latter example above *cannot* be implemented
12522 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12523 evaluation time of the original SAVE_EXPR is not necessarily the same at
12524 the time the new expression is evaluated. The only optimization of this
12525 sort that would be valid is changing
12527 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12531 SAVE_EXPR (I) * SAVE_EXPR (J)
12533 (where the same SAVE_EXPR (J) is used in the original and the
12534 transformed version). */
/* NOTE(review): the switch below is heavily elided in this view -- the
   case labels (MULT_EXPR, MINUS_EXPR, PLUS_EXPR, LSHIFT_EXPR, NOP_EXPR,
   COND_EXPR, INTEGER_CST, SSA_NAME, ...) and several returns are on
   lines not shown.  Code kept byte-identical.  */
12537 multiple_of_p (tree type
, const_tree top
, const_tree bottom
)
/* Anything is trivially a multiple of itself.  */
12542 if (operand_equal_p (top
, bottom
, 0))
12545 if (TREE_CODE (type
) != INTEGER_TYPE
)
12548 switch (TREE_CODE (top
))
12551 /* Bitwise and provides a power of two multiple. If the mask is
12552 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12553 if (!integer_pow2p (bottom
))
12558 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
)
12559 || multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
));
12562 /* It is impossible to prove if op0 - op1 is multiple of bottom
12563 precisely, so be conservative here checking if both op0 and op1
12564 are multiple of bottom. Note we check the second operand first
12565 since it's usually simpler. */
12566 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
)
12567 && multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
));
12570 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
12571 as op0 - 3 if the expression has unsigned type. For example,
12572 (X / 3) + 0xfffffffd is multiple of 3, but 0xfffffffd is not. */
12573 op1
= TREE_OPERAND (top
, 1);
12574 if (TYPE_UNSIGNED (type
)
12575 && TREE_CODE (op1
) == INTEGER_CST
&& tree_int_cst_sign_bit (op1
))
12576 op1
= fold_build1 (NEGATE_EXPR
, type
, op1
);
12577 return (multiple_of_p (type
, op1
, bottom
)
12578 && multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
));
/* Presumably the LSHIFT_EXPR case: expand the shift into a multiply by
   a power of two when that cannot overflow (case label elided).  */
12581 if (TREE_CODE (TREE_OPERAND (top
, 1)) == INTEGER_CST
)
12583 op1
= TREE_OPERAND (top
, 1);
12584 /* const_binop may not detect overflow correctly,
12585 so check for it explicitly here. */
12586 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node
)), op1
)
12587 && 0 != (t1
= fold_convert (type
,
12588 const_binop (LSHIFT_EXPR
,
12591 && !TREE_OVERFLOW (t1
))
12592 return multiple_of_p (type
, t1
, bottom
);
12597 /* Can't handle conversions from non-integral or wider integral type. */
12598 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top
, 0))) != INTEGER_TYPE
)
12599 || (TYPE_PRECISION (type
)
12600 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top
, 0)))))
12606 return multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
);
/* Presumably the COND_EXPR case: both arms must be multiples.  */
12609 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
)
12610 && multiple_of_p (type
, TREE_OPERAND (top
, 2), bottom
));
/* Presumably the INTEGER_CST case: fall back to wide-int exact
   divisibility, rejecting signed/unsigned mismatches.  */
12613 if (TREE_CODE (bottom
) != INTEGER_CST
12614 || integer_zerop (bottom
)
12615 || (TYPE_UNSIGNED (type
)
12616 && (tree_int_cst_sgn (top
) < 0
12617 || tree_int_cst_sgn (bottom
) < 0)))
12619 return wi::multiple_of_p (wi::to_widest (top
), wi::to_widest (bottom
),
/* Presumably the SSA_NAME case: inspect TOP's defining statement.  */
12623 if (TREE_CODE (bottom
) == INTEGER_CST
12624 && (stmt
= SSA_NAME_DEF_STMT (top
)) != NULL
12625 && gimple_code (stmt
) == GIMPLE_ASSIGN
)
12627 enum tree_code code
= gimple_assign_rhs_code (stmt
);
12629 /* Check for special cases to see if top is defined as multiple
12632 top = (X & ~(bottom - 1) ; bottom is power of 2
12638 if (code
== BIT_AND_EXPR
12639 && (op2
= gimple_assign_rhs2 (stmt
)) != NULL_TREE
12640 && TREE_CODE (op2
) == INTEGER_CST
12641 && integer_pow2p (bottom
)
12642 && wi::multiple_of_p (wi::to_widest (op2
),
12643 wi::to_widest (bottom
), UNSIGNED
))
/* top = Y - (Y % bottom) also makes top a multiple of bottom.  */
12646 op1
= gimple_assign_rhs1 (stmt
);
12647 if (code
== MINUS_EXPR
12648 && (op2
= gimple_assign_rhs2 (stmt
)) != NULL_TREE
12649 && TREE_CODE (op2
) == SSA_NAME
12650 && (stmt
= SSA_NAME_DEF_STMT (op2
)) != NULL
12651 && gimple_code (stmt
) == GIMPLE_ASSIGN
12652 && (code
= gimple_assign_rhs_code (stmt
)) == TRUNC_MOD_EXPR
12653 && operand_equal_p (op1
, gimple_assign_rhs1 (stmt
), 0)
12654 && operand_equal_p (bottom
, gimple_assign_rhs2 (stmt
), 0))
/* Poison direct recursion: recursive calls must go through RECURSE so
   the depth counter is threaded through and bounded.  */
12665 #define tree_expr_nonnegative_warnv_p(X, Y) \
12666 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12668 #define RECURSE(X) \
12669 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
12671 /* Return true if CODE or TYPE is known to be non-negative. */
12674 tree_simple_nonnegative_warnv_p (enum tree_code code
, tree type
)
12676 if ((TYPE_PRECISION (type
) != 1 || TYPE_UNSIGNED (type
))
12677 && truth_value_p (code
))
12678 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12679 have a signed:1 type (where the value is -1 and 0). */
12684 /* Return true if (CODE OP0) is known to be non-negative. If the return
12685 value is based on the assumption that signed overflow is undefined,
12686 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12687 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
/* NOTE(review): case labels (e.g. ABS_EXPR, NOP_EXPR) are on elided
   lines; code kept byte-identical.  */
12690 tree_unary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
12691 bool *strict_overflow_p
, int depth
)
12693 if (TYPE_UNSIGNED (type
))
12699 /* We can't return 1 if flag_wrapv is set because
12700 ABS_EXPR<INT_MIN> = INT_MIN. */
12701 if (!ANY_INTEGRAL_TYPE_P (type
))
12703 if (TYPE_OVERFLOW_UNDEFINED (type
))
/* Record that the answer relies on undefined signed overflow.  */
12705 *strict_overflow_p
= true;
12710 case NON_LVALUE_EXPR
:
12712 case FIX_TRUNC_EXPR
:
12713 return RECURSE (op0
);
/* Presumably the conversion (NOP_EXPR/CONVERT) case: reason about the
   inner and outer types of the cast.  */
12717 tree inner_type
= TREE_TYPE (op0
);
12718 tree outer_type
= type
;
12720 if (TREE_CODE (outer_type
) == REAL_TYPE
)
12722 if (TREE_CODE (inner_type
) == REAL_TYPE
)
12723 return RECURSE (op0
);
12724 if (INTEGRAL_TYPE_P (inner_type
))
12726 if (TYPE_UNSIGNED (inner_type
))
12728 return RECURSE (op0
);
12731 else if (INTEGRAL_TYPE_P (outer_type
))
12733 if (TREE_CODE (inner_type
) == REAL_TYPE
)
12734 return RECURSE (op0
);
12735 if (INTEGRAL_TYPE_P (inner_type
))
/* Widening from unsigned cannot introduce a sign bit.  */
12736 return TYPE_PRECISION (inner_type
) < TYPE_PRECISION (outer_type
)
12737 && TYPE_UNSIGNED (inner_type
);
12743 return tree_simple_nonnegative_warnv_p (code
, type
);
12746 /* We don't know sign of `t', so be conservative and return false. */
12750 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12751 value is based on the assumption that signed overflow is undefined,
12752 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12753 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
/* NOTE(review): several case labels (PLUS_EXPR, MULT_EXPR, division
   codes, ...) are on elided lines; code kept byte-identical.  */
12756 tree_binary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
12757 tree op1
, bool *strict_overflow_p
,
12760 if (TYPE_UNSIGNED (type
))
12765 case POINTER_PLUS_EXPR
:
12767 if (FLOAT_TYPE_P (type
))
12768 return RECURSE (op0
) && RECURSE (op1
);
12770 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12771 both unsigned and at least 2 bits shorter than the result. */
12772 if (TREE_CODE (type
) == INTEGER_TYPE
12773 && TREE_CODE (op0
) == NOP_EXPR
12774 && TREE_CODE (op1
) == NOP_EXPR
)
12776 tree inner1
= TREE_TYPE (TREE_OPERAND (op0
, 0));
12777 tree inner2
= TREE_TYPE (TREE_OPERAND (op1
, 0));
12778 if (TREE_CODE (inner1
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner1
)
12779 && TREE_CODE (inner2
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner2
))
12781 unsigned int prec
= MAX (TYPE_PRECISION (inner1
),
12782 TYPE_PRECISION (inner2
)) + 1;
12783 return prec
< TYPE_PRECISION (type
);
/* Presumably the MULT_EXPR case.  */
12789 if (FLOAT_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
12791 /* x * x is always non-negative for floating point x
12792 or without overflow. */
12793 if (operand_equal_p (op0
, op1
, 0)
12794 || (RECURSE (op0
) && RECURSE (op1
)))
12796 if (ANY_INTEGRAL_TYPE_P (type
)
12797 && TYPE_OVERFLOW_UNDEFINED (type
))
12798 *strict_overflow_p
= true;
12803 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12804 both unsigned and their total bits is shorter than the result. */
12805 if (TREE_CODE (type
) == INTEGER_TYPE
12806 && (TREE_CODE (op0
) == NOP_EXPR
|| TREE_CODE (op0
) == INTEGER_CST
)
12807 && (TREE_CODE (op1
) == NOP_EXPR
|| TREE_CODE (op1
) == INTEGER_CST
))
12809 tree inner0
= (TREE_CODE (op0
) == NOP_EXPR
)
12810 ? TREE_TYPE (TREE_OPERAND (op0
, 0))
12812 tree inner1
= (TREE_CODE (op1
) == NOP_EXPR
)
12813 ? TREE_TYPE (TREE_OPERAND (op1
, 0))
12816 bool unsigned0
= TYPE_UNSIGNED (inner0
);
12817 bool unsigned1
= TYPE_UNSIGNED (inner1
);
/* A non-negative constant behaves like an unsigned operand.  */
12819 if (TREE_CODE (op0
) == INTEGER_CST
)
12820 unsigned0
= unsigned0
|| tree_int_cst_sgn (op0
) >= 0;
12822 if (TREE_CODE (op1
) == INTEGER_CST
)
12823 unsigned1
= unsigned1
|| tree_int_cst_sgn (op1
) >= 0;
12825 if (TREE_CODE (inner0
) == INTEGER_TYPE
&& unsigned0
12826 && TREE_CODE (inner1
) == INTEGER_TYPE
&& unsigned1
)
12828 unsigned int precision0
= (TREE_CODE (op0
) == INTEGER_CST
)
12829 ? tree_int_cst_min_precision (op0
, UNSIGNED
)
12830 : TYPE_PRECISION (inner0
);
12832 unsigned int precision1
= (TREE_CODE (op1
) == INTEGER_CST
)
12833 ? tree_int_cst_min_precision (op1
, UNSIGNED
)
12834 : TYPE_PRECISION (inner1
);
12836 return precision0
+ precision1
< TYPE_PRECISION (type
);
/* Bit-or style case (label elided): either operand non-negative
   suffices.  */
12843 return RECURSE (op0
) || RECURSE (op1
);
12849 case TRUNC_DIV_EXPR
:
12850 case CEIL_DIV_EXPR
:
12851 case FLOOR_DIV_EXPR
:
12852 case ROUND_DIV_EXPR
:
12853 return RECURSE (op0
) && RECURSE (op1
);
12855 case TRUNC_MOD_EXPR
:
12856 return RECURSE (op0
);
12858 case FLOOR_MOD_EXPR
:
12859 return RECURSE (op1
);
12861 case CEIL_MOD_EXPR
:
12862 case ROUND_MOD_EXPR
:
12864 return tree_simple_nonnegative_warnv_p (code
, type
);
12867 /* We don't know sign of `t', so be conservative and return false. */
12871 /* Return true if T is known to be non-negative. If the return
12872 value is based on the assumption that signed overflow is undefined,
12873 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12874 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
/* NOTE(review): case labels (INTEGER_CST, REAL_CST, FIXED_CST,
   COND_EXPR, SSA_NAME) are on elided lines; code kept byte-identical.  */
12877 tree_single_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
, int depth
)
12879 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
12882 switch (TREE_CODE (t
))
12885 return tree_int_cst_sgn (t
) >= 0;
12888 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
12891 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t
));
/* COND_EXPR-style case: both arms must be non-negative.  */
12894 return RECURSE (TREE_OPERAND (t
, 1)) && RECURSE (TREE_OPERAND (t
, 2));
12897 /* Limit the depth of recursion to avoid quadratic behavior.
12898 This is expected to catch almost all occurrences in practice.
12899 If this code misses important cases that unbounded recursion
12900 would not, passes that need this information could be revised
12901 to provide it through dataflow propagation. */
12902 return (!name_registered_for_update_p (t
)
12903 && depth
< PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH
)
12904 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t
),
12905 strict_overflow_p
, depth
));
12908 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
), TREE_TYPE (t
));
12912 /* Return true if T is known to be non-negative. If the return
12913 value is based on the assumption that signed overflow is undefined,
12914 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12915 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
/* NOTE(review): large runs of CASE_CFN_* labels are elided from this
   view; code kept byte-identical.  */
12918 tree_call_nonnegative_warnv_p (tree type
, combined_fn fn
, tree arg0
, tree arg1
,
12919 bool *strict_overflow_p
, int depth
)
12940 case CFN_BUILT_IN_BSWAP32
:
12941 case CFN_BUILT_IN_BSWAP64
:
12946 /* sqrt(-0.0) is -0.0. */
12947 if (!HONOR_SIGNED_ZEROS (element_mode (type
)))
12949 return RECURSE (arg0
);
12975 CASE_CFN_NEARBYINT
:
12982 CASE_CFN_SIGNIFICAND
:
12986 /* True if the 1st argument is nonnegative. */
12987 return RECURSE (arg0
);
12990 /* True if the 1st OR 2nd arguments are nonnegative. */
12991 return RECURSE (arg0
) || RECURSE (arg1
);
12994 /* True if the 1st AND 2nd arguments are nonnegative. */
12995 return RECURSE (arg0
) && RECURSE (arg1
);
12998 /* True if the 2nd argument is nonnegative. */
12999 return RECURSE (arg1
);
13002 /* True if the 1st argument is nonnegative or the second
13003 argument is an even integer. */
13004 if (TREE_CODE (arg1
) == INTEGER_CST
13005 && (TREE_INT_CST_LOW (arg1
) & 1) == 0)
13007 return RECURSE (arg0
);
13010 /* True if the 1st argument is nonnegative or the second
13011 argument is an even integer valued real. */
13012 if (TREE_CODE (arg1
) == REAL_CST
)
13017 c
= TREE_REAL_CST (arg1
);
13018 n
= real_to_integer (&c
);
/* Check the real constant round-trips through an integer, i.e. it is
   an exact even integer value.  */
13021 REAL_VALUE_TYPE cint
;
13022 real_from_integer (&cint
, VOIDmode
, n
, SIGNED
);
13023 if (real_identical (&c
, &cint
))
13027 return RECURSE (arg0
);
13032 return tree_simple_nonnegative_warnv_p (CALL_EXPR
, type
);
13035 /* Return true if T is known to be non-negative. If the return
13036 value is based on the assumption that signed overflow is undefined,
13037 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13038 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
/* NOTE(review): case labels (TARGET_EXPR, CALL_EXPR, ...) are on
   elided lines; code kept byte-identical.  */
13041 tree_invalid_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
, int depth
)
13043 enum tree_code code
= TREE_CODE (t
);
13044 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
/* Presumably the TARGET_EXPR case: look at the initializer.  */
13051 tree temp
= TARGET_EXPR_SLOT (t
);
13052 t
= TARGET_EXPR_INITIAL (t
);
13054 /* If the initializer is non-void, then it's a normal expression
13055 that will be assigned to the slot. */
13056 if (!VOID_TYPE_P (t
))
13057 return RECURSE (t
);
13059 /* Otherwise, the initializer sets the slot in some way. One common
13060 way is an assignment statement at the end of the initializer. */
13063 if (TREE_CODE (t
) == BIND_EXPR
)
13064 t
= expr_last (BIND_EXPR_BODY (t
));
13065 else if (TREE_CODE (t
) == TRY_FINALLY_EXPR
13066 || TREE_CODE (t
) == TRY_CATCH_EXPR
)
13067 t
= expr_last (TREE_OPERAND (t
, 0));
13068 else if (TREE_CODE (t
) == STATEMENT_LIST
)
/* If the last statement assigns the slot, recurse on its RHS.  */
13073 if (TREE_CODE (t
) == MODIFY_EXPR
13074 && TREE_OPERAND (t
, 0) == temp
)
13075 return RECURSE (TREE_OPERAND (t
, 1));
/* Presumably the CALL_EXPR case: delegate to the call helper.  */
13082 tree arg0
= call_expr_nargs (t
) > 0 ? CALL_EXPR_ARG (t
, 0) : NULL_TREE
;
13083 tree arg1
= call_expr_nargs (t
) > 1 ? CALL_EXPR_ARG (t
, 1) : NULL_TREE
;
13085 return tree_call_nonnegative_warnv_p (TREE_TYPE (t
),
13086 get_call_combined_fn (t
),
13089 strict_overflow_p
, depth
);
13091 case COMPOUND_EXPR
:
13093 return RECURSE (TREE_OPERAND (t
, 1));
13096 return RECURSE (expr_last (TREE_OPERAND (t
, 1)));
13099 return RECURSE (TREE_OPERAND (t
, 0));
13102 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
), TREE_TYPE (t
));
/* End of the RECURSE-only zone: the real definition follows.  */
13107 #undef tree_expr_nonnegative_warnv_p
13109 /* Return true if T is known to be non-negative. If the return
13110 value is based on the assumption that signed overflow is undefined,
13111 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13112 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
/* Dispatcher: routes T to the unary/binary/single/invalid helpers by
   tree-code class.  Several case labels are on elided lines.  */
13115 tree_expr_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
, int depth
)
13117 enum tree_code code
;
13118 if (t
== error_mark_node
)
13121 code
= TREE_CODE (t
);
13122 switch (TREE_CODE_CLASS (code
))
13125 case tcc_comparison
:
13126 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
13128 TREE_OPERAND (t
, 0),
13129 TREE_OPERAND (t
, 1),
13130 strict_overflow_p
, depth
);
13133 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
13135 TREE_OPERAND (t
, 0),
13136 strict_overflow_p
, depth
);
13139 case tcc_declaration
:
13140 case tcc_reference
:
13141 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
, depth
);
13149 case TRUTH_AND_EXPR
:
13150 case TRUTH_OR_EXPR
:
13151 case TRUTH_XOR_EXPR
:
13152 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
13154 TREE_OPERAND (t
, 0),
13155 TREE_OPERAND (t
, 1),
13156 strict_overflow_p
, depth
);
13157 case TRUTH_NOT_EXPR
:
13158 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
13160 TREE_OPERAND (t
, 0),
13161 strict_overflow_p
, depth
);
13168 case WITH_SIZE_EXPR
:
13170 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
, depth
);
13173 return tree_invalid_nonnegative_warnv_p (t
, strict_overflow_p
, depth
);
13177 /* Return true if `t' is known to be non-negative. Handle warnings
13178 about undefined signed overflow. */
13181 tree_expr_nonnegative_p (tree t
)
13183 bool ret
, strict_overflow_p
;
/* Query the worker, then emit the -Wstrict-overflow style warning if
   the answer relied on undefined signed overflow.  */
13185 strict_overflow_p
= false;
13186 ret
= tree_expr_nonnegative_warnv_p (t
, &strict_overflow_p
);
13187 if (strict_overflow_p
)
13188 fold_overflow_warning (("assuming signed overflow does not occur when "
13189 "determining that expression is always "
13191 WARN_STRICT_OVERFLOW_MISC
);
13196 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13197 For floating point we further ensure that T is not denormal.
13198 Similar logic is present in nonzero_address in rtlanal.h.
13200 If the return value is based on the assumption that signed overflow
13201 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13202 change *STRICT_OVERFLOW_P. */
/* NOTE(review): case labels (ABS_EXPR, NOP_EXPR, NEGATE_EXPR, ...)
   are on elided lines; code kept byte-identical.  */
13205 tree_unary_nonzero_warnv_p (enum tree_code code
, tree type
, tree op0
,
13206 bool *strict_overflow_p
)
13211 return tree_expr_nonzero_warnv_p (op0
,
13212 strict_overflow_p
);
/* Conversion case: nonzero survives a non-narrowing cast.  */
13216 tree inner_type
= TREE_TYPE (op0
);
13217 tree outer_type
= type
;
13219 return (TYPE_PRECISION (outer_type
) >= TYPE_PRECISION (inner_type
)
13220 && tree_expr_nonzero_warnv_p (op0
,
13221 strict_overflow_p
));
13225 case NON_LVALUE_EXPR
:
13226 return tree_expr_nonzero_warnv_p (op0
,
13227 strict_overflow_p
);
13236 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13237 For floating point we further ensure that T is not denormal.
13238 Similar logic is present in nonzero_address in rtlanal.h.
13240 If the return value is based on the assumption that signed overflow
13241 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13242 change *STRICT_OVERFLOW_P. */
/* NOTE(review): case labels (PLUS_EXPR, MULT_EXPR, MIN_EXPR/MAX_EXPR,
   BIT_IOR_EXPR, ...) are on elided lines; code kept byte-identical.  */
13245 tree_binary_nonzero_warnv_p (enum tree_code code
,
13248 tree op1
, bool *strict_overflow_p
)
13250 bool sub_strict_overflow_p
;
13253 case POINTER_PLUS_EXPR
:
13255 if (ANY_INTEGRAL_TYPE_P (type
) && TYPE_OVERFLOW_UNDEFINED (type
))
13257 /* With the presence of negative values it is hard
13258 to say something. */
13259 sub_strict_overflow_p
= false;
13260 if (!tree_expr_nonnegative_warnv_p (op0
,
13261 &sub_strict_overflow_p
)
13262 || !tree_expr_nonnegative_warnv_p (op1
,
13263 &sub_strict_overflow_p
))
13265 /* One of operands must be positive and the other non-negative. */
13266 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13267 overflows, on a twos-complement machine the sum of two
13268 nonnegative numbers can never be zero. */
13269 return (tree_expr_nonzero_warnv_p (op0
,
13271 || tree_expr_nonzero_warnv_p (op1
,
13272 strict_overflow_p
));
/* Presumably the MULT_EXPR case: both factors nonzero, relying on
   undefined overflow.  */
13277 if (TYPE_OVERFLOW_UNDEFINED (type
))
13279 if (tree_expr_nonzero_warnv_p (op0
,
13281 && tree_expr_nonzero_warnv_p (op1
,
13282 strict_overflow_p
))
13284 *strict_overflow_p
= true;
/* MIN-style case: both operands nonzero.  */
13291 sub_strict_overflow_p
= false;
13292 if (tree_expr_nonzero_warnv_p (op0
,
13293 &sub_strict_overflow_p
)
13294 && tree_expr_nonzero_warnv_p (op1
,
13295 &sub_strict_overflow_p
))
13297 if (sub_strict_overflow_p
)
13298 *strict_overflow_p
= true;
/* MAX-style case.  */
13303 sub_strict_overflow_p
= false;
13304 if (tree_expr_nonzero_warnv_p (op0
,
13305 &sub_strict_overflow_p
))
13307 if (sub_strict_overflow_p
)
13308 *strict_overflow_p
= true;
13310 /* When both operands are nonzero, then MAX must be too. */
13311 if (tree_expr_nonzero_warnv_p (op1
,
13312 strict_overflow_p
))
13315 /* MAX where operand 0 is positive is positive. */
13316 return tree_expr_nonnegative_warnv_p (op0
,
13317 strict_overflow_p
);
13319 /* MAX where operand 1 is positive is positive. */
13320 else if (tree_expr_nonzero_warnv_p (op1
,
13321 &sub_strict_overflow_p
)
13322 && tree_expr_nonnegative_warnv_p (op1
,
13323 &sub_strict_overflow_p
))
13325 if (sub_strict_overflow_p
)
13326 *strict_overflow_p
= true;
/* BIT_IOR-style case: either operand nonzero suffices.  */
13332 return (tree_expr_nonzero_warnv_p (op1
,
13334 || tree_expr_nonzero_warnv_p (op0
,
13335 strict_overflow_p
));
13344 /* Return true when T is an address and is known to be nonzero.
13345 For floating point we further ensure that T is not denormal.
13346 Similar logic is present in nonzero_address in rtlanal.h.
13348 If the return value is based on the assumption that signed overflow
13349 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13350 change *STRICT_OVERFLOW_P. */
/* NOTE(review): case labels (INTEGER_CST, ADDR_EXPR, COND_EXPR,
   SSA_NAME, ...) are on elided lines; code kept byte-identical.  */
13353 tree_single_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
13355 bool sub_strict_overflow_p
;
13356 switch (TREE_CODE (t
))
13359 return !integer_zerop (t
);
/* ADDR_EXPR-style case: reason about the address's base object.  */
13363 tree base
= TREE_OPERAND (t
, 0);
13365 if (!DECL_P (base
))
13366 base
= get_base_address (base
);
13368 if (base
&& TREE_CODE (base
) == TARGET_EXPR
)
13369 base
= TARGET_EXPR_SLOT (base
);
13374 /* For objects in symbol table check if we know they are non-zero.
13375 Don't do anything for variables and functions before symtab is built;
13376 it is quite possible that they will be declared weak later. */
13377 int nonzero_addr
= maybe_nonzero_address (base
);
13378 if (nonzero_addr
>= 0)
13379 return nonzero_addr
;
13381 /* Constants are never weak. */
13382 if (CONSTANT_CLASS_P (base
))
/* COND_EXPR-style case: both arms must be nonzero.  */
13389 sub_strict_overflow_p
= false;
13390 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
13391 &sub_strict_overflow_p
)
13392 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 2),
13393 &sub_strict_overflow_p
))
13395 if (sub_strict_overflow_p
)
13396 *strict_overflow_p
= true;
/* SSA_NAME-style case: consult range information.  */
13402 if (!INTEGRAL_TYPE_P (TREE_TYPE (t
)))
13404 return expr_not_equal_to (t
, wi::zero (TYPE_PRECISION (TREE_TYPE (t
))));
/* Poison direct recursion for the integer_valued_real_p family; use
   RECURSE so the depth counter is threaded through.  */
13412 #define integer_valued_real_p(X) \
13413 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13415 #define RECURSE(X) \
13416 ((integer_valued_real_p) (X, depth + 1))
13418 /* Return true if the floating point result of (CODE OP0) has an
13419 integer value. We also allow +Inf, -Inf and NaN to be considered
13420 integer values. Return false for signaling NaN.
13422 DEPTH is the current nesting depth of the query. */
/* NOTE(review): case labels are on elided lines; code kept
   byte-identical.  */
13425 integer_valued_real_unary_p (tree_code code
, tree op0
, int depth
)
13433 return RECURSE (op0
);
/* Conversion case: integer-typed source, or recurse on a real.  */
13437 tree type
= TREE_TYPE (op0
);
13438 if (TREE_CODE (type
) == INTEGER_TYPE
)
13440 if (TREE_CODE (type
) == REAL_TYPE
)
13441 return RECURSE (op0
);
13451 /* Return true if the floating point result of (CODE OP0 OP1) has an
13452 integer value. We also allow +Inf, -Inf and NaN to be considered
13453 integer values. Return false for signaling NaN.
13455 DEPTH is the current nesting depth of the query. */
/* NOTE(review): the case labels guarding the return (arithmetic codes
   whose result is integral when both operands are) are elided.  */
13458 integer_valued_real_binary_p (tree_code code
, tree op0
, tree op1
, int depth
)
13467 return RECURSE (op0
) && RECURSE (op1
);
13475 /* Return true if the floating point result of calling FNDECL with arguments
13476 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13477 considered integer values. Return false for signaling NaN. If FNDECL
13478 takes fewer than 2 arguments, the remaining ARGn are null.
13480 DEPTH is the current nesting depth of the query. */
/* NOTE(review): most CASE_CFN_* labels are elided from this view.  */
13483 integer_valued_real_call_p (combined_fn fn
, tree arg0
, tree arg1
, int depth
)
13489 CASE_CFN_NEARBYINT
:
13497 return RECURSE (arg0
) && RECURSE (arg1
);
13505 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13506 has an integer value. We also allow +Inf, -Inf and NaN to be
13507 considered integer values. Return false for signaling NaN.
13509 DEPTH is the current nesting depth of the query. */
/* NOTE(review): case labels (REAL_CST, COND_EXPR, SSA_NAME) are on
   elided lines; code kept byte-identical.  */
13512 integer_valued_real_single_p (tree t
, int depth
)
13514 switch (TREE_CODE (t
))
13517 return real_isinteger (TREE_REAL_CST_PTR (t
), TYPE_MODE (TREE_TYPE (t
)));
/* COND_EXPR-style case: both arms must be integer valued.  */
13520 return RECURSE (TREE_OPERAND (t
, 1)) && RECURSE (TREE_OPERAND (t
, 2));
13523 /* Limit the depth of recursion to avoid quadratic behavior.
13524 This is expected to catch almost all occurrences in practice.
13525 If this code misses important cases that unbounded recursion
13526 would not, passes that need this information could be revised
13527 to provide it through dataflow propagation. */
13528 return (!name_registered_for_update_p (t
)
13529 && depth
< PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH
)
13530 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t
),
13539 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13540 has an integer value. We also allow +Inf, -Inf and NaN to be
13541 considered integer values. Return false for signaling NaN.
13543 DEPTH is the current nesting depth of the query. */
13546 integer_valued_real_invalid_p (tree t
, int depth
)
13548 switch (TREE_CODE (t
))
13550 case COMPOUND_EXPR
:
/* A COMPOUND_EXPR evaluates to its second operand, so only operand 1
   determines the result.  */
13553 return RECURSE (TREE_OPERAND (t
, 1));
/* Wrapper-style node(s) (case labels not visible in this excerpt) that
   simply forward the query to operand 0.  */
13556 return RECURSE (TREE_OPERAND (t
, 0));
/* Drop the RECURSE-supporting macro alias so the public entry point
   below can be defined with its real name.  */
13565 #undef integer_valued_real_p
13567 /* Return true if the floating point expression T has an integer value.
13568 We also allow +Inf, -Inf and NaN to be considered integer values.
13569 Return false for signaling NaN.
13571 DEPTH is the current nesting depth of the query. */
13574 integer_valued_real_p (tree t
, int depth
)
/* Guard against error_mark_node; the early return itself is not visible
   in this excerpt.  */
13576 if (t
== error_mark_node
)
/* Dispatch on the class of T's tree code to the specialized helpers
   defined above.  */
13579 tree_code code
= TREE_CODE (t
);
13580 switch (TREE_CODE_CLASS (code
))
13583 case tcc_comparison
:
13584 return integer_valued_real_binary_p (code
, TREE_OPERAND (t
, 0),
13585 TREE_OPERAND (t
, 1), depth
);
/* Unary codes (class label missing from this excerpt).  */
13588 return integer_valued_real_unary_p (code
, TREE_OPERAND (t
, 0), depth
);
13591 case tcc_declaration
:
13592 case tcc_reference
:
13593 return integer_valued_real_single_p (t
, depth
);
/* Another single-operand class (labels not visible here) handled by the
   same helper.  */
13603 return integer_valued_real_single_p (t
, depth
);
/* CALL_EXPR handling: collect up to two arguments; the NULL_TREE arms
   of these conditionals are missing from this excerpt.  */
13607 tree arg0
= (call_expr_nargs (t
) > 0
13608 ? CALL_EXPR_ARG (t
, 0)
13610 tree arg1
= (call_expr_nargs (t
) > 1
13611 ? CALL_EXPR_ARG (t
, 1)
13613 return integer_valued_real_call_p (get_call_combined_fn (t
),
13614 arg0
, arg1
, depth
);
/* Everything else falls back to the "invalid RHS" helper.  */
13618 return integer_valued_real_invalid_p (t
, depth
);
13622 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13623 attempt to fold the expression to a constant without modifying TYPE,
13626 If the expression could be simplified to a constant, then return
13627 the constant. If the expression would not be simplified to a
13628 constant, then return NULL_TREE. */
13631 fold_binary_to_constant (enum tree_code code
, tree type
, tree op0
, tree op1
)
13633 tree tem
= fold_binary (code
, type
, op0
, op1
);
13634 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
13637 /* Given the components of a unary expression CODE, TYPE and OP0,
13638 attempt to fold the expression to a constant without modifying
13641 If the expression could be simplified to a constant, then return
13642 the constant. If the expression would not be simplified to a
13643 constant, then return NULL_TREE. */
13646 fold_unary_to_constant (enum tree_code code
, tree type
, tree op0
)
13648 tree tem
= fold_unary (code
, type
, op0
);
13649 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
13652 /* If EXP represents referencing an element in a constant string
13653 (either via pointer arithmetic or array indexing), return the
13654 tree representing the value accessed, otherwise return NULL. */
13657 fold_read_from_constant_string (tree exp
)
/* Only *ptr or array[idx] forms of integer (character) type can read
   a character out of a STRING_CST.  */
13659 if ((TREE_CODE (exp
) == INDIRECT_REF
13660 || TREE_CODE (exp
) == ARRAY_REF
)
13661 && TREE_CODE (TREE_TYPE (exp
)) == INTEGER_TYPE
)
13663 tree exp1
= TREE_OPERAND (exp
, 0);
13666 location_t loc
= EXPR_LOCATION (exp
);
/* For *ptr, string_constant decomposes the operand into the string and
   the byte index.  The ARRAY_REF branch below computes the index from
   operand 1 instead.  */
13668 if (TREE_CODE (exp
) == INDIRECT_REF
)
13669 string
= string_constant (exp1
, &index
);
13672 tree low_bound
= array_ref_low_bound (exp
);
13673 index
= fold_convert_loc (loc
, sizetype
, TREE_OPERAND (exp
, 1));
13675 /* Optimize the special-case of a zero lower bound.
13677 We convert the low_bound to sizetype to avoid some problems
13678 with constant folding. (E.g. suppose the lower bound is 1,
13679 and its mode is QI. Without the conversion, (ARRAY
13680 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13681 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13682 if (! integer_zerop (low_bound
))
13683 index
= size_diffop_loc (loc
, index
,
13684 fold_convert_loc (loc
, sizetype
, low_bound
));
/* Fold to the indexed character only when the access is in bounds and
   the element really is a one-byte integer matching EXP's mode.  The
   head of this condition (presumably a null check on STRING) is not
   visible in this excerpt.  */
13690 && TYPE_MODE (TREE_TYPE (exp
)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))
13691 && TREE_CODE (string
) == STRING_CST
13692 && TREE_CODE (index
) == INTEGER_CST
13693 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
13694 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
))))
13696 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))) == 1))
13697 return build_int_cst_type (TREE_TYPE (exp
),
13698 (TREE_STRING_POINTER (string
)
13699 [TREE_INT_CST_LOW (index
)]));
13704 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13705 an integer constant, real, or fixed-point constant.
13707 TYPE is the type of the result. */
13710 fold_negate_const (tree arg0
, tree type
)
13712 tree t
= NULL_TREE
;
13714 switch (TREE_CODE (arg0
))
/* INTEGER_CST case (label not visible here): negate in wide-int
   arithmetic.  Overflow is only recorded for signed TYPE, or when ARG0
   already carried an overflow flag.  */
13719 wide_int val
= wi::neg (arg0
, &overflow
);
13720 t
= force_fit_type (type
, val
, 1,
13721 (overflow
&& ! TYPE_UNSIGNED (type
))
13722 || TREE_OVERFLOW (arg0
));
/* REAL_CST case: floating-point negation is always exact.  */
13727 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
/* FIXED_CST case: negate via fixed_arithmetic, honoring saturation.  */
13732 FIXED_VALUE_TYPE f
;
13733 bool overflow_p
= fixed_arithmetic (&f
, NEGATE_EXPR
,
13734 &(TREE_FIXED_CST (arg0
)), NULL
,
13735 TYPE_SATURATING (type
));
13736 t
= build_fixed (type
, f
);
13737 /* Propagate overflow flags.  (Bitwise | on the two bools is
   equivalent to || here and avoids short-circuiting.)  */
13738 if (overflow_p
| TREE_OVERFLOW (arg0
))
13739 TREE_OVERFLOW (t
) = 1;
/* Any other constant kind is a caller bug.  */
13744 gcc_unreachable ();
13750 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13751 an integer constant or real constant.
13753 TYPE is the type of the result. */
13756 fold_abs_const (tree arg0
, tree type
)
13758 tree t
= NULL_TREE
;
13760 switch (TREE_CODE (arg0
))
13764 /* If the value is unsigned or non-negative, then the absolute value
13765 is the same as the ordinary value. */
13766 if (!wi::neg_p (arg0
, TYPE_SIGN (type
)))
13769 /* If the value is negative, then the absolute value is
   its negation; compute it in wide-int arithmetic and record any
   overflow (e.g. abs of the most negative value).  */
13774 wide_int val
= wi::neg (arg0
, &overflow
);
13775 t
= force_fit_type (type
, val
, -1,
13776 overflow
| TREE_OVERFLOW (arg0
));
/* REAL_CST case: flip the sign only when the value is negative.  The
   non-negative branch (presumably t = arg0) is not visible here.  */
13782 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0
)))
13783 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
/* Any other constant kind is a caller bug.  */
13789 gcc_unreachable ();
13795 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13796 constant. TYPE is the type of the result. */
13799 fold_not_const (const_tree arg0
, tree type
)
13801 gcc_assert (TREE_CODE (arg0
) == INTEGER_CST
);
13803 return force_fit_type (type
, wi::bit_not (arg0
), 0, TREE_OVERFLOW (arg0
));
13806 /* Given CODE, a relational operator, the target type, TYPE and two
13807 constant operands OP0 and OP1, return the result of the
13808 relational operation. If the result is not a compile time
13809 constant, then return NULL_TREE. */
13812 fold_relational_const (enum tree_code code
, tree type
, tree op0
, tree op1
)
13814 int result
, invert
;
13816 /* From here on, the only cases we handle are when the result is
13817 known to be a constant. */
13819 if (TREE_CODE (op0
) == REAL_CST
&& TREE_CODE (op1
) == REAL_CST
)
13821 const REAL_VALUE_TYPE
*c0
= TREE_REAL_CST_PTR (op0
);
13822 const REAL_VALUE_TYPE
*c1
= TREE_REAL_CST_PTR (op1
);
13824 /* Handle the cases where either operand is a NaN. */
13825 if (real_isnan (c0
) || real_isnan (c1
))
/* Switch over the comparison codes; most case labels are missing from
   this excerpt.  UNORDERED_EXPR and the trapping-math bail-out (ordered
   comparisons on NaN may trap, so folding is suppressed) are visible.  */
13835 case UNORDERED_EXPR
:
13849 if (flag_trapping_math
)
13855 gcc_unreachable ();
13858 return constant_boolean_node (result
, type
);
/* Neither operand is a NaN: compare the values directly.  */
13861 return constant_boolean_node (real_compare (code
, c0
, c1
), type
);
13864 if (TREE_CODE (op0
) == FIXED_CST
&& TREE_CODE (op1
) == FIXED_CST
)
13866 const FIXED_VALUE_TYPE
*c0
= TREE_FIXED_CST_PTR (op0
);
13867 const FIXED_VALUE_TYPE
*c1
= TREE_FIXED_CST_PTR (op1
);
13868 return constant_boolean_node (fixed_compare (code
, c0
, c1
), type
);
13871 /* Handle equality/inequality of complex constants. */
13872 if (TREE_CODE (op0
) == COMPLEX_CST
&& TREE_CODE (op1
) == COMPLEX_CST
)
13874 tree rcond
= fold_relational_const (code
, type
,
13875 TREE_REALPART (op0
),
13876 TREE_REALPART (op1
));
13877 tree icond
= fold_relational_const (code
, type
,
13878 TREE_IMAGPART (op0
),
13879 TREE_IMAGPART (op1
));
/* Complex == requires both parts equal; != requires either part to
   differ.  */
13880 if (code
== EQ_EXPR
)
13881 return fold_build2 (TRUTH_ANDIF_EXPR
, type
, rcond
, icond
);
13882 else if (code
== NE_EXPR
)
13883 return fold_build2 (TRUTH_ORIF_EXPR
, type
, rcond
, icond
);
13888 if (TREE_CODE (op0
) == VECTOR_CST
&& TREE_CODE (op1
) == VECTOR_CST
)
13890 if (!VECTOR_TYPE_P (type
))
13892 /* Have vector comparison with scalar boolean result. */
13893 gcc_assert ((code
== EQ_EXPR
|| code
== NE_EXPR
)
13894 && VECTOR_CST_NELTS (op0
) == VECTOR_CST_NELTS (op1
));
/* Fold element-wise; any non-constant element aborts the fold (the
   early return for tmp == NULL_TREE is not visible here).  One unequal
   element decides the whole scalar result.  */
13895 for (unsigned i
= 0; i
< VECTOR_CST_NELTS (op0
); i
++)
13897 tree elem0
= VECTOR_CST_ELT (op0
, i
);
13898 tree elem1
= VECTOR_CST_ELT (op1
, i
);
13899 tree tmp
= fold_relational_const (code
, type
, elem0
, elem1
);
13900 if (tmp
== NULL_TREE
)
13902 if (integer_zerop (tmp
))
13903 return constant_boolean_node (false, type
);
13905 return constant_boolean_node (true, type
);
/* Vector result: build a mask vector whose elements are 0 / -1
   depending on each element-wise comparison.  */
13907 unsigned count
= VECTOR_CST_NELTS (op0
);
13908 tree
*elts
= XALLOCAVEC (tree
, count
);
13909 gcc_assert (VECTOR_CST_NELTS (op1
) == count
13910 && TYPE_VECTOR_SUBPARTS (type
) == count
);
13912 for (unsigned i
= 0; i
< count
; i
++)
13914 tree elem_type
= TREE_TYPE (type
);
13915 tree elem0
= VECTOR_CST_ELT (op0
, i
);
13916 tree elem1
= VECTOR_CST_ELT (op1
, i
);
13918 tree tem
= fold_relational_const (code
, elem_type
,
13921 if (tem
== NULL_TREE
)
13924 elts
[i
] = build_int_cst (elem_type
, integer_zerop (tem
) ? 0 : -1);
13927 return build_vector (type
, elts
);
13930 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13932 To compute GT, swap the arguments and do LT.
13933 To compute GE, do LT and invert the result.
13934 To compute LE, swap the arguments, do LT and invert the result.
13935 To compute NE, do EQ and invert the result.
13937 Therefore, the code below must handle only EQ and LT. */
13939 if (code
== LE_EXPR
|| code
== GT_EXPR
)
13941 std::swap (op0
, op1
);
13942 code
= swap_tree_comparison (code
);
13945 /* Note that it is safe to invert for real values here because we
13946 have already handled the one case that it matters. */
13949 if (code
== NE_EXPR
|| code
== GE_EXPR
)
13952 code
= invert_tree_comparison (code
, false);
13955 /* Compute a result for LT or EQ if args permit;
13956 Otherwise return T. */
13957 if (TREE_CODE (op0
) == INTEGER_CST
&& TREE_CODE (op1
) == INTEGER_CST
)
13959 if (code
== EQ_EXPR
)
13960 result
= tree_int_cst_equal (op0
, op1
);
13962 result
= tree_int_cst_lt (op0
, op1
);
/* The application of the INVERT flag before this return is not visible
   in this excerpt.  */
13969 return constant_boolean_node (result
, type
);
13972 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
13973 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
13977 fold_build_cleanup_point_expr (tree type
, tree expr
)
13979 /* If the expression does not have side effects then we don't have to wrap
13980 it with a cleanup point expression. */
13981 if (!TREE_SIDE_EFFECTS (expr
))
13984 /* If the expression is a return, check to see if the expression inside the
13985 return has no side effects or the right hand side of the modify expression
13986 inside the return. If either don't have side effects set we don't need to
13987 wrap the expression in a cleanup point expression. Note we don't check the
13988 left hand side of the modify because it should always be a return decl. */
13989 if (TREE_CODE (expr
) == RETURN_EXPR
)
13991 tree op
= TREE_OPERAND (expr
, 0);
13992 if (!op
|| !TREE_SIDE_EFFECTS (op
))
/* Descend to the RHS of the MODIFY_EXPR inside the return.  (The early
   "return expr;" statements of this function are not visible in this
   excerpt.)  */
13994 op
= TREE_OPERAND (op
, 1);
13995 if (!TREE_SIDE_EFFECTS (op
))
/* Otherwise wrap EXPR, preserving its source location.  */
13999 return build1_loc (EXPR_LOCATION (expr
), CLEANUP_POINT_EXPR
, type
, expr
);
14002 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14003 of an indirection through OP0, or NULL_TREE if no simplification is
14007 fold_indirect_ref_1 (location_t loc
, tree type
, tree op0
)
/* SUB is presumably STRIP_NOPS-stripped OP0 (its initialization is not
   visible in this excerpt).  Bail out unless SUB really is a pointer
   and does not carry the ref-all alias property.  */
14013 subtype
= TREE_TYPE (sub
);
14014 if (!POINTER_TYPE_P (subtype
)
14015 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0
)))
14018 if (TREE_CODE (sub
) == ADDR_EXPR
)
14020 tree op
= TREE_OPERAND (sub
, 0);
14021 tree optype
= TREE_TYPE (op
);
14022 /* *&CONST_DECL -> to the value of the const decl. */
14023 if (TREE_CODE (op
) == CONST_DECL
)
14024 return DECL_INITIAL (op
);
14025 /* *&p => p; make sure to handle *&"str"[cst] here. */
14026 if (type
== optype
)
14028 tree fop
= fold_read_from_constant_string (op
);
14034 /* *(foo *)&fooarray => fooarray[0] */
14035 else if (TREE_CODE (optype
) == ARRAY_TYPE
14036 && type
== TREE_TYPE (optype
)
14037 && (!in_gimple_form
14038 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
14040 tree type_domain
= TYPE_DOMAIN (optype
);
14041 tree min_val
= size_zero_node
;
14042 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
14043 min_val
= TYPE_MIN_VALUE (type_domain
);
/* In GIMPLE a non-constant lower bound is not acceptable; the guard's
   head (presumably "if (in_gimple_form") is not visible here.  */
14045 && TREE_CODE (min_val
) != INTEGER_CST
)
14047 return build4_loc (loc
, ARRAY_REF
, type
, op
, min_val
,
14048 NULL_TREE
, NULL_TREE
);
14050 /* *(foo *)&complexfoo => __real__ complexfoo */
14051 else if (TREE_CODE (optype
) == COMPLEX_TYPE
14052 && type
== TREE_TYPE (optype
))
14053 return fold_build1_loc (loc
, REALPART_EXPR
, type
, op
);
14054 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14055 else if (TREE_CODE (optype
) == VECTOR_TYPE
14056 && type
== TREE_TYPE (optype
))
14058 tree part_width
= TYPE_SIZE (type
);
14059 tree index
= bitsize_int (0);
14060 return fold_build3_loc (loc
, BIT_FIELD_REF
, type
, op
, part_width
, index
);
/* &obj + CST: fold element/part accesses expressed via pointer
   arithmetic on an ADDR_EXPR.  */
14064 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
14065 && TREE_CODE (TREE_OPERAND (sub
, 1)) == INTEGER_CST
)
14067 tree op00
= TREE_OPERAND (sub
, 0);
14068 tree op01
= TREE_OPERAND (sub
, 1);
14071 if (TREE_CODE (op00
) == ADDR_EXPR
)
14074 op00
= TREE_OPERAND (op00
, 0);
14075 op00type
= TREE_TYPE (op00
);
14077 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14078 if (TREE_CODE (op00type
) == VECTOR_TYPE
14079 && type
== TREE_TYPE (op00type
))
14081 tree part_width
= TYPE_SIZE (type
);
14082 unsigned HOST_WIDE_INT max_offset
14083 = (tree_to_uhwi (part_width
) / BITS_PER_UNIT
14084 * TYPE_VECTOR_SUBPARTS (op00type
));
/* Only fold when the byte offset is non-negative and within the
   vector.  */
14085 if (tree_int_cst_sign_bit (op01
) == 0
14086 && compare_tree_int (op01
, max_offset
) == -1)
14088 unsigned HOST_WIDE_INT offset
= tree_to_uhwi (op01
);
14089 unsigned HOST_WIDE_INT indexi
= offset
* BITS_PER_UNIT
;
14090 tree index
= bitsize_int (indexi
);
14091 return fold_build3_loc (loc
,
14092 BIT_FIELD_REF
, type
, op00
,
14093 part_width
, index
);
14096 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14097 else if (TREE_CODE (op00type
) == COMPLEX_TYPE
14098 && type
== TREE_TYPE (op00type
))
14100 tree size
= TYPE_SIZE_UNIT (type
);
14101 if (tree_int_cst_equal (size
, op01
))
14102 return fold_build1_loc (loc
, IMAGPART_EXPR
, type
, op00
);
14104 /* ((foo *)&fooarray)[1] => fooarray[1] */
14105 else if (TREE_CODE (op00type
) == ARRAY_TYPE
14106 && type
== TREE_TYPE (op00type
))
14108 tree type_domain
= TYPE_DOMAIN (op00type
);
14109 tree min_val
= size_zero_node
;
14110 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
14111 min_val
= TYPE_MIN_VALUE (type_domain
);
/* Translate the byte offset into an element index, then shift by the
   array's lower bound.  */
14112 op01
= size_binop_loc (loc
, EXACT_DIV_EXPR
, op01
,
14113 TYPE_SIZE_UNIT (type
));
14114 op01
= size_binop_loc (loc
, PLUS_EXPR
, op01
, min_val
);
14115 return build4_loc (loc
, ARRAY_REF
, type
, op00
, op01
,
14116 NULL_TREE
, NULL_TREE
);
14121 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14122 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
14123 && type
== TREE_TYPE (TREE_TYPE (subtype
))
14124 && (!in_gimple_form
14125 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
14128 tree min_val
= size_zero_node
;
14129 sub
= build_fold_indirect_ref_loc (loc
, sub
);
14130 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
14131 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
14132 min_val
= TYPE_MIN_VALUE (type_domain
);
/* As above: a non-constant lower bound is rejected in GIMPLE form (the
   guard's head is not visible in this excerpt).  */
14134 && TREE_CODE (min_val
) != INTEGER_CST
)
14136 return build4_loc (loc
, ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
,
14143 /* Builds an expression for an indirection through T, simplifying some
14147 build_fold_indirect_ref_loc (location_t loc
, tree t
)
/* T is a pointer; the result type is what it points to.  */
14149 tree type
= TREE_TYPE (TREE_TYPE (t
));
14150 tree sub
= fold_indirect_ref_1 (loc
, type
, t
);
/* No simplification found (the early return of SUB on success is not
   visible in this excerpt): build a plain INDIRECT_REF.  */
14155 return build1_loc (loc
, INDIRECT_REF
, type
, t
);
14158 /* Given an INDIRECT_REF T, return either T or a simplified version. */
/* Unlike build_fold_indirect_ref_loc, this takes an existing
   INDIRECT_REF and tries to simplify its operand; the fallback return
   of T itself is not visible in this excerpt.  */
14161 fold_indirect_ref_loc (location_t loc
, tree t
)
14163 tree sub
= fold_indirect_ref_1 (loc
, TREE_TYPE (t
), TREE_OPERAND (t
, 0));
14171 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14172 whose result is ignored. The type of the returned tree need not be
14173 the same as the original expression. */
14176 fold_ignored_result (tree t
)
/* An expression without side effects contributes nothing when its value
   is ignored.  */
14178 if (!TREE_SIDE_EFFECTS (t
))
14179 return integer_zero_node
;
/* Iteratively peel wrapper nodes (the enclosing loop construct is not
   visible in this excerpt).  */
14182 switch (TREE_CODE_CLASS (TREE_CODE (t
)))
/* Unary classes: only the operand matters.  */
14185 t
= TREE_OPERAND (t
, 0);
14189 case tcc_comparison
:
/* A comparison's value is ignored; keep only whichever operand still
   has side effects.  */
14190 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
14191 t
= TREE_OPERAND (t
, 0);
14192 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 0)))
14193 t
= TREE_OPERAND (t
, 1);
14198 case tcc_expression
:
14199 switch (TREE_CODE (t
))
14201 case COMPOUND_EXPR
:
/* If the second operand still has side effects we cannot strip the
   COMPOUND_EXPR (the bail-out is not visible here); otherwise only the
   first operand matters.  */
14202 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
14204 t
= TREE_OPERAND (t
, 0);
/* COND_EXPR-style node (label not visible): strippable only when both
   arms are side-effect free, leaving just the condition.  */
14208 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1))
14209 || TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 2)))
14211 t
= TREE_OPERAND (t
, 0);
14224 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14227 round_up_loc (location_t loc
, tree value
, unsigned int divisor
)
14229 tree div
= NULL_TREE
;
14234 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14235 have to do anything. Only do this when we are not given a const,
14236 because in that case, this check is more expensive than just
14238 if (TREE_CODE (value
) != INTEGER_CST
)
14240 div
= build_int_cst (TREE_TYPE (value
), divisor
);
14242 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
14246 /* If divisor is a power of two, simplify this to bit manipulation. */
14247 if (pow2_or_zerop (divisor
))
14249 if (TREE_CODE (value
) == INTEGER_CST
)
14251 wide_int val
= value
;
/* Already aligned constant: nothing to do (the early return is not
   visible in this excerpt).  */
14254 if ((val
& (divisor
- 1)) == 0)
/* Classic round-up: add DIVISOR-1, then clear the low bits.  */
14257 overflow_p
= TREE_OVERFLOW (value
);
14258 val
+= divisor
- 1;
14259 val
&= (int) -divisor
;
14263 return force_fit_type (TREE_TYPE (value
), val
, -1, overflow_p
);
/* Non-constant VALUE, power-of-two DIVISOR: build the same add-and-mask
   computation as trees.  */
14269 t
= build_int_cst (TREE_TYPE (value
), divisor
- 1);
14270 value
= size_binop_loc (loc
, PLUS_EXPR
, value
, t
);
14271 t
= build_int_cst (TREE_TYPE (value
), - (int) divisor
);
14272 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
/* General divisor: ceil-divide then multiply back.  */
14278 div
= build_int_cst (TREE_TYPE (value
), divisor
);
14279 value
= size_binop_loc (loc
, CEIL_DIV_EXPR
, value
, div
);
14280 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
14286 /* Likewise, but round down. */
14289 round_down_loc (location_t loc
, tree value
, int divisor
)
14291 tree div
= NULL_TREE
;
/* Unlike round_up_loc, DIVISOR is signed here, so reject non-positive
   divisors outright.  */
14293 gcc_assert (divisor
> 0);
14297 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14298 have to do anything. Only do this when we are not given a const,
14299 because in that case, this check is more expensive than just
14301 if (TREE_CODE (value
) != INTEGER_CST
)
14303 div
= build_int_cst (TREE_TYPE (value
), divisor
);
14305 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
14309 /* If divisor is a power of two, simplify this to bit manipulation. */
14310 if (pow2_or_zerop (divisor
))
/* Round down = clear the low log2(DIVISOR) bits.  */
14314 t
= build_int_cst (TREE_TYPE (value
), -divisor
);
14315 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
/* General divisor: floor-divide then multiply back.  */
14320 div
= build_int_cst (TREE_TYPE (value
), divisor
);
14321 value
= size_binop_loc (loc
, FLOOR_DIV_EXPR
, value
, div
);
14322 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
14328 /* Returns the pointer to the base of the object addressed by EXP and
14329 extracts the information about the offset of the access, storing it
14330 to PBITPOS and POFFSET. */
14333 split_address_to_core_and_offset (tree exp
,
14334 HOST_WIDE_INT
*pbitpos
, tree
*poffset
)
14338 int unsignedp
, reversep
, volatilep
;
14339 HOST_WIDE_INT bitsize
;
14340 location_t loc
= EXPR_LOCATION (exp
);
/* &ref: decompose the referenced object with get_inner_reference and
   re-take the address of the innermost base.  */
14342 if (TREE_CODE (exp
) == ADDR_EXPR
)
14344 core
= get_inner_reference (TREE_OPERAND (exp
, 0), &bitsize
, pbitpos
,
14345 poffset
, &mode
, &unsignedp
, &reversep
,
14347 core
= build_fold_addr_expr_loc (loc
, core
);
/* ptr + off: the base is operand 0; fold a constant offset into
   *PBITPOS when it fits a signed HWI, otherwise leave it in *POFFSET.  */
14349 else if (TREE_CODE (exp
) == POINTER_PLUS_EXPR
)
14351 core
= TREE_OPERAND (exp
, 0);
14354 *poffset
= TREE_OPERAND (exp
, 1);
14355 if (TREE_CODE (*poffset
) == INTEGER_CST
)
14357 offset_int tem
= wi::sext (wi::to_offset (*poffset
),
14358 TYPE_PRECISION (TREE_TYPE (*poffset
)));
/* Convert the byte offset to bits.  */
14359 tem
<<= LOG2_BITS_PER_UNIT
;
14360 if (wi::fits_shwi_p (tem
))
14362 *pbitpos
= tem
.to_shwi ();
14363 *poffset
= NULL_TREE
;
/* Fallback (presumably EXP itself is the core): zero offset.  The
   *PBITPOS = 0 assignment is not visible in this excerpt.  */
14371 *poffset
= NULL_TREE
;
14377 /* Returns true if addresses of E1 and E2 differ by a constant, false
14378 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14381 ptr_difference_const (tree e1
, tree e2
, HOST_WIDE_INT
*diff
)
14384 HOST_WIDE_INT bitpos1
, bitpos2
;
14385 tree toffset1
, toffset2
, tdiff
, type
;
14387 core1
= split_address_to_core_and_offset (e1
, &bitpos1
, &toffset1
);
14388 core2
= split_address_to_core_and_offset (e2
, &bitpos2
, &toffset2
);
/* The difference is only a constant when both accesses are byte aligned
   and share the same base object.  */
14390 if (bitpos1
% BITS_PER_UNIT
!= 0
14391 || bitpos2
% BITS_PER_UNIT
!= 0
14392 || !operand_equal_p (core1
, core2
, 0))
/* Both have symbolic offsets: their difference must itself fold to a
   host-word-sized constant.  */
14395 if (toffset1
&& toffset2
)
14397 type
= TREE_TYPE (toffset1
);
14398 if (type
!= TREE_TYPE (toffset2
))
14399 toffset2
= fold_convert (type
, toffset2
);
14401 tdiff
= fold_build2 (MINUS_EXPR
, type
, toffset1
, toffset2
);
14402 if (!cst_and_fits_in_hwi (tdiff
))
14405 *diff
= int_cst_value (tdiff
);
14407 else if (toffset1
|| toffset2
)
14409 /* If only one of the offsets is non-constant, the difference cannot
14416 *diff
+= (bitpos1
- bitpos2
) / BITS_PER_UNIT
;
14420 /* Return OFF converted to a pointer offset type suitable as offset for
14421 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14423 convert_to_ptrofftype_loc (location_t loc
, tree off
)
14425 return fold_convert_loc (loc
, sizetype
, off
);
14428 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14430 fold_build_pointer_plus_loc (location_t loc
, tree ptr
, tree off
)
14432 return fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (ptr
),
14433 ptr
, convert_to_ptrofftype_loc (loc
, off
));
14436 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14438 fold_build_pointer_plus_hwi_loc (location_t loc
, tree ptr
, HOST_WIDE_INT off
)
14440 return fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (ptr
),
14441 ptr
, size_int (off
));
14444 /* Return a char pointer for a C string if it is a string constant
14445 or sum of string constant and integer constant. We only support
14446 string constants properly terminated with '\0' character.
14447 If STRLEN is a valid pointer, length (including terminating character)
14448 of returned string is stored to the argument. */
14451 c_getstr (tree src
, unsigned HOST_WIDE_INT
*strlen
)
/* Decompose SRC into a STRING_CST and an optional constant byte offset;
   the failure return when no string constant is found is not visible in
   this excerpt.  */
14458 src
= string_constant (src
, &offset_node
)
;
14462 unsigned HOST_WIDE_INT offset
= 0;
14463 if (offset_node
!= NULL_TREE
)
/* A symbolic or oversized offset cannot be resolved at compile time.  */
14465 if (!tree_fits_uhwi_p (offset_node
))
14468 offset
= tree_to_uhwi (offset_node
);
14471 unsigned HOST_WIDE_INT string_length
= TREE_STRING_LENGTH (src
);
14472 const char *string
= TREE_STRING_POINTER (src
);
14474 /* Support only properly null-terminated strings. */
14475 if (string_length
== 0
14476 || string
[string_length
- 1] != '\0'
14477 || offset
>= string_length
)
/* STRLEN is optional; when given, report the remaining length including
   the terminator.  */
14481 *strlen
= string_length
- offset
;
14482 return string
+ offset
;
14487 namespace selftest
{
14489 /* Helper functions for writing tests of folding trees. */
14491 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
14494 assert_binop_folds_to_const (tree lhs
, enum tree_code code
, tree rhs
,
/* The trailing CONSTANT parameter of this signature is not visible in
   this excerpt.  */
14497 ASSERT_EQ (constant
, fold_build2 (code
, TREE_TYPE (lhs
), lhs
, rhs
));
14500 /* Verify that the binary op (LHS CODE RHS) folds to an NON_LVALUE_EXPR
14501 wrapping WRAPPED_EXPR. */
14504 assert_binop_folds_to_nonlvalue (tree lhs
, enum tree_code code
, tree rhs
,
14507 tree result
= fold_build2 (code
, TREE_TYPE (lhs
), lhs
, rhs
);
/* The fold must produce a fresh NON_LVALUE_EXPR node whose single
   operand is the expected wrapped expression.  */
14508 ASSERT_NE (wrapped_expr
, result
);
14509 ASSERT_EQ (NON_LVALUE_EXPR
, TREE_CODE (result
));
14510 ASSERT_EQ (wrapped_expr
, TREE_OPERAND (result
, 0));
14513 /* Verify that various arithmetic binary operations are folded
14517 test_arithmetic_folding ()
14519 tree type
= integer_type_node
;
14520 tree x
= create_tmp_var_raw (type
, "x");
14521 tree zero
= build_zero_cst (type
);
14522 tree one
= build_int_cst (type
, 1);
14525 /* 1 <-- (0 + 1) */
14526 assert_binop_folds_to_const (zero
, PLUS_EXPR
, one
,
14528 assert_binop_folds_to_const (one
, PLUS_EXPR
, zero
,
14531 /* (nonlvalue)x <-- (x + 0) */
14532 assert_binop_folds_to_nonlvalue (x
, PLUS_EXPR
, zero
,
14536 /* 0 <-- (x - x) */
14537 assert_binop_folds_to_const (x
, MINUS_EXPR
, x
,
14539 assert_binop_folds_to_nonlvalue (x
, MINUS_EXPR
, zero
,
14542 /* Multiplication. */
14543 /* 0 <-- (x * 0) */
14544 assert_binop_folds_to_const (x
, MULT_EXPR
, zero
,
14547 /* (nonlvalue)x <-- (x * 1) */
14548 assert_binop_folds_to_nonlvalue (x
, MULT_EXPR
, one
,
14552 /* Verify that various binary operations on vectors are folded
14556 test_vector_folding ()
14558 tree inner_type
= integer_type_node
;
14559 tree type
= build_vector_type (inner_type
, 4);
14560 tree zero
= build_zero_cst (type
);
14561 tree one
= build_one_cst (type
);
14563 /* Verify equality tests that return a scalar boolean result. */
14564 tree res_type
= boolean_type_node
;
14565 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR
, res_type
, zero
, one
)));
14566 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR
, res_type
, zero
, zero
)));
14567 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR
, res_type
, zero
, one
)));
14568 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR
, res_type
, one
, one
)));
14571 /* Run all of the selftests within this file. */
14574 fold_const_c_tests ()
14576 test_arithmetic_folding ();
14577 test_vector_folding ();
14580 } // namespace selftest
14582 #endif /* CHECKING_P */