1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
4 Free Software Foundation, Inc.
6 This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide and size_binop.
32 fold takes a tree as argument and returns a simplified tree.
34 size_binop takes a tree code for an arithmetic operation
35 and two operands that are trees, and produces a tree for the
36 result, assuming the type comes from `sizetype'.
38 size_int takes an integer value, and creates a tree constant
39 with type from `sizetype'.
41 Note: Since the folders get called on non-gimple code as well as
42 gimple code, we need to handle GIMPLE tuples as well as their
43 corresponding tree equivalents. */
47 #include "coretypes.h"
56 #include "diagnostic-core.h"
60 #include "langhooks.h"
63 #include "tree-flow.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
69 /* The following constants represent a bit based encoding of GCC's
70 comparison operators. This encoding simplifies transformations
71 on relational comparison operators, such as AND and OR. */
72 enum comparison_code
{
91 static bool negate_mathfn_p (enum built_in_function
);
92 static bool negate_expr_p (tree
);
93 static tree
negate_expr (tree
);
94 static tree
split_tree (tree
, enum tree_code
, tree
*, tree
*, tree
*, int);
95 static tree
associate_trees (location_t
, tree
, tree
, enum tree_code
, tree
);
96 static tree
const_binop (enum tree_code
, tree
, tree
);
97 static enum comparison_code
comparison_to_compcode (enum tree_code
);
98 static enum tree_code
compcode_to_comparison (enum comparison_code
);
99 static int operand_equal_for_comparison_p (tree
, tree
, tree
);
100 static int twoval_comparison_p (tree
, tree
*, tree
*, int *);
101 static tree
eval_subst (location_t
, tree
, tree
, tree
, tree
, tree
);
102 static tree
pedantic_omit_one_operand_loc (location_t
, tree
, tree
, tree
);
103 static tree
distribute_bit_expr (location_t
, enum tree_code
, tree
, tree
, tree
);
104 static tree
make_bit_field_ref (location_t
, tree
, tree
,
105 HOST_WIDE_INT
, HOST_WIDE_INT
, int);
106 static tree
optimize_bit_field_compare (location_t
, enum tree_code
,
108 static tree
decode_field_reference (location_t
, tree
, HOST_WIDE_INT
*,
110 enum machine_mode
*, int *, int *,
112 static int all_ones_mask_p (const_tree
, int);
113 static tree
sign_bit_p (tree
, const_tree
);
114 static int simple_operand_p (const_tree
);
115 static tree
range_binop (enum tree_code
, tree
, tree
, int, tree
, int);
116 static tree
range_predecessor (tree
);
117 static tree
range_successor (tree
);
118 static tree
fold_range_test (location_t
, enum tree_code
, tree
, tree
, tree
);
119 static tree
fold_cond_expr_with_comparison (location_t
, tree
, tree
, tree
, tree
);
120 static tree
unextend (tree
, int, int, tree
);
121 static tree
fold_truthop (location_t
, enum tree_code
, tree
, tree
, tree
);
122 static tree
optimize_minmax_comparison (location_t
, enum tree_code
,
124 static tree
extract_muldiv (tree
, tree
, enum tree_code
, tree
, bool *);
125 static tree
extract_muldiv_1 (tree
, tree
, enum tree_code
, tree
, bool *);
126 static tree
fold_binary_op_with_conditional_arg (location_t
,
127 enum tree_code
, tree
,
130 static tree
fold_mathfn_compare (location_t
,
131 enum built_in_function
, enum tree_code
,
133 static tree
fold_inf_compare (location_t
, enum tree_code
, tree
, tree
, tree
);
134 static tree
fold_div_compare (location_t
, enum tree_code
, tree
, tree
, tree
);
135 static bool reorder_operands_p (const_tree
, const_tree
);
136 static tree
fold_negate_const (tree
, tree
);
137 static tree
fold_not_const (const_tree
, tree
);
138 static tree
fold_relational_const (enum tree_code
, tree
, tree
, tree
);
139 static tree
fold_convert_const (enum tree_code
, tree
, tree
);
141 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
142 Otherwise, return LOC. */
145 expr_location_or (tree t
, location_t loc
)
147 location_t tloc
= EXPR_LOCATION (t
);
148 return tloc
!= UNKNOWN_LOCATION
? tloc
: loc
;
151 /* Similar to protected_set_expr_location, but never modify x in place,
152 if location can and needs to be set, unshare it. */
155 protected_set_expr_location_unshare (tree x
, location_t loc
)
157 if (CAN_HAVE_LOCATION_P (x
)
158 && EXPR_LOCATION (x
) != loc
159 && !(TREE_CODE (x
) == SAVE_EXPR
160 || TREE_CODE (x
) == TARGET_EXPR
161 || TREE_CODE (x
) == BIND_EXPR
))
164 SET_EXPR_LOCATION (x
, loc
);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
178 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
180 /* If ARG2 divides ARG1 with zero remainder, carries out the division
181 of type CODE and returns the quotient.
182 Otherwise returns NULL_TREE. */
185 div_if_zero_remainder (enum tree_code code
, const_tree arg1
, const_tree arg2
)
190 /* The sign of the division is according to operand two, that
191 does the correct thing for POINTER_PLUS_EXPR where we want
192 a signed division. */
193 uns
= TYPE_UNSIGNED (TREE_TYPE (arg2
));
194 if (TREE_CODE (TREE_TYPE (arg2
)) == INTEGER_TYPE
195 && TYPE_IS_SIZETYPE (TREE_TYPE (arg2
)))
198 quo
= double_int_divmod (tree_to_double_int (arg1
),
199 tree_to_double_int (arg2
),
202 if (double_int_zero_p (rem
))
203 return build_int_cst_wide (TREE_TYPE (arg1
), quo
.low
, quo
.high
);
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
231 /* Start deferring overflow warnings. We could use a stack here to
232 permit nested calls, but at present it is not necessary. */
235 fold_defer_overflow_warnings (void)
237 ++fold_deferring_overflow_warnings
;
240 /* Stop deferring overflow warnings. If there is a pending warning,
241 and ISSUE is true, then issue the warning if appropriate. STMT is
242 the statement with which the warning should be associated (used for
243 location information); STMT may be NULL. CODE is the level of the
244 warning--a warn_strict_overflow_code value. This function will use
245 the smaller of CODE and the deferred code when deciding whether to
246 issue the warning. CODE may be zero to mean to always use the
250 fold_undefer_overflow_warnings (bool issue
, const_gimple stmt
, int code
)
255 gcc_assert (fold_deferring_overflow_warnings
> 0);
256 --fold_deferring_overflow_warnings
;
257 if (fold_deferring_overflow_warnings
> 0)
259 if (fold_deferred_overflow_warning
!= NULL
261 && code
< (int) fold_deferred_overflow_code
)
262 fold_deferred_overflow_code
= (enum warn_strict_overflow_code
) code
;
266 warnmsg
= fold_deferred_overflow_warning
;
267 fold_deferred_overflow_warning
= NULL
;
269 if (!issue
|| warnmsg
== NULL
)
272 if (gimple_no_warning_p (stmt
))
275 /* Use the smallest code level when deciding to issue the
277 if (code
== 0 || code
> (int) fold_deferred_overflow_code
)
278 code
= fold_deferred_overflow_code
;
280 if (!issue_strict_overflow_warning (code
))
284 locus
= input_location
;
286 locus
= gimple_location (stmt
);
287 warning_at (locus
, OPT_Wstrict_overflow
, "%s", warnmsg
);
290 /* Stop deferring overflow warnings, ignoring any deferred
294 fold_undefer_and_ignore_overflow_warnings (void)
296 fold_undefer_overflow_warnings (false, NULL
, 0);
299 /* Whether we are deferring overflow warnings. */
302 fold_deferring_overflow_warnings_p (void)
304 return fold_deferring_overflow_warnings
> 0;
307 /* This is called when we fold something based on the fact that signed
308 overflow is undefined. */
311 fold_overflow_warning (const char* gmsgid
, enum warn_strict_overflow_code wc
)
313 if (fold_deferring_overflow_warnings
> 0)
315 if (fold_deferred_overflow_warning
== NULL
316 || wc
< fold_deferred_overflow_code
)
318 fold_deferred_overflow_warning
= gmsgid
;
319 fold_deferred_overflow_code
= wc
;
322 else if (issue_strict_overflow_warning (wc
))
323 warning (OPT_Wstrict_overflow
, gmsgid
);
326 /* Return true if the built-in mathematical function specified by CODE
327 is odd, i.e. -f(x) == f(-x). */
330 negate_mathfn_p (enum built_in_function code
)
334 CASE_FLT_FN (BUILT_IN_ASIN
):
335 CASE_FLT_FN (BUILT_IN_ASINH
):
336 CASE_FLT_FN (BUILT_IN_ATAN
):
337 CASE_FLT_FN (BUILT_IN_ATANH
):
338 CASE_FLT_FN (BUILT_IN_CASIN
):
339 CASE_FLT_FN (BUILT_IN_CASINH
):
340 CASE_FLT_FN (BUILT_IN_CATAN
):
341 CASE_FLT_FN (BUILT_IN_CATANH
):
342 CASE_FLT_FN (BUILT_IN_CBRT
):
343 CASE_FLT_FN (BUILT_IN_CPROJ
):
344 CASE_FLT_FN (BUILT_IN_CSIN
):
345 CASE_FLT_FN (BUILT_IN_CSINH
):
346 CASE_FLT_FN (BUILT_IN_CTAN
):
347 CASE_FLT_FN (BUILT_IN_CTANH
):
348 CASE_FLT_FN (BUILT_IN_ERF
):
349 CASE_FLT_FN (BUILT_IN_LLROUND
):
350 CASE_FLT_FN (BUILT_IN_LROUND
):
351 CASE_FLT_FN (BUILT_IN_ROUND
):
352 CASE_FLT_FN (BUILT_IN_SIN
):
353 CASE_FLT_FN (BUILT_IN_SINH
):
354 CASE_FLT_FN (BUILT_IN_TAN
):
355 CASE_FLT_FN (BUILT_IN_TANH
):
356 CASE_FLT_FN (BUILT_IN_TRUNC
):
359 CASE_FLT_FN (BUILT_IN_LLRINT
):
360 CASE_FLT_FN (BUILT_IN_LRINT
):
361 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
362 CASE_FLT_FN (BUILT_IN_RINT
):
363 return !flag_rounding_math
;
371 /* Check whether we may negate an integer constant T without causing
375 may_negate_without_overflow_p (const_tree t
)
377 unsigned HOST_WIDE_INT val
;
381 gcc_assert (TREE_CODE (t
) == INTEGER_CST
);
383 type
= TREE_TYPE (t
);
384 if (TYPE_UNSIGNED (type
))
387 prec
= TYPE_PRECISION (type
);
388 if (prec
> HOST_BITS_PER_WIDE_INT
)
390 if (TREE_INT_CST_LOW (t
) != 0)
392 prec
-= HOST_BITS_PER_WIDE_INT
;
393 val
= TREE_INT_CST_HIGH (t
);
396 val
= TREE_INT_CST_LOW (t
);
397 if (prec
< HOST_BITS_PER_WIDE_INT
)
398 val
&= ((unsigned HOST_WIDE_INT
) 1 << prec
) - 1;
399 return val
!= ((unsigned HOST_WIDE_INT
) 1 << (prec
- 1));
402 /* Determine whether an expression T can be cheaply negated using
403 the function negate_expr without introducing undefined overflow. */
406 negate_expr_p (tree t
)
413 type
= TREE_TYPE (t
);
416 switch (TREE_CODE (t
))
419 if (TYPE_OVERFLOW_WRAPS (type
))
422 /* Check that -CST will not overflow type. */
423 return may_negate_without_overflow_p (t
);
425 return (INTEGRAL_TYPE_P (type
)
426 && TYPE_OVERFLOW_WRAPS (type
));
433 /* We want to canonicalize to positive real constants. Pretend
434 that only negative ones can be easily negated. */
435 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
438 return negate_expr_p (TREE_REALPART (t
))
439 && negate_expr_p (TREE_IMAGPART (t
));
442 return negate_expr_p (TREE_OPERAND (t
, 0))
443 && negate_expr_p (TREE_OPERAND (t
, 1));
446 return negate_expr_p (TREE_OPERAND (t
, 0));
449 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
450 || HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
452 /* -(A + B) -> (-B) - A. */
453 if (negate_expr_p (TREE_OPERAND (t
, 1))
454 && reorder_operands_p (TREE_OPERAND (t
, 0),
455 TREE_OPERAND (t
, 1)))
457 /* -(A + B) -> (-A) - B. */
458 return negate_expr_p (TREE_OPERAND (t
, 0));
461 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
462 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
463 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
464 && reorder_operands_p (TREE_OPERAND (t
, 0),
465 TREE_OPERAND (t
, 1));
468 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
474 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t
))))
475 return negate_expr_p (TREE_OPERAND (t
, 1))
476 || negate_expr_p (TREE_OPERAND (t
, 0));
484 /* In general we can't negate A / B, because if A is INT_MIN and
485 B is 1, we may turn this into INT_MIN / -1 which is undefined
486 and actually traps on some architectures. But if overflow is
487 undefined, we can negate, because - (INT_MIN / 1) is an
489 if (INTEGRAL_TYPE_P (TREE_TYPE (t
))
490 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
)))
492 return negate_expr_p (TREE_OPERAND (t
, 1))
493 || negate_expr_p (TREE_OPERAND (t
, 0));
496 /* Negate -((double)float) as (double)(-float). */
497 if (TREE_CODE (type
) == REAL_TYPE
)
499 tree tem
= strip_float_extensions (t
);
501 return negate_expr_p (tem
);
506 /* Negate -f(x) as f(-x). */
507 if (negate_mathfn_p (builtin_mathfn_code (t
)))
508 return negate_expr_p (CALL_EXPR_ARG (t
, 0));
512 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
513 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
515 tree op1
= TREE_OPERAND (t
, 1);
516 if (TREE_INT_CST_HIGH (op1
) == 0
517 && (unsigned HOST_WIDE_INT
) (TYPE_PRECISION (type
) - 1)
518 == TREE_INT_CST_LOW (op1
))
529 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
530 simplification is possible.
531 If negate_expr_p would return true for T, NULL_TREE will never be
535 fold_negate_expr (location_t loc
, tree t
)
537 tree type
= TREE_TYPE (t
);
540 switch (TREE_CODE (t
))
542 /* Convert - (~A) to A + 1. */
544 if (INTEGRAL_TYPE_P (type
))
545 return fold_build2_loc (loc
, PLUS_EXPR
, type
, TREE_OPERAND (t
, 0),
546 build_int_cst (type
, 1));
550 tem
= fold_negate_const (t
, type
);
551 if (TREE_OVERFLOW (tem
) == TREE_OVERFLOW (t
)
552 || !TYPE_OVERFLOW_TRAPS (type
))
557 tem
= fold_negate_const (t
, type
);
558 /* Two's complement FP formats, such as c4x, may overflow. */
559 if (!TREE_OVERFLOW (tem
) || !flag_trapping_math
)
564 tem
= fold_negate_const (t
, type
);
569 tree rpart
= negate_expr (TREE_REALPART (t
));
570 tree ipart
= negate_expr (TREE_IMAGPART (t
));
572 if ((TREE_CODE (rpart
) == REAL_CST
573 && TREE_CODE (ipart
) == REAL_CST
)
574 || (TREE_CODE (rpart
) == INTEGER_CST
575 && TREE_CODE (ipart
) == INTEGER_CST
))
576 return build_complex (type
, rpart
, ipart
);
581 if (negate_expr_p (t
))
582 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
583 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)),
584 fold_negate_expr (loc
, TREE_OPERAND (t
, 1)));
588 if (negate_expr_p (t
))
589 return fold_build1_loc (loc
, CONJ_EXPR
, type
,
590 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)));
594 return TREE_OPERAND (t
, 0);
597 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
598 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
600 /* -(A + B) -> (-B) - A. */
601 if (negate_expr_p (TREE_OPERAND (t
, 1))
602 && reorder_operands_p (TREE_OPERAND (t
, 0),
603 TREE_OPERAND (t
, 1)))
605 tem
= negate_expr (TREE_OPERAND (t
, 1));
606 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
607 tem
, TREE_OPERAND (t
, 0));
610 /* -(A + B) -> (-A) - B. */
611 if (negate_expr_p (TREE_OPERAND (t
, 0)))
613 tem
= negate_expr (TREE_OPERAND (t
, 0));
614 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
615 tem
, TREE_OPERAND (t
, 1));
621 /* - (A - B) -> B - A */
622 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
623 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
624 && reorder_operands_p (TREE_OPERAND (t
, 0), TREE_OPERAND (t
, 1)))
625 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
626 TREE_OPERAND (t
, 1), TREE_OPERAND (t
, 0));
630 if (TYPE_UNSIGNED (type
))
636 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
)))
638 tem
= TREE_OPERAND (t
, 1);
639 if (negate_expr_p (tem
))
640 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
641 TREE_OPERAND (t
, 0), negate_expr (tem
));
642 tem
= TREE_OPERAND (t
, 0);
643 if (negate_expr_p (tem
))
644 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
645 negate_expr (tem
), TREE_OPERAND (t
, 1));
654 /* In general we can't negate A / B, because if A is INT_MIN and
655 B is 1, we may turn this into INT_MIN / -1 which is undefined
656 and actually traps on some architectures. But if overflow is
657 undefined, we can negate, because - (INT_MIN / 1) is an
659 if (!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
661 const char * const warnmsg
= G_("assuming signed overflow does not "
662 "occur when negating a division");
663 tem
= TREE_OPERAND (t
, 1);
664 if (negate_expr_p (tem
))
666 if (INTEGRAL_TYPE_P (type
)
667 && (TREE_CODE (tem
) != INTEGER_CST
668 || integer_onep (tem
)))
669 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MISC
);
670 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
671 TREE_OPERAND (t
, 0), negate_expr (tem
));
673 tem
= TREE_OPERAND (t
, 0);
674 if (negate_expr_p (tem
))
676 if (INTEGRAL_TYPE_P (type
)
677 && (TREE_CODE (tem
) != INTEGER_CST
678 || tree_int_cst_equal (tem
, TYPE_MIN_VALUE (type
))))
679 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MISC
);
680 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
681 negate_expr (tem
), TREE_OPERAND (t
, 1));
687 /* Convert -((double)float) into (double)(-float). */
688 if (TREE_CODE (type
) == REAL_TYPE
)
690 tem
= strip_float_extensions (t
);
691 if (tem
!= t
&& negate_expr_p (tem
))
692 return fold_convert_loc (loc
, type
, negate_expr (tem
));
697 /* Negate -f(x) as f(-x). */
698 if (negate_mathfn_p (builtin_mathfn_code (t
))
699 && negate_expr_p (CALL_EXPR_ARG (t
, 0)))
703 fndecl
= get_callee_fndecl (t
);
704 arg
= negate_expr (CALL_EXPR_ARG (t
, 0));
705 return build_call_expr_loc (loc
, fndecl
, 1, arg
);
710 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
711 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
713 tree op1
= TREE_OPERAND (t
, 1);
714 if (TREE_INT_CST_HIGH (op1
) == 0
715 && (unsigned HOST_WIDE_INT
) (TYPE_PRECISION (type
) - 1)
716 == TREE_INT_CST_LOW (op1
))
718 tree ntype
= TYPE_UNSIGNED (type
)
719 ? signed_type_for (type
)
720 : unsigned_type_for (type
);
721 tree temp
= fold_convert_loc (loc
, ntype
, TREE_OPERAND (t
, 0));
722 temp
= fold_build2_loc (loc
, RSHIFT_EXPR
, ntype
, temp
, op1
);
723 return fold_convert_loc (loc
, type
, temp
);
735 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
736 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
748 loc
= EXPR_LOCATION (t
);
749 type
= TREE_TYPE (t
);
752 tem
= fold_negate_expr (loc
, t
);
754 tem
= build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (t
), t
);
755 return fold_convert_loc (loc
, type
, tem
);
758 /* Split a tree IN into a constant, literal and variable parts that could be
759 combined with CODE to make IN. "constant" means an expression with
760 TREE_CONSTANT but that isn't an actual constant. CODE must be a
761 commutative arithmetic operation. Store the constant part into *CONP,
762 the literal in *LITP and return the variable part. If a part isn't
763 present, set it to null. If the tree does not decompose in this way,
764 return the entire tree as the variable part and the other parts as null.
766 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
767 case, we negate an operand that was subtracted. Except if it is a
768 literal for which we use *MINUS_LITP instead.
770 If NEGATE_P is true, we are negating all of IN, again except a literal
771 for which we use *MINUS_LITP instead.
773 If IN is itself a literal or constant, return it as appropriate.
775 Note that we do not guarantee that any of the three values will be the
776 same type as IN, but they will have the same signedness and mode. */
779 split_tree (tree in
, enum tree_code code
, tree
*conp
, tree
*litp
,
780 tree
*minus_litp
, int negate_p
)
788 /* Strip any conversions that don't change the machine mode or signedness. */
789 STRIP_SIGN_NOPS (in
);
791 if (TREE_CODE (in
) == INTEGER_CST
|| TREE_CODE (in
) == REAL_CST
792 || TREE_CODE (in
) == FIXED_CST
)
794 else if (TREE_CODE (in
) == code
795 || ((! FLOAT_TYPE_P (TREE_TYPE (in
)) || flag_associative_math
)
796 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in
))
797 /* We can associate addition and subtraction together (even
798 though the C standard doesn't say so) for integers because
799 the value is not affected. For reals, the value might be
800 affected, so we can't. */
801 && ((code
== PLUS_EXPR
&& TREE_CODE (in
) == MINUS_EXPR
)
802 || (code
== MINUS_EXPR
&& TREE_CODE (in
) == PLUS_EXPR
))))
804 tree op0
= TREE_OPERAND (in
, 0);
805 tree op1
= TREE_OPERAND (in
, 1);
806 int neg1_p
= TREE_CODE (in
) == MINUS_EXPR
;
807 int neg_litp_p
= 0, neg_conp_p
= 0, neg_var_p
= 0;
809 /* First see if either of the operands is a literal, then a constant. */
810 if (TREE_CODE (op0
) == INTEGER_CST
|| TREE_CODE (op0
) == REAL_CST
811 || TREE_CODE (op0
) == FIXED_CST
)
812 *litp
= op0
, op0
= 0;
813 else if (TREE_CODE (op1
) == INTEGER_CST
|| TREE_CODE (op1
) == REAL_CST
814 || TREE_CODE (op1
) == FIXED_CST
)
815 *litp
= op1
, neg_litp_p
= neg1_p
, op1
= 0;
817 if (op0
!= 0 && TREE_CONSTANT (op0
))
818 *conp
= op0
, op0
= 0;
819 else if (op1
!= 0 && TREE_CONSTANT (op1
))
820 *conp
= op1
, neg_conp_p
= neg1_p
, op1
= 0;
822 /* If we haven't dealt with either operand, this is not a case we can
823 decompose. Otherwise, VAR is either of the ones remaining, if any. */
824 if (op0
!= 0 && op1
!= 0)
829 var
= op1
, neg_var_p
= neg1_p
;
831 /* Now do any needed negations. */
833 *minus_litp
= *litp
, *litp
= 0;
835 *conp
= negate_expr (*conp
);
837 var
= negate_expr (var
);
839 else if (TREE_CONSTANT (in
))
847 *minus_litp
= *litp
, *litp
= 0;
848 else if (*minus_litp
)
849 *litp
= *minus_litp
, *minus_litp
= 0;
850 *conp
= negate_expr (*conp
);
851 var
= negate_expr (var
);
857 /* Re-associate trees split by the above function. T1 and T2 are
858 either expressions to associate or null. Return the new
859 expression, if any. LOC is the location of the new expression. If
860 we build an operation, do it in TYPE and with CODE. */
863 associate_trees (location_t loc
, tree t1
, tree t2
, enum tree_code code
, tree type
)
870 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
871 try to fold this since we will have infinite recursion. But do
872 deal with any NEGATE_EXPRs. */
873 if (TREE_CODE (t1
) == code
|| TREE_CODE (t2
) == code
874 || TREE_CODE (t1
) == MINUS_EXPR
|| TREE_CODE (t2
) == MINUS_EXPR
)
876 if (code
== PLUS_EXPR
)
878 if (TREE_CODE (t1
) == NEGATE_EXPR
)
879 return build2_loc (loc
, MINUS_EXPR
, type
,
880 fold_convert_loc (loc
, type
, t2
),
881 fold_convert_loc (loc
, type
,
882 TREE_OPERAND (t1
, 0)));
883 else if (TREE_CODE (t2
) == NEGATE_EXPR
)
884 return build2_loc (loc
, MINUS_EXPR
, type
,
885 fold_convert_loc (loc
, type
, t1
),
886 fold_convert_loc (loc
, type
,
887 TREE_OPERAND (t2
, 0)));
888 else if (integer_zerop (t2
))
889 return fold_convert_loc (loc
, type
, t1
);
891 else if (code
== MINUS_EXPR
)
893 if (integer_zerop (t2
))
894 return fold_convert_loc (loc
, type
, t1
);
897 return build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
898 fold_convert_loc (loc
, type
, t2
));
901 return fold_build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
902 fold_convert_loc (loc
, type
, t2
));
905 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
906 for use in int_const_binop, size_binop and size_diffop. */
909 int_binop_types_match_p (enum tree_code code
, const_tree type1
, const_tree type2
)
911 if (TREE_CODE (type1
) != INTEGER_TYPE
&& !POINTER_TYPE_P (type1
))
913 if (TREE_CODE (type2
) != INTEGER_TYPE
&& !POINTER_TYPE_P (type2
))
928 return TYPE_UNSIGNED (type1
) == TYPE_UNSIGNED (type2
)
929 && TYPE_PRECISION (type1
) == TYPE_PRECISION (type2
)
930 && TYPE_MODE (type1
) == TYPE_MODE (type2
);
934 /* Combine two integer constants ARG1 and ARG2 under operation CODE
935 to produce a new constant. Return NULL_TREE if we don't know how
936 to evaluate CODE at compile-time. */
939 int_const_binop (enum tree_code code
, const_tree arg1
, const_tree arg2
)
941 double_int op1
, op2
, res
, tmp
;
943 tree type
= TREE_TYPE (arg1
);
944 bool uns
= TYPE_UNSIGNED (type
);
946 = (TREE_CODE (type
) == INTEGER_TYPE
&& TYPE_IS_SIZETYPE (type
));
947 bool overflow
= false;
949 op1
= tree_to_double_int (arg1
);
950 op2
= tree_to_double_int (arg2
);
955 res
= double_int_ior (op1
, op2
);
959 res
= double_int_xor (op1
, op2
);
963 res
= double_int_and (op1
, op2
);
967 res
= double_int_rshift (op1
, double_int_to_shwi (op2
),
968 TYPE_PRECISION (type
), !uns
);
972 /* It's unclear from the C standard whether shifts can overflow.
973 The following code ignores overflow; perhaps a C standard
974 interpretation ruling is needed. */
975 res
= double_int_lshift (op1
, double_int_to_shwi (op2
),
976 TYPE_PRECISION (type
), !uns
);
980 res
= double_int_rrotate (op1
, double_int_to_shwi (op2
),
981 TYPE_PRECISION (type
));
985 res
= double_int_lrotate (op1
, double_int_to_shwi (op2
),
986 TYPE_PRECISION (type
));
990 overflow
= add_double (op1
.low
, op1
.high
, op2
.low
, op2
.high
,
991 &res
.low
, &res
.high
);
995 neg_double (op2
.low
, op2
.high
, &res
.low
, &res
.high
);
996 add_double (op1
.low
, op1
.high
, res
.low
, res
.high
,
997 &res
.low
, &res
.high
);
998 overflow
= OVERFLOW_SUM_SIGN (res
.high
, op2
.high
, op1
.high
);
1002 overflow
= mul_double (op1
.low
, op1
.high
, op2
.low
, op2
.high
,
1003 &res
.low
, &res
.high
);
1006 case TRUNC_DIV_EXPR
:
1007 case FLOOR_DIV_EXPR
: case CEIL_DIV_EXPR
:
1008 case EXACT_DIV_EXPR
:
1009 /* This is a shortcut for a common special case. */
1010 if (op2
.high
== 0 && (HOST_WIDE_INT
) op2
.low
> 0
1011 && !TREE_OVERFLOW (arg1
)
1012 && !TREE_OVERFLOW (arg2
)
1013 && op1
.high
== 0 && (HOST_WIDE_INT
) op1
.low
>= 0)
1015 if (code
== CEIL_DIV_EXPR
)
1016 op1
.low
+= op2
.low
- 1;
1018 res
.low
= op1
.low
/ op2
.low
, res
.high
= 0;
1022 /* ... fall through ... */
1024 case ROUND_DIV_EXPR
:
1025 if (double_int_zero_p (op2
))
1027 if (double_int_one_p (op2
))
1032 if (double_int_equal_p (op1
, op2
)
1033 && ! double_int_zero_p (op1
))
1035 res
= double_int_one
;
1038 overflow
= div_and_round_double (code
, uns
,
1039 op1
.low
, op1
.high
, op2
.low
, op2
.high
,
1040 &res
.low
, &res
.high
,
1041 &tmp
.low
, &tmp
.high
);
1044 case TRUNC_MOD_EXPR
:
1045 case FLOOR_MOD_EXPR
: case CEIL_MOD_EXPR
:
1046 /* This is a shortcut for a common special case. */
1047 if (op2
.high
== 0 && (HOST_WIDE_INT
) op2
.low
> 0
1048 && !TREE_OVERFLOW (arg1
)
1049 && !TREE_OVERFLOW (arg2
)
1050 && op1
.high
== 0 && (HOST_WIDE_INT
) op1
.low
>= 0)
1052 if (code
== CEIL_MOD_EXPR
)
1053 op1
.low
+= op2
.low
- 1;
1054 res
.low
= op1
.low
% op2
.low
, res
.high
= 0;
1058 /* ... fall through ... */
1060 case ROUND_MOD_EXPR
:
1061 if (double_int_zero_p (op2
))
1063 overflow
= div_and_round_double (code
, uns
,
1064 op1
.low
, op1
.high
, op2
.low
, op2
.high
,
1065 &tmp
.low
, &tmp
.high
,
1066 &res
.low
, &res
.high
);
1070 res
= double_int_min (op1
, op2
, uns
);
1074 res
= double_int_max (op1
, op2
, uns
);
1081 t
= force_fit_type_double (TREE_TYPE (arg1
), res
, 1,
1082 ((!uns
|| is_sizetype
) && overflow
)
1083 | TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
));
1088 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1089 constant. We assume ARG1 and ARG2 have the same data type, or at least
1090 are the same kind of constant and the same machine mode. Return zero if
1091 combining the constants is not allowed in the current operating mode. */
1094 const_binop (enum tree_code code
, tree arg1
, tree arg2
)
1096 /* Sanity check for the recursive cases. */
1103 if (TREE_CODE (arg1
) == INTEGER_CST
)
1104 return int_const_binop (code
, arg1
, arg2
);
1106 if (TREE_CODE (arg1
) == REAL_CST
)
1108 enum machine_mode mode
;
1111 REAL_VALUE_TYPE value
;
1112 REAL_VALUE_TYPE result
;
1116 /* The following codes are handled by real_arithmetic. */
1131 d1
= TREE_REAL_CST (arg1
);
1132 d2
= TREE_REAL_CST (arg2
);
1134 type
= TREE_TYPE (arg1
);
1135 mode
= TYPE_MODE (type
);
1137 /* Don't perform operation if we honor signaling NaNs and
1138 either operand is a NaN. */
1139 if (HONOR_SNANS (mode
)
1140 && (REAL_VALUE_ISNAN (d1
) || REAL_VALUE_ISNAN (d2
)))
1143 /* Don't perform operation if it would raise a division
1144 by zero exception. */
1145 if (code
== RDIV_EXPR
1146 && REAL_VALUES_EQUAL (d2
, dconst0
)
1147 && (flag_trapping_math
|| ! MODE_HAS_INFINITIES (mode
)))
1150 /* If either operand is a NaN, just return it. Otherwise, set up
1151 for floating-point trap; we return an overflow. */
1152 if (REAL_VALUE_ISNAN (d1
))
1154 else if (REAL_VALUE_ISNAN (d2
))
1157 inexact
= real_arithmetic (&value
, code
, &d1
, &d2
);
1158 real_convert (&result
, mode
, &value
);
1160 /* Don't constant fold this floating point operation if
1161 the result has overflowed and flag_trapping_math. */
1162 if (flag_trapping_math
1163 && MODE_HAS_INFINITIES (mode
)
1164 && REAL_VALUE_ISINF (result
)
1165 && !REAL_VALUE_ISINF (d1
)
1166 && !REAL_VALUE_ISINF (d2
))
1169 /* Don't constant fold this floating point operation if the
1170 result may dependent upon the run-time rounding mode and
1171 flag_rounding_math is set, or if GCC's software emulation
1172 is unable to accurately represent the result. */
1173 if ((flag_rounding_math
1174 || (MODE_COMPOSITE_P (mode
) && !flag_unsafe_math_optimizations
))
1175 && (inexact
|| !real_identical (&result
, &value
)))
1178 t
= build_real (type
, result
);
1180 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
);
1184 if (TREE_CODE (arg1
) == FIXED_CST
)
1186 FIXED_VALUE_TYPE f1
;
1187 FIXED_VALUE_TYPE f2
;
1188 FIXED_VALUE_TYPE result
;
1193 /* The following codes are handled by fixed_arithmetic. */
1199 case TRUNC_DIV_EXPR
:
1200 f2
= TREE_FIXED_CST (arg2
);
1205 f2
.data
.high
= TREE_INT_CST_HIGH (arg2
);
1206 f2
.data
.low
= TREE_INT_CST_LOW (arg2
);
1214 f1
= TREE_FIXED_CST (arg1
);
1215 type
= TREE_TYPE (arg1
);
1216 sat_p
= TYPE_SATURATING (type
);
1217 overflow_p
= fixed_arithmetic (&result
, code
, &f1
, &f2
, sat_p
);
1218 t
= build_fixed (type
, result
);
1219 /* Propagate overflow flags. */
1220 if (overflow_p
| TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
))
1221 TREE_OVERFLOW (t
) = 1;
1225 if (TREE_CODE (arg1
) == COMPLEX_CST
)
1227 tree type
= TREE_TYPE (arg1
);
1228 tree r1
= TREE_REALPART (arg1
);
1229 tree i1
= TREE_IMAGPART (arg1
);
1230 tree r2
= TREE_REALPART (arg2
);
1231 tree i2
= TREE_IMAGPART (arg2
);
1238 real
= const_binop (code
, r1
, r2
);
1239 imag
= const_binop (code
, i1
, i2
);
1243 if (COMPLEX_FLOAT_TYPE_P (type
))
1244 return do_mpc_arg2 (arg1
, arg2
, type
,
1245 /* do_nonfinite= */ folding_initializer
,
1248 real
= const_binop (MINUS_EXPR
,
1249 const_binop (MULT_EXPR
, r1
, r2
),
1250 const_binop (MULT_EXPR
, i1
, i2
));
1251 imag
= const_binop (PLUS_EXPR
,
1252 const_binop (MULT_EXPR
, r1
, i2
),
1253 const_binop (MULT_EXPR
, i1
, r2
));
1257 if (COMPLEX_FLOAT_TYPE_P (type
))
1258 return do_mpc_arg2 (arg1
, arg2
, type
,
1259 /* do_nonfinite= */ folding_initializer
,
1262 case TRUNC_DIV_EXPR
:
1264 case FLOOR_DIV_EXPR
:
1265 case ROUND_DIV_EXPR
:
1266 if (flag_complex_method
== 0)
1268 /* Keep this algorithm in sync with
1269 tree-complex.c:expand_complex_div_straight().
1271 Expand complex division to scalars, straightforward algorithm.
1272 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1276 = const_binop (PLUS_EXPR
,
1277 const_binop (MULT_EXPR
, r2
, r2
),
1278 const_binop (MULT_EXPR
, i2
, i2
));
1280 = const_binop (PLUS_EXPR
,
1281 const_binop (MULT_EXPR
, r1
, r2
),
1282 const_binop (MULT_EXPR
, i1
, i2
));
1284 = const_binop (MINUS_EXPR
,
1285 const_binop (MULT_EXPR
, i1
, r2
),
1286 const_binop (MULT_EXPR
, r1
, i2
));
1288 real
= const_binop (code
, t1
, magsquared
);
1289 imag
= const_binop (code
, t2
, magsquared
);
1293 /* Keep this algorithm in sync with
1294 tree-complex.c:expand_complex_div_wide().
1296 Expand complex division to scalars, modified algorithm to minimize
1297 overflow with wide input ranges. */
1298 tree compare
= fold_build2 (LT_EXPR
, boolean_type_node
,
1299 fold_abs_const (r2
, TREE_TYPE (type
)),
1300 fold_abs_const (i2
, TREE_TYPE (type
)));
1302 if (integer_nonzerop (compare
))
1304 /* In the TRUE branch, we compute
1306 div = (br * ratio) + bi;
1307 tr = (ar * ratio) + ai;
1308 ti = (ai * ratio) - ar;
1311 tree ratio
= const_binop (code
, r2
, i2
);
1312 tree div
= const_binop (PLUS_EXPR
, i2
,
1313 const_binop (MULT_EXPR
, r2
, ratio
));
1314 real
= const_binop (MULT_EXPR
, r1
, ratio
);
1315 real
= const_binop (PLUS_EXPR
, real
, i1
);
1316 real
= const_binop (code
, real
, div
);
1318 imag
= const_binop (MULT_EXPR
, i1
, ratio
);
1319 imag
= const_binop (MINUS_EXPR
, imag
, r1
);
1320 imag
= const_binop (code
, imag
, div
);
1324 /* In the FALSE branch, we compute
1326 divisor = (d * ratio) + c;
1327 tr = (b * ratio) + a;
1328 ti = b - (a * ratio);
1331 tree ratio
= const_binop (code
, i2
, r2
);
1332 tree div
= const_binop (PLUS_EXPR
, r2
,
1333 const_binop (MULT_EXPR
, i2
, ratio
));
1335 real
= const_binop (MULT_EXPR
, i1
, ratio
);
1336 real
= const_binop (PLUS_EXPR
, real
, r1
);
1337 real
= const_binop (code
, real
, div
);
1339 imag
= const_binop (MULT_EXPR
, r1
, ratio
);
1340 imag
= const_binop (MINUS_EXPR
, i1
, imag
);
1341 imag
= const_binop (code
, imag
, div
);
1351 return build_complex (type
, real
, imag
);
1354 if (TREE_CODE (arg1
) == VECTOR_CST
)
1356 tree type
= TREE_TYPE(arg1
);
1357 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
1358 tree elements1
, elements2
, list
= NULL_TREE
;
1360 if(TREE_CODE(arg2
) != VECTOR_CST
)
1363 elements1
= TREE_VECTOR_CST_ELTS (arg1
);
1364 elements2
= TREE_VECTOR_CST_ELTS (arg2
);
1366 for (i
= 0; i
< count
; i
++)
1368 tree elem1
, elem2
, elem
;
1370 /* The trailing elements can be empty and should be treated as 0 */
1372 elem1
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), integer_zero_node
);
1375 elem1
= TREE_VALUE(elements1
);
1376 elements1
= TREE_CHAIN (elements1
);
1380 elem2
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), integer_zero_node
);
1383 elem2
= TREE_VALUE(elements2
);
1384 elements2
= TREE_CHAIN (elements2
);
1387 elem
= const_binop (code
, elem1
, elem2
);
1389 /* It is possible that const_binop cannot handle the given
1390 code and return NULL_TREE */
1391 if(elem
== NULL_TREE
)
1394 list
= tree_cons (NULL_TREE
, elem
, list
);
1396 return build_vector(type
, nreverse(list
));
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  /* KIND indexes the global table of sizetype variants.  */
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  The identity
	 checks also require !TREE_OVERFLOW so the overflow flag is not
	 silently dropped from the returned operand.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1);
    }

  /* Non-constant operands: build (and possibly fold) the expression.  */
  return fold_build2_loc (loc, code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				      TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  /* Pick the signed counterpart of the operands' (unsigned) type.  */
  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  The overflow flag is set
     when a negative value converts to a "more unsigned" type, and the
     source's overflow flag is propagated.  Pointers are never treated
     as signed for the extension.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
			     !POINTER_TYPE_P (TREE_TYPE (arg1)),
			     (TREE_INT_CST_HIGH (arg1) < 0
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type,
				  const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      /* Truncate toward zero; the only conversion handled here.  */
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound, saturating at the respective bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = 1;
	  val = tree_to_double_int (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = 1;
	      val = tree_to_double_int (ut);
	    }
	}
    }

  /* In range: do the actual conversion.  */
  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  enum machine_mode mode;  /* NOTE(review): declaration reconstructed --
			      confirm exact type against upstream.  */

  /* Right shift FIXED_CST to temp by fbit, dropping the fractional
     bits.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
    {
      temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit, so we can compare the
	 truncated value against the original to detect a fraction.  */
      temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
				      HOST_BITS_PER_DOUBLE_INT,
				      SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      /* All bits are fractional: the integer part is zero.  */
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     By checking if the fractional bits are not zero to add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && double_int_negative_p (temp_trunc)
      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
    temp = double_int_add (temp, double_int_one);

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
			     (double_int_negative_p (temp)
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    /* Otherwise just propagate the source's overflow flag.  */
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  /* Fixed-to-float cannot overflow on its own; just propagate.  */
  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  /* fixed_convert reports whether the conversion overflowed, honoring
     the destination type's saturating semantics.  */
  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
			      TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
				       TREE_INT_CST (arg1),
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  /* Identity conversion needs no work.  */
  if (TREE_TYPE (arg1) == type)
    return arg1;

  /* Dispatch on the (destination type, source constant kind) pair to
     the matching helper.  Unhandled pairs fall through to NULL_TREE.  */
  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  /* Convert integer zero to the vector's element type, then splat it.  */
  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return true;
      /* A vector source is convertible only when the total sizes match.  */
      return (TREE_CODE (orig) == VECTOR_TYPE
	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      /* These only NOP-convert to the same kind of type.  */
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* FALLTHRU: same-address-space pointers convert like integers.  */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	/* Complex -> scalar keeps only the real part.  */
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, REALPART_EXPR,
						  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == FIXED_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1_loc (loc, NOP_EXPR, type, arg);

	case FIXED_POINT_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
	  || TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    goto fold_convert_exit;
	}

      switch (TREE_CODE (orig))
	{
	case FIXED_POINT_TYPE:
	case INTEGER_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case REAL_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	case FIXED_POINT_TYPE:
	  /* Scalar -> complex: convert to the element type and pair it
	     with a zero imaginary part.  */
	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
			      fold_convert_loc (loc, TREE_TYPE (type), arg),
			      fold_convert_loc (loc, TREE_TYPE (type),
					    integer_zero_node));
	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert_loc (loc, TREE_TYPE (type),
				      TREE_OPERAND (arg, 0));
		ipart = fold_convert_loc (loc, TREE_TYPE (type),
				      TREE_OPERAND (arg, 1));
		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	      }

	    /* ARG is evaluated twice (real and imaginary part), so wrap
	       it in a SAVE_EXPR to evaluate it only once.  */
	    arg = save_expr (arg);
	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      /* Keep only side effects of ARG; the value is discarded.  */
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      gcc_unreachable ();
    }
 fold_convert_exit:
  return protected_set_expr_location_unshare (tem, loc);
}
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
  {
  case VAR_DECL:
  case PARM_DECL:
  case RESULT_DECL:
  case LABEL_DECL:
  case FUNCTION_DECL:
  case SSA_NAME:

  case COMPONENT_REF:
  case MEM_REF:
  case INDIRECT_REF:
  case ARRAY_REF:
  case ARRAY_RANGE_REF:
  case BIT_FIELD_REF:
  case OBJ_TYPE_REF:

  case REALPART_EXPR:
  case IMAGPART_EXPR:
  case PREINCREMENT_EXPR:
  case PREDECREMENT_EXPR:
  case SAVE_EXPR:
  case TRY_CATCH_EXPR:
  case WITH_CLEANUP_EXPR:
  case COMPOUND_EXPR:
  case MODIFY_EXPR:
  case TARGET_EXPR:
  case COND_EXPR:
  case BIND_EXPR:
    break;

  default:
    /* Assume the worst for front-end tree codes.  */
    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
      break;
    return false;
  }

  return true;
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue_loc (location_t loc, tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue_loc (location_t loc, tree x)
{
  if (pedantic_lvalues)
    return non_lvalue_loc (loc, x);

  return protected_set_expr_location_unshare (x, loc);
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  /* With trapping math, inverting an ordered comparison into an
     unordered one would lose the trap on NaN operands.  */
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    /* Symmetric comparisons are unchanged by swapping.  */
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (location_t loc,
		     enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  int compcode;

  /* In the compcode bit encoding, AND/OR of the predicates is simply
     the bitwise AND/OR of their encodings.  */
  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  An ordered comparison (other than EQ/NE,
	 which never trap, and ORD) traps when an operand is a NaN.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}
2359 /* Return nonzero if two operands (typically of the same tree node)
2360 are necessarily equal. If either argument has side-effects this
2361 function returns zero. FLAGS modifies behavior as follows:
2363 If OEP_ONLY_CONST is set, only return nonzero for constants.
2364 This function tests whether the operands are indistinguishable;
2365 it does not test whether they are equal using C's == operation.
2366 The distinction is important for IEEE floating point, because
2367 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2368 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2370 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2371 even though it may hold multiple values during a function.
2372 This is because a GCC tree node guarantees that nothing else is
2373 executed between the evaluation of its "operands" (which may often
2374 be evaluated in arbitrary order). Hence if the operands themselves
2375 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2376 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2377 unset means assuming isochronic (or instantaneous) tree equivalence.
2378 Unless comparing arbitrary expression trees, such as from different
2379 statements, this flag can usually be left unset.
2381 If OEP_PURE_SAME is set, then pure functions with identical arguments
2382 are considered the same. It is used when the caller has other ways
2383 to ensure that global memory is unchanged in between. */
2386 operand_equal_p (const_tree arg0
, const_tree arg1
, unsigned int flags
)
2388 /* If either is ERROR_MARK, they aren't equal. */
2389 if (TREE_CODE (arg0
) == ERROR_MARK
|| TREE_CODE (arg1
) == ERROR_MARK
2390 || TREE_TYPE (arg0
) == error_mark_node
2391 || TREE_TYPE (arg1
) == error_mark_node
)
2394 /* Similar, if either does not have a type (like a released SSA name),
2395 they aren't equal. */
2396 if (!TREE_TYPE (arg0
) || !TREE_TYPE (arg1
))
2399 /* Check equality of integer constants before bailing out due to
2400 precision differences. */
2401 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
2402 return tree_int_cst_equal (arg0
, arg1
);
2404 /* If both types don't have the same signedness, then we can't consider
2405 them equal. We must check this before the STRIP_NOPS calls
2406 because they may change the signedness of the arguments. As pointers
2407 strictly don't have a signedness, require either two pointers or
2408 two non-pointers as well. */
2409 if (TYPE_UNSIGNED (TREE_TYPE (arg0
)) != TYPE_UNSIGNED (TREE_TYPE (arg1
))
2410 || POINTER_TYPE_P (TREE_TYPE (arg0
)) != POINTER_TYPE_P (TREE_TYPE (arg1
)))
2413 /* We cannot consider pointers to different address space equal. */
2414 if (POINTER_TYPE_P (TREE_TYPE (arg0
)) && POINTER_TYPE_P (TREE_TYPE (arg1
))
2415 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0
)))
2416 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1
)))))
2419 /* If both types don't have the same precision, then it is not safe
2421 if (TYPE_PRECISION (TREE_TYPE (arg0
)) != TYPE_PRECISION (TREE_TYPE (arg1
)))
2427 /* In case both args are comparisons but with different comparison
2428 code, try to swap the comparison operands of one arg to produce
2429 a match and compare that variant. */
2430 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2431 && COMPARISON_CLASS_P (arg0
)
2432 && COMPARISON_CLASS_P (arg1
))
2434 enum tree_code swap_code
= swap_tree_comparison (TREE_CODE (arg1
));
2436 if (TREE_CODE (arg0
) == swap_code
)
2437 return operand_equal_p (TREE_OPERAND (arg0
, 0),
2438 TREE_OPERAND (arg1
, 1), flags
)
2439 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2440 TREE_OPERAND (arg1
, 0), flags
);
2443 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2444 /* This is needed for conversions and for COMPONENT_REF.
2445 Might as well play it safe and always test this. */
2446 || TREE_CODE (TREE_TYPE (arg0
)) == ERROR_MARK
2447 || TREE_CODE (TREE_TYPE (arg1
)) == ERROR_MARK
2448 || TYPE_MODE (TREE_TYPE (arg0
)) != TYPE_MODE (TREE_TYPE (arg1
)))
2451 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2452 We don't care about side effects in that case because the SAVE_EXPR
2453 takes care of that for us. In all other cases, two expressions are
2454 equal if they have no side effects. If we have two identical
2455 expressions with side effects that should be treated the same due
2456 to the only side effects being identical SAVE_EXPR's, that will
2457 be detected in the recursive calls below.
2458 If we are taking an invariant address of two identical objects
2459 they are necessarily equal as well. */
2460 if (arg0
== arg1
&& ! (flags
& OEP_ONLY_CONST
)
2461 && (TREE_CODE (arg0
) == SAVE_EXPR
2462 || (flags
& OEP_CONSTANT_ADDRESS_OF
)
2463 || (! TREE_SIDE_EFFECTS (arg0
) && ! TREE_SIDE_EFFECTS (arg1
))))
2466 /* Next handle constant cases, those for which we can return 1 even
2467 if ONLY_CONST is set. */
2468 if (TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
))
2469 switch (TREE_CODE (arg0
))
2472 return tree_int_cst_equal (arg0
, arg1
);
2475 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0
),
2476 TREE_FIXED_CST (arg1
));
2479 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0
),
2480 TREE_REAL_CST (arg1
)))
2484 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
))))
2486 /* If we do not distinguish between signed and unsigned zero,
2487 consider them equal. */
2488 if (real_zerop (arg0
) && real_zerop (arg1
))
2497 v1
= TREE_VECTOR_CST_ELTS (arg0
);
2498 v2
= TREE_VECTOR_CST_ELTS (arg1
);
2501 if (!operand_equal_p (TREE_VALUE (v1
), TREE_VALUE (v2
),
2504 v1
= TREE_CHAIN (v1
);
2505 v2
= TREE_CHAIN (v2
);
2512 return (operand_equal_p (TREE_REALPART (arg0
), TREE_REALPART (arg1
),
2514 && operand_equal_p (TREE_IMAGPART (arg0
), TREE_IMAGPART (arg1
),
2518 return (TREE_STRING_LENGTH (arg0
) == TREE_STRING_LENGTH (arg1
)
2519 && ! memcmp (TREE_STRING_POINTER (arg0
),
2520 TREE_STRING_POINTER (arg1
),
2521 TREE_STRING_LENGTH (arg0
)));
2524 return operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0),
2525 TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
)
2526 ? OEP_CONSTANT_ADDRESS_OF
: 0);
2531 if (flags
& OEP_ONLY_CONST
)
2534 /* Define macros to test an operand from arg0 and arg1 for equality and a
2535 variant that allows null and views null as being different from any
2536 non-null value. In the latter case, if either is null, the both
2537 must be; otherwise, do the normal comparison. */
2538 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2539 TREE_OPERAND (arg1, N), flags)
2541 #define OP_SAME_WITH_NULL(N) \
2542 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2543 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2545 switch (TREE_CODE_CLASS (TREE_CODE (arg0
)))
2548 /* Two conversions are equal only if signedness and modes match. */
2549 switch (TREE_CODE (arg0
))
2552 case FIX_TRUNC_EXPR
:
2553 if (TYPE_UNSIGNED (TREE_TYPE (arg0
))
2554 != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
2564 case tcc_comparison
:
2566 if (OP_SAME (0) && OP_SAME (1))
2569 /* For commutative ops, allow the other order. */
2570 return (commutative_tree_code (TREE_CODE (arg0
))
2571 && operand_equal_p (TREE_OPERAND (arg0
, 0),
2572 TREE_OPERAND (arg1
, 1), flags
)
2573 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2574 TREE_OPERAND (arg1
, 0), flags
));
2577 /* If either of the pointer (or reference) expressions we are
2578 dereferencing contain a side effect, these cannot be equal. */
2579 if (TREE_SIDE_EFFECTS (arg0
)
2580 || TREE_SIDE_EFFECTS (arg1
))
2583 switch (TREE_CODE (arg0
))
2591 /* Require equal access sizes, and similar pointer types.
2592 We can have incomplete types for array references of
2593 variable-sized arrays from the Fortran frontend
2595 return ((TYPE_SIZE (TREE_TYPE (arg0
)) == TYPE_SIZE (TREE_TYPE (arg1
))
2596 || (TYPE_SIZE (TREE_TYPE (arg0
))
2597 && TYPE_SIZE (TREE_TYPE (arg1
))
2598 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0
)),
2599 TYPE_SIZE (TREE_TYPE (arg1
)), flags
)))
2600 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0
, 1)))
2601 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1
, 1))))
2602 && OP_SAME (0) && OP_SAME (1));
2605 case ARRAY_RANGE_REF
:
2606 /* Operands 2 and 3 may be null.
2607 Compare the array index by value if it is constant first as we
2608 may have different types but same value here. */
2610 && (tree_int_cst_equal (TREE_OPERAND (arg0
, 1),
2611 TREE_OPERAND (arg1
, 1))
2613 && OP_SAME_WITH_NULL (2)
2614 && OP_SAME_WITH_NULL (3));
2617 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2618 may be NULL when we're called to compare MEM_EXPRs. */
2619 return OP_SAME_WITH_NULL (0)
2621 && OP_SAME_WITH_NULL (2);
2624 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2630 case tcc_expression
:
2631 switch (TREE_CODE (arg0
))
2634 case TRUTH_NOT_EXPR
:
2637 case TRUTH_ANDIF_EXPR
:
2638 case TRUTH_ORIF_EXPR
:
2639 return OP_SAME (0) && OP_SAME (1);
2642 case WIDEN_MULT_PLUS_EXPR
:
2643 case WIDEN_MULT_MINUS_EXPR
:
2646 /* The multiplication operands are commutative. */
2649 case TRUTH_AND_EXPR
:
2651 case TRUTH_XOR_EXPR
:
2652 if (OP_SAME (0) && OP_SAME (1))
2655 /* Otherwise take into account this is a commutative operation. */
2656 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
2657 TREE_OPERAND (arg1
, 1), flags
)
2658 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2659 TREE_OPERAND (arg1
, 0), flags
));
2664 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2671 switch (TREE_CODE (arg0
))
2674 /* If the CALL_EXPRs call different functions, then they
2675 clearly can not be equal. */
2676 if (! operand_equal_p (CALL_EXPR_FN (arg0
), CALL_EXPR_FN (arg1
),
2681 unsigned int cef
= call_expr_flags (arg0
);
2682 if (flags
& OEP_PURE_SAME
)
2683 cef
&= ECF_CONST
| ECF_PURE
;
2690 /* Now see if all the arguments are the same. */
2692 const_call_expr_arg_iterator iter0
, iter1
;
2694 for (a0
= first_const_call_expr_arg (arg0
, &iter0
),
2695 a1
= first_const_call_expr_arg (arg1
, &iter1
);
2697 a0
= next_const_call_expr_arg (&iter0
),
2698 a1
= next_const_call_expr_arg (&iter1
))
2699 if (! operand_equal_p (a0
, a1
, flags
))
2702 /* If we get here and both argument lists are exhausted
2703 then the CALL_EXPRs are equal. */
2704 return ! (a0
|| a1
);
2710 case tcc_declaration
:
2711 /* Consider __builtin_sqrt equal to sqrt. */
2712 return (TREE_CODE (arg0
) == FUNCTION_DECL
2713 && DECL_BUILT_IN (arg0
) && DECL_BUILT_IN (arg1
)
2714 && DECL_BUILT_IN_CLASS (arg0
) == DECL_BUILT_IN_CLASS (arg1
)
2715 && DECL_FUNCTION_CODE (arg0
) == DECL_FUNCTION_CODE (arg1
));
2722 #undef OP_SAME_WITH_NULL
2725 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2726 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2728 When in doubt, return 0. */
2731 operand_equal_for_comparison_p (tree arg0
, tree arg1
, tree other
)
2733 int unsignedp1
, unsignedpo
;
2734 tree primarg0
, primarg1
, primother
;
2735 unsigned int correct_width
;
2737 if (operand_equal_p (arg0
, arg1
, 0))
2740 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
2741 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
2744 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2745 and see if the inner values are the same. This removes any
2746 signedness comparison, which doesn't matter here. */
2747 primarg0
= arg0
, primarg1
= arg1
;
2748 STRIP_NOPS (primarg0
);
2749 STRIP_NOPS (primarg1
);
2750 if (operand_equal_p (primarg0
, primarg1
, 0))
2753 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2754 actual comparison operand, ARG0.
2756 First throw away any conversions to wider types
2757 already present in the operands. */
2759 primarg1
= get_narrower (arg1
, &unsignedp1
);
2760 primother
= get_narrower (other
, &unsignedpo
);
2762 correct_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
2763 if (unsignedp1
== unsignedpo
2764 && TYPE_PRECISION (TREE_TYPE (primarg1
)) < correct_width
2765 && TYPE_PRECISION (TREE_TYPE (primother
)) < correct_width
)
2767 tree type
= TREE_TYPE (arg0
);
2769 /* Make sure shorter operand is extended the right way
2770 to match the longer operand. */
2771 primarg1
= fold_convert (signed_or_unsigned_type_for
2772 (unsignedp1
, TREE_TYPE (primarg1
)), primarg1
);
2774 if (operand_equal_p (arg0
, fold_convert (type
, primarg1
), 0))
2781 /* See if ARG is an expression that is either a comparison or is performing
2782 arithmetic on comparisons. The comparisons must only be comparing
2783 two different values, which will be stored in *CVAL1 and *CVAL2; if
2784 they are nonzero it means that some operands have already been found.
2785 No variables may be used anywhere else in the expression except in the
2786 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2787 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2789 If this is true, return 1. Otherwise, return zero. */
2792 twoval_comparison_p (tree arg
, tree
*cval1
, tree
*cval2
, int *save_p
)
2794 enum tree_code code
= TREE_CODE (arg
);
2795 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
2797 /* We can handle some of the tcc_expression cases here. */
2798 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
2800 else if (tclass
== tcc_expression
2801 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
2802 || code
== COMPOUND_EXPR
))
2803 tclass
= tcc_binary
;
2805 else if (tclass
== tcc_expression
&& code
== SAVE_EXPR
2806 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg
, 0)))
2808 /* If we've already found a CVAL1 or CVAL2, this expression is
2809 two complex to handle. */
2810 if (*cval1
|| *cval2
)
2820 return twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
);
2823 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
)
2824 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
2825 cval1
, cval2
, save_p
));
2830 case tcc_expression
:
2831 if (code
== COND_EXPR
)
2832 return (twoval_comparison_p (TREE_OPERAND (arg
, 0),
2833 cval1
, cval2
, save_p
)
2834 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
2835 cval1
, cval2
, save_p
)
2836 && twoval_comparison_p (TREE_OPERAND (arg
, 2),
2837 cval1
, cval2
, save_p
));
2840 case tcc_comparison
:
2841 /* First see if we can handle the first operand, then the second. For
2842 the second operand, we know *CVAL1 can't be zero. It must be that
2843 one side of the comparison is each of the values; test for the
2844 case where this isn't true by failing if the two operands
2847 if (operand_equal_p (TREE_OPERAND (arg
, 0),
2848 TREE_OPERAND (arg
, 1), 0))
2852 *cval1
= TREE_OPERAND (arg
, 0);
2853 else if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 0), 0))
2855 else if (*cval2
== 0)
2856 *cval2
= TREE_OPERAND (arg
, 0);
2857 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 0), 0))
2862 if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 1), 0))
2864 else if (*cval2
== 0)
2865 *cval2
= TREE_OPERAND (arg
, 1);
2866 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 1), 0))
2878 /* ARG is a tree that is known to contain just arithmetic operations and
2879 comparisons. Evaluate the operations in the tree substituting NEW0 for
2880 any occurrence of OLD0 as an operand of a comparison and likewise for
2884 eval_subst (location_t loc
, tree arg
, tree old0
, tree new0
,
2885 tree old1
, tree new1
)
2887 tree type
= TREE_TYPE (arg
);
2888 enum tree_code code
= TREE_CODE (arg
);
2889 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
2891 /* We can handle some of the tcc_expression cases here. */
2892 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
2894 else if (tclass
== tcc_expression
2895 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2896 tclass
= tcc_binary
;
2901 return fold_build1_loc (loc
, code
, type
,
2902 eval_subst (loc
, TREE_OPERAND (arg
, 0),
2903 old0
, new0
, old1
, new1
));
2906 return fold_build2_loc (loc
, code
, type
,
2907 eval_subst (loc
, TREE_OPERAND (arg
, 0),
2908 old0
, new0
, old1
, new1
),
2909 eval_subst (loc
, TREE_OPERAND (arg
, 1),
2910 old0
, new0
, old1
, new1
));
2912 case tcc_expression
:
2916 return eval_subst (loc
, TREE_OPERAND (arg
, 0), old0
, new0
,
2920 return eval_subst (loc
, TREE_OPERAND (arg
, 1), old0
, new0
,
2924 return fold_build3_loc (loc
, code
, type
,
2925 eval_subst (loc
, TREE_OPERAND (arg
, 0),
2926 old0
, new0
, old1
, new1
),
2927 eval_subst (loc
, TREE_OPERAND (arg
, 1),
2928 old0
, new0
, old1
, new1
),
2929 eval_subst (loc
, TREE_OPERAND (arg
, 2),
2930 old0
, new0
, old1
, new1
));
2934 /* Fall through - ??? */
2936 case tcc_comparison
:
2938 tree arg0
= TREE_OPERAND (arg
, 0);
2939 tree arg1
= TREE_OPERAND (arg
, 1);
2941 /* We need to check both for exact equality and tree equality. The
2942 former will be true if the operand has a side-effect. In that
2943 case, we know the operand occurred exactly once. */
2945 if (arg0
== old0
|| operand_equal_p (arg0
, old0
, 0))
2947 else if (arg0
== old1
|| operand_equal_p (arg0
, old1
, 0))
2950 if (arg1
== old0
|| operand_equal_p (arg1
, old0
, 0))
2952 else if (arg1
== old1
|| operand_equal_p (arg1
, old1
, 0))
2955 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
2963 /* Return a tree for the case when the result of an expression is RESULT
2964 converted to TYPE and OMITTED was previously an operand of the expression
2965 but is now not needed (e.g., we folded OMITTED * 0).
2967 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2968 the conversion of RESULT to TYPE. */
2971 omit_one_operand_loc (location_t loc
, tree type
, tree result
, tree omitted
)
2973 tree t
= fold_convert_loc (loc
, type
, result
);
2975 /* If the resulting operand is an empty statement, just return the omitted
2976 statement casted to void. */
2977 if (IS_EMPTY_STMT (t
) && TREE_SIDE_EFFECTS (omitted
))
2978 return build1_loc (loc
, NOP_EXPR
, void_type_node
,
2979 fold_ignored_result (omitted
));
2981 if (TREE_SIDE_EFFECTS (omitted
))
2982 return build2_loc (loc
, COMPOUND_EXPR
, type
,
2983 fold_ignored_result (omitted
), t
);
2985 return non_lvalue_loc (loc
, t
);
2988 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2991 pedantic_omit_one_operand_loc (location_t loc
, tree type
, tree result
,
2994 tree t
= fold_convert_loc (loc
, type
, result
);
2996 /* If the resulting operand is an empty statement, just return the omitted
2997 statement casted to void. */
2998 if (IS_EMPTY_STMT (t
) && TREE_SIDE_EFFECTS (omitted
))
2999 return build1_loc (loc
, NOP_EXPR
, void_type_node
,
3000 fold_ignored_result (omitted
));
3002 if (TREE_SIDE_EFFECTS (omitted
))
3003 return build2_loc (loc
, COMPOUND_EXPR
, type
,
3004 fold_ignored_result (omitted
), t
);
3006 return pedantic_non_lvalue_loc (loc
, t
);
3009 /* Return a tree for the case when the result of an expression is RESULT
3010 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3011 of the expression but are now not needed.
3013 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3014 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3015 evaluated before OMITTED2. Otherwise, if neither has side effects,
3016 just do the conversion of RESULT to TYPE. */
3019 omit_two_operands_loc (location_t loc
, tree type
, tree result
,
3020 tree omitted1
, tree omitted2
)
3022 tree t
= fold_convert_loc (loc
, type
, result
);
3024 if (TREE_SIDE_EFFECTS (omitted2
))
3025 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted2
, t
);
3026 if (TREE_SIDE_EFFECTS (omitted1
))
3027 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted1
, t
);
3029 return TREE_CODE (t
) != COMPOUND_EXPR
? non_lvalue_loc (loc
, t
) : t
;
3033 /* Return a simplified tree node for the truth-negation of ARG. This
3034 never alters ARG itself. We assume that ARG is an operation that
3035 returns a truth value (0 or 1).
3037 FIXME: one would think we would fold the result, but it causes
3038 problems with the dominator optimizer. */
3041 fold_truth_not_expr (location_t loc
, tree arg
)
3043 tree type
= TREE_TYPE (arg
);
3044 enum tree_code code
= TREE_CODE (arg
);
3045 location_t loc1
, loc2
;
3047 /* If this is a comparison, we can simply invert it, except for
3048 floating-point non-equality comparisons, in which case we just
3049 enclose a TRUTH_NOT_EXPR around what we have. */
3051 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
3053 tree op_type
= TREE_TYPE (TREE_OPERAND (arg
, 0));
3054 if (FLOAT_TYPE_P (op_type
)
3055 && flag_trapping_math
3056 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
3057 && code
!= NE_EXPR
&& code
!= EQ_EXPR
)
3060 code
= invert_tree_comparison (code
, HONOR_NANS (TYPE_MODE (op_type
)));
3061 if (code
== ERROR_MARK
)
3064 return build2_loc (loc
, code
, type
, TREE_OPERAND (arg
, 0),
3065 TREE_OPERAND (arg
, 1));
3071 return constant_boolean_node (integer_zerop (arg
), type
);
3073 case TRUTH_AND_EXPR
:
3074 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3075 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3076 return build2_loc (loc
, TRUTH_OR_EXPR
, type
,
3077 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3078 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3081 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3082 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3083 return build2_loc (loc
, TRUTH_AND_EXPR
, type
,
3084 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3085 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3087 case TRUTH_XOR_EXPR
:
3088 /* Here we can invert either operand. We invert the first operand
3089 unless the second operand is a TRUTH_NOT_EXPR in which case our
3090 result is the XOR of the first operand with the inside of the
3091 negation of the second operand. */
3093 if (TREE_CODE (TREE_OPERAND (arg
, 1)) == TRUTH_NOT_EXPR
)
3094 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
, TREE_OPERAND (arg
, 0),
3095 TREE_OPERAND (TREE_OPERAND (arg
, 1), 0));
3097 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
,
3098 invert_truthvalue_loc (loc
, TREE_OPERAND (arg
, 0)),
3099 TREE_OPERAND (arg
, 1));
3101 case TRUTH_ANDIF_EXPR
:
3102 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3103 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3104 return build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
3105 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3106 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3108 case TRUTH_ORIF_EXPR
:
3109 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3110 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3111 return build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
3112 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3113 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3115 case TRUTH_NOT_EXPR
:
3116 return TREE_OPERAND (arg
, 0);
3120 tree arg1
= TREE_OPERAND (arg
, 1);
3121 tree arg2
= TREE_OPERAND (arg
, 2);
3123 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3124 loc2
= expr_location_or (TREE_OPERAND (arg
, 2), loc
);
3126 /* A COND_EXPR may have a throw as one operand, which
3127 then has void type. Just leave void operands
3129 return build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg
, 0),
3130 VOID_TYPE_P (TREE_TYPE (arg1
))
3131 ? arg1
: invert_truthvalue_loc (loc1
, arg1
),
3132 VOID_TYPE_P (TREE_TYPE (arg2
))
3133 ? arg2
: invert_truthvalue_loc (loc2
, arg2
));
3137 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3138 return build2_loc (loc
, COMPOUND_EXPR
, type
,
3139 TREE_OPERAND (arg
, 0),
3140 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 1)));
3142 case NON_LVALUE_EXPR
:
3143 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3144 return invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0));
3147 if (TREE_CODE (TREE_TYPE (arg
)) == BOOLEAN_TYPE
)
3148 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3150 /* ... fall through ... */
3153 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3154 return build1_loc (loc
, TREE_CODE (arg
), type
,
3155 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
3158 if (!integer_onep (TREE_OPERAND (arg
, 1)))
3160 return build2_loc (loc
, EQ_EXPR
, type
, arg
, build_int_cst (type
, 0));
3163 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3165 case CLEANUP_POINT_EXPR
:
3166 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3167 return build1_loc (loc
, CLEANUP_POINT_EXPR
, type
,
3168 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
3175 /* Return a simplified tree node for the truth-negation of ARG. This
3176 never alters ARG itself. We assume that ARG is an operation that
3177 returns a truth value (0 or 1).
3179 FIXME: one would think we would fold the result, but it causes
3180 problems with the dominator optimizer. */
3183 invert_truthvalue_loc (location_t loc
, tree arg
)
3187 if (TREE_CODE (arg
) == ERROR_MARK
)
3190 tem
= fold_truth_not_expr (loc
, arg
);
3192 tem
= build1_loc (loc
, TRUTH_NOT_EXPR
, TREE_TYPE (arg
), arg
);
3197 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3198 operands are another bit-wise operation with a common input. If so,
3199 distribute the bit operations to save an operation and possibly two if
3200 constants are involved. For example, convert
3201 (A | B) & (A | C) into A | (B & C)
3202 Further simplification will occur if B and C are constants.
3204 If this optimization cannot be done, 0 will be returned. */
3207 distribute_bit_expr (location_t loc
, enum tree_code code
, tree type
,
3208 tree arg0
, tree arg1
)
3213 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
3214 || TREE_CODE (arg0
) == code
3215 || (TREE_CODE (arg0
) != BIT_AND_EXPR
3216 && TREE_CODE (arg0
) != BIT_IOR_EXPR
))
3219 if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0), 0))
3221 common
= TREE_OPERAND (arg0
, 0);
3222 left
= TREE_OPERAND (arg0
, 1);
3223 right
= TREE_OPERAND (arg1
, 1);
3225 else if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 1), 0))
3227 common
= TREE_OPERAND (arg0
, 0);
3228 left
= TREE_OPERAND (arg0
, 1);
3229 right
= TREE_OPERAND (arg1
, 0);
3231 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 0), 0))
3233 common
= TREE_OPERAND (arg0
, 1);
3234 left
= TREE_OPERAND (arg0
, 0);
3235 right
= TREE_OPERAND (arg1
, 1);
3237 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 1), 0))
3239 common
= TREE_OPERAND (arg0
, 1);
3240 left
= TREE_OPERAND (arg0
, 0);
3241 right
= TREE_OPERAND (arg1
, 0);
3246 common
= fold_convert_loc (loc
, type
, common
);
3247 left
= fold_convert_loc (loc
, type
, left
);
3248 right
= fold_convert_loc (loc
, type
, right
);
3249 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, common
,
3250 fold_build2_loc (loc
, code
, type
, left
, right
));
3253 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3254 with code CODE. This optimization is unsafe. */
3256 distribute_real_division (location_t loc
, enum tree_code code
, tree type
,
3257 tree arg0
, tree arg1
)
3259 bool mul0
= TREE_CODE (arg0
) == MULT_EXPR
;
3260 bool mul1
= TREE_CODE (arg1
) == MULT_EXPR
;
3262 /* (A / C) +- (B / C) -> (A +- B) / C. */
3264 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3265 TREE_OPERAND (arg1
, 1), 0))
3266 return fold_build2_loc (loc
, mul0
? MULT_EXPR
: RDIV_EXPR
, type
,
3267 fold_build2_loc (loc
, code
, type
,
3268 TREE_OPERAND (arg0
, 0),
3269 TREE_OPERAND (arg1
, 0)),
3270 TREE_OPERAND (arg0
, 1));
3272 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3273 if (operand_equal_p (TREE_OPERAND (arg0
, 0),
3274 TREE_OPERAND (arg1
, 0), 0)
3275 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
3276 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
3278 REAL_VALUE_TYPE r0
, r1
;
3279 r0
= TREE_REAL_CST (TREE_OPERAND (arg0
, 1));
3280 r1
= TREE_REAL_CST (TREE_OPERAND (arg1
, 1));
3282 real_arithmetic (&r0
, RDIV_EXPR
, &dconst1
, &r0
);
3284 real_arithmetic (&r1
, RDIV_EXPR
, &dconst1
, &r1
);
3285 real_arithmetic (&r0
, code
, &r0
, &r1
);
3286 return fold_build2_loc (loc
, MULT_EXPR
, type
,
3287 TREE_OPERAND (arg0
, 0),
3288 build_real (type
, r0
));
3294 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3295 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3298 make_bit_field_ref (location_t loc
, tree inner
, tree type
,
3299 HOST_WIDE_INT bitsize
, HOST_WIDE_INT bitpos
, int unsignedp
)
3301 tree result
, bftype
;
3305 tree size
= TYPE_SIZE (TREE_TYPE (inner
));
3306 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner
))
3307 || POINTER_TYPE_P (TREE_TYPE (inner
)))
3308 && host_integerp (size
, 0)
3309 && tree_low_cst (size
, 0) == bitsize
)
3310 return fold_convert_loc (loc
, type
, inner
);
3314 if (TYPE_PRECISION (bftype
) != bitsize
3315 || TYPE_UNSIGNED (bftype
) == !unsignedp
)
3316 bftype
= build_nonstandard_integer_type (bitsize
, 0);
3318 result
= build3_loc (loc
, BIT_FIELD_REF
, bftype
, inner
,
3319 size_int (bitsize
), bitsize_int (bitpos
));
3322 result
= fold_convert_loc (loc
, type
, result
);
3327 /* Optimize a bit-field compare.
3329 There are two cases: First is a compare against a constant and the
3330 second is a comparison of two items where the fields are at the same
3331 bit position relative to the start of a chunk (byte, halfword, word)
3332 large enough to contain it. In these cases we can avoid the shift
3333 implicit in bitfield extractions.
3335 For constants, we emit a compare of the shifted constant with the
3336 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3337 compared. For two fields at the same position, we do the ANDs with the
3338 similar mask and compare the result of the ANDs.
3340 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3341 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3342 are the left and right operands of the comparison, respectively.
3344 If the optimization described above can be done, we return the resulting
3345 tree. Otherwise we return zero. */
3348 optimize_bit_field_compare (location_t loc
, enum tree_code code
,
3349 tree compare_type
, tree lhs
, tree rhs
)
3351 HOST_WIDE_INT lbitpos
, lbitsize
, rbitpos
, rbitsize
, nbitpos
, nbitsize
;
3352 tree type
= TREE_TYPE (lhs
);
3353 tree signed_type
, unsigned_type
;
3354 int const_p
= TREE_CODE (rhs
) == INTEGER_CST
;
3355 enum machine_mode lmode
, rmode
, nmode
;
3356 int lunsignedp
, runsignedp
;
3357 int lvolatilep
= 0, rvolatilep
= 0;
3358 tree linner
, rinner
= NULL_TREE
;
3362 /* Get all the information about the extractions being done. If the bit size
3363 if the same as the size of the underlying object, we aren't doing an
3364 extraction at all and so can do nothing. We also don't want to
3365 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3366 then will no longer be able to replace it. */
3367 linner
= get_inner_reference (lhs
, &lbitsize
, &lbitpos
, &offset
, &lmode
,
3368 &lunsignedp
, &lvolatilep
, false);
3369 if (linner
== lhs
|| lbitsize
== GET_MODE_BITSIZE (lmode
) || lbitsize
< 0
3370 || offset
!= 0 || TREE_CODE (linner
) == PLACEHOLDER_EXPR
)
3375 /* If this is not a constant, we can only do something if bit positions,
3376 sizes, and signedness are the same. */
3377 rinner
= get_inner_reference (rhs
, &rbitsize
, &rbitpos
, &offset
, &rmode
,
3378 &runsignedp
, &rvolatilep
, false);
3380 if (rinner
== rhs
|| lbitpos
!= rbitpos
|| lbitsize
!= rbitsize
3381 || lunsignedp
!= runsignedp
|| offset
!= 0
3382 || TREE_CODE (rinner
) == PLACEHOLDER_EXPR
)
3386 /* See if we can find a mode to refer to this field. We should be able to,
3387 but fail if we can't. */
3389 && GET_MODE_BITSIZE (lmode
) > 0
3390 && flag_strict_volatile_bitfields
> 0)
3393 nmode
= get_best_mode (lbitsize
, lbitpos
, 0, 0,
3394 const_p
? TYPE_ALIGN (TREE_TYPE (linner
))
3395 : MIN (TYPE_ALIGN (TREE_TYPE (linner
)),
3396 TYPE_ALIGN (TREE_TYPE (rinner
))),
3397 word_mode
, lvolatilep
|| rvolatilep
);
3398 if (nmode
== VOIDmode
)
3401 /* Set signed and unsigned types of the precision of this mode for the
3403 signed_type
= lang_hooks
.types
.type_for_mode (nmode
, 0);
3404 unsigned_type
= lang_hooks
.types
.type_for_mode (nmode
, 1);
3406 /* Compute the bit position and size for the new reference and our offset
3407 within it. If the new reference is the same size as the original, we
3408 won't optimize anything, so return zero. */
3409 nbitsize
= GET_MODE_BITSIZE (nmode
);
3410 nbitpos
= lbitpos
& ~ (nbitsize
- 1);
3412 if (nbitsize
== lbitsize
)
3415 if (BYTES_BIG_ENDIAN
)
3416 lbitpos
= nbitsize
- lbitsize
- lbitpos
;
3418 /* Make the mask to be used against the extracted field. */
3419 mask
= build_int_cst_type (unsigned_type
, -1);
3420 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (nbitsize
- lbitsize
));
3421 mask
= const_binop (RSHIFT_EXPR
, mask
,
3422 size_int (nbitsize
- lbitsize
- lbitpos
));
3425 /* If not comparing with constant, just rework the comparison
3427 return fold_build2_loc (loc
, code
, compare_type
,
3428 fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3429 make_bit_field_ref (loc
, linner
,
3434 fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3435 make_bit_field_ref (loc
, rinner
,
3441 /* Otherwise, we are handling the constant case. See if the constant is too
3442 big for the field. Warn and return a tree of for 0 (false) if so. We do
3443 this not only for its own sake, but to avoid having to test for this
3444 error case below. If we didn't, we might generate wrong code.
3446 For unsigned fields, the constant shifted right by the field length should
3447 be all zero. For signed fields, the high-order bits should agree with
3452 if (! integer_zerop (const_binop (RSHIFT_EXPR
,
3453 fold_convert_loc (loc
,
3454 unsigned_type
, rhs
),
3455 size_int (lbitsize
))))
3457 warning (0, "comparison is always %d due to width of bit-field",
3459 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3464 tree tem
= const_binop (RSHIFT_EXPR
,
3465 fold_convert_loc (loc
, signed_type
, rhs
),
3466 size_int (lbitsize
- 1));
3467 if (! integer_zerop (tem
) && ! integer_all_onesp (tem
))
3469 warning (0, "comparison is always %d due to width of bit-field",
3471 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3475 /* Single-bit compares should always be against zero. */
3476 if (lbitsize
== 1 && ! integer_zerop (rhs
))
3478 code
= code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
;
3479 rhs
= build_int_cst (type
, 0);
3482 /* Make a new bitfield reference, shift the constant over the
3483 appropriate number of bits and mask it with the computed mask
3484 (in case this was a signed field). If we changed it, make a new one. */
3485 lhs
= make_bit_field_ref (loc
, linner
, unsigned_type
, nbitsize
, nbitpos
, 1);
3488 TREE_SIDE_EFFECTS (lhs
) = 1;
3489 TREE_THIS_VOLATILE (lhs
) = 1;
3492 rhs
= const_binop (BIT_AND_EXPR
,
3493 const_binop (LSHIFT_EXPR
,
3494 fold_convert_loc (loc
, unsigned_type
, rhs
),
3495 size_int (lbitpos
)),
3498 lhs
= build2_loc (loc
, code
, compare_type
,
3499 build2 (BIT_AND_EXPR
, unsigned_type
, lhs
, mask
), rhs
);
3503 /* Subroutine for fold_truthop: decode a field reference.
3505 If EXP is a comparison reference, we return the innermost reference.
3507 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3508 set to the starting bit number.
3510 If the innermost field can be completely contained in a mode-sized
3511 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3513 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3514 otherwise it is not changed.
3516 *PUNSIGNEDP is set to the signedness of the field.
3518 *PMASK is set to the mask used. This is either contained in a
3519 BIT_AND_EXPR or derived from the width of the field.
3521 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3523 Return 0 if this is not a component reference or is one that we can't
3524 do anything with. */
3527 decode_field_reference (location_t loc
, tree exp
, HOST_WIDE_INT
*pbitsize
,
3528 HOST_WIDE_INT
*pbitpos
, enum machine_mode
*pmode
,
3529 int *punsignedp
, int *pvolatilep
,
3530 tree
*pmask
, tree
*pand_mask
)
3532 tree outer_type
= 0;
3534 tree mask
, inner
, offset
;
3536 unsigned int precision
;
3538 /* All the optimizations using this function assume integer fields.
3539 There are problems with FP fields since the type_for_size call
3540 below can fail for, e.g., XFmode. */
3541 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp
)))
3544 /* We are interested in the bare arrangement of bits, so strip everything
3545 that doesn't affect the machine mode. However, record the type of the
3546 outermost expression if it may matter below. */
3547 if (CONVERT_EXPR_P (exp
)
3548 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
3549 outer_type
= TREE_TYPE (exp
);
3552 if (TREE_CODE (exp
) == BIT_AND_EXPR
)
3554 and_mask
= TREE_OPERAND (exp
, 1);
3555 exp
= TREE_OPERAND (exp
, 0);
3556 STRIP_NOPS (exp
); STRIP_NOPS (and_mask
);
3557 if (TREE_CODE (and_mask
) != INTEGER_CST
)
3561 inner
= get_inner_reference (exp
, pbitsize
, pbitpos
, &offset
, pmode
,
3562 punsignedp
, pvolatilep
, false);
3563 if ((inner
== exp
&& and_mask
== 0)
3564 || *pbitsize
< 0 || offset
!= 0
3565 || TREE_CODE (inner
) == PLACEHOLDER_EXPR
)
3568 /* If the number of bits in the reference is the same as the bitsize of
3569 the outer type, then the outer type gives the signedness. Otherwise
3570 (in case of a small bitfield) the signedness is unchanged. */
3571 if (outer_type
&& *pbitsize
== TYPE_PRECISION (outer_type
))
3572 *punsignedp
= TYPE_UNSIGNED (outer_type
);
3574 /* Compute the mask to access the bitfield. */
3575 unsigned_type
= lang_hooks
.types
.type_for_size (*pbitsize
, 1);
3576 precision
= TYPE_PRECISION (unsigned_type
);
3578 mask
= build_int_cst_type (unsigned_type
, -1);
3580 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
3581 mask
= const_binop (RSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
3583 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3585 mask
= fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3586 fold_convert_loc (loc
, unsigned_type
, and_mask
), mask
);
3589 *pand_mask
= and_mask
;
3593 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3597 all_ones_mask_p (const_tree mask
, int size
)
3599 tree type
= TREE_TYPE (mask
);
3600 unsigned int precision
= TYPE_PRECISION (type
);
3603 tmask
= build_int_cst_type (signed_type_for (type
), -1);
3606 tree_int_cst_equal (mask
,
3607 const_binop (RSHIFT_EXPR
,
3608 const_binop (LSHIFT_EXPR
, tmask
,
3609 size_int (precision
- size
)),
3610 size_int (precision
- size
)));
3613 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3614 represents the sign bit of EXP's type. If EXP represents a sign
3615 or zero extension, also test VAL against the unextended type.
3616 The return value is the (sub)expression whose sign bit is VAL,
3617 or NULL_TREE otherwise. */
3620 sign_bit_p (tree exp
, const_tree val
)
3622 unsigned HOST_WIDE_INT mask_lo
, lo
;
3623 HOST_WIDE_INT mask_hi
, hi
;
3627 /* Tree EXP must have an integral type. */
3628 t
= TREE_TYPE (exp
);
3629 if (! INTEGRAL_TYPE_P (t
))
3632 /* Tree VAL must be an integer constant. */
3633 if (TREE_CODE (val
) != INTEGER_CST
3634 || TREE_OVERFLOW (val
))
3637 width
= TYPE_PRECISION (t
);
3638 if (width
> HOST_BITS_PER_WIDE_INT
)
3640 hi
= (unsigned HOST_WIDE_INT
) 1 << (width
- HOST_BITS_PER_WIDE_INT
- 1);
3643 mask_hi
= ((unsigned HOST_WIDE_INT
) -1
3644 >> (2 * HOST_BITS_PER_WIDE_INT
- width
));
3650 lo
= (unsigned HOST_WIDE_INT
) 1 << (width
- 1);
3653 mask_lo
= ((unsigned HOST_WIDE_INT
) -1
3654 >> (HOST_BITS_PER_WIDE_INT
- width
));
3657 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3658 treat VAL as if it were unsigned. */
3659 if ((TREE_INT_CST_HIGH (val
) & mask_hi
) == hi
3660 && (TREE_INT_CST_LOW (val
) & mask_lo
) == lo
)
3663 /* Handle extension from a narrower type. */
3664 if (TREE_CODE (exp
) == NOP_EXPR
3665 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))) < width
)
3666 return sign_bit_p (TREE_OPERAND (exp
, 0), val
);
3671 /* Subroutine for fold_truthop: determine if an operand is simple enough
3672 to be evaluated unconditionally. */
3675 simple_operand_p (const_tree exp
)
3677 /* Strip any conversions that don't change the machine mode. */
3680 return (CONSTANT_CLASS_P (exp
)
3681 || TREE_CODE (exp
) == SSA_NAME
3683 && ! TREE_ADDRESSABLE (exp
)
3684 && ! TREE_THIS_VOLATILE (exp
)
3685 && ! DECL_NONLOCAL (exp
)
3686 /* Don't regard global variables as simple. They may be
3687 allocated in ways unknown to the compiler (shared memory,
3688 #pragma weak, etc). */
3689 && ! TREE_PUBLIC (exp
)
3690 && ! DECL_EXTERNAL (exp
)
3691 /* Loading a static variable is unduly expensive, but global
3692 registers aren't expensive. */
3693 && (! TREE_STATIC (exp
) || DECL_REGISTER (exp
))));
3696 /* The following functions are subroutines to fold_range_test and allow it to
3697 try to change a logical combination of comparisons into a range test.
3700 X == 2 || X == 3 || X == 4 || X == 5
3704 (unsigned) (X - 2) <= 3
3706 We describe each set of comparisons as being either inside or outside
3707 a range, using a variable named like IN_P, and then describe the
3708 range with a lower and upper bound. If one of the bounds is omitted,
3709 it represents either the highest or lowest value of the type.
3711 In the comments below, we represent a range by two numbers in brackets
3712 preceded by a "+" to designate being inside that range, or a "-" to
3713 designate being outside that range, so the condition can be inverted by
3714 flipping the prefix. An omitted bound is represented by a "-". For
3715 example, "- [-, 10]" means being outside the range starting at the lowest
3716 possible value and ending at 10, in other words, being greater than 10.
3717 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3720 We set up things so that the missing bounds are handled in a consistent
3721 manner so neither a missing bound nor "true" and "false" need to be
3722 handled using a special case. */
3724 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3725 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3726 and UPPER1_P are nonzero if the respective argument is an upper bound
3727 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3728 must be specified for a comparison. ARG1 will be converted to ARG0's
3729 type if both are specified. */
3732 range_binop (enum tree_code code
, tree type
, tree arg0
, int upper0_p
,
3733 tree arg1
, int upper1_p
)
3739 /* If neither arg represents infinity, do the normal operation.
3740 Else, if not a comparison, return infinity. Else handle the special
3741 comparison rules. Note that most of the cases below won't occur, but
3742 are handled for consistency. */
3744 if (arg0
!= 0 && arg1
!= 0)
3746 tem
= fold_build2 (code
, type
!= 0 ? type
: TREE_TYPE (arg0
),
3747 arg0
, fold_convert (TREE_TYPE (arg0
), arg1
));
3749 return TREE_CODE (tem
) == INTEGER_CST
? tem
: 0;
3752 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
3755 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3756 for neither. In real maths, we cannot assume open ended ranges are
3757 the same. But, this is computer arithmetic, where numbers are finite.
3758 We can therefore make the transformation of any unbounded range with
3759 the value Z, Z being greater than any representable number. This permits
3760 us to treat unbounded ranges as equal. */
3761 sgn0
= arg0
!= 0 ? 0 : (upper0_p
? 1 : -1);
3762 sgn1
= arg1
!= 0 ? 0 : (upper1_p
? 1 : -1);
3766 result
= sgn0
== sgn1
;
3769 result
= sgn0
!= sgn1
;
3772 result
= sgn0
< sgn1
;
3775 result
= sgn0
<= sgn1
;
3778 result
= sgn0
> sgn1
;
3781 result
= sgn0
>= sgn1
;
3787 return constant_boolean_node (result
, type
);
3790 /* Helper routine for make_range. Perform one step for it, return
3791 new expression if the loop should continue or NULL_TREE if it should
3795 make_range_step (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
,
3796 tree exp_type
, tree
*p_low
, tree
*p_high
, int *p_in_p
,
3797 bool *strict_overflow_p
)
3799 tree arg0_type
= TREE_TYPE (arg0
);
3800 tree n_low
, n_high
, low
= *p_low
, high
= *p_high
;
3801 int in_p
= *p_in_p
, n_in_p
;
3805 case TRUTH_NOT_EXPR
:
3809 case EQ_EXPR
: case NE_EXPR
:
3810 case LT_EXPR
: case LE_EXPR
: case GE_EXPR
: case GT_EXPR
:
3811 /* We can only do something if the range is testing for zero
3812 and if the second operand is an integer constant. Note that
3813 saying something is "in" the range we make is done by
3814 complementing IN_P since it will set in the initial case of
3815 being not equal to zero; "out" is leaving it alone. */
3816 if (low
== NULL_TREE
|| high
== NULL_TREE
3817 || ! integer_zerop (low
) || ! integer_zerop (high
)
3818 || TREE_CODE (arg1
) != INTEGER_CST
)
3823 case NE_EXPR
: /* - [c, c] */
3826 case EQ_EXPR
: /* + [c, c] */
3827 in_p
= ! in_p
, low
= high
= arg1
;
3829 case GT_EXPR
: /* - [-, c] */
3830 low
= 0, high
= arg1
;
3832 case GE_EXPR
: /* + [c, -] */
3833 in_p
= ! in_p
, low
= arg1
, high
= 0;
3835 case LT_EXPR
: /* - [c, -] */
3836 low
= arg1
, high
= 0;
3838 case LE_EXPR
: /* + [-, c] */
3839 in_p
= ! in_p
, low
= 0, high
= arg1
;
3845 /* If this is an unsigned comparison, we also know that EXP is
3846 greater than or equal to zero. We base the range tests we make
3847 on that fact, so we record it here so we can parse existing
3848 range tests. We test arg0_type since often the return type
3849 of, e.g. EQ_EXPR, is boolean. */
3850 if (TYPE_UNSIGNED (arg0_type
) && (low
== 0 || high
== 0))
3852 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
3854 build_int_cst (arg0_type
, 0),
3858 in_p
= n_in_p
, low
= n_low
, high
= n_high
;
3860 /* If the high bound is missing, but we have a nonzero low
3861 bound, reverse the range so it goes from zero to the low bound
3863 if (high
== 0 && low
&& ! integer_zerop (low
))
3866 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low
, 0,
3867 integer_one_node
, 0);
3868 low
= build_int_cst (arg0_type
, 0);
3878 /* (-x) IN [a,b] -> x in [-b, -a] */
3879 n_low
= range_binop (MINUS_EXPR
, exp_type
,
3880 build_int_cst (exp_type
, 0),
3882 n_high
= range_binop (MINUS_EXPR
, exp_type
,
3883 build_int_cst (exp_type
, 0),
3885 if (n_high
!= 0 && TREE_OVERFLOW (n_high
))
3891 return build2_loc (loc
, MINUS_EXPR
, exp_type
, negate_expr (arg0
),
3892 build_int_cst (exp_type
, 1));
3896 if (TREE_CODE (arg1
) != INTEGER_CST
)
3899 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3900 move a constant to the other side. */
3901 if (!TYPE_UNSIGNED (arg0_type
)
3902 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
3905 /* If EXP is signed, any overflow in the computation is undefined,
3906 so we don't worry about it so long as our computations on
3907 the bounds don't overflow. For unsigned, overflow is defined
3908 and this is exactly the right thing. */
3909 n_low
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
3910 arg0_type
, low
, 0, arg1
, 0);
3911 n_high
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
3912 arg0_type
, high
, 1, arg1
, 0);
3913 if ((n_low
!= 0 && TREE_OVERFLOW (n_low
))
3914 || (n_high
!= 0 && TREE_OVERFLOW (n_high
)))
3917 if (TYPE_OVERFLOW_UNDEFINED (arg0_type
))
3918 *strict_overflow_p
= true;
3921 /* Check for an unsigned range which has wrapped around the maximum
3922 value thus making n_high < n_low, and normalize it. */
3923 if (n_low
&& n_high
&& tree_int_cst_lt (n_high
, n_low
))
3925 low
= range_binop (PLUS_EXPR
, arg0_type
, n_high
, 0,
3926 integer_one_node
, 0);
3927 high
= range_binop (MINUS_EXPR
, arg0_type
, n_low
, 0,
3928 integer_one_node
, 0);
3930 /* If the range is of the form +/- [ x+1, x ], we won't
3931 be able to normalize it. But then, it represents the
3932 whole range or the empty set, so make it
3934 if (tree_int_cst_equal (n_low
, low
)
3935 && tree_int_cst_equal (n_high
, high
))
3941 low
= n_low
, high
= n_high
;
3949 case NON_LVALUE_EXPR
:
3950 if (TYPE_PRECISION (arg0_type
) > TYPE_PRECISION (exp_type
))
3953 if (! INTEGRAL_TYPE_P (arg0_type
)
3954 || (low
!= 0 && ! int_fits_type_p (low
, arg0_type
))
3955 || (high
!= 0 && ! int_fits_type_p (high
, arg0_type
)))
3958 n_low
= low
, n_high
= high
;
3961 n_low
= fold_convert_loc (loc
, arg0_type
, n_low
);
3964 n_high
= fold_convert_loc (loc
, arg0_type
, n_high
);
3966 /* If we're converting arg0 from an unsigned type, to exp,
3967 a signed type, we will be doing the comparison as unsigned.
3968 The tests above have already verified that LOW and HIGH
3971 So we have to ensure that we will handle large unsigned
3972 values the same way that the current signed bounds treat
3975 if (!TYPE_UNSIGNED (exp_type
) && TYPE_UNSIGNED (arg0_type
))
3979 /* For fixed-point modes, we need to pass the saturating flag
3980 as the 2nd parameter. */
3981 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type
)))
3983 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
),
3984 TYPE_SATURATING (arg0_type
));
3987 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
), 1);
3989 /* A range without an upper bound is, naturally, unbounded.
3990 Since convert would have cropped a very large value, use
3991 the max value for the destination type. */
3993 = TYPE_MAX_VALUE (equiv_type
) ? TYPE_MAX_VALUE (equiv_type
)
3994 : TYPE_MAX_VALUE (arg0_type
);
3996 if (TYPE_PRECISION (exp_type
) == TYPE_PRECISION (arg0_type
))
3997 high_positive
= fold_build2_loc (loc
, RSHIFT_EXPR
, arg0_type
,
3998 fold_convert_loc (loc
, arg0_type
,
4000 build_int_cst (arg0_type
, 1));
4002 /* If the low bound is specified, "and" the range with the
4003 range for which the original unsigned value will be
4007 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 1, n_low
, n_high
,
4008 1, fold_convert_loc (loc
, arg0_type
,
4013 in_p
= (n_in_p
== in_p
);
4017 /* Otherwise, "or" the range with the range of the input
4018 that will be interpreted as negative. */
4019 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 0, n_low
, n_high
,
4020 1, fold_convert_loc (loc
, arg0_type
,
4025 in_p
= (in_p
!= n_in_p
);
4039 /* Given EXP, a logical expression, set the range it is testing into
4040 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4041 actually being tested. *PLOW and *PHIGH will be made of the same
4042 type as the returned expression. If EXP is not a comparison, we
4043 will most likely not be returning a useful value and range. Set
4044 *STRICT_OVERFLOW_P to true if the return value is only valid
4045 because signed overflow is undefined; otherwise, do not change
4046 *STRICT_OVERFLOW_P. */
4049 make_range (tree exp
, int *pin_p
, tree
*plow
, tree
*phigh
,
4050 bool *strict_overflow_p
)
4052 enum tree_code code
;
4053 tree arg0
, arg1
= NULL_TREE
;
4054 tree exp_type
, nexp
;
4057 location_t loc
= EXPR_LOCATION (exp
);
4059 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4060 and see if we can refine the range. Some of the cases below may not
4061 happen, but it doesn't seem worth worrying about this. We "continue"
4062 the outer loop when we've changed something; otherwise we "break"
4063 the switch, which will "break" the while. */
4066 low
= high
= build_int_cst (TREE_TYPE (exp
), 0);
4070 code
= TREE_CODE (exp
);
4071 exp_type
= TREE_TYPE (exp
);
4074 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
4076 if (TREE_OPERAND_LENGTH (exp
) > 0)
4077 arg0
= TREE_OPERAND (exp
, 0);
4078 if (TREE_CODE_CLASS (code
) == tcc_binary
4079 || TREE_CODE_CLASS (code
) == tcc_comparison
4080 || (TREE_CODE_CLASS (code
) == tcc_expression
4081 && TREE_OPERAND_LENGTH (exp
) > 1))
4082 arg1
= TREE_OPERAND (exp
, 1);
4084 if (arg0
== NULL_TREE
)
4087 nexp
= make_range_step (loc
, code
, arg0
, arg1
, exp_type
, &low
,
4088 &high
, &in_p
, strict_overflow_p
);
4089 if (nexp
== NULL_TREE
)
4094 /* If EXP is a constant, we can evaluate whether this is true or false. */
4095 if (TREE_CODE (exp
) == INTEGER_CST
)
4097 in_p
= in_p
== (integer_onep (range_binop (GE_EXPR
, integer_type_node
,
4099 && integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4105 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4109 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4110 type, TYPE, return an expression to test if EXP is in (or out of, depending
4111 on IN_P) the range. Return 0 if the test couldn't be created. */
4114 build_range_check (location_t loc
, tree type
, tree exp
, int in_p
,
4115 tree low
, tree high
)
4117 tree etype
= TREE_TYPE (exp
), value
;
4119 #ifdef HAVE_canonicalize_funcptr_for_compare
4120 /* Disable this optimization for function pointer expressions
4121 on targets that require function pointer canonicalization. */
4122 if (HAVE_canonicalize_funcptr_for_compare
4123 && TREE_CODE (etype
) == POINTER_TYPE
4124 && TREE_CODE (TREE_TYPE (etype
)) == FUNCTION_TYPE
)
4130 value
= build_range_check (loc
, type
, exp
, 1, low
, high
);
4132 return invert_truthvalue_loc (loc
, value
);
4137 if (low
== 0 && high
== 0)
4138 return build_int_cst (type
, 1);
4141 return fold_build2_loc (loc
, LE_EXPR
, type
, exp
,
4142 fold_convert_loc (loc
, etype
, high
));
4145 return fold_build2_loc (loc
, GE_EXPR
, type
, exp
,
4146 fold_convert_loc (loc
, etype
, low
));
4148 if (operand_equal_p (low
, high
, 0))
4149 return fold_build2_loc (loc
, EQ_EXPR
, type
, exp
,
4150 fold_convert_loc (loc
, etype
, low
));
4152 if (integer_zerop (low
))
4154 if (! TYPE_UNSIGNED (etype
))
4156 etype
= unsigned_type_for (etype
);
4157 high
= fold_convert_loc (loc
, etype
, high
);
4158 exp
= fold_convert_loc (loc
, etype
, exp
);
4160 return build_range_check (loc
, type
, exp
, 1, 0, high
);
4163 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4164 if (integer_onep (low
) && TREE_CODE (high
) == INTEGER_CST
)
4166 unsigned HOST_WIDE_INT lo
;
4170 prec
= TYPE_PRECISION (etype
);
4171 if (prec
<= HOST_BITS_PER_WIDE_INT
)
4174 lo
= ((unsigned HOST_WIDE_INT
) 1 << (prec
- 1)) - 1;
4178 hi
= ((HOST_WIDE_INT
) 1 << (prec
- HOST_BITS_PER_WIDE_INT
- 1)) - 1;
4179 lo
= (unsigned HOST_WIDE_INT
) -1;
4182 if (TREE_INT_CST_HIGH (high
) == hi
&& TREE_INT_CST_LOW (high
) == lo
)
4184 if (TYPE_UNSIGNED (etype
))
4186 tree signed_etype
= signed_type_for (etype
);
4187 if (TYPE_PRECISION (signed_etype
) != TYPE_PRECISION (etype
))
4189 = build_nonstandard_integer_type (TYPE_PRECISION (etype
), 0);
4191 etype
= signed_etype
;
4192 exp
= fold_convert_loc (loc
, etype
, exp
);
4194 return fold_build2_loc (loc
, GT_EXPR
, type
, exp
,
4195 build_int_cst (etype
, 0));
4199 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4200 This requires wrap-around arithmetics for the type of the expression.
4201 First make sure that arithmetics in this type is valid, then make sure
4202 that it wraps around. */
4203 if (TREE_CODE (etype
) == ENUMERAL_TYPE
|| TREE_CODE (etype
) == BOOLEAN_TYPE
)
4204 etype
= lang_hooks
.types
.type_for_size (TYPE_PRECISION (etype
),
4205 TYPE_UNSIGNED (etype
));
4207 if (TREE_CODE (etype
) == INTEGER_TYPE
&& !TYPE_OVERFLOW_WRAPS (etype
))
4209 tree utype
, minv
, maxv
;
4211 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4212 for the type in question, as we rely on this here. */
4213 utype
= unsigned_type_for (etype
);
4214 maxv
= fold_convert_loc (loc
, utype
, TYPE_MAX_VALUE (etype
));
4215 maxv
= range_binop (PLUS_EXPR
, NULL_TREE
, maxv
, 1,
4216 integer_one_node
, 1);
4217 minv
= fold_convert_loc (loc
, utype
, TYPE_MIN_VALUE (etype
));
4219 if (integer_zerop (range_binop (NE_EXPR
, integer_type_node
,
4226 high
= fold_convert_loc (loc
, etype
, high
);
4227 low
= fold_convert_loc (loc
, etype
, low
);
4228 exp
= fold_convert_loc (loc
, etype
, exp
);
4230 value
= const_binop (MINUS_EXPR
, high
, low
);
4233 if (POINTER_TYPE_P (etype
))
4235 if (value
!= 0 && !TREE_OVERFLOW (value
))
4237 low
= fold_build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (low
), low
);
4238 return build_range_check (loc
, type
,
4239 fold_build_pointer_plus_loc (loc
, exp
, low
),
4240 1, build_int_cst (etype
, 0), value
);
4245 if (value
!= 0 && !TREE_OVERFLOW (value
))
4246 return build_range_check (loc
, type
,
4247 fold_build2_loc (loc
, MINUS_EXPR
, etype
, exp
, low
),
4248 1, build_int_cst (etype
, 0), value
);
4253 /* Return the predecessor of VAL in its type, handling the infinite case. */
4256 range_predecessor (tree val
)
4258 tree type
= TREE_TYPE (val
);
4260 if (INTEGRAL_TYPE_P (type
)
4261 && operand_equal_p (val
, TYPE_MIN_VALUE (type
), 0))
4264 return range_binop (MINUS_EXPR
, NULL_TREE
, val
, 0, integer_one_node
, 0);
4267 /* Return the successor of VAL in its type, handling the infinite case. */
4270 range_successor (tree val
)
4272 tree type
= TREE_TYPE (val
);
4274 if (INTEGRAL_TYPE_P (type
)
4275 && operand_equal_p (val
, TYPE_MAX_VALUE (type
), 0))
4278 return range_binop (PLUS_EXPR
, NULL_TREE
, val
, 0, integer_one_node
, 0);
4281 /* Given two ranges, see if we can merge them into one. Return 1 if we
4282 can, 0 if we can't. Set the output range into the specified parameters. */
4285 merge_ranges (int *pin_p
, tree
*plow
, tree
*phigh
, int in0_p
, tree low0
,
4286 tree high0
, int in1_p
, tree low1
, tree high1
)
4294 int lowequal
= ((low0
== 0 && low1
== 0)
4295 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4296 low0
, 0, low1
, 0)));
4297 int highequal
= ((high0
== 0 && high1
== 0)
4298 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4299 high0
, 1, high1
, 1)));
4301 /* Make range 0 be the range that starts first, or ends last if they
4302 start at the same value. Swap them if it isn't. */
4303 if (integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4306 && integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4307 high1
, 1, high0
, 1))))
4309 temp
= in0_p
, in0_p
= in1_p
, in1_p
= temp
;
4310 tem
= low0
, low0
= low1
, low1
= tem
;
4311 tem
= high0
, high0
= high1
, high1
= tem
;
4314 /* Now flag two cases, whether the ranges are disjoint or whether the
4315 second range is totally subsumed in the first. Note that the tests
4316 below are simplified by the ones above. */
4317 no_overlap
= integer_onep (range_binop (LT_EXPR
, integer_type_node
,
4318 high0
, 1, low1
, 0));
4319 subset
= integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4320 high1
, 1, high0
, 1));
4322 /* We now have four cases, depending on whether we are including or
4323 excluding the two ranges. */
4326 /* If they don't overlap, the result is false. If the second range
4327 is a subset it is the result. Otherwise, the range is from the start
4328 of the second to the end of the first. */
4330 in_p
= 0, low
= high
= 0;
4332 in_p
= 1, low
= low1
, high
= high1
;
4334 in_p
= 1, low
= low1
, high
= high0
;
4337 else if (in0_p
&& ! in1_p
)
4339 /* If they don't overlap, the result is the first range. If they are
4340 equal, the result is false. If the second range is a subset of the
4341 first, and the ranges begin at the same place, we go from just after
4342 the end of the second range to the end of the first. If the second
4343 range is not a subset of the first, or if it is a subset and both
4344 ranges end at the same place, the range starts at the start of the
4345 first range and ends just before the second range.
4346 Otherwise, we can't describe this as a single range. */
4348 in_p
= 1, low
= low0
, high
= high0
;
4349 else if (lowequal
&& highequal
)
4350 in_p
= 0, low
= high
= 0;
4351 else if (subset
&& lowequal
)
4353 low
= range_successor (high1
);
4358 /* We are in the weird situation where high0 > high1 but
4359 high1 has no successor. Punt. */
4363 else if (! subset
|| highequal
)
4366 high
= range_predecessor (low1
);
4370 /* low0 < low1 but low1 has no predecessor. Punt. */
4378 else if (! in0_p
&& in1_p
)
4380 /* If they don't overlap, the result is the second range. If the second
4381 is a subset of the first, the result is false. Otherwise,
4382 the range starts just after the first range and ends at the
4383 end of the second. */
4385 in_p
= 1, low
= low1
, high
= high1
;
4386 else if (subset
|| highequal
)
4387 in_p
= 0, low
= high
= 0;
4390 low
= range_successor (high0
);
4395 /* high1 > high0 but high0 has no successor. Punt. */
4403 /* The case where we are excluding both ranges. Here the complex case
4404 is if they don't overlap. In that case, the only time we have a
4405 range is if they are adjacent. If the second is a subset of the
4406 first, the result is the first. Otherwise, the range to exclude
4407 starts at the beginning of the first range and ends at the end of the
4411 if (integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4412 range_successor (high0
),
4414 in_p
= 0, low
= low0
, high
= high1
;
4417 /* Canonicalize - [min, x] into - [-, x]. */
4418 if (low0
&& TREE_CODE (low0
) == INTEGER_CST
)
4419 switch (TREE_CODE (TREE_TYPE (low0
)))
4422 if (TYPE_PRECISION (TREE_TYPE (low0
))
4423 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0
))))
4427 if (tree_int_cst_equal (low0
,
4428 TYPE_MIN_VALUE (TREE_TYPE (low0
))))
4432 if (TYPE_UNSIGNED (TREE_TYPE (low0
))
4433 && integer_zerop (low0
))
4440 /* Canonicalize - [x, max] into - [x, -]. */
4441 if (high1
&& TREE_CODE (high1
) == INTEGER_CST
)
4442 switch (TREE_CODE (TREE_TYPE (high1
)))
4445 if (TYPE_PRECISION (TREE_TYPE (high1
))
4446 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1
))))
4450 if (tree_int_cst_equal (high1
,
4451 TYPE_MAX_VALUE (TREE_TYPE (high1
))))
4455 if (TYPE_UNSIGNED (TREE_TYPE (high1
))
4456 && integer_zerop (range_binop (PLUS_EXPR
, NULL_TREE
,
4458 integer_one_node
, 1)))
4465 /* The ranges might be also adjacent between the maximum and
4466 minimum values of the given type. For
4467 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4468 return + [x + 1, y - 1]. */
4469 if (low0
== 0 && high1
== 0)
4471 low
= range_successor (high0
);
4472 high
= range_predecessor (low1
);
4473 if (low
== 0 || high
== 0)
4483 in_p
= 0, low
= low0
, high
= high0
;
4485 in_p
= 0, low
= low0
, high
= high1
;
4488 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4493 /* Subroutine of fold, looking inside expressions of the form
4494 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4495 of the COND_EXPR. This function is being used also to optimize
4496 A op B ? C : A, by reversing the comparison first.
4498 Return a folded expression whose code is not a COND_EXPR
4499 anymore, or NULL_TREE if no folding opportunity is found. */
4502 fold_cond_expr_with_comparison (location_t loc
, tree type
,
4503 tree arg0
, tree arg1
, tree arg2
)
4505 enum tree_code comp_code
= TREE_CODE (arg0
);
4506 tree arg00
= TREE_OPERAND (arg0
, 0);
4507 tree arg01
= TREE_OPERAND (arg0
, 1);
4508 tree arg1_type
= TREE_TYPE (arg1
);
4514 /* If we have A op 0 ? A : -A, consider applying the following
4517 A == 0? A : -A same as -A
4518 A != 0? A : -A same as A
4519 A >= 0? A : -A same as abs (A)
4520 A > 0? A : -A same as abs (A)
4521 A <= 0? A : -A same as -abs (A)
4522 A < 0? A : -A same as -abs (A)
4524 None of these transformations work for modes with signed
4525 zeros. If A is +/-0, the first two transformations will
4526 change the sign of the result (from +0 to -0, or vice
4527 versa). The last four will fix the sign of the result,
4528 even though the original expressions could be positive or
4529 negative, depending on the sign of A.
4531 Note that all these transformations are correct if A is
4532 NaN, since the two alternatives (A and -A) are also NaNs. */
4533 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
4534 && (FLOAT_TYPE_P (TREE_TYPE (arg01
))
4535 ? real_zerop (arg01
)
4536 : integer_zerop (arg01
))
4537 && ((TREE_CODE (arg2
) == NEGATE_EXPR
4538 && operand_equal_p (TREE_OPERAND (arg2
, 0), arg1
, 0))
4539 /* In the case that A is of the form X-Y, '-A' (arg2) may
4540 have already been folded to Y-X, check for that. */
4541 || (TREE_CODE (arg1
) == MINUS_EXPR
4542 && TREE_CODE (arg2
) == MINUS_EXPR
4543 && operand_equal_p (TREE_OPERAND (arg1
, 0),
4544 TREE_OPERAND (arg2
, 1), 0)
4545 && operand_equal_p (TREE_OPERAND (arg1
, 1),
4546 TREE_OPERAND (arg2
, 0), 0))))
4551 tem
= fold_convert_loc (loc
, arg1_type
, arg1
);
4552 return pedantic_non_lvalue_loc (loc
,
4553 fold_convert_loc (loc
, type
,
4554 negate_expr (tem
)));
4557 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
4560 if (flag_trapping_math
)
4565 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4566 arg1
= fold_convert_loc (loc
, signed_type_for
4567 (TREE_TYPE (arg1
)), arg1
);
4568 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4569 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
4572 if (flag_trapping_math
)
4576 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4577 arg1
= fold_convert_loc (loc
, signed_type_for
4578 (TREE_TYPE (arg1
)), arg1
);
4579 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4580 return negate_expr (fold_convert_loc (loc
, type
, tem
));
4582 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
4586 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4587 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4588 both transformations are correct when A is NaN: A != 0
4589 is then true, and A == 0 is false. */
4591 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
4592 && integer_zerop (arg01
) && integer_zerop (arg2
))
4594 if (comp_code
== NE_EXPR
)
4595 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
4596 else if (comp_code
== EQ_EXPR
)
4597 return build_int_cst (type
, 0);
4600 /* Try some transformations of A op B ? A : B.
4602 A == B? A : B same as B
4603 A != B? A : B same as A
4604 A >= B? A : B same as max (A, B)
4605 A > B? A : B same as max (B, A)
4606 A <= B? A : B same as min (A, B)
4607 A < B? A : B same as min (B, A)
4609 As above, these transformations don't work in the presence
4610 of signed zeros. For example, if A and B are zeros of
4611 opposite sign, the first two transformations will change
4612 the sign of the result. In the last four, the original
4613 expressions give different results for (A=+0, B=-0) and
4614 (A=-0, B=+0), but the transformed expressions do not.
4616 The first two transformations are correct if either A or B
4617 is a NaN. In the first transformation, the condition will
4618 be false, and B will indeed be chosen. In the case of the
4619 second transformation, the condition A != B will be true,
4620 and A will be chosen.
4622 The conversions to max() and min() are not correct if B is
4623 a number and A is not. The conditions in the original
4624 expressions will be false, so all four give B. The min()
4625 and max() versions would give a NaN instead. */
4626 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
4627 && operand_equal_for_comparison_p (arg01
, arg2
, arg00
)
4628 /* Avoid these transformations if the COND_EXPR may be used
4629 as an lvalue in the C++ front-end. PR c++/19199. */
4631 || (strcmp (lang_hooks
.name
, "GNU C++") != 0
4632 && strcmp (lang_hooks
.name
, "GNU Objective-C++") != 0)
4633 || ! maybe_lvalue_p (arg1
)
4634 || ! maybe_lvalue_p (arg2
)))
4636 tree comp_op0
= arg00
;
4637 tree comp_op1
= arg01
;
4638 tree comp_type
= TREE_TYPE (comp_op0
);
4640 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4641 if (TYPE_MAIN_VARIANT (comp_type
) == TYPE_MAIN_VARIANT (type
))
4651 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg2
));
4653 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
4658 /* In C++ a ?: expression can be an lvalue, so put the
4659 operand which will be used if they are equal first
4660 so that we can convert this back to the
4661 corresponding COND_EXPR. */
4662 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4664 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
4665 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
4666 tem
= (comp_code
== LE_EXPR
|| comp_code
== UNLE_EXPR
)
4667 ? fold_build2_loc (loc
, MIN_EXPR
, comp_type
, comp_op0
, comp_op1
)
4668 : fold_build2_loc (loc
, MIN_EXPR
, comp_type
,
4669 comp_op1
, comp_op0
);
4670 return pedantic_non_lvalue_loc (loc
,
4671 fold_convert_loc (loc
, type
, tem
));
4678 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4680 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
4681 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
4682 tem
= (comp_code
== GE_EXPR
|| comp_code
== UNGE_EXPR
)
4683 ? fold_build2_loc (loc
, MAX_EXPR
, comp_type
, comp_op0
, comp_op1
)
4684 : fold_build2_loc (loc
, MAX_EXPR
, comp_type
,
4685 comp_op1
, comp_op0
);
4686 return pedantic_non_lvalue_loc (loc
,
4687 fold_convert_loc (loc
, type
, tem
));
4691 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4692 return pedantic_non_lvalue_loc (loc
,
4693 fold_convert_loc (loc
, type
, arg2
));
4696 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4697 return pedantic_non_lvalue_loc (loc
,
4698 fold_convert_loc (loc
, type
, arg1
));
4701 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
4706 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4707 we might still be able to simplify this. For example,
4708 if C1 is one less or one more than C2, this might have started
4709 out as a MIN or MAX and been transformed by this function.
4710 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4712 if (INTEGRAL_TYPE_P (type
)
4713 && TREE_CODE (arg01
) == INTEGER_CST
4714 && TREE_CODE (arg2
) == INTEGER_CST
)
4718 if (TREE_CODE (arg1
) == INTEGER_CST
)
4720 /* We can replace A with C1 in this case. */
4721 arg1
= fold_convert_loc (loc
, type
, arg01
);
4722 return fold_build3_loc (loc
, COND_EXPR
, type
, arg0
, arg1
, arg2
);
4725 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4726 MIN_EXPR, to preserve the signedness of the comparison. */
4727 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
4729 && operand_equal_p (arg01
,
4730 const_binop (PLUS_EXPR
, arg2
,
4731 build_int_cst (type
, 1)),
4734 tem
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (arg00
), arg00
,
4735 fold_convert_loc (loc
, TREE_TYPE (arg00
),
4737 return pedantic_non_lvalue_loc (loc
,
4738 fold_convert_loc (loc
, type
, tem
));
4743 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4745 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
4747 && operand_equal_p (arg01
,
4748 const_binop (MINUS_EXPR
, arg2
,
4749 build_int_cst (type
, 1)),
4752 tem
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (arg00
), arg00
,
4753 fold_convert_loc (loc
, TREE_TYPE (arg00
),
4755 return pedantic_non_lvalue_loc (loc
,
4756 fold_convert_loc (loc
, type
, tem
));
4761 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4762 MAX_EXPR, to preserve the signedness of the comparison. */
4763 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
4765 && operand_equal_p (arg01
,
4766 const_binop (MINUS_EXPR
, arg2
,
4767 build_int_cst (type
, 1)),
4770 tem
= fold_build2_loc (loc
, MAX_EXPR
, TREE_TYPE (arg00
), arg00
,
4771 fold_convert_loc (loc
, TREE_TYPE (arg00
),
4773 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
4778 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4779 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
4781 && operand_equal_p (arg01
,
4782 const_binop (PLUS_EXPR
, arg2
,
4783 build_int_cst (type
, 1)),
4786 tem
= fold_build2_loc (loc
, MAX_EXPR
, TREE_TYPE (arg00
), arg00
,
4787 fold_convert_loc (loc
, TREE_TYPE (arg00
),
4789 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
4803 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4804 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4805 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4809 /* EXP is some logical combination of boolean tests. See if we can
4810 merge it into some range test. Return the new tree if so. */
4813 fold_range_test (location_t loc
, enum tree_code code
, tree type
,
4816 int or_op
= (code
== TRUTH_ORIF_EXPR
4817 || code
== TRUTH_OR_EXPR
);
4818 int in0_p
, in1_p
, in_p
;
4819 tree low0
, low1
, low
, high0
, high1
, high
;
4820 bool strict_overflow_p
= false;
4821 tree lhs
= make_range (op0
, &in0_p
, &low0
, &high0
, &strict_overflow_p
);
4822 tree rhs
= make_range (op1
, &in1_p
, &low1
, &high1
, &strict_overflow_p
);
4824 const char * const warnmsg
= G_("assuming signed overflow does not occur "
4825 "when simplifying range test");
4827 /* If this is an OR operation, invert both sides; we will invert
4828 again at the end. */
4830 in0_p
= ! in0_p
, in1_p
= ! in1_p
;
4832 /* If both expressions are the same, if we can merge the ranges, and we
4833 can build the range test, return it or it inverted. If one of the
4834 ranges is always true or always false, consider it to be the same
4835 expression as the other. */
4836 if ((lhs
== 0 || rhs
== 0 || operand_equal_p (lhs
, rhs
, 0))
4837 && merge_ranges (&in_p
, &low
, &high
, in0_p
, low0
, high0
,
4839 && 0 != (tem
= (build_range_check (loc
, type
,
4841 : rhs
!= 0 ? rhs
: integer_zero_node
,
4844 if (strict_overflow_p
)
4845 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
4846 return or_op
? invert_truthvalue_loc (loc
, tem
) : tem
;
4849 /* On machines where the branch cost is expensive, if this is a
4850 short-circuited branch and the underlying object on both sides
4851 is the same, make a non-short-circuit operation. */
4852 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4853 && lhs
!= 0 && rhs
!= 0
4854 && (code
== TRUTH_ANDIF_EXPR
4855 || code
== TRUTH_ORIF_EXPR
)
4856 && operand_equal_p (lhs
, rhs
, 0))
4858 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4859 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4860 which cases we can't do this. */
4861 if (simple_operand_p (lhs
))
4862 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
4863 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
4866 else if (!lang_hooks
.decls
.global_bindings_p ()
4867 && !CONTAINS_PLACEHOLDER_P (lhs
))
4869 tree common
= save_expr (lhs
);
4871 if (0 != (lhs
= build_range_check (loc
, type
, common
,
4872 or_op
? ! in0_p
: in0_p
,
4874 && (0 != (rhs
= build_range_check (loc
, type
, common
,
4875 or_op
? ! in1_p
: in1_p
,
4878 if (strict_overflow_p
)
4879 fold_overflow_warning (warnmsg
,
4880 WARN_STRICT_OVERFLOW_COMPARISON
);
4881 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
4882 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
4891 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4892 bit value. Arrange things so the extra bits will be set to zero if and
4893 only if C is signed-extended to its full width. If MASK is nonzero,
4894 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4897 unextend (tree c
, int p
, int unsignedp
, tree mask
)
4899 tree type
= TREE_TYPE (c
);
4900 int modesize
= GET_MODE_BITSIZE (TYPE_MODE (type
));
4903 if (p
== modesize
|| unsignedp
)
4906 /* We work by getting just the sign bit into the low-order bit, then
4907 into the high-order bit, then sign-extend. We then XOR that value
4909 temp
= const_binop (RSHIFT_EXPR
, c
, size_int (p
- 1));
4910 temp
= const_binop (BIT_AND_EXPR
, temp
, size_int (1));
4912 /* We must use a signed type in order to get an arithmetic right shift.
4913 However, we must also avoid introducing accidental overflows, so that
4914 a subsequent call to integer_zerop will work. Hence we must
4915 do the type conversion here. At this point, the constant is either
4916 zero or one, and the conversion to a signed type can never overflow.
4917 We could get an overflow if this conversion is done anywhere else. */
4918 if (TYPE_UNSIGNED (type
))
4919 temp
= fold_convert (signed_type_for (type
), temp
);
4921 temp
= const_binop (LSHIFT_EXPR
, temp
, size_int (modesize
- 1));
4922 temp
= const_binop (RSHIFT_EXPR
, temp
, size_int (modesize
- p
- 1));
4924 temp
= const_binop (BIT_AND_EXPR
, temp
,
4925 fold_convert (TREE_TYPE (c
), mask
));
4926 /* If necessary, convert the type back to match the type of C. */
4927 if (TYPE_UNSIGNED (type
))
4928 temp
= fold_convert (type
, temp
);
4930 return fold_convert (type
, const_binop (BIT_XOR_EXPR
, c
, temp
));
4933 /* For an expression that has the form
4937 we can drop one of the inner expressions and simplify to
4941 LOC is the location of the resulting expression. OP is the inner
4942 logical operation; the left-hand side in the examples above, while CMPOP
4943 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4944 removing a condition that guards another, as in
4945 (A != NULL && A->...) || A == NULL
4946 which we must not transform. If RHS_ONLY is true, only eliminate the
4947 right-most operand of the inner logical operation. */
4950 merge_truthop_with_opposite_arm (location_t loc
, tree op
, tree cmpop
,
4953 tree type
= TREE_TYPE (cmpop
);
4954 enum tree_code code
= TREE_CODE (cmpop
);
4955 enum tree_code truthop_code
= TREE_CODE (op
);
4956 tree lhs
= TREE_OPERAND (op
, 0);
4957 tree rhs
= TREE_OPERAND (op
, 1);
4958 tree orig_lhs
= lhs
, orig_rhs
= rhs
;
4959 enum tree_code rhs_code
= TREE_CODE (rhs
);
4960 enum tree_code lhs_code
= TREE_CODE (lhs
);
4961 enum tree_code inv_code
;
4963 if (TREE_SIDE_EFFECTS (op
) || TREE_SIDE_EFFECTS (cmpop
))
4966 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
4969 if (rhs_code
== truthop_code
)
4971 tree newrhs
= merge_truthop_with_opposite_arm (loc
, rhs
, cmpop
, rhs_only
);
4972 if (newrhs
!= NULL_TREE
)
4975 rhs_code
= TREE_CODE (rhs
);
4978 if (lhs_code
== truthop_code
&& !rhs_only
)
4980 tree newlhs
= merge_truthop_with_opposite_arm (loc
, lhs
, cmpop
, false);
4981 if (newlhs
!= NULL_TREE
)
4984 lhs_code
= TREE_CODE (lhs
);
4988 inv_code
= invert_tree_comparison (code
, HONOR_NANS (TYPE_MODE (type
)));
4989 if (inv_code
== rhs_code
4990 && operand_equal_p (TREE_OPERAND (rhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
4991 && operand_equal_p (TREE_OPERAND (rhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
4993 if (!rhs_only
&& inv_code
== lhs_code
4994 && operand_equal_p (TREE_OPERAND (lhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
4995 && operand_equal_p (TREE_OPERAND (lhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
4997 if (rhs
!= orig_rhs
|| lhs
!= orig_lhs
)
4998 return fold_build2_loc (loc
, truthop_code
, TREE_TYPE (cmpop
),
5003 /* Find ways of folding logical expressions of LHS and RHS:
5004 Try to merge two comparisons to the same innermost item.
5005 Look for range tests like "ch >= '0' && ch <= '9'".
5006 Look for combinations of simple terms on machines with expensive branches
5007 and evaluate the RHS unconditionally.
5009 For example, if we have p->a == 2 && p->b == 4 and we can make an
5010 object large enough to span both A and B, we can do this with a comparison
5011 against the object ANDed with the a mask.
5013 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5014 operations to do this with one comparison.
5016 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5017 function and the one above.
5019 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5020 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5022 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5025 We return the simplified tree or 0 if no optimization is possible. */
5028 fold_truthop (location_t loc
, enum tree_code code
, tree truth_type
,
5031 /* If this is the "or" of two comparisons, we can do something if
5032 the comparisons are NE_EXPR. If this is the "and", we can do something
5033 if the comparisons are EQ_EXPR. I.e.,
5034 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5036 WANTED_CODE is this operation code. For single bit fields, we can
5037 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5038 comparison for one-bit fields. */
5040 enum tree_code wanted_code
;
5041 enum tree_code lcode
, rcode
;
5042 tree ll_arg
, lr_arg
, rl_arg
, rr_arg
;
5043 tree ll_inner
, lr_inner
, rl_inner
, rr_inner
;
5044 HOST_WIDE_INT ll_bitsize
, ll_bitpos
, lr_bitsize
, lr_bitpos
;
5045 HOST_WIDE_INT rl_bitsize
, rl_bitpos
, rr_bitsize
, rr_bitpos
;
5046 HOST_WIDE_INT xll_bitpos
, xlr_bitpos
, xrl_bitpos
, xrr_bitpos
;
5047 HOST_WIDE_INT lnbitsize
, lnbitpos
, rnbitsize
, rnbitpos
;
5048 int ll_unsignedp
, lr_unsignedp
, rl_unsignedp
, rr_unsignedp
;
5049 enum machine_mode ll_mode
, lr_mode
, rl_mode
, rr_mode
;
5050 enum machine_mode lnmode
, rnmode
;
5051 tree ll_mask
, lr_mask
, rl_mask
, rr_mask
;
5052 tree ll_and_mask
, lr_and_mask
, rl_and_mask
, rr_and_mask
;
5053 tree l_const
, r_const
;
5054 tree lntype
, rntype
, result
;
5055 HOST_WIDE_INT first_bit
, end_bit
;
5057 tree orig_lhs
= lhs
, orig_rhs
= rhs
;
5058 enum tree_code orig_code
= code
;
5060 /* Start by getting the comparison codes. Fail if anything is volatile.
5061 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5062 it were surrounded with a NE_EXPR. */
5064 if (TREE_SIDE_EFFECTS (lhs
) || TREE_SIDE_EFFECTS (rhs
))
5067 lcode
= TREE_CODE (lhs
);
5068 rcode
= TREE_CODE (rhs
);
5070 if (lcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (lhs
, 1)))
5072 lhs
= build2 (NE_EXPR
, truth_type
, lhs
,
5073 build_int_cst (TREE_TYPE (lhs
), 0));
5077 if (rcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (rhs
, 1)))
5079 rhs
= build2 (NE_EXPR
, truth_type
, rhs
,
5080 build_int_cst (TREE_TYPE (rhs
), 0));
5084 if (TREE_CODE_CLASS (lcode
) != tcc_comparison
5085 || TREE_CODE_CLASS (rcode
) != tcc_comparison
)
5088 ll_arg
= TREE_OPERAND (lhs
, 0);
5089 lr_arg
= TREE_OPERAND (lhs
, 1);
5090 rl_arg
= TREE_OPERAND (rhs
, 0);
5091 rr_arg
= TREE_OPERAND (rhs
, 1);
5093 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5094 if (simple_operand_p (ll_arg
)
5095 && simple_operand_p (lr_arg
))
5097 if (operand_equal_p (ll_arg
, rl_arg
, 0)
5098 && operand_equal_p (lr_arg
, rr_arg
, 0))
5100 result
= combine_comparisons (loc
, code
, lcode
, rcode
,
5101 truth_type
, ll_arg
, lr_arg
);
5105 else if (operand_equal_p (ll_arg
, rr_arg
, 0)
5106 && operand_equal_p (lr_arg
, rl_arg
, 0))
5108 result
= combine_comparisons (loc
, code
, lcode
,
5109 swap_tree_comparison (rcode
),
5110 truth_type
, ll_arg
, lr_arg
);
5116 code
= ((code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
)
5117 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
);
5119 /* If the RHS can be evaluated unconditionally and its operands are
5120 simple, it wins to evaluate the RHS unconditionally on machines
5121 with expensive branches. In this case, this isn't a comparison
5122 that can be merged. Avoid doing this if the RHS is a floating-point
5123 comparison since those can trap. */
5125 if (BRANCH_COST (optimize_function_for_speed_p (cfun
),
5127 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg
))
5128 && simple_operand_p (rl_arg
)
5129 && simple_operand_p (rr_arg
))
5131 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5132 if (code
== TRUTH_OR_EXPR
5133 && lcode
== NE_EXPR
&& integer_zerop (lr_arg
)
5134 && rcode
== NE_EXPR
&& integer_zerop (rr_arg
)
5135 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5136 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5137 return build2_loc (loc
, NE_EXPR
, truth_type
,
5138 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5140 build_int_cst (TREE_TYPE (ll_arg
), 0));
5142 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5143 if (code
== TRUTH_AND_EXPR
5144 && lcode
== EQ_EXPR
&& integer_zerop (lr_arg
)
5145 && rcode
== EQ_EXPR
&& integer_zerop (rr_arg
)
5146 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5147 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5148 return build2_loc (loc
, EQ_EXPR
, truth_type
,
5149 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5151 build_int_cst (TREE_TYPE (ll_arg
), 0));
5153 if (LOGICAL_OP_NON_SHORT_CIRCUIT
)
5155 if (code
!= orig_code
|| lhs
!= orig_lhs
|| rhs
!= orig_rhs
)
5156 return build2_loc (loc
, code
, truth_type
, lhs
, rhs
);
5161 /* See if the comparisons can be merged. Then get all the parameters for
5164 if ((lcode
!= EQ_EXPR
&& lcode
!= NE_EXPR
)
5165 || (rcode
!= EQ_EXPR
&& rcode
!= NE_EXPR
))
5169 ll_inner
= decode_field_reference (loc
, ll_arg
,
5170 &ll_bitsize
, &ll_bitpos
, &ll_mode
,
5171 &ll_unsignedp
, &volatilep
, &ll_mask
,
5173 lr_inner
= decode_field_reference (loc
, lr_arg
,
5174 &lr_bitsize
, &lr_bitpos
, &lr_mode
,
5175 &lr_unsignedp
, &volatilep
, &lr_mask
,
5177 rl_inner
= decode_field_reference (loc
, rl_arg
,
5178 &rl_bitsize
, &rl_bitpos
, &rl_mode
,
5179 &rl_unsignedp
, &volatilep
, &rl_mask
,
5181 rr_inner
= decode_field_reference (loc
, rr_arg
,
5182 &rr_bitsize
, &rr_bitpos
, &rr_mode
,
5183 &rr_unsignedp
, &volatilep
, &rr_mask
,
5186 /* It must be true that the inner operation on the lhs of each
5187 comparison must be the same if we are to be able to do anything.
5188 Then see if we have constants. If not, the same must be true for
5190 if (volatilep
|| ll_inner
== 0 || rl_inner
== 0
5191 || ! operand_equal_p (ll_inner
, rl_inner
, 0))
5194 if (TREE_CODE (lr_arg
) == INTEGER_CST
5195 && TREE_CODE (rr_arg
) == INTEGER_CST
)
5196 l_const
= lr_arg
, r_const
= rr_arg
;
5197 else if (lr_inner
== 0 || rr_inner
== 0
5198 || ! operand_equal_p (lr_inner
, rr_inner
, 0))
5201 l_const
= r_const
= 0;
5203 /* If either comparison code is not correct for our logical operation,
5204 fail. However, we can convert a one-bit comparison against zero into
5205 the opposite comparison against that bit being set in the field. */
5207 wanted_code
= (code
== TRUTH_AND_EXPR
? EQ_EXPR
: NE_EXPR
);
5208 if (lcode
!= wanted_code
)
5210 if (l_const
&& integer_zerop (l_const
) && integer_pow2p (ll_mask
))
5212 /* Make the left operand unsigned, since we are only interested
5213 in the value of one bit. Otherwise we are doing the wrong
5222 /* This is analogous to the code for l_const above. */
5223 if (rcode
!= wanted_code
)
5225 if (r_const
&& integer_zerop (r_const
) && integer_pow2p (rl_mask
))
5234 /* See if we can find a mode that contains both fields being compared on
5235 the left. If we can't, fail. Otherwise, update all constants and masks
5236 to be relative to a field of that size. */
5237 first_bit
= MIN (ll_bitpos
, rl_bitpos
);
5238 end_bit
= MAX (ll_bitpos
+ ll_bitsize
, rl_bitpos
+ rl_bitsize
);
5239 lnmode
= get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
5240 TYPE_ALIGN (TREE_TYPE (ll_inner
)), word_mode
,
5242 if (lnmode
== VOIDmode
)
5245 lnbitsize
= GET_MODE_BITSIZE (lnmode
);
5246 lnbitpos
= first_bit
& ~ (lnbitsize
- 1);
5247 lntype
= lang_hooks
.types
.type_for_size (lnbitsize
, 1);
5248 xll_bitpos
= ll_bitpos
- lnbitpos
, xrl_bitpos
= rl_bitpos
- lnbitpos
;
5250 if (BYTES_BIG_ENDIAN
)
5252 xll_bitpos
= lnbitsize
- xll_bitpos
- ll_bitsize
;
5253 xrl_bitpos
= lnbitsize
- xrl_bitpos
- rl_bitsize
;
5256 ll_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, ll_mask
),
5257 size_int (xll_bitpos
));
5258 rl_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, rl_mask
),
5259 size_int (xrl_bitpos
));
5263 l_const
= fold_convert_loc (loc
, lntype
, l_const
);
5264 l_const
= unextend (l_const
, ll_bitsize
, ll_unsignedp
, ll_and_mask
);
5265 l_const
= const_binop (LSHIFT_EXPR
, l_const
, size_int (xll_bitpos
));
5266 if (! integer_zerop (const_binop (BIT_AND_EXPR
, l_const
,
5267 fold_build1_loc (loc
, BIT_NOT_EXPR
,
5270 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5272 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5277 r_const
= fold_convert_loc (loc
, lntype
, r_const
);
5278 r_const
= unextend (r_const
, rl_bitsize
, rl_unsignedp
, rl_and_mask
);
5279 r_const
= const_binop (LSHIFT_EXPR
, r_const
, size_int (xrl_bitpos
));
5280 if (! integer_zerop (const_binop (BIT_AND_EXPR
, r_const
,
5281 fold_build1_loc (loc
, BIT_NOT_EXPR
,
5284 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5286 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5290 /* If the right sides are not constant, do the same for it. Also,
5291 disallow this optimization if a size or signedness mismatch occurs
5292 between the left and right sides. */
5295 if (ll_bitsize
!= lr_bitsize
|| rl_bitsize
!= rr_bitsize
5296 || ll_unsignedp
!= lr_unsignedp
|| rl_unsignedp
!= rr_unsignedp
5297 /* Make sure the two fields on the right
5298 correspond to the left without being swapped. */
5299 || ll_bitpos
- rl_bitpos
!= lr_bitpos
- rr_bitpos
)
5302 first_bit
= MIN (lr_bitpos
, rr_bitpos
);
5303 end_bit
= MAX (lr_bitpos
+ lr_bitsize
, rr_bitpos
+ rr_bitsize
);
5304 rnmode
= get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
5305 TYPE_ALIGN (TREE_TYPE (lr_inner
)), word_mode
,
5307 if (rnmode
== VOIDmode
)
5310 rnbitsize
= GET_MODE_BITSIZE (rnmode
);
5311 rnbitpos
= first_bit
& ~ (rnbitsize
- 1);
5312 rntype
= lang_hooks
.types
.type_for_size (rnbitsize
, 1);
5313 xlr_bitpos
= lr_bitpos
- rnbitpos
, xrr_bitpos
= rr_bitpos
- rnbitpos
;
5315 if (BYTES_BIG_ENDIAN
)
5317 xlr_bitpos
= rnbitsize
- xlr_bitpos
- lr_bitsize
;
5318 xrr_bitpos
= rnbitsize
- xrr_bitpos
- rr_bitsize
;
5321 lr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
5323 size_int (xlr_bitpos
));
5324 rr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
5326 size_int (xrr_bitpos
));
5328 /* Make a mask that corresponds to both fields being compared.
5329 Do this for both items being compared. If the operands are the
5330 same size and the bits being compared are in the same position
5331 then we can do this by masking both and comparing the masked
5333 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
5334 lr_mask
= const_binop (BIT_IOR_EXPR
, lr_mask
, rr_mask
);
5335 if (lnbitsize
== rnbitsize
&& xll_bitpos
== xlr_bitpos
)
5337 lhs
= make_bit_field_ref (loc
, ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5338 ll_unsignedp
|| rl_unsignedp
);
5339 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5340 lhs
= build2 (BIT_AND_EXPR
, lntype
, lhs
, ll_mask
);
5342 rhs
= make_bit_field_ref (loc
, lr_inner
, rntype
, rnbitsize
, rnbitpos
,
5343 lr_unsignedp
|| rr_unsignedp
);
5344 if (! all_ones_mask_p (lr_mask
, rnbitsize
))
5345 rhs
= build2 (BIT_AND_EXPR
, rntype
, rhs
, lr_mask
);
5347 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
5350 /* There is still another way we can do something: If both pairs of
5351 fields being compared are adjacent, we may be able to make a wider
5352 field containing them both.
5354 Note that we still must mask the lhs/rhs expressions. Furthermore,
5355 the mask must be shifted to account for the shift done by
5356 make_bit_field_ref. */
5357 if ((ll_bitsize
+ ll_bitpos
== rl_bitpos
5358 && lr_bitsize
+ lr_bitpos
== rr_bitpos
)
5359 || (ll_bitpos
== rl_bitpos
+ rl_bitsize
5360 && lr_bitpos
== rr_bitpos
+ rr_bitsize
))
5364 lhs
= make_bit_field_ref (loc
, ll_inner
, lntype
,
5365 ll_bitsize
+ rl_bitsize
,
5366 MIN (ll_bitpos
, rl_bitpos
), ll_unsignedp
);
5367 rhs
= make_bit_field_ref (loc
, lr_inner
, rntype
,
5368 lr_bitsize
+ rr_bitsize
,
5369 MIN (lr_bitpos
, rr_bitpos
), lr_unsignedp
);
5371 ll_mask
= const_binop (RSHIFT_EXPR
, ll_mask
,
5372 size_int (MIN (xll_bitpos
, xrl_bitpos
)));
5373 lr_mask
= const_binop (RSHIFT_EXPR
, lr_mask
,
5374 size_int (MIN (xlr_bitpos
, xrr_bitpos
)));
5376 /* Convert to the smaller type before masking out unwanted bits. */
5378 if (lntype
!= rntype
)
5380 if (lnbitsize
> rnbitsize
)
5382 lhs
= fold_convert_loc (loc
, rntype
, lhs
);
5383 ll_mask
= fold_convert_loc (loc
, rntype
, ll_mask
);
5386 else if (lnbitsize
< rnbitsize
)
5388 rhs
= fold_convert_loc (loc
, lntype
, rhs
);
5389 lr_mask
= fold_convert_loc (loc
, lntype
, lr_mask
);
5394 if (! all_ones_mask_p (ll_mask
, ll_bitsize
+ rl_bitsize
))
5395 lhs
= build2 (BIT_AND_EXPR
, type
, lhs
, ll_mask
);
5397 if (! all_ones_mask_p (lr_mask
, lr_bitsize
+ rr_bitsize
))
5398 rhs
= build2 (BIT_AND_EXPR
, type
, rhs
, lr_mask
);
5400 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
5406 /* Handle the case of comparisons with constants. If there is something in
5407 common between the masks, those bits of the constants must be the same.
5408 If not, the condition is always false. Test for this to avoid generating
5409 incorrect code below. */
5410 result
= const_binop (BIT_AND_EXPR
, ll_mask
, rl_mask
);
5411 if (! integer_zerop (result
)
5412 && simple_cst_equal (const_binop (BIT_AND_EXPR
, result
, l_const
),
5413 const_binop (BIT_AND_EXPR
, result
, r_const
)) != 1)
5415 if (wanted_code
== NE_EXPR
)
5417 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5418 return constant_boolean_node (true, truth_type
);
5422 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5423 return constant_boolean_node (false, truth_type
);
5427 /* Construct the expression we will return. First get the component
5428 reference we will make. Unless the mask is all ones the width of
5429 that field, perform the mask operation. Then compare with the
5431 result
= make_bit_field_ref (loc
, ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5432 ll_unsignedp
|| rl_unsignedp
);
5434 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
5435 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5436 result
= build2_loc (loc
, BIT_AND_EXPR
, lntype
, result
, ll_mask
);
5438 return build2_loc (loc
, wanted_code
, truth_type
, result
,
5439 const_binop (BIT_IOR_EXPR
, l_const
, r_const
));
5442 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5446 optimize_minmax_comparison (location_t loc
, enum tree_code code
, tree type
,
5450 enum tree_code op_code
;
5453 int consts_equal
, consts_lt
;
5456 STRIP_SIGN_NOPS (arg0
);
5458 op_code
= TREE_CODE (arg0
);
5459 minmax_const
= TREE_OPERAND (arg0
, 1);
5460 comp_const
= fold_convert_loc (loc
, TREE_TYPE (arg0
), op1
);
5461 consts_equal
= tree_int_cst_equal (minmax_const
, comp_const
);
5462 consts_lt
= tree_int_cst_lt (minmax_const
, comp_const
);
5463 inner
= TREE_OPERAND (arg0
, 0);
5465 /* If something does not permit us to optimize, return the original tree. */
5466 if ((op_code
!= MIN_EXPR
&& op_code
!= MAX_EXPR
)
5467 || TREE_CODE (comp_const
) != INTEGER_CST
5468 || TREE_OVERFLOW (comp_const
)
5469 || TREE_CODE (minmax_const
) != INTEGER_CST
5470 || TREE_OVERFLOW (minmax_const
))
5473 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5474 and GT_EXPR, doing the rest with recursive calls using logical
5478 case NE_EXPR
: case LT_EXPR
: case LE_EXPR
:
5481 = optimize_minmax_comparison (loc
,
5482 invert_tree_comparison (code
, false),
5485 return invert_truthvalue_loc (loc
, tem
);
5491 fold_build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
5492 optimize_minmax_comparison
5493 (loc
, EQ_EXPR
, type
, arg0
, comp_const
),
5494 optimize_minmax_comparison
5495 (loc
, GT_EXPR
, type
, arg0
, comp_const
));
5498 if (op_code
== MAX_EXPR
&& consts_equal
)
5499 /* MAX (X, 0) == 0 -> X <= 0 */
5500 return fold_build2_loc (loc
, LE_EXPR
, type
, inner
, comp_const
);
5502 else if (op_code
== MAX_EXPR
&& consts_lt
)
5503 /* MAX (X, 0) == 5 -> X == 5 */
5504 return fold_build2_loc (loc
, EQ_EXPR
, type
, inner
, comp_const
);
5506 else if (op_code
== MAX_EXPR
)
5507 /* MAX (X, 0) == -1 -> false */
5508 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5510 else if (consts_equal
)
5511 /* MIN (X, 0) == 0 -> X >= 0 */
5512 return fold_build2_loc (loc
, GE_EXPR
, type
, inner
, comp_const
);
5515 /* MIN (X, 0) == 5 -> false */
5516 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5519 /* MIN (X, 0) == -1 -> X == -1 */
5520 return fold_build2_loc (loc
, EQ_EXPR
, type
, inner
, comp_const
);
5523 if (op_code
== MAX_EXPR
&& (consts_equal
|| consts_lt
))
5524 /* MAX (X, 0) > 0 -> X > 0
5525 MAX (X, 0) > 5 -> X > 5 */
5526 return fold_build2_loc (loc
, GT_EXPR
, type
, inner
, comp_const
);
5528 else if (op_code
== MAX_EXPR
)
5529 /* MAX (X, 0) > -1 -> true */
5530 return omit_one_operand_loc (loc
, type
, integer_one_node
, inner
);
5532 else if (op_code
== MIN_EXPR
&& (consts_equal
|| consts_lt
))
5533 /* MIN (X, 0) > 0 -> false
5534 MIN (X, 0) > 5 -> false */
5535 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5538 /* MIN (X, 0) > -1 -> X > -1 */
5539 return fold_build2_loc (loc
, GT_EXPR
, type
, inner
, comp_const
);
5546 /* T is an integer expression that is being multiplied, divided, or taken a
5547 modulus (CODE says which and what kind of divide or modulus) by a
5548 constant C. See if we can eliminate that operation by folding it with
5549 other operations already in T. WIDE_TYPE, if non-null, is a type that
5550 should be used for the computation if wider than our type.
5552 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5553 (X * 2) + (Y * 4). We must, however, be assured that either the original
5554 expression would not overflow or that overflow is undefined for the type
5555 in the language in question.
5557 If we return a non-null expression, it is an equivalent form of the
5558 original computation, but need not be in the original type.
5560 We set *STRICT_OVERFLOW_P to true if the return values depends on
5561 signed overflow being undefined. Otherwise we do not change
5562 *STRICT_OVERFLOW_P. */
5565 extract_muldiv (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5566 bool *strict_overflow_p
)
5568 /* To avoid exponential search depth, refuse to allow recursion past
5569 three levels. Beyond that (1) it's highly unlikely that we'll find
5570 something interesting and (2) we've probably processed it before
5571 when we built the inner expression. */
5580 ret
= extract_muldiv_1 (t
, c
, code
, wide_type
, strict_overflow_p
);
5587 extract_muldiv_1 (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5588 bool *strict_overflow_p
)
5590 tree type
= TREE_TYPE (t
);
5591 enum tree_code tcode
= TREE_CODE (t
);
5592 tree ctype
= (wide_type
!= 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type
))
5593 > GET_MODE_SIZE (TYPE_MODE (type
)))
5594 ? wide_type
: type
);
5596 int same_p
= tcode
== code
;
5597 tree op0
= NULL_TREE
, op1
= NULL_TREE
;
5598 bool sub_strict_overflow_p
;
5600 /* Don't deal with constants of zero here; they confuse the code below. */
5601 if (integer_zerop (c
))
5604 if (TREE_CODE_CLASS (tcode
) == tcc_unary
)
5605 op0
= TREE_OPERAND (t
, 0);
5607 if (TREE_CODE_CLASS (tcode
) == tcc_binary
)
5608 op0
= TREE_OPERAND (t
, 0), op1
= TREE_OPERAND (t
, 1);
5610 /* Note that we need not handle conditional operations here since fold
5611 already handles those cases. So just do arithmetic here. */
5615 /* For a constant, we can always simplify if we are a multiply
5616 or (for divide and modulus) if it is a multiple of our constant. */
5617 if (code
== MULT_EXPR
5618 || integer_zerop (const_binop (TRUNC_MOD_EXPR
, t
, c
)))
5619 return const_binop (code
, fold_convert (ctype
, t
),
5620 fold_convert (ctype
, c
));
5623 CASE_CONVERT
: case NON_LVALUE_EXPR
:
5624 /* If op0 is an expression ... */
5625 if ((COMPARISON_CLASS_P (op0
)
5626 || UNARY_CLASS_P (op0
)
5627 || BINARY_CLASS_P (op0
)
5628 || VL_EXP_CLASS_P (op0
)
5629 || EXPRESSION_CLASS_P (op0
))
5630 /* ... and has wrapping overflow, and its type is smaller
5631 than ctype, then we cannot pass through as widening. */
5632 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0
))
5633 && ! (TREE_CODE (TREE_TYPE (op0
)) == INTEGER_TYPE
5634 && TYPE_IS_SIZETYPE (TREE_TYPE (op0
)))
5635 && (TYPE_PRECISION (ctype
)
5636 > TYPE_PRECISION (TREE_TYPE (op0
))))
5637 /* ... or this is a truncation (t is narrower than op0),
5638 then we cannot pass through this narrowing. */
5639 || (TYPE_PRECISION (type
)
5640 < TYPE_PRECISION (TREE_TYPE (op0
)))
5641 /* ... or signedness changes for division or modulus,
5642 then we cannot pass through this conversion. */
5643 || (code
!= MULT_EXPR
5644 && (TYPE_UNSIGNED (ctype
)
5645 != TYPE_UNSIGNED (TREE_TYPE (op0
))))
5646 /* ... or has undefined overflow while the converted to
5647 type has not, we cannot do the operation in the inner type
5648 as that would introduce undefined overflow. */
5649 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0
))
5650 && !TYPE_OVERFLOW_UNDEFINED (type
))))
5653 /* Pass the constant down and see if we can make a simplification. If
5654 we can, replace this expression with the inner simplification for
5655 possible later conversion to our or some other type. */
5656 if ((t2
= fold_convert (TREE_TYPE (op0
), c
)) != 0
5657 && TREE_CODE (t2
) == INTEGER_CST
5658 && !TREE_OVERFLOW (t2
)
5659 && (0 != (t1
= extract_muldiv (op0
, t2
, code
,
5661 ? ctype
: NULL_TREE
,
5662 strict_overflow_p
))))
5667 /* If widening the type changes it from signed to unsigned, then we
5668 must avoid building ABS_EXPR itself as unsigned. */
5669 if (TYPE_UNSIGNED (ctype
) && !TYPE_UNSIGNED (type
))
5671 tree cstype
= (*signed_type_for
) (ctype
);
5672 if ((t1
= extract_muldiv (op0
, c
, code
, cstype
, strict_overflow_p
))
5675 t1
= fold_build1 (tcode
, cstype
, fold_convert (cstype
, t1
));
5676 return fold_convert (ctype
, t1
);
5680 /* If the constant is negative, we cannot simplify this. */
5681 if (tree_int_cst_sgn (c
) == -1)
5685 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
, strict_overflow_p
))
5687 return fold_build1 (tcode
, ctype
, fold_convert (ctype
, t1
));
5690 case MIN_EXPR
: case MAX_EXPR
:
5691 /* If widening the type changes the signedness, then we can't perform
5692 this optimization as that changes the result. */
5693 if (TYPE_UNSIGNED (ctype
) != TYPE_UNSIGNED (type
))
5696 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5697 sub_strict_overflow_p
= false;
5698 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
,
5699 &sub_strict_overflow_p
)) != 0
5700 && (t2
= extract_muldiv (op1
, c
, code
, wide_type
,
5701 &sub_strict_overflow_p
)) != 0)
5703 if (tree_int_cst_sgn (c
) < 0)
5704 tcode
= (tcode
== MIN_EXPR
? MAX_EXPR
: MIN_EXPR
);
5705 if (sub_strict_overflow_p
)
5706 *strict_overflow_p
= true;
5707 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5708 fold_convert (ctype
, t2
));
5712 case LSHIFT_EXPR
: case RSHIFT_EXPR
:
5713 /* If the second operand is constant, this is a multiplication
5714 or floor division, by a power of two, so we can treat it that
5715 way unless the multiplier or divisor overflows. Signed
5716 left-shift overflow is implementation-defined rather than
5717 undefined in C90, so do not convert signed left shift into
5719 if (TREE_CODE (op1
) == INTEGER_CST
5720 && (tcode
== RSHIFT_EXPR
|| TYPE_UNSIGNED (TREE_TYPE (op0
)))
5721 /* const_binop may not detect overflow correctly,
5722 so check for it explicitly here. */
5723 && TYPE_PRECISION (TREE_TYPE (size_one_node
)) > TREE_INT_CST_LOW (op1
)
5724 && TREE_INT_CST_HIGH (op1
) == 0
5725 && 0 != (t1
= fold_convert (ctype
,
5726 const_binop (LSHIFT_EXPR
,
5729 && !TREE_OVERFLOW (t1
))
5730 return extract_muldiv (build2 (tcode
== LSHIFT_EXPR
5731 ? MULT_EXPR
: FLOOR_DIV_EXPR
,
5733 fold_convert (ctype
, op0
),
5735 c
, code
, wide_type
, strict_overflow_p
);
5738 case PLUS_EXPR
: case MINUS_EXPR
:
5739 /* See if we can eliminate the operation on both sides. If we can, we
5740 can return a new PLUS or MINUS. If we can't, the only remaining
5741 cases where we can do anything are if the second operand is a
5743 sub_strict_overflow_p
= false;
5744 t1
= extract_muldiv (op0
, c
, code
, wide_type
, &sub_strict_overflow_p
);
5745 t2
= extract_muldiv (op1
, c
, code
, wide_type
, &sub_strict_overflow_p
);
5746 if (t1
!= 0 && t2
!= 0
5747 && (code
== MULT_EXPR
5748 /* If not multiplication, we can only do this if both operands
5749 are divisible by c. */
5750 || (multiple_of_p (ctype
, op0
, c
)
5751 && multiple_of_p (ctype
, op1
, c
))))
5753 if (sub_strict_overflow_p
)
5754 *strict_overflow_p
= true;
5755 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5756 fold_convert (ctype
, t2
));
5759 /* If this was a subtraction, negate OP1 and set it to be an addition.
5760 This simplifies the logic below. */
5761 if (tcode
== MINUS_EXPR
)
5763 tcode
= PLUS_EXPR
, op1
= negate_expr (op1
);
5764 /* If OP1 was not easily negatable, the constant may be OP0. */
5765 if (TREE_CODE (op0
) == INTEGER_CST
)
5776 if (TREE_CODE (op1
) != INTEGER_CST
)
5779 /* If either OP1 or C are negative, this optimization is not safe for
5780 some of the division and remainder types while for others we need
5781 to change the code. */
5782 if (tree_int_cst_sgn (op1
) < 0 || tree_int_cst_sgn (c
) < 0)
5784 if (code
== CEIL_DIV_EXPR
)
5785 code
= FLOOR_DIV_EXPR
;
5786 else if (code
== FLOOR_DIV_EXPR
)
5787 code
= CEIL_DIV_EXPR
;
5788 else if (code
!= MULT_EXPR
5789 && code
!= CEIL_MOD_EXPR
&& code
!= FLOOR_MOD_EXPR
)
5793 /* If it's a multiply or a division/modulus operation of a multiple
5794 of our constant, do the operation and verify it doesn't overflow. */
5795 if (code
== MULT_EXPR
5796 || integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
)))
5798 op1
= const_binop (code
, fold_convert (ctype
, op1
),
5799 fold_convert (ctype
, c
));
5800 /* We allow the constant to overflow with wrapping semantics. */
5802 || (TREE_OVERFLOW (op1
) && !TYPE_OVERFLOW_WRAPS (ctype
)))
5808 /* If we have an unsigned type is not a sizetype, we cannot widen
5809 the operation since it will change the result if the original
5810 computation overflowed. */
5811 if (TYPE_UNSIGNED (ctype
)
5812 && ! (TREE_CODE (ctype
) == INTEGER_TYPE
&& TYPE_IS_SIZETYPE (ctype
))
5816 /* If we were able to eliminate our operation from the first side,
5817 apply our operation to the second side and reform the PLUS. */
5818 if (t1
!= 0 && (TREE_CODE (t1
) != code
|| code
== MULT_EXPR
))
5819 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
), op1
);
5821 /* The last case is if we are a multiply. In that case, we can
5822 apply the distributive law to commute the multiply and addition
5823 if the multiplication of the constants doesn't overflow. */
5824 if (code
== MULT_EXPR
)
5825 return fold_build2 (tcode
, ctype
,
5826 fold_build2 (code
, ctype
,
5827 fold_convert (ctype
, op0
),
5828 fold_convert (ctype
, c
)),
5834 /* We have a special case here if we are doing something like
5835 (C * 8) % 4 since we know that's zero. */
5836 if ((code
== TRUNC_MOD_EXPR
|| code
== CEIL_MOD_EXPR
5837 || code
== FLOOR_MOD_EXPR
|| code
== ROUND_MOD_EXPR
)
5838 /* If the multiplication can overflow we cannot optimize this.
5839 ??? Until we can properly mark individual operations as
5840 not overflowing we need to treat sizetype special here as
5841 stor-layout relies on this opimization to make
5842 DECL_FIELD_BIT_OFFSET always a constant. */
5843 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
))
5844 || (TREE_CODE (TREE_TYPE (t
)) == INTEGER_TYPE
5845 && TYPE_IS_SIZETYPE (TREE_TYPE (t
))))
5846 && TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
5847 && integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
)))
5849 *strict_overflow_p
= true;
5850 return omit_one_operand (type
, integer_zero_node
, op0
);
5853 /* ... fall through ... */
5855 case TRUNC_DIV_EXPR
: case CEIL_DIV_EXPR
: case FLOOR_DIV_EXPR
:
5856 case ROUND_DIV_EXPR
: case EXACT_DIV_EXPR
:
5857 /* If we can extract our operation from the LHS, do so and return a
5858 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5859 do something only if the second operand is a constant. */
5861 && (t1
= extract_muldiv (op0
, c
, code
, wide_type
,
5862 strict_overflow_p
)) != 0)
5863 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5864 fold_convert (ctype
, op1
));
5865 else if (tcode
== MULT_EXPR
&& code
== MULT_EXPR
5866 && (t1
= extract_muldiv (op1
, c
, code
, wide_type
,
5867 strict_overflow_p
)) != 0)
5868 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
5869 fold_convert (ctype
, t1
));
5870 else if (TREE_CODE (op1
) != INTEGER_CST
)
5873 /* If these are the same operation types, we can associate them
5874 assuming no overflow. */
5879 mul
= double_int_mul_with_sign
5881 (tree_to_double_int (op1
),
5882 TYPE_PRECISION (ctype
), TYPE_UNSIGNED (ctype
)),
5884 (tree_to_double_int (c
),
5885 TYPE_PRECISION (ctype
), TYPE_UNSIGNED (ctype
)),
5886 false, &overflow_p
);
5887 overflow_p
= (((!TYPE_UNSIGNED (ctype
)
5888 || (TREE_CODE (ctype
) == INTEGER_TYPE
5889 && TYPE_IS_SIZETYPE (ctype
)))
5891 | TREE_OVERFLOW (c
) | TREE_OVERFLOW (op1
));
5892 if (!double_int_fits_to_tree_p (ctype
, mul
)
5893 && ((TYPE_UNSIGNED (ctype
) && tcode
!= MULT_EXPR
)
5894 || !TYPE_UNSIGNED (ctype
)
5895 || (TREE_CODE (ctype
) == INTEGER_TYPE
5896 && TYPE_IS_SIZETYPE (ctype
))))
5899 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
5900 double_int_to_tree (ctype
, mul
));
5903 /* If these operations "cancel" each other, we have the main
5904 optimizations of this pass, which occur when either constant is a
5905 multiple of the other, in which case we replace this with either an
5906 operation or CODE or TCODE.
5908 If we have an unsigned type that is not a sizetype, we cannot do
5909 this since it will change the result if the original computation
5911 if ((TYPE_OVERFLOW_UNDEFINED (ctype
)
5912 || (TREE_CODE (ctype
) == INTEGER_TYPE
&& TYPE_IS_SIZETYPE (ctype
)))
5913 && ((code
== MULT_EXPR
&& tcode
== EXACT_DIV_EXPR
)
5914 || (tcode
== MULT_EXPR
5915 && code
!= TRUNC_MOD_EXPR
&& code
!= CEIL_MOD_EXPR
5916 && code
!= FLOOR_MOD_EXPR
&& code
!= ROUND_MOD_EXPR
5917 && code
!= MULT_EXPR
)))
5919 if (integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
)))
5921 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
5922 *strict_overflow_p
= true;
5923 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
5924 fold_convert (ctype
,
5925 const_binop (TRUNC_DIV_EXPR
,
5928 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR
, c
, op1
)))
5930 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
5931 *strict_overflow_p
= true;
5932 return fold_build2 (code
, ctype
, fold_convert (ctype
, op0
),
5933 fold_convert (ctype
,
5934 const_binop (TRUNC_DIV_EXPR
,
5947 /* Return a node which has the indicated constant VALUE (either 0 or
5948 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
5949 and is of the indicated TYPE. */
5952 constant_boolean_node (bool value
, tree type
)
5954 if (type
== integer_type_node
)
5955 return value
? integer_one_node
: integer_zero_node
;
5956 else if (type
== boolean_type_node
)
5957 return value
? boolean_true_node
: boolean_false_node
;
5958 else if (TREE_CODE (type
) == VECTOR_TYPE
)
5959 return build_vector_from_val (type
,
5960 build_int_cst (TREE_TYPE (type
),
5963 return fold_convert (type
, value
? integer_one_node
: integer_zero_node
);
5967 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5968 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5969 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5970 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5971 COND is the first argument to CODE; otherwise (as in the example
5972 given here), it is the second argument. TYPE is the type of the
5973 original expression. Return NULL_TREE if no simplification is
5977 fold_binary_op_with_conditional_arg (location_t loc
,
5978 enum tree_code code
,
5979 tree type
, tree op0
, tree op1
,
5980 tree cond
, tree arg
, int cond_first_p
)
5982 tree cond_type
= cond_first_p
? TREE_TYPE (op0
) : TREE_TYPE (op1
);
5983 tree arg_type
= cond_first_p
? TREE_TYPE (op1
) : TREE_TYPE (op0
);
5984 tree test
, true_value
, false_value
;
5985 tree lhs
= NULL_TREE
;
5986 tree rhs
= NULL_TREE
;
5988 if (TREE_CODE (cond
) == COND_EXPR
)
5990 test
= TREE_OPERAND (cond
, 0);
5991 true_value
= TREE_OPERAND (cond
, 1);
5992 false_value
= TREE_OPERAND (cond
, 2);
5993 /* If this operand throws an expression, then it does not make
5994 sense to try to perform a logical or arithmetic operation
5996 if (VOID_TYPE_P (TREE_TYPE (true_value
)))
5998 if (VOID_TYPE_P (TREE_TYPE (false_value
)))
6003 tree testtype
= TREE_TYPE (cond
);
6005 true_value
= constant_boolean_node (true, testtype
);
6006 false_value
= constant_boolean_node (false, testtype
);
6009 /* This transformation is only worthwhile if we don't have to wrap ARG
6010 in a SAVE_EXPR and the operation can be simplified on at least one
6011 of the branches once its pushed inside the COND_EXPR. */
6012 if (!TREE_CONSTANT (arg
)
6013 && (TREE_SIDE_EFFECTS (arg
)
6014 || TREE_CONSTANT (true_value
) || TREE_CONSTANT (false_value
)))
6017 arg
= fold_convert_loc (loc
, arg_type
, arg
);
6020 true_value
= fold_convert_loc (loc
, cond_type
, true_value
);
6022 lhs
= fold_build2_loc (loc
, code
, type
, true_value
, arg
);
6024 lhs
= fold_build2_loc (loc
, code
, type
, arg
, true_value
);
6028 false_value
= fold_convert_loc (loc
, cond_type
, false_value
);
6030 rhs
= fold_build2_loc (loc
, code
, type
, false_value
, arg
);
6032 rhs
= fold_build2_loc (loc
, code
, type
, arg
, false_value
);
6035 /* Check that we have simplified at least one of the branches. */
6036 if (!TREE_CONSTANT (arg
) && !TREE_CONSTANT (lhs
) && !TREE_CONSTANT (rhs
))
6039 return fold_build3_loc (loc
, COND_EXPR
, type
, test
, lhs
, rhs
);
6043 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6045 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6046 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6047 ADDEND is the same as X.
6049 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6050 and finite. The problematic cases are when X is zero, and its mode
6051 has signed zeros. In the case of rounding towards -infinity,
6052 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6053 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6056 fold_real_zero_addition_p (const_tree type
, const_tree addend
, int negate
)
6058 if (!real_zerop (addend
))
6061 /* Don't allow the fold with -fsignaling-nans. */
6062 if (HONOR_SNANS (TYPE_MODE (type
)))
6065 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6066 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
6069 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6070 if (TREE_CODE (addend
) == REAL_CST
6071 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend
)))
6074 /* The mode has signed zeros, and we have to honor their sign.
6075 In this situation, there is only one case we can return true for.
6076 X - 0 is the same as X unless rounding towards -infinity is
6078 return negate
&& !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
));
6081 /* Subroutine of fold() that checks comparisons of built-in math
6082 functions against real constants.
6084 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6085 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6086 is the type of the result and ARG0 and ARG1 are the operands of the
6087 comparison. ARG1 must be a TREE_REAL_CST.
6089 The function returns the constant folded tree if a simplification
6090 can be made, and NULL_TREE otherwise. */
6093 fold_mathfn_compare (location_t loc
,
6094 enum built_in_function fcode
, enum tree_code code
,
6095 tree type
, tree arg0
, tree arg1
)
6099 if (BUILTIN_SQRT_P (fcode
))
6101 tree arg
= CALL_EXPR_ARG (arg0
, 0);
6102 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (arg0
));
6104 c
= TREE_REAL_CST (arg1
);
6105 if (REAL_VALUE_NEGATIVE (c
))
6107 /* sqrt(x) < y is always false, if y is negative. */
6108 if (code
== EQ_EXPR
|| code
== LT_EXPR
|| code
== LE_EXPR
)
6109 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
6111 /* sqrt(x) > y is always true, if y is negative and we
6112 don't care about NaNs, i.e. negative values of x. */
6113 if (code
== NE_EXPR
|| !HONOR_NANS (mode
))
6114 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
6116 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6117 return fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6118 build_real (TREE_TYPE (arg
), dconst0
));
6120 else if (code
== GT_EXPR
|| code
== GE_EXPR
)
6124 REAL_ARITHMETIC (c2
, MULT_EXPR
, c
, c
);
6125 real_convert (&c2
, mode
, &c2
);
6127 if (REAL_VALUE_ISINF (c2
))
6129 /* sqrt(x) > y is x == +Inf, when y is very large. */
6130 if (HONOR_INFINITIES (mode
))
6131 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg
,
6132 build_real (TREE_TYPE (arg
), c2
));
6134 /* sqrt(x) > y is always false, when y is very large
6135 and we don't care about infinities. */
6136 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
6139 /* sqrt(x) > c is the same as x > c*c. */
6140 return fold_build2_loc (loc
, code
, type
, arg
,
6141 build_real (TREE_TYPE (arg
), c2
));
6143 else if (code
== LT_EXPR
|| code
== LE_EXPR
)
6147 REAL_ARITHMETIC (c2
, MULT_EXPR
, c
, c
);
6148 real_convert (&c2
, mode
, &c2
);
6150 if (REAL_VALUE_ISINF (c2
))
6152 /* sqrt(x) < y is always true, when y is a very large
6153 value and we don't care about NaNs or Infinities. */
6154 if (! HONOR_NANS (mode
) && ! HONOR_INFINITIES (mode
))
6155 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
6157 /* sqrt(x) < y is x != +Inf when y is very large and we
6158 don't care about NaNs. */
6159 if (! HONOR_NANS (mode
))
6160 return fold_build2_loc (loc
, NE_EXPR
, type
, arg
,
6161 build_real (TREE_TYPE (arg
), c2
));
6163 /* sqrt(x) < y is x >= 0 when y is very large and we
6164 don't care about Infinities. */
6165 if (! HONOR_INFINITIES (mode
))
6166 return fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6167 build_real (TREE_TYPE (arg
), dconst0
));
6169 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6170 arg
= save_expr (arg
);
6171 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
6172 fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6173 build_real (TREE_TYPE (arg
),
6175 fold_build2_loc (loc
, NE_EXPR
, type
, arg
,
6176 build_real (TREE_TYPE (arg
),
6180 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6181 if (! HONOR_NANS (mode
))
6182 return fold_build2_loc (loc
, code
, type
, arg
,
6183 build_real (TREE_TYPE (arg
), c2
));
6185 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6186 arg
= save_expr (arg
);
6187 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
6188 fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6189 build_real (TREE_TYPE (arg
),
6191 fold_build2_loc (loc
, code
, type
, arg
,
6192 build_real (TREE_TYPE (arg
),
6200 /* Subroutine of fold() that optimizes comparisons against Infinities,
6201 either +Inf or -Inf.
6203 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6204 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6205 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6207 The function returns the constant folded tree if a simplification
6208 can be made, and NULL_TREE otherwise. */
6211 fold_inf_compare (location_t loc
, enum tree_code code
, tree type
,
6212 tree arg0
, tree arg1
)
6214 enum machine_mode mode
;
6215 REAL_VALUE_TYPE max
;
6219 mode
= TYPE_MODE (TREE_TYPE (arg0
));
6221 /* For negative infinity swap the sense of the comparison. */
6222 neg
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
));
6224 code
= swap_tree_comparison (code
);
6229 /* x > +Inf is always false, if with ignore sNANs. */
6230 if (HONOR_SNANS (mode
))
6232 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6235 /* x <= +Inf is always true, if we don't case about NaNs. */
6236 if (! HONOR_NANS (mode
))
6237 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6239 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6240 arg0
= save_expr (arg0
);
6241 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
, arg0
);
6245 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6246 real_maxval (&max
, neg
, mode
);
6247 return fold_build2_loc (loc
, neg
? LT_EXPR
: GT_EXPR
, type
,
6248 arg0
, build_real (TREE_TYPE (arg0
), max
));
6251 /* x < +Inf is always equal to x <= DBL_MAX. */
6252 real_maxval (&max
, neg
, mode
);
6253 return fold_build2_loc (loc
, neg
? GE_EXPR
: LE_EXPR
, type
,
6254 arg0
, build_real (TREE_TYPE (arg0
), max
));
6257 /* x != +Inf is always equal to !(x > DBL_MAX). */
6258 real_maxval (&max
, neg
, mode
);
6259 if (! HONOR_NANS (mode
))
6260 return fold_build2_loc (loc
, neg
? GE_EXPR
: LE_EXPR
, type
,
6261 arg0
, build_real (TREE_TYPE (arg0
), max
));
6263 temp
= fold_build2_loc (loc
, neg
? LT_EXPR
: GT_EXPR
, type
,
6264 arg0
, build_real (TREE_TYPE (arg0
), max
));
6265 return fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, temp
);
6274 /* Subroutine of fold() that optimizes comparisons of a division by
6275 a nonzero integer constant against an integer constant, i.e.
6278 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6279 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6280 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6282 The function returns the constant folded tree if a simplification
6283 can be made, and NULL_TREE otherwise. */
6286 fold_div_compare (location_t loc
,
6287 enum tree_code code
, tree type
, tree arg0
, tree arg1
)
6289 tree prod
, tmp
, hi
, lo
;
6290 tree arg00
= TREE_OPERAND (arg0
, 0);
6291 tree arg01
= TREE_OPERAND (arg0
, 1);
6293 bool unsigned_p
= TYPE_UNSIGNED (TREE_TYPE (arg0
));
6297 /* We have to do this the hard way to detect unsigned overflow.
6298 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6299 overflow
= mul_double_with_sign (TREE_INT_CST_LOW (arg01
),
6300 TREE_INT_CST_HIGH (arg01
),
6301 TREE_INT_CST_LOW (arg1
),
6302 TREE_INT_CST_HIGH (arg1
),
6303 &val
.low
, &val
.high
, unsigned_p
);
6304 prod
= force_fit_type_double (TREE_TYPE (arg00
), val
, -1, overflow
);
6305 neg_overflow
= false;
6309 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6310 build_int_cst (TREE_TYPE (arg01
), 1));
6313 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6314 overflow
= add_double_with_sign (TREE_INT_CST_LOW (prod
),
6315 TREE_INT_CST_HIGH (prod
),
6316 TREE_INT_CST_LOW (tmp
),
6317 TREE_INT_CST_HIGH (tmp
),
6318 &val
.low
, &val
.high
, unsigned_p
);
6319 hi
= force_fit_type_double (TREE_TYPE (arg00
), val
,
6320 -1, overflow
| TREE_OVERFLOW (prod
));
6322 else if (tree_int_cst_sgn (arg01
) >= 0)
6324 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6325 build_int_cst (TREE_TYPE (arg01
), 1));
6326 switch (tree_int_cst_sgn (arg1
))
6329 neg_overflow
= true;
6330 lo
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6335 lo
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6340 hi
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
6350 /* A negative divisor reverses the relational operators. */
6351 code
= swap_tree_comparison (code
);
6353 tmp
= int_const_binop (PLUS_EXPR
, arg01
,
6354 build_int_cst (TREE_TYPE (arg01
), 1));
6355 switch (tree_int_cst_sgn (arg1
))
6358 hi
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6363 hi
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6368 neg_overflow
= true;
6369 lo
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
6381 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6382 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg00
);
6383 if (TREE_OVERFLOW (hi
))
6384 return fold_build2_loc (loc
, GE_EXPR
, type
, arg00
, lo
);
6385 if (TREE_OVERFLOW (lo
))
6386 return fold_build2_loc (loc
, LE_EXPR
, type
, arg00
, hi
);
6387 return build_range_check (loc
, type
, arg00
, 1, lo
, hi
);
6390 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6391 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg00
);
6392 if (TREE_OVERFLOW (hi
))
6393 return fold_build2_loc (loc
, LT_EXPR
, type
, arg00
, lo
);
6394 if (TREE_OVERFLOW (lo
))
6395 return fold_build2_loc (loc
, GT_EXPR
, type
, arg00
, hi
);
6396 return build_range_check (loc
, type
, arg00
, 0, lo
, hi
);
6399 if (TREE_OVERFLOW (lo
))
6401 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6402 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6404 return fold_build2_loc (loc
, LT_EXPR
, type
, arg00
, lo
);
6407 if (TREE_OVERFLOW (hi
))
6409 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6410 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6412 return fold_build2_loc (loc
, LE_EXPR
, type
, arg00
, hi
);
6415 if (TREE_OVERFLOW (hi
))
6417 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6418 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6420 return fold_build2_loc (loc
, GT_EXPR
, type
, arg00
, hi
);
6423 if (TREE_OVERFLOW (lo
))
6425 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6426 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6428 return fold_build2_loc (loc
, GE_EXPR
, type
, arg00
, lo
);
6438 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6439 equality/inequality test, then return a simplified form of the test
6440 using a sign testing. Otherwise return NULL. TYPE is the desired
6444 fold_single_bit_test_into_sign_test (location_t loc
,
6445 enum tree_code code
, tree arg0
, tree arg1
,
6448 /* If this is testing a single bit, we can optimize the test. */
6449 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6450 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6451 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6453 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6454 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6455 tree arg00
= sign_bit_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1));
6457 if (arg00
!= NULL_TREE
6458 /* This is only a win if casting to a signed type is cheap,
6459 i.e. when arg00's type is not a partial mode. */
6460 && TYPE_PRECISION (TREE_TYPE (arg00
))
6461 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00
))))
6463 tree stype
= signed_type_for (TREE_TYPE (arg00
));
6464 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
6466 fold_convert_loc (loc
, stype
, arg00
),
6467 build_int_cst (stype
, 0));
6474 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6475 equality/inequality test, then return a simplified form of
6476 the test using shifts and logical operations. Otherwise return
6477 NULL. TYPE is the desired result type. */
6480 fold_single_bit_test (location_t loc
, enum tree_code code
,
6481 tree arg0
, tree arg1
, tree result_type
)
6483 /* If this is testing a single bit, we can optimize the test. */
6484 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6485 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6486 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6488 tree inner
= TREE_OPERAND (arg0
, 0);
6489 tree type
= TREE_TYPE (arg0
);
6490 int bitnum
= tree_log2 (TREE_OPERAND (arg0
, 1));
6491 enum machine_mode operand_mode
= TYPE_MODE (type
);
6493 tree signed_type
, unsigned_type
, intermediate_type
;
6496 /* First, see if we can fold the single bit test into a sign-bit
6498 tem
= fold_single_bit_test_into_sign_test (loc
, code
, arg0
, arg1
,
6503 /* Otherwise we have (A & C) != 0 where C is a single bit,
6504 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6505 Similarly for (A & C) == 0. */
6507 /* If INNER is a right shift of a constant and it plus BITNUM does
6508 not overflow, adjust BITNUM and INNER. */
6509 if (TREE_CODE (inner
) == RSHIFT_EXPR
6510 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
6511 && TREE_INT_CST_HIGH (TREE_OPERAND (inner
, 1)) == 0
6512 && bitnum
< TYPE_PRECISION (type
)
6513 && 0 > compare_tree_int (TREE_OPERAND (inner
, 1),
6514 bitnum
- TYPE_PRECISION (type
)))
6516 bitnum
+= TREE_INT_CST_LOW (TREE_OPERAND (inner
, 1));
6517 inner
= TREE_OPERAND (inner
, 0);
6520 /* If we are going to be able to omit the AND below, we must do our
6521 operations as unsigned. If we must use the AND, we have a choice.
6522 Normally unsigned is faster, but for some machines signed is. */
6523 #ifdef LOAD_EXTEND_OP
6524 ops_unsigned
= (LOAD_EXTEND_OP (operand_mode
) == SIGN_EXTEND
6525 && !flag_syntax_only
) ? 0 : 1;
6530 signed_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 0);
6531 unsigned_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 1);
6532 intermediate_type
= ops_unsigned
? unsigned_type
: signed_type
;
6533 inner
= fold_convert_loc (loc
, intermediate_type
, inner
);
6536 inner
= build2 (RSHIFT_EXPR
, intermediate_type
,
6537 inner
, size_int (bitnum
));
6539 one
= build_int_cst (intermediate_type
, 1);
6541 if (code
== EQ_EXPR
)
6542 inner
= fold_build2_loc (loc
, BIT_XOR_EXPR
, intermediate_type
, inner
, one
);
6544 /* Put the AND last so it can combine with more things. */
6545 inner
= build2 (BIT_AND_EXPR
, intermediate_type
, inner
, one
);
6547 /* Make sure to return the proper type. */
6548 inner
= fold_convert_loc (loc
, result_type
, inner
);
6555 /* Check whether we are allowed to reorder operands arg0 and arg1,
6556 such that the evaluation of arg1 occurs before arg0. */
6559 reorder_operands_p (const_tree arg0
, const_tree arg1
)
6561 if (! flag_evaluation_order
)
6563 if (TREE_CONSTANT (arg0
) || TREE_CONSTANT (arg1
))
6565 return ! TREE_SIDE_EFFECTS (arg0
)
6566 && ! TREE_SIDE_EFFECTS (arg1
);
6569 /* Test whether it is preferable two swap two operands, ARG0 and
6570 ARG1, for example because ARG0 is an integer constant and ARG1
6571 isn't. If REORDER is true, only recommend swapping if we can
6572 evaluate the operands in reverse order. */
6575 tree_swap_operands_p (const_tree arg0
, const_tree arg1
, bool reorder
)
6577 STRIP_SIGN_NOPS (arg0
);
6578 STRIP_SIGN_NOPS (arg1
);
6580 if (TREE_CODE (arg1
) == INTEGER_CST
)
6582 if (TREE_CODE (arg0
) == INTEGER_CST
)
6585 if (TREE_CODE (arg1
) == REAL_CST
)
6587 if (TREE_CODE (arg0
) == REAL_CST
)
6590 if (TREE_CODE (arg1
) == FIXED_CST
)
6592 if (TREE_CODE (arg0
) == FIXED_CST
)
6595 if (TREE_CODE (arg1
) == COMPLEX_CST
)
6597 if (TREE_CODE (arg0
) == COMPLEX_CST
)
6600 if (TREE_CONSTANT (arg1
))
6602 if (TREE_CONSTANT (arg0
))
6605 if (optimize_function_for_size_p (cfun
))
6608 if (reorder
&& flag_evaluation_order
6609 && (TREE_SIDE_EFFECTS (arg0
) || TREE_SIDE_EFFECTS (arg1
)))
6612 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6613 for commutative and comparison operators. Ensuring a canonical
6614 form allows the optimizers to find additional redundancies without
6615 having to explicitly check for both orderings. */
6616 if (TREE_CODE (arg0
) == SSA_NAME
6617 && TREE_CODE (arg1
) == SSA_NAME
6618 && SSA_NAME_VERSION (arg0
) > SSA_NAME_VERSION (arg1
))
6621 /* Put SSA_NAMEs last. */
6622 if (TREE_CODE (arg1
) == SSA_NAME
)
6624 if (TREE_CODE (arg0
) == SSA_NAME
)
6627 /* Put variables last. */
6636 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6637 ARG0 is extended to a wider type. */
6640 fold_widened_comparison (location_t loc
, enum tree_code code
,
6641 tree type
, tree arg0
, tree arg1
)
6643 tree arg0_unw
= get_unwidened (arg0
, NULL_TREE
);
6645 tree shorter_type
, outer_type
;
6649 if (arg0_unw
== arg0
)
6651 shorter_type
= TREE_TYPE (arg0_unw
);
6653 #ifdef HAVE_canonicalize_funcptr_for_compare
6654 /* Disable this optimization if we're casting a function pointer
6655 type on targets that require function pointer canonicalization. */
6656 if (HAVE_canonicalize_funcptr_for_compare
6657 && TREE_CODE (shorter_type
) == POINTER_TYPE
6658 && TREE_CODE (TREE_TYPE (shorter_type
)) == FUNCTION_TYPE
)
6662 if (TYPE_PRECISION (TREE_TYPE (arg0
)) <= TYPE_PRECISION (shorter_type
))
6665 arg1_unw
= get_unwidened (arg1
, NULL_TREE
);
6667 /* If possible, express the comparison in the shorter mode. */
6668 if ((code
== EQ_EXPR
|| code
== NE_EXPR
6669 || TYPE_UNSIGNED (TREE_TYPE (arg0
)) == TYPE_UNSIGNED (shorter_type
))
6670 && (TREE_TYPE (arg1_unw
) == shorter_type
6671 || ((TYPE_PRECISION (shorter_type
)
6672 >= TYPE_PRECISION (TREE_TYPE (arg1_unw
)))
6673 && (TYPE_UNSIGNED (shorter_type
)
6674 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw
))))
6675 || (TREE_CODE (arg1_unw
) == INTEGER_CST
6676 && (TREE_CODE (shorter_type
) == INTEGER_TYPE
6677 || TREE_CODE (shorter_type
) == BOOLEAN_TYPE
)
6678 && int_fits_type_p (arg1_unw
, shorter_type
))))
6679 return fold_build2_loc (loc
, code
, type
, arg0_unw
,
6680 fold_convert_loc (loc
, shorter_type
, arg1_unw
));
6682 if (TREE_CODE (arg1_unw
) != INTEGER_CST
6683 || TREE_CODE (shorter_type
) != INTEGER_TYPE
6684 || !int_fits_type_p (arg1_unw
, shorter_type
))
6687 /* If we are comparing with the integer that does not fit into the range
6688 of the shorter type, the result is known. */
6689 outer_type
= TREE_TYPE (arg1_unw
);
6690 min
= lower_bound_in_type (outer_type
, shorter_type
);
6691 max
= upper_bound_in_type (outer_type
, shorter_type
);
6693 above
= integer_nonzerop (fold_relational_const (LT_EXPR
, type
,
6695 below
= integer_nonzerop (fold_relational_const (LT_EXPR
, type
,
6702 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6707 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6713 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6715 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6720 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6722 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6731 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6732 ARG0 just the signedness is changed. */
6735 fold_sign_changed_comparison (location_t loc
, enum tree_code code
, tree type
,
6736 tree arg0
, tree arg1
)
6739 tree inner_type
, outer_type
;
6741 if (!CONVERT_EXPR_P (arg0
))
6744 outer_type
= TREE_TYPE (arg0
);
6745 arg0_inner
= TREE_OPERAND (arg0
, 0);
6746 inner_type
= TREE_TYPE (arg0_inner
);
6748 #ifdef HAVE_canonicalize_funcptr_for_compare
6749 /* Disable this optimization if we're casting a function pointer
6750 type on targets that require function pointer canonicalization. */
6751 if (HAVE_canonicalize_funcptr_for_compare
6752 && TREE_CODE (inner_type
) == POINTER_TYPE
6753 && TREE_CODE (TREE_TYPE (inner_type
)) == FUNCTION_TYPE
)
6757 if (TYPE_PRECISION (inner_type
) != TYPE_PRECISION (outer_type
))
6760 if (TREE_CODE (arg1
) != INTEGER_CST
6761 && !(CONVERT_EXPR_P (arg1
)
6762 && TREE_TYPE (TREE_OPERAND (arg1
, 0)) == inner_type
))
6765 if ((TYPE_UNSIGNED (inner_type
) != TYPE_UNSIGNED (outer_type
)
6766 || POINTER_TYPE_P (inner_type
) != POINTER_TYPE_P (outer_type
))
6771 if (TREE_CODE (arg1
) == INTEGER_CST
)
6772 arg1
= force_fit_type_double (inner_type
, tree_to_double_int (arg1
),
6773 0, TREE_OVERFLOW (arg1
));
6775 arg1
= fold_convert_loc (loc
, inner_type
, arg1
);
6777 return fold_build2_loc (loc
, code
, type
, arg0_inner
, arg1
);
6780 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6781 step of the array. Reconstructs s and delta in the case of s *
6782 delta being an integer constant (and thus already folded). ADDR is
6783 the address. MULT is the multiplicative expression. If the
6784 function succeeds, the new address expression is returned.
6785 Otherwise NULL_TREE is returned. LOC is the location of the
6786 resulting expression. */
6789 try_move_mult_to_index (location_t loc
, tree addr
, tree op1
)
6791 tree s
, delta
, step
;
6792 tree ref
= TREE_OPERAND (addr
, 0), pref
;
6797 /* Strip the nops that might be added when converting op1 to sizetype. */
6800 /* Canonicalize op1 into a possibly non-constant delta
6801 and an INTEGER_CST s. */
6802 if (TREE_CODE (op1
) == MULT_EXPR
)
6804 tree arg0
= TREE_OPERAND (op1
, 0), arg1
= TREE_OPERAND (op1
, 1);
6809 if (TREE_CODE (arg0
) == INTEGER_CST
)
6814 else if (TREE_CODE (arg1
) == INTEGER_CST
)
6822 else if (TREE_CODE (op1
) == INTEGER_CST
)
6829 /* Simulate we are delta * 1. */
6831 s
= integer_one_node
;
6834 for (;; ref
= TREE_OPERAND (ref
, 0))
6836 if (TREE_CODE (ref
) == ARRAY_REF
)
6840 /* Remember if this was a multi-dimensional array. */
6841 if (TREE_CODE (TREE_OPERAND (ref
, 0)) == ARRAY_REF
)
6844 domain
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref
, 0)));
6847 itype
= TREE_TYPE (domain
);
6849 step
= array_ref_element_size (ref
);
6850 if (TREE_CODE (step
) != INTEGER_CST
)
6855 if (! tree_int_cst_equal (step
, s
))
6860 /* Try if delta is a multiple of step. */
6861 tree tmp
= div_if_zero_remainder (EXACT_DIV_EXPR
, op1
, step
);
6867 /* Only fold here if we can verify we do not overflow one
6868 dimension of a multi-dimensional array. */
6873 if (TREE_CODE (TREE_OPERAND (ref
, 1)) != INTEGER_CST
6874 || !TYPE_MAX_VALUE (domain
)
6875 || TREE_CODE (TYPE_MAX_VALUE (domain
)) != INTEGER_CST
)
6878 tmp
= fold_binary_loc (loc
, PLUS_EXPR
, itype
,
6879 fold_convert_loc (loc
, itype
,
6880 TREE_OPERAND (ref
, 1)),
6881 fold_convert_loc (loc
, itype
, delta
));
6883 || TREE_CODE (tmp
) != INTEGER_CST
6884 || tree_int_cst_lt (TYPE_MAX_VALUE (domain
), tmp
))
6890 else if (TREE_CODE (ref
) == COMPONENT_REF
6891 && TREE_CODE (TREE_TYPE (ref
)) == ARRAY_TYPE
)
6895 /* Remember if this was a multi-dimensional array. */
6896 if (TREE_CODE (TREE_OPERAND (ref
, 0)) == ARRAY_REF
)
6899 domain
= TYPE_DOMAIN (TREE_TYPE (ref
));
6902 itype
= TREE_TYPE (domain
);
6904 step
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref
)));
6905 if (TREE_CODE (step
) != INTEGER_CST
)
6910 if (! tree_int_cst_equal (step
, s
))
6915 /* Try if delta is a multiple of step. */
6916 tree tmp
= div_if_zero_remainder (EXACT_DIV_EXPR
, op1
, step
);
6922 /* Only fold here if we can verify we do not overflow one
6923 dimension of a multi-dimensional array. */
6928 if (!TYPE_MIN_VALUE (domain
)
6929 || !TYPE_MAX_VALUE (domain
)
6930 || TREE_CODE (TYPE_MAX_VALUE (domain
)) != INTEGER_CST
)
6933 tmp
= fold_binary_loc (loc
, PLUS_EXPR
, itype
,
6934 fold_convert_loc (loc
, itype
,
6935 TYPE_MIN_VALUE (domain
)),
6936 fold_convert_loc (loc
, itype
, delta
));
6937 if (TREE_CODE (tmp
) != INTEGER_CST
6938 || tree_int_cst_lt (TYPE_MAX_VALUE (domain
), tmp
))
6947 if (!handled_component_p (ref
))
6951 /* We found the suitable array reference. So copy everything up to it,
6952 and replace the index. */
6954 pref
= TREE_OPERAND (addr
, 0);
6955 ret
= copy_node (pref
);
6956 SET_EXPR_LOCATION (ret
, loc
);
6961 pref
= TREE_OPERAND (pref
, 0);
6962 TREE_OPERAND (pos
, 0) = copy_node (pref
);
6963 pos
= TREE_OPERAND (pos
, 0);
6966 if (TREE_CODE (ref
) == ARRAY_REF
)
6968 TREE_OPERAND (pos
, 1)
6969 = fold_build2_loc (loc
, PLUS_EXPR
, itype
,
6970 fold_convert_loc (loc
, itype
, TREE_OPERAND (pos
, 1)),
6971 fold_convert_loc (loc
, itype
, delta
));
6972 return fold_build1_loc (loc
, ADDR_EXPR
, TREE_TYPE (addr
), ret
);
6974 else if (TREE_CODE (ref
) == COMPONENT_REF
)
6976 gcc_assert (ret
== pos
);
6977 ret
= build4_loc (loc
, ARRAY_REF
, TREE_TYPE (TREE_TYPE (ref
)), ret
,
6979 (loc
, PLUS_EXPR
, itype
,
6980 fold_convert_loc (loc
, itype
,
6982 (TYPE_DOMAIN (TREE_TYPE (ref
)))),
6983 fold_convert_loc (loc
, itype
, delta
)),
6984 NULL_TREE
, NULL_TREE
);
6985 return build_fold_addr_expr_loc (loc
, ret
);
6992 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6993 means A >= Y && A != MAX, but in this case we know that
6994 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6997 fold_to_nonsharp_ineq_using_bound (location_t loc
, tree ineq
, tree bound
)
6999 tree a
, typea
, type
= TREE_TYPE (ineq
), a1
, diff
, y
;
7001 if (TREE_CODE (bound
) == LT_EXPR
)
7002 a
= TREE_OPERAND (bound
, 0);
7003 else if (TREE_CODE (bound
) == GT_EXPR
)
7004 a
= TREE_OPERAND (bound
, 1);
7008 typea
= TREE_TYPE (a
);
7009 if (!INTEGRAL_TYPE_P (typea
)
7010 && !POINTER_TYPE_P (typea
))
7013 if (TREE_CODE (ineq
) == LT_EXPR
)
7015 a1
= TREE_OPERAND (ineq
, 1);
7016 y
= TREE_OPERAND (ineq
, 0);
7018 else if (TREE_CODE (ineq
) == GT_EXPR
)
7020 a1
= TREE_OPERAND (ineq
, 0);
7021 y
= TREE_OPERAND (ineq
, 1);
7026 if (TREE_TYPE (a1
) != typea
)
7029 if (POINTER_TYPE_P (typea
))
7031 /* Convert the pointer types into integer before taking the difference. */
7032 tree ta
= fold_convert_loc (loc
, ssizetype
, a
);
7033 tree ta1
= fold_convert_loc (loc
, ssizetype
, a1
);
7034 diff
= fold_binary_loc (loc
, MINUS_EXPR
, ssizetype
, ta1
, ta
);
7037 diff
= fold_binary_loc (loc
, MINUS_EXPR
, typea
, a1
, a
);
7039 if (!diff
|| !integer_onep (diff
))
7042 return fold_build2_loc (loc
, GE_EXPR
, type
, a
, y
);
7045 /* Fold a sum or difference of at least one multiplication.
7046 Returns the folded tree or NULL if no simplification could be made. */
7049 fold_plusminus_mult_expr (location_t loc
, enum tree_code code
, tree type
,
7050 tree arg0
, tree arg1
)
7052 tree arg00
, arg01
, arg10
, arg11
;
7053 tree alt0
= NULL_TREE
, alt1
= NULL_TREE
, same
;
7055 /* (A * C) +- (B * C) -> (A+-B) * C.
7056 (A * C) +- A -> A * (C+-1).
7057 We are most concerned about the case where C is a constant,
7058 but other combinations show up during loop reduction. Since
7059 it is not difficult, try all four possibilities. */
7061 if (TREE_CODE (arg0
) == MULT_EXPR
)
7063 arg00
= TREE_OPERAND (arg0
, 0);
7064 arg01
= TREE_OPERAND (arg0
, 1);
7066 else if (TREE_CODE (arg0
) == INTEGER_CST
)
7068 arg00
= build_one_cst (type
);
7073 /* We cannot generate constant 1 for fract. */
7074 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
7077 arg01
= build_one_cst (type
);
7079 if (TREE_CODE (arg1
) == MULT_EXPR
)
7081 arg10
= TREE_OPERAND (arg1
, 0);
7082 arg11
= TREE_OPERAND (arg1
, 1);
7084 else if (TREE_CODE (arg1
) == INTEGER_CST
)
7086 arg10
= build_one_cst (type
);
7087 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7088 the purpose of this canonicalization. */
7089 if (TREE_INT_CST_HIGH (arg1
) == -1
7090 && negate_expr_p (arg1
)
7091 && code
== PLUS_EXPR
)
7093 arg11
= negate_expr (arg1
);
7101 /* We cannot generate constant 1 for fract. */
7102 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
7105 arg11
= build_one_cst (type
);
7109 if (operand_equal_p (arg01
, arg11
, 0))
7110 same
= arg01
, alt0
= arg00
, alt1
= arg10
;
7111 else if (operand_equal_p (arg00
, arg10
, 0))
7112 same
= arg00
, alt0
= arg01
, alt1
= arg11
;
7113 else if (operand_equal_p (arg00
, arg11
, 0))
7114 same
= arg00
, alt0
= arg01
, alt1
= arg10
;
7115 else if (operand_equal_p (arg01
, arg10
, 0))
7116 same
= arg01
, alt0
= arg00
, alt1
= arg11
;
7118 /* No identical multiplicands; see if we can find a common
7119 power-of-two factor in non-power-of-two multiplies. This
7120 can help in multi-dimensional array access. */
7121 else if (host_integerp (arg01
, 0)
7122 && host_integerp (arg11
, 0))
7124 HOST_WIDE_INT int01
, int11
, tmp
;
7127 int01
= TREE_INT_CST_LOW (arg01
);
7128 int11
= TREE_INT_CST_LOW (arg11
);
7130 /* Move min of absolute values to int11. */
7131 if (absu_hwi (int01
) < absu_hwi (int11
))
7133 tmp
= int01
, int01
= int11
, int11
= tmp
;
7134 alt0
= arg00
, arg00
= arg10
, arg10
= alt0
;
7141 if (exact_log2 (absu_hwi (int11
)) > 0 && int01
% int11
== 0
7142 /* The remainder should not be a constant, otherwise we
7143 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7144 increased the number of multiplications necessary. */
7145 && TREE_CODE (arg10
) != INTEGER_CST
)
7147 alt0
= fold_build2_loc (loc
, MULT_EXPR
, TREE_TYPE (arg00
), arg00
,
7148 build_int_cst (TREE_TYPE (arg00
),
7153 maybe_same
= alt0
, alt0
= alt1
, alt1
= maybe_same
;
7158 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7159 fold_build2_loc (loc
, code
, type
,
7160 fold_convert_loc (loc
, type
, alt0
),
7161 fold_convert_loc (loc
, type
, alt1
)),
7162 fold_convert_loc (loc
, type
, same
));
7167 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7168 specified by EXPR into the buffer PTR of length LEN bytes.
7169 Return the number of bytes placed in the buffer, or zero
7173 native_encode_int (const_tree expr
, unsigned char *ptr
, int len
)
7175 tree type
= TREE_TYPE (expr
);
7176 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7177 int byte
, offset
, word
, words
;
7178 unsigned char value
;
7180 if (total_bytes
> len
)
7182 words
= total_bytes
/ UNITS_PER_WORD
;
7184 for (byte
= 0; byte
< total_bytes
; byte
++)
7186 int bitpos
= byte
* BITS_PER_UNIT
;
7187 if (bitpos
< HOST_BITS_PER_WIDE_INT
)
7188 value
= (unsigned char) (TREE_INT_CST_LOW (expr
) >> bitpos
);
7190 value
= (unsigned char) (TREE_INT_CST_HIGH (expr
)
7191 >> (bitpos
- HOST_BITS_PER_WIDE_INT
));
7193 if (total_bytes
> UNITS_PER_WORD
)
7195 word
= byte
/ UNITS_PER_WORD
;
7196 if (WORDS_BIG_ENDIAN
)
7197 word
= (words
- 1) - word
;
7198 offset
= word
* UNITS_PER_WORD
;
7199 if (BYTES_BIG_ENDIAN
)
7200 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7202 offset
+= byte
% UNITS_PER_WORD
;
7205 offset
= BYTES_BIG_ENDIAN
? (total_bytes
- 1) - byte
: byte
;
7206 ptr
[offset
] = value
;
7212 /* Subroutine of native_encode_expr. Encode the REAL_CST
7213 specified by EXPR into the buffer PTR of length LEN bytes.
7214 Return the number of bytes placed in the buffer, or zero
7218 native_encode_real (const_tree expr
, unsigned char *ptr
, int len
)
7220 tree type
= TREE_TYPE (expr
);
7221 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7222 int byte
, offset
, word
, words
, bitpos
;
7223 unsigned char value
;
7225 /* There are always 32 bits in each long, no matter the size of
7226 the hosts long. We handle floating point representations with
7230 if (total_bytes
> len
)
7232 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7234 real_to_target (tmp
, TREE_REAL_CST_PTR (expr
), TYPE_MODE (type
));
7236 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7237 bitpos
+= BITS_PER_UNIT
)
7239 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7240 value
= (unsigned char) (tmp
[bitpos
/ 32] >> (bitpos
& 31));
7242 if (UNITS_PER_WORD
< 4)
7244 word
= byte
/ UNITS_PER_WORD
;
7245 if (WORDS_BIG_ENDIAN
)
7246 word
= (words
- 1) - word
;
7247 offset
= word
* UNITS_PER_WORD
;
7248 if (BYTES_BIG_ENDIAN
)
7249 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7251 offset
+= byte
% UNITS_PER_WORD
;
7254 offset
= BYTES_BIG_ENDIAN
? 3 - byte
: byte
;
7255 ptr
[offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3)] = value
;
7260 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7261 specified by EXPR into the buffer PTR of length LEN bytes.
7262 Return the number of bytes placed in the buffer, or zero
7266 native_encode_complex (const_tree expr
, unsigned char *ptr
, int len
)
7271 part
= TREE_REALPART (expr
);
7272 rsize
= native_encode_expr (part
, ptr
, len
);
7275 part
= TREE_IMAGPART (expr
);
7276 isize
= native_encode_expr (part
, ptr
+rsize
, len
-rsize
);
7279 return rsize
+ isize
;
7283 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7284 specified by EXPR into the buffer PTR of length LEN bytes.
7285 Return the number of bytes placed in the buffer, or zero
7289 native_encode_vector (const_tree expr
, unsigned char *ptr
, int len
)
7291 int i
, size
, offset
, count
;
7292 tree itype
, elem
, elements
;
7295 elements
= TREE_VECTOR_CST_ELTS (expr
);
7296 count
= TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr
));
7297 itype
= TREE_TYPE (TREE_TYPE (expr
));
7298 size
= GET_MODE_SIZE (TYPE_MODE (itype
));
7299 for (i
= 0; i
< count
; i
++)
7303 elem
= TREE_VALUE (elements
);
7304 elements
= TREE_CHAIN (elements
);
7311 if (native_encode_expr (elem
, ptr
+offset
, len
-offset
) != size
)
7316 if (offset
+ size
> len
)
7318 memset (ptr
+offset
, 0, size
);
7326 /* Subroutine of native_encode_expr. Encode the STRING_CST
7327 specified by EXPR into the buffer PTR of length LEN bytes.
7328 Return the number of bytes placed in the buffer, or zero
7332 native_encode_string (const_tree expr
, unsigned char *ptr
, int len
)
7334 tree type
= TREE_TYPE (expr
);
7335 HOST_WIDE_INT total_bytes
;
7337 if (TREE_CODE (type
) != ARRAY_TYPE
7338 || TREE_CODE (TREE_TYPE (type
)) != INTEGER_TYPE
7339 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) != BITS_PER_UNIT
7340 || !host_integerp (TYPE_SIZE_UNIT (type
), 0))
7342 total_bytes
= tree_low_cst (TYPE_SIZE_UNIT (type
), 0);
7343 if (total_bytes
> len
)
7345 if (TREE_STRING_LENGTH (expr
) < total_bytes
)
7347 memcpy (ptr
, TREE_STRING_POINTER (expr
), TREE_STRING_LENGTH (expr
));
7348 memset (ptr
+ TREE_STRING_LENGTH (expr
), 0,
7349 total_bytes
- TREE_STRING_LENGTH (expr
));
7352 memcpy (ptr
, TREE_STRING_POINTER (expr
), total_bytes
);
7357 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7358 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7359 buffer PTR of length LEN bytes. Return the number of bytes
7360 placed in the buffer, or zero upon failure. */
7363 native_encode_expr (const_tree expr
, unsigned char *ptr
, int len
)
7365 switch (TREE_CODE (expr
))
7368 return native_encode_int (expr
, ptr
, len
);
7371 return native_encode_real (expr
, ptr
, len
);
7374 return native_encode_complex (expr
, ptr
, len
);
7377 return native_encode_vector (expr
, ptr
, len
);
7380 return native_encode_string (expr
, ptr
, len
);
7388 /* Subroutine of native_interpret_expr. Interpret the contents of
7389 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7390 If the buffer cannot be interpreted, return NULL_TREE. */
7393 native_interpret_int (tree type
, const unsigned char *ptr
, int len
)
7395 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7396 int byte
, offset
, word
, words
;
7397 unsigned char value
;
7400 if (total_bytes
> len
)
7402 if (total_bytes
* BITS_PER_UNIT
> 2 * HOST_BITS_PER_WIDE_INT
)
7405 result
= double_int_zero
;
7406 words
= total_bytes
/ UNITS_PER_WORD
;
7408 for (byte
= 0; byte
< total_bytes
; byte
++)
7410 int bitpos
= byte
* BITS_PER_UNIT
;
7411 if (total_bytes
> UNITS_PER_WORD
)
7413 word
= byte
/ UNITS_PER_WORD
;
7414 if (WORDS_BIG_ENDIAN
)
7415 word
= (words
- 1) - word
;
7416 offset
= word
* UNITS_PER_WORD
;
7417 if (BYTES_BIG_ENDIAN
)
7418 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7420 offset
+= byte
% UNITS_PER_WORD
;
7423 offset
= BYTES_BIG_ENDIAN
? (total_bytes
- 1) - byte
: byte
;
7424 value
= ptr
[offset
];
7426 if (bitpos
< HOST_BITS_PER_WIDE_INT
)
7427 result
.low
|= (unsigned HOST_WIDE_INT
) value
<< bitpos
;
7429 result
.high
|= (unsigned HOST_WIDE_INT
) value
7430 << (bitpos
- HOST_BITS_PER_WIDE_INT
);
7433 return double_int_to_tree (type
, result
);
7437 /* Subroutine of native_interpret_expr. Interpret the contents of
7438 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7439 If the buffer cannot be interpreted, return NULL_TREE. */
7442 native_interpret_real (tree type
, const unsigned char *ptr
, int len
)
7444 enum machine_mode mode
= TYPE_MODE (type
);
7445 int total_bytes
= GET_MODE_SIZE (mode
);
7446 int byte
, offset
, word
, words
, bitpos
;
7447 unsigned char value
;
7448 /* There are always 32 bits in each long, no matter the size of
7449 the hosts long. We handle floating point representations with
7454 total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7455 if (total_bytes
> len
|| total_bytes
> 24)
7457 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7459 memset (tmp
, 0, sizeof (tmp
));
7460 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7461 bitpos
+= BITS_PER_UNIT
)
7463 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7464 if (UNITS_PER_WORD
< 4)
7466 word
= byte
/ UNITS_PER_WORD
;
7467 if (WORDS_BIG_ENDIAN
)
7468 word
= (words
- 1) - word
;
7469 offset
= word
* UNITS_PER_WORD
;
7470 if (BYTES_BIG_ENDIAN
)
7471 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7473 offset
+= byte
% UNITS_PER_WORD
;
7476 offset
= BYTES_BIG_ENDIAN
? 3 - byte
: byte
;
7477 value
= ptr
[offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3)];
7479 tmp
[bitpos
/ 32] |= (unsigned long)value
<< (bitpos
& 31);
7482 real_from_target (&r
, tmp
, mode
);
7483 return build_real (type
, r
);
7487 /* Subroutine of native_interpret_expr. Interpret the contents of
7488 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7489 If the buffer cannot be interpreted, return NULL_TREE. */
7492 native_interpret_complex (tree type
, const unsigned char *ptr
, int len
)
7494 tree etype
, rpart
, ipart
;
7497 etype
= TREE_TYPE (type
);
7498 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7501 rpart
= native_interpret_expr (etype
, ptr
, size
);
7504 ipart
= native_interpret_expr (etype
, ptr
+size
, size
);
7507 return build_complex (type
, rpart
, ipart
);
7511 /* Subroutine of native_interpret_expr. Interpret the contents of
7512 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7513 If the buffer cannot be interpreted, return NULL_TREE. */
7516 native_interpret_vector (tree type
, const unsigned char *ptr
, int len
)
7518 tree etype
, elem
, elements
;
7521 etype
= TREE_TYPE (type
);
7522 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7523 count
= TYPE_VECTOR_SUBPARTS (type
);
7524 if (size
* count
> len
)
7527 elements
= NULL_TREE
;
7528 for (i
= count
- 1; i
>= 0; i
--)
7530 elem
= native_interpret_expr (etype
, ptr
+(i
*size
), size
);
7533 elements
= tree_cons (NULL_TREE
, elem
, elements
);
7535 return build_vector (type
, elements
);
7539 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7540 the buffer PTR of length LEN as a constant of type TYPE. For
7541 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7542 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7543 return NULL_TREE. */
7546 native_interpret_expr (tree type
, const unsigned char *ptr
, int len
)
7548 switch (TREE_CODE (type
))
7553 return native_interpret_int (type
, ptr
, len
);
7556 return native_interpret_real (type
, ptr
, len
);
7559 return native_interpret_complex (type
, ptr
, len
);
7562 return native_interpret_vector (type
, ptr
, len
);
7570 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7571 TYPE at compile-time. If we're unable to perform the conversion
7572 return NULL_TREE. */
7575 fold_view_convert_expr (tree type
, tree expr
)
7577 /* We support up to 512-bit values (for V8DFmode). */
7578 unsigned char buffer
[64];
7581 /* Check that the host and target are sane. */
7582 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8)
7585 len
= native_encode_expr (expr
, buffer
, sizeof (buffer
));
7589 return native_interpret_expr (type
, buffer
, len
);
7592 /* Build an expression for the address of T. Folds away INDIRECT_REF
7593 to avoid confusing the gimplify process. */
7596 build_fold_addr_expr_with_type_loc (location_t loc
, tree t
, tree ptrtype
)
7598 /* The size of the object is not relevant when talking about its address. */
7599 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
7600 t
= TREE_OPERAND (t
, 0);
7602 if (TREE_CODE (t
) == INDIRECT_REF
)
7604 t
= TREE_OPERAND (t
, 0);
7606 if (TREE_TYPE (t
) != ptrtype
)
7607 t
= build1_loc (loc
, NOP_EXPR
, ptrtype
, t
);
7609 else if (TREE_CODE (t
) == MEM_REF
7610 && integer_zerop (TREE_OPERAND (t
, 1)))
7611 return TREE_OPERAND (t
, 0);
7612 else if (TREE_CODE (t
) == VIEW_CONVERT_EXPR
)
7614 t
= build_fold_addr_expr_loc (loc
, TREE_OPERAND (t
, 0));
7616 if (TREE_TYPE (t
) != ptrtype
)
7617 t
= fold_convert_loc (loc
, ptrtype
, t
);
7620 t
= build1_loc (loc
, ADDR_EXPR
, ptrtype
, t
);
7625 /* Build an expression for the address of T. */
7628 build_fold_addr_expr_loc (location_t loc
, tree t
)
7630 tree ptrtype
= build_pointer_type (TREE_TYPE (t
));
7632 return build_fold_addr_expr_with_type_loc (loc
, t
, ptrtype
);
7635 /* Fold a unary expression of code CODE and type TYPE with operand
7636 OP0. Return the folded expression if folding is successful.
7637 Otherwise, return NULL_TREE. */
7640 fold_unary_loc (location_t loc
, enum tree_code code
, tree type
, tree op0
)
7644 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
7646 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
7647 && TREE_CODE_LENGTH (code
) == 1);
7652 if (CONVERT_EXPR_CODE_P (code
)
7653 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
|| code
== NEGATE_EXPR
)
7655 /* Don't use STRIP_NOPS, because signedness of argument type
7657 STRIP_SIGN_NOPS (arg0
);
7661 /* Strip any conversions that don't change the mode. This
7662 is safe for every expression, except for a comparison
7663 expression because its signedness is derived from its
7666 Note that this is done as an internal manipulation within
7667 the constant folder, in order to find the simplest
7668 representation of the arguments so that their form can be
7669 studied. In any cases, the appropriate type conversions
7670 should be put back in the tree that will get out of the
7676 if (TREE_CODE_CLASS (code
) == tcc_unary
)
7678 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7679 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7680 fold_build1_loc (loc
, code
, type
,
7681 fold_convert_loc (loc
, TREE_TYPE (op0
),
7682 TREE_OPERAND (arg0
, 1))));
7683 else if (TREE_CODE (arg0
) == COND_EXPR
)
7685 tree arg01
= TREE_OPERAND (arg0
, 1);
7686 tree arg02
= TREE_OPERAND (arg0
, 2);
7687 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
7688 arg01
= fold_build1_loc (loc
, code
, type
,
7689 fold_convert_loc (loc
,
7690 TREE_TYPE (op0
), arg01
));
7691 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
7692 arg02
= fold_build1_loc (loc
, code
, type
,
7693 fold_convert_loc (loc
,
7694 TREE_TYPE (op0
), arg02
));
7695 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7698 /* If this was a conversion, and all we did was to move into
7699 inside the COND_EXPR, bring it back out. But leave it if
7700 it is a conversion from integer to integer and the
7701 result precision is no wider than a word since such a
7702 conversion is cheap and may be optimized away by combine,
7703 while it couldn't if it were outside the COND_EXPR. Then return
7704 so we don't get into an infinite recursion loop taking the
7705 conversion out and then back in. */
7707 if ((CONVERT_EXPR_CODE_P (code
)
7708 || code
== NON_LVALUE_EXPR
)
7709 && TREE_CODE (tem
) == COND_EXPR
7710 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
7711 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
7712 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
7713 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
7714 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
7715 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
7716 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7718 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
7719 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
7720 || flag_syntax_only
))
7721 tem
= build1_loc (loc
, code
, type
,
7723 TREE_TYPE (TREE_OPERAND
7724 (TREE_OPERAND (tem
, 1), 0)),
7725 TREE_OPERAND (tem
, 0),
7726 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
7727 TREE_OPERAND (TREE_OPERAND (tem
, 2),
7736 /* Re-association barriers around constants and other re-association
7737 barriers can be removed. */
7738 if (CONSTANT_CLASS_P (op0
)
7739 || TREE_CODE (op0
) == PAREN_EXPR
)
7740 return fold_convert_loc (loc
, type
, op0
);
7745 case FIX_TRUNC_EXPR
:
7746 if (TREE_TYPE (op0
) == type
)
7749 if (COMPARISON_CLASS_P (op0
))
7751 /* If we have (type) (a CMP b) and type is an integral type, return
7752 new expression involving the new type. Canonicalize
7753 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7755 Do not fold the result as that would not simplify further, also
7756 folding again results in recursions. */
7757 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7758 return build2_loc (loc
, TREE_CODE (op0
), type
,
7759 TREE_OPERAND (op0
, 0),
7760 TREE_OPERAND (op0
, 1));
7761 else if (!INTEGRAL_TYPE_P (type
))
7762 return build3_loc (loc
, COND_EXPR
, type
, op0
,
7763 constant_boolean_node (true, type
),
7764 constant_boolean_node (false, type
));
7767 /* Handle cases of two conversions in a row. */
7768 if (CONVERT_EXPR_P (op0
))
7770 tree inside_type
= TREE_TYPE (TREE_OPERAND (op0
, 0));
7771 tree inter_type
= TREE_TYPE (op0
);
7772 int inside_int
= INTEGRAL_TYPE_P (inside_type
);
7773 int inside_ptr
= POINTER_TYPE_P (inside_type
);
7774 int inside_float
= FLOAT_TYPE_P (inside_type
);
7775 int inside_vec
= TREE_CODE (inside_type
) == VECTOR_TYPE
;
7776 unsigned int inside_prec
= TYPE_PRECISION (inside_type
);
7777 int inside_unsignedp
= TYPE_UNSIGNED (inside_type
);
7778 int inter_int
= INTEGRAL_TYPE_P (inter_type
);
7779 int inter_ptr
= POINTER_TYPE_P (inter_type
);
7780 int inter_float
= FLOAT_TYPE_P (inter_type
);
7781 int inter_vec
= TREE_CODE (inter_type
) == VECTOR_TYPE
;
7782 unsigned int inter_prec
= TYPE_PRECISION (inter_type
);
7783 int inter_unsignedp
= TYPE_UNSIGNED (inter_type
);
7784 int final_int
= INTEGRAL_TYPE_P (type
);
7785 int final_ptr
= POINTER_TYPE_P (type
);
7786 int final_float
= FLOAT_TYPE_P (type
);
7787 int final_vec
= TREE_CODE (type
) == VECTOR_TYPE
;
7788 unsigned int final_prec
= TYPE_PRECISION (type
);
7789 int final_unsignedp
= TYPE_UNSIGNED (type
);
7791 /* In addition to the cases of two conversions in a row
7792 handled below, if we are converting something to its own
7793 type via an object of identical or wider precision, neither
7794 conversion is needed. */
7795 if (TYPE_MAIN_VARIANT (inside_type
) == TYPE_MAIN_VARIANT (type
)
7796 && (((inter_int
|| inter_ptr
) && final_int
)
7797 || (inter_float
&& final_float
))
7798 && inter_prec
>= final_prec
)
7799 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7801 /* Likewise, if the intermediate and initial types are either both
7802 float or both integer, we don't need the middle conversion if the
7803 former is wider than the latter and doesn't change the signedness
7804 (for integers). Avoid this if the final type is a pointer since
7805 then we sometimes need the middle conversion. Likewise if the
7806 final type has a precision not equal to the size of its mode. */
7807 if (((inter_int
&& inside_int
)
7808 || (inter_float
&& inside_float
)
7809 || (inter_vec
&& inside_vec
))
7810 && inter_prec
>= inside_prec
7811 && (inter_float
|| inter_vec
7812 || inter_unsignedp
== inside_unsignedp
)
7813 && ! (final_prec
!= GET_MODE_BITSIZE (TYPE_MODE (type
))
7814 && TYPE_MODE (type
) == TYPE_MODE (inter_type
))
7816 && (! final_vec
|| inter_prec
== inside_prec
))
7817 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7819 /* If we have a sign-extension of a zero-extended value, we can
7820 replace that by a single zero-extension. */
7821 if (inside_int
&& inter_int
&& final_int
7822 && inside_prec
< inter_prec
&& inter_prec
< final_prec
7823 && inside_unsignedp
&& !inter_unsignedp
)
7824 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7826 /* Two conversions in a row are not needed unless:
7827 - some conversion is floating-point (overstrict for now), or
7828 - some conversion is a vector (overstrict for now), or
7829 - the intermediate type is narrower than both initial and
7831 - the intermediate type and innermost type differ in signedness,
7832 and the outermost type is wider than the intermediate, or
7833 - the initial type is a pointer type and the precisions of the
7834 intermediate and final types differ, or
7835 - the final type is a pointer type and the precisions of the
7836 initial and intermediate types differ. */
7837 if (! inside_float
&& ! inter_float
&& ! final_float
7838 && ! inside_vec
&& ! inter_vec
&& ! final_vec
7839 && (inter_prec
>= inside_prec
|| inter_prec
>= final_prec
)
7840 && ! (inside_int
&& inter_int
7841 && inter_unsignedp
!= inside_unsignedp
7842 && inter_prec
< final_prec
)
7843 && ((inter_unsignedp
&& inter_prec
> inside_prec
)
7844 == (final_unsignedp
&& final_prec
> inter_prec
))
7845 && ! (inside_ptr
&& inter_prec
!= final_prec
)
7846 && ! (final_ptr
&& inside_prec
!= inter_prec
)
7847 && ! (final_prec
!= GET_MODE_BITSIZE (TYPE_MODE (type
))
7848 && TYPE_MODE (type
) == TYPE_MODE (inter_type
)))
7849 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7852 /* Handle (T *)&A.B.C for A being of type T and B and C
7853 living at offset zero. This occurs frequently in
7854 C++ upcasting and then accessing the base. */
7855 if (TREE_CODE (op0
) == ADDR_EXPR
7856 && POINTER_TYPE_P (type
)
7857 && handled_component_p (TREE_OPERAND (op0
, 0)))
7859 HOST_WIDE_INT bitsize
, bitpos
;
7861 enum machine_mode mode
;
7862 int unsignedp
, volatilep
;
7863 tree base
= TREE_OPERAND (op0
, 0);
7864 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
7865 &mode
, &unsignedp
, &volatilep
, false);
7866 /* If the reference was to a (constant) zero offset, we can use
7867 the address of the base if it has the same base type
7868 as the result type and the pointer type is unqualified. */
7869 if (! offset
&& bitpos
== 0
7870 && (TYPE_MAIN_VARIANT (TREE_TYPE (type
))
7871 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
7872 && TYPE_QUALS (type
) == TYPE_UNQUALIFIED
)
7873 return fold_convert_loc (loc
, type
,
7874 build_fold_addr_expr_loc (loc
, base
));
7877 if (TREE_CODE (op0
) == MODIFY_EXPR
7878 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
7879 /* Detect assigning a bitfield. */
7880 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
7882 (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
7884 /* Don't leave an assignment inside a conversion
7885 unless assigning a bitfield. */
7886 tem
= fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 1));
7887 /* First do the assignment, then return converted constant. */
7888 tem
= build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
7889 TREE_NO_WARNING (tem
) = 1;
7890 TREE_USED (tem
) = 1;
7894 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7895 constants (if x has signed type, the sign bit cannot be set
7896 in c). This folds extension into the BIT_AND_EXPR.
7897 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7898 very likely don't have maximal range for their precision and this
7899 transformation effectively doesn't preserve non-maximal ranges. */
7900 if (TREE_CODE (type
) == INTEGER_TYPE
7901 && TREE_CODE (op0
) == BIT_AND_EXPR
7902 && TREE_CODE (TREE_OPERAND (op0
, 1)) == INTEGER_CST
)
7904 tree and_expr
= op0
;
7905 tree and0
= TREE_OPERAND (and_expr
, 0);
7906 tree and1
= TREE_OPERAND (and_expr
, 1);
7909 if (TYPE_UNSIGNED (TREE_TYPE (and_expr
))
7910 || (TYPE_PRECISION (type
)
7911 <= TYPE_PRECISION (TREE_TYPE (and_expr
))))
7913 else if (TYPE_PRECISION (TREE_TYPE (and1
))
7914 <= HOST_BITS_PER_WIDE_INT
7915 && host_integerp (and1
, 1))
7917 unsigned HOST_WIDE_INT cst
;
7919 cst
= tree_low_cst (and1
, 1);
7920 cst
&= (HOST_WIDE_INT
) -1
7921 << (TYPE_PRECISION (TREE_TYPE (and1
)) - 1);
7922 change
= (cst
== 0);
7923 #ifdef LOAD_EXTEND_OP
7925 && !flag_syntax_only
7926 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0
)))
7929 tree uns
= unsigned_type_for (TREE_TYPE (and0
));
7930 and0
= fold_convert_loc (loc
, uns
, and0
);
7931 and1
= fold_convert_loc (loc
, uns
, and1
);
7937 tem
= force_fit_type_double (type
, tree_to_double_int (and1
),
7938 0, TREE_OVERFLOW (and1
));
7939 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
7940 fold_convert_loc (loc
, type
, and0
), tem
);
7944 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7945 when one of the new casts will fold away. Conservatively we assume
7946 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7947 if (POINTER_TYPE_P (type
)
7948 && TREE_CODE (arg0
) == POINTER_PLUS_EXPR
7949 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
7950 || TREE_CODE (TREE_OPERAND (arg0
, 0)) == NOP_EXPR
7951 || TREE_CODE (TREE_OPERAND (arg0
, 1)) == NOP_EXPR
))
7953 tree arg00
= TREE_OPERAND (arg0
, 0);
7954 tree arg01
= TREE_OPERAND (arg0
, 1);
7956 return fold_build_pointer_plus_loc
7957 (loc
, fold_convert_loc (loc
, type
, arg00
), arg01
);
7960 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7961 of the same precision, and X is an integer type not narrower than
7962 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7963 if (INTEGRAL_TYPE_P (type
)
7964 && TREE_CODE (op0
) == BIT_NOT_EXPR
7965 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7966 && CONVERT_EXPR_P (TREE_OPERAND (op0
, 0))
7967 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
7969 tem
= TREE_OPERAND (TREE_OPERAND (op0
, 0), 0);
7970 if (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7971 && TYPE_PRECISION (type
) <= TYPE_PRECISION (TREE_TYPE (tem
)))
7972 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
7973 fold_convert_loc (loc
, type
, tem
));
7976 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7977 type of X and Y (integer types only). */
7978 if (INTEGRAL_TYPE_P (type
)
7979 && TREE_CODE (op0
) == MULT_EXPR
7980 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7981 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
7983 /* Be careful not to introduce new overflows. */
7985 if (TYPE_OVERFLOW_WRAPS (type
))
7988 mult_type
= unsigned_type_for (type
);
7990 if (TYPE_PRECISION (mult_type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
7992 tem
= fold_build2_loc (loc
, MULT_EXPR
, mult_type
,
7993 fold_convert_loc (loc
, mult_type
,
7994 TREE_OPERAND (op0
, 0)),
7995 fold_convert_loc (loc
, mult_type
,
7996 TREE_OPERAND (op0
, 1)));
7997 return fold_convert_loc (loc
, type
, tem
);
8001 tem
= fold_convert_const (code
, type
, op0
);
8002 return tem
? tem
: NULL_TREE
;
8004 case ADDR_SPACE_CONVERT_EXPR
:
8005 if (integer_zerop (arg0
))
8006 return fold_convert_const (code
, type
, arg0
);
8009 case FIXED_CONVERT_EXPR
:
8010 tem
= fold_convert_const (code
, type
, arg0
);
8011 return tem
? tem
: NULL_TREE
;
8013 case VIEW_CONVERT_EXPR
:
8014 if (TREE_TYPE (op0
) == type
)
8016 if (TREE_CODE (op0
) == VIEW_CONVERT_EXPR
)
8017 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
,
8018 type
, TREE_OPERAND (op0
, 0));
8019 if (TREE_CODE (op0
) == MEM_REF
)
8020 return fold_build2_loc (loc
, MEM_REF
, type
,
8021 TREE_OPERAND (op0
, 0), TREE_OPERAND (op0
, 1));
8023 /* For integral conversions with the same precision or pointer
8024 conversions use a NOP_EXPR instead. */
8025 if ((INTEGRAL_TYPE_P (type
)
8026 || POINTER_TYPE_P (type
))
8027 && (INTEGRAL_TYPE_P (TREE_TYPE (op0
))
8028 || POINTER_TYPE_P (TREE_TYPE (op0
)))
8029 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
8030 return fold_convert_loc (loc
, type
, op0
);
8032 /* Strip inner integral conversions that do not change the precision. */
8033 if (CONVERT_EXPR_P (op0
)
8034 && (INTEGRAL_TYPE_P (TREE_TYPE (op0
))
8035 || POINTER_TYPE_P (TREE_TYPE (op0
)))
8036 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0
, 0)))
8037 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0
, 0))))
8038 && (TYPE_PRECISION (TREE_TYPE (op0
))
8039 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0
, 0)))))
8040 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
,
8041 type
, TREE_OPERAND (op0
, 0));
8043 return fold_view_convert_expr (type
, op0
);
8046 tem
= fold_negate_expr (loc
, arg0
);
8048 return fold_convert_loc (loc
, type
, tem
);
8052 if (TREE_CODE (arg0
) == INTEGER_CST
|| TREE_CODE (arg0
) == REAL_CST
)
8053 return fold_abs_const (arg0
, type
);
8054 else if (TREE_CODE (arg0
) == NEGATE_EXPR
)
8055 return fold_build1_loc (loc
, ABS_EXPR
, type
, TREE_OPERAND (arg0
, 0));
8056 /* Convert fabs((double)float) into (double)fabsf(float). */
8057 else if (TREE_CODE (arg0
) == NOP_EXPR
8058 && TREE_CODE (type
) == REAL_TYPE
)
8060 tree targ0
= strip_float_extensions (arg0
);
8062 return fold_convert_loc (loc
, type
,
8063 fold_build1_loc (loc
, ABS_EXPR
,
8067 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8068 else if (TREE_CODE (arg0
) == ABS_EXPR
)
8070 else if (tree_expr_nonnegative_p (arg0
))
8073 /* Strip sign ops from argument. */
8074 if (TREE_CODE (type
) == REAL_TYPE
)
8076 tem
= fold_strip_sign_ops (arg0
);
8078 return fold_build1_loc (loc
, ABS_EXPR
, type
,
8079 fold_convert_loc (loc
, type
, tem
));
8084 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
8085 return fold_convert_loc (loc
, type
, arg0
);
8086 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
8088 tree itype
= TREE_TYPE (type
);
8089 tree rpart
= fold_convert_loc (loc
, itype
, TREE_OPERAND (arg0
, 0));
8090 tree ipart
= fold_convert_loc (loc
, itype
, TREE_OPERAND (arg0
, 1));
8091 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
,
8092 negate_expr (ipart
));
8094 if (TREE_CODE (arg0
) == COMPLEX_CST
)
8096 tree itype
= TREE_TYPE (type
);
8097 tree rpart
= fold_convert_loc (loc
, itype
, TREE_REALPART (arg0
));
8098 tree ipart
= fold_convert_loc (loc
, itype
, TREE_IMAGPART (arg0
));
8099 return build_complex (type
, rpart
, negate_expr (ipart
));
8101 if (TREE_CODE (arg0
) == CONJ_EXPR
)
8102 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
8106 if (TREE_CODE (arg0
) == INTEGER_CST
)
8107 return fold_not_const (arg0
, type
);
8108 else if (TREE_CODE (arg0
) == BIT_NOT_EXPR
)
8109 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
8110 /* Convert ~ (-A) to A - 1. */
8111 else if (INTEGRAL_TYPE_P (type
) && TREE_CODE (arg0
) == NEGATE_EXPR
)
8112 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
8113 fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0)),
8114 build_int_cst (type
, 1));
8115 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8116 else if (INTEGRAL_TYPE_P (type
)
8117 && ((TREE_CODE (arg0
) == MINUS_EXPR
8118 && integer_onep (TREE_OPERAND (arg0
, 1)))
8119 || (TREE_CODE (arg0
) == PLUS_EXPR
8120 && integer_all_onesp (TREE_OPERAND (arg0
, 1)))))
8121 return fold_build1_loc (loc
, NEGATE_EXPR
, type
,
8122 fold_convert_loc (loc
, type
,
8123 TREE_OPERAND (arg0
, 0)));
8124 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8125 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
8126 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
8127 fold_convert_loc (loc
, type
,
8128 TREE_OPERAND (arg0
, 0)))))
8129 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, tem
,
8130 fold_convert_loc (loc
, type
,
8131 TREE_OPERAND (arg0
, 1)));
8132 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
8133 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
8134 fold_convert_loc (loc
, type
,
8135 TREE_OPERAND (arg0
, 1)))))
8136 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
8137 fold_convert_loc (loc
, type
,
8138 TREE_OPERAND (arg0
, 0)), tem
);
8139 /* Perform BIT_NOT_EXPR on each element individually. */
8140 else if (TREE_CODE (arg0
) == VECTOR_CST
)
8142 tree elements
= TREE_VECTOR_CST_ELTS (arg0
), elem
, list
= NULL_TREE
;
8143 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
8145 for (i
= 0; i
< count
; i
++)
8149 elem
= TREE_VALUE (elements
);
8150 elem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (type
), elem
);
8151 if (elem
== NULL_TREE
)
8153 elements
= TREE_CHAIN (elements
);
8156 elem
= build_int_cst (TREE_TYPE (type
), -1);
8157 list
= tree_cons (NULL_TREE
, elem
, list
);
8160 return build_vector (type
, nreverse (list
));
8165 case TRUTH_NOT_EXPR
:
8166 /* The argument to invert_truthvalue must have Boolean type. */
8167 if (TREE_CODE (TREE_TYPE (arg0
)) != BOOLEAN_TYPE
)
8168 arg0
= fold_convert_loc (loc
, boolean_type_node
, arg0
);
8170 /* Note that the operand of this must be an int
8171 and its values must be 0 or 1.
8172 ("true" is a fixed value perhaps depending on the language,
8173 but we don't handle values other than 1 correctly yet.) */
8174 tem
= fold_truth_not_expr (loc
, arg0
);
8177 return fold_convert_loc (loc
, type
, tem
);
8180 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
8181 return fold_convert_loc (loc
, type
, arg0
);
8182 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
8183 return omit_one_operand_loc (loc
, type
, TREE_OPERAND (arg0
, 0),
8184 TREE_OPERAND (arg0
, 1));
8185 if (TREE_CODE (arg0
) == COMPLEX_CST
)
8186 return fold_convert_loc (loc
, type
, TREE_REALPART (arg0
));
8187 if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8189 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8190 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), itype
,
8191 fold_build1_loc (loc
, REALPART_EXPR
, itype
,
8192 TREE_OPERAND (arg0
, 0)),
8193 fold_build1_loc (loc
, REALPART_EXPR
, itype
,
8194 TREE_OPERAND (arg0
, 1)));
8195 return fold_convert_loc (loc
, type
, tem
);
8197 if (TREE_CODE (arg0
) == CONJ_EXPR
)
8199 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8200 tem
= fold_build1_loc (loc
, REALPART_EXPR
, itype
,
8201 TREE_OPERAND (arg0
, 0));
8202 return fold_convert_loc (loc
, type
, tem
);
8204 if (TREE_CODE (arg0
) == CALL_EXPR
)
8206 tree fn
= get_callee_fndecl (arg0
);
8207 if (fn
&& DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
)
8208 switch (DECL_FUNCTION_CODE (fn
))
8210 CASE_FLT_FN (BUILT_IN_CEXPI
):
8211 fn
= mathfn_built_in (type
, BUILT_IN_COS
);
8213 return build_call_expr_loc (loc
, fn
, 1, CALL_EXPR_ARG (arg0
, 0));
8223 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
8224 return build_zero_cst (type
);
8225 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
8226 return omit_one_operand_loc (loc
, type
, TREE_OPERAND (arg0
, 1),
8227 TREE_OPERAND (arg0
, 0));
8228 if (TREE_CODE (arg0
) == COMPLEX_CST
)
8229 return fold_convert_loc (loc
, type
, TREE_IMAGPART (arg0
));
8230 if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8232 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8233 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), itype
,
8234 fold_build1_loc (loc
, IMAGPART_EXPR
, itype
,
8235 TREE_OPERAND (arg0
, 0)),
8236 fold_build1_loc (loc
, IMAGPART_EXPR
, itype
,
8237 TREE_OPERAND (arg0
, 1)));
8238 return fold_convert_loc (loc
, type
, tem
);
8240 if (TREE_CODE (arg0
) == CONJ_EXPR
)
8242 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8243 tem
= fold_build1_loc (loc
, IMAGPART_EXPR
, itype
, TREE_OPERAND (arg0
, 0));
8244 return fold_convert_loc (loc
, type
, negate_expr (tem
));
8246 if (TREE_CODE (arg0
) == CALL_EXPR
)
8248 tree fn
= get_callee_fndecl (arg0
);
8249 if (fn
&& DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
)
8250 switch (DECL_FUNCTION_CODE (fn
))
8252 CASE_FLT_FN (BUILT_IN_CEXPI
):
8253 fn
= mathfn_built_in (type
, BUILT_IN_SIN
);
8255 return build_call_expr_loc (loc
, fn
, 1, CALL_EXPR_ARG (arg0
, 0));
8265 /* Fold *&X to X if X is an lvalue. */
8266 if (TREE_CODE (op0
) == ADDR_EXPR
)
8268 tree op00
= TREE_OPERAND (op0
, 0);
8269 if ((TREE_CODE (op00
) == VAR_DECL
8270 || TREE_CODE (op00
) == PARM_DECL
8271 || TREE_CODE (op00
) == RESULT_DECL
)
8272 && !TREE_READONLY (op00
))
8279 } /* switch (code) */
8283 /* If the operation was a conversion do _not_ mark a resulting constant
8284 with TREE_OVERFLOW if the original constant was not. These conversions
8285 have implementation defined behavior and retaining the TREE_OVERFLOW
8286 flag here would confuse later passes such as VRP. */
8288 fold_unary_ignore_overflow_loc (location_t loc
, enum tree_code code
,
8289 tree type
, tree op0
)
8291 tree res
= fold_unary_loc (loc
, code
, type
, op0
);
8293 && TREE_CODE (res
) == INTEGER_CST
8294 && TREE_CODE (op0
) == INTEGER_CST
8295 && CONVERT_EXPR_CODE_P (code
))
8296 TREE_OVERFLOW (res
) = TREE_OVERFLOW (op0
);
8301 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8302 operands OP0 and OP1. LOC is the location of the resulting expression.
8303 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8304 Return the folded expression if folding is successful. Otherwise,
8305 return NULL_TREE. */
8307 fold_truth_andor (location_t loc
, enum tree_code code
, tree type
,
8308 tree arg0
, tree arg1
, tree op0
, tree op1
)
8312 /* We only do these simplifications if we are optimizing. */
8316 /* Check for things like (A || B) && (A || C). We can convert this
8317 to A || (B && C). Note that either operator can be any of the four
8318 truth and/or operations and the transformation will still be
8319 valid. Also note that we only care about order for the
8320 ANDIF and ORIF operators. If B contains side effects, this
8321 might change the truth-value of A. */
8322 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8323 && (TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
8324 || TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
8325 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
8326 || TREE_CODE (arg0
) == TRUTH_OR_EXPR
)
8327 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0
, 1)))
8329 tree a00
= TREE_OPERAND (arg0
, 0);
8330 tree a01
= TREE_OPERAND (arg0
, 1);
8331 tree a10
= TREE_OPERAND (arg1
, 0);
8332 tree a11
= TREE_OPERAND (arg1
, 1);
8333 int commutative
= ((TREE_CODE (arg0
) == TRUTH_OR_EXPR
8334 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
)
8335 && (code
== TRUTH_AND_EXPR
8336 || code
== TRUTH_OR_EXPR
));
8338 if (operand_equal_p (a00
, a10
, 0))
8339 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
8340 fold_build2_loc (loc
, code
, type
, a01
, a11
));
8341 else if (commutative
&& operand_equal_p (a00
, a11
, 0))
8342 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
8343 fold_build2_loc (loc
, code
, type
, a01
, a10
));
8344 else if (commutative
&& operand_equal_p (a01
, a10
, 0))
8345 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a01
,
8346 fold_build2_loc (loc
, code
, type
, a00
, a11
));
8348 /* This case if tricky because we must either have commutative
8349 operators or else A10 must not have side-effects. */
8351 else if ((commutative
|| ! TREE_SIDE_EFFECTS (a10
))
8352 && operand_equal_p (a01
, a11
, 0))
8353 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
8354 fold_build2_loc (loc
, code
, type
, a00
, a10
),
8358 /* See if we can build a range comparison. */
8359 if (0 != (tem
= fold_range_test (loc
, code
, type
, op0
, op1
)))
8362 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
)
8363 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
))
8365 tem
= merge_truthop_with_opposite_arm (loc
, arg0
, arg1
, true);
8367 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
8370 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ORIF_EXPR
)
8371 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ANDIF_EXPR
))
8373 tem
= merge_truthop_with_opposite_arm (loc
, arg1
, arg0
, false);
8375 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
8378 /* Check for the possibility of merging component references. If our
8379 lhs is another similar operation, try to merge its rhs with our
8380 rhs. Then try to merge our lhs and rhs. */
8381 if (TREE_CODE (arg0
) == code
8382 && 0 != (tem
= fold_truthop (loc
, code
, type
,
8383 TREE_OPERAND (arg0
, 1), arg1
)))
8384 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
8386 if ((tem
= fold_truthop (loc
, code
, type
, arg0
, arg1
)) != 0)
8392 /* Fold a binary expression of code CODE and type TYPE with operands
8393 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8394 Return the folded expression if folding is successful. Otherwise,
8395 return NULL_TREE. */
8398 fold_minmax (location_t loc
, enum tree_code code
, tree type
, tree op0
, tree op1
)
8400 enum tree_code compl_code
;
8402 if (code
== MIN_EXPR
)
8403 compl_code
= MAX_EXPR
;
8404 else if (code
== MAX_EXPR
)
8405 compl_code
= MIN_EXPR
;
8409 /* MIN (MAX (a, b), b) == b. */
8410 if (TREE_CODE (op0
) == compl_code
8411 && operand_equal_p (TREE_OPERAND (op0
, 1), op1
, 0))
8412 return omit_one_operand_loc (loc
, type
, op1
, TREE_OPERAND (op0
, 0));
8414 /* MIN (MAX (b, a), b) == b. */
8415 if (TREE_CODE (op0
) == compl_code
8416 && operand_equal_p (TREE_OPERAND (op0
, 0), op1
, 0)
8417 && reorder_operands_p (TREE_OPERAND (op0
, 1), op1
))
8418 return omit_one_operand_loc (loc
, type
, op1
, TREE_OPERAND (op0
, 1));
8420 /* MIN (a, MAX (a, b)) == a. */
8421 if (TREE_CODE (op1
) == compl_code
8422 && operand_equal_p (op0
, TREE_OPERAND (op1
, 0), 0)
8423 && reorder_operands_p (op0
, TREE_OPERAND (op1
, 1)))
8424 return omit_one_operand_loc (loc
, type
, op0
, TREE_OPERAND (op1
, 1));
8426 /* MIN (a, MAX (b, a)) == a. */
8427 if (TREE_CODE (op1
) == compl_code
8428 && operand_equal_p (op0
, TREE_OPERAND (op1
, 1), 0)
8429 && reorder_operands_p (op0
, TREE_OPERAND (op1
, 0)))
8430 return omit_one_operand_loc (loc
, type
, op0
, TREE_OPERAND (op1
, 0));
8435 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8436 by changing CODE to reduce the magnitude of constants involved in
8437 ARG0 of the comparison.
8438 Returns a canonicalized comparison tree if a simplification was
8439 possible, otherwise returns NULL_TREE.
8440 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8441 valid if signed overflow is undefined. */
8444 maybe_canonicalize_comparison_1 (location_t loc
, enum tree_code code
, tree type
,
8445 tree arg0
, tree arg1
,
8446 bool *strict_overflow_p
)
8448 enum tree_code code0
= TREE_CODE (arg0
);
8449 tree t
, cst0
= NULL_TREE
;
8453 /* Match A +- CST code arg1 and CST code arg1. We can change the
8454 first form only if overflow is undefined. */
8455 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
8456 /* In principle pointers also have undefined overflow behavior,
8457 but that causes problems elsewhere. */
8458 && !POINTER_TYPE_P (TREE_TYPE (arg0
))
8459 && (code0
== MINUS_EXPR
8460 || code0
== PLUS_EXPR
)
8461 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
8462 || code0
== INTEGER_CST
))
8465 /* Identify the constant in arg0 and its sign. */
8466 if (code0
== INTEGER_CST
)
8469 cst0
= TREE_OPERAND (arg0
, 1);
8470 sgn0
= tree_int_cst_sgn (cst0
);
8472 /* Overflowed constants and zero will cause problems. */
8473 if (integer_zerop (cst0
)
8474 || TREE_OVERFLOW (cst0
))
8477 /* See if we can reduce the magnitude of the constant in
8478 arg0 by changing the comparison code. */
8479 if (code0
== INTEGER_CST
)
8481 /* CST <= arg1 -> CST-1 < arg1. */
8482 if (code
== LE_EXPR
&& sgn0
== 1)
8484 /* -CST < arg1 -> -CST-1 <= arg1. */
8485 else if (code
== LT_EXPR
&& sgn0
== -1)
8487 /* CST > arg1 -> CST-1 >= arg1. */
8488 else if (code
== GT_EXPR
&& sgn0
== 1)
8490 /* -CST >= arg1 -> -CST-1 > arg1. */
8491 else if (code
== GE_EXPR
&& sgn0
== -1)
8495 /* arg1 code' CST' might be more canonical. */
8500 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8502 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
8504 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8505 else if (code
== GT_EXPR
8506 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
8508 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8509 else if (code
== LE_EXPR
8510 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
8512 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8513 else if (code
== GE_EXPR
8514 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
8518 *strict_overflow_p
= true;
8521 /* Now build the constant reduced in magnitude. But not if that
8522 would produce one outside of its types range. */
8523 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0
))
8525 && TYPE_MIN_VALUE (TREE_TYPE (cst0
))
8526 && tree_int_cst_equal (cst0
, TYPE_MIN_VALUE (TREE_TYPE (cst0
))))
8528 && TYPE_MAX_VALUE (TREE_TYPE (cst0
))
8529 && tree_int_cst_equal (cst0
, TYPE_MAX_VALUE (TREE_TYPE (cst0
))))))
8530 /* We cannot swap the comparison here as that would cause us to
8531 endlessly recurse. */
8534 t
= int_const_binop (sgn0
== -1 ? PLUS_EXPR
: MINUS_EXPR
,
8535 cst0
, build_int_cst (TREE_TYPE (cst0
), 1));
8536 if (code0
!= INTEGER_CST
)
8537 t
= fold_build2_loc (loc
, code0
, TREE_TYPE (arg0
), TREE_OPERAND (arg0
, 0), t
);
8538 t
= fold_convert (TREE_TYPE (arg1
), t
);
8540 /* If swapping might yield to a more canonical form, do so. */
8542 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, arg1
, t
);
8544 return fold_build2_loc (loc
, code
, type
, t
, arg1
);
8547 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8548 overflow further. Try to decrease the magnitude of constants involved
8549 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8550 and put sole constants at the second argument position.
8551 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8554 maybe_canonicalize_comparison (location_t loc
, enum tree_code code
, tree type
,
8555 tree arg0
, tree arg1
)
8558 bool strict_overflow_p
;
8559 const char * const warnmsg
= G_("assuming signed overflow does not occur "
8560 "when reducing constant in comparison");
8562 /* Try canonicalization by simplifying arg0. */
8563 strict_overflow_p
= false;
8564 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg0
, arg1
,
8565 &strict_overflow_p
);
8568 if (strict_overflow_p
)
8569 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
8573 /* Try canonicalization by simplifying arg1 using the swapped
8575 code
= swap_tree_comparison (code
);
8576 strict_overflow_p
= false;
8577 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg1
, arg0
,
8578 &strict_overflow_p
);
8579 if (t
&& strict_overflow_p
)
8580 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
8584 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8585 space. This is used to avoid issuing overflow warnings for
8586 expressions like &p->x which can not wrap. */
8589 pointer_may_wrap_p (tree base
, tree offset
, HOST_WIDE_INT bitpos
)
8591 unsigned HOST_WIDE_INT offset_low
, total_low
;
8592 HOST_WIDE_INT size
, offset_high
, total_high
;
8594 if (!POINTER_TYPE_P (TREE_TYPE (base
)))
8600 if (offset
== NULL_TREE
)
8605 else if (TREE_CODE (offset
) != INTEGER_CST
|| TREE_OVERFLOW (offset
))
8609 offset_low
= TREE_INT_CST_LOW (offset
);
8610 offset_high
= TREE_INT_CST_HIGH (offset
);
8613 if (add_double_with_sign (offset_low
, offset_high
,
8614 bitpos
/ BITS_PER_UNIT
, 0,
8615 &total_low
, &total_high
,
8619 if (total_high
!= 0)
8622 size
= int_size_in_bytes (TREE_TYPE (TREE_TYPE (base
)));
8626 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8628 if (TREE_CODE (base
) == ADDR_EXPR
)
8630 HOST_WIDE_INT base_size
;
8632 base_size
= int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base
, 0)));
8633 if (base_size
> 0 && size
< base_size
)
8637 return total_low
> (unsigned HOST_WIDE_INT
) size
;
8640 /* Subroutine of fold_binary. This routine performs all of the
8641 transformations that are common to the equality/inequality
8642 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8643 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8644 fold_binary should call fold_binary. Fold a comparison with
8645 tree code CODE and type TYPE with operands OP0 and OP1. Return
8646 the folded comparison or NULL_TREE. */
8649 fold_comparison (location_t loc
, enum tree_code code
, tree type
,
8652 tree arg0
, arg1
, tem
;
8657 STRIP_SIGN_NOPS (arg0
);
8658 STRIP_SIGN_NOPS (arg1
);
8660 tem
= fold_relational_const (code
, type
, arg0
, arg1
);
8661 if (tem
!= NULL_TREE
)
8664 /* If one arg is a real or integer constant, put it last. */
8665 if (tree_swap_operands_p (arg0
, arg1
, true))
8666 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, op1
, op0
);
8668 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8669 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8670 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8671 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
8672 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
8673 && (TREE_CODE (arg1
) == INTEGER_CST
8674 && !TREE_OVERFLOW (arg1
)))
8676 tree const1
= TREE_OPERAND (arg0
, 1);
8678 tree variable
= TREE_OPERAND (arg0
, 0);
8681 lhs_add
= TREE_CODE (arg0
) != PLUS_EXPR
;
8683 lhs
= fold_build2_loc (loc
, lhs_add
? PLUS_EXPR
: MINUS_EXPR
,
8684 TREE_TYPE (arg1
), const2
, const1
);
8686 /* If the constant operation overflowed this can be
8687 simplified as a comparison against INT_MAX/INT_MIN. */
8688 if (TREE_CODE (lhs
) == INTEGER_CST
8689 && TREE_OVERFLOW (lhs
))
8691 int const1_sgn
= tree_int_cst_sgn (const1
);
8692 enum tree_code code2
= code
;
8694 /* Get the sign of the constant on the lhs if the
8695 operation were VARIABLE + CONST1. */
8696 if (TREE_CODE (arg0
) == MINUS_EXPR
)
8697 const1_sgn
= -const1_sgn
;
8699 /* The sign of the constant determines if we overflowed
8700 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8701 Canonicalize to the INT_MIN overflow by swapping the comparison
8703 if (const1_sgn
== -1)
8704 code2
= swap_tree_comparison (code
);
8706 /* We now can look at the canonicalized case
8707 VARIABLE + 1 CODE2 INT_MIN
8708 and decide on the result. */
8709 if (code2
== LT_EXPR
8711 || code2
== EQ_EXPR
)
8712 return omit_one_operand_loc (loc
, type
, boolean_false_node
, variable
);
8713 else if (code2
== NE_EXPR
8715 || code2
== GT_EXPR
)
8716 return omit_one_operand_loc (loc
, type
, boolean_true_node
, variable
);
8719 if (TREE_CODE (lhs
) == TREE_CODE (arg1
)
8720 && (TREE_CODE (lhs
) != INTEGER_CST
8721 || !TREE_OVERFLOW (lhs
)))
8723 if (code
!= EQ_EXPR
&& code
!= NE_EXPR
)
8724 fold_overflow_warning ("assuming signed overflow does not occur "
8725 "when changing X +- C1 cmp C2 to "
8727 WARN_STRICT_OVERFLOW_COMPARISON
);
8728 return fold_build2_loc (loc
, code
, type
, variable
, lhs
);
8732 /* For comparisons of pointers we can decompose it to a compile time
8733 comparison of the base objects and the offsets into the object.
8734 This requires at least one operand being an ADDR_EXPR or a
8735 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8736 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
8737 && (TREE_CODE (arg0
) == ADDR_EXPR
8738 || TREE_CODE (arg1
) == ADDR_EXPR
8739 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
8740 || TREE_CODE (arg1
) == POINTER_PLUS_EXPR
))
8742 tree base0
, base1
, offset0
= NULL_TREE
, offset1
= NULL_TREE
;
8743 HOST_WIDE_INT bitsize
, bitpos0
= 0, bitpos1
= 0;
8744 enum machine_mode mode
;
8745 int volatilep
, unsignedp
;
8746 bool indirect_base0
= false, indirect_base1
= false;
8748 /* Get base and offset for the access. Strip ADDR_EXPR for
8749 get_inner_reference, but put it back by stripping INDIRECT_REF
8750 off the base object if possible. indirect_baseN will be true
8751 if baseN is not an address but refers to the object itself. */
8753 if (TREE_CODE (arg0
) == ADDR_EXPR
)
8755 base0
= get_inner_reference (TREE_OPERAND (arg0
, 0),
8756 &bitsize
, &bitpos0
, &offset0
, &mode
,
8757 &unsignedp
, &volatilep
, false);
8758 if (TREE_CODE (base0
) == INDIRECT_REF
)
8759 base0
= TREE_OPERAND (base0
, 0);
8761 indirect_base0
= true;
8763 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
8765 base0
= TREE_OPERAND (arg0
, 0);
8766 STRIP_SIGN_NOPS (base0
);
8767 if (TREE_CODE (base0
) == ADDR_EXPR
)
8769 base0
= TREE_OPERAND (base0
, 0);
8770 indirect_base0
= true;
8772 offset0
= TREE_OPERAND (arg0
, 1);
8776 if (TREE_CODE (arg1
) == ADDR_EXPR
)
8778 base1
= get_inner_reference (TREE_OPERAND (arg1
, 0),
8779 &bitsize
, &bitpos1
, &offset1
, &mode
,
8780 &unsignedp
, &volatilep
, false);
8781 if (TREE_CODE (base1
) == INDIRECT_REF
)
8782 base1
= TREE_OPERAND (base1
, 0);
8784 indirect_base1
= true;
8786 else if (TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
8788 base1
= TREE_OPERAND (arg1
, 0);
8789 STRIP_SIGN_NOPS (base1
);
8790 if (TREE_CODE (base1
) == ADDR_EXPR
)
8792 base1
= TREE_OPERAND (base1
, 0);
8793 indirect_base1
= true;
8795 offset1
= TREE_OPERAND (arg1
, 1);
8798 /* A local variable can never be pointed to by
8799 the default SSA name of an incoming parameter. */
8800 if ((TREE_CODE (arg0
) == ADDR_EXPR
8802 && TREE_CODE (base0
) == VAR_DECL
8803 && auto_var_in_fn_p (base0
, current_function_decl
)
8805 && TREE_CODE (base1
) == SSA_NAME
8806 && TREE_CODE (SSA_NAME_VAR (base1
)) == PARM_DECL
8807 && SSA_NAME_IS_DEFAULT_DEF (base1
))
8808 || (TREE_CODE (arg1
) == ADDR_EXPR
8810 && TREE_CODE (base1
) == VAR_DECL
8811 && auto_var_in_fn_p (base1
, current_function_decl
)
8813 && TREE_CODE (base0
) == SSA_NAME
8814 && TREE_CODE (SSA_NAME_VAR (base0
)) == PARM_DECL
8815 && SSA_NAME_IS_DEFAULT_DEF (base0
)))
8817 if (code
== NE_EXPR
)
8818 return constant_boolean_node (1, type
);
8819 else if (code
== EQ_EXPR
)
8820 return constant_boolean_node (0, type
);
8822 /* If we have equivalent bases we might be able to simplify. */
8823 else if (indirect_base0
== indirect_base1
8824 && operand_equal_p (base0
, base1
, 0))
8826 /* We can fold this expression to a constant if the non-constant
8827 offset parts are equal. */
8828 if ((offset0
== offset1
8829 || (offset0
&& offset1
8830 && operand_equal_p (offset0
, offset1
, 0)))
8833 || (indirect_base0
&& DECL_P (base0
))
8834 || POINTER_TYPE_OVERFLOW_UNDEFINED
))
8839 && bitpos0
!= bitpos1
8840 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
8841 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
8842 fold_overflow_warning (("assuming pointer wraparound does not "
8843 "occur when comparing P +- C1 with "
8845 WARN_STRICT_OVERFLOW_CONDITIONAL
);
8850 return constant_boolean_node (bitpos0
== bitpos1
, type
);
8852 return constant_boolean_node (bitpos0
!= bitpos1
, type
);
8854 return constant_boolean_node (bitpos0
< bitpos1
, type
);
8856 return constant_boolean_node (bitpos0
<= bitpos1
, type
);
8858 return constant_boolean_node (bitpos0
>= bitpos1
, type
);
8860 return constant_boolean_node (bitpos0
> bitpos1
, type
);
8864 /* We can simplify the comparison to a comparison of the variable
8865 offset parts if the constant offset parts are equal.
8866 Be careful to use signed size type here because otherwise we
8867 mess with array offsets in the wrong way. This is possible
8868 because pointer arithmetic is restricted to retain within an
8869 object and overflow on pointer differences is undefined as of
8870 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8871 else if (bitpos0
== bitpos1
8872 && ((code
== EQ_EXPR
|| code
== NE_EXPR
)
8873 || (indirect_base0
&& DECL_P (base0
))
8874 || POINTER_TYPE_OVERFLOW_UNDEFINED
))
8876 /* By converting to signed size type we cover middle-end pointer
8877 arithmetic which operates on unsigned pointer types of size
8878 type size and ARRAY_REF offsets which are properly sign or
8879 zero extended from their type in case it is narrower than
8881 if (offset0
== NULL_TREE
)
8882 offset0
= build_int_cst (ssizetype
, 0);
8884 offset0
= fold_convert_loc (loc
, ssizetype
, offset0
);
8885 if (offset1
== NULL_TREE
)
8886 offset1
= build_int_cst (ssizetype
, 0);
8888 offset1
= fold_convert_loc (loc
, ssizetype
, offset1
);
8892 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
8893 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
8894 fold_overflow_warning (("assuming pointer wraparound does not "
8895 "occur when comparing P +- C1 with "
8897 WARN_STRICT_OVERFLOW_COMPARISON
);
8899 return fold_build2_loc (loc
, code
, type
, offset0
, offset1
);
8902 /* For non-equal bases we can simplify if they are addresses
8903 of local binding decls or constants. */
8904 else if (indirect_base0
&& indirect_base1
8905 /* We know that !operand_equal_p (base0, base1, 0)
8906 because the if condition was false. But make
8907 sure two decls are not the same. */
8909 && TREE_CODE (arg0
) == ADDR_EXPR
8910 && TREE_CODE (arg1
) == ADDR_EXPR
8911 && (((TREE_CODE (base0
) == VAR_DECL
8912 || TREE_CODE (base0
) == PARM_DECL
)
8913 && (targetm
.binds_local_p (base0
)
8914 || CONSTANT_CLASS_P (base1
)))
8915 || CONSTANT_CLASS_P (base0
))
8916 && (((TREE_CODE (base1
) == VAR_DECL
8917 || TREE_CODE (base1
) == PARM_DECL
)
8918 && (targetm
.binds_local_p (base1
)
8919 || CONSTANT_CLASS_P (base0
)))
8920 || CONSTANT_CLASS_P (base1
)))
8922 if (code
== EQ_EXPR
)
8923 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
8925 else if (code
== NE_EXPR
)
8926 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
8929 /* For equal offsets we can simplify to a comparison of the
8931 else if (bitpos0
== bitpos1
8933 ? base0
!= TREE_OPERAND (arg0
, 0) : base0
!= arg0
)
8935 ? base1
!= TREE_OPERAND (arg1
, 0) : base1
!= arg1
)
8936 && ((offset0
== offset1
)
8937 || (offset0
&& offset1
8938 && operand_equal_p (offset0
, offset1
, 0))))
8941 base0
= build_fold_addr_expr_loc (loc
, base0
);
8943 base1
= build_fold_addr_expr_loc (loc
, base1
);
8944 return fold_build2_loc (loc
, code
, type
, base0
, base1
);
8948 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8949 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8950 the resulting offset is smaller in absolute value than the
8952 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
8953 && (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8954 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8955 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
8956 && (TREE_CODE (arg1
) == PLUS_EXPR
|| TREE_CODE (arg1
) == MINUS_EXPR
)
8957 && (TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
8958 && !TREE_OVERFLOW (TREE_OPERAND (arg1
, 1))))
8960 tree const1
= TREE_OPERAND (arg0
, 1);
8961 tree const2
= TREE_OPERAND (arg1
, 1);
8962 tree variable1
= TREE_OPERAND (arg0
, 0);
8963 tree variable2
= TREE_OPERAND (arg1
, 0);
8965 const char * const warnmsg
= G_("assuming signed overflow does not "
8966 "occur when combining constants around "
8969 /* Put the constant on the side where it doesn't overflow and is
8970 of lower absolute value than before. */
8971 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8972 ? MINUS_EXPR
: PLUS_EXPR
,
8974 if (!TREE_OVERFLOW (cst
)
8975 && tree_int_cst_compare (const2
, cst
) == tree_int_cst_sgn (const2
))
8977 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
8978 return fold_build2_loc (loc
, code
, type
,
8980 fold_build2_loc (loc
,
8981 TREE_CODE (arg1
), TREE_TYPE (arg1
),
8985 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8986 ? MINUS_EXPR
: PLUS_EXPR
,
8988 if (!TREE_OVERFLOW (cst
)
8989 && tree_int_cst_compare (const1
, cst
) == tree_int_cst_sgn (const1
))
8991 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
8992 return fold_build2_loc (loc
, code
, type
,
8993 fold_build2_loc (loc
, TREE_CODE (arg0
), TREE_TYPE (arg0
),
8999 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9000 signed arithmetic case. That form is created by the compiler
9001 often enough for folding it to be of value. One example is in
9002 computing loop trip counts after Operator Strength Reduction. */
9003 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
9004 && TREE_CODE (arg0
) == MULT_EXPR
9005 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9006 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
9007 && integer_zerop (arg1
))
9009 tree const1
= TREE_OPERAND (arg0
, 1);
9010 tree const2
= arg1
; /* zero */
9011 tree variable1
= TREE_OPERAND (arg0
, 0);
9012 enum tree_code cmp_code
= code
;
9014 /* Handle unfolded multiplication by zero. */
9015 if (integer_zerop (const1
))
9016 return fold_build2_loc (loc
, cmp_code
, type
, const1
, const2
);
9018 fold_overflow_warning (("assuming signed overflow does not occur when "
9019 "eliminating multiplication in comparison "
9021 WARN_STRICT_OVERFLOW_COMPARISON
);
9023 /* If const1 is negative we swap the sense of the comparison. */
9024 if (tree_int_cst_sgn (const1
) < 0)
9025 cmp_code
= swap_tree_comparison (cmp_code
);
9027 return fold_build2_loc (loc
, cmp_code
, type
, variable1
, const2
);
9030 tem
= maybe_canonicalize_comparison (loc
, code
, type
, arg0
, arg1
);
9034 if (FLOAT_TYPE_P (TREE_TYPE (arg0
)))
9036 tree targ0
= strip_float_extensions (arg0
);
9037 tree targ1
= strip_float_extensions (arg1
);
9038 tree newtype
= TREE_TYPE (targ0
);
9040 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
9041 newtype
= TREE_TYPE (targ1
);
9043 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9044 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
9045 return fold_build2_loc (loc
, code
, type
,
9046 fold_convert_loc (loc
, newtype
, targ0
),
9047 fold_convert_loc (loc
, newtype
, targ1
));
9049 /* (-a) CMP (-b) -> b CMP a */
9050 if (TREE_CODE (arg0
) == NEGATE_EXPR
9051 && TREE_CODE (arg1
) == NEGATE_EXPR
)
9052 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg1
, 0),
9053 TREE_OPERAND (arg0
, 0));
9055 if (TREE_CODE (arg1
) == REAL_CST
)
9057 REAL_VALUE_TYPE cst
;
9058 cst
= TREE_REAL_CST (arg1
);
9060 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9061 if (TREE_CODE (arg0
) == NEGATE_EXPR
)
9062 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
,
9063 TREE_OPERAND (arg0
, 0),
9064 build_real (TREE_TYPE (arg1
),
9065 real_value_negate (&cst
)));
9067 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9068 /* a CMP (-0) -> a CMP 0 */
9069 if (REAL_VALUE_MINUS_ZERO (cst
))
9070 return fold_build2_loc (loc
, code
, type
, arg0
,
9071 build_real (TREE_TYPE (arg1
), dconst0
));
9073 /* x != NaN is always true, other ops are always false. */
9074 if (REAL_VALUE_ISNAN (cst
)
9075 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
))))
9077 tem
= (code
== NE_EXPR
) ? integer_one_node
: integer_zero_node
;
9078 return omit_one_operand_loc (loc
, type
, tem
, arg0
);
9081 /* Fold comparisons against infinity. */
9082 if (REAL_VALUE_ISINF (cst
)
9083 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
))))
9085 tem
= fold_inf_compare (loc
, code
, type
, arg0
, arg1
);
9086 if (tem
!= NULL_TREE
)
9091 /* If this is a comparison of a real constant with a PLUS_EXPR
9092 or a MINUS_EXPR of a real constant, we can convert it into a
9093 comparison with a revised real constant as long as no overflow
9094 occurs when unsafe_math_optimizations are enabled. */
9095 if (flag_unsafe_math_optimizations
9096 && TREE_CODE (arg1
) == REAL_CST
9097 && (TREE_CODE (arg0
) == PLUS_EXPR
9098 || TREE_CODE (arg0
) == MINUS_EXPR
)
9099 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
9100 && 0 != (tem
= const_binop (TREE_CODE (arg0
) == PLUS_EXPR
9101 ? MINUS_EXPR
: PLUS_EXPR
,
9102 arg1
, TREE_OPERAND (arg0
, 1)))
9103 && !TREE_OVERFLOW (tem
))
9104 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
9106 /* Likewise, we can simplify a comparison of a real constant with
9107 a MINUS_EXPR whose first operand is also a real constant, i.e.
9108 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9109 floating-point types only if -fassociative-math is set. */
9110 if (flag_associative_math
9111 && TREE_CODE (arg1
) == REAL_CST
9112 && TREE_CODE (arg0
) == MINUS_EXPR
9113 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
9114 && 0 != (tem
= const_binop (MINUS_EXPR
, TREE_OPERAND (arg0
, 0),
9116 && !TREE_OVERFLOW (tem
))
9117 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
,
9118 TREE_OPERAND (arg0
, 1), tem
);
9120 /* Fold comparisons against built-in math functions. */
9121 if (TREE_CODE (arg1
) == REAL_CST
9122 && flag_unsafe_math_optimizations
9123 && ! flag_errno_math
)
9125 enum built_in_function fcode
= builtin_mathfn_code (arg0
);
9127 if (fcode
!= END_BUILTINS
)
9129 tem
= fold_mathfn_compare (loc
, fcode
, code
, type
, arg0
, arg1
);
9130 if (tem
!= NULL_TREE
)
9136 if (TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
9137 && CONVERT_EXPR_P (arg0
))
9139 /* If we are widening one operand of an integer comparison,
9140 see if the other operand is similarly being widened. Perhaps we
9141 can do the comparison in the narrower type. */
9142 tem
= fold_widened_comparison (loc
, code
, type
, arg0
, arg1
);
9146 /* Or if we are changing signedness. */
9147 tem
= fold_sign_changed_comparison (loc
, code
, type
, arg0
, arg1
);
9152 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9153 constant, we can simplify it. */
9154 if (TREE_CODE (arg1
) == INTEGER_CST
9155 && (TREE_CODE (arg0
) == MIN_EXPR
9156 || TREE_CODE (arg0
) == MAX_EXPR
)
9157 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
9159 tem
= optimize_minmax_comparison (loc
, code
, type
, op0
, op1
);
9164 /* Simplify comparison of something with itself. (For IEEE
9165 floating-point, we can only do some of these simplifications.) */
9166 if (operand_equal_p (arg0
, arg1
, 0))
9171 if (! FLOAT_TYPE_P (TREE_TYPE (arg0
))
9172 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
9173 return constant_boolean_node (1, type
);
9178 if (! FLOAT_TYPE_P (TREE_TYPE (arg0
))
9179 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
9180 return constant_boolean_node (1, type
);
9181 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
, arg1
);
9184 /* For NE, we can only do this simplification if integer
9185 or we don't honor IEEE floating point NaNs. */
9186 if (FLOAT_TYPE_P (TREE_TYPE (arg0
))
9187 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
9189 /* ... fall through ... */
9192 return constant_boolean_node (0, type
);
9198 /* If we are comparing an expression that just has comparisons
9199 of two integer values, arithmetic expressions of those comparisons,
9200 and constants, we can simplify it. There are only three cases
9201 to check: the two values can either be equal, the first can be
9202 greater, or the second can be greater. Fold the expression for
9203 those three values. Since each value must be 0 or 1, we have
9204 eight possibilities, each of which corresponds to the constant 0
9205 or 1 or one of the six possible comparisons.
9207 This handles common cases like (a > b) == 0 but also handles
9208 expressions like ((x > y) - (y > x)) > 0, which supposedly
9209 occur in macroized code. */
9211 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) != INTEGER_CST
)
9213 tree cval1
= 0, cval2
= 0;
9216 if (twoval_comparison_p (arg0
, &cval1
, &cval2
, &save_p
)
9217 /* Don't handle degenerate cases here; they should already
9218 have been handled anyway. */
9219 && cval1
!= 0 && cval2
!= 0
9220 && ! (TREE_CONSTANT (cval1
) && TREE_CONSTANT (cval2
))
9221 && TREE_TYPE (cval1
) == TREE_TYPE (cval2
)
9222 && INTEGRAL_TYPE_P (TREE_TYPE (cval1
))
9223 && TYPE_MAX_VALUE (TREE_TYPE (cval1
))
9224 && TYPE_MAX_VALUE (TREE_TYPE (cval2
))
9225 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1
)),
9226 TYPE_MAX_VALUE (TREE_TYPE (cval2
)), 0))
9228 tree maxval
= TYPE_MAX_VALUE (TREE_TYPE (cval1
));
9229 tree minval
= TYPE_MIN_VALUE (TREE_TYPE (cval1
));
9231 /* We can't just pass T to eval_subst in case cval1 or cval2
9232 was the same as ARG1. */
9235 = fold_build2_loc (loc
, code
, type
,
9236 eval_subst (loc
, arg0
, cval1
, maxval
,
9240 = fold_build2_loc (loc
, code
, type
,
9241 eval_subst (loc
, arg0
, cval1
, maxval
,
9245 = fold_build2_loc (loc
, code
, type
,
9246 eval_subst (loc
, arg0
, cval1
, minval
,
9250 /* All three of these results should be 0 or 1. Confirm they are.
9251 Then use those values to select the proper code to use. */
9253 if (TREE_CODE (high_result
) == INTEGER_CST
9254 && TREE_CODE (equal_result
) == INTEGER_CST
9255 && TREE_CODE (low_result
) == INTEGER_CST
)
9257 /* Make a 3-bit mask with the high-order bit being the
9258 value for `>', the next for '=', and the low for '<'. */
9259 switch ((integer_onep (high_result
) * 4)
9260 + (integer_onep (equal_result
) * 2)
9261 + integer_onep (low_result
))
9265 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
9286 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
9291 tem
= save_expr (build2 (code
, type
, cval1
, cval2
));
9292 SET_EXPR_LOCATION (tem
, loc
);
9295 return fold_build2_loc (loc
, code
, type
, cval1
, cval2
);
9300 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9301 into a single range test. */
9302 if ((TREE_CODE (arg0
) == TRUNC_DIV_EXPR
9303 || TREE_CODE (arg0
) == EXACT_DIV_EXPR
)
9304 && TREE_CODE (arg1
) == INTEGER_CST
9305 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9306 && !integer_zerop (TREE_OPERAND (arg0
, 1))
9307 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
9308 && !TREE_OVERFLOW (arg1
))
9310 tem
= fold_div_compare (loc
, code
, type
, arg0
, arg1
);
9311 if (tem
!= NULL_TREE
)
9315 /* Fold ~X op ~Y as Y op X. */
9316 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9317 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
9319 tree cmp_type
= TREE_TYPE (TREE_OPERAND (arg0
, 0));
9320 return fold_build2_loc (loc
, code
, type
,
9321 fold_convert_loc (loc
, cmp_type
,
9322 TREE_OPERAND (arg1
, 0)),
9323 TREE_OPERAND (arg0
, 0));
9326 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9327 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9328 && TREE_CODE (arg1
) == INTEGER_CST
)
9330 tree cmp_type
= TREE_TYPE (TREE_OPERAND (arg0
, 0));
9331 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
,
9332 TREE_OPERAND (arg0
, 0),
9333 fold_build1_loc (loc
, BIT_NOT_EXPR
, cmp_type
,
9334 fold_convert_loc (loc
, cmp_type
, arg1
)));
9341 /* Subroutine of fold_binary. Optimize complex multiplications of the
9342 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9343 argument EXPR represents the expression "z" of type TYPE. */
9346 fold_mult_zconjz (location_t loc
, tree type
, tree expr
)
9348 tree itype
= TREE_TYPE (type
);
9349 tree rpart
, ipart
, tem
;
9351 if (TREE_CODE (expr
) == COMPLEX_EXPR
)
9353 rpart
= TREE_OPERAND (expr
, 0);
9354 ipart
= TREE_OPERAND (expr
, 1);
9356 else if (TREE_CODE (expr
) == COMPLEX_CST
)
9358 rpart
= TREE_REALPART (expr
);
9359 ipart
= TREE_IMAGPART (expr
);
9363 expr
= save_expr (expr
);
9364 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, itype
, expr
);
9365 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, itype
, expr
);
9368 rpart
= save_expr (rpart
);
9369 ipart
= save_expr (ipart
);
9370 tem
= fold_build2_loc (loc
, PLUS_EXPR
, itype
,
9371 fold_build2_loc (loc
, MULT_EXPR
, itype
, rpart
, rpart
),
9372 fold_build2_loc (loc
, MULT_EXPR
, itype
, ipart
, ipart
));
9373 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, tem
,
9374 build_zero_cst (itype
));
9378 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9379 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9380 guarantees that P and N have the same least significant log2(M) bits.
9381 N is not otherwise constrained. In particular, N is not normalized to
9382 0 <= N < M as is common. In general, the precise value of P is unknown.
9383 M is chosen as large as possible such that constant N can be determined.
9385 Returns M and sets *RESIDUE to N.
9387 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9388 account. This is not always possible due to PR 35705.
9391 static unsigned HOST_WIDE_INT
9392 get_pointer_modulus_and_residue (tree expr
, unsigned HOST_WIDE_INT
*residue
,
9393 bool allow_func_align
)
9395 enum tree_code code
;
9399 code
= TREE_CODE (expr
);
9400 if (code
== ADDR_EXPR
)
9402 unsigned int bitalign
;
9403 bitalign
= get_object_alignment_1 (TREE_OPERAND (expr
, 0), residue
);
9404 *residue
/= BITS_PER_UNIT
;
9405 return bitalign
/ BITS_PER_UNIT
;
9407 else if (code
== POINTER_PLUS_EXPR
)
9410 unsigned HOST_WIDE_INT modulus
;
9411 enum tree_code inner_code
;
9413 op0
= TREE_OPERAND (expr
, 0);
9415 modulus
= get_pointer_modulus_and_residue (op0
, residue
,
9418 op1
= TREE_OPERAND (expr
, 1);
9420 inner_code
= TREE_CODE (op1
);
9421 if (inner_code
== INTEGER_CST
)
9423 *residue
+= TREE_INT_CST_LOW (op1
);
9426 else if (inner_code
== MULT_EXPR
)
9428 op1
= TREE_OPERAND (op1
, 1);
9429 if (TREE_CODE (op1
) == INTEGER_CST
)
9431 unsigned HOST_WIDE_INT align
;
9433 /* Compute the greatest power-of-2 divisor of op1. */
9434 align
= TREE_INT_CST_LOW (op1
);
9437 /* If align is non-zero and less than *modulus, replace
9438 *modulus with align., If align is 0, then either op1 is 0
9439 or the greatest power-of-2 divisor of op1 doesn't fit in an
9440 unsigned HOST_WIDE_INT. In either case, no additional
9441 constraint is imposed. */
9443 modulus
= MIN (modulus
, align
);
9450 /* If we get here, we were unable to determine anything useful about the
9456 /* Fold a binary expression of code CODE and type TYPE with operands
9457 OP0 and OP1. LOC is the location of the resulting expression.
9458 Return the folded expression if folding is successful. Otherwise,
9459 return NULL_TREE. */
9462 fold_binary_loc (location_t loc
,
9463 enum tree_code code
, tree type
, tree op0
, tree op1
)
9465 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
9466 tree arg0
, arg1
, tem
;
9467 tree t1
= NULL_TREE
;
9468 bool strict_overflow_p
;
9470 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
9471 && TREE_CODE_LENGTH (code
) == 2
9473 && op1
!= NULL_TREE
);
9478 /* Strip any conversions that don't change the mode. This is
9479 safe for every expression, except for a comparison expression
9480 because its signedness is derived from its operands. So, in
9481 the latter case, only strip conversions that don't change the
9482 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9485 Note that this is done as an internal manipulation within the
9486 constant folder, in order to find the simplest representation
9487 of the arguments so that their form can be studied. In any
9488 cases, the appropriate type conversions should be put back in
9489 the tree that will get out of the constant folder. */
9491 if (kind
== tcc_comparison
|| code
== MIN_EXPR
|| code
== MAX_EXPR
)
9493 STRIP_SIGN_NOPS (arg0
);
9494 STRIP_SIGN_NOPS (arg1
);
9502 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9503 constant but we can't do arithmetic on them. */
9504 if ((TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
9505 || (TREE_CODE (arg0
) == REAL_CST
&& TREE_CODE (arg1
) == REAL_CST
)
9506 || (TREE_CODE (arg0
) == FIXED_CST
&& TREE_CODE (arg1
) == FIXED_CST
)
9507 || (TREE_CODE (arg0
) == FIXED_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
9508 || (TREE_CODE (arg0
) == COMPLEX_CST
&& TREE_CODE (arg1
) == COMPLEX_CST
)
9509 || (TREE_CODE (arg0
) == VECTOR_CST
&& TREE_CODE (arg1
) == VECTOR_CST
))
9511 if (kind
== tcc_binary
)
9513 /* Make sure type and arg0 have the same saturating flag. */
9514 gcc_assert (TYPE_SATURATING (type
)
9515 == TYPE_SATURATING (TREE_TYPE (arg0
)));
9516 tem
= const_binop (code
, arg0
, arg1
);
9518 else if (kind
== tcc_comparison
)
9519 tem
= fold_relational_const (code
, type
, arg0
, arg1
);
9523 if (tem
!= NULL_TREE
)
9525 if (TREE_TYPE (tem
) != type
)
9526 tem
= fold_convert_loc (loc
, type
, tem
);
9531 /* If this is a commutative operation, and ARG0 is a constant, move it
9532 to ARG1 to reduce the number of tests below. */
9533 if (commutative_tree_code (code
)
9534 && tree_swap_operands_p (arg0
, arg1
, true))
9535 return fold_build2_loc (loc
, code
, type
, op1
, op0
);
9537 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9539 First check for cases where an arithmetic operation is applied to a
9540 compound, conditional, or comparison operation. Push the arithmetic
9541 operation inside the compound or conditional to see if any folding
9542 can then be done. Convert comparison to conditional for this purpose.
9543 The also optimizes non-constant cases that used to be done in
9546 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9547 one of the operands is a comparison and the other is a comparison, a
9548 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9549 code below would make the expression more complex. Change it to a
9550 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9551 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9553 if ((code
== BIT_AND_EXPR
|| code
== BIT_IOR_EXPR
9554 || code
== EQ_EXPR
|| code
== NE_EXPR
)
9555 && ((truth_value_p (TREE_CODE (arg0
))
9556 && (truth_value_p (TREE_CODE (arg1
))
9557 || (TREE_CODE (arg1
) == BIT_AND_EXPR
9558 && integer_onep (TREE_OPERAND (arg1
, 1)))))
9559 || (truth_value_p (TREE_CODE (arg1
))
9560 && (truth_value_p (TREE_CODE (arg0
))
9561 || (TREE_CODE (arg0
) == BIT_AND_EXPR
9562 && integer_onep (TREE_OPERAND (arg0
, 1)))))))
9564 tem
= fold_build2_loc (loc
, code
== BIT_AND_EXPR
? TRUTH_AND_EXPR
9565 : code
== BIT_IOR_EXPR
? TRUTH_OR_EXPR
9568 fold_convert_loc (loc
, boolean_type_node
, arg0
),
9569 fold_convert_loc (loc
, boolean_type_node
, arg1
));
9571 if (code
== EQ_EXPR
)
9572 tem
= invert_truthvalue_loc (loc
, tem
);
9574 return fold_convert_loc (loc
, type
, tem
);
9577 if (TREE_CODE_CLASS (code
) == tcc_binary
9578 || TREE_CODE_CLASS (code
) == tcc_comparison
)
9580 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
9582 tem
= fold_build2_loc (loc
, code
, type
,
9583 fold_convert_loc (loc
, TREE_TYPE (op0
),
9584 TREE_OPERAND (arg0
, 1)), op1
);
9585 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
9588 if (TREE_CODE (arg1
) == COMPOUND_EXPR
9589 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
9591 tem
= fold_build2_loc (loc
, code
, type
, op0
,
9592 fold_convert_loc (loc
, TREE_TYPE (op1
),
9593 TREE_OPERAND (arg1
, 1)));
9594 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
9598 if (TREE_CODE (arg0
) == COND_EXPR
|| COMPARISON_CLASS_P (arg0
))
9600 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
9602 /*cond_first_p=*/1);
9603 if (tem
!= NULL_TREE
)
9607 if (TREE_CODE (arg1
) == COND_EXPR
|| COMPARISON_CLASS_P (arg1
))
9609 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
9611 /*cond_first_p=*/0);
9612 if (tem
!= NULL_TREE
)
9620 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9621 if (TREE_CODE (arg0
) == ADDR_EXPR
9622 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == MEM_REF
)
9624 tree iref
= TREE_OPERAND (arg0
, 0);
9625 return fold_build2 (MEM_REF
, type
,
9626 TREE_OPERAND (iref
, 0),
9627 int_const_binop (PLUS_EXPR
, arg1
,
9628 TREE_OPERAND (iref
, 1)));
9631 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9632 if (TREE_CODE (arg0
) == ADDR_EXPR
9633 && handled_component_p (TREE_OPERAND (arg0
, 0)))
9636 HOST_WIDE_INT coffset
;
9637 base
= get_addr_base_and_unit_offset (TREE_OPERAND (arg0
, 0),
9641 return fold_build2 (MEM_REF
, type
,
9642 build_fold_addr_expr (base
),
9643 int_const_binop (PLUS_EXPR
, arg1
,
9644 size_int (coffset
)));
9649 case POINTER_PLUS_EXPR
:
9650 /* 0 +p index -> (type)index */
9651 if (integer_zerop (arg0
))
9652 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
9654 /* PTR +p 0 -> PTR */
9655 if (integer_zerop (arg1
))
9656 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
9658 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9659 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
9660 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
9661 return fold_convert_loc (loc
, type
,
9662 fold_build2_loc (loc
, PLUS_EXPR
, sizetype
,
9663 fold_convert_loc (loc
, sizetype
,
9665 fold_convert_loc (loc
, sizetype
,
9668 /* (PTR +p B) +p A -> PTR +p (B + A) */
9669 if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
9672 tree arg01
= fold_convert_loc (loc
, sizetype
, TREE_OPERAND (arg0
, 1));
9673 tree arg00
= TREE_OPERAND (arg0
, 0);
9674 inner
= fold_build2_loc (loc
, PLUS_EXPR
, sizetype
,
9675 arg01
, fold_convert_loc (loc
, sizetype
, arg1
));
9676 return fold_convert_loc (loc
, type
,
9677 fold_build_pointer_plus_loc (loc
,
9681 /* PTR_CST +p CST -> CST1 */
9682 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
9683 return fold_build2_loc (loc
, PLUS_EXPR
, type
, arg0
,
9684 fold_convert_loc (loc
, type
, arg1
));
9686 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9687 of the array. Loop optimizer sometimes produce this type of
9689 if (TREE_CODE (arg0
) == ADDR_EXPR
)
9691 tem
= try_move_mult_to_index (loc
, arg0
,
9692 fold_convert_loc (loc
, sizetype
, arg1
));
9694 return fold_convert_loc (loc
, type
, tem
);
9700 /* A + (-B) -> A - B */
9701 if (TREE_CODE (arg1
) == NEGATE_EXPR
)
9702 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
9703 fold_convert_loc (loc
, type
, arg0
),
9704 fold_convert_loc (loc
, type
,
9705 TREE_OPERAND (arg1
, 0)));
9706 /* (-A) + B -> B - A */
9707 if (TREE_CODE (arg0
) == NEGATE_EXPR
9708 && reorder_operands_p (TREE_OPERAND (arg0
, 0), arg1
))
9709 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
9710 fold_convert_loc (loc
, type
, arg1
),
9711 fold_convert_loc (loc
, type
,
9712 TREE_OPERAND (arg0
, 0)));
9714 if (INTEGRAL_TYPE_P (type
))
9716 /* Convert ~A + 1 to -A. */
9717 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9718 && integer_onep (arg1
))
9719 return fold_build1_loc (loc
, NEGATE_EXPR
, type
,
9720 fold_convert_loc (loc
, type
,
9721 TREE_OPERAND (arg0
, 0)));
9724 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9725 && !TYPE_OVERFLOW_TRAPS (type
))
9727 tree tem
= TREE_OPERAND (arg0
, 0);
9730 if (operand_equal_p (tem
, arg1
, 0))
9732 t1
= build_int_cst_type (type
, -1);
9733 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
9738 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
9739 && !TYPE_OVERFLOW_TRAPS (type
))
9741 tree tem
= TREE_OPERAND (arg1
, 0);
9744 if (operand_equal_p (arg0
, tem
, 0))
9746 t1
= build_int_cst_type (type
, -1);
9747 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
9751 /* X + (X / CST) * -CST is X % CST. */
9752 if (TREE_CODE (arg1
) == MULT_EXPR
9753 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == TRUNC_DIV_EXPR
9754 && operand_equal_p (arg0
,
9755 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0), 0))
9757 tree cst0
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1);
9758 tree cst1
= TREE_OPERAND (arg1
, 1);
9759 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (cst1
),
9761 if (sum
&& integer_zerop (sum
))
9762 return fold_convert_loc (loc
, type
,
9763 fold_build2_loc (loc
, TRUNC_MOD_EXPR
,
9764 TREE_TYPE (arg0
), arg0
,
9769 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9770 same or one. Make sure type is not saturating.
9771 fold_plusminus_mult_expr will re-associate. */
9772 if ((TREE_CODE (arg0
) == MULT_EXPR
9773 || TREE_CODE (arg1
) == MULT_EXPR
)
9774 && !TYPE_SATURATING (type
)
9775 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
9777 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
9782 if (! FLOAT_TYPE_P (type
))
9784 if (integer_zerop (arg1
))
9785 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
9787 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9788 with a constant, and the two constants have no bits in common,
9789 we should treat this as a BIT_IOR_EXPR since this may produce more
9791 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9792 && TREE_CODE (arg1
) == BIT_AND_EXPR
9793 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9794 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
9795 && integer_zerop (const_binop (BIT_AND_EXPR
,
9796 TREE_OPERAND (arg0
, 1),
9797 TREE_OPERAND (arg1
, 1))))
9799 code
= BIT_IOR_EXPR
;
9803 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9804 (plus (plus (mult) (mult)) (foo)) so that we can
9805 take advantage of the factoring cases below. */
9806 if (TYPE_OVERFLOW_WRAPS (type
)
9807 && (((TREE_CODE (arg0
) == PLUS_EXPR
9808 || TREE_CODE (arg0
) == MINUS_EXPR
)
9809 && TREE_CODE (arg1
) == MULT_EXPR
)
9810 || ((TREE_CODE (arg1
) == PLUS_EXPR
9811 || TREE_CODE (arg1
) == MINUS_EXPR
)
9812 && TREE_CODE (arg0
) == MULT_EXPR
)))
9814 tree parg0
, parg1
, parg
, marg
;
9815 enum tree_code pcode
;
9817 if (TREE_CODE (arg1
) == MULT_EXPR
)
9818 parg
= arg0
, marg
= arg1
;
9820 parg
= arg1
, marg
= arg0
;
9821 pcode
= TREE_CODE (parg
);
9822 parg0
= TREE_OPERAND (parg
, 0);
9823 parg1
= TREE_OPERAND (parg
, 1);
9827 if (TREE_CODE (parg0
) == MULT_EXPR
9828 && TREE_CODE (parg1
) != MULT_EXPR
)
9829 return fold_build2_loc (loc
, pcode
, type
,
9830 fold_build2_loc (loc
, PLUS_EXPR
, type
,
9831 fold_convert_loc (loc
, type
,
9833 fold_convert_loc (loc
, type
,
9835 fold_convert_loc (loc
, type
, parg1
));
9836 if (TREE_CODE (parg0
) != MULT_EXPR
9837 && TREE_CODE (parg1
) == MULT_EXPR
)
9839 fold_build2_loc (loc
, PLUS_EXPR
, type
,
9840 fold_convert_loc (loc
, type
, parg0
),
9841 fold_build2_loc (loc
, pcode
, type
,
9842 fold_convert_loc (loc
, type
, marg
),
9843 fold_convert_loc (loc
, type
,
9849 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9850 if (fold_real_zero_addition_p (TREE_TYPE (arg0
), arg1
, 0))
9851 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
9853 /* Likewise if the operands are reversed. */
9854 if (fold_real_zero_addition_p (TREE_TYPE (arg1
), arg0
, 0))
9855 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
9857 /* Convert X + -C into X - C. */
9858 if (TREE_CODE (arg1
) == REAL_CST
9859 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
)))
9861 tem
= fold_negate_const (arg1
, type
);
9862 if (!TREE_OVERFLOW (arg1
) || !flag_trapping_math
)
9863 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
9864 fold_convert_loc (loc
, type
, arg0
),
9865 fold_convert_loc (loc
, type
, tem
));
9868 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9869 to __complex__ ( x, y ). This is not the same for SNaNs or
9870 if signed zeros are involved. */
9871 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
9872 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
9873 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
9875 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
9876 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
9877 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
9878 bool arg0rz
= false, arg0iz
= false;
9879 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
9880 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
9882 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
9883 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
9884 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
9886 tree rp
= arg1r
? arg1r
9887 : build1 (REALPART_EXPR
, rtype
, arg1
);
9888 tree ip
= arg0i
? arg0i
9889 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
9890 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
9892 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
9894 tree rp
= arg0r
? arg0r
9895 : build1 (REALPART_EXPR
, rtype
, arg0
);
9896 tree ip
= arg1i
? arg1i
9897 : build1 (IMAGPART_EXPR
, rtype
, arg1
);
9898 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
9903 if (flag_unsafe_math_optimizations
9904 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
9905 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
9906 && (tem
= distribute_real_division (loc
, code
, type
, arg0
, arg1
)))
9909 /* Convert x+x into x*2.0. */
9910 if (operand_equal_p (arg0
, arg1
, 0)
9911 && SCALAR_FLOAT_TYPE_P (type
))
9912 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
,
9913 build_real (type
, dconst2
));
9915 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9916 We associate floats only if the user has specified
9917 -fassociative-math. */
9918 if (flag_associative_math
9919 && TREE_CODE (arg1
) == PLUS_EXPR
9920 && TREE_CODE (arg0
) != MULT_EXPR
)
9922 tree tree10
= TREE_OPERAND (arg1
, 0);
9923 tree tree11
= TREE_OPERAND (arg1
, 1);
9924 if (TREE_CODE (tree11
) == MULT_EXPR
9925 && TREE_CODE (tree10
) == MULT_EXPR
)
9928 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, arg0
, tree10
);
9929 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree0
, tree11
);
9932 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9933 We associate floats only if the user has specified
9934 -fassociative-math. */
9935 if (flag_associative_math
9936 && TREE_CODE (arg0
) == PLUS_EXPR
9937 && TREE_CODE (arg1
) != MULT_EXPR
)
9939 tree tree00
= TREE_OPERAND (arg0
, 0);
9940 tree tree01
= TREE_OPERAND (arg0
, 1);
9941 if (TREE_CODE (tree01
) == MULT_EXPR
9942 && TREE_CODE (tree00
) == MULT_EXPR
)
9945 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, tree01
, arg1
);
9946 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree00
, tree0
);
9952 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9953 is a rotate of A by C1 bits. */
9954 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9955 is a rotate of A by B bits. */
9957 enum tree_code code0
, code1
;
9959 code0
= TREE_CODE (arg0
);
9960 code1
= TREE_CODE (arg1
);
9961 if (((code0
== RSHIFT_EXPR
&& code1
== LSHIFT_EXPR
)
9962 || (code1
== RSHIFT_EXPR
&& code0
== LSHIFT_EXPR
))
9963 && operand_equal_p (TREE_OPERAND (arg0
, 0),
9964 TREE_OPERAND (arg1
, 0), 0)
9965 && (rtype
= TREE_TYPE (TREE_OPERAND (arg0
, 0)),
9966 TYPE_UNSIGNED (rtype
))
9967 /* Only create rotates in complete modes. Other cases are not
9968 expanded properly. */
9969 && TYPE_PRECISION (rtype
) == GET_MODE_PRECISION (TYPE_MODE (rtype
)))
9971 tree tree01
, tree11
;
9972 enum tree_code code01
, code11
;
9974 tree01
= TREE_OPERAND (arg0
, 1);
9975 tree11
= TREE_OPERAND (arg1
, 1);
9976 STRIP_NOPS (tree01
);
9977 STRIP_NOPS (tree11
);
9978 code01
= TREE_CODE (tree01
);
9979 code11
= TREE_CODE (tree11
);
9980 if (code01
== INTEGER_CST
9981 && code11
== INTEGER_CST
9982 && TREE_INT_CST_HIGH (tree01
) == 0
9983 && TREE_INT_CST_HIGH (tree11
) == 0
9984 && ((TREE_INT_CST_LOW (tree01
) + TREE_INT_CST_LOW (tree11
))
9985 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 0)))))
9987 tem
= build2_loc (loc
, LROTATE_EXPR
,
9988 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
9989 TREE_OPERAND (arg0
, 0),
9990 code0
== LSHIFT_EXPR
? tree01
: tree11
);
9991 return fold_convert_loc (loc
, type
, tem
);
9993 else if (code11
== MINUS_EXPR
)
9995 tree tree110
, tree111
;
9996 tree110
= TREE_OPERAND (tree11
, 0);
9997 tree111
= TREE_OPERAND (tree11
, 1);
9998 STRIP_NOPS (tree110
);
9999 STRIP_NOPS (tree111
);
10000 if (TREE_CODE (tree110
) == INTEGER_CST
10001 && 0 == compare_tree_int (tree110
,
10003 (TREE_TYPE (TREE_OPERAND
10005 && operand_equal_p (tree01
, tree111
, 0))
10007 fold_convert_loc (loc
, type
,
10008 build2 ((code0
== LSHIFT_EXPR
10011 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10012 TREE_OPERAND (arg0
, 0), tree01
));
10014 else if (code01
== MINUS_EXPR
)
10016 tree tree010
, tree011
;
10017 tree010
= TREE_OPERAND (tree01
, 0);
10018 tree011
= TREE_OPERAND (tree01
, 1);
10019 STRIP_NOPS (tree010
);
10020 STRIP_NOPS (tree011
);
10021 if (TREE_CODE (tree010
) == INTEGER_CST
10022 && 0 == compare_tree_int (tree010
,
10024 (TREE_TYPE (TREE_OPERAND
10026 && operand_equal_p (tree11
, tree011
, 0))
10027 return fold_convert_loc
10029 build2 ((code0
!= LSHIFT_EXPR
10032 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10033 TREE_OPERAND (arg0
, 0), tree11
));
10039 /* In most languages, can't associate operations on floats through
10040 parentheses. Rather than remember where the parentheses were, we
10041 don't associate floats at all, unless the user has specified
10042 -fassociative-math.
10043 And, we need to make sure type is not saturating. */
10045 if ((! FLOAT_TYPE_P (type
) || flag_associative_math
)
10046 && !TYPE_SATURATING (type
))
10048 tree var0
, con0
, lit0
, minus_lit0
;
10049 tree var1
, con1
, lit1
, minus_lit1
;
10052 /* Split both trees into variables, constants, and literals. Then
10053 associate each group together, the constants with literals,
10054 then the result with variables. This increases the chances of
10055 literals being recombined later and of generating relocatable
10056 expressions for the sum of a constant and literal. */
10057 var0
= split_tree (arg0
, code
, &con0
, &lit0
, &minus_lit0
, 0);
10058 var1
= split_tree (arg1
, code
, &con1
, &lit1
, &minus_lit1
,
10059 code
== MINUS_EXPR
);
10061 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10062 if (code
== MINUS_EXPR
)
10065 /* With undefined overflow we can only associate constants with one
10066 variable, and constants whose association doesn't overflow. */
10067 if ((POINTER_TYPE_P (type
) && POINTER_TYPE_OVERFLOW_UNDEFINED
)
10068 || (INTEGRAL_TYPE_P (type
) && !TYPE_OVERFLOW_WRAPS (type
)))
10075 if (TREE_CODE (tmp0
) == NEGATE_EXPR
)
10076 tmp0
= TREE_OPERAND (tmp0
, 0);
10077 if (TREE_CODE (tmp1
) == NEGATE_EXPR
)
10078 tmp1
= TREE_OPERAND (tmp1
, 0);
10079 /* The only case we can still associate with two variables
10080 is if they are the same, modulo negation. */
10081 if (!operand_equal_p (tmp0
, tmp1
, 0))
10085 if (ok
&& lit0
&& lit1
)
10087 tree tmp0
= fold_convert (type
, lit0
);
10088 tree tmp1
= fold_convert (type
, lit1
);
10090 if (!TREE_OVERFLOW (tmp0
) && !TREE_OVERFLOW (tmp1
)
10091 && TREE_OVERFLOW (fold_build2 (code
, type
, tmp0
, tmp1
)))
10096 /* Only do something if we found more than two objects. Otherwise,
10097 nothing has changed and we risk infinite recursion. */
10099 && (2 < ((var0
!= 0) + (var1
!= 0)
10100 + (con0
!= 0) + (con1
!= 0)
10101 + (lit0
!= 0) + (lit1
!= 0)
10102 + (minus_lit0
!= 0) + (minus_lit1
!= 0))))
10104 var0
= associate_trees (loc
, var0
, var1
, code
, type
);
10105 con0
= associate_trees (loc
, con0
, con1
, code
, type
);
10106 lit0
= associate_trees (loc
, lit0
, lit1
, code
, type
);
10107 minus_lit0
= associate_trees (loc
, minus_lit0
, minus_lit1
, code
, type
);
10109 /* Preserve the MINUS_EXPR if the negative part of the literal is
10110 greater than the positive part. Otherwise, the multiplicative
10111 folding code (i.e extract_muldiv) may be fooled in case
10112 unsigned constants are subtracted, like in the following
10113 example: ((X*2 + 4) - 8U)/2. */
10114 if (minus_lit0
&& lit0
)
10116 if (TREE_CODE (lit0
) == INTEGER_CST
10117 && TREE_CODE (minus_lit0
) == INTEGER_CST
10118 && tree_int_cst_lt (lit0
, minus_lit0
))
10120 minus_lit0
= associate_trees (loc
, minus_lit0
, lit0
,
10126 lit0
= associate_trees (loc
, lit0
, minus_lit0
,
10135 fold_convert_loc (loc
, type
,
10136 associate_trees (loc
, var0
, minus_lit0
,
10137 MINUS_EXPR
, type
));
10140 con0
= associate_trees (loc
, con0
, minus_lit0
,
10143 fold_convert_loc (loc
, type
,
10144 associate_trees (loc
, var0
, con0
,
10149 con0
= associate_trees (loc
, con0
, lit0
, code
, type
);
10151 fold_convert_loc (loc
, type
, associate_trees (loc
, var0
, con0
,
10159 /* Pointer simplifications for subtraction, simple reassociations. */
10160 if (POINTER_TYPE_P (TREE_TYPE (arg1
)) && POINTER_TYPE_P (TREE_TYPE (arg0
)))
10162 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10163 if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
10164 && TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
10166 tree arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10167 tree arg01
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
10168 tree arg10
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
10169 tree arg11
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
10170 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
10171 fold_build2_loc (loc
, MINUS_EXPR
, type
,
10173 fold_build2_loc (loc
, MINUS_EXPR
, type
,
10176 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10177 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
10179 tree arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10180 tree arg01
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
10181 tree tmp
= fold_binary_loc (loc
, MINUS_EXPR
, type
, arg00
,
10182 fold_convert_loc (loc
, type
, arg1
));
10184 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tmp
, arg01
);
10187 /* A - (-B) -> A + B */
10188 if (TREE_CODE (arg1
) == NEGATE_EXPR
)
10189 return fold_build2_loc (loc
, PLUS_EXPR
, type
, op0
,
10190 fold_convert_loc (loc
, type
,
10191 TREE_OPERAND (arg1
, 0)));
10192 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10193 if (TREE_CODE (arg0
) == NEGATE_EXPR
10194 && (FLOAT_TYPE_P (type
)
10195 || INTEGRAL_TYPE_P (type
))
10196 && negate_expr_p (arg1
)
10197 && reorder_operands_p (arg0
, arg1
))
10198 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
10199 fold_convert_loc (loc
, type
,
10200 negate_expr (arg1
)),
10201 fold_convert_loc (loc
, type
,
10202 TREE_OPERAND (arg0
, 0)));
10203 /* Convert -A - 1 to ~A. */
10204 if (INTEGRAL_TYPE_P (type
)
10205 && TREE_CODE (arg0
) == NEGATE_EXPR
10206 && integer_onep (arg1
)
10207 && !TYPE_OVERFLOW_TRAPS (type
))
10208 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
10209 fold_convert_loc (loc
, type
,
10210 TREE_OPERAND (arg0
, 0)));
10212 /* Convert -1 - A to ~A. */
10213 if (INTEGRAL_TYPE_P (type
)
10214 && integer_all_onesp (arg0
))
10215 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, op1
);
10218 /* X - (X / CST) * CST is X % CST. */
10219 if (INTEGRAL_TYPE_P (type
)
10220 && TREE_CODE (arg1
) == MULT_EXPR
10221 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == TRUNC_DIV_EXPR
10222 && operand_equal_p (arg0
,
10223 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0), 0)
10224 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1),
10225 TREE_OPERAND (arg1
, 1), 0))
10227 fold_convert_loc (loc
, type
,
10228 fold_build2_loc (loc
, TRUNC_MOD_EXPR
, TREE_TYPE (arg0
),
10229 arg0
, TREE_OPERAND (arg1
, 1)));
10231 if (! FLOAT_TYPE_P (type
))
10233 if (integer_zerop (arg0
))
10234 return negate_expr (fold_convert_loc (loc
, type
, arg1
));
10235 if (integer_zerop (arg1
))
10236 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10238 /* Fold A - (A & B) into ~B & A. */
10239 if (!TREE_SIDE_EFFECTS (arg0
)
10240 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
10242 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0))
10244 tree arg10
= fold_convert_loc (loc
, type
,
10245 TREE_OPERAND (arg1
, 0));
10246 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10247 fold_build1_loc (loc
, BIT_NOT_EXPR
,
10249 fold_convert_loc (loc
, type
, arg0
));
10251 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10253 tree arg11
= fold_convert_loc (loc
,
10254 type
, TREE_OPERAND (arg1
, 1));
10255 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10256 fold_build1_loc (loc
, BIT_NOT_EXPR
,
10258 fold_convert_loc (loc
, type
, arg0
));
10262 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10263 any power of 2 minus 1. */
10264 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10265 && TREE_CODE (arg1
) == BIT_AND_EXPR
10266 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10267 TREE_OPERAND (arg1
, 0), 0))
10269 tree mask0
= TREE_OPERAND (arg0
, 1);
10270 tree mask1
= TREE_OPERAND (arg1
, 1);
10271 tree tem
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, mask0
);
10273 if (operand_equal_p (tem
, mask1
, 0))
10275 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
10276 TREE_OPERAND (arg0
, 0), mask1
);
10277 return fold_build2_loc (loc
, MINUS_EXPR
, type
, tem
, mask1
);
10282 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10283 else if (fold_real_zero_addition_p (TREE_TYPE (arg0
), arg1
, 1))
10284 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10286 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10287 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10288 (-ARG1 + ARG0) reduces to -ARG1. */
10289 else if (fold_real_zero_addition_p (TREE_TYPE (arg1
), arg0
, 0))
10290 return negate_expr (fold_convert_loc (loc
, type
, arg1
));
10292 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10293 __complex__ ( x, -y ). This is not the same for SNaNs or if
10294 signed zeros are involved. */
10295 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
10296 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10297 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10299 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10300 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
10301 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
10302 bool arg0rz
= false, arg0iz
= false;
10303 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
10304 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
10306 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
10307 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
10308 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
10310 tree rp
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
10312 : build1 (REALPART_EXPR
, rtype
, arg1
));
10313 tree ip
= arg0i
? arg0i
10314 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
10315 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10317 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
10319 tree rp
= arg0r
? arg0r
10320 : build1 (REALPART_EXPR
, rtype
, arg0
);
10321 tree ip
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
10323 : build1 (IMAGPART_EXPR
, rtype
, arg1
));
10324 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10329 /* Fold &x - &x. This can happen from &x.foo - &x.
10330 This is unsafe for certain floats even in non-IEEE formats.
10331 In IEEE, it is unsafe because it does wrong for NaNs.
10332 Also note that operand_equal_p is always false if an operand
10335 if ((!FLOAT_TYPE_P (type
) || !HONOR_NANS (TYPE_MODE (type
)))
10336 && operand_equal_p (arg0
, arg1
, 0))
10337 return build_zero_cst (type
);
10339 /* A - B -> A + (-B) if B is easily negatable. */
10340 if (negate_expr_p (arg1
)
10341 && ((FLOAT_TYPE_P (type
)
10342 /* Avoid this transformation if B is a positive REAL_CST. */
10343 && (TREE_CODE (arg1
) != REAL_CST
10344 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
))))
10345 || INTEGRAL_TYPE_P (type
)))
10346 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
10347 fold_convert_loc (loc
, type
, arg0
),
10348 fold_convert_loc (loc
, type
,
10349 negate_expr (arg1
)));
10351 /* Try folding difference of addresses. */
10353 HOST_WIDE_INT diff
;
10355 if ((TREE_CODE (arg0
) == ADDR_EXPR
10356 || TREE_CODE (arg1
) == ADDR_EXPR
)
10357 && ptr_difference_const (arg0
, arg1
, &diff
))
10358 return build_int_cst_type (type
, diff
);
10361 /* Fold &a[i] - &a[j] to i-j. */
10362 if (TREE_CODE (arg0
) == ADDR_EXPR
10363 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ARRAY_REF
10364 && TREE_CODE (arg1
) == ADDR_EXPR
10365 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ARRAY_REF
)
10367 tree aref0
= TREE_OPERAND (arg0
, 0);
10368 tree aref1
= TREE_OPERAND (arg1
, 0);
10369 if (operand_equal_p (TREE_OPERAND (aref0
, 0),
10370 TREE_OPERAND (aref1
, 0), 0))
10372 tree op0
= fold_convert_loc (loc
, type
, TREE_OPERAND (aref0
, 1));
10373 tree op1
= fold_convert_loc (loc
, type
, TREE_OPERAND (aref1
, 1));
10374 tree esz
= array_ref_element_size (aref0
);
10375 tree diff
= build2 (MINUS_EXPR
, type
, op0
, op1
);
10376 return fold_build2_loc (loc
, MULT_EXPR
, type
, diff
,
10377 fold_convert_loc (loc
, type
, esz
));
10382 if (FLOAT_TYPE_P (type
)
10383 && flag_unsafe_math_optimizations
10384 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
10385 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
10386 && (tem
= distribute_real_division (loc
, code
, type
, arg0
, arg1
)))
10389 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10390 same or one. Make sure type is not saturating.
10391 fold_plusminus_mult_expr will re-associate. */
10392 if ((TREE_CODE (arg0
) == MULT_EXPR
10393 || TREE_CODE (arg1
) == MULT_EXPR
)
10394 && !TYPE_SATURATING (type
)
10395 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
10397 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
10405 /* (-A) * (-B) -> A * B */
10406 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
10407 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10408 fold_convert_loc (loc
, type
,
10409 TREE_OPERAND (arg0
, 0)),
10410 fold_convert_loc (loc
, type
,
10411 negate_expr (arg1
)));
10412 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
10413 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10414 fold_convert_loc (loc
, type
,
10415 negate_expr (arg0
)),
10416 fold_convert_loc (loc
, type
,
10417 TREE_OPERAND (arg1
, 0)));
10419 if (! FLOAT_TYPE_P (type
))
10421 if (integer_zerop (arg1
))
10422 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
10423 if (integer_onep (arg1
))
10424 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10425 /* Transform x * -1 into -x. Make sure to do the negation
10426 on the original operand with conversions not stripped
10427 because we can only strip non-sign-changing conversions. */
10428 if (integer_all_onesp (arg1
))
10429 return fold_convert_loc (loc
, type
, negate_expr (op0
));
10430 /* Transform x * -C into -x * C if x is easily negatable. */
10431 if (TREE_CODE (arg1
) == INTEGER_CST
10432 && tree_int_cst_sgn (arg1
) == -1
10433 && negate_expr_p (arg0
)
10434 && (tem
= negate_expr (arg1
)) != arg1
10435 && !TREE_OVERFLOW (tem
))
10436 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10437 fold_convert_loc (loc
, type
,
10438 negate_expr (arg0
)),
10441 /* (a * (1 << b)) is (a << b) */
10442 if (TREE_CODE (arg1
) == LSHIFT_EXPR
10443 && integer_onep (TREE_OPERAND (arg1
, 0)))
10444 return fold_build2_loc (loc
, LSHIFT_EXPR
, type
, op0
,
10445 TREE_OPERAND (arg1
, 1));
10446 if (TREE_CODE (arg0
) == LSHIFT_EXPR
10447 && integer_onep (TREE_OPERAND (arg0
, 0)))
10448 return fold_build2_loc (loc
, LSHIFT_EXPR
, type
, op1
,
10449 TREE_OPERAND (arg0
, 1));
10451 /* (A + A) * C -> A * 2 * C */
10452 if (TREE_CODE (arg0
) == PLUS_EXPR
10453 && TREE_CODE (arg1
) == INTEGER_CST
10454 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10455 TREE_OPERAND (arg0
, 1), 0))
10456 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10457 omit_one_operand_loc (loc
, type
,
10458 TREE_OPERAND (arg0
, 0),
10459 TREE_OPERAND (arg0
, 1)),
10460 fold_build2_loc (loc
, MULT_EXPR
, type
,
10461 build_int_cst (type
, 2) , arg1
));
10463 strict_overflow_p
= false;
10464 if (TREE_CODE (arg1
) == INTEGER_CST
10465 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
10466 &strict_overflow_p
)))
10468 if (strict_overflow_p
)
10469 fold_overflow_warning (("assuming signed overflow does not "
10470 "occur when simplifying "
10472 WARN_STRICT_OVERFLOW_MISC
);
10473 return fold_convert_loc (loc
, type
, tem
);
10476 /* Optimize z * conj(z) for integer complex numbers. */
10477 if (TREE_CODE (arg0
) == CONJ_EXPR
10478 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10479 return fold_mult_zconjz (loc
, type
, arg1
);
10480 if (TREE_CODE (arg1
) == CONJ_EXPR
10481 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10482 return fold_mult_zconjz (loc
, type
, arg0
);
10486 /* Maybe fold x * 0 to 0. The expressions aren't the same
10487 when x is NaN, since x * 0 is also NaN. Nor are they the
10488 same in modes with signed zeros, since multiplying a
10489 negative value by 0 gives -0, not +0. */
10490 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
10491 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10492 && real_zerop (arg1
))
10493 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
10494 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10495 Likewise for complex arithmetic with signed zeros. */
10496 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
10497 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10498 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10499 && real_onep (arg1
))
10500 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10502 /* Transform x * -1.0 into -x. */
10503 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
10504 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10505 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10506 && real_minus_onep (arg1
))
10507 return fold_convert_loc (loc
, type
, negate_expr (arg0
));
10509 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10510 the result for floating point types due to rounding so it is applied
10511 only if -fassociative-math was specify. */
10512 if (flag_associative_math
10513 && TREE_CODE (arg0
) == RDIV_EXPR
10514 && TREE_CODE (arg1
) == REAL_CST
10515 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
)
10517 tree tem
= const_binop (MULT_EXPR
, TREE_OPERAND (arg0
, 0),
10520 return fold_build2_loc (loc
, RDIV_EXPR
, type
, tem
,
10521 TREE_OPERAND (arg0
, 1));
10524 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10525 if (operand_equal_p (arg0
, arg1
, 0))
10527 tree tem
= fold_strip_sign_ops (arg0
);
10528 if (tem
!= NULL_TREE
)
10530 tem
= fold_convert_loc (loc
, type
, tem
);
10531 return fold_build2_loc (loc
, MULT_EXPR
, type
, tem
, tem
);
10535 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10536 This is not the same for NaNs or if signed zeros are
10538 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
10539 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10540 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
))
10541 && TREE_CODE (arg1
) == COMPLEX_CST
10542 && real_zerop (TREE_REALPART (arg1
)))
10544 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10545 if (real_onep (TREE_IMAGPART (arg1
)))
10547 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
10548 negate_expr (fold_build1_loc (loc
, IMAGPART_EXPR
,
10550 fold_build1_loc (loc
, REALPART_EXPR
, rtype
, arg0
));
10551 else if (real_minus_onep (TREE_IMAGPART (arg1
)))
10553 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
10554 fold_build1_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
),
10555 negate_expr (fold_build1_loc (loc
, REALPART_EXPR
,
10559 /* Optimize z * conj(z) for floating point complex numbers.
10560 Guarded by flag_unsafe_math_optimizations as non-finite
10561 imaginary components don't produce scalar results. */
10562 if (flag_unsafe_math_optimizations
10563 && TREE_CODE (arg0
) == CONJ_EXPR
10564 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10565 return fold_mult_zconjz (loc
, type
, arg1
);
10566 if (flag_unsafe_math_optimizations
10567 && TREE_CODE (arg1
) == CONJ_EXPR
10568 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10569 return fold_mult_zconjz (loc
, type
, arg0
);
10571 if (flag_unsafe_math_optimizations
)
10573 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
10574 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
10576 /* Optimizations of root(...)*root(...). */
10577 if (fcode0
== fcode1
&& BUILTIN_ROOT_P (fcode0
))
10580 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10581 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10583 /* Optimize sqrt(x)*sqrt(x) as x. */
10584 if (BUILTIN_SQRT_P (fcode0
)
10585 && operand_equal_p (arg00
, arg10
, 0)
10586 && ! HONOR_SNANS (TYPE_MODE (type
)))
10589 /* Optimize root(x)*root(y) as root(x*y). */
10590 rootfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10591 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg00
, arg10
);
10592 return build_call_expr_loc (loc
, rootfn
, 1, arg
);
10595 /* Optimize expN(x)*expN(y) as expN(x+y). */
10596 if (fcode0
== fcode1
&& BUILTIN_EXPONENT_P (fcode0
))
10598 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10599 tree arg
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
10600 CALL_EXPR_ARG (arg0
, 0),
10601 CALL_EXPR_ARG (arg1
, 0));
10602 return build_call_expr_loc (loc
, expfn
, 1, arg
);
10605 /* Optimizations of pow(...)*pow(...). */
10606 if ((fcode0
== BUILT_IN_POW
&& fcode1
== BUILT_IN_POW
)
10607 || (fcode0
== BUILT_IN_POWF
&& fcode1
== BUILT_IN_POWF
)
10608 || (fcode0
== BUILT_IN_POWL
&& fcode1
== BUILT_IN_POWL
))
10610 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10611 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
10612 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10613 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
10615 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10616 if (operand_equal_p (arg01
, arg11
, 0))
10618 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10619 tree arg
= fold_build2_loc (loc
, MULT_EXPR
, type
,
10621 return build_call_expr_loc (loc
, powfn
, 2, arg
, arg01
);
10624 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10625 if (operand_equal_p (arg00
, arg10
, 0))
10627 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10628 tree arg
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
10630 return build_call_expr_loc (loc
, powfn
, 2, arg00
, arg
);
10634 /* Optimize tan(x)*cos(x) as sin(x). */
10635 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_COS
)
10636 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_COSF
)
10637 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_COSL
)
10638 || (fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_TAN
)
10639 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_TANF
)
10640 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_TANL
))
10641 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
10642 CALL_EXPR_ARG (arg1
, 0), 0))
10644 tree sinfn
= mathfn_built_in (type
, BUILT_IN_SIN
);
10646 if (sinfn
!= NULL_TREE
)
10647 return build_call_expr_loc (loc
, sinfn
, 1,
10648 CALL_EXPR_ARG (arg0
, 0));
10651 /* Optimize x*pow(x,c) as pow(x,c+1). */
10652 if (fcode1
== BUILT_IN_POW
10653 || fcode1
== BUILT_IN_POWF
10654 || fcode1
== BUILT_IN_POWL
)
10656 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10657 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
10658 if (TREE_CODE (arg11
) == REAL_CST
10659 && !TREE_OVERFLOW (arg11
)
10660 && operand_equal_p (arg0
, arg10
, 0))
10662 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
10666 c
= TREE_REAL_CST (arg11
);
10667 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
10668 arg
= build_real (type
, c
);
10669 return build_call_expr_loc (loc
, powfn
, 2, arg0
, arg
);
10673 /* Optimize pow(x,c)*x as pow(x,c+1). */
10674 if (fcode0
== BUILT_IN_POW
10675 || fcode0
== BUILT_IN_POWF
10676 || fcode0
== BUILT_IN_POWL
)
10678 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10679 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
10680 if (TREE_CODE (arg01
) == REAL_CST
10681 && !TREE_OVERFLOW (arg01
)
10682 && operand_equal_p (arg1
, arg00
, 0))
10684 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10688 c
= TREE_REAL_CST (arg01
);
10689 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
10690 arg
= build_real (type
, c
);
10691 return build_call_expr_loc (loc
, powfn
, 2, arg1
, arg
);
10695 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10696 if (!in_gimple_form
10697 && optimize_function_for_speed_p (cfun
)
10698 && operand_equal_p (arg0
, arg1
, 0))
10700 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
10704 tree arg
= build_real (type
, dconst2
);
10705 return build_call_expr_loc (loc
, powfn
, 2, arg0
, arg
);
10714 if (integer_all_onesp (arg1
))
10715 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
10716 if (integer_zerop (arg1
))
10717 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10718 if (operand_equal_p (arg0
, arg1
, 0))
10719 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10721 /* ~X | X is -1. */
10722 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
10723 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10725 t1
= build_zero_cst (type
);
10726 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
10727 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
10730 /* X | ~X is -1. */
10731 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
10732 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10734 t1
= build_zero_cst (type
);
10735 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
10736 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
10739 /* Canonicalize (X & C1) | C2. */
10740 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10741 && TREE_CODE (arg1
) == INTEGER_CST
10742 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
10744 unsigned HOST_WIDE_INT hi1
, lo1
, hi2
, lo2
, hi3
, lo3
, mlo
, mhi
;
10745 int width
= TYPE_PRECISION (type
), w
;
10746 hi1
= TREE_INT_CST_HIGH (TREE_OPERAND (arg0
, 1));
10747 lo1
= TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1));
10748 hi2
= TREE_INT_CST_HIGH (arg1
);
10749 lo2
= TREE_INT_CST_LOW (arg1
);
10751 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10752 if ((hi1
& hi2
) == hi1
&& (lo1
& lo2
) == lo1
)
10753 return omit_one_operand_loc (loc
, type
, arg1
,
10754 TREE_OPERAND (arg0
, 0));
10756 if (width
> HOST_BITS_PER_WIDE_INT
)
10758 mhi
= (unsigned HOST_WIDE_INT
) -1
10759 >> (2 * HOST_BITS_PER_WIDE_INT
- width
);
10765 mlo
= (unsigned HOST_WIDE_INT
) -1
10766 >> (HOST_BITS_PER_WIDE_INT
- width
);
10769 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10770 if ((~(hi1
| hi2
) & mhi
) == 0 && (~(lo1
| lo2
) & mlo
) == 0)
10771 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
,
10772 TREE_OPERAND (arg0
, 0), arg1
);
10774 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10775 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10776 mode which allows further optimizations. */
10783 for (w
= BITS_PER_UNIT
;
10784 w
<= width
&& w
<= HOST_BITS_PER_WIDE_INT
;
10787 unsigned HOST_WIDE_INT mask
10788 = (unsigned HOST_WIDE_INT
) -1 >> (HOST_BITS_PER_WIDE_INT
- w
);
10789 if (((lo1
| lo2
) & mask
) == mask
10790 && (lo1
& ~mask
) == 0 && hi1
== 0)
10797 if (hi3
!= hi1
|| lo3
!= lo1
)
10798 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
,
10799 fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10800 TREE_OPERAND (arg0
, 0),
10801 build_int_cst_wide (type
,
10806 /* (X & Y) | Y is (X, Y). */
10807 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10808 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
10809 return omit_one_operand_loc (loc
, type
, arg1
, TREE_OPERAND (arg0
, 0));
10810 /* (X & Y) | X is (Y, X). */
10811 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10812 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
10813 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
10814 return omit_one_operand_loc (loc
, type
, arg1
, TREE_OPERAND (arg0
, 1));
10815 /* X | (X & Y) is (Y, X). */
10816 if (TREE_CODE (arg1
) == BIT_AND_EXPR
10817 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0)
10818 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 1)))
10819 return omit_one_operand_loc (loc
, type
, arg0
, TREE_OPERAND (arg1
, 1));
10820 /* X | (Y & X) is (Y, X). */
10821 if (TREE_CODE (arg1
) == BIT_AND_EXPR
10822 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
10823 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
10824 return omit_one_operand_loc (loc
, type
, arg0
, TREE_OPERAND (arg1
, 0));
10826 /* (X & ~Y) | (~X & Y) is X ^ Y */
10827 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10828 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
10830 tree a0
, a1
, l0
, l1
, n0
, n1
;
10832 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
10833 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
10835 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10836 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
10838 n0
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, l0
);
10839 n1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, l1
);
10841 if ((operand_equal_p (n0
, a0
, 0)
10842 && operand_equal_p (n1
, a1
, 0))
10843 || (operand_equal_p (n0
, a1
, 0)
10844 && operand_equal_p (n1
, a0
, 0)))
10845 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, l0
, n1
);
10848 t1
= distribute_bit_expr (loc
, code
, type
, arg0
, arg1
);
10849 if (t1
!= NULL_TREE
)
10852 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10854 This results in more efficient code for machines without a NAND
10855 instruction. Combine will canonicalize to the first form
10856 which will allow use of NAND instructions provided by the
10857 backend if they exist. */
10858 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
10859 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
10862 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
10863 build2 (BIT_AND_EXPR
, type
,
10864 fold_convert_loc (loc
, type
,
10865 TREE_OPERAND (arg0
, 0)),
10866 fold_convert_loc (loc
, type
,
10867 TREE_OPERAND (arg1
, 0))));
10870 /* See if this can be simplified into a rotate first. If that
10871 is unsuccessful continue in the association code. */
10875 if (integer_zerop (arg1
))
10876 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10877 if (integer_all_onesp (arg1
))
10878 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, op0
);
10879 if (operand_equal_p (arg0
, arg1
, 0))
10880 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
10882 /* ~X ^ X is -1. */
10883 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
10884 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10886 t1
= build_zero_cst (type
);
10887 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
10888 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
10891 /* X ^ ~X is -1. */
10892 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
10893 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10895 t1
= build_zero_cst (type
);
10896 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
10897 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
10900 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10901 with a constant, and the two constants have no bits in common,
10902 we should treat this as a BIT_IOR_EXPR since this may produce more
10903 simplifications. */
10904 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10905 && TREE_CODE (arg1
) == BIT_AND_EXPR
10906 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
10907 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
10908 && integer_zerop (const_binop (BIT_AND_EXPR
,
10909 TREE_OPERAND (arg0
, 1),
10910 TREE_OPERAND (arg1
, 1))))
10912 code
= BIT_IOR_EXPR
;
10916 /* (X | Y) ^ X -> Y & ~ X*/
10917 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
10918 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10920 tree t2
= TREE_OPERAND (arg0
, 1);
10921 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg1
),
10923 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10924 fold_convert_loc (loc
, type
, t2
),
10925 fold_convert_loc (loc
, type
, t1
));
10929 /* (Y | X) ^ X -> Y & ~ X*/
10930 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
10931 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
10933 tree t2
= TREE_OPERAND (arg0
, 0);
10934 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg1
),
10936 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10937 fold_convert_loc (loc
, type
, t2
),
10938 fold_convert_loc (loc
, type
, t1
));
10942 /* X ^ (X | Y) -> Y & ~ X*/
10943 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
10944 && operand_equal_p (TREE_OPERAND (arg1
, 0), arg0
, 0))
10946 tree t2
= TREE_OPERAND (arg1
, 1);
10947 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg0
),
10949 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10950 fold_convert_loc (loc
, type
, t2
),
10951 fold_convert_loc (loc
, type
, t1
));
10955 /* X ^ (Y | X) -> Y & ~ X*/
10956 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
10957 && operand_equal_p (TREE_OPERAND (arg1
, 1), arg0
, 0))
10959 tree t2
= TREE_OPERAND (arg1
, 0);
10960 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg0
),
10962 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10963 fold_convert_loc (loc
, type
, t2
),
10964 fold_convert_loc (loc
, type
, t1
));
10968 /* Convert ~X ^ ~Y to X ^ Y. */
10969 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
10970 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
10971 return fold_build2_loc (loc
, code
, type
,
10972 fold_convert_loc (loc
, type
,
10973 TREE_OPERAND (arg0
, 0)),
10974 fold_convert_loc (loc
, type
,
10975 TREE_OPERAND (arg1
, 0)));
10977 /* Convert ~X ^ C to X ^ ~C. */
10978 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
10979 && TREE_CODE (arg1
) == INTEGER_CST
)
10980 return fold_build2_loc (loc
, code
, type
,
10981 fold_convert_loc (loc
, type
,
10982 TREE_OPERAND (arg0
, 0)),
10983 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, arg1
));
10985 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10986 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10987 && integer_onep (TREE_OPERAND (arg0
, 1))
10988 && integer_onep (arg1
))
10989 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
,
10990 build_int_cst (TREE_TYPE (arg0
), 0));
10992 /* Fold (X & Y) ^ Y as ~X & Y. */
10993 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10994 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
10996 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10997 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10998 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
10999 fold_convert_loc (loc
, type
, arg1
));
11001 /* Fold (X & Y) ^ X as ~Y & X. */
11002 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11003 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11004 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11006 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11007 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11008 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11009 fold_convert_loc (loc
, type
, arg1
));
11011 /* Fold X ^ (X & Y) as X & ~Y. */
11012 if (TREE_CODE (arg1
) == BIT_AND_EXPR
11013 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11015 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11016 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11017 fold_convert_loc (loc
, type
, arg0
),
11018 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
));
11020 /* Fold X ^ (Y & X) as ~Y & X. */
11021 if (TREE_CODE (arg1
) == BIT_AND_EXPR
11022 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11023 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11025 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11026 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11027 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11028 fold_convert_loc (loc
, type
, arg0
));
11031 /* See if this can be simplified into a rotate first. If that
11032 is unsuccessful continue in the association code. */
11036 if (integer_all_onesp (arg1
))
11037 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11038 if (integer_zerop (arg1
))
11039 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
11040 if (operand_equal_p (arg0
, arg1
, 0))
11041 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11043 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11044 if ((TREE_CODE (arg0
) == BIT_NOT_EXPR
11045 || TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11046 || (TREE_CODE (arg0
) == EQ_EXPR
11047 && integer_zerop (TREE_OPERAND (arg0
, 1))))
11048 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11049 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
11051 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11052 if ((TREE_CODE (arg1
) == BIT_NOT_EXPR
11053 || TREE_CODE (arg1
) == TRUTH_NOT_EXPR
11054 || (TREE_CODE (arg1
) == EQ_EXPR
11055 && integer_zerop (TREE_OPERAND (arg1
, 1))))
11056 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11057 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
11059 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11060 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11061 && TREE_CODE (arg1
) == INTEGER_CST
11062 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11064 tree tmp1
= fold_convert_loc (loc
, type
, arg1
);
11065 tree tmp2
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11066 tree tmp3
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11067 tmp2
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tmp2
, tmp1
);
11068 tmp3
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tmp3
, tmp1
);
11070 fold_convert_loc (loc
, type
,
11071 fold_build2_loc (loc
, BIT_IOR_EXPR
,
11072 type
, tmp2
, tmp3
));
11075 /* (X | Y) & Y is (X, Y). */
11076 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11077 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11078 return omit_one_operand_loc (loc
, type
, arg1
, TREE_OPERAND (arg0
, 0));
11079 /* (X | Y) & X is (Y, X). */
11080 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11081 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11082 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11083 return omit_one_operand_loc (loc
, type
, arg1
, TREE_OPERAND (arg0
, 1));
11084 /* X & (X | Y) is (Y, X). */
11085 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11086 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0)
11087 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 1)))
11088 return omit_one_operand_loc (loc
, type
, arg0
, TREE_OPERAND (arg1
, 1));
11089 /* X & (Y | X) is (Y, X). */
11090 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11091 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11092 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11093 return omit_one_operand_loc (loc
, type
, arg0
, TREE_OPERAND (arg1
, 0));
11095 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11096 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11097 && integer_onep (TREE_OPERAND (arg0
, 1))
11098 && integer_onep (arg1
))
11100 tem
= TREE_OPERAND (arg0
, 0);
11101 return fold_build2_loc (loc
, EQ_EXPR
, type
,
11102 fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
), tem
,
11103 build_int_cst (TREE_TYPE (tem
), 1)),
11104 build_int_cst (TREE_TYPE (tem
), 0));
11106 /* Fold ~X & 1 as (X & 1) == 0. */
11107 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11108 && integer_onep (arg1
))
11110 tem
= TREE_OPERAND (arg0
, 0);
11111 return fold_build2_loc (loc
, EQ_EXPR
, type
,
11112 fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
), tem
,
11113 build_int_cst (TREE_TYPE (tem
), 1)),
11114 build_int_cst (TREE_TYPE (tem
), 0));
11116 /* Fold !X & 1 as X == 0. */
11117 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11118 && integer_onep (arg1
))
11120 tem
= TREE_OPERAND (arg0
, 0);
11121 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem
,
11122 build_int_cst (TREE_TYPE (tem
), 0));
11125 /* Fold (X ^ Y) & Y as ~X & Y. */
11126 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11127 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11129 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11130 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11131 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11132 fold_convert_loc (loc
, type
, arg1
));
11134 /* Fold (X ^ Y) & X as ~Y & X. */
11135 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11136 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11137 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11139 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11140 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11141 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11142 fold_convert_loc (loc
, type
, arg1
));
11144 /* Fold X & (X ^ Y) as X & ~Y. */
11145 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
11146 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11148 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11149 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11150 fold_convert_loc (loc
, type
, arg0
),
11151 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
));
11153 /* Fold X & (Y ^ X) as ~Y & X. */
11154 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
11155 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11156 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11158 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11159 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11160 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11161 fold_convert_loc (loc
, type
, arg0
));
11164 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11165 ((A & N) + B) & M -> (A + B) & M
11166 Similarly if (N & M) == 0,
11167 ((A | N) + B) & M -> (A + B) & M
11168 and for - instead of + (or unary - instead of +)
11169 and/or ^ instead of |.
11170 If B is constant and (B & M) == 0, fold into A & M. */
11171 if (host_integerp (arg1
, 1))
11173 unsigned HOST_WIDE_INT cst1
= tree_low_cst (arg1
, 1);
11174 if (~cst1
&& (cst1
& (cst1
+ 1)) == 0
11175 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
11176 && (TREE_CODE (arg0
) == PLUS_EXPR
11177 || TREE_CODE (arg0
) == MINUS_EXPR
11178 || TREE_CODE (arg0
) == NEGATE_EXPR
)
11179 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
))
11180 || TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
))
11184 unsigned HOST_WIDE_INT cst0
;
11186 /* Now we know that arg0 is (C + D) or (C - D) or
11187 -C and arg1 (M) is == (1LL << cst) - 1.
11188 Store C into PMOP[0] and D into PMOP[1]. */
11189 pmop
[0] = TREE_OPERAND (arg0
, 0);
11191 if (TREE_CODE (arg0
) != NEGATE_EXPR
)
11193 pmop
[1] = TREE_OPERAND (arg0
, 1);
11197 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0
)), 1)
11198 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0
)), 1)
11202 for (; which
>= 0; which
--)
11203 switch (TREE_CODE (pmop
[which
]))
11208 if (TREE_CODE (TREE_OPERAND (pmop
[which
], 1))
11211 /* tree_low_cst not used, because we don't care about
11213 cst0
= TREE_INT_CST_LOW (TREE_OPERAND (pmop
[which
], 1));
11215 if (TREE_CODE (pmop
[which
]) == BIT_AND_EXPR
)
11220 else if (cst0
!= 0)
11222 /* If C or D is of the form (A & N) where
11223 (N & M) == M, or of the form (A | N) or
11224 (A ^ N) where (N & M) == 0, replace it with A. */
11225 pmop
[which
] = TREE_OPERAND (pmop
[which
], 0);
11228 /* If C or D is a N where (N & M) == 0, it can be
11229 omitted (assumed 0). */
11230 if ((TREE_CODE (arg0
) == PLUS_EXPR
11231 || (TREE_CODE (arg0
) == MINUS_EXPR
&& which
== 0))
11232 && (TREE_INT_CST_LOW (pmop
[which
]) & cst1
) == 0)
11233 pmop
[which
] = NULL
;
11239 /* Only build anything new if we optimized one or both arguments
11241 if (pmop
[0] != TREE_OPERAND (arg0
, 0)
11242 || (TREE_CODE (arg0
) != NEGATE_EXPR
11243 && pmop
[1] != TREE_OPERAND (arg0
, 1)))
11245 tree utype
= TREE_TYPE (arg0
);
11246 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
11248 /* Perform the operations in a type that has defined
11249 overflow behavior. */
11250 utype
= unsigned_type_for (TREE_TYPE (arg0
));
11251 if (pmop
[0] != NULL
)
11252 pmop
[0] = fold_convert_loc (loc
, utype
, pmop
[0]);
11253 if (pmop
[1] != NULL
)
11254 pmop
[1] = fold_convert_loc (loc
, utype
, pmop
[1]);
11257 if (TREE_CODE (arg0
) == NEGATE_EXPR
)
11258 tem
= fold_build1_loc (loc
, NEGATE_EXPR
, utype
, pmop
[0]);
11259 else if (TREE_CODE (arg0
) == PLUS_EXPR
)
11261 if (pmop
[0] != NULL
&& pmop
[1] != NULL
)
11262 tem
= fold_build2_loc (loc
, PLUS_EXPR
, utype
,
11264 else if (pmop
[0] != NULL
)
11266 else if (pmop
[1] != NULL
)
11269 return build_int_cst (type
, 0);
11271 else if (pmop
[0] == NULL
)
11272 tem
= fold_build1_loc (loc
, NEGATE_EXPR
, utype
, pmop
[1]);
11274 tem
= fold_build2_loc (loc
, MINUS_EXPR
, utype
,
11276 /* TEM is now the new binary +, - or unary - replacement. */
11277 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, utype
, tem
,
11278 fold_convert_loc (loc
, utype
, arg1
));
11279 return fold_convert_loc (loc
, type
, tem
);
11284 t1
= distribute_bit_expr (loc
, code
, type
, arg0
, arg1
);
11285 if (t1
!= NULL_TREE
)
11287 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11288 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) == NOP_EXPR
11289 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
11292 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 0)));
11294 if (prec
< BITS_PER_WORD
&& prec
< HOST_BITS_PER_WIDE_INT
11295 && (~TREE_INT_CST_LOW (arg1
)
11296 & (((HOST_WIDE_INT
) 1 << prec
) - 1)) == 0)
11298 fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11301 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11303 This results in more efficient code for machines without a NOR
11304 instruction. Combine will canonicalize to the first form
11305 which will allow use of NOR instructions provided by the
11306 backend if they exist. */
11307 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11308 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
11310 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
11311 build2 (BIT_IOR_EXPR
, type
,
11312 fold_convert_loc (loc
, type
,
11313 TREE_OPERAND (arg0
, 0)),
11314 fold_convert_loc (loc
, type
,
11315 TREE_OPERAND (arg1
, 0))));
11318 /* If arg0 is derived from the address of an object or function, we may
11319 be able to fold this expression using the object or function's
11321 if (POINTER_TYPE_P (TREE_TYPE (arg0
)) && host_integerp (arg1
, 1))
11323 unsigned HOST_WIDE_INT modulus
, residue
;
11324 unsigned HOST_WIDE_INT low
= TREE_INT_CST_LOW (arg1
);
11326 modulus
= get_pointer_modulus_and_residue (arg0
, &residue
,
11327 integer_onep (arg1
));
11329 /* This works because modulus is a power of 2. If this weren't the
11330 case, we'd have to replace it by its greatest power-of-2
11331 divisor: modulus & -modulus. */
11333 return build_int_cst (type
, residue
& low
);
11336 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11337 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11338 if the new mask might be further optimized. */
11339 if ((TREE_CODE (arg0
) == LSHIFT_EXPR
11340 || TREE_CODE (arg0
) == RSHIFT_EXPR
)
11341 && host_integerp (TREE_OPERAND (arg0
, 1), 1)
11342 && host_integerp (arg1
, TYPE_UNSIGNED (TREE_TYPE (arg1
)))
11343 && tree_low_cst (TREE_OPERAND (arg0
, 1), 1)
11344 < TYPE_PRECISION (TREE_TYPE (arg0
))
11345 && TYPE_PRECISION (TREE_TYPE (arg0
)) <= HOST_BITS_PER_WIDE_INT
11346 && tree_low_cst (TREE_OPERAND (arg0
, 1), 1) > 0)
11348 unsigned int shiftc
= tree_low_cst (TREE_OPERAND (arg0
, 1), 1);
11349 unsigned HOST_WIDE_INT mask
11350 = tree_low_cst (arg1
, TYPE_UNSIGNED (TREE_TYPE (arg1
)));
11351 unsigned HOST_WIDE_INT newmask
, zerobits
= 0;
11352 tree shift_type
= TREE_TYPE (arg0
);
11354 if (TREE_CODE (arg0
) == LSHIFT_EXPR
)
11355 zerobits
= ((((unsigned HOST_WIDE_INT
) 1) << shiftc
) - 1);
11356 else if (TREE_CODE (arg0
) == RSHIFT_EXPR
11357 && TYPE_PRECISION (TREE_TYPE (arg0
))
11358 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0
))))
11360 unsigned int prec
= TYPE_PRECISION (TREE_TYPE (arg0
));
11361 tree arg00
= TREE_OPERAND (arg0
, 0);
11362 /* See if more bits can be proven as zero because of
11364 if (TREE_CODE (arg00
) == NOP_EXPR
11365 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00
, 0))))
11367 tree inner_type
= TREE_TYPE (TREE_OPERAND (arg00
, 0));
11368 if (TYPE_PRECISION (inner_type
)
11369 == GET_MODE_BITSIZE (TYPE_MODE (inner_type
))
11370 && TYPE_PRECISION (inner_type
) < prec
)
11372 prec
= TYPE_PRECISION (inner_type
);
11373 /* See if we can shorten the right shift. */
11375 shift_type
= inner_type
;
11378 zerobits
= ~(unsigned HOST_WIDE_INT
) 0;
11379 zerobits
>>= HOST_BITS_PER_WIDE_INT
- shiftc
;
11380 zerobits
<<= prec
- shiftc
;
11381 /* For arithmetic shift if sign bit could be set, zerobits
11382 can contain actually sign bits, so no transformation is
11383 possible, unless MASK masks them all away. In that
11384 case the shift needs to be converted into logical shift. */
11385 if (!TYPE_UNSIGNED (TREE_TYPE (arg0
))
11386 && prec
== TYPE_PRECISION (TREE_TYPE (arg0
)))
11388 if ((mask
& zerobits
) == 0)
11389 shift_type
= unsigned_type_for (TREE_TYPE (arg0
));
11395 /* ((X << 16) & 0xff00) is (X, 0). */
11396 if ((mask
& zerobits
) == mask
)
11397 return omit_one_operand_loc (loc
, type
,
11398 build_int_cst (type
, 0), arg0
);
11400 newmask
= mask
| zerobits
;
11401 if (newmask
!= mask
&& (newmask
& (newmask
+ 1)) == 0)
11405 /* Only do the transformation if NEWMASK is some integer
11407 for (prec
= BITS_PER_UNIT
;
11408 prec
< HOST_BITS_PER_WIDE_INT
; prec
<<= 1)
11409 if (newmask
== (((unsigned HOST_WIDE_INT
) 1) << prec
) - 1)
11411 if (prec
< HOST_BITS_PER_WIDE_INT
11412 || newmask
== ~(unsigned HOST_WIDE_INT
) 0)
11416 if (shift_type
!= TREE_TYPE (arg0
))
11418 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), shift_type
,
11419 fold_convert_loc (loc
, shift_type
,
11420 TREE_OPERAND (arg0
, 0)),
11421 TREE_OPERAND (arg0
, 1));
11422 tem
= fold_convert_loc (loc
, type
, tem
);
11426 newmaskt
= build_int_cst_type (TREE_TYPE (op1
), newmask
);
11427 if (!tree_int_cst_equal (newmaskt
, arg1
))
11428 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tem
, newmaskt
);
11436 /* Don't touch a floating-point divide by zero unless the mode
11437 of the constant can represent infinity. */
11438 if (TREE_CODE (arg1
) == REAL_CST
11439 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
)))
11440 && real_zerop (arg1
))
11443 /* Optimize A / A to 1.0 if we don't care about
11444 NaNs or Infinities. Skip the transformation
11445 for non-real operands. */
11446 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0
))
11447 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
11448 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0
)))
11449 && operand_equal_p (arg0
, arg1
, 0))
11451 tree r
= build_real (TREE_TYPE (arg0
), dconst1
);
11453 return omit_two_operands_loc (loc
, type
, r
, arg0
, arg1
);
11456 /* The complex version of the above A / A optimization. */
11457 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
))
11458 && operand_equal_p (arg0
, arg1
, 0))
11460 tree elem_type
= TREE_TYPE (TREE_TYPE (arg0
));
11461 if (! HONOR_NANS (TYPE_MODE (elem_type
))
11462 && ! HONOR_INFINITIES (TYPE_MODE (elem_type
)))
11464 tree r
= build_real (elem_type
, dconst1
);
11465 /* omit_two_operands will call fold_convert for us. */
11466 return omit_two_operands_loc (loc
, type
, r
, arg0
, arg1
);
11470 /* (-A) / (-B) -> A / B */
11471 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
11472 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11473 TREE_OPERAND (arg0
, 0),
11474 negate_expr (arg1
));
11475 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
11476 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11477 negate_expr (arg0
),
11478 TREE_OPERAND (arg1
, 0));
11480 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11481 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
11482 && real_onep (arg1
))
11483 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11485 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11486 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
11487 && real_minus_onep (arg1
))
11488 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
,
11489 negate_expr (arg0
)));
11491 /* If ARG1 is a constant, we can convert this to a multiply by the
11492 reciprocal. This does not have the same rounding properties,
11493 so only do this if -freciprocal-math. We can actually
11494 always safely do it if ARG1 is a power of two, but it's hard to
11495 tell if it is or not in a portable manner. */
11496 if (TREE_CODE (arg1
) == REAL_CST
)
11498 if (flag_reciprocal_math
11499 && 0 != (tem
= const_binop (code
, build_real (type
, dconst1
),
11501 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, tem
);
11502 /* Find the reciprocal if optimizing and the result is exact. */
11506 r
= TREE_REAL_CST (arg1
);
11507 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0
)), &r
))
11509 tem
= build_real (type
, r
);
11510 return fold_build2_loc (loc
, MULT_EXPR
, type
,
11511 fold_convert_loc (loc
, type
, arg0
), tem
);
11515 /* Convert A/B/C to A/(B*C). */
11516 if (flag_reciprocal_math
11517 && TREE_CODE (arg0
) == RDIV_EXPR
)
11518 return fold_build2_loc (loc
, RDIV_EXPR
, type
, TREE_OPERAND (arg0
, 0),
11519 fold_build2_loc (loc
, MULT_EXPR
, type
,
11520 TREE_OPERAND (arg0
, 1), arg1
));
11522 /* Convert A/(B/C) to (A/B)*C. */
11523 if (flag_reciprocal_math
11524 && TREE_CODE (arg1
) == RDIV_EXPR
)
11525 return fold_build2_loc (loc
, MULT_EXPR
, type
,
11526 fold_build2_loc (loc
, RDIV_EXPR
, type
, arg0
,
11527 TREE_OPERAND (arg1
, 0)),
11528 TREE_OPERAND (arg1
, 1));
11530 /* Convert C1/(X*C2) into (C1/C2)/X. */
11531 if (flag_reciprocal_math
11532 && TREE_CODE (arg1
) == MULT_EXPR
11533 && TREE_CODE (arg0
) == REAL_CST
11534 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
11536 tree tem
= const_binop (RDIV_EXPR
, arg0
,
11537 TREE_OPERAND (arg1
, 1));
11539 return fold_build2_loc (loc
, RDIV_EXPR
, type
, tem
,
11540 TREE_OPERAND (arg1
, 0));
11543 if (flag_unsafe_math_optimizations
)
11545 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
11546 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
11548 /* Optimize sin(x)/cos(x) as tan(x). */
11549 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_COS
)
11550 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_COSF
)
11551 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_COSL
))
11552 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
11553 CALL_EXPR_ARG (arg1
, 0), 0))
11555 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
11557 if (tanfn
!= NULL_TREE
)
11558 return build_call_expr_loc (loc
, tanfn
, 1, CALL_EXPR_ARG (arg0
, 0));
11561 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11562 if (((fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_SIN
)
11563 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_SINF
)
11564 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_SINL
))
11565 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
11566 CALL_EXPR_ARG (arg1
, 0), 0))
11568 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
11570 if (tanfn
!= NULL_TREE
)
11572 tree tmp
= build_call_expr_loc (loc
, tanfn
, 1,
11573 CALL_EXPR_ARG (arg0
, 0));
11574 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11575 build_real (type
, dconst1
), tmp
);
11579 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11580 NaNs or Infinities. */
11581 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_TAN
)
11582 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_TANF
)
11583 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_TANL
)))
11585 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11586 tree arg01
= CALL_EXPR_ARG (arg1
, 0);
11588 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00
)))
11589 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00
)))
11590 && operand_equal_p (arg00
, arg01
, 0))
11592 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
11594 if (cosfn
!= NULL_TREE
)
11595 return build_call_expr_loc (loc
, cosfn
, 1, arg00
);
11599 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11600 NaNs or Infinities. */
11601 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_SIN
)
11602 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_SINF
)
11603 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_SINL
)))
11605 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11606 tree arg01
= CALL_EXPR_ARG (arg1
, 0);
11608 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00
)))
11609 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00
)))
11610 && operand_equal_p (arg00
, arg01
, 0))
11612 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
11614 if (cosfn
!= NULL_TREE
)
11616 tree tmp
= build_call_expr_loc (loc
, cosfn
, 1, arg00
);
11617 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11618 build_real (type
, dconst1
),
11624 /* Optimize pow(x,c)/x as pow(x,c-1). */
11625 if (fcode0
== BUILT_IN_POW
11626 || fcode0
== BUILT_IN_POWF
11627 || fcode0
== BUILT_IN_POWL
)
11629 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11630 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
11631 if (TREE_CODE (arg01
) == REAL_CST
11632 && !TREE_OVERFLOW (arg01
)
11633 && operand_equal_p (arg1
, arg00
, 0))
11635 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
11639 c
= TREE_REAL_CST (arg01
);
11640 real_arithmetic (&c
, MINUS_EXPR
, &c
, &dconst1
);
11641 arg
= build_real (type
, c
);
11642 return build_call_expr_loc (loc
, powfn
, 2, arg1
, arg
);
11646 /* Optimize a/root(b/c) into a*root(c/b). */
11647 if (BUILTIN_ROOT_P (fcode1
))
11649 tree rootarg
= CALL_EXPR_ARG (arg1
, 0);
11651 if (TREE_CODE (rootarg
) == RDIV_EXPR
)
11653 tree rootfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
11654 tree b
= TREE_OPERAND (rootarg
, 0);
11655 tree c
= TREE_OPERAND (rootarg
, 1);
11657 tree tmp
= fold_build2_loc (loc
, RDIV_EXPR
, type
, c
, b
);
11659 tmp
= build_call_expr_loc (loc
, rootfn
, 1, tmp
);
11660 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, tmp
);
11664 /* Optimize x/expN(y) into x*expN(-y). */
11665 if (BUILTIN_EXPONENT_P (fcode1
))
11667 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
11668 tree arg
= negate_expr (CALL_EXPR_ARG (arg1
, 0));
11669 arg1
= build_call_expr_loc (loc
,
11671 fold_convert_loc (loc
, type
, arg
));
11672 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
11675 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11676 if (fcode1
== BUILT_IN_POW
11677 || fcode1
== BUILT_IN_POWF
11678 || fcode1
== BUILT_IN_POWL
)
11680 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
11681 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
11682 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
11683 tree neg11
= fold_convert_loc (loc
, type
,
11684 negate_expr (arg11
));
11685 arg1
= build_call_expr_loc (loc
, powfn
, 2, arg10
, neg11
);
11686 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
11691 case TRUNC_DIV_EXPR
:
11692 /* Optimize (X & (-A)) / A where A is a power of 2,
11694 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11695 && !TYPE_UNSIGNED (type
) && TREE_CODE (arg1
) == INTEGER_CST
11696 && integer_pow2p (arg1
) && tree_int_cst_sgn (arg1
) > 0)
11698 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (arg1
),
11699 arg1
, TREE_OPERAND (arg0
, 1));
11700 if (sum
&& integer_zerop (sum
)) {
11701 unsigned long pow2
;
11703 if (TREE_INT_CST_LOW (arg1
))
11704 pow2
= exact_log2 (TREE_INT_CST_LOW (arg1
));
11706 pow2
= exact_log2 (TREE_INT_CST_HIGH (arg1
))
11707 + HOST_BITS_PER_WIDE_INT
;
11709 return fold_build2_loc (loc
, RSHIFT_EXPR
, type
,
11710 TREE_OPERAND (arg0
, 0),
11711 build_int_cst (integer_type_node
, pow2
));
11717 case FLOOR_DIV_EXPR
:
11718 /* Simplify A / (B << N) where A and B are positive and B is
11719 a power of 2, to A >> (N + log2(B)). */
11720 strict_overflow_p
= false;
11721 if (TREE_CODE (arg1
) == LSHIFT_EXPR
11722 && (TYPE_UNSIGNED (type
)
11723 || tree_expr_nonnegative_warnv_p (op0
, &strict_overflow_p
)))
11725 tree sval
= TREE_OPERAND (arg1
, 0);
11726 if (integer_pow2p (sval
) && tree_int_cst_sgn (sval
) > 0)
11728 tree sh_cnt
= TREE_OPERAND (arg1
, 1);
11729 unsigned long pow2
;
11731 if (TREE_INT_CST_LOW (sval
))
11732 pow2
= exact_log2 (TREE_INT_CST_LOW (sval
));
11734 pow2
= exact_log2 (TREE_INT_CST_HIGH (sval
))
11735 + HOST_BITS_PER_WIDE_INT
;
11737 if (strict_overflow_p
)
11738 fold_overflow_warning (("assuming signed overflow does not "
11739 "occur when simplifying A / (B << N)"),
11740 WARN_STRICT_OVERFLOW_MISC
);
11742 sh_cnt
= fold_build2_loc (loc
, PLUS_EXPR
, TREE_TYPE (sh_cnt
),
11744 build_int_cst (TREE_TYPE (sh_cnt
),
11746 return fold_build2_loc (loc
, RSHIFT_EXPR
, type
,
11747 fold_convert_loc (loc
, type
, arg0
), sh_cnt
);
11751 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11752 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11753 if (INTEGRAL_TYPE_P (type
)
11754 && TYPE_UNSIGNED (type
)
11755 && code
== FLOOR_DIV_EXPR
)
11756 return fold_build2_loc (loc
, TRUNC_DIV_EXPR
, type
, op0
, op1
);
11760 case ROUND_DIV_EXPR
:
11761 case CEIL_DIV_EXPR
:
11762 case EXACT_DIV_EXPR
:
11763 if (integer_onep (arg1
))
11764 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11765 if (integer_zerop (arg1
))
11767 /* X / -1 is -X. */
11768 if (!TYPE_UNSIGNED (type
)
11769 && TREE_CODE (arg1
) == INTEGER_CST
11770 && TREE_INT_CST_LOW (arg1
) == (unsigned HOST_WIDE_INT
) -1
11771 && TREE_INT_CST_HIGH (arg1
) == -1)
11772 return fold_convert_loc (loc
, type
, negate_expr (arg0
));
11774 /* Convert -A / -B to A / B when the type is signed and overflow is
11776 if ((!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
11777 && TREE_CODE (arg0
) == NEGATE_EXPR
11778 && negate_expr_p (arg1
))
11780 if (INTEGRAL_TYPE_P (type
))
11781 fold_overflow_warning (("assuming signed overflow does not occur "
11782 "when distributing negation across "
11784 WARN_STRICT_OVERFLOW_MISC
);
11785 return fold_build2_loc (loc
, code
, type
,
11786 fold_convert_loc (loc
, type
,
11787 TREE_OPERAND (arg0
, 0)),
11788 fold_convert_loc (loc
, type
,
11789 negate_expr (arg1
)));
11791 if ((!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
11792 && TREE_CODE (arg1
) == NEGATE_EXPR
11793 && negate_expr_p (arg0
))
11795 if (INTEGRAL_TYPE_P (type
))
11796 fold_overflow_warning (("assuming signed overflow does not occur "
11797 "when distributing negation across "
11799 WARN_STRICT_OVERFLOW_MISC
);
11800 return fold_build2_loc (loc
, code
, type
,
11801 fold_convert_loc (loc
, type
,
11802 negate_expr (arg0
)),
11803 fold_convert_loc (loc
, type
,
11804 TREE_OPERAND (arg1
, 0)));
11807 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11808 operation, EXACT_DIV_EXPR.
11810 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11811 At one time others generated faster code, it's not clear if they do
11812 after the last round to changes to the DIV code in expmed.c. */
11813 if ((code
== CEIL_DIV_EXPR
|| code
== FLOOR_DIV_EXPR
)
11814 && multiple_of_p (type
, arg0
, arg1
))
11815 return fold_build2_loc (loc
, EXACT_DIV_EXPR
, type
, arg0
, arg1
);
11817 strict_overflow_p
= false;
11818 if (TREE_CODE (arg1
) == INTEGER_CST
11819 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
11820 &strict_overflow_p
)))
11822 if (strict_overflow_p
)
11823 fold_overflow_warning (("assuming signed overflow does not occur "
11824 "when simplifying division"),
11825 WARN_STRICT_OVERFLOW_MISC
);
11826 return fold_convert_loc (loc
, type
, tem
);
11831 case CEIL_MOD_EXPR
:
11832 case FLOOR_MOD_EXPR
:
11833 case ROUND_MOD_EXPR
:
11834 case TRUNC_MOD_EXPR
:
11835 /* X % 1 is always zero, but be sure to preserve any side
11837 if (integer_onep (arg1
))
11838 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
11840 /* X % 0, return X % 0 unchanged so that we can get the
11841 proper warnings and errors. */
11842 if (integer_zerop (arg1
))
11845 /* 0 % X is always zero, but be sure to preserve any side
11846 effects in X. Place this after checking for X == 0. */
11847 if (integer_zerop (arg0
))
11848 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
11850 /* X % -1 is zero. */
11851 if (!TYPE_UNSIGNED (type
)
11852 && TREE_CODE (arg1
) == INTEGER_CST
11853 && TREE_INT_CST_LOW (arg1
) == (unsigned HOST_WIDE_INT
) -1
11854 && TREE_INT_CST_HIGH (arg1
) == -1)
11855 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
11857 /* X % -C is the same as X % C. */
11858 if (code
== TRUNC_MOD_EXPR
11859 && !TYPE_UNSIGNED (type
)
11860 && TREE_CODE (arg1
) == INTEGER_CST
11861 && !TREE_OVERFLOW (arg1
)
11862 && TREE_INT_CST_HIGH (arg1
) < 0
11863 && !TYPE_OVERFLOW_TRAPS (type
)
11864 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11865 && !sign_bit_p (arg1
, arg1
))
11866 return fold_build2_loc (loc
, code
, type
,
11867 fold_convert_loc (loc
, type
, arg0
),
11868 fold_convert_loc (loc
, type
,
11869 negate_expr (arg1
)));
11871 /* X % -Y is the same as X % Y. */
11872 if (code
== TRUNC_MOD_EXPR
11873 && !TYPE_UNSIGNED (type
)
11874 && TREE_CODE (arg1
) == NEGATE_EXPR
11875 && !TYPE_OVERFLOW_TRAPS (type
))
11876 return fold_build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, arg0
),
11877 fold_convert_loc (loc
, type
,
11878 TREE_OPERAND (arg1
, 0)));
11880 strict_overflow_p
= false;
11881 if (TREE_CODE (arg1
) == INTEGER_CST
11882 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
11883 &strict_overflow_p
)))
11885 if (strict_overflow_p
)
11886 fold_overflow_warning (("assuming signed overflow does not occur "
11887 "when simplifying modulus"),
11888 WARN_STRICT_OVERFLOW_MISC
);
11889 return fold_convert_loc (loc
, type
, tem
);
11892 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11893 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11894 if ((code
== TRUNC_MOD_EXPR
|| code
== FLOOR_MOD_EXPR
)
11895 && (TYPE_UNSIGNED (type
)
11896 || tree_expr_nonnegative_warnv_p (op0
, &strict_overflow_p
)))
11899 /* Also optimize A % (C << N) where C is a power of 2,
11900 to A & ((C << N) - 1). */
11901 if (TREE_CODE (arg1
) == LSHIFT_EXPR
)
11902 c
= TREE_OPERAND (arg1
, 0);
11904 if (integer_pow2p (c
) && tree_int_cst_sgn (c
) > 0)
11907 = fold_build2_loc (loc
, MINUS_EXPR
, TREE_TYPE (arg1
), arg1
,
11908 build_int_cst (TREE_TYPE (arg1
), 1));
11909 if (strict_overflow_p
)
11910 fold_overflow_warning (("assuming signed overflow does not "
11911 "occur when simplifying "
11912 "X % (power of two)"),
11913 WARN_STRICT_OVERFLOW_MISC
);
11914 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11915 fold_convert_loc (loc
, type
, arg0
),
11916 fold_convert_loc (loc
, type
, mask
));
11924 if (integer_all_onesp (arg0
))
11925 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
11929 /* Optimize -1 >> x for arithmetic right shifts. */
11930 if (integer_all_onesp (arg0
) && !TYPE_UNSIGNED (type
)
11931 && tree_expr_nonnegative_p (arg1
))
11932 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
11933 /* ... fall through ... */
11937 if (integer_zerop (arg1
))
11938 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11939 if (integer_zerop (arg0
))
11940 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
11942 /* Since negative shift count is not well-defined,
11943 don't try to compute it in the compiler. */
11944 if (TREE_CODE (arg1
) == INTEGER_CST
&& tree_int_cst_sgn (arg1
) < 0)
11947 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11948 if (TREE_CODE (op0
) == code
&& host_integerp (arg1
, false)
11949 && TREE_INT_CST_LOW (arg1
) < TYPE_PRECISION (type
)
11950 && host_integerp (TREE_OPERAND (arg0
, 1), false)
11951 && TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1)) < TYPE_PRECISION (type
))
11953 HOST_WIDE_INT low
= (TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1))
11954 + TREE_INT_CST_LOW (arg1
));
11956 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11957 being well defined. */
11958 if (low
>= TYPE_PRECISION (type
))
11960 if (code
== LROTATE_EXPR
|| code
== RROTATE_EXPR
)
11961 low
= low
% TYPE_PRECISION (type
);
11962 else if (TYPE_UNSIGNED (type
) || code
== LSHIFT_EXPR
)
11963 return omit_one_operand_loc (loc
, type
, build_int_cst (type
, 0),
11964 TREE_OPERAND (arg0
, 0));
11966 low
= TYPE_PRECISION (type
) - 1;
11969 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
11970 build_int_cst (type
, low
));
11973 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11974 into x & ((unsigned)-1 >> c) for unsigned types. */
11975 if (((code
== LSHIFT_EXPR
&& TREE_CODE (arg0
) == RSHIFT_EXPR
)
11976 || (TYPE_UNSIGNED (type
)
11977 && code
== RSHIFT_EXPR
&& TREE_CODE (arg0
) == LSHIFT_EXPR
))
11978 && host_integerp (arg1
, false)
11979 && TREE_INT_CST_LOW (arg1
) < TYPE_PRECISION (type
)
11980 && host_integerp (TREE_OPERAND (arg0
, 1), false)
11981 && TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1)) < TYPE_PRECISION (type
))
11983 HOST_WIDE_INT low0
= TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1));
11984 HOST_WIDE_INT low1
= TREE_INT_CST_LOW (arg1
);
11990 arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11992 lshift
= build_int_cst (type
, -1);
11993 lshift
= int_const_binop (code
, lshift
, arg1
);
11995 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
, arg00
, lshift
);
11999 /* Rewrite an LROTATE_EXPR by a constant into an
12000 RROTATE_EXPR by a new constant. */
12001 if (code
== LROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
)
12003 tree tem
= build_int_cst (TREE_TYPE (arg1
),
12004 TYPE_PRECISION (type
));
12005 tem
= const_binop (MINUS_EXPR
, tem
, arg1
);
12006 return fold_build2_loc (loc
, RROTATE_EXPR
, type
, op0
, tem
);
12009 /* If we have a rotate of a bit operation with the rotate count and
12010 the second operand of the bit operation both constant,
12011 permute the two operations. */
12012 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
12013 && (TREE_CODE (arg0
) == BIT_AND_EXPR
12014 || TREE_CODE (arg0
) == BIT_IOR_EXPR
12015 || TREE_CODE (arg0
) == BIT_XOR_EXPR
)
12016 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12017 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
12018 fold_build2_loc (loc
, code
, type
,
12019 TREE_OPERAND (arg0
, 0), arg1
),
12020 fold_build2_loc (loc
, code
, type
,
12021 TREE_OPERAND (arg0
, 1), arg1
));
12023 /* Two consecutive rotates adding up to the precision of the
12024 type can be ignored. */
12025 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
12026 && TREE_CODE (arg0
) == RROTATE_EXPR
12027 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
12028 && TREE_INT_CST_HIGH (arg1
) == 0
12029 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0
, 1)) == 0
12030 && ((TREE_INT_CST_LOW (arg1
)
12031 + TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1)))
12032 == (unsigned int) TYPE_PRECISION (type
)))
12033 return TREE_OPERAND (arg0
, 0);
12035 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12036 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12037 if the latter can be further optimized. */
12038 if ((code
== LSHIFT_EXPR
|| code
== RSHIFT_EXPR
)
12039 && TREE_CODE (arg0
) == BIT_AND_EXPR
12040 && TREE_CODE (arg1
) == INTEGER_CST
12041 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12043 tree mask
= fold_build2_loc (loc
, code
, type
,
12044 fold_convert_loc (loc
, type
,
12045 TREE_OPERAND (arg0
, 1)),
12047 tree shift
= fold_build2_loc (loc
, code
, type
,
12048 fold_convert_loc (loc
, type
,
12049 TREE_OPERAND (arg0
, 0)),
12051 tem
= fold_binary_loc (loc
, BIT_AND_EXPR
, type
, shift
, mask
);
12059 if (operand_equal_p (arg0
, arg1
, 0))
12060 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12061 if (INTEGRAL_TYPE_P (type
)
12062 && operand_equal_p (arg1
, TYPE_MIN_VALUE (type
), OEP_ONLY_CONST
))
12063 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12064 tem
= fold_minmax (loc
, MIN_EXPR
, type
, arg0
, arg1
);
12070 if (operand_equal_p (arg0
, arg1
, 0))
12071 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12072 if (INTEGRAL_TYPE_P (type
)
12073 && TYPE_MAX_VALUE (type
)
12074 && operand_equal_p (arg1
, TYPE_MAX_VALUE (type
), OEP_ONLY_CONST
))
12075 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12076 tem
= fold_minmax (loc
, MAX_EXPR
, type
, arg0
, arg1
);
12081 case TRUTH_ANDIF_EXPR
:
12082 /* Note that the operands of this must be ints
12083 and their values must be 0 or 1.
12084 ("true" is a fixed value perhaps depending on the language.) */
12085 /* If first arg is constant zero, return it. */
12086 if (integer_zerop (arg0
))
12087 return fold_convert_loc (loc
, type
, arg0
);
12088 case TRUTH_AND_EXPR
:
12089 /* If either arg is constant true, drop it. */
12090 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12091 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
12092 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
)
12093 /* Preserve sequence points. */
12094 && (code
!= TRUTH_ANDIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
12095 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12096 /* If second arg is constant zero, result is zero, but first arg
12097 must be evaluated. */
12098 if (integer_zerop (arg1
))
12099 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12100 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12101 case will be handled here. */
12102 if (integer_zerop (arg0
))
12103 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12105 /* !X && X is always false. */
12106 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12107 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12108 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
12109 /* X && !X is always false. */
12110 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12111 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12112 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12114 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12115 means A >= Y && A != MAX, but in this case we know that
12118 if (!TREE_SIDE_EFFECTS (arg0
)
12119 && !TREE_SIDE_EFFECTS (arg1
))
12121 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg0
, arg1
);
12122 if (tem
&& !operand_equal_p (tem
, arg0
, 0))
12123 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
12125 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg1
, arg0
);
12126 if (tem
&& !operand_equal_p (tem
, arg1
, 0))
12127 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
12130 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
12136 case TRUTH_ORIF_EXPR
:
12137 /* Note that the operands of this must be ints
12138 and their values must be 0 or true.
12139 ("true" is a fixed value perhaps depending on the language.) */
12140 /* If first arg is constant true, return it. */
12141 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12142 return fold_convert_loc (loc
, type
, arg0
);
12143 case TRUTH_OR_EXPR
:
12144 /* If either arg is constant zero, drop it. */
12145 if (TREE_CODE (arg0
) == INTEGER_CST
&& integer_zerop (arg0
))
12146 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
12147 if (TREE_CODE (arg1
) == INTEGER_CST
&& integer_zerop (arg1
)
12148 /* Preserve sequence points. */
12149 && (code
!= TRUTH_ORIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
12150 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12151 /* If second arg is constant true, result is true, but we must
12152 evaluate first arg. */
12153 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
))
12154 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12155 /* Likewise for first arg, but note this only occurs here for
12157 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12158 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12160 /* !X || X is always true. */
12161 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12162 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12163 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
12164 /* X || !X is always true. */
12165 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12166 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12167 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12169 /* (X && !Y) || (!X && Y) is X ^ Y */
12170 if (TREE_CODE (arg0
) == TRUTH_AND_EXPR
12171 && TREE_CODE (arg1
) == TRUTH_AND_EXPR
)
12173 tree a0
, a1
, l0
, l1
, n0
, n1
;
12175 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
12176 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
12178 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
12179 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
12181 n0
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l0
);
12182 n1
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l1
);
12184 if ((operand_equal_p (n0
, a0
, 0)
12185 && operand_equal_p (n1
, a1
, 0))
12186 || (operand_equal_p (n0
, a1
, 0)
12187 && operand_equal_p (n1
, a0
, 0)))
12188 return fold_build2_loc (loc
, TRUTH_XOR_EXPR
, type
, l0
, n1
);
12191 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
12197 case TRUTH_XOR_EXPR
:
12198 /* If the second arg is constant zero, drop it. */
12199 if (integer_zerop (arg1
))
12200 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12201 /* If the second arg is constant true, this is a logical inversion. */
12202 if (integer_onep (arg1
))
12204 /* Only call invert_truthvalue if operand is a truth value. */
12205 if (TREE_CODE (TREE_TYPE (arg0
)) != BOOLEAN_TYPE
)
12206 tem
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, TREE_TYPE (arg0
), arg0
);
12208 tem
= invert_truthvalue_loc (loc
, arg0
);
12209 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
12211 /* Identical arguments cancel to zero. */
12212 if (operand_equal_p (arg0
, arg1
, 0))
12213 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12215 /* !X ^ X is always true. */
12216 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12217 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12218 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
12220 /* X ^ !X is always true. */
12221 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12222 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12223 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12232 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
12233 if (tem
!= NULL_TREE
)
12236 /* bool_var != 0 becomes bool_var. */
12237 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
12238 && code
== NE_EXPR
)
12239 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12241 /* bool_var == 1 becomes bool_var. */
12242 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
12243 && code
== EQ_EXPR
)
12244 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12246 /* bool_var != 1 becomes !bool_var. */
12247 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
12248 && code
== NE_EXPR
)
12249 return fold_convert_loc (loc
, type
,
12250 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
12251 TREE_TYPE (arg0
), arg0
));
12253 /* bool_var == 0 becomes !bool_var. */
12254 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
12255 && code
== EQ_EXPR
)
12256 return fold_convert_loc (loc
, type
,
12257 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
12258 TREE_TYPE (arg0
), arg0
));
12260 /* !exp != 0 becomes !exp */
12261 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
&& integer_zerop (arg1
)
12262 && code
== NE_EXPR
)
12263 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12265 /* If this is an equality comparison of the address of two non-weak,
12266 unaliased symbols neither of which are extern (since we do not
12267 have access to attributes for externs), then we know the result. */
12268 if (TREE_CODE (arg0
) == ADDR_EXPR
12269 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0
, 0))
12270 && ! DECL_WEAK (TREE_OPERAND (arg0
, 0))
12271 && ! lookup_attribute ("alias",
12272 DECL_ATTRIBUTES (TREE_OPERAND (arg0
, 0)))
12273 && ! DECL_EXTERNAL (TREE_OPERAND (arg0
, 0))
12274 && TREE_CODE (arg1
) == ADDR_EXPR
12275 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1
, 0))
12276 && ! DECL_WEAK (TREE_OPERAND (arg1
, 0))
12277 && ! lookup_attribute ("alias",
12278 DECL_ATTRIBUTES (TREE_OPERAND (arg1
, 0)))
12279 && ! DECL_EXTERNAL (TREE_OPERAND (arg1
, 0)))
12281 /* We know that we're looking at the address of two
12282 non-weak, unaliased, static _DECL nodes.
12284 It is both wasteful and incorrect to call operand_equal_p
12285 to compare the two ADDR_EXPR nodes. It is wasteful in that
12286 all we need to do is test pointer equality for the arguments
12287 to the two ADDR_EXPR nodes. It is incorrect to use
12288 operand_equal_p as that function is NOT equivalent to a
12289 C equality test. It can in fact return false for two
12290 objects which would test as equal using the C equality
12292 bool equal
= TREE_OPERAND (arg0
, 0) == TREE_OPERAND (arg1
, 0);
12293 return constant_boolean_node (equal
12294 ? code
== EQ_EXPR
: code
!= EQ_EXPR
,
12298 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12299 a MINUS_EXPR of a constant, we can convert it into a comparison with
12300 a revised constant as long as no overflow occurs. */
12301 if (TREE_CODE (arg1
) == INTEGER_CST
12302 && (TREE_CODE (arg0
) == PLUS_EXPR
12303 || TREE_CODE (arg0
) == MINUS_EXPR
)
12304 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
12305 && 0 != (tem
= const_binop (TREE_CODE (arg0
) == PLUS_EXPR
12306 ? MINUS_EXPR
: PLUS_EXPR
,
12307 fold_convert_loc (loc
, TREE_TYPE (arg0
),
12309 TREE_OPERAND (arg0
, 1)))
12310 && !TREE_OVERFLOW (tem
))
12311 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
12313 /* Similarly for a NEGATE_EXPR. */
12314 if (TREE_CODE (arg0
) == NEGATE_EXPR
12315 && TREE_CODE (arg1
) == INTEGER_CST
12316 && 0 != (tem
= negate_expr (fold_convert_loc (loc
, TREE_TYPE (arg0
),
12318 && TREE_CODE (tem
) == INTEGER_CST
12319 && !TREE_OVERFLOW (tem
))
12320 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
12322 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12323 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12324 && TREE_CODE (arg1
) == INTEGER_CST
12325 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12326 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12327 fold_build2_loc (loc
, BIT_XOR_EXPR
, TREE_TYPE (arg0
),
12328 fold_convert_loc (loc
,
12331 TREE_OPERAND (arg0
, 1)));
12333 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12334 if ((TREE_CODE (arg0
) == PLUS_EXPR
12335 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
12336 || TREE_CODE (arg0
) == MINUS_EXPR
)
12337 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0
,
12340 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
12341 || POINTER_TYPE_P (TREE_TYPE (arg0
))))
12343 tree val
= TREE_OPERAND (arg0
, 1);
12344 return omit_two_operands_loc (loc
, type
,
12345 fold_build2_loc (loc
, code
, type
,
12347 build_int_cst (TREE_TYPE (val
),
12349 TREE_OPERAND (arg0
, 0), arg1
);
12352 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12353 if (TREE_CODE (arg0
) == MINUS_EXPR
12354 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == INTEGER_CST
12355 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0
,
12358 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 0)) & 1) == 1)
12360 return omit_two_operands_loc (loc
, type
,
12362 ? boolean_true_node
: boolean_false_node
,
12363 TREE_OPERAND (arg0
, 1), arg1
);
12366 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12367 for !=. Don't do this for ordered comparisons due to overflow. */
12368 if (TREE_CODE (arg0
) == MINUS_EXPR
12369 && integer_zerop (arg1
))
12370 return fold_build2_loc (loc
, code
, type
,
12371 TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1));
12373 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12374 if (TREE_CODE (arg0
) == ABS_EXPR
12375 && (integer_zerop (arg1
) || real_zerop (arg1
)))
12376 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), arg1
);
12378 /* If this is an EQ or NE comparison with zero and ARG0 is
12379 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12380 two operations, but the latter can be done in one less insn
12381 on machines that have only two-operand insns or on which a
12382 constant cannot be the first operand. */
12383 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12384 && integer_zerop (arg1
))
12386 tree arg00
= TREE_OPERAND (arg0
, 0);
12387 tree arg01
= TREE_OPERAND (arg0
, 1);
12388 if (TREE_CODE (arg00
) == LSHIFT_EXPR
12389 && integer_onep (TREE_OPERAND (arg00
, 0)))
12391 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg00
),
12392 arg01
, TREE_OPERAND (arg00
, 1));
12393 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
12394 build_int_cst (TREE_TYPE (arg0
), 1));
12395 return fold_build2_loc (loc
, code
, type
,
12396 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
12399 else if (TREE_CODE (arg01
) == LSHIFT_EXPR
12400 && integer_onep (TREE_OPERAND (arg01
, 0)))
12402 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg01
),
12403 arg00
, TREE_OPERAND (arg01
, 1));
12404 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
12405 build_int_cst (TREE_TYPE (arg0
), 1));
12406 return fold_build2_loc (loc
, code
, type
,
12407 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
12412 /* If this is an NE or EQ comparison of zero against the result of a
12413 signed MOD operation whose second operand is a power of 2, make
12414 the MOD operation unsigned since it is simpler and equivalent. */
12415 if (integer_zerop (arg1
)
12416 && !TYPE_UNSIGNED (TREE_TYPE (arg0
))
12417 && (TREE_CODE (arg0
) == TRUNC_MOD_EXPR
12418 || TREE_CODE (arg0
) == CEIL_MOD_EXPR
12419 || TREE_CODE (arg0
) == FLOOR_MOD_EXPR
12420 || TREE_CODE (arg0
) == ROUND_MOD_EXPR
)
12421 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
12423 tree newtype
= unsigned_type_for (TREE_TYPE (arg0
));
12424 tree newmod
= fold_build2_loc (loc
, TREE_CODE (arg0
), newtype
,
12425 fold_convert_loc (loc
, newtype
,
12426 TREE_OPERAND (arg0
, 0)),
12427 fold_convert_loc (loc
, newtype
,
12428 TREE_OPERAND (arg0
, 1)));
12430 return fold_build2_loc (loc
, code
, type
, newmod
,
12431 fold_convert_loc (loc
, newtype
, arg1
));
12434 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12435 C1 is a valid shift constant, and C2 is a power of two, i.e.
12437 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12438 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == RSHIFT_EXPR
12439 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1))
12441 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12442 && integer_zerop (arg1
))
12444 tree itype
= TREE_TYPE (arg0
);
12445 unsigned HOST_WIDE_INT prec
= TYPE_PRECISION (itype
);
12446 tree arg001
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1);
12448 /* Check for a valid shift count. */
12449 if (TREE_INT_CST_HIGH (arg001
) == 0
12450 && TREE_INT_CST_LOW (arg001
) < prec
)
12452 tree arg01
= TREE_OPERAND (arg0
, 1);
12453 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
12454 unsigned HOST_WIDE_INT log2
= tree_log2 (arg01
);
12455 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12456 can be rewritten as (X & (C2 << C1)) != 0. */
12457 if ((log2
+ TREE_INT_CST_LOW (arg001
)) < prec
)
12459 tem
= fold_build2_loc (loc
, LSHIFT_EXPR
, itype
, arg01
, arg001
);
12460 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, itype
, arg000
, tem
);
12461 return fold_build2_loc (loc
, code
, type
, tem
,
12462 fold_convert_loc (loc
, itype
, arg1
));
12464 /* Otherwise, for signed (arithmetic) shifts,
12465 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12466 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12467 else if (!TYPE_UNSIGNED (itype
))
12468 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
, type
,
12469 arg000
, build_int_cst (itype
, 0));
12470 /* Otherwise, of unsigned (logical) shifts,
12471 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12472 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12474 return omit_one_operand_loc (loc
, type
,
12475 code
== EQ_EXPR
? integer_one_node
12476 : integer_zero_node
,
12481 /* If we have (A & C) == C where C is a power of 2, convert this into
12482 (A & C) != 0. Similarly for NE_EXPR. */
12483 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12484 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12485 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
12486 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
12487 arg0
, fold_convert_loc (loc
, TREE_TYPE (arg0
),
12488 integer_zero_node
));
12490 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12491 bit, then fold the expression into A < 0 or A >= 0. */
12492 tem
= fold_single_bit_test_into_sign_test (loc
, code
, arg0
, arg1
, type
);
12496 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12497 Similarly for NE_EXPR. */
12498 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12499 && TREE_CODE (arg1
) == INTEGER_CST
12500 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12502 tree notc
= fold_build1_loc (loc
, BIT_NOT_EXPR
,
12503 TREE_TYPE (TREE_OPERAND (arg0
, 1)),
12504 TREE_OPERAND (arg0
, 1));
12506 = fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
12507 fold_convert_loc (loc
, TREE_TYPE (arg0
), arg1
),
12509 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
12510 if (integer_nonzerop (dandnotc
))
12511 return omit_one_operand_loc (loc
, type
, rslt
, arg0
);
12514 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12515 Similarly for NE_EXPR. */
12516 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
12517 && TREE_CODE (arg1
) == INTEGER_CST
12518 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12520 tree notd
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg1
), arg1
);
12522 = fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
12523 TREE_OPERAND (arg0
, 1),
12524 fold_convert_loc (loc
, TREE_TYPE (arg0
), notd
));
12525 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
12526 if (integer_nonzerop (candnotd
))
12527 return omit_one_operand_loc (loc
, type
, rslt
, arg0
);
12530 /* If this is a comparison of a field, we may be able to simplify it. */
12531 if ((TREE_CODE (arg0
) == COMPONENT_REF
12532 || TREE_CODE (arg0
) == BIT_FIELD_REF
)
12533 /* Handle the constant case even without -O
12534 to make sure the warnings are given. */
12535 && (optimize
|| TREE_CODE (arg1
) == INTEGER_CST
))
12537 t1
= optimize_bit_field_compare (loc
, code
, type
, arg0
, arg1
);
12542 /* Optimize comparisons of strlen vs zero to a compare of the
12543 first character of the string vs zero. To wit,
12544 strlen(ptr) == 0 => *ptr == 0
12545 strlen(ptr) != 0 => *ptr != 0
12546 Other cases should reduce to one of these two (or a constant)
12547 due to the return value of strlen being unsigned. */
12548 if (TREE_CODE (arg0
) == CALL_EXPR
12549 && integer_zerop (arg1
))
12551 tree fndecl
= get_callee_fndecl (arg0
);
12554 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
12555 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STRLEN
12556 && call_expr_nargs (arg0
) == 1
12557 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0
, 0))) == POINTER_TYPE
)
12559 tree iref
= build_fold_indirect_ref_loc (loc
,
12560 CALL_EXPR_ARG (arg0
, 0));
12561 return fold_build2_loc (loc
, code
, type
, iref
,
12562 build_int_cst (TREE_TYPE (iref
), 0));
12566 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12567 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12568 if (TREE_CODE (arg0
) == RSHIFT_EXPR
12569 && integer_zerop (arg1
)
12570 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12572 tree arg00
= TREE_OPERAND (arg0
, 0);
12573 tree arg01
= TREE_OPERAND (arg0
, 1);
12574 tree itype
= TREE_TYPE (arg00
);
12575 if (TREE_INT_CST_HIGH (arg01
) == 0
12576 && TREE_INT_CST_LOW (arg01
)
12577 == (unsigned HOST_WIDE_INT
) (TYPE_PRECISION (itype
) - 1))
12579 if (TYPE_UNSIGNED (itype
))
12581 itype
= signed_type_for (itype
);
12582 arg00
= fold_convert_loc (loc
, itype
, arg00
);
12584 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
12585 type
, arg00
, build_int_cst (itype
, 0));
12589 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12590 if (integer_zerop (arg1
)
12591 && TREE_CODE (arg0
) == BIT_XOR_EXPR
)
12592 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12593 TREE_OPERAND (arg0
, 1));
12595 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12596 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12597 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
12598 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12599 build_int_cst (TREE_TYPE (arg0
), 0));
12600 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12601 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12602 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
12603 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
12604 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 1),
12605 build_int_cst (TREE_TYPE (arg0
), 0));
12607 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12608 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12609 && TREE_CODE (arg1
) == INTEGER_CST
12610 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12611 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12612 fold_build2_loc (loc
, BIT_XOR_EXPR
, TREE_TYPE (arg1
),
12613 TREE_OPERAND (arg0
, 1), arg1
));
12615 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12616 (X & C) == 0 when C is a single bit. */
12617 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12618 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_NOT_EXPR
12619 && integer_zerop (arg1
)
12620 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
12622 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
12623 TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0),
12624 TREE_OPERAND (arg0
, 1));
12625 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
,
12627 fold_convert_loc (loc
, TREE_TYPE (arg0
),
12631 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12632 constant C is a power of two, i.e. a single bit. */
12633 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12634 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
12635 && integer_zerop (arg1
)
12636 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12637 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
12638 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
12640 tree arg00
= TREE_OPERAND (arg0
, 0);
12641 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
12642 arg00
, build_int_cst (TREE_TYPE (arg00
), 0));
12645 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12646 when is C is a power of two, i.e. a single bit. */
12647 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12648 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_XOR_EXPR
12649 && integer_zerop (arg1
)
12650 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12651 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
12652 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
12654 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
12655 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg000
),
12656 arg000
, TREE_OPERAND (arg0
, 1));
12657 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
12658 tem
, build_int_cst (TREE_TYPE (tem
), 0));
12661 if (integer_zerop (arg1
)
12662 && tree_expr_nonzero_p (arg0
))
12664 tree res
= constant_boolean_node (code
==NE_EXPR
, type
);
12665 return omit_one_operand_loc (loc
, type
, res
, arg0
);
12668 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12669 if (TREE_CODE (arg0
) == NEGATE_EXPR
12670 && TREE_CODE (arg1
) == NEGATE_EXPR
)
12671 return fold_build2_loc (loc
, code
, type
,
12672 TREE_OPERAND (arg0
, 0),
12673 fold_convert_loc (loc
, TREE_TYPE (arg0
),
12674 TREE_OPERAND (arg1
, 0)));
12676 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
12677 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12678 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
12680 tree arg00
= TREE_OPERAND (arg0
, 0);
12681 tree arg01
= TREE_OPERAND (arg0
, 1);
12682 tree arg10
= TREE_OPERAND (arg1
, 0);
12683 tree arg11
= TREE_OPERAND (arg1
, 1);
12684 tree itype
= TREE_TYPE (arg0
);
12686 if (operand_equal_p (arg01
, arg11
, 0))
12687 return fold_build2_loc (loc
, code
, type
,
12688 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
12689 fold_build2_loc (loc
,
12690 BIT_XOR_EXPR
, itype
,
12693 build_int_cst (itype
, 0));
12695 if (operand_equal_p (arg01
, arg10
, 0))
12696 return fold_build2_loc (loc
, code
, type
,
12697 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
12698 fold_build2_loc (loc
,
12699 BIT_XOR_EXPR
, itype
,
12702 build_int_cst (itype
, 0));
12704 if (operand_equal_p (arg00
, arg11
, 0))
12705 return fold_build2_loc (loc
, code
, type
,
12706 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
12707 fold_build2_loc (loc
,
12708 BIT_XOR_EXPR
, itype
,
12711 build_int_cst (itype
, 0));
12713 if (operand_equal_p (arg00
, arg10
, 0))
12714 return fold_build2_loc (loc
, code
, type
,
12715 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
12716 fold_build2_loc (loc
,
12717 BIT_XOR_EXPR
, itype
,
12720 build_int_cst (itype
, 0));
12723 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12724 && TREE_CODE (arg1
) == BIT_XOR_EXPR
)
12726 tree arg00
= TREE_OPERAND (arg0
, 0);
12727 tree arg01
= TREE_OPERAND (arg0
, 1);
12728 tree arg10
= TREE_OPERAND (arg1
, 0);
12729 tree arg11
= TREE_OPERAND (arg1
, 1);
12730 tree itype
= TREE_TYPE (arg0
);
12732 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12733 operand_equal_p guarantees no side-effects so we don't need
12734 to use omit_one_operand on Z. */
12735 if (operand_equal_p (arg01
, arg11
, 0))
12736 return fold_build2_loc (loc
, code
, type
, arg00
,
12737 fold_convert_loc (loc
, TREE_TYPE (arg00
),
12739 if (operand_equal_p (arg01
, arg10
, 0))
12740 return fold_build2_loc (loc
, code
, type
, arg00
,
12741 fold_convert_loc (loc
, TREE_TYPE (arg00
),
12743 if (operand_equal_p (arg00
, arg11
, 0))
12744 return fold_build2_loc (loc
, code
, type
, arg01
,
12745 fold_convert_loc (loc
, TREE_TYPE (arg01
),
12747 if (operand_equal_p (arg00
, arg10
, 0))
12748 return fold_build2_loc (loc
, code
, type
, arg01
,
12749 fold_convert_loc (loc
, TREE_TYPE (arg01
),
12752 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12753 if (TREE_CODE (arg01
) == INTEGER_CST
12754 && TREE_CODE (arg11
) == INTEGER_CST
)
12756 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg01
,
12757 fold_convert_loc (loc
, itype
, arg11
));
12758 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg00
, tem
);
12759 return fold_build2_loc (loc
, code
, type
, tem
,
12760 fold_convert_loc (loc
, itype
, arg10
));
12764 /* Attempt to simplify equality/inequality comparisons of complex
12765 values. Only lower the comparison if the result is known or
12766 can be simplified to a single scalar comparison. */
12767 if ((TREE_CODE (arg0
) == COMPLEX_EXPR
12768 || TREE_CODE (arg0
) == COMPLEX_CST
)
12769 && (TREE_CODE (arg1
) == COMPLEX_EXPR
12770 || TREE_CODE (arg1
) == COMPLEX_CST
))
12772 tree real0
, imag0
, real1
, imag1
;
12775 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
12777 real0
= TREE_OPERAND (arg0
, 0);
12778 imag0
= TREE_OPERAND (arg0
, 1);
12782 real0
= TREE_REALPART (arg0
);
12783 imag0
= TREE_IMAGPART (arg0
);
12786 if (TREE_CODE (arg1
) == COMPLEX_EXPR
)
12788 real1
= TREE_OPERAND (arg1
, 0);
12789 imag1
= TREE_OPERAND (arg1
, 1);
12793 real1
= TREE_REALPART (arg1
);
12794 imag1
= TREE_IMAGPART (arg1
);
12797 rcond
= fold_binary_loc (loc
, code
, type
, real0
, real1
);
12798 if (rcond
&& TREE_CODE (rcond
) == INTEGER_CST
)
12800 if (integer_zerop (rcond
))
12802 if (code
== EQ_EXPR
)
12803 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
12805 return fold_build2_loc (loc
, NE_EXPR
, type
, imag0
, imag1
);
12809 if (code
== NE_EXPR
)
12810 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
12812 return fold_build2_loc (loc
, EQ_EXPR
, type
, imag0
, imag1
);
12816 icond
= fold_binary_loc (loc
, code
, type
, imag0
, imag1
);
12817 if (icond
&& TREE_CODE (icond
) == INTEGER_CST
)
12819 if (integer_zerop (icond
))
12821 if (code
== EQ_EXPR
)
12822 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
12824 return fold_build2_loc (loc
, NE_EXPR
, type
, real0
, real1
);
12828 if (code
== NE_EXPR
)
12829 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
12831 return fold_build2_loc (loc
, EQ_EXPR
, type
, real0
, real1
);
12842 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
12843 if (tem
!= NULL_TREE
)
12846 /* Transform comparisons of the form X +- C CMP X. */
12847 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
12848 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
12849 && ((TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
12850 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
))))
12851 || (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
12852 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))))
12854 tree arg01
= TREE_OPERAND (arg0
, 1);
12855 enum tree_code code0
= TREE_CODE (arg0
);
12858 if (TREE_CODE (arg01
) == REAL_CST
)
12859 is_positive
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01
)) ? -1 : 1;
12861 is_positive
= tree_int_cst_sgn (arg01
);
12863 /* (X - c) > X becomes false. */
12864 if (code
== GT_EXPR
12865 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
12866 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
12868 if (TREE_CODE (arg01
) == INTEGER_CST
12869 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12870 fold_overflow_warning (("assuming signed overflow does not "
12871 "occur when assuming that (X - c) > X "
12872 "is always false"),
12873 WARN_STRICT_OVERFLOW_ALL
);
12874 return constant_boolean_node (0, type
);
12877 /* Likewise (X + c) < X becomes false. */
12878 if (code
== LT_EXPR
12879 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
12880 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
12882 if (TREE_CODE (arg01
) == INTEGER_CST
12883 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12884 fold_overflow_warning (("assuming signed overflow does not "
12885 "occur when assuming that "
12886 "(X + c) < X is always false"),
12887 WARN_STRICT_OVERFLOW_ALL
);
12888 return constant_boolean_node (0, type
);
12891 /* Convert (X - c) <= X to true. */
12892 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
)))
12894 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
12895 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
12897 if (TREE_CODE (arg01
) == INTEGER_CST
12898 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12899 fold_overflow_warning (("assuming signed overflow does not "
12900 "occur when assuming that "
12901 "(X - c) <= X is always true"),
12902 WARN_STRICT_OVERFLOW_ALL
);
12903 return constant_boolean_node (1, type
);
12906 /* Convert (X + c) >= X to true. */
12907 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
)))
12909 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
12910 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
12912 if (TREE_CODE (arg01
) == INTEGER_CST
12913 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12914 fold_overflow_warning (("assuming signed overflow does not "
12915 "occur when assuming that "
12916 "(X + c) >= X is always true"),
12917 WARN_STRICT_OVERFLOW_ALL
);
12918 return constant_boolean_node (1, type
);
12921 if (TREE_CODE (arg01
) == INTEGER_CST
)
12923 /* Convert X + c > X and X - c < X to true for integers. */
12924 if (code
== GT_EXPR
12925 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
12926 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
12928 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12929 fold_overflow_warning (("assuming signed overflow does "
12930 "not occur when assuming that "
12931 "(X + c) > X is always true"),
12932 WARN_STRICT_OVERFLOW_ALL
);
12933 return constant_boolean_node (1, type
);
12936 if (code
== LT_EXPR
12937 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
12938 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
12940 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12941 fold_overflow_warning (("assuming signed overflow does "
12942 "not occur when assuming that "
12943 "(X - c) < X is always true"),
12944 WARN_STRICT_OVERFLOW_ALL
);
12945 return constant_boolean_node (1, type
);
12948 /* Convert X + c <= X and X - c >= X to false for integers. */
12949 if (code
== LE_EXPR
12950 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
12951 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
12953 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12954 fold_overflow_warning (("assuming signed overflow does "
12955 "not occur when assuming that "
12956 "(X + c) <= X is always false"),
12957 WARN_STRICT_OVERFLOW_ALL
);
12958 return constant_boolean_node (0, type
);
12961 if (code
== GE_EXPR
12962 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
12963 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
12965 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12966 fold_overflow_warning (("assuming signed overflow does "
12967 "not occur when assuming that "
12968 "(X - c) >= X is always false"),
12969 WARN_STRICT_OVERFLOW_ALL
);
12970 return constant_boolean_node (0, type
);
12975 /* Comparisons with the highest or lowest possible integer of
12976 the specified precision will have known values. */
12978 tree arg1_type
= TREE_TYPE (arg1
);
12979 unsigned int width
= TYPE_PRECISION (arg1_type
);
12981 if (TREE_CODE (arg1
) == INTEGER_CST
12982 && width
<= 2 * HOST_BITS_PER_WIDE_INT
12983 && (INTEGRAL_TYPE_P (arg1_type
) || POINTER_TYPE_P (arg1_type
)))
12985 HOST_WIDE_INT signed_max_hi
;
12986 unsigned HOST_WIDE_INT signed_max_lo
;
12987 unsigned HOST_WIDE_INT max_hi
, max_lo
, min_hi
, min_lo
;
12989 if (width
<= HOST_BITS_PER_WIDE_INT
)
12991 signed_max_lo
= ((unsigned HOST_WIDE_INT
) 1 << (width
- 1))
12996 if (TYPE_UNSIGNED (arg1_type
))
12998 max_lo
= ((unsigned HOST_WIDE_INT
) 2 << (width
- 1)) - 1;
13004 max_lo
= signed_max_lo
;
13005 min_lo
= ((unsigned HOST_WIDE_INT
) -1 << (width
- 1));
13011 width
-= HOST_BITS_PER_WIDE_INT
;
13012 signed_max_lo
= -1;
13013 signed_max_hi
= ((unsigned HOST_WIDE_INT
) 1 << (width
- 1))
13018 if (TYPE_UNSIGNED (arg1_type
))
13020 max_hi
= ((unsigned HOST_WIDE_INT
) 2 << (width
- 1)) - 1;
13025 max_hi
= signed_max_hi
;
13026 min_hi
= ((unsigned HOST_WIDE_INT
) -1 << (width
- 1));
13030 if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
) == max_hi
13031 && TREE_INT_CST_LOW (arg1
) == max_lo
)
13035 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
13038 return fold_build2_loc (loc
, EQ_EXPR
, type
, op0
, op1
);
13041 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
13044 return fold_build2_loc (loc
, NE_EXPR
, type
, op0
, op1
);
13046 /* The GE_EXPR and LT_EXPR cases above are not normally
13047 reached because of previous transformations. */
13052 else if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
)
13054 && TREE_INT_CST_LOW (arg1
) == max_lo
- 1)
13058 arg1
= const_binop (PLUS_EXPR
, arg1
,
13059 build_int_cst (TREE_TYPE (arg1
), 1));
13060 return fold_build2_loc (loc
, EQ_EXPR
, type
,
13061 fold_convert_loc (loc
,
13062 TREE_TYPE (arg1
), arg0
),
13065 arg1
= const_binop (PLUS_EXPR
, arg1
,
13066 build_int_cst (TREE_TYPE (arg1
), 1));
13067 return fold_build2_loc (loc
, NE_EXPR
, type
,
13068 fold_convert_loc (loc
, TREE_TYPE (arg1
),
13074 else if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
)
13076 && TREE_INT_CST_LOW (arg1
) == min_lo
)
13080 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
13083 return fold_build2_loc (loc
, EQ_EXPR
, type
, op0
, op1
);
13086 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
13089 return fold_build2_loc (loc
, NE_EXPR
, type
, op0
, op1
);
13094 else if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
)
13096 && TREE_INT_CST_LOW (arg1
) == min_lo
+ 1)
13100 arg1
= const_binop (MINUS_EXPR
, arg1
, integer_one_node
);
13101 return fold_build2_loc (loc
, NE_EXPR
, type
,
13102 fold_convert_loc (loc
,
13103 TREE_TYPE (arg1
), arg0
),
13106 arg1
= const_binop (MINUS_EXPR
, arg1
, integer_one_node
);
13107 return fold_build2_loc (loc
, EQ_EXPR
, type
,
13108 fold_convert_loc (loc
, TREE_TYPE (arg1
),
13115 else if (TREE_INT_CST_HIGH (arg1
) == signed_max_hi
13116 && TREE_INT_CST_LOW (arg1
) == signed_max_lo
13117 && TYPE_UNSIGNED (arg1_type
)
13118 /* We will flip the signedness of the comparison operator
13119 associated with the mode of arg1, so the sign bit is
13120 specified by this mode. Check that arg1 is the signed
13121 max associated with this sign bit. */
13122 && width
== GET_MODE_BITSIZE (TYPE_MODE (arg1_type
))
13123 /* signed_type does not work on pointer types. */
13124 && INTEGRAL_TYPE_P (arg1_type
))
13126 /* The following case also applies to X < signed_max+1
13127 and X >= signed_max+1 because previous transformations. */
13128 if (code
== LE_EXPR
|| code
== GT_EXPR
)
13131 st
= signed_type_for (TREE_TYPE (arg1
));
13132 return fold_build2_loc (loc
,
13133 code
== LE_EXPR
? GE_EXPR
: LT_EXPR
,
13134 type
, fold_convert_loc (loc
, st
, arg0
),
13135 build_int_cst (st
, 0));
13141 /* If we are comparing an ABS_EXPR with a constant, we can
13142 convert all the cases into explicit comparisons, but they may
13143 well not be faster than doing the ABS and one comparison.
13144 But ABS (X) <= C is a range comparison, which becomes a subtraction
13145 and a comparison, and is probably faster. */
13146 if (code
== LE_EXPR
13147 && TREE_CODE (arg1
) == INTEGER_CST
13148 && TREE_CODE (arg0
) == ABS_EXPR
13149 && ! TREE_SIDE_EFFECTS (arg0
)
13150 && (0 != (tem
= negate_expr (arg1
)))
13151 && TREE_CODE (tem
) == INTEGER_CST
13152 && !TREE_OVERFLOW (tem
))
13153 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
13154 build2 (GE_EXPR
, type
,
13155 TREE_OPERAND (arg0
, 0), tem
),
13156 build2 (LE_EXPR
, type
,
13157 TREE_OPERAND (arg0
, 0), arg1
));
13159 /* Convert ABS_EXPR<x> >= 0 to true. */
13160 strict_overflow_p
= false;
13161 if (code
== GE_EXPR
13162 && (integer_zerop (arg1
)
13163 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
13164 && real_zerop (arg1
)))
13165 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
13167 if (strict_overflow_p
)
13168 fold_overflow_warning (("assuming signed overflow does not occur "
13169 "when simplifying comparison of "
13170 "absolute value and zero"),
13171 WARN_STRICT_OVERFLOW_CONDITIONAL
);
13172 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
13175 /* Convert ABS_EXPR<x> < 0 to false. */
13176 strict_overflow_p
= false;
13177 if (code
== LT_EXPR
13178 && (integer_zerop (arg1
) || real_zerop (arg1
))
13179 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
13181 if (strict_overflow_p
)
13182 fold_overflow_warning (("assuming signed overflow does not occur "
13183 "when simplifying comparison of "
13184 "absolute value and zero"),
13185 WARN_STRICT_OVERFLOW_CONDITIONAL
);
13186 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
13189 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13190 and similarly for >= into !=. */
13191 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
13192 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
13193 && TREE_CODE (arg1
) == LSHIFT_EXPR
13194 && integer_onep (TREE_OPERAND (arg1
, 0)))
13195 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
13196 build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
13197 TREE_OPERAND (arg1
, 1)),
13198 build_int_cst (TREE_TYPE (arg0
), 0));
13200 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
13201 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
13202 && CONVERT_EXPR_P (arg1
)
13203 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == LSHIFT_EXPR
13204 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0)))
13206 tem
= build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
13207 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1));
13208 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
13209 fold_convert_loc (loc
, TREE_TYPE (arg0
), tem
),
13210 build_int_cst (TREE_TYPE (arg0
), 0));
13215 case UNORDERED_EXPR
:
13223 if (TREE_CODE (arg0
) == REAL_CST
&& TREE_CODE (arg1
) == REAL_CST
)
13225 t1
= fold_relational_const (code
, type
, arg0
, arg1
);
13226 if (t1
!= NULL_TREE
)
13230 /* If the first operand is NaN, the result is constant. */
13231 if (TREE_CODE (arg0
) == REAL_CST
13232 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0
))
13233 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
13235 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
13236 ? integer_zero_node
13237 : integer_one_node
;
13238 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
13241 /* If the second operand is NaN, the result is constant. */
13242 if (TREE_CODE (arg1
) == REAL_CST
13243 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
13244 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
13246 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
13247 ? integer_zero_node
13248 : integer_one_node
;
13249 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
13252 /* Simplify unordered comparison of something with itself. */
13253 if ((code
== UNLE_EXPR
|| code
== UNGE_EXPR
|| code
== UNEQ_EXPR
)
13254 && operand_equal_p (arg0
, arg1
, 0))
13255 return constant_boolean_node (1, type
);
13257 if (code
== LTGT_EXPR
13258 && !flag_trapping_math
13259 && operand_equal_p (arg0
, arg1
, 0))
13260 return constant_boolean_node (0, type
);
13262 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13264 tree targ0
= strip_float_extensions (arg0
);
13265 tree targ1
= strip_float_extensions (arg1
);
13266 tree newtype
= TREE_TYPE (targ0
);
13268 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
13269 newtype
= TREE_TYPE (targ1
);
13271 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
13272 return fold_build2_loc (loc
, code
, type
,
13273 fold_convert_loc (loc
, newtype
, targ0
),
13274 fold_convert_loc (loc
, newtype
, targ1
));
13279 case COMPOUND_EXPR
:
13280 /* When pedantic, a compound expression can be neither an lvalue
13281 nor an integer constant expression. */
13282 if (TREE_SIDE_EFFECTS (arg0
) || TREE_CONSTANT (arg1
))
13284 /* Don't let (0, 0) be null pointer constant. */
13285 tem
= integer_zerop (arg1
) ? build1 (NOP_EXPR
, type
, arg1
)
13286 : fold_convert_loc (loc
, type
, arg1
);
13287 return pedantic_non_lvalue_loc (loc
, tem
);
13290 if ((TREE_CODE (arg0
) == REAL_CST
13291 && TREE_CODE (arg1
) == REAL_CST
)
13292 || (TREE_CODE (arg0
) == INTEGER_CST
13293 && TREE_CODE (arg1
) == INTEGER_CST
))
13294 return build_complex (type
, arg0
, arg1
);
13295 if (TREE_CODE (arg0
) == REALPART_EXPR
13296 && TREE_CODE (arg1
) == IMAGPART_EXPR
13297 && TREE_TYPE (TREE_OPERAND (arg0
, 0)) == type
13298 && operand_equal_p (TREE_OPERAND (arg0
, 0),
13299 TREE_OPERAND (arg1
, 0), 0))
13300 return omit_one_operand_loc (loc
, type
, TREE_OPERAND (arg0
, 0),
13301 TREE_OPERAND (arg1
, 0));
13305 /* An ASSERT_EXPR should never be passed to fold_binary. */
13306 gcc_unreachable ();
13310 } /* switch (code) */
13313 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13314 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13318 contains_label_1 (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
13320 switch (TREE_CODE (*tp
))
13326 *walk_subtrees
= 0;
13328 /* ... fall through ... */
13335 /* Return whether the sub-tree ST contains a label which is accessible from
13336 outside the sub-tree. */
13339 contains_label_p (tree st
)
13342 (walk_tree_without_duplicates (&st
, contains_label_1
, NULL
) != NULL_TREE
);
13345 /* Fold a ternary expression of code CODE and type TYPE with operands
13346 OP0, OP1, and OP2. Return the folded expression if folding is
13347 successful. Otherwise, return NULL_TREE. */
13350 fold_ternary_loc (location_t loc
, enum tree_code code
, tree type
,
13351 tree op0
, tree op1
, tree op2
)
13354 tree arg0
= NULL_TREE
, arg1
= NULL_TREE
, arg2
= NULL_TREE
;
13355 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
13357 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
13358 && TREE_CODE_LENGTH (code
) == 3);
13360 /* Strip any conversions that don't change the mode. This is safe
13361 for every expression, except for a comparison expression because
13362 its signedness is derived from its operands. So, in the latter
13363 case, only strip conversions that don't change the signedness.
13365 Note that this is done as an internal manipulation within the
13366 constant folder, in order to find the simplest representation of
13367 the arguments so that their form can be studied. In any cases,
13368 the appropriate type conversions should be put back in the tree
13369 that will get out of the constant folder. */
13390 case COMPONENT_REF
:
13391 if (TREE_CODE (arg0
) == CONSTRUCTOR
13392 && ! type_contains_placeholder_p (TREE_TYPE (arg0
)))
13394 unsigned HOST_WIDE_INT idx
;
13396 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0
), idx
, field
, value
)
13403 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13404 so all simple results must be passed through pedantic_non_lvalue. */
13405 if (TREE_CODE (arg0
) == INTEGER_CST
)
13407 tree unused_op
= integer_zerop (arg0
) ? op1
: op2
;
13408 tem
= integer_zerop (arg0
) ? op2
: op1
;
13409 /* Only optimize constant conditions when the selected branch
13410 has the same type as the COND_EXPR. This avoids optimizing
13411 away "c ? x : throw", where the throw has a void type.
13412 Avoid throwing away that operand which contains label. */
13413 if ((!TREE_SIDE_EFFECTS (unused_op
)
13414 || !contains_label_p (unused_op
))
13415 && (! VOID_TYPE_P (TREE_TYPE (tem
))
13416 || VOID_TYPE_P (type
)))
13417 return pedantic_non_lvalue_loc (loc
, tem
);
13420 if (operand_equal_p (arg1
, op2
, 0))
13421 return pedantic_omit_one_operand_loc (loc
, type
, arg1
, arg0
);
13423 /* If we have A op B ? A : C, we may be able to convert this to a
13424 simpler expression, depending on the operation and the values
13425 of B and C. Signed zeros prevent all of these transformations,
13426 for reasons given above each one.
13428 Also try swapping the arguments and inverting the conditional. */
13429 if (COMPARISON_CLASS_P (arg0
)
13430 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
13431 arg1
, TREE_OPERAND (arg0
, 1))
13432 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1
))))
13434 tem
= fold_cond_expr_with_comparison (loc
, type
, arg0
, op1
, op2
);
13439 if (COMPARISON_CLASS_P (arg0
)
13440 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
13442 TREE_OPERAND (arg0
, 1))
13443 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2
))))
13445 location_t loc0
= expr_location_or (arg0
, loc
);
13446 tem
= fold_truth_not_expr (loc0
, arg0
);
13447 if (tem
&& COMPARISON_CLASS_P (tem
))
13449 tem
= fold_cond_expr_with_comparison (loc
, type
, tem
, op2
, op1
);
13455 /* If the second operand is simpler than the third, swap them
13456 since that produces better jump optimization results. */
13457 if (truth_value_p (TREE_CODE (arg0
))
13458 && tree_swap_operands_p (op1
, op2
, false))
13460 location_t loc0
= expr_location_or (arg0
, loc
);
13461 /* See if this can be inverted. If it can't, possibly because
13462 it was a floating-point inequality comparison, don't do
13464 tem
= fold_truth_not_expr (loc0
, arg0
);
13466 return fold_build3_loc (loc
, code
, type
, tem
, op2
, op1
);
13469 /* Convert A ? 1 : 0 to simply A. */
13470 if (integer_onep (op1
)
13471 && integer_zerop (op2
)
13472 /* If we try to convert OP0 to our type, the
13473 call to fold will try to move the conversion inside
13474 a COND, which will recurse. In that case, the COND_EXPR
13475 is probably the best choice, so leave it alone. */
13476 && type
== TREE_TYPE (arg0
))
13477 return pedantic_non_lvalue_loc (loc
, arg0
);
13479 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13480 over COND_EXPR in cases such as floating point comparisons. */
13481 if (integer_zerop (op1
)
13482 && integer_onep (op2
)
13483 && truth_value_p (TREE_CODE (arg0
)))
13484 return pedantic_non_lvalue_loc (loc
,
13485 fold_convert_loc (loc
, type
,
13486 invert_truthvalue_loc (loc
,
13489 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13490 if (TREE_CODE (arg0
) == LT_EXPR
13491 && integer_zerop (TREE_OPERAND (arg0
, 1))
13492 && integer_zerop (op2
)
13493 && (tem
= sign_bit_p (TREE_OPERAND (arg0
, 0), arg1
)))
13495 /* sign_bit_p only checks ARG1 bits within A's precision.
13496 If <sign bit of A> has wider type than A, bits outside
13497 of A's precision in <sign bit of A> need to be checked.
13498 If they are all 0, this optimization needs to be done
13499 in unsigned A's type, if they are all 1 in signed A's type,
13500 otherwise this can't be done. */
13501 if (TYPE_PRECISION (TREE_TYPE (tem
))
13502 < TYPE_PRECISION (TREE_TYPE (arg1
))
13503 && TYPE_PRECISION (TREE_TYPE (tem
))
13504 < TYPE_PRECISION (type
))
13506 unsigned HOST_WIDE_INT mask_lo
;
13507 HOST_WIDE_INT mask_hi
;
13508 int inner_width
, outer_width
;
13511 inner_width
= TYPE_PRECISION (TREE_TYPE (tem
));
13512 outer_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
13513 if (outer_width
> TYPE_PRECISION (type
))
13514 outer_width
= TYPE_PRECISION (type
);
13516 if (outer_width
> HOST_BITS_PER_WIDE_INT
)
13518 mask_hi
= ((unsigned HOST_WIDE_INT
) -1
13519 >> (2 * HOST_BITS_PER_WIDE_INT
- outer_width
));
13525 mask_lo
= ((unsigned HOST_WIDE_INT
) -1
13526 >> (HOST_BITS_PER_WIDE_INT
- outer_width
));
13528 if (inner_width
> HOST_BITS_PER_WIDE_INT
)
13530 mask_hi
&= ~((unsigned HOST_WIDE_INT
) -1
13531 >> (HOST_BITS_PER_WIDE_INT
- inner_width
));
13535 mask_lo
&= ~((unsigned HOST_WIDE_INT
) -1
13536 >> (HOST_BITS_PER_WIDE_INT
- inner_width
));
13538 if ((TREE_INT_CST_HIGH (arg1
) & mask_hi
) == mask_hi
13539 && (TREE_INT_CST_LOW (arg1
) & mask_lo
) == mask_lo
)
13541 tem_type
= signed_type_for (TREE_TYPE (tem
));
13542 tem
= fold_convert_loc (loc
, tem_type
, tem
);
13544 else if ((TREE_INT_CST_HIGH (arg1
) & mask_hi
) == 0
13545 && (TREE_INT_CST_LOW (arg1
) & mask_lo
) == 0)
13547 tem_type
= unsigned_type_for (TREE_TYPE (tem
));
13548 tem
= fold_convert_loc (loc
, tem_type
, tem
);
13556 fold_convert_loc (loc
, type
,
13557 fold_build2_loc (loc
, BIT_AND_EXPR
,
13558 TREE_TYPE (tem
), tem
,
13559 fold_convert_loc (loc
,
13564 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13565 already handled above. */
13566 if (TREE_CODE (arg0
) == BIT_AND_EXPR
13567 && integer_onep (TREE_OPERAND (arg0
, 1))
13568 && integer_zerop (op2
)
13569 && integer_pow2p (arg1
))
13571 tree tem
= TREE_OPERAND (arg0
, 0);
13573 if (TREE_CODE (tem
) == RSHIFT_EXPR
13574 && TREE_CODE (TREE_OPERAND (tem
, 1)) == INTEGER_CST
13575 && (unsigned HOST_WIDE_INT
) tree_log2 (arg1
) ==
13576 TREE_INT_CST_LOW (TREE_OPERAND (tem
, 1)))
13577 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
13578 TREE_OPERAND (tem
, 0), arg1
);
13581 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13582 is probably obsolete because the first operand should be a
13583 truth value (that's why we have the two cases above), but let's
13584 leave it in until we can confirm this for all front-ends. */
13585 if (integer_zerop (op2
)
13586 && TREE_CODE (arg0
) == NE_EXPR
13587 && integer_zerop (TREE_OPERAND (arg0
, 1))
13588 && integer_pow2p (arg1
)
13589 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
13590 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
13591 arg1
, OEP_ONLY_CONST
))
13592 return pedantic_non_lvalue_loc (loc
,
13593 fold_convert_loc (loc
, type
,
13594 TREE_OPERAND (arg0
, 0)));
13596 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13597 if (integer_zerop (op2
)
13598 && truth_value_p (TREE_CODE (arg0
))
13599 && truth_value_p (TREE_CODE (arg1
)))
13600 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
13601 fold_convert_loc (loc
, type
, arg0
),
13604 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13605 if (integer_onep (op2
)
13606 && truth_value_p (TREE_CODE (arg0
))
13607 && truth_value_p (TREE_CODE (arg1
)))
13609 location_t loc0
= expr_location_or (arg0
, loc
);
13610 /* Only perform transformation if ARG0 is easily inverted. */
13611 tem
= fold_truth_not_expr (loc0
, arg0
);
13613 return fold_build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
13614 fold_convert_loc (loc
, type
, tem
),
13618 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13619 if (integer_zerop (arg1
)
13620 && truth_value_p (TREE_CODE (arg0
))
13621 && truth_value_p (TREE_CODE (op2
)))
13623 location_t loc0
= expr_location_or (arg0
, loc
);
13624 /* Only perform transformation if ARG0 is easily inverted. */
13625 tem
= fold_truth_not_expr (loc0
, arg0
);
13627 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
13628 fold_convert_loc (loc
, type
, tem
),
13632 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13633 if (integer_onep (arg1
)
13634 && truth_value_p (TREE_CODE (arg0
))
13635 && truth_value_p (TREE_CODE (op2
)))
13636 return fold_build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
13637 fold_convert_loc (loc
, type
, arg0
),
13643 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13644 of fold_ternary on them. */
13645 gcc_unreachable ();
13647 case BIT_FIELD_REF
:
13648 if ((TREE_CODE (arg0
) == VECTOR_CST
13649 || (TREE_CODE (arg0
) == CONSTRUCTOR
&& TREE_CONSTANT (arg0
)))
13650 && type
== TREE_TYPE (TREE_TYPE (arg0
)))
13652 unsigned HOST_WIDE_INT width
= tree_low_cst (arg1
, 1);
13653 unsigned HOST_WIDE_INT idx
= tree_low_cst (op2
, 1);
13656 && simple_cst_equal (arg1
, TYPE_SIZE (type
)) == 1
13657 && (idx
% width
) == 0
13658 && (idx
= idx
/ width
)
13659 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)))
13661 tree elements
= NULL_TREE
;
13663 if (TREE_CODE (arg0
) == VECTOR_CST
)
13664 elements
= TREE_VECTOR_CST_ELTS (arg0
);
13667 unsigned HOST_WIDE_INT idx
;
13670 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0
), idx
, value
)
13671 elements
= tree_cons (NULL_TREE
, value
, elements
);
13673 while (idx
-- > 0 && elements
)
13674 elements
= TREE_CHAIN (elements
);
13676 return TREE_VALUE (elements
);
13678 return build_zero_cst (type
);
13682 /* A bit-field-ref that referenced the full argument can be stripped. */
13683 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
13684 && TYPE_PRECISION (TREE_TYPE (arg0
)) == tree_low_cst (arg1
, 1)
13685 && integer_zerop (op2
))
13686 return fold_convert_loc (loc
, type
, arg0
);
13691 /* For integers we can decompose the FMA if possible. */
13692 if (TREE_CODE (arg0
) == INTEGER_CST
13693 && TREE_CODE (arg1
) == INTEGER_CST
)
13694 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
13695 const_binop (MULT_EXPR
, arg0
, arg1
), arg2
);
13696 if (integer_zerop (arg2
))
13697 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
13699 return fold_fma (loc
, type
, arg0
, arg1
, arg2
);
13703 } /* switch (code) */
13706 /* Perform constant folding and related simplification of EXPR.
13707 The related simplifications include x*1 => x, x*0 => 0, etc.,
13708 and application of the associative law.
13709 NOP_EXPR conversions may be removed freely (as long as we
13710 are careful not to change the type of the overall expression).
13711 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13712 but we can constant-fold them if they have constant operands. */
/* fold: the main folding entry point.  Dispatches on the arity of
   EXPR's tree code (TREE_CODE_LENGTH) to fold_unary_loc /
   fold_binary_loc / fold_ternary_loc, and returns EXPR unchanged when
   no simplification applies.  Under ENABLE_FOLD_CHECKING the real
   body is compiled as fold_1 and called through the fold(x) macro so
   a checksumming wrapper can verify fold does not mutate its input.
   NOTE(review): this excerpt appears to have lost lines (braces,
   case labels) in extraction; comments describe only visible logic.  */
13714 #ifdef ENABLE_FOLD_CHECKING
13715 # define fold(x) fold_1 (x)
13716 static tree
fold_1 (tree
);
13722 const tree t
= expr
;
13723 enum tree_code code
= TREE_CODE (t
);
13724 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
13726 location_t loc
= EXPR_LOCATION (expr
);
13728 /* Return right away if a constant. */
13729 if (kind
== tcc_constant
)
13732 /* CALL_EXPR-like objects with variable numbers of operands are
13733 treated specially. */
13734 if (kind
== tcc_vl_exp
)
13736 if (code
== CALL_EXPR
)
/* Fold the call; fall back to the original expression when
   fold_call_expr produces nothing.  */
13738 tem
= fold_call_expr (loc
, expr
, false);
13739 return tem
? tem
: expr
;
13744 if (IS_EXPR_CODE_CLASS (kind
))
13746 tree type
= TREE_TYPE (t
);
13747 tree op0
, op1
, op2
;
/* Dispatch on operand count: 1 -> unary, 2 -> binary, 3 -> ternary.  */
13749 switch (TREE_CODE_LENGTH (code
))
13752 op0
= TREE_OPERAND (t
, 0);
13753 tem
= fold_unary_loc (loc
, code
, type
, op0
);
13754 return tem
? tem
: expr
;
13756 op0
= TREE_OPERAND (t
, 0);
13757 op1
= TREE_OPERAND (t
, 1);
13758 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
13759 return tem
? tem
: expr
;
13761 op0
= TREE_OPERAND (t
, 0);
13762 op1
= TREE_OPERAND (t
, 1);
13763 op2
= TREE_OPERAND (t
, 2);
13764 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
13765 return tem
? tem
: expr
;
/* Constant ARRAY_REF into a CONSTRUCTOR with a known integer index:
   binary-search the (sorted) constructor elements for a matching
   index or RANGE_EXPR span and return the stored value directly.  */
13775 tree op0
= TREE_OPERAND (t
, 0);
13776 tree op1
= TREE_OPERAND (t
, 1);
13778 if (TREE_CODE (op1
) == INTEGER_CST
13779 && TREE_CODE (op0
) == CONSTRUCTOR
13780 && ! type_contains_placeholder_p (TREE_TYPE (op0
)))
13782 VEC(constructor_elt
,gc
) *elts
= CONSTRUCTOR_ELTS (op0
);
13783 unsigned HOST_WIDE_INT end
= VEC_length (constructor_elt
, elts
);
13784 unsigned HOST_WIDE_INT begin
= 0;
13786 /* Find a matching index by means of a binary search. */
13787 while (begin
!= end
)
13789 unsigned HOST_WIDE_INT middle
= (begin
+ end
) / 2;
13790 tree index
= VEC_index (constructor_elt
, elts
, middle
)->index
;
13792 if (TREE_CODE (index
) == INTEGER_CST
13793 && tree_int_cst_lt (index
, op1
))
13794 begin
= middle
+ 1;
13795 else if (TREE_CODE (index
) == INTEGER_CST
13796 && tree_int_cst_lt (op1
, index
))
13798 else if (TREE_CODE (index
) == RANGE_EXPR
13799 && tree_int_cst_lt (TREE_OPERAND (index
, 1), op1
))
13800 begin
= middle
+ 1;
13801 else if (TREE_CODE (index
) == RANGE_EXPR
13802 && tree_int_cst_lt (op1
, TREE_OPERAND (index
, 0)))
13805 return VEC_index (constructor_elt
, elts
, middle
)->value
;
/* CONST_DECL: fold its initializer instead.  */
13813 return fold (DECL_INITIAL (t
));
13817 } /* switch (code) */
13820 #ifdef ENABLE_FOLD_CHECKING
/* Forward declarations for the fold-checking machinery below.  */
13823 static void fold_checksum_tree (const_tree
, struct md5_ctx
*, htab_t
);
13824 static void fold_check_failed (const_tree
, const_tree
);
13825 void print_fold_checksum (const_tree
);
13827 /* When --enable-checking=fold, compute a digest of expr before
13828 and after actual fold call to see if fold did not accidentally
13829 change original expr. */
/* Checking wrapper for fold: MD5-checksum EXPR, run the real fold_1,
   re-checksum, and abort via fold_check_failed on any difference.
   NOTE(review): the function header line is missing from this
   excerpt — presumably `tree fold (tree expr)`; confirm upstream.  */
13835 struct md5_ctx ctx
;
13836 unsigned char checksum_before
[16], checksum_after
[16];
13839 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
13840 md5_init_ctx (&ctx
);
13841 fold_checksum_tree (expr
, &ctx
, ht
);
13842 md5_finish_ctx (&ctx
, checksum_before
);
/* Run the real folder.  */
13845 ret
= fold_1 (expr
);
13847 md5_init_ctx (&ctx
);
13848 fold_checksum_tree (expr
, &ctx
, ht
);
13849 md5_finish_ctx (&ctx
, checksum_after
);
13852 if (memcmp (checksum_before
, checksum_after
, 16))
13853 fold_check_failed (expr
, ret
);
/* Print the MD5 checksum of EXPR (as computed by fold_checksum_tree)
   to stderr as 16 hex byte pairs followed by a newline.  Debugging
   aid for ENABLE_FOLD_CHECKING builds.  */
13859 print_fold_checksum (const_tree expr
)
13861 struct md5_ctx ctx
;
13862 unsigned char checksum
[16], cnt
;
13865 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
13866 md5_init_ctx (&ctx
);
13867 fold_checksum_tree (expr
, &ctx
, ht
);
13868 md5_finish_ctx (&ctx
, checksum
);
13870 for (cnt
= 0; cnt
< 16; ++cnt
)
13871 fprintf (stderr
, "%02x", checksum
[cnt
]);
13872 putc ('\n', stderr
);
/* Report (via internal_error, which aborts the compiler) that fold
   modified its input tree.  Both arguments are unused; they exist so
   a debugger stopped here can inspect the offending trees.  */
13876 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED
, const_tree ret ATTRIBUTE_UNUSED
)
13878 internal_error ("fold check: original tree changed by fold");
/* Recursively feed the bytes of EXPR (and the trees it references)
   into the MD5 context CTX.  HT records already-visited nodes so
   shared subtrees and cycles are hashed only once.  Fields that fold
   is allowed to change (DECL_ASSEMBLER_NAME, type caches/variants)
   are masked out by hashing a scrubbed local copy in BUF.
   NOTE(review): this excerpt is missing lines (case labels, braces,
   the recursive_label target); comments cover only visible logic.  */
13882 fold_checksum_tree (const_tree expr
, struct md5_ctx
*ctx
, htab_t ht
)
13885 enum tree_code code
;
13886 union tree_node buf
;
/* Sanity-check that BUF (a union tree_node) is large enough to hold
   a copy of any node we scrub below.  */
13891 gcc_assert ((sizeof (struct tree_exp
) + 5 * sizeof (tree
)
13892 <= sizeof (struct tree_function_decl
))
13893 && sizeof (struct tree_type
) <= sizeof (struct tree_function_decl
));
/* Visited-set lookup: record EXPR so it is hashed at most once.  */
13896 slot
= (void **) htab_find_slot (ht
, expr
, INSERT
);
13899 *slot
= CONST_CAST_TREE (expr
);
13900 code
= TREE_CODE (expr
);
13901 if (TREE_CODE_CLASS (code
) == tcc_declaration
13902 && DECL_ASSEMBLER_NAME_SET_P (expr
))
13904 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13905 memcpy ((char *) &buf
, expr
, tree_size (expr
));
13906 SET_DECL_ASSEMBLER_NAME ((tree
)&buf
, NULL
);
13907 expr
= (tree
) &buf
;
13909 else if (TREE_CODE_CLASS (code
) == tcc_type
13910 && (TYPE_POINTER_TO (expr
)
13911 || TYPE_REFERENCE_TO (expr
)
13912 || TYPE_CACHED_VALUES_P (expr
)
13913 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr
)
13914 || TYPE_NEXT_VARIANT (expr
)))
13916 /* Allow these fields to be modified. */
13918 memcpy ((char *) &buf
, expr
, tree_size (expr
));
13919 expr
= tmp
= (tree
) &buf
;
13920 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp
) = 0;
13921 TYPE_POINTER_TO (tmp
) = NULL
;
13922 TYPE_REFERENCE_TO (tmp
) = NULL
;
13923 TYPE_NEXT_VARIANT (tmp
) = NULL
;
13924 if (TYPE_CACHED_VALUES_P (tmp
))
13926 TYPE_CACHED_VALUES_P (tmp
) = 0;
13927 TYPE_CACHED_VALUES (tmp
) = NULL
;
/* Hash the (possibly scrubbed) node bytes, then recurse into the
   trees it points at.  */
13930 md5_process_bytes (expr
, tree_size (expr
), ctx
);
13931 fold_checksum_tree (TREE_TYPE (expr
), ctx
, ht
);
13932 if (TREE_CODE_CLASS (code
) != tcc_type
13933 && TREE_CODE_CLASS (code
) != tcc_declaration
13934 && code
!= TREE_LIST
13935 && code
!= SSA_NAME
13936 && CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
13937 fold_checksum_tree (TREE_CHAIN (expr
), ctx
, ht
);
/* Per-class recursion into child trees.  */
13938 switch (TREE_CODE_CLASS (code
))
13944 md5_process_bytes (TREE_STRING_POINTER (expr
),
13945 TREE_STRING_LENGTH (expr
), ctx
);
13948 fold_checksum_tree (TREE_REALPART (expr
), ctx
, ht
);
13949 fold_checksum_tree (TREE_IMAGPART (expr
), ctx
, ht
);
13952 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr
), ctx
, ht
);
13958 case tcc_exceptional
:
13962 fold_checksum_tree (TREE_PURPOSE (expr
), ctx
, ht
);
13963 fold_checksum_tree (TREE_VALUE (expr
), ctx
, ht
);
13964 expr
= TREE_CHAIN (expr
);
13965 goto recursive_label
;
13968 for (i
= 0; i
< TREE_VEC_LENGTH (expr
); ++i
)
13969 fold_checksum_tree (TREE_VEC_ELT (expr
, i
), ctx
, ht
);
13975 case tcc_expression
:
13976 case tcc_reference
:
13977 case tcc_comparison
:
13980 case tcc_statement
:
13982 len
= TREE_OPERAND_LENGTH (expr
);
13983 for (i
= 0; i
< len
; ++i
)
13984 fold_checksum_tree (TREE_OPERAND (expr
, i
), ctx
, ht
);
13986 case tcc_declaration
:
13987 fold_checksum_tree (DECL_NAME (expr
), ctx
, ht
);
13988 fold_checksum_tree (DECL_CONTEXT (expr
), ctx
, ht
);
13989 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_COMMON
))
13991 fold_checksum_tree (DECL_SIZE (expr
), ctx
, ht
);
13992 fold_checksum_tree (DECL_SIZE_UNIT (expr
), ctx
, ht
);
13993 fold_checksum_tree (DECL_INITIAL (expr
), ctx
, ht
);
13994 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr
), ctx
, ht
);
13995 fold_checksum_tree (DECL_ATTRIBUTES (expr
), ctx
, ht
);
13997 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_WITH_VIS
))
13998 fold_checksum_tree (DECL_SECTION_NAME (expr
), ctx
, ht
);
14000 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_NON_COMMON
))
14002 fold_checksum_tree (DECL_VINDEX (expr
), ctx
, ht
);
14003 fold_checksum_tree (DECL_RESULT_FLD (expr
), ctx
, ht
);
14004 fold_checksum_tree (DECL_ARGUMENT_FLD (expr
), ctx
, ht
);
/* tcc_type: hash the type's constituent trees.  */
14008 if (TREE_CODE (expr
) == ENUMERAL_TYPE
)
14009 fold_checksum_tree (TYPE_VALUES (expr
), ctx
, ht
);
14010 fold_checksum_tree (TYPE_SIZE (expr
), ctx
, ht
);
14011 fold_checksum_tree (TYPE_SIZE_UNIT (expr
), ctx
, ht
);
14012 fold_checksum_tree (TYPE_ATTRIBUTES (expr
), ctx
, ht
);
14013 fold_checksum_tree (TYPE_NAME (expr
), ctx
, ht
);
14014 if (INTEGRAL_TYPE_P (expr
)
14015 || SCALAR_FLOAT_TYPE_P (expr
))
14017 fold_checksum_tree (TYPE_MIN_VALUE (expr
), ctx
, ht
);
14018 fold_checksum_tree (TYPE_MAX_VALUE (expr
), ctx
, ht
);
14020 fold_checksum_tree (TYPE_MAIN_VARIANT (expr
), ctx
, ht
);
14021 if (TREE_CODE (expr
) == RECORD_TYPE
14022 || TREE_CODE (expr
) == UNION_TYPE
14023 || TREE_CODE (expr
) == QUAL_UNION_TYPE
)
14024 fold_checksum_tree (TYPE_BINFO (expr
), ctx
, ht
);
14025 fold_checksum_tree (TYPE_CONTEXT (expr
), ctx
, ht
);
14032 /* Helper function for outputting the checksum of a tree T. When
14033 debugging with gdb, you can "define mynext" to be "next" followed
14034 by "call debug_fold_checksum (op0)", then just trace down till the
/* Debugger helper: print the MD5 checksum of T to stderr as 16
   decimal bytes.  Unlike print_fold_checksum this uses "%d " per
   byte.  Intended to be called interactively from gdb.  */
14037 DEBUG_FUNCTION
void
14038 debug_fold_checksum (const_tree t
)
14041 unsigned char checksum
[16];
14042 struct md5_ctx ctx
;
14043 htab_t ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
14045 md5_init_ctx (&ctx
);
14046 fold_checksum_tree (t
, &ctx
, ht
);
14047 md5_finish_ctx (&ctx
, checksum
);
14050 for (i
= 0; i
< 16; i
++)
14051 fprintf (stderr
, "%d ", checksum
[i
]);
14053 fprintf (stderr
, "\n");
14058 /* Fold a unary tree expression with code CODE of type TYPE with an
14059 operand OP0. LOC is the location of the resulting expression.
14060 Return a folded expression if successful. Otherwise, return a tree
14061 expression with code CODE of type TYPE with an operand OP0. */
/* Build a unary expression (CODE TYPE OP0) at LOC, folding it first
   via fold_unary_loc and falling back to build1_stat_loc when no
   simplification applies.  Under ENABLE_FOLD_CHECKING, checksums OP0
   before and after to verify folding did not mutate the operand.  */
14064 fold_build1_stat_loc (location_t loc
,
14065 enum tree_code code
, tree type
, tree op0 MEM_STAT_DECL
)
14068 #ifdef ENABLE_FOLD_CHECKING
14069 unsigned char checksum_before
[16], checksum_after
[16];
14070 struct md5_ctx ctx
;
14073 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
14074 md5_init_ctx (&ctx
);
14075 fold_checksum_tree (op0
, &ctx
, ht
);
14076 md5_finish_ctx (&ctx
, checksum_before
);
/* Try to fold; otherwise build the expression node verbatim.  */
14080 tem
= fold_unary_loc (loc
, code
, type
, op0
);
14082 tem
= build1_stat_loc (loc
, code
, type
, op0 PASS_MEM_STAT
);
14084 #ifdef ENABLE_FOLD_CHECKING
14085 md5_init_ctx (&ctx
);
14086 fold_checksum_tree (op0
, &ctx
, ht
);
14087 md5_finish_ctx (&ctx
, checksum_after
);
14090 if (memcmp (checksum_before
, checksum_after
, 16))
14091 fold_check_failed (op0
, tem
);
14096 /* Fold a binary tree expression with code CODE of type TYPE with
14097 operands OP0 and OP1. LOC is the location of the resulting
14098 expression. Return a folded expression if successful. Otherwise,
14099 return a tree expression with code CODE of type TYPE with operands
/* Build a binary expression (CODE TYPE OP0 OP1) at LOC, folding via
   fold_binary_loc and falling back to build2_stat_loc.  Under
   ENABLE_FOLD_CHECKING, checksums both operands before and after to
   verify folding did not mutate them.  */
14103 fold_build2_stat_loc (location_t loc
,
14104 enum tree_code code
, tree type
, tree op0
, tree op1
14108 #ifdef ENABLE_FOLD_CHECKING
14109 unsigned char checksum_before_op0
[16],
14110 checksum_before_op1
[16],
14111 checksum_after_op0
[16],
14112 checksum_after_op1
[16];
14113 struct md5_ctx ctx
;
14116 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
14117 md5_init_ctx (&ctx
);
14118 fold_checksum_tree (op0
, &ctx
, ht
);
14119 md5_finish_ctx (&ctx
, checksum_before_op0
);
14122 md5_init_ctx (&ctx
);
14123 fold_checksum_tree (op1
, &ctx
, ht
);
14124 md5_finish_ctx (&ctx
, checksum_before_op1
);
/* Try to fold; otherwise build the expression node verbatim.  */
14128 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
14130 tem
= build2_stat_loc (loc
, code
, type
, op0
, op1 PASS_MEM_STAT
);
14132 #ifdef ENABLE_FOLD_CHECKING
14133 md5_init_ctx (&ctx
);
14134 fold_checksum_tree (op0
, &ctx
, ht
);
14135 md5_finish_ctx (&ctx
, checksum_after_op0
);
14138 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
14139 fold_check_failed (op0
, tem
);
14141 md5_init_ctx (&ctx
);
14142 fold_checksum_tree (op1
, &ctx
, ht
);
14143 md5_finish_ctx (&ctx
, checksum_after_op1
);
14146 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
14147 fold_check_failed (op1
, tem
);
14152 /* Fold a ternary tree expression with code CODE of type TYPE with
14153 operands OP0, OP1, and OP2. Return a folded expression if
14154 successful. Otherwise, return a tree expression with code CODE of
14155 type TYPE with operands OP0, OP1, and OP2. */
/* Build a ternary expression (CODE TYPE OP0 OP1 OP2) at LOC, folding
   via fold_ternary_loc and falling back to build3_stat_loc.  Rejects
   variable-length (tcc_vl_exp) codes by assertion.  Under
   ENABLE_FOLD_CHECKING, checksums all three operands before/after.  */
14158 fold_build3_stat_loc (location_t loc
, enum tree_code code
, tree type
,
14159 tree op0
, tree op1
, tree op2 MEM_STAT_DECL
)
14162 #ifdef ENABLE_FOLD_CHECKING
14163 unsigned char checksum_before_op0
[16],
14164 checksum_before_op1
[16],
14165 checksum_before_op2
[16],
14166 checksum_after_op0
[16],
14167 checksum_after_op1
[16],
14168 checksum_after_op2
[16];
14169 struct md5_ctx ctx
;
14172 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
14173 md5_init_ctx (&ctx
);
14174 fold_checksum_tree (op0
, &ctx
, ht
);
14175 md5_finish_ctx (&ctx
, checksum_before_op0
);
14178 md5_init_ctx (&ctx
);
14179 fold_checksum_tree (op1
, &ctx
, ht
);
14180 md5_finish_ctx (&ctx
, checksum_before_op1
);
14183 md5_init_ctx (&ctx
);
14184 fold_checksum_tree (op2
, &ctx
, ht
);
14185 md5_finish_ctx (&ctx
, checksum_before_op2
);
/* CALL_EXPRs are no longer ternary; they must go through
   fold_build_call_array_loc instead.  */
14189 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
14190 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
14192 tem
= build3_stat_loc (loc
, code
, type
, op0
, op1
, op2 PASS_MEM_STAT
);
14194 #ifdef ENABLE_FOLD_CHECKING
14195 md5_init_ctx (&ctx
);
14196 fold_checksum_tree (op0
, &ctx
, ht
);
14197 md5_finish_ctx (&ctx
, checksum_after_op0
);
14200 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
14201 fold_check_failed (op0
, tem
);
14203 md5_init_ctx (&ctx
);
14204 fold_checksum_tree (op1
, &ctx
, ht
);
14205 md5_finish_ctx (&ctx
, checksum_after_op1
);
14208 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
14209 fold_check_failed (op1
, tem
);
14211 md5_init_ctx (&ctx
);
14212 fold_checksum_tree (op2
, &ctx
, ht
);
14213 md5_finish_ctx (&ctx
, checksum_after_op2
);
14216 if (memcmp (checksum_before_op2
, checksum_after_op2
, 16))
14217 fold_check_failed (op2
, tem
);
14222 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14223 arguments in ARGARRAY, and a null static chain.
14224 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14225 of type TYPE from the given operands as constructed by build_call_array. */
/* Build a CALL_EXPR of type TYPE calling FN with the NARGS arguments
   in ARGARRAY (null static chain), folding via
   fold_builtin_call_array.  Under ENABLE_FOLD_CHECKING, checksums FN
   and the whole argument list before and after folding.  */
14228 fold_build_call_array_loc (location_t loc
, tree type
, tree fn
,
14229 int nargs
, tree
*argarray
)
14232 #ifdef ENABLE_FOLD_CHECKING
14233 unsigned char checksum_before_fn
[16],
14234 checksum_before_arglist
[16],
14235 checksum_after_fn
[16],
14236 checksum_after_arglist
[16];
14237 struct md5_ctx ctx
;
14241 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
14242 md5_init_ctx (&ctx
);
14243 fold_checksum_tree (fn
, &ctx
, ht
);
14244 md5_finish_ctx (&ctx
, checksum_before_fn
);
/* One digest covers the entire argument list.  */
14247 md5_init_ctx (&ctx
);
14248 for (i
= 0; i
< nargs
; i
++)
14249 fold_checksum_tree (argarray
[i
], &ctx
, ht
);
14250 md5_finish_ctx (&ctx
, checksum_before_arglist
);
14254 tem
= fold_builtin_call_array (loc
, type
, fn
, nargs
, argarray
);
14256 #ifdef ENABLE_FOLD_CHECKING
14257 md5_init_ctx (&ctx
);
14258 fold_checksum_tree (fn
, &ctx
, ht
);
14259 md5_finish_ctx (&ctx
, checksum_after_fn
);
14262 if (memcmp (checksum_before_fn
, checksum_after_fn
, 16))
14263 fold_check_failed (fn
, tem
);
14265 md5_init_ctx (&ctx
);
14266 for (i
= 0; i
< nargs
; i
++)
14267 fold_checksum_tree (argarray
[i
], &ctx
, ht
);
14268 md5_finish_ctx (&ctx
, checksum_after_arglist
);
14271 if (memcmp (checksum_before_arglist
, checksum_after_arglist
, 16))
14272 fold_check_failed (NULL_TREE
, tem
);
14277 /* Perform constant folding and related simplification of initializer
14278 expression EXPR. These behave identically to "fold_buildN" but ignore
14279 potential run-time traps and exceptions that fold must preserve. */
/* START_FOLD_INIT saves the FP/trapping flags, clears them, and sets
   folding_initializer, so the fold_buildN_initializer_loc wrappers
   below can fold without preserving run-time traps/rounding.  No
   comments may be placed inside the continuations without breaking
   the macros.  NOTE(review): a line (presumably `flag_trapv = 0;`)
   appears to be missing from this excerpt — confirm upstream.  */
14281 #define START_FOLD_INIT \
14282 int saved_signaling_nans = flag_signaling_nans;\
14283 int saved_trapping_math = flag_trapping_math;\
14284 int saved_rounding_math = flag_rounding_math;\
14285 int saved_trapv = flag_trapv;\
14286 int saved_folding_initializer = folding_initializer;\
14287 flag_signaling_nans = 0;\
14288 flag_trapping_math = 0;\
14289 flag_rounding_math = 0;\
14291 folding_initializer = 1;
/* END_FOLD_INIT restores everything START_FOLD_INIT saved.  */
14293 #define END_FOLD_INIT \
14294 flag_signaling_nans = saved_signaling_nans;\
14295 flag_trapping_math = saved_trapping_math;\
14296 flag_rounding_math = saved_rounding_math;\
14297 flag_trapv = saved_trapv;\
14298 folding_initializer = saved_folding_initializer;
/* Like fold_build1_loc, but with trap/rounding flags suppressed via
   START_FOLD_INIT/END_FOLD_INIT (initializer-context folding).  */
14301 fold_build1_initializer_loc (location_t loc
, enum tree_code code
,
14302 tree type
, tree op
)
14307 result
= fold_build1_loc (loc
, code
, type
, op
);
/* Like fold_build2_loc, but folding in initializer context (traps
   and rounding suppressed around the call).  */
14314 fold_build2_initializer_loc (location_t loc
, enum tree_code code
,
14315 tree type
, tree op0
, tree op1
)
14320 result
= fold_build2_loc (loc
, code
, type
, op0
, op1
);
/* Like fold_build3_loc, but folding in initializer context (traps
   and rounding suppressed around the call).  */
14327 fold_build3_initializer_loc (location_t loc
, enum tree_code code
,
14328 tree type
, tree op0
, tree op1
, tree op2
)
14333 result
= fold_build3_loc (loc
, code
, type
, op0
, op1
, op2
);
/* Like fold_build_call_array_loc, but folding in initializer context
   (traps and rounding suppressed around the call).  */
14340 fold_build_call_array_initializer_loc (location_t loc
, tree type
, tree fn
,
14341 int nargs
, tree
*argarray
)
14346 result
= fold_build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
14352 #undef START_FOLD_INIT
14353 #undef END_FOLD_INIT
14355 /* Determine if first argument is a multiple of second argument. Return 0 if
14356 it is not, or we cannot easily determined it to be.
14358 An example of the sort of thing we care about (at this point; this routine
14359 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14360 fold cases do now) is discovering that
14362 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14368 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14370 This code also handles discovering that
14372 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14374 is a multiple of 8 so we don't have to worry about dealing with a
14375 possible remainder.
14377 Note that we *look* inside a SAVE_EXPR only to determine how it was
14378 calculated; it is not safe for fold to do much of anything else with the
14379 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14380 at run time. For example, the latter example above *cannot* be implemented
14381 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14382 evaluation time of the original SAVE_EXPR is not necessarily the same at
14383 the time the new expression is evaluated. The only optimization of this
14384 sort that would be valid is changing
14386 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14390 SAVE_EXPR (I) * SAVE_EXPR (J)
14392 (where the same SAVE_EXPR (J) is used in the original and the
14393 transformed version). */
/* Return nonzero when TOP is known to be a multiple of BOTTOM
   (both interpreted in TYPE, which must be an INTEGER_TYPE); return
   0 when it is not, or when we cannot easily tell.  Recurses through
   BIT_AND_EXPR / MULT_EXPR / shifts / conversions / SAVE_EXPR per the
   big comment above this function.  NOTE(review): several case
   labels and return lines are missing from this excerpt.  */
14396 multiple_of_p (tree type
, const_tree top
, const_tree bottom
)
/* Identical operands: trivially a multiple.  */
14398 if (operand_equal_p (top
, bottom
, 0))
14401 if (TREE_CODE (type
) != INTEGER_TYPE
)
14404 switch (TREE_CODE (top
))
14407 /* Bitwise and provides a power of two multiple. If the mask is
14408 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14409 if (!integer_pow2p (bottom
))
/* MULT_EXPR: a product is a multiple if either factor is.  */
14414 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
14415 || multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
/* PLUS/MINUS: both operands must be multiples.  */
14419 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
14420 && multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
/* LSHIFT_EXPR by a constant: rewrite as multiplication by 1<<N.  */
14423 if (TREE_CODE (TREE_OPERAND (top
, 1)) == INTEGER_CST
)
14427 op1
= TREE_OPERAND (top
, 1);
14428 /* const_binop may not detect overflow correctly,
14429 so check for it explicitly here. */
14430 if (TYPE_PRECISION (TREE_TYPE (size_one_node
))
14431 > TREE_INT_CST_LOW (op1
)
14432 && TREE_INT_CST_HIGH (op1
) == 0
14433 && 0 != (t1
= fold_convert (type
,
14434 const_binop (LSHIFT_EXPR
,
14437 && !TREE_OVERFLOW (t1
))
14438 return multiple_of_p (type
, t1
, bottom
);
14443 /* Can't handle conversions from non-integral or wider integral type. */
14444 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top
, 0))) != INTEGER_TYPE
)
14445 || (TYPE_PRECISION (type
)
14446 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top
, 0)))))
14449 /* .. fall through ... */
/* SAVE_EXPR / conversions: look through to the operand.  */
14452 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
);
14455 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
)
14456 && multiple_of_p (type
, TREE_OPERAND (top
, 2), bottom
));
/* INTEGER_CST: decide by exact division, guarding zero and sign.  */
14459 if (TREE_CODE (bottom
) != INTEGER_CST
14460 || integer_zerop (bottom
)
14461 || (TYPE_UNSIGNED (type
)
14462 && (tree_int_cst_sgn (top
) < 0
14463 || tree_int_cst_sgn (bottom
) < 0)))
14465 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR
,
14473 /* Return true if CODE or TYPE is known to be non-negative. */
/* Return true if CODE with result TYPE is known non-negative purely
   from the code/type: truth-valued codes yield 0 or 1 except for a
   signed 1-bit type, whose values are 0 and -1.  */
14476 tree_simple_nonnegative_warnv_p (enum tree_code code
, tree type
)
14478 if ((TYPE_PRECISION (type
) != 1 || TYPE_UNSIGNED (type
))
14479 && truth_value_p (code
))
14480 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14481 have a signed:1 type (where the value is -1 and 0). */
14486 /* Return true if (CODE OP0) is known to be non-negative. If the return
14487 value is based on the assumption that signed overflow is undefined,
14488 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14489 *STRICT_OVERFLOW_P. */
/* Return true if (CODE OP0), of result TYPE, is known non-negative.
   Sets *STRICT_OVERFLOW_P when the answer relies on signed overflow
   being undefined (e.g. ABS_EXPR); leaves it unchanged otherwise.
   NOTE(review): the switch header and some case labels are missing
   from this excerpt.  */
14492 tree_unary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
14493 bool *strict_overflow_p
)
14495 if (TYPE_UNSIGNED (type
))
14501 /* We can't return 1 if flag_wrapv is set because
14502 ABS_EXPR<INT_MIN> = INT_MIN. */
14503 if (!INTEGRAL_TYPE_P (type
))
14505 if (TYPE_OVERFLOW_UNDEFINED (type
))
14507 *strict_overflow_p
= true;
14512 case NON_LVALUE_EXPR
:
14514 case FIX_TRUNC_EXPR
:
14515 return tree_expr_nonnegative_warnv_p (op0
,
14516 strict_overflow_p
);
/* Conversions: a cast preserves non-negativity depending on the
   inner/outer type kinds and precisions, as enumerated below.  */
14520 tree inner_type
= TREE_TYPE (op0
);
14521 tree outer_type
= type
;
14523 if (TREE_CODE (outer_type
) == REAL_TYPE
)
14525 if (TREE_CODE (inner_type
) == REAL_TYPE
)
14526 return tree_expr_nonnegative_warnv_p (op0
,
14527 strict_overflow_p
);
14528 if (TREE_CODE (inner_type
) == INTEGER_TYPE
)
14530 if (TYPE_UNSIGNED (inner_type
))
14532 return tree_expr_nonnegative_warnv_p (op0
,
14533 strict_overflow_p
);
14536 else if (TREE_CODE (outer_type
) == INTEGER_TYPE
)
14538 if (TREE_CODE (inner_type
) == REAL_TYPE
)
14539 return tree_expr_nonnegative_warnv_p (op0
,
14540 strict_overflow_p
);
14541 if (TREE_CODE (inner_type
) == INTEGER_TYPE
)
/* Widening from unsigned cannot produce a negative value.  */
14542 return TYPE_PRECISION (inner_type
) < TYPE_PRECISION (outer_type
)
14543 && TYPE_UNSIGNED (inner_type
);
14549 return tree_simple_nonnegative_warnv_p (code
, type
);
14552 /* We don't know sign of `t', so be conservative and return false. */
14556 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14557 value is based on the assumption that signed overflow is undefined,
14558 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14559 *STRICT_OVERFLOW_P. */
/* Return true if (CODE OP0 OP1), of result TYPE, is known
   non-negative; may set *STRICT_OVERFLOW_P (see the unary variant).
   Handles plus/mult (including the zero-extend precision tricks),
   min/max and div/mod codes.  NOTE(review): the switch header and
   some case labels are missing from this excerpt.  */
14562 tree_binary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
14563 tree op1
, bool *strict_overflow_p
)
14565 if (TYPE_UNSIGNED (type
))
14570 case POINTER_PLUS_EXPR
:
14572 if (FLOAT_TYPE_P (type
))
14573 return (tree_expr_nonnegative_warnv_p (op0
,
14575 && tree_expr_nonnegative_warnv_p (op1
,
14576 strict_overflow_p
));
14578 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14579 both unsigned and at least 2 bits shorter than the result. */
14580 if (TREE_CODE (type
) == INTEGER_TYPE
14581 && TREE_CODE (op0
) == NOP_EXPR
14582 && TREE_CODE (op1
) == NOP_EXPR
)
14584 tree inner1
= TREE_TYPE (TREE_OPERAND (op0
, 0));
14585 tree inner2
= TREE_TYPE (TREE_OPERAND (op1
, 0));
14586 if (TREE_CODE (inner1
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner1
)
14587 && TREE_CODE (inner2
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner2
))
14589 unsigned int prec
= MAX (TYPE_PRECISION (inner1
),
14590 TYPE_PRECISION (inner2
)) + 1;
14591 return prec
< TYPE_PRECISION (type
);
/* MULT_EXPR.  */
14597 if (FLOAT_TYPE_P (type
))
14599 /* x * x for floating point x is always non-negative. */
14600 if (operand_equal_p (op0
, op1
, 0))
14602 return (tree_expr_nonnegative_warnv_p (op0
,
14604 && tree_expr_nonnegative_warnv_p (op1
,
14605 strict_overflow_p
));
14608 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14609 both unsigned and their total bits is shorter than the result. */
14610 if (TREE_CODE (type
) == INTEGER_TYPE
14611 && (TREE_CODE (op0
) == NOP_EXPR
|| TREE_CODE (op0
) == INTEGER_CST
)
14612 && (TREE_CODE (op1
) == NOP_EXPR
|| TREE_CODE (op1
) == INTEGER_CST
))
14614 tree inner0
= (TREE_CODE (op0
) == NOP_EXPR
)
14615 ? TREE_TYPE (TREE_OPERAND (op0
, 0))
14617 tree inner1
= (TREE_CODE (op1
) == NOP_EXPR
)
14618 ? TREE_TYPE (TREE_OPERAND (op1
, 0))
14621 bool unsigned0
= TYPE_UNSIGNED (inner0
);
14622 bool unsigned1
= TYPE_UNSIGNED (inner1
);
/* A non-negative constant counts as "unsigned" for this purpose.  */
14624 if (TREE_CODE (op0
) == INTEGER_CST
)
14625 unsigned0
= unsigned0
|| tree_int_cst_sgn (op0
) >= 0;
14627 if (TREE_CODE (op1
) == INTEGER_CST
)
14628 unsigned1
= unsigned1
|| tree_int_cst_sgn (op1
) >= 0;
14630 if (TREE_CODE (inner0
) == INTEGER_TYPE
&& unsigned0
14631 && TREE_CODE (inner1
) == INTEGER_TYPE
&& unsigned1
)
14633 unsigned int precision0
= (TREE_CODE (op0
) == INTEGER_CST
)
14634 ? tree_int_cst_min_precision (op0
, /*unsignedp=*/true)
14635 : TYPE_PRECISION (inner0
);
14637 unsigned int precision1
= (TREE_CODE (op1
) == INTEGER_CST
)
14638 ? tree_int_cst_min_precision (op1
, /*unsignedp=*/true)
14639 : TYPE_PRECISION (inner1
);
14641 return precision0
+ precision1
< TYPE_PRECISION (type
);
/* MAX-like: non-negative if either operand is.  */
14648 return (tree_expr_nonnegative_warnv_p (op0
,
14650 || tree_expr_nonnegative_warnv_p (op1
,
14651 strict_overflow_p
));
14657 case TRUNC_DIV_EXPR
:
14658 case CEIL_DIV_EXPR
:
14659 case FLOOR_DIV_EXPR
:
14660 case ROUND_DIV_EXPR
:
14661 return (tree_expr_nonnegative_warnv_p (op0
,
14663 && tree_expr_nonnegative_warnv_p (op1
,
14664 strict_overflow_p
));
14666 case TRUNC_MOD_EXPR
:
14667 case CEIL_MOD_EXPR
:
14668 case FLOOR_MOD_EXPR
:
14669 case ROUND_MOD_EXPR
:
14670 return tree_expr_nonnegative_warnv_p (op0
,
14671 strict_overflow_p
);
14673 return tree_simple_nonnegative_warnv_p (code
, type
);
14676 /* We don't know sign of `t', so be conservative and return false. */
14680 /* Return true if T is known to be non-negative. If the return
14681 value is based on the assumption that signed overflow is undefined,
14682 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14683 *STRICT_OVERFLOW_P. */
/* Return true if the single tree T is known non-negative: constants
   by sign inspection, COND_EXPR-like nodes when both arms are, else
   defer to tree_simple_nonnegative_warnv_p.  May set
   *STRICT_OVERFLOW_P via the recursive calls.  */
14686 tree_single_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
14688 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
14691 switch (TREE_CODE (t
))
14694 return tree_int_cst_sgn (t
) >= 0;
14697 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
14700 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t
));
/* Both selected arms must be non-negative.  */
14703 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
14705 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 2),
14706 strict_overflow_p
));
14708 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
),
14711 /* We don't know sign of `t', so be conservative and return false. */
14715 /* Return true if T is known to be non-negative. If the return
14716 value is based on the assumption that signed overflow is undefined,
14717 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14718 *STRICT_OVERFLOW_P. */
/* Return true if a call to built-in FNDECL with arguments ARG0/ARG1
   (result TYPE) is known non-negative; may set *STRICT_OVERFLOW_P
   via the recursive argument checks.  Grouped by how the result's
   sign follows from the arguments.  */
14721 tree_call_nonnegative_warnv_p (tree type
, tree fndecl
,
14722 tree arg0
, tree arg1
, bool *strict_overflow_p
)
14724 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
14725 switch (DECL_FUNCTION_CODE (fndecl
))
/* Always non-negative by definition of the function.  */
14727 CASE_FLT_FN (BUILT_IN_ACOS
):
14728 CASE_FLT_FN (BUILT_IN_ACOSH
):
14729 CASE_FLT_FN (BUILT_IN_CABS
):
14730 CASE_FLT_FN (BUILT_IN_COSH
):
14731 CASE_FLT_FN (BUILT_IN_ERFC
):
14732 CASE_FLT_FN (BUILT_IN_EXP
):
14733 CASE_FLT_FN (BUILT_IN_EXP10
):
14734 CASE_FLT_FN (BUILT_IN_EXP2
):
14735 CASE_FLT_FN (BUILT_IN_FABS
):
14736 CASE_FLT_FN (BUILT_IN_FDIM
):
14737 CASE_FLT_FN (BUILT_IN_HYPOT
):
14738 CASE_FLT_FN (BUILT_IN_POW10
):
14739 CASE_INT_FN (BUILT_IN_FFS
):
14740 CASE_INT_FN (BUILT_IN_PARITY
):
14741 CASE_INT_FN (BUILT_IN_POPCOUNT
):
14742 case BUILT_IN_BSWAP32
:
14743 case BUILT_IN_BSWAP64
:
14747 CASE_FLT_FN (BUILT_IN_SQRT
):
14748 /* sqrt(-0.0) is -0.0. */
14749 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
14751 return tree_expr_nonnegative_warnv_p (arg0
,
14752 strict_overflow_p
);
/* Sign of the result follows the first argument's sign.  */
14754 CASE_FLT_FN (BUILT_IN_ASINH
):
14755 CASE_FLT_FN (BUILT_IN_ATAN
):
14756 CASE_FLT_FN (BUILT_IN_ATANH
):
14757 CASE_FLT_FN (BUILT_IN_CBRT
):
14758 CASE_FLT_FN (BUILT_IN_CEIL
):
14759 CASE_FLT_FN (BUILT_IN_ERF
):
14760 CASE_FLT_FN (BUILT_IN_EXPM1
):
14761 CASE_FLT_FN (BUILT_IN_FLOOR
):
14762 CASE_FLT_FN (BUILT_IN_FMOD
):
14763 CASE_FLT_FN (BUILT_IN_FREXP
):
14764 CASE_FLT_FN (BUILT_IN_ICEIL
):
14765 CASE_FLT_FN (BUILT_IN_IFLOOR
):
14766 CASE_FLT_FN (BUILT_IN_IRINT
):
14767 CASE_FLT_FN (BUILT_IN_IROUND
):
14768 CASE_FLT_FN (BUILT_IN_LCEIL
):
14769 CASE_FLT_FN (BUILT_IN_LDEXP
):
14770 CASE_FLT_FN (BUILT_IN_LFLOOR
):
14771 CASE_FLT_FN (BUILT_IN_LLCEIL
):
14772 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
14773 CASE_FLT_FN (BUILT_IN_LLRINT
):
14774 CASE_FLT_FN (BUILT_IN_LLROUND
):
14775 CASE_FLT_FN (BUILT_IN_LRINT
):
14776 CASE_FLT_FN (BUILT_IN_LROUND
):
14777 CASE_FLT_FN (BUILT_IN_MODF
):
14778 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
14779 CASE_FLT_FN (BUILT_IN_RINT
):
14780 CASE_FLT_FN (BUILT_IN_ROUND
):
14781 CASE_FLT_FN (BUILT_IN_SCALB
):
14782 CASE_FLT_FN (BUILT_IN_SCALBLN
):
14783 CASE_FLT_FN (BUILT_IN_SCALBN
):
14784 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
14785 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
14786 CASE_FLT_FN (BUILT_IN_SINH
):
14787 CASE_FLT_FN (BUILT_IN_TANH
):
14788 CASE_FLT_FN (BUILT_IN_TRUNC
):
14789 /* True if the 1st argument is nonnegative. */
14790 return tree_expr_nonnegative_warnv_p (arg0
,
14791 strict_overflow_p
);
14793 CASE_FLT_FN (BUILT_IN_FMAX
):
14794 /* True if the 1st OR 2nd arguments are nonnegative. */
14795 return (tree_expr_nonnegative_warnv_p (arg0
,
14797 || (tree_expr_nonnegative_warnv_p (arg1
,
14798 strict_overflow_p
)));
14800 CASE_FLT_FN (BUILT_IN_FMIN
):
14801 /* True if the 1st AND 2nd arguments are nonnegative. */
14802 return (tree_expr_nonnegative_warnv_p (arg0
,
14804 && (tree_expr_nonnegative_warnv_p (arg1
,
14805 strict_overflow_p
)));
14807 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
14808 /* True if the 2nd argument is nonnegative. */
14809 return tree_expr_nonnegative_warnv_p (arg1
,
14810 strict_overflow_p
);
14812 CASE_FLT_FN (BUILT_IN_POWI
):
14813 /* True if the 1st argument is nonnegative or the second
14814 argument is an even integer. */
14815 if (TREE_CODE (arg1
) == INTEGER_CST
14816 && (TREE_INT_CST_LOW (arg1
) & 1) == 0)
14818 return tree_expr_nonnegative_warnv_p (arg0
,
14819 strict_overflow_p
);
14821 CASE_FLT_FN (BUILT_IN_POW
):
14822 /* True if the 1st argument is nonnegative or the second
14823 argument is an even integer valued real. */
14824 if (TREE_CODE (arg1
) == REAL_CST
)
/* Round-trip the exponent through an integer to test whether it
   is an exactly-representable integral value.  */
14829 c
= TREE_REAL_CST (arg1
);
14830 n
= real_to_integer (&c
);
14833 REAL_VALUE_TYPE cint
;
14834 real_from_integer (&cint
, VOIDmode
, n
,
14835 n
< 0 ? -1 : 0, 0);
14836 if (real_identical (&c
, &cint
))
14840 return tree_expr_nonnegative_warnv_p (arg0
,
14841 strict_overflow_p
);
/* Unknown builtin: fall back to code/type-based reasoning.  */
14846 return tree_simple_nonnegative_warnv_p (CALL_EXPR
,
14850 /* Return true if T is known to be non-negative. If the return
14851 value is based on the assumption that signed overflow is undefined,
14852 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14853 *STRICT_OVERFLOW_P. */
14856 tree_invalid_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
14858 enum tree_code code
= TREE_CODE (t
);
14859 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
14866 tree temp
= TARGET_EXPR_SLOT (t
);
14867 t
= TARGET_EXPR_INITIAL (t
);
14869 /* If the initializer is non-void, then it's a normal expression
14870 that will be assigned to the slot. */
14871 if (!VOID_TYPE_P (t
))
14872 return tree_expr_nonnegative_warnv_p (t
, strict_overflow_p
);
14874 /* Otherwise, the initializer sets the slot in some way. One common
14875 way is an assignment statement at the end of the initializer. */
14878 if (TREE_CODE (t
) == BIND_EXPR
)
14879 t
= expr_last (BIND_EXPR_BODY (t
));
14880 else if (TREE_CODE (t
) == TRY_FINALLY_EXPR
14881 || TREE_CODE (t
) == TRY_CATCH_EXPR
)
14882 t
= expr_last (TREE_OPERAND (t
, 0));
14883 else if (TREE_CODE (t
) == STATEMENT_LIST
)
14888 if (TREE_CODE (t
) == MODIFY_EXPR
14889 && TREE_OPERAND (t
, 0) == temp
)
14890 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
14891 strict_overflow_p
);
14898 tree arg0
= call_expr_nargs (t
) > 0 ? CALL_EXPR_ARG (t
, 0) : NULL_TREE
;
14899 tree arg1
= call_expr_nargs (t
) > 1 ? CALL_EXPR_ARG (t
, 1) : NULL_TREE
;
14901 return tree_call_nonnegative_warnv_p (TREE_TYPE (t
),
14902 get_callee_fndecl (t
),
14905 strict_overflow_p
);
14907 case COMPOUND_EXPR
:
14909 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
14910 strict_overflow_p
);
14912 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t
, 1)),
14913 strict_overflow_p
);
14915 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 0),
14916 strict_overflow_p
);
14919 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
),
14923 /* We don't know sign of `t', so be conservative and return false. */
14927 /* Return true if T is known to be non-negative. If the return
14928 value is based on the assumption that signed overflow is undefined,
14929 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14930 *STRICT_OVERFLOW_P. */
14933 tree_expr_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
14935 enum tree_code code
;
14936 if (t
== error_mark_node
)
14939 code
= TREE_CODE (t
);
14940 switch (TREE_CODE_CLASS (code
))
14943 case tcc_comparison
:
14944 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
14946 TREE_OPERAND (t
, 0),
14947 TREE_OPERAND (t
, 1),
14948 strict_overflow_p
);
14951 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
14953 TREE_OPERAND (t
, 0),
14954 strict_overflow_p
);
14957 case tcc_declaration
:
14958 case tcc_reference
:
14959 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
);
14967 case TRUTH_AND_EXPR
:
14968 case TRUTH_OR_EXPR
:
14969 case TRUTH_XOR_EXPR
:
14970 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
14972 TREE_OPERAND (t
, 0),
14973 TREE_OPERAND (t
, 1),
14974 strict_overflow_p
);
14975 case TRUTH_NOT_EXPR
:
14976 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
14978 TREE_OPERAND (t
, 0),
14979 strict_overflow_p
);
14986 case WITH_SIZE_EXPR
:
14988 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
);
14991 return tree_invalid_nonnegative_warnv_p (t
, strict_overflow_p
);
14995 /* Return true if `t' is known to be non-negative. Handle warnings
14996 about undefined signed overflow. */
14999 tree_expr_nonnegative_p (tree t
)
15001 bool ret
, strict_overflow_p
;
15003 strict_overflow_p
= false;
15004 ret
= tree_expr_nonnegative_warnv_p (t
, &strict_overflow_p
);
15005 if (strict_overflow_p
)
15006 fold_overflow_warning (("assuming signed overflow does not occur when "
15007 "determining that expression is always "
15009 WARN_STRICT_OVERFLOW_MISC
);
15014 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15015 For floating point we further ensure that T is not denormal.
15016 Similar logic is present in nonzero_address in rtlanal.h.
15018 If the return value is based on the assumption that signed overflow
15019 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15020 change *STRICT_OVERFLOW_P. */
15023 tree_unary_nonzero_warnv_p (enum tree_code code
, tree type
, tree op0
,
15024 bool *strict_overflow_p
)
15029 return tree_expr_nonzero_warnv_p (op0
,
15030 strict_overflow_p
);
15034 tree inner_type
= TREE_TYPE (op0
);
15035 tree outer_type
= type
;
15037 return (TYPE_PRECISION (outer_type
) >= TYPE_PRECISION (inner_type
)
15038 && tree_expr_nonzero_warnv_p (op0
,
15039 strict_overflow_p
));
15043 case NON_LVALUE_EXPR
:
15044 return tree_expr_nonzero_warnv_p (op0
,
15045 strict_overflow_p
);
15054 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15055 For floating point we further ensure that T is not denormal.
15056 Similar logic is present in nonzero_address in rtlanal.h.
15058 If the return value is based on the assumption that signed overflow
15059 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15060 change *STRICT_OVERFLOW_P. */
15063 tree_binary_nonzero_warnv_p (enum tree_code code
,
15066 tree op1
, bool *strict_overflow_p
)
15068 bool sub_strict_overflow_p
;
15071 case POINTER_PLUS_EXPR
:
15073 if (TYPE_OVERFLOW_UNDEFINED (type
))
15075 /* With the presence of negative values it is hard
15076 to say something. */
15077 sub_strict_overflow_p
= false;
15078 if (!tree_expr_nonnegative_warnv_p (op0
,
15079 &sub_strict_overflow_p
)
15080 || !tree_expr_nonnegative_warnv_p (op1
,
15081 &sub_strict_overflow_p
))
15083 /* One of operands must be positive and the other non-negative. */
15084 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15085 overflows, on a twos-complement machine the sum of two
15086 nonnegative numbers can never be zero. */
15087 return (tree_expr_nonzero_warnv_p (op0
,
15089 || tree_expr_nonzero_warnv_p (op1
,
15090 strict_overflow_p
));
15095 if (TYPE_OVERFLOW_UNDEFINED (type
))
15097 if (tree_expr_nonzero_warnv_p (op0
,
15099 && tree_expr_nonzero_warnv_p (op1
,
15100 strict_overflow_p
))
15102 *strict_overflow_p
= true;
15109 sub_strict_overflow_p
= false;
15110 if (tree_expr_nonzero_warnv_p (op0
,
15111 &sub_strict_overflow_p
)
15112 && tree_expr_nonzero_warnv_p (op1
,
15113 &sub_strict_overflow_p
))
15115 if (sub_strict_overflow_p
)
15116 *strict_overflow_p
= true;
15121 sub_strict_overflow_p
= false;
15122 if (tree_expr_nonzero_warnv_p (op0
,
15123 &sub_strict_overflow_p
))
15125 if (sub_strict_overflow_p
)
15126 *strict_overflow_p
= true;
15128 /* When both operands are nonzero, then MAX must be too. */
15129 if (tree_expr_nonzero_warnv_p (op1
,
15130 strict_overflow_p
))
15133 /* MAX where operand 0 is positive is positive. */
15134 return tree_expr_nonnegative_warnv_p (op0
,
15135 strict_overflow_p
);
15137 /* MAX where operand 1 is positive is positive. */
15138 else if (tree_expr_nonzero_warnv_p (op1
,
15139 &sub_strict_overflow_p
)
15140 && tree_expr_nonnegative_warnv_p (op1
,
15141 &sub_strict_overflow_p
))
15143 if (sub_strict_overflow_p
)
15144 *strict_overflow_p
= true;
15150 return (tree_expr_nonzero_warnv_p (op1
,
15152 || tree_expr_nonzero_warnv_p (op0
,
15153 strict_overflow_p
));
15162 /* Return true when T is an address and is known to be nonzero.
15163 For floating point we further ensure that T is not denormal.
15164 Similar logic is present in nonzero_address in rtlanal.h.
15166 If the return value is based on the assumption that signed overflow
15167 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15168 change *STRICT_OVERFLOW_P. */
15171 tree_single_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
15173 bool sub_strict_overflow_p
;
15174 switch (TREE_CODE (t
))
15177 return !integer_zerop (t
);
15181 tree base
= TREE_OPERAND (t
, 0);
15182 if (!DECL_P (base
))
15183 base
= get_base_address (base
);
15188 /* Weak declarations may link to NULL. Other things may also be NULL
15189 so protect with -fdelete-null-pointer-checks; but not variables
15190 allocated on the stack. */
15192 && (flag_delete_null_pointer_checks
15193 || (DECL_CONTEXT (base
)
15194 && TREE_CODE (DECL_CONTEXT (base
)) == FUNCTION_DECL
15195 && auto_var_in_fn_p (base
, DECL_CONTEXT (base
)))))
15196 return !VAR_OR_FUNCTION_DECL_P (base
) || !DECL_WEAK (base
);
15198 /* Constants are never weak. */
15199 if (CONSTANT_CLASS_P (base
))
15206 sub_strict_overflow_p
= false;
15207 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
15208 &sub_strict_overflow_p
)
15209 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 2),
15210 &sub_strict_overflow_p
))
15212 if (sub_strict_overflow_p
)
15213 *strict_overflow_p
= true;
15224 /* Return true when T is an address and is known to be nonzero.
15225 For floating point we further ensure that T is not denormal.
15226 Similar logic is present in nonzero_address in rtlanal.h.
15228 If the return value is based on the assumption that signed overflow
15229 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15230 change *STRICT_OVERFLOW_P. */
15233 tree_expr_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
15235 tree type
= TREE_TYPE (t
);
15236 enum tree_code code
;
15238 /* Doing something useful for floating point would need more work. */
15239 if (!INTEGRAL_TYPE_P (type
) && !POINTER_TYPE_P (type
))
15242 code
= TREE_CODE (t
);
15243 switch (TREE_CODE_CLASS (code
))
15246 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
15247 strict_overflow_p
);
15249 case tcc_comparison
:
15250 return tree_binary_nonzero_warnv_p (code
, type
,
15251 TREE_OPERAND (t
, 0),
15252 TREE_OPERAND (t
, 1),
15253 strict_overflow_p
);
15255 case tcc_declaration
:
15256 case tcc_reference
:
15257 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
15265 case TRUTH_NOT_EXPR
:
15266 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
15267 strict_overflow_p
);
15269 case TRUTH_AND_EXPR
:
15270 case TRUTH_OR_EXPR
:
15271 case TRUTH_XOR_EXPR
:
15272 return tree_binary_nonzero_warnv_p (code
, type
,
15273 TREE_OPERAND (t
, 0),
15274 TREE_OPERAND (t
, 1),
15275 strict_overflow_p
);
15282 case WITH_SIZE_EXPR
:
15284 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
15286 case COMPOUND_EXPR
:
15289 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
15290 strict_overflow_p
);
15293 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 0),
15294 strict_overflow_p
);
15297 return alloca_call_p (t
);
15305 /* Return true when T is an address and is known to be nonzero.
15306 Handle warnings about undefined signed overflow. */
15309 tree_expr_nonzero_p (tree t
)
15311 bool ret
, strict_overflow_p
;
15313 strict_overflow_p
= false;
15314 ret
= tree_expr_nonzero_warnv_p (t
, &strict_overflow_p
);
15315 if (strict_overflow_p
)
15316 fold_overflow_warning (("assuming signed overflow does not occur when "
15317 "determining that expression is always "
15319 WARN_STRICT_OVERFLOW_MISC
);
15323 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15324 attempt to fold the expression to a constant without modifying TYPE,
15327 If the expression could be simplified to a constant, then return
15328 the constant. If the expression would not be simplified to a
15329 constant, then return NULL_TREE. */
15332 fold_binary_to_constant (enum tree_code code
, tree type
, tree op0
, tree op1
)
15334 tree tem
= fold_binary (code
, type
, op0
, op1
);
15335 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
15338 /* Given the components of a unary expression CODE, TYPE and OP0,
15339 attempt to fold the expression to a constant without modifying
15342 If the expression could be simplified to a constant, then return
15343 the constant. If the expression would not be simplified to a
15344 constant, then return NULL_TREE. */
15347 fold_unary_to_constant (enum tree_code code
, tree type
, tree op0
)
15349 tree tem
= fold_unary (code
, type
, op0
);
15350 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
15353 /* If EXP represents referencing an element in a constant string
15354 (either via pointer arithmetic or array indexing), return the
15355 tree representing the value accessed, otherwise return NULL. */
15358 fold_read_from_constant_string (tree exp
)
15360 if ((TREE_CODE (exp
) == INDIRECT_REF
15361 || TREE_CODE (exp
) == ARRAY_REF
)
15362 && TREE_CODE (TREE_TYPE (exp
)) == INTEGER_TYPE
)
15364 tree exp1
= TREE_OPERAND (exp
, 0);
15367 location_t loc
= EXPR_LOCATION (exp
);
15369 if (TREE_CODE (exp
) == INDIRECT_REF
)
15370 string
= string_constant (exp1
, &index
);
15373 tree low_bound
= array_ref_low_bound (exp
);
15374 index
= fold_convert_loc (loc
, sizetype
, TREE_OPERAND (exp
, 1));
15376 /* Optimize the special-case of a zero lower bound.
15378 We convert the low_bound to sizetype to avoid some problems
15379 with constant folding. (E.g. suppose the lower bound is 1,
15380 and its mode is QI. Without the conversion,l (ARRAY
15381 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15382 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15383 if (! integer_zerop (low_bound
))
15384 index
= size_diffop_loc (loc
, index
,
15385 fold_convert_loc (loc
, sizetype
, low_bound
));
15391 && TYPE_MODE (TREE_TYPE (exp
)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))
15392 && TREE_CODE (string
) == STRING_CST
15393 && TREE_CODE (index
) == INTEGER_CST
15394 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
15395 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
))))
15397 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))) == 1))
15398 return build_int_cst_type (TREE_TYPE (exp
),
15399 (TREE_STRING_POINTER (string
)
15400 [TREE_INT_CST_LOW (index
)]));
15405 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15406 an integer constant, real, or fixed-point constant.
15408 TYPE is the type of the result. */
15411 fold_negate_const (tree arg0
, tree type
)
15413 tree t
= NULL_TREE
;
15415 switch (TREE_CODE (arg0
))
15419 double_int val
= tree_to_double_int (arg0
);
15420 int overflow
= neg_double (val
.low
, val
.high
, &val
.low
, &val
.high
);
15422 t
= force_fit_type_double (type
, val
, 1,
15423 (overflow
| TREE_OVERFLOW (arg0
))
15424 && !TYPE_UNSIGNED (type
));
15429 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
15434 FIXED_VALUE_TYPE f
;
15435 bool overflow_p
= fixed_arithmetic (&f
, NEGATE_EXPR
,
15436 &(TREE_FIXED_CST (arg0
)), NULL
,
15437 TYPE_SATURATING (type
));
15438 t
= build_fixed (type
, f
);
15439 /* Propagate overflow flags. */
15440 if (overflow_p
| TREE_OVERFLOW (arg0
))
15441 TREE_OVERFLOW (t
) = 1;
15446 gcc_unreachable ();
15452 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15453 an integer constant or real constant.
15455 TYPE is the type of the result. */
15458 fold_abs_const (tree arg0
, tree type
)
15460 tree t
= NULL_TREE
;
15462 switch (TREE_CODE (arg0
))
15466 double_int val
= tree_to_double_int (arg0
);
15468 /* If the value is unsigned or non-negative, then the absolute value
15469 is the same as the ordinary value. */
15470 if (TYPE_UNSIGNED (type
)
15471 || !double_int_negative_p (val
))
15474 /* If the value is negative, then the absolute value is
15480 overflow
= neg_double (val
.low
, val
.high
, &val
.low
, &val
.high
);
15481 t
= force_fit_type_double (type
, val
, -1,
15482 overflow
| TREE_OVERFLOW (arg0
));
15488 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0
)))
15489 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
15495 gcc_unreachable ();
15501 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15502 constant. TYPE is the type of the result. */
15505 fold_not_const (const_tree arg0
, tree type
)
15509 gcc_assert (TREE_CODE (arg0
) == INTEGER_CST
);
15511 val
= double_int_not (tree_to_double_int (arg0
));
15512 return force_fit_type_double (type
, val
, 0, TREE_OVERFLOW (arg0
));
15515 /* Given CODE, a relational operator, the target type, TYPE and two
15516 constant operands OP0 and OP1, return the result of the
15517 relational operation. If the result is not a compile time
15518 constant, then return NULL_TREE. */
15521 fold_relational_const (enum tree_code code
, tree type
, tree op0
, tree op1
)
15523 int result
, invert
;
15525 /* From here on, the only cases we handle are when the result is
15526 known to be a constant. */
15528 if (TREE_CODE (op0
) == REAL_CST
&& TREE_CODE (op1
) == REAL_CST
)
15530 const REAL_VALUE_TYPE
*c0
= TREE_REAL_CST_PTR (op0
);
15531 const REAL_VALUE_TYPE
*c1
= TREE_REAL_CST_PTR (op1
);
15533 /* Handle the cases where either operand is a NaN. */
15534 if (real_isnan (c0
) || real_isnan (c1
))
15544 case UNORDERED_EXPR
:
15558 if (flag_trapping_math
)
15564 gcc_unreachable ();
15567 return constant_boolean_node (result
, type
);
15570 return constant_boolean_node (real_compare (code
, c0
, c1
), type
);
15573 if (TREE_CODE (op0
) == FIXED_CST
&& TREE_CODE (op1
) == FIXED_CST
)
15575 const FIXED_VALUE_TYPE
*c0
= TREE_FIXED_CST_PTR (op0
);
15576 const FIXED_VALUE_TYPE
*c1
= TREE_FIXED_CST_PTR (op1
);
15577 return constant_boolean_node (fixed_compare (code
, c0
, c1
), type
);
15580 /* Handle equality/inequality of complex constants. */
15581 if (TREE_CODE (op0
) == COMPLEX_CST
&& TREE_CODE (op1
) == COMPLEX_CST
)
15583 tree rcond
= fold_relational_const (code
, type
,
15584 TREE_REALPART (op0
),
15585 TREE_REALPART (op1
));
15586 tree icond
= fold_relational_const (code
, type
,
15587 TREE_IMAGPART (op0
),
15588 TREE_IMAGPART (op1
));
15589 if (code
== EQ_EXPR
)
15590 return fold_build2 (TRUTH_ANDIF_EXPR
, type
, rcond
, icond
);
15591 else if (code
== NE_EXPR
)
15592 return fold_build2 (TRUTH_ORIF_EXPR
, type
, rcond
, icond
);
15597 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15599 To compute GT, swap the arguments and do LT.
15600 To compute GE, do LT and invert the result.
15601 To compute LE, swap the arguments, do LT and invert the result.
15602 To compute NE, do EQ and invert the result.
15604 Therefore, the code below must handle only EQ and LT. */
15606 if (code
== LE_EXPR
|| code
== GT_EXPR
)
15611 code
= swap_tree_comparison (code
);
15614 /* Note that it is safe to invert for real values here because we
15615 have already handled the one case that it matters. */
15618 if (code
== NE_EXPR
|| code
== GE_EXPR
)
15621 code
= invert_tree_comparison (code
, false);
15624 /* Compute a result for LT or EQ if args permit;
15625 Otherwise return T. */
15626 if (TREE_CODE (op0
) == INTEGER_CST
&& TREE_CODE (op1
) == INTEGER_CST
)
15628 if (code
== EQ_EXPR
)
15629 result
= tree_int_cst_equal (op0
, op1
);
15630 else if (TYPE_UNSIGNED (TREE_TYPE (op0
)))
15631 result
= INT_CST_LT_UNSIGNED (op0
, op1
);
15633 result
= INT_CST_LT (op0
, op1
);
15640 return constant_boolean_node (result
, type
);
15643 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15644 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15648 fold_build_cleanup_point_expr (tree type
, tree expr
)
15650 /* If the expression does not have side effects then we don't have to wrap
15651 it with a cleanup point expression. */
15652 if (!TREE_SIDE_EFFECTS (expr
))
15655 /* If the expression is a return, check to see if the expression inside the
15656 return has no side effects or the right hand side of the modify expression
15657 inside the return. If either don't have side effects set we don't need to
15658 wrap the expression in a cleanup point expression. Note we don't check the
15659 left hand side of the modify because it should always be a return decl. */
15660 if (TREE_CODE (expr
) == RETURN_EXPR
)
15662 tree op
= TREE_OPERAND (expr
, 0);
15663 if (!op
|| !TREE_SIDE_EFFECTS (op
))
15665 op
= TREE_OPERAND (op
, 1);
15666 if (!TREE_SIDE_EFFECTS (op
))
15670 return build1 (CLEANUP_POINT_EXPR
, type
, expr
);
15673 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15674 of an indirection through OP0, or NULL_TREE if no simplification is
15678 fold_indirect_ref_1 (location_t loc
, tree type
, tree op0
)
15684 subtype
= TREE_TYPE (sub
);
15685 if (!POINTER_TYPE_P (subtype
))
15688 if (TREE_CODE (sub
) == ADDR_EXPR
)
15690 tree op
= TREE_OPERAND (sub
, 0);
15691 tree optype
= TREE_TYPE (op
);
15692 /* *&CONST_DECL -> to the value of the const decl. */
15693 if (TREE_CODE (op
) == CONST_DECL
)
15694 return DECL_INITIAL (op
);
15695 /* *&p => p; make sure to handle *&"str"[cst] here. */
15696 if (type
== optype
)
15698 tree fop
= fold_read_from_constant_string (op
);
15704 /* *(foo *)&fooarray => fooarray[0] */
15705 else if (TREE_CODE (optype
) == ARRAY_TYPE
15706 && type
== TREE_TYPE (optype
)
15707 && (!in_gimple_form
15708 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
15710 tree type_domain
= TYPE_DOMAIN (optype
);
15711 tree min_val
= size_zero_node
;
15712 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
15713 min_val
= TYPE_MIN_VALUE (type_domain
);
15715 && TREE_CODE (min_val
) != INTEGER_CST
)
15717 return build4_loc (loc
, ARRAY_REF
, type
, op
, min_val
,
15718 NULL_TREE
, NULL_TREE
);
15720 /* *(foo *)&complexfoo => __real__ complexfoo */
15721 else if (TREE_CODE (optype
) == COMPLEX_TYPE
15722 && type
== TREE_TYPE (optype
))
15723 return fold_build1_loc (loc
, REALPART_EXPR
, type
, op
);
15724 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15725 else if (TREE_CODE (optype
) == VECTOR_TYPE
15726 && type
== TREE_TYPE (optype
))
15728 tree part_width
= TYPE_SIZE (type
);
15729 tree index
= bitsize_int (0);
15730 return fold_build3_loc (loc
, BIT_FIELD_REF
, type
, op
, part_width
, index
);
15734 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
15735 && TREE_CODE (TREE_OPERAND (sub
, 1)) == INTEGER_CST
)
15737 tree op00
= TREE_OPERAND (sub
, 0);
15738 tree op01
= TREE_OPERAND (sub
, 1);
15741 if (TREE_CODE (op00
) == ADDR_EXPR
)
15744 op00
= TREE_OPERAND (op00
, 0);
15745 op00type
= TREE_TYPE (op00
);
15747 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15748 if (TREE_CODE (op00type
) == VECTOR_TYPE
15749 && type
== TREE_TYPE (op00type
))
15751 HOST_WIDE_INT offset
= tree_low_cst (op01
, 0);
15752 tree part_width
= TYPE_SIZE (type
);
15753 unsigned HOST_WIDE_INT part_widthi
= tree_low_cst (part_width
, 0)/BITS_PER_UNIT
;
15754 unsigned HOST_WIDE_INT indexi
= offset
* BITS_PER_UNIT
;
15755 tree index
= bitsize_int (indexi
);
15757 if (offset
/part_widthi
<= TYPE_VECTOR_SUBPARTS (op00type
))
15758 return fold_build3_loc (loc
,
15759 BIT_FIELD_REF
, type
, op00
,
15760 part_width
, index
);
15763 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15764 else if (TREE_CODE (op00type
) == COMPLEX_TYPE
15765 && type
== TREE_TYPE (op00type
))
15767 tree size
= TYPE_SIZE_UNIT (type
);
15768 if (tree_int_cst_equal (size
, op01
))
15769 return fold_build1_loc (loc
, IMAGPART_EXPR
, type
, op00
);
15771 /* ((foo *)&fooarray)[1] => fooarray[1] */
15772 else if (TREE_CODE (op00type
) == ARRAY_TYPE
15773 && type
== TREE_TYPE (op00type
))
15775 tree type_domain
= TYPE_DOMAIN (op00type
);
15776 tree min_val
= size_zero_node
;
15777 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
15778 min_val
= TYPE_MIN_VALUE (type_domain
);
15779 op01
= size_binop_loc (loc
, EXACT_DIV_EXPR
, op01
,
15780 TYPE_SIZE_UNIT (type
));
15781 op01
= size_binop_loc (loc
, PLUS_EXPR
, op01
, min_val
);
15782 return build4_loc (loc
, ARRAY_REF
, type
, op00
, op01
,
15783 NULL_TREE
, NULL_TREE
);
15788 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15789 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
15790 && type
== TREE_TYPE (TREE_TYPE (subtype
))
15791 && (!in_gimple_form
15792 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
15795 tree min_val
= size_zero_node
;
15796 sub
= build_fold_indirect_ref_loc (loc
, sub
);
15797 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
15798 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
15799 min_val
= TYPE_MIN_VALUE (type_domain
);
15801 && TREE_CODE (min_val
) != INTEGER_CST
)
15803 return build4_loc (loc
, ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
,
15810 /* Builds an expression for an indirection through T, simplifying some
15814 build_fold_indirect_ref_loc (location_t loc
, tree t
)
15816 tree type
= TREE_TYPE (TREE_TYPE (t
));
15817 tree sub
= fold_indirect_ref_1 (loc
, type
, t
);
15822 return build1_loc (loc
, INDIRECT_REF
, type
, t
);
15825 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15828 fold_indirect_ref_loc (location_t loc
, tree t
)
15830 tree sub
= fold_indirect_ref_1 (loc
, TREE_TYPE (t
), TREE_OPERAND (t
, 0));
15838 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15839 whose result is ignored. The type of the returned tree need not be
15840 the same as the original expression. */
15843 fold_ignored_result (tree t
)
15845 if (!TREE_SIDE_EFFECTS (t
))
15846 return integer_zero_node
;
15849 switch (TREE_CODE_CLASS (TREE_CODE (t
)))
15852 t
= TREE_OPERAND (t
, 0);
15856 case tcc_comparison
:
15857 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
15858 t
= TREE_OPERAND (t
, 0);
15859 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 0)))
15860 t
= TREE_OPERAND (t
, 1);
15865 case tcc_expression
:
15866 switch (TREE_CODE (t
))
15868 case COMPOUND_EXPR
:
15869 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
15871 t
= TREE_OPERAND (t
, 0);
15875 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1))
15876 || TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 2)))
15878 t
= TREE_OPERAND (t
, 0);
15891 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15892 This can only be applied to objects of a sizetype. */
15895 round_up_loc (location_t loc
, tree value
, int divisor
)
15897 tree div
= NULL_TREE
;
15899 gcc_assert (divisor
> 0);
15903 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15904 have to do anything. Only do this when we are not given a const,
15905 because in that case, this check is more expensive than just
15907 if (TREE_CODE (value
) != INTEGER_CST
)
15909 div
= build_int_cst (TREE_TYPE (value
), divisor
);
15911 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
15915 /* If divisor is a power of two, simplify this to bit manipulation. */
15916 if (divisor
== (divisor
& -divisor
))
15918 if (TREE_CODE (value
) == INTEGER_CST
)
15920 double_int val
= tree_to_double_int (value
);
15923 if ((val
.low
& (divisor
- 1)) == 0)
15926 overflow_p
= TREE_OVERFLOW (value
);
15927 val
.low
&= ~(divisor
- 1);
15928 val
.low
+= divisor
;
15936 return force_fit_type_double (TREE_TYPE (value
), val
,
15943 t
= build_int_cst (TREE_TYPE (value
), divisor
- 1);
15944 value
= size_binop_loc (loc
, PLUS_EXPR
, value
, t
);
15945 t
= build_int_cst (TREE_TYPE (value
), -divisor
);
15946 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
15952 div
= build_int_cst (TREE_TYPE (value
), divisor
);
15953 value
= size_binop_loc (loc
, CEIL_DIV_EXPR
, value
, div
);
15954 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
15960 /* Likewise, but round down. */
15963 round_down_loc (location_t loc
, tree value
, int divisor
)
15965 tree div
= NULL_TREE
;
15967 gcc_assert (divisor
> 0);
15971 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15972 have to do anything. Only do this when we are not given a const,
15973 because in that case, this check is more expensive than just
15975 if (TREE_CODE (value
) != INTEGER_CST
)
15977 div
= build_int_cst (TREE_TYPE (value
), divisor
);
15979 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
15983 /* If divisor is a power of two, simplify this to bit manipulation. */
15984 if (divisor
== (divisor
& -divisor
))
15988 t
= build_int_cst (TREE_TYPE (value
), -divisor
);
15989 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
15994 div
= build_int_cst (TREE_TYPE (value
), divisor
);
15995 value
= size_binop_loc (loc
, FLOOR_DIV_EXPR
, value
, div
);
15996 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
16002 /* Returns the pointer to the base of the object addressed by EXP and
16003 extracts the information about the offset of the access, storing it
16004 to PBITPOS and POFFSET. */
16007 split_address_to_core_and_offset (tree exp
,
16008 HOST_WIDE_INT
*pbitpos
, tree
*poffset
)
16011 enum machine_mode mode
;
16012 int unsignedp
, volatilep
;
16013 HOST_WIDE_INT bitsize
;
16014 location_t loc
= EXPR_LOCATION (exp
);
16016 if (TREE_CODE (exp
) == ADDR_EXPR
)
16018 core
= get_inner_reference (TREE_OPERAND (exp
, 0), &bitsize
, pbitpos
,
16019 poffset
, &mode
, &unsignedp
, &volatilep
,
16021 core
= build_fold_addr_expr_loc (loc
, core
);
16027 *poffset
= NULL_TREE
;
16033 /* Returns true if addresses of E1 and E2 differ by a constant, false
16034 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16037 ptr_difference_const (tree e1
, tree e2
, HOST_WIDE_INT
*diff
)
16040 HOST_WIDE_INT bitpos1
, bitpos2
;
16041 tree toffset1
, toffset2
, tdiff
, type
;
16043 core1
= split_address_to_core_and_offset (e1
, &bitpos1
, &toffset1
);
16044 core2
= split_address_to_core_and_offset (e2
, &bitpos2
, &toffset2
);
16046 if (bitpos1
% BITS_PER_UNIT
!= 0
16047 || bitpos2
% BITS_PER_UNIT
!= 0
16048 || !operand_equal_p (core1
, core2
, 0))
16051 if (toffset1
&& toffset2
)
16053 type
= TREE_TYPE (toffset1
);
16054 if (type
!= TREE_TYPE (toffset2
))
16055 toffset2
= fold_convert (type
, toffset2
);
16057 tdiff
= fold_build2 (MINUS_EXPR
, type
, toffset1
, toffset2
);
16058 if (!cst_and_fits_in_hwi (tdiff
))
16061 *diff
= int_cst_value (tdiff
);
16063 else if (toffset1
|| toffset2
)
16065 /* If only one of the offsets is non-constant, the difference cannot
16072 *diff
+= (bitpos1
- bitpos2
) / BITS_PER_UNIT
;
16076 /* Simplify the floating point expression EXP when the sign of the
16077 result is not significant. Return NULL_TREE if no simplification
16081 fold_strip_sign_ops (tree exp
)
16084 location_t loc
= EXPR_LOCATION (exp
);
16086 switch (TREE_CODE (exp
))
16090 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 0));
16091 return arg0
? arg0
: TREE_OPERAND (exp
, 0);
16095 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp
))))
16097 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 0));
16098 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
16099 if (arg0
!= NULL_TREE
|| arg1
!= NULL_TREE
)
16100 return fold_build2_loc (loc
, TREE_CODE (exp
), TREE_TYPE (exp
),
16101 arg0
? arg0
: TREE_OPERAND (exp
, 0),
16102 arg1
? arg1
: TREE_OPERAND (exp
, 1));
16105 case COMPOUND_EXPR
:
16106 arg0
= TREE_OPERAND (exp
, 0);
16107 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
16109 return fold_build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (exp
), arg0
, arg1
);
16113 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
16114 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 2));
16116 return fold_build3_loc (loc
,
16117 COND_EXPR
, TREE_TYPE (exp
), TREE_OPERAND (exp
, 0),
16118 arg0
? arg0
: TREE_OPERAND (exp
, 1),
16119 arg1
? arg1
: TREE_OPERAND (exp
, 2));
16124 const enum built_in_function fcode
= builtin_mathfn_code (exp
);
16127 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
16128 /* Strip copysign function call, return the 1st argument. */
16129 arg0
= CALL_EXPR_ARG (exp
, 0);
16130 arg1
= CALL_EXPR_ARG (exp
, 1);
16131 return omit_one_operand_loc (loc
, TREE_TYPE (exp
), arg0
, arg1
);
16134 /* Strip sign ops from the argument of "odd" math functions. */
16135 if (negate_mathfn_p (fcode
))
16137 arg0
= fold_strip_sign_ops (CALL_EXPR_ARG (exp
, 0));
16139 return build_call_expr_loc (loc
, get_callee_fndecl (exp
), 1, arg0
);