/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "fixed-value.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
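
/* Illustrative sketch (not in the original source): the encoding above is
   a truth mask over the three mutually exclusive outcomes LT, EQ and GT
   (plus UNORD), so combining two comparisons with || or && reduces to a
   bitwise OR or AND of their codes.  For example, (a < b) || (a == b)
   corresponds to COMPCODE_LT | COMPCODE_EQ, which is COMPCODE_LE.  */

static enum comparison_code
compcode_or_example (void)
{
  /* 1 | 2 == 3, i.e. COMPCODE_LE.  */
  return (enum comparison_code) (COMPCODE_LT | COMPCODE_EQ);
}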
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *, bool *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
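
/* Illustrative sketch (not in the original source): with a 64-bit
   HOST_WIDE_INT, adding two large positive halves wraps to a negative sum
   in 2's complement, which OVERFLOW_SUM_SIGN detects.  The wrapped sum is
   computed in unsigned arithmetic to keep the example well defined.  */

static int
overflow_sum_sign_example (void)
{
  HOST_WIDE_INT a = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 2);
  HOST_WIDE_INT b = a;          /* same sign as A */
  HOST_WIDE_INT sum = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) a
                                       + (unsigned HOST_WIDE_INT) b);
  /* A and B share a sign but SUM differs, so this returns nonzero.  */
  return OVERFLOW_SUM_SIGN (a, b, sum);
}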
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
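
/* Illustrative sketch (not in the original source): ENCODE splits a
   two-word integer into four half-word digits and DECODE reassembles
   them, so the two functions are exact inverses.  */

static void
encode_decode_roundtrip_example (void)
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;

  encode (words, 0x12345678, -1);
  decode (words, &low, &hi);
  gcc_assert (low == 0x12345678 && hi == -1);
}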
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec;
  int sign_extended_type;

  if (POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (type);

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT) 1
                << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT) l1 < 0)
        h1 = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))
        {
          h1 = -1;
          l1 |= (HOST_WIDE_INT) (-1) << prec;
        }
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}
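
/* Usage sketch (not in the original source): truncating the value 0x1FF
   to an 8-bit signed type discards the bits beyond the precision and sign
   extends the remaining byte, yielding -1 and reporting overflow.  Assumes
   `signed_char_type_node' has 8-bit precision on the target.  */

static int
fit_double_type_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  int overflow = fit_double_type (0x1FF, 0, &lv, &hv, signed_char_type_node);
  /* lv == (unsigned HOST_WIDE_INT) -1, hv == -1, overflow != 0.  */
  return overflow;
}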
/* We force the double-int HIGH:LOW to the range of the type TYPE by
   sign or zero extending it.
   OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOWED if,
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs
        or OVERFLOWABLE is <0 and any overflow occurs
   We return a new tree node for the extended double-int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
                       HOST_WIDE_INT high, int overflowable,
                       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          tree t = make_node (INTEGER_CST);
          TREE_INT_CST_LOW (t) = low;
          TREE_INT_CST_HIGH (t) = high;
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t) = 1;
          return t;
        }
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}
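
/* Usage sketch (not in the original source): forcing 0x1FF into an 8-bit
   signed type truncates to -1; because OVERFLOWABLE is positive and the
   type is sign extended, the result is a fresh INTEGER_CST with
   TREE_OVERFLOW set rather than the shared node.  Assumes an 8-bit
   `signed_char_type_node'.  */

static tree
force_fit_type_double_example (void)
{
  return force_fit_type_double (signed_char_type_node,
                                (unsigned HOST_WIDE_INT) 0x1FF,
                                (HOST_WIDE_INT) 0,
                                1 /* overflowable */, false /* overflowed */);
}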
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
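
/* Illustrative sketch (not in the original source): adding 1 to the
   all-ones doubleword wraps to zero; with UNSIGNED_P true the carry out
   of the high word is reported as overflow.  */

static int
add_double_unsigned_carry_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  int ovf = add_double_with_sign ((unsigned HOST_WIDE_INT) -1,
                                  (HOST_WIDE_INT) -1,
                                  1, 0, &lv, &hv, true);
  /* lv == 0, hv == 0 and OVF is nonzero.  */
  return ovf;
}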
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
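
/* Illustrative sketch (not in the original source): multiplying two values
   whose product needs more than one host word keeps the full doubleword
   result in LV/HV; 2^40 * 2^40 == 2^80, whose high word is 2^16 on a
   64-bit host, with no signed overflow.  */

static void
mul_double_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  int ovf = mul_double_with_sign ((unsigned HOST_WIDE_INT) 1 << 40, 0,
                                  (unsigned HOST_WIDE_INT) 1 << 40, 0,
                                  &lv, &hv, false);
  gcc_assert (!ovf && lv == 0 && hv == (HOST_WIDE_INT) 1 << 16);
}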
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
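
/* Illustrative sketch (not in the original source): an arithmetic right
   shift of the all-ones (i.e. -1) doubleword replicates the sign bit,
   while a logical shift pulls in zeros from the left.  PREC is the full
   doubleword width here.  */

static void
rshift_double_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  rshift_double ((unsigned HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, 1,
                 2 * HOST_BITS_PER_WIDE_INT, &lv, &hv, 1 /* arith */);
  /* Sign replicated: lv == all ones and hv == -1.  */

  rshift_double ((unsigned HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, 1,
                 2 * HOST_BITS_PER_WIDE_INT, &lv, &hv, 0 /* logical */);
  /* Zero filled from the top: hv now has its top bit clear.  */
}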
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];     /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Insure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {                       /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop.  */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;        /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
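
/* Illustrative sketch (not in the original source): signed -7 / 2 rounds
   to -3 under TRUNC_DIV_EXPR (remainder -1), but to -4 under
   FLOOR_DIV_EXPR, because the quotient is decremented when the ratio is
   negative and the remainder nonzero.  */

static void
div_and_round_double_example (void)
{
  unsigned HOST_WIDE_INT lquo, lrem;
  HOST_WIDE_INT hquo, hrem;

  div_and_round_double (TRUNC_DIV_EXPR, 0,
                        (unsigned HOST_WIDE_INT) -7, (HOST_WIDE_INT) -1,
                        2, 0, &lquo, &hquo, &lrem, &hrem);
  /* Quotient == -3, remainder == -1.  */

  div_and_round_double (FLOOR_DIV_EXPR, 0,
                        (unsigned HOST_WIDE_INT) -7, (HOST_WIDE_INT) -1,
                        2, 0, &lquo, &hquo, &lrem, &hrem);
  /* Quotient == -4, remainder == 1.  */
}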
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
     &obj[some_exotic_number].  */
  if (POINTER_TYPE_P (type))
    {
      uns = false;
      type = signed_type_for (type);
      fit_double_type (int1l, int1h, &int1l, &int1h,
                       type);
    }
  else
    fit_double_type (int1l, int1h, &int1l, &int1h, type);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_tree stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (stmt != NULL_TREE && TREE_NO_WARNING (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL_TREE || !expr_has_location (stmt))
    locus = input_location;
  else
    locus = expr_location (stmt);
  warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL_TREE, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  gcc_assert (!flag_wrapv && !flag_trapv);
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
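
/* Typical usage sketch (not in the original source): a caller that folds
   speculatively brackets the work with defer/undefer, so that a
   -Wstrict-overflow warning is only issued when the folded result is
   actually kept.  */

static tree
fold_speculatively_example (tree expr)
{
  tree folded;

  fold_defer_overflow_warnings ();
  folded = fold (expr);
  /* Issue any deferred warning only if folding changed something.  */
  fold_undefer_overflow_warnings (folded != expr, NULL_TREE,
                                  WARN_STRICT_OVERFLOW_MISC);
  return folded;
}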
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
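
/* Illustrative sketch (not in the original source): the most negative
   value of a signed type has no positive counterpart, so it is the one
   INTEGER_CST this predicate rejects, while every other constant of the
   type may be negated safely.  */

static bool
may_negate_example (void)
{
  tree int_min = TYPE_MIN_VALUE (integer_type_node);
  tree one = build_int_cst (integer_type_node, 1);
  return !may_negate_without_overflow_p (int_min)
         && may_negate_without_overflow_p (one);
}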
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2 (COMPLEX_EXPR, type,
                            fold_negate_expr (TREE_OPERAND (t, 0)),
                            fold_negate_expr (TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1 (CONJ_EXPR, type,
                            fold_negate_expr (TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2 (MINUS_EXPR, type,
                            TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr (fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      break;

    default:
      return false;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
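
/* Illustrative sketch (not in the original source): folding 2 + 3 at
   compile time yields the INTEGER_CST for 5 in the same type, with no
   overflow flags set.  */

static tree
int_const_binop_example (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  return int_const_binop (PLUS_EXPR, two, three, 0);
}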
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        TREE_CONSTANT_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2, notrunc);
          imag = const_binop (code, i1, i2, notrunc);
          break;

        case MULT_EXPR:
          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              notrunc);
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);
            tree t1
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r1, r2, notrunc),
                             const_binop (MULT_EXPR, i1, i2, notrunc),
                             notrunc);
            tree t2
              = const_binop (MINUS_EXPR,
                             const_binop (MULT_EXPR, i1, r2, notrunc),
                             const_binop (MULT_EXPR, r1, i2, notrunc),
                             notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              code = TRUNC_DIV_EXPR;

            real = const_binop (code, t1, magsquared, notrunc);
            imag = const_binop (code, t2, magsquared, notrunc);
          }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  return NULL_TREE;
}
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2 (code, type, arg0, arg1);
}
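
/* Illustrative sketch (not in the original source): size computations
   fold eagerly, so combining two sizetype constants yields another
   constant; e.g. the byte offset of element 3 in an array of 8-byte
   elements.  */

static tree
size_binop_example (void)
{
  return size_binop (MULT_EXPR, size_int (3), size_int (8)); /* 24 */
}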
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
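
/* Illustrative sketch (not in the original source): subtracting a larger
   sizetype constant from a smaller one returns a negative value in the
   corresponding signed type (ssizetype) rather than a huge unsigned
   offset.  */

static tree
size_diffop_example (void)
{
  return size_diffop (size_int (4), size_int (8)); /* ssizetype -4 */
}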
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
                             TREE_INT_CST_HIGH (arg1),
                             /* Don't set the overflow when
                                converting from a pointer,  */
                             !POINTER_TYPE_P (TREE_TYPE (arg1))
                             /* or to a sizetype with same signedness
                                and the precision is unchanged.
                                ???  sizetype is always sign-extended,
                                but its signedness depends on the
                                frontend.  Thus we see spurious overflows
                                here if we do not check this.  */
                             && !((TYPE_PRECISION (TREE_TYPE (arg1))
                                   == TYPE_PRECISION (type))
                                  && (TYPE_UNSIGNED (TREE_TYPE (arg1))
                                      == TYPE_UNSIGNED (type))
                                  && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
                                       && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
                                      || (TREE_CODE (type) == INTEGER_TYPE
                                          && TYPE_IS_SIZETYPE (type)))),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = 1;
	  high = TREE_INT_CST_HIGH (lt);
	  low = TREE_INT_CST_LOW (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = 1;
	      high = TREE_INT_CST_HIGH (ut);
	      low = TREE_INT_CST_LOW (ut);
	    }
	}
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = force_fit_type_double (type, low, high, -1,
			     overflow | TREE_OVERFLOW (arg1));
  return t;
}
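/* Illustrative sketch, not part of the build: the saturating semantics
   documented above.  Folding (int) 1.0e30 yields INT_MAX with
   TREE_OVERFLOW set, and (int) of a NaN folds to 0, also with
   TREE_OVERFLOW set.  The helper name is hypothetical, and ARG1 is
   assumed to be a REAL_CST of type double.  */
#if 0
static tree
fix_trunc_example (tree arg1)
{
  return fold_convert_const_int_from_real (FIX_TRUNC_EXPR,
					   integer_type_node, arg1);
}
#endif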
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  enum machine_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
    {
      lshift_double (temp.low, temp.high,
		     - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
		     &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      lshift_double (temp.low, temp.high,
		     GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
		     &temp_trunc.low, &temp_trunc.high,
		     SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp.low = 0;
      temp.high = 0;
      temp_trunc.low = 0;
      temp_trunc.high = 0;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     If the fractional bits are not zero, add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
    {
      double_int one;
      one.low = 1;
      one.high = 0;
      temp = double_int_add (temp, one);
    }

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp.low, temp.high, -1,
			     (temp.high < 0
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
			      TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    {
      TREE_OVERFLOW (t) = 1;
      TREE_CONSTANT_OVERFLOW (t) = 1;
    }
  else if (TREE_CONSTANT_OVERFLOW (arg1))
    TREE_CONSTANT_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
				       TREE_INT_CST (arg1),
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    {
      TREE_OVERFLOW (t) = 1;
      TREE_CONSTANT_OVERFLOW (t) = 1;
    }
  else if (TREE_CONSTANT_OVERFLOW (arg1))
    TREE_CONSTANT_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    {
      TREE_OVERFLOW (t) = 1;
      TREE_CONSTANT_OVERFLOW (t) = 1;
    }
  else if (TREE_CONSTANT_OVERFLOW (arg1))
    TREE_CONSTANT_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
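/* Illustrative sketch, not part of the build: fold_convert_const only
   folds constant operands; anything else yields NULL_TREE so the caller
   can fall back to building an explicit conversion node.  The helper
   name is hypothetical.  */
#if 0
static tree
convert_const_example (tree arg)
{
  tree folded = fold_convert_const (NOP_EXPR, long_integer_type_node, arg);

  if (folded != NULL_TREE)
    return folded;	/* ARG was an INTEGER_CST, REAL_CST or FIXED_CST.  */
  return build1 (NOP_EXPR, long_integer_type_node, arg);
}
#endif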
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return fold_build1 (NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1 (NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	{
	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert (type, tem);
	}
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1 (NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == FIXED_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1 (FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1 (NOP_EXPR, type, arg);

	case FIXED_POINT_TYPE:
	  return fold_build1 (FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert (type, tem);

	default:
	  gcc_unreachable ();
	}

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
	  || TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case FIXED_POINT_TYPE:
	case INTEGER_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case REAL_TYPE:
	  return fold_build1 (FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert (type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	case FIXED_POINT_TYPE:
	  return build2 (COMPLEX_EXPR, type,
			 fold_convert (TREE_TYPE (type), arg),
			 fold_convert (TREE_TYPE (type), integer_zero_node));

	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
		ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
		return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
	      }

	    arg = save_expr (arg);
	    rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert (TREE_TYPE (type), rpart);
	    ipart = fold_convert (TREE_TYPE (type), ipart);
	    return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1 (VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
	return tem;
      return fold_build1 (NOP_EXPR, type, tem);

    default:
      gcc_unreachable ();
    }
}
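/* Illustrative sketch, not part of the build: fold_convert dispatches on
   the target type, so converting the INTEGER_CST 3 to double folds
   directly to the REAL_CST 3.0 rather than leaving a FLOAT_EXPR behind.
   The helper name is hypothetical.  */
#if 0
static tree
fold_convert_example (void)
{
  tree three = build_int_cst (integer_type_node, 3);

  return fold_convert (double_type_node, three);	/* REAL_CST 3.0.  */
}
#endif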
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
  {
  case VAR_DECL:
  case PARM_DECL:
  case RESULT_DECL:
  case LABEL_DECL:
  case FUNCTION_DECL:
  case SSA_NAME:

  case COMPONENT_REF:
  case INDIRECT_REF:
  case ALIGN_INDIRECT_REF:
  case MISALIGNED_INDIRECT_REF:
  case ARRAY_REF:
  case ARRAY_RANGE_REF:
  case BIT_FIELD_REF:
  case OBJ_TYPE_REF:

  case REALPART_EXPR:
  case IMAGPART_EXPR:
  case PREINCREMENT_EXPR:
  case PREDECREMENT_EXPR:
  case SAVE_EXPR:
  case TRY_CATCH_EXPR:
  case WITH_CLEANUP_EXPR:
  case COMPOUND_EXPR:
  case MODIFY_EXPR:
  case GIMPLE_MODIFY_STMT:
  case TARGET_EXPR:
  case COND_EXPR:
  case BIND_EXPR:
  case MIN_EXPR:
  case MAX_EXPR:
    break;

  default:
    /* Assume the worst for front-end tree codes.  */
    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
      break;
    return false;
  }

  return true;
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
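/* Illustrative sketch, not part of the build: when NaNs are honored the
   inverse of a < b is the unordered-or-greater-or-equal test, since
   ! (a < b) must also hold when either operand is a NaN.  The helper
   name is hypothetical.  */
#if 0
static void
invert_comparison_example (void)
{
  /* Without NaNs: !(a < b) <=> a >= b.  */
  gcc_assert (invert_tree_comparison (LT_EXPR, false) == GE_EXPR);
  /* With NaNs (and !flag_trapping_math): !(a < b) <=> UNGE (a, b).  */
  gcc_assert (invert_tree_comparison (LT_EXPR, true) == UNGE_EXPR);
}
#endif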
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
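/* Illustrative sketch, not part of the build: the compcode encoding gives
   each possible outcome of a comparison (less, equal, greater, unordered)
   its own bit, so combining two comparisons of the same operands is plain
   bit arithmetic, e.g.

	(a < b) | (a == b)  ==>  COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE
	(a < b) & (a == b)  ==>  COMPCODE_FALSE

   The helper name is hypothetical.  */
#if 0
static void
compcode_example (void)
{
  gcc_assert ((comparison_to_compcode (LT_EXPR)
	       | comparison_to_compcode (EQ_EXPR))
	      == comparison_to_compcode (LE_EXPR));
}
#endif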
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

static tree
combine_comparisons (enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  enum comparison_code compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    return fold_build2 (compcode_to_comparison (compcode),
			truth_type, ll_arg, lr_arg);
}
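/* Illustrative sketch, not part of the build: combining two comparisons
   of the same operands.  For integral operands (no NaNs),
   (x < y) || (x == y) folds to x <= y, and (x < y) && (x > y) folds to
   a constant false.  The helper name is hypothetical; X and Y are
   assumed to be integral.  */
#if 0
static tree
combine_example (tree x, tree y)
{
  /* Yields the tree for x <= y.  */
  return combine_comparisons (TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
			      boolean_type_node, x, y);
}
#endif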
/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (enum tree_code code)
{
  return (TREE_CODE_CLASS (code) == tcc_comparison
	  || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
	  || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */

int
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
    return 0;

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
	return operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 1), flags)
	       && operand_equal_p (TREE_OPERAND (arg0, 1),
				   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
	 Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return tree_int_cst_equal (arg0, arg1);

      case FIXED_CST:
	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
				       TREE_FIXED_CST (arg1));

      case REAL_CST:
	if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
				   TREE_REAL_CST (arg1)))
	  return 1;

	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
	  {
	    /* If we do not distinguish between signed and unsigned zero,
	       consider them equal.  */
	    if (real_zerop (arg0) && real_zerop (arg1))
	      return 1;
	  }
	return 0;

      case VECTOR_CST:
	{
	  tree v1, v2;

	  v1 = TREE_VECTOR_CST_ELTS (arg0);
	  v2 = TREE_VECTOR_CST_ELTS (arg1);
	  while (v1 && v2)
	    {
	      if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
				    flags))
		return 0;
	      v1 = TREE_CHAIN (v1);
	      v2 = TREE_CHAIN (v2);
	    }

	  return v1 == v2;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 flags)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    flags));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				0);
      default:
	break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
				    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)				\
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
	{
	case NOP_EXPR:
	case CONVERT_EXPR:
	case FIX_TRUNC_EXPR:
	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
	    return 0;
	  break;
	default:
	  break;
	}

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
	return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), flags)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
	  || TREE_SIDE_EFFECTS (arg1))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	case ALIGN_INDIRECT_REF:
	case MISALIGNED_INDIRECT_REF:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  return OP_SAME (0);

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  /* Operands 2 and 3 may be null.
	     Compare the array index by value if it is constant first as we
	     may have different types but same value here.  */
	  return (OP_SAME (0)
		  && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
					  TREE_OPERAND (arg1, 1))
		      || OP_SAME (1))
		  && OP_SAME_WITH_NULL (2)
		  && OP_SAME_WITH_NULL (3));

	case COMPONENT_REF:
	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
	     may be NULL when we're called to compare MEM_EXPRs.  */
	  return OP_SAME_WITH_NULL (0)
		 && OP_SAME (1)
		 && OP_SAME_WITH_NULL (2);

	case BIT_FIELD_REF:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_expression:
      switch (TREE_CODE (arg0))
	{
	case ADDR_EXPR:
	case TRUTH_NOT_EXPR:
	  return OP_SAME (0);

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  return OP_SAME (0) && OP_SAME (1);

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  if (OP_SAME (0) && OP_SAME (1))
	    return 1;

	  /* Otherwise take into account this is a commutative operation.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 1), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 0), flags));

	default:
	  return 0;
	}

    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
	{
	case CALL_EXPR:
	  /* If the CALL_EXPRs call different functions, then they
	     clearly can not be equal.  */
	  if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
				 flags))
	    return 0;

	  {
	    unsigned int cef = call_expr_flags (arg0);
	    if (flags & OEP_PURE_SAME)
	      cef &= ECF_CONST | ECF_PURE;
	    else
	      cef &= ECF_CONST;
	    if (!cef)
	      return 0;
	  }

	  /* Now see if all the arguments are the same.  */
	  {
	    const_call_expr_arg_iterator iter0, iter1;
	    const_tree a0, a1;
	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
		   a1 = first_const_call_expr_arg (arg1, &iter1);
		 a0 && a1;
		 a0 = next_const_call_expr_arg (&iter0),
		   a1 = next_const_call_expr_arg (&iter1))
	      if (! operand_equal_p (a0, a1, flags))
		return 0;

	    /* If we get here and both argument lists are exhausted
	       then the CALL_EXPRs are equal.  */
	    return ! (a0 || a1);
	  }

	default:
	  return 0;
	}

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
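/* Illustrative sketch, not part of the build: with FLAGS == 0,
   a + b compares equal to b + a because PLUS_EXPR is commutative, but
   any operand with side effects (e.g. a call) makes the result 0.
   The helper name is hypothetical.  */
#if 0
static bool
same_operand_example (tree op0, tree op1)
{
  return operand_equal_p (op0, op1, 0) != 0;
}
#endif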
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    class = tcc_binary;

  else if (class == tcc_expression && code == SAVE_EXPR
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      class = tcc_unary;
      *save_p = 1;
    }

  switch (class)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = tcc_binary;

  switch (class)
    {
    case tcc_unary:
      return fold_build1 (code, type,
			  eval_subst (TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2 (code, type,
			  eval_subst (TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1),
			  eval_subst (TREE_OPERAND (arg, 1),
				      old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

	case COMPOUND_EXPR:
	  return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

	case COND_EXPR:
	  return fold_build3 (code, type,
			      eval_subst (TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 2),
					  old0, new0, old1, new1));
	default:
	  break;
	}
      /* Fall through - ???  */

    case tcc_comparison:
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold_build2 (code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement cast to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return non_lvalue (t);
}
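/* Illustrative sketch, not part of the build: why omit_one_operand is
   needed.  When folding x * 0 to 0, a side-effecting X must still be
   evaluated, so the result is the COMPOUND_EXPR (x, 0) rather than the
   bare constant.  The helper name is hypothetical.  */
#if 0
static tree
fold_mult_zero_example (tree type, tree x)
{
  /* Evaluates X for its side effects, then yields (TYPE) 0.  */
  return omit_one_operand (type, build_int_cst (type, 0), x);
}
#endif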
/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement cast to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return pedantic_non_lvalue (t);
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2 (COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2 (COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
	  && flag_trapping_math
	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
	  && code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;

      code = invert_tree_comparison (code,
				     HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
	return NULL_TREE;

      return build2 (code, type,
		     TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      return build2 (TRUTH_OR_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      return build2 (TRUTH_AND_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
		       TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build2 (TRUTH_XOR_EXPR, type,
		       invert_truthvalue (TREE_OPERAND (arg, 0)),
		       TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build2 (TRUTH_ORIF_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build2 (TRUTH_ANDIF_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
	tree arg1 = TREE_OPERAND (arg, 1);
	tree arg2 = TREE_OPERAND (arg, 2);
	/* A COND_EXPR may have a throw as one operand, which
	   then has void type.  Just leave void operands
	   as they are.  */
	return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
		       VOID_TYPE_P (TREE_TYPE (arg1))
		       ? arg1 : invert_truthvalue (arg1),
		       VOID_TYPE_P (TREE_TYPE (arg2))
		       ? arg2 : invert_truthvalue (arg2));
      }

    case COMPOUND_EXPR:
      return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
	return build1 (TRUTH_NOT_EXPR, type, arg);

    case CONVERT_EXPR:
    case FLOAT_EXPR:
      return build1 (TREE_CODE (arg), type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	break;
      return build2 (EQ_EXPR, type, arg,
		     build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }

  return NULL_TREE;
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
invert_truthvalue (tree arg)
{
  tree tem;

  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tem = fold_truth_not_expr (arg);
  if (!tem)
    tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);

  return tem;
}
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
	(A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  return fold_build2 (TREE_CODE (arg0), type, common,
		      fold_build2 (code, type, left, right));
}
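/* Illustrative sketch, not part of the build: the payoff of
   distribute_bit_expr when constants are involved.
   (x | 0xf0) & (x | 0x0f) becomes x | (0xf0 & 0x0f), and the inner
   fold_build2 reduces the constant operand to 0, leaving just x.
   The helper name is hypothetical.  */
#if 0
static tree
distribute_example (tree x)
{
  tree t = TREE_TYPE (x);
  tree lhs = fold_build2 (BIT_IOR_EXPR, t, x, build_int_cst (t, 0xf0));
  tree rhs = fold_build2 (BIT_IOR_EXPR, t, x, build_int_cst (t, 0x0f));

  return distribute_bit_expr (BIT_AND_EXPR, t, lhs, rhs);
}
#endif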
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */
static tree
distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
			  TREE_OPERAND (arg1, 1), 0))
    return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
			fold_build2 (code, type,
				     TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg1, 0)),
			TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
		       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
	real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2 (MULT_EXPR, type,
			  TREE_OPERAND (arg0, 0),
			  build_real (type, r0));
    }

  return NULL_TREE;
}
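/* Illustrative sketch, not part of the build: the first transformation
   above, (a / c) + (b / c) -> (a + b) / c, trades two divisions for one.
   It is "unsafe" because the rewritten expression can round differently,
   so callers are expected to guard it with the unsafe-math flags.  The
   helper name is hypothetical.  */
#if 0
static tree
distribute_rdiv_example (tree a, tree b, tree c)
{
  tree t = TREE_TYPE (a);
  tree d0 = fold_build2 (RDIV_EXPR, t, a, c);
  tree d1 = fold_build2 (RDIV_EXPR, t, b, c);

  return distribute_real_division (PLUS_EXPR, t, d0, d1);
}
#endif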
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
			int *punsignedp, int *pvolatilep,
			tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (TREE_CODE (exp) == NOP_EXPR
      || TREE_CODE (exp) == CONVERT_EXPR
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
			fold_convert (unsigned_type, and_mask), mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Subroutine for fold: determine if VAL is the INTEGER_CST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, const_tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
		 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
		 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
	  || TREE_CODE (exp) == SSA_NAME
	  || (DECL_P (exp)
	      && ! TREE_ADDRESSABLE (exp)
	      && ! TREE_THIS_VOLATILE (exp)
	      && ! DECL_NONLOCAL (exp)
	      /* Don't regard global variables as simple.  They may be
		 allocated in ways unknown to the compiler (shared memory,
		 #pragma weak, etc).  */
	      && ! TREE_PUBLIC (exp)
	      && ! DECL_EXTERNAL (exp)
	      /* Loading a static variable is unduly expensive, but global
		 registers aren't expensive.  */
	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
	X == 2 || X == 3 || X == 4 || X == 5
   and
	X >= 2 && X <= 5
   are converted to
	(unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */

/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
	     tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
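/* Illustrative sketch, not part of the build: the range arithmetic on the
   example above.  "X == 2 || X == 3 || X == 4 || X == 5" is the union of
   the ranges + [2, 2] ... + [5, 5], which merges into + [2, 5];
   subtracting the low bound and comparing unsigned then yields
   (unsigned) (X - 2) <= 3.  range_binop compares the bounds themselves,
   treating omitted bounds as infinities; the helper name is
   hypothetical.  */
#if 0
static bool
bound_le_example (tree low, tree high)
{
  /* Is LOW <= HIGH, with a null LOW meaning -infinity and a null HIGH
     meaning +infinity?  */
  tree tem = range_binop (LE_EXPR, integer_type_node, low, 0, high, 1);

  return tem != 0 && integer_onep (tem);
}
#endif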
4123 /* Given EXP, a logical expression, set the range it is testing into
4124 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4125 actually being tested. *PLOW and *PHIGH will be made of the same
4126 type as the returned expression. If EXP is not a comparison, we
4127 will most likely not be returning a useful value and range. Set
4128 *STRICT_OVERFLOW_P to true if the return value is only valid
4129 because signed overflow is undefined; otherwise, do not change
4130 *STRICT_OVERFLOW_P. */
4133 make_range (tree exp
, int *pin_p
, tree
*plow
, tree
*phigh
,
4134 bool *strict_overflow_p
)
4136 enum tree_code code
;
4137 tree arg0
= NULL_TREE
, arg1
= NULL_TREE
;
4138 tree exp_type
= NULL_TREE
, arg0_type
= NULL_TREE
;
4140 tree low
, high
, n_low
, n_high
;
4142 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4143 and see if we can refine the range. Some of the cases below may not
4144 happen, but it doesn't seem worth worrying about this. We "continue"
4145 the outer loop when we've changed something; otherwise we "break"
4146 the switch, which will "break" the while. */
4149 low
= high
= build_int_cst (TREE_TYPE (exp
), 0);
4153 code
= TREE_CODE (exp
);
4154 exp_type
= TREE_TYPE (exp
);
4156 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
4158 if (TREE_OPERAND_LENGTH (exp
) > 0)
4159 arg0
= TREE_OPERAND (exp
, 0);
4160 if (TREE_CODE_CLASS (code
) == tcc_comparison
4161 || TREE_CODE_CLASS (code
) == tcc_unary
4162 || TREE_CODE_CLASS (code
) == tcc_binary
)
4163 arg0_type
= TREE_TYPE (arg0
);
4164 if (TREE_CODE_CLASS (code
) == tcc_binary
4165 || TREE_CODE_CLASS (code
) == tcc_comparison
4166 || (TREE_CODE_CLASS (code
) == tcc_expression
4167 && TREE_OPERAND_LENGTH (exp
) > 1))
4168 arg1
= TREE_OPERAND (exp
, 1);
4173 case TRUTH_NOT_EXPR
:
4174 in_p
= ! in_p
, exp
= arg0
;
4177 case EQ_EXPR
: case NE_EXPR
:
4178 case LT_EXPR
: case LE_EXPR
: case GE_EXPR
: case GT_EXPR
:
4179 /* We can only do something if the range is testing for zero
4180 and if the second operand is an integer constant. Note that
4181 saying something is "in" the range we make is done by
4182 complementing IN_P since it will set in the initial case of
4183 being not equal to zero; "out" is leaving it alone. */
4184 if (low
== 0 || high
== 0
4185 || ! integer_zerop (low
) || ! integer_zerop (high
)
4186 || TREE_CODE (arg1
) != INTEGER_CST
)
4191 case NE_EXPR
: /* - [c, c] */
4194 case EQ_EXPR
: /* + [c, c] */
4195 in_p
= ! in_p
, low
= high
= arg1
;
4197 case GT_EXPR
: /* - [-, c] */
4198 low
= 0, high
= arg1
;
4200 case GE_EXPR
: /* + [c, -] */
4201 in_p
= ! in_p
, low
= arg1
, high
= 0;
4203 case LT_EXPR
: /* - [c, -] */
4204 low
= arg1
, high
= 0;
4206 case LE_EXPR
: /* + [-, c] */
4207 in_p
= ! in_p
, low
= 0, high
= arg1
;
4213 /* If this is an unsigned comparison, we also know that EXP is
4214 greater than or equal to zero. We base the range tests we make
4215 on that fact, so we record it here so we can parse existing
4216 range tests. We test arg0_type since often the return type
4217 of, e.g. EQ_EXPR, is boolean. */
4218 if (TYPE_UNSIGNED (arg0_type
) && (low
== 0 || high
== 0))
4220 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
4222 build_int_cst (arg0_type
, 0),
4226 in_p
= n_in_p
, low
= n_low
, high
= n_high
;
4228 /* If the high bound is missing, but we have a nonzero low
4229 bound, reverse the range so it goes from zero to the low bound
4231 if (high
== 0 && low
&& ! integer_zerop (low
))
4234 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low
, 0,
4235 integer_one_node
, 0);
4236 low
= build_int_cst (arg0_type
, 0);
4244 /* (-x) IN [a,b] -> x in [-b, -a] */
4245 n_low
= range_binop (MINUS_EXPR
, exp_type
,
4246 build_int_cst (exp_type
, 0),
4248 n_high
= range_binop (MINUS_EXPR
, exp_type
,
4249 build_int_cst (exp_type
, 0),
4251 low
= n_low
, high
= n_high
;
4257 exp
= build2 (MINUS_EXPR
, exp_type
, negate_expr (arg0
),
4258 build_int_cst (exp_type
, 1));
4261 case PLUS_EXPR
: case MINUS_EXPR
:
4262 if (TREE_CODE (arg1
) != INTEGER_CST
)
4265 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4266 move a constant to the other side. */
4267 if (!TYPE_UNSIGNED (arg0_type
)
4268 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4271 /* If EXP is signed, any overflow in the computation is undefined,
4272 so we don't worry about it so long as our computations on
4273 the bounds don't overflow. For unsigned, overflow is defined
4274 and this is exactly the right thing. */
4275 n_low
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
4276 arg0_type
, low
, 0, arg1
, 0);
4277 n_high
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
4278 arg0_type
, high
, 1, arg1
, 0);
4279 if ((n_low
!= 0 && TREE_OVERFLOW (n_low
))
4280 || (n_high
!= 0 && TREE_OVERFLOW (n_high
)))
4283 if (TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4284 *strict_overflow_p
= true;
4286 /* Check for an unsigned range which has wrapped around the maximum
4287 value thus making n_high < n_low, and normalize it. */
4288 if (n_low
&& n_high
&& tree_int_cst_lt (n_high
, n_low
))
4290 low
= range_binop (PLUS_EXPR
, arg0_type
, n_high
, 0,
4291 integer_one_node
, 0);
4292 high
= range_binop (MINUS_EXPR
, arg0_type
, n_low
, 0,
4293 integer_one_node
, 0);
4295 /* If the range is of the form +/- [ x+1, x ], we won't
4296 be able to normalize it. But then, it represents the
4297 whole range or the empty set, so make it
4299 if (tree_int_cst_equal (n_low
, low
)
4300 && tree_int_cst_equal (n_high
, high
))
4306 low
= n_low
, high
= n_high
;
	case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
	  if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
	    break;

	  if (! INTEGRAL_TYPE_P (arg0_type)
	      || (low != 0 && ! int_fits_type_p (low, arg0_type))
	      || (high != 0 && ! int_fits_type_p (high, arg0_type)))
	    break;

	  n_low = low, n_high = high;

	  if (n_low != 0)
	    n_low = fold_convert (arg0_type, n_low);

	  if (n_high != 0)
	    n_high = fold_convert (arg0_type, n_high);

	  /* If we're converting arg0 from an unsigned type, to exp,
	     a signed type, we will be doing the comparison as unsigned.
	     The tests above have already verified that LOW and HIGH
	     are both positive.

	     So we have to ensure that we will handle large unsigned
	     values the same way that the current signed bounds treat
	     negative values.  */

	  if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
	    {
	      tree high_positive;
	      tree equiv_type;
	      /* For fixed-point modes, we need to pass the saturating flag
		 as the 2nd parameter.  */
	      if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
		equiv_type = lang_hooks.types.type_for_mode
			     (TYPE_MODE (arg0_type),
			      TYPE_SATURATING (arg0_type));
	      else
		equiv_type = lang_hooks.types.type_for_mode
			     (TYPE_MODE (arg0_type), 1);

	      /* A range without an upper bound is, naturally, unbounded.
		 Since convert would have cropped a very large value, use
		 the max value for the destination type.  */
	      high_positive
		= TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
		  : TYPE_MAX_VALUE (arg0_type);

	      if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
		high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
					     fold_convert (arg0_type,
							   high_positive),
					     build_int_cst (arg0_type, 1));

	      /* If the low bound is specified, "and" the range with the
		 range for which the original unsigned value will be
		 positive.  */
	      if (low != 0)
		{
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      1, n_low, n_high, 1,
				      fold_convert (arg0_type,
						    integer_zero_node),
				      high_positive))
		    break;

		  in_p = (n_in_p == in_p);
		}
	      else
		{
		  /* Otherwise, "or" the range with the range of the input
		     that will be interpreted as negative.  */
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      0, n_low, n_high, 1,
				      fold_convert (arg0_type,
						    integer_zero_node),
				      high_positive))
		    break;

		  in_p = (in_p != n_in_p);
		}
	    }

	  exp = arg0;
	  low = n_low, high = n_high;
	  continue;

	default:
	  break;
	}

      break;
    }
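  /* Rough illustration: when U is unsigned and EXP is (int) U of equal
     precision, an unsigned value with its sign bit set reads as negative
     through the conversion; the merge_ranges calls above fold that half
     of the unsigned domain into the range accordingly.  */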
  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR,
						    integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
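/* For instance, make_range applied to x >= 2 returns EXP = x with
   *PIN_P = 1, *PLOW = 2 and *PHIGH = 0 (no upper bound); applied to a
   test that reduces to a constant, such as 3 <= 7, it evaluates the
   test, returns 0, and encodes the verdict in *PIN_P.  */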
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of,
   depending on IN_P) the range.  Return 0 if the test couldn't be
   created.  */

tree
build_range_check (tree type, tree exp, int in_p, tree low, tree high)
{
  tree etype = TREE_TYPE (exp);
  tree value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (! in_p)
    {
      value = build_range_check (type, exp, 1, low, high);
      if (value != 0)
	return invert_truthvalue (value);

      return 0;
    }

  if (low == 0 && high == 0)
    return build_int_cst (type, 1);

  if (low == 0)
    return fold_build2 (LE_EXPR, type, exp,
			fold_convert (etype, high));

  if (high == 0)
    return fold_build2 (GE_EXPR, type, exp,
			fold_convert (etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2 (EQ_EXPR, type, exp,
			fold_convert (etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = unsigned_type_for (etype);
	  high = fold_convert (etype, high);
	  exp = fold_convert (etype, exp);
	}
      return build_range_check (type, exp, 1, 0, high);
    }
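  /* The recursion above rewrites a signed test 0 <= c && c <= N as the
     single unsigned comparison (unsigned) c <= N: nonnegative signed
     values are unchanged by the conversion, while negative ones become
     large and fail the test.  */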
  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
	}
      else
	{
	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	  lo = (unsigned HOST_WIDE_INT) -1;
	}

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      etype = signed_type_for (etype);
	      exp = fold_convert (etype, exp);
	    }
	  return fold_build2 (GT_EXPR, type, exp,
			      build_int_cst (etype, 0));
	}
    }
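  /* The check works for any width: [1, 127] is exactly the set of 8-bit
     values whose signed interpretation is positive, so the pair of
     comparisons collapses into one signed test against zero.  */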
  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetic for the type of the expression.  */
  switch (TREE_CODE (etype))
    {
    case INTEGER_TYPE:
      /* There is no requirement that LOW be within the range of ETYPE
	 if the latter is a subtype.  It must, however, be within the base
	 type of ETYPE.  So be sure we do the subtraction in that type.  */
      if (TREE_TYPE (etype))
	etype = TREE_TYPE (etype);
      break;

    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					      TYPE_UNSIGNED (etype));
      break;

    default:
      break;
    }

  /* If we don't have wrap-around arithmetic upfront, try to force it.  */
  if (TREE_CODE (etype) == INTEGER_TYPE
      && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  integer_one_node, 1);
      minv = fold_convert (utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return 0;
    }

  high = fold_convert (etype, high);
  low = fold_convert (etype, low);
  exp = fold_convert (etype, exp);

  value = const_binop (MINUS_EXPR, high, low, 0);

  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
	{
	  low = fold_convert (sizetype, low);
	  low = fold_build1 (NEGATE_EXPR, sizetype, low);
	  return build_range_check (type,
				    fold_build2 (POINTER_PLUS_EXPR, etype,
						 exp, low),
				    1, build_int_cst (etype, 0), value);
	}
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (type,
			      fold_build2 (MINUS_EXPR, etype, exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}
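/* The classic instance of this subtraction trick is the digit test:
   ch >= '0' && ch <= '9' in effect becomes (unsigned) (ch - '0') <= 9,
   because values below '0' wrap around to large unsigned numbers and
   fail the single comparison.  */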
/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}

/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified
   parameters.  */

bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));
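  /* Example: merging the included ranges [0, 10] and [5, 15] leaves the
     operands unswapped and computes no_overlap = 0 and subset = 0; the
     in0_p && in1_p case below then yields the range [5, 10].  */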
  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the
	 start of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* We are in the weird situation where high0 > high1 but
		 high1 has no successor.  Punt.  */
	      return 0;
	    }
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = 1;
	  if (high == 0)
	    {
	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
	      return 0;
	    }
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the
	 second is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* high1 > high0 but high0 has no successor.  Punt.  */
	      return 0;
	    }
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of
	 the second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (low0))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (high1))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       integer_one_node, 1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
		 minimum values of the given type.  For
		 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
		 return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
		{
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
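/* As an example of the adjacency handling above: excluding [-, 2] and
   also excluding [7, -] leaves exactly the included range [3, 6].  */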
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is also used to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
	  ? real_zerop (arg01)
	  : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	     /* In the case that A is of the form X-Y, '-A' (arg2) may
		have already been folded to Y-X, check for that.  */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert (arg1_type, arg1);
	return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
	return pedantic_non_lvalue (fold_convert (type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert (signed_type_for
			       (TREE_TYPE (arg1)), arg1);
	tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue (fold_convert (type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert (signed_type_for
			       (TREE_TYPE (arg1)), arg1);
	tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert (type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }
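  /* Concretely, when signed zeros need not be honored, x >= 0.0 ? x : -x
     folds to ABS_EXPR <x> and x <= 0.0 ? x : -x to -ABS_EXPR <x>.  */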
  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue (fold_convert (type, arg1));
      else if (comp_code == EQ_EXPR)
	return build_int_cst (type, 0);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || (strcmp (lang_hooks.name, "GNU C++") != 0
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	{
	  comp_type = type;
	  comp_op0 = arg1;
	  comp_op1 = arg2;
	}

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue (fold_convert (type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue (fold_convert (type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert (comp_type, comp_op0);
	      comp_op1 = fold_convert (comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
	      return pedantic_non_lvalue (fold_convert (type, tem));
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert (comp_type, comp_op0);
	      comp_op1 = fold_convert (comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
	      return pedantic_non_lvalue (fold_convert (type, tem));
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue (fold_convert (type, arg2));
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue (fold_convert (type, arg1));
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }
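  /* Concretely, when NaNs need not be honored, a < b ? a : b becomes
     MIN_EXPR <a, b> and a > b ? a : b becomes MAX_EXPR <b, a>.  */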
  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert (type, arg01);
	return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
						   type,
						   fold_convert (type, arg1),
						   arg2));
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
						   type,
						   fold_convert (type, arg1),
						   arg2));
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
						   type,
						   fold_convert (type, arg1),
						   arg2));
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
						   type,
						   fold_convert (type, arg1),
						   arg2));
	break;

      case NE_EXPR:
	break;

      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}
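/* For instance, x < 48 ? x : 47 matches the LT_EXPR case with
   C1 == C2 + 1 and is rewritten as MIN_EXPR <x, 47>.  */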
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
  tree tem;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when simplifying range test");

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (type,
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue (tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (code == TRUTH_ANDIF_EXPR
	       || code == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2 (code == TRUTH_ANDIF_EXPR
		       ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
		       type, op0, op1);

      else if (lang_hooks.decls.global_bindings_p () == 0
	       && ! CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (type, common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (type, common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (warnmsg,
				       WARN_STRICT_OVERFLOW_COMPARISON);
	      return build2 (code == TRUTH_ANDIF_EXPR
			     ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			     type, lhs, rhs);
	    }
	}
    }

  return 0;
}
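/* Example: for integer x, x >= 2 && x <= 7 yields the ranges [2, -] and
   [-, 7]; merge_ranges combines them into [2, 7], and build_range_check
   then emits a single unsigned test of x - 2 against 5.  */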
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask), 0);
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
}
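/* Numeric illustration for a 32-bit mode and P = 8: C = 0xffffff80 is
   properly sign-extended and yields TEMP = 0xffffff00, so the result
   0x00000080 has clear extra bits; C = 0x00000080 instead yields
   0xffffff80, whose set extra bits flag the missing sign-extension.  */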
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive
   branches and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a
   comparison against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit
   masking operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by
   this function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xrl_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, result;
  int first_bit, end_bit;
  int volatilep;
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code orig_code = code;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  Avoid doing this if the RHS is a floating-point
     comparison since those can trap.  */

  if (BRANCH_COST >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2 (NE_EXPR, truth_type,
		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			       ll_arg, rl_arg),
		       build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2 (EQ_EXPR, truth_type,
		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			       ll_arg, rl_arg),
		       build_int_cst (TREE_TYPE (ll_arg), 0));
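      /* Both rewrites rely on bitwise OR being zero exactly when every
	 operand is zero, so the pair of tests collapses into a single
	 comparison against zero.  */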
      if (LOGICAL_OP_NON_SHORT_CIRCUIT)
	{
	  if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
	    return build2 (code, truth_type, lhs, rhs);
	  return NULL_TREE;
	}
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
			 size_int (xll_bitpos), 0);
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
			 size_int (xrl_bitpos), 0);

  if (l_const)
    {
      l_const = fold_convert (lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1 (BIT_NOT_EXPR,
						     lntype, ll_mask),
					0)))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert (lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1 (BIT_NOT_EXPR,
						     lntype, rl_mask),
					0)))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
			   const_binop (BIT_AND_EXPR, result,
					r_const, 0)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  return NULL_TREE;
}
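/* Illustrative sketch: if s.a and s.b are bit-fields that land in the
   same word, s.a == 2 && s.b == 4 can be tested by loading the word
   once, masking both fields, and comparing against the combined
   constant; the code above computes the masks and shifted constants
   needed for that single comparison.  */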
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (enum tree_code code, tree type, tree op0,
			    tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const = op1;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, fail.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
	tree tem
	  = optimize_minmax_comparison (invert_tree_comparison (code, false),
					type, op0, op1);
	if (tem)
	  return invert_truthvalue (tem);
	return NULL_TREE;
      }

    case GE_EXPR:
      return
	fold_build2 (TRUTH_ORIF_EXPR, type,
		     optimize_minmax_comparison
		     (EQ_EXPR, type, arg0, comp_const),
		     optimize_minmax_comparison
		     (GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold_build2 (LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5   */
	return fold_build2 (EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold_build2 (GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold_build2 (EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold_build2 (GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand (type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold_build2 (GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
   the machine has a multiply-accumulate insn or that this is part of an
   addressing calculation.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return value depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
		bool *strict_overflow_p)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
  depth--;

  return ret;
}

static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
		  bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;
  bool sub_strict_overflow_p;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */

  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c), 0);
      break;
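      /* E.g. T = 12 with C = 4 and CODE == TRUNC_DIV_EXPR: 12 % 4 == 0,
	 so the node folds directly to the constant 3.  */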
    case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
      /* If op0 is an expression ... */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || VL_EXP_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and is unsigned, and its type is smaller than ctype,
	     then we cannot pass through as widening.  */
	  && ((TYPE_UNSIGNED (TREE_TYPE (op0))
	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
	       && (GET_MODE_SIZE (TYPE_MODE (ctype))
		   > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (GET_MODE_SIZE (TYPE_MODE (type))
		  < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
	      /* ... or has undefined overflow while the converted to
		 type has not, we cannot do the operation in the inner type
		 as that would introduce undefined overflow.  */
	      || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
		  && !TYPE_OVERFLOW_UNDEFINED (type))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && !TREE_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE,
					 strict_overflow_p))))
	return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
	{
	  tree cstype = (*signed_type_for) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype,
				    strict_overflow_p)) != 0)
	    {
	      t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}
      /* If the constant is negative, we cannot simplify this.  */
      if (tree_int_cst_sgn (c) == -1)
	break;
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      if ((t1 = extract_muldiv (op0, c, code, wide_type,
				strict_overflow_p)) != 0)
	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      sub_strict_overflow_p = false;
      if ((t1 = extract_muldiv (op0, c, code, wide_type,
				&sub_strict_overflow_p)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type,
				   &sub_strict_overflow_p)) != 0)
	{
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node))
	     > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node,
						   op1, 0)))
	  && !TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype, fold_convert (ctype, op0), t1),
			       c, code, wide_type, strict_overflow_p);
      break;
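      /* For instance, unsigned x << 3 re-enters as x * 8, and x >> 2 as
	 the floor division x / 4, letting the arithmetic cases below
	 continue the folding.  */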
    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	tcode = PLUS_EXPR, op1 = negate_expr (op1);

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c), 0);
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
	    break;
	}
      else
	break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
	 the operation since it will change the result if the original
	 computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
	  && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	return omit_one_operand (type, integer_zero_node, op0);

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type,
				   strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type,
					strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code
	  && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
				     fold_convert (ctype, c), 0))
	  && !TREE_OVERFLOW (t1))
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with an
	 operation of either CODE or TCODE.

	 If we have an unsigned type that is not a sizetype, we cannot do
	 this since it will change the result if the original computation
	 overflowed.  */
      if ((TYPE_OVERFLOW_UNDEFINED (ctype)
	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
		  && code != MULT_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     op1, c, 0)));
	    }
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     c, op1, 0)));
	    }
	}
      break;

    default:
      break;
    }

  return 0;
}
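/* The "cancel" case above is the source of rewrites such as
   (x * 8) / 4 -> x * 2: since 8 % 4 == 0, the division folds into the
   inner multiplication constant.  This is only done when overflow is
   undefined for the type or the type is a sizetype.  */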
/* Return a node which has the indicated constant VALUE (either 0 or
   1), and is of the indicated TYPE.  */

tree
constant_boolean_node (int value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else
    return build_int_cst (type, value);
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;

  /* This transformation is only worthwhile if we don't have to wrap
     arg in a SAVE_EXPR, and the operation can be simplified on at least
     one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg))
    return NULL_TREE;

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand is a void-typed expression (such as a throw),
	 it does not make sense to try to perform a logical or arithmetic
	 operation on it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  arg = fold_convert (arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert (cond_type, true_value);
      if (cond_first_p)
	lhs = fold_build2 (code, type, true_value, arg);
      else
	lhs = fold_build2 (code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert (cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2 (code, type, false_value, arg);
      else
	rhs = fold_build2 (code, type, arg, false_value);
    }

  test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
  return fold_convert (type, test);
}
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
                     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = CALL_EXPR_ARG (arg0, 0);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
        {
          /* sqrt(x) < y is always false, if y is negative.  */
          if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
            return omit_one_operand (type, integer_zero_node, arg);

          /* sqrt(x) > y is always true, if y is negative and we
             don't care about NaNs, i.e. negative values of x.  */
          if (code == NE_EXPR || !HONOR_NANS (mode))
            return omit_one_operand (type, integer_one_node, arg);

          /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
          return fold_build2 (GE_EXPR, type, arg,
                              build_real (TREE_TYPE (arg), dconst0));
        }
      else if (code == GT_EXPR || code == GE_EXPR)
        {
          REAL_VALUE_TYPE c2;

          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
          real_convert (&c2, mode, &c2);

          if (REAL_VALUE_ISINF (c2))
            {
              /* sqrt(x) > y is x == +Inf, when y is very large.  */
              if (HONOR_INFINITIES (mode))
                return fold_build2 (EQ_EXPR, type, arg,
                                    build_real (TREE_TYPE (arg), c2));

              /* sqrt(x) > y is always false, when y is very large
                 and we don't care about infinities.  */
              return omit_one_operand (type, integer_zero_node, arg);
            }

          /* sqrt(x) > c is the same as x > c*c.  */
          return fold_build2 (code, type, arg,
                              build_real (TREE_TYPE (arg), c2));
        }
      else if (code == LT_EXPR || code == LE_EXPR)
        {
          REAL_VALUE_TYPE c2;

          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
          real_convert (&c2, mode, &c2);

          if (REAL_VALUE_ISINF (c2))
            {
              /* sqrt(x) < y is always true, when y is a very large
                 value and we don't care about NaNs or Infinities.  */
              if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
                return omit_one_operand (type, integer_one_node, arg);

              /* sqrt(x) < y is x != +Inf when y is very large and we
                 don't care about NaNs.  */
              if (! HONOR_NANS (mode))
                return fold_build2 (NE_EXPR, type, arg,
                                    build_real (TREE_TYPE (arg), c2));

              /* sqrt(x) < y is x >= 0 when y is very large and we
                 don't care about Infinities.  */
              if (! HONOR_INFINITIES (mode))
                return fold_build2 (GE_EXPR, type, arg,
                                    build_real (TREE_TYPE (arg), dconst0));

              /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
              if (lang_hooks.decls.global_bindings_p () != 0
                  || CONTAINS_PLACEHOLDER_P (arg))
                return NULL_TREE;

              arg = save_expr (arg);
              return fold_build2 (TRUTH_ANDIF_EXPR, type,
                                  fold_build2 (GE_EXPR, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           dconst0)),
                                  fold_build2 (NE_EXPR, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           c2)));
            }

          /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
          if (! HONOR_NANS (mode))
            return fold_build2 (code, type, arg,
                                build_real (TREE_TYPE (arg), c2));

          /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
          if (lang_hooks.decls.global_bindings_p () == 0
              && ! CONTAINS_PLACEHOLDER_P (arg))
            {
              arg = save_expr (arg);
              return fold_build2 (TRUTH_ANDIF_EXPR, type,
                                  fold_build2 (GE_EXPR, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           dconst0)),
                                  fold_build2 (code, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           c2)));
            }
        }
    }

  return NULL_TREE;
}
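
/* Illustrative check (not part of the GCC sources): the central rewrite
   above, sqrt(x) OP c becoming x OP c*c, verified numerically for finite
   nonnegative inputs.  Guarded out of the build; compile standalone.  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  double c = 3.0;
  double x;
  for (x = 0.0; x < 100.0; x += 0.5)
    {
      /* fold_mathfn_compare folds the sqrt call away entirely.  */
      assert ((sqrt (x) > c) == (x > c * c));
      assert ((sqrt (x) < c) == (x < c * c));
    }
  return 0;
}
#endif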
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
        return NULL_TREE;
      return omit_one_operand (type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
        return omit_one_operand (type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      if (lang_hooks.decls.global_bindings_p () == 0
          && ! CONTAINS_PLACEHOLDER_P (arg0))
        {
          arg0 = save_expr (arg0);
          return fold_build2 (EQ_EXPR, type, arg0, arg0);
        }
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
        return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
                            arg0, build_real (TREE_TYPE (arg0), max));

      temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1 (TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
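
/* Illustrative check (not part of the GCC sources): the DBL_MAX rewrites
   above, assuming IEEE-style infinities and non-NaN inputs.  */
#if 0
#include <assert.h>
#include <float.h>
#include <math.h>

int
main (void)
{
  double xs[] = { -INFINITY, -DBL_MAX, -1.0, 0.0, 1.0, DBL_MAX, INFINITY };
  unsigned i;
  for (i = 0; i < sizeof xs / sizeof xs[0]; i++)
    {
      assert ((xs[i] < INFINITY) == (xs[i] <= DBL_MAX));   /* LT_EXPR */
      assert ((xs[i] >= INFINITY) == (xs[i] > DBL_MAX));   /* GE_EXPR */
    }
  return 0;
}
#endif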
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X / C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  unsigned HOST_WIDE_INT lpart;
  HOST_WIDE_INT hpart;
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  bool neg_overflow;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
  overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
                                   TREE_INT_CST_HIGH (arg01),
                                   TREE_INT_CST_LOW (arg1),
                                   TREE_INT_CST_HIGH (arg1),
                                   &lpart, &hpart, unsigned_p);
  prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
                                -1, overflow);
  neg_overflow = false;

  if (unsigned_p)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1), 0);
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
      overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
                                       TREE_INT_CST_HIGH (prod),
                                       TREE_INT_CST_LOW (tmp),
                                       TREE_INT_CST_HIGH (tmp),
                                       &lpart, &hpart, unsigned_p);
      hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
                                  -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1), 0);
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          neg_overflow = true;
          lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
          hi = prod;
          break;

        case 0:
          lo = fold_negate_const (tmp, TREE_TYPE (arg0));
          hi = tmp;
          break;

        case 1:
          hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
          lo = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1), 0);
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
          lo = prod;
          break;

        case 0:
          hi = fold_negate_const (tmp, TREE_TYPE (arg0));
          lo = tmp;
          break;

        case 1:
          neg_overflow = true;
          lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
          hi = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2 (GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2 (LE_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2 (LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2 (GT_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
        {
          tmp = neg_overflow ? integer_zero_node : integer_one_node;
          return omit_one_operand (type, tmp, arg00);
        }
      return fold_build2 (LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
        {
          tmp = neg_overflow ? integer_zero_node : integer_one_node;
          return omit_one_operand (type, tmp, arg00);
        }
      return fold_build2 (LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
        {
          tmp = neg_overflow ? integer_one_node : integer_zero_node;
          return omit_one_operand (type, tmp, arg00);
        }
      return fold_build2 (GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
        {
          tmp = neg_overflow ? integer_one_node : integer_zero_node;
          return omit_one_operand (type, tmp, arg00);
        }
      return fold_build2 (GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
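
/* Illustrative check (not part of the GCC sources): the range check the
   function above builds for X / C1 == C2, exercised for both signs of
   the quotient (truncating division).  */
#if 0
#include <assert.h>

int
main (void)
{
  int x;
  /* X / 3 == 2 becomes 6 <= X && X <= 8; the negative quotient gets
     the mirrored range because division truncates toward zero.  */
  for (x = -30; x <= 30; x++)
    {
      assert ((x / 3 == 2) == (x >= 6 && x <= 8));
      assert ((x / 3 == -2) == (x >= -8 && x <= -6));
    }
  return 0;
}
#endif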
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
                                     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
         this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
          /* This is only a win if casting to a signed type is cheap,
             i.e. when arg00's type is not a partial mode.  */
          && TYPE_PRECISION (TREE_TYPE (arg00))
             == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
        {
          tree stype = signed_type_for (TREE_TYPE (arg00));
          return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                              result_type, fold_convert (stype, arg00),
                              build_int_cst (stype, 0));
        }
    }

  return NULL_TREE;
}
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
                      tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
         test.  */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
                                                 result_type);
      if (tem)
        return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
         convert that into ((A >> C2) & 1), where C2 = log2(C).
         Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
                      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert (intermediate_type, inner);

      if (bitnum != 0)
        inner = build2 (RSHIFT_EXPR, intermediate_type,
                        inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
        inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert (result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
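
/* Illustrative check (not part of the GCC sources): the shift/mask form
   built above for a single-bit test, with C = 1 << 3.  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned a;
  for (a = 0; a < 64; a++)
    {
      /* (A & C) != 0 becomes (A >> log2(C)) & 1 ...  */
      assert (((a & 8) != 0) == ((a >> 3) & 1));
      /* ... and (A & C) == 0 gets an extra XOR with 1 first.  */
      assert (((a & 8) == 0) == ((((a >> 3) ^ 1) & 1)));
    }
  return 0;
}
#endif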
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (const_tree arg0, const_tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
         && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == FIXED_CST)
    return 0;
  if (TREE_CODE (arg0) == FIXED_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (optimize_size)
    return 0;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}
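
/* Why the canonical order matters (illustrative sketch, not GCC code):
   with constants always placed second, later passes can recognize equal
   expressions with a single syntactic comparison, e.g. `1 + x' and
   `x + 1' both reach the optimizers as `x + 1'.  */
#if 0
#include <assert.h>

static int f (int x) { return x + 1; }
static int g (int x) { return 1 + x; }  /* canonicalized to x + 1 */

int
main (void)
{
  assert (f (2) == g (2));
  return 0;
}
#endif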
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, NULL_TREE);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
          || (TYPE_PRECISION (shorter_type)
              >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
          || (TREE_CODE (arg1_unw) == INTEGER_CST
              && (TREE_CODE (shorter_type) == INTEGER_TYPE
                  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
              && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2 (code, type, arg0_unw,
                        fold_convert (shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with an integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
        return omit_one_operand (type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
        return omit_one_operand (type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
        return omit_one_operand (type, integer_one_node, arg0);
      else if (below)
        return omit_one_operand (type, integer_zero_node, arg0);
      break;

    case GT_EXPR:
    case GE_EXPR:
      if (above)
        return omit_one_operand (type, integer_zero_node, arg0);
      else if (below)
        return omit_one_operand (type, integer_one_node, arg0);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (enum tree_code code, tree type,
                              tree arg0, tree arg1)
{
  tree arg0_inner;
  tree inner_type, outer_type;

  if (TREE_CODE (arg0) != NOP_EXPR
      && TREE_CODE (arg0) != CONVERT_EXPR)
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !((TREE_CODE (arg1) == NOP_EXPR
            || TREE_CODE (arg1) == CONVERT_EXPR)
           && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
                                  TREE_INT_CST_HIGH (arg1), 0,
                                  TREE_OVERFLOW (arg1));
  else
    arg1 = fold_convert (inner_type, arg1);

  return fold_build2 (code, type, arg0_inner, arg1);
}
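
/* Illustrative check (not part of the GCC sources): a same-precision
   sign change does not affect equality, so the cast can be stripped
   and the constant refit into the inner type.  */
#if 0
#include <assert.h>

int
main (void)
{
  int x;
  for (x = -10; x <= 10; x++)
    assert (((unsigned) x == 5u) == (x == 5));
  return 0;
}
#endif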
/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
   the step of the array.  Reconstructs s and delta in the case of s * delta
   being an integer constant (and thus already folded).
   ADDR is the address.  OP1 is the multiplicative expression.
   If the function succeeds, the new address expression is returned.  Otherwise
   NULL_TREE is returned.  */

static tree
try_move_mult_to_index (tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;
  bool mdim = false;

  /* Strip the nops that might be added when converting op1 to sizetype.  */
  STRIP_NOPS (op1);

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          s = arg0;
          delta = arg1;
        }
      else if (TREE_CODE (arg1) == INTEGER_CST)
        {
          s = arg1;
          delta = arg0;
        }
      else
        return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Treat op1 as delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
        {
          /* Remember if this was a multi-dimensional array.  */
          if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
            mdim = true;

          itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
          if (! itype)
            continue;

          step = array_ref_element_size (ref);
          if (TREE_CODE (step) != INTEGER_CST)
            continue;

          if (s)
            {
              if (! tree_int_cst_equal (step, s))
                continue;
            }
          else
            {
              /* Try if delta is a multiple of step.  */
              tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
              if (! tmp)
                continue;
              delta = tmp;
            }

          /* Only fold here if we can verify we do not overflow one
             dimension of a multi-dimensional array.  */
          if (mdim)
            {
              tree tmp;

              if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
                  || !INTEGRAL_TYPE_P (itype)
                  || !TYPE_MAX_VALUE (itype)
                  || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
                continue;

              tmp = fold_binary (PLUS_EXPR, itype,
                                 fold_convert (itype,
                                               TREE_OPERAND (ref, 1)),
                                 fold_convert (itype, delta));
              if (!tmp
                  || TREE_CODE (tmp) != INTEGER_CST
                  || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
                continue;
            }

          break;
        }

      if (!handled_component_p (ref))
        return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
                                       fold_convert (itype,
                                                     TREE_OPERAND (pos, 1)),
                                       fold_convert (itype, delta));

  return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
}
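
/* Illustrative check (not part of the GCC sources): the address identity
   behind the rewrite above -- advancing &a[i] by delta elements' worth of
   bytes lands on &a[i + delta].  */
#if 0
#include <assert.h>

int
main (void)
{
  int a[10];
  int i = 2, delta = 3;
  assert ((char *) &a[i] + delta * sizeof (int) == (char *) &a[i + delta]);
  return 0;
}
#endif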
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointer types into integer before taking the difference.  */
      tree ta = fold_convert (ssizetype, a);
      tree ta1 = fold_convert (ssizetype, a1);
      diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary (MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2 (GE_EXPR, type, a, y);
}
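
/* Illustrative check (not part of the GCC sources): with the bound
   a < x in hand, a + 1 > y can be weakened to a >= y, because a < x
   rules out the wrap-around case a == MAX.  */
#if 0
#include <assert.h>

int
main (void)
{
  int a, x, y;
  for (a = -5; a < 5; a++)
    for (x = -5; x < 5; x++)
      for (y = -5; y < 5; y++)
        assert ((a < x && a + 1 > y) == (a < x && a >= y));
  return 0;
}
#endif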
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
        return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
        return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (host_integerp (arg01, 0)
           && host_integerp (arg11, 0))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = TREE_INT_CST_LOW (arg01);
      int11 = TREE_INT_CST_LOW (arg11);

      /* Move min of absolute values to int11.  */
      if ((int01 >= 0 ? int01 : -int01)
          < (int11 >= 0 ? int11 : -int11))
        {
          tmp = int01, int01 = int11, int11 = tmp;
          alt0 = arg00, arg00 = arg10, arg10 = alt0;
          maybe_same = arg01;
          swap = true;
        }
      else
        maybe_same = arg11;

      if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
        {
          alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
                              build_int_cst (TREE_TYPE (arg00),
                                             int01 / int11));
          alt1 = arg10;
          same = maybe_same;
          if (swap)
            maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
        }
    }

  if (same)
    return fold_build2 (MULT_EXPR, type,
                        fold_build2 (code, type,
                                     fold_convert (type, alt0),
                                     fold_convert (type, alt1)),
                        fold_convert (type, same));

  return NULL_TREE;
}
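
/* Illustrative check (not part of the GCC sources): the factorings
   tried above, on concrete values -- identical multiplicands first,
   then a shared power-of-two factor.  */
#if 0
#include <assert.h>

int
main (void)
{
  int a = 7, b = 5, c = 3;
  assert (a * c + b * c == (a + b) * c);        /* (A*C) + (B*C) */
  assert (a * c - a == a * (c - 1));            /* (A*C) - A     */
  assert (a * 12 + b * 4 == (a * 3 + b) * 4);   /* common 2^k    */
  return 0;
}
#endif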
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (bitpos < HOST_BITS_PER_WIDE_INT)
        value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
        value = (unsigned char) (TREE_INT_CST_HIGH (expr)
                                 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if (total_bytes > len)
    return 0;
  words = 32 / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len);
  if (rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  isize = native_encode_expr (part, ptr+rsize, len-rsize);
  if (rsize != isize)
    return 0;
  return rsize + isize;
}
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len)
{
  int i, size, offset, count;
  tree itype, elem, elements;

  offset = 0;
  elements = TREE_VECTOR_CST_ELTS (expr);
  count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      if (elements)
        {
          elem = TREE_VALUE (elements);
          elements = TREE_CHAIN (elements);
        }
      else
        elem = NULL_TREE;

      if (elem)
        {
          if (native_encode_expr (elem, ptr+offset, len-offset) != size)
            return 0;
        }
      else
        {
          if (offset + size > len)
            return 0;
          memset (ptr+offset, 0, size);
        }
      offset += size;
    }
  return offset;
}
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
   buffer PTR of length LEN bytes.  Return the number of bytes
   placed in the buffer, or zero upon failure.  */

static int
native_encode_expr (const_tree expr, unsigned char *ptr, int len)
{
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len);

    case REAL_CST:
      return native_encode_real (expr, ptr, len);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len);

    default:
      return 0;
    }
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;
  unsigned int HOST_WIDE_INT lo = 0;
  HOST_WIDE_INT hi = 0;

  if (total_bytes > len)
    return NULL_TREE;
  if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      value = ptr[offset];

      if (bitpos < HOST_BITS_PER_WIDE_INT)
        lo |= (unsigned HOST_WIDE_INT) value << bitpos;
      else
        hi |= (unsigned HOST_WIDE_INT) value
              << (bitpos - HOST_BITS_PER_WIDE_INT);
    }

  return build_int_cst_wide_type (type, lo, hi);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words, bitpos;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  words = 32 / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr+size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
  tree etype, elem, elements;
  int i, size, count;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = NULL_TREE;
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr+(i*size), size);
      if (!elem)
        return NULL_TREE;
      elements = tree_cons (NULL_TREE, elem, elements);
    }
  return build_vector (type, elements);
}
/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

static tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
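
/* Illustrative check (not part of the GCC sources): what the
   encode/interpret pair computes at compile time.  memcpy is the
   runtime analogue; this assumes an IEEE-754 target with 32-bit
   unsigned int.  */
#if 0
#include <assert.h>
#include <string.h>

int
main (void)
{
  float f = 1.0f;
  unsigned int bits;
  float g;
  memcpy (&bits, &f, sizeof bits);      /* cf. native_encode_expr */
  assert (bits == 0x3f800000u);         /* IEEE-754 single 1.0    */
  memcpy (&g, &bits, sizeof g);         /* cf. native_interpret_expr */
  assert (g == 1.0f);
  return 0;
}
#endif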
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  When IN_FOLD is true
   avoid modifications of T.  */

static tree
build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Note: doesn't apply to ALIGN_INDIRECT_REF.  */
  if (TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
        t = build1 (NOP_EXPR, ptrtype, t);
    }
  else if (!in_fold)
    {
      tree base = t;

      while (handled_component_p (base))
        base = TREE_OPERAND (base, 0);

      if (DECL_P (base))
        TREE_ADDRESSABLE (base) = 1;

      t = build1 (ADDR_EXPR, ptrtype, t);
    }
  else
    t = build1 (ADDR_EXPR, ptrtype, t);

  return t;
}
/* Build an expression for the address of T with type PTRTYPE.  This
   function modifies the input parameter 'T' by sometimes setting the
   TREE_ADDRESSABLE flag.  */

tree
build_fold_addr_expr_with_type (tree t, tree ptrtype)
{
  return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
}

/* Build an expression for the address of T.  This function modifies
   the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
   flag.  When called from fold functions, use fold_addr_expr instead.  */

tree
build_fold_addr_expr (tree t)
{
  return build_fold_addr_expr_with_type_1 (t,
                                           build_pointer_type (TREE_TYPE (t)),
                                           false);
}

/* Same as build_fold_addr_expr, builds an expression for the address
   of T, but avoids touching the input node 't'.  Fold functions
   should use this version.  */

static tree
fold_addr_expr (tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
}
/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */

tree
fold_unary (enum tree_code code, tree type, tree op0)
{
  tree tem;
  tree arg0;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 1);

  arg0 = op0;
  if (arg0)
    {
      if (code == NOP_EXPR || code == CONVERT_EXPR
          || code == FLOAT_EXPR || code == ABS_EXPR)
        {
          /* Don't use STRIP_NOPS, because signedness of argument type
             matters.  */
          STRIP_SIGN_NOPS (arg0);
        }
      else
        {
          /* Strip any conversions that don't change the mode.  This
             is safe for every expression, except for a comparison
             expression because its signedness is derived from its
             operands.

             Note that this is done as an internal manipulation within
             the constant folder, in order to find the simplest
             representation of the arguments so that their form can be
             studied.  In any case, the appropriate type conversions
             should be put back in the tree that will get out of the
             constant folder.  */
          STRIP_NOPS (arg0);
        }
    }

  if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                       fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == COND_EXPR)
        {
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg02 = TREE_OPERAND (arg0, 2);
          if (! VOID_TYPE_P (TREE_TYPE (arg01)))
            arg01 = fold_build1 (code, type, arg01);
          if (! VOID_TYPE_P (TREE_TYPE (arg02)))
            arg02 = fold_build1 (code, type, arg02);
          tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
                             arg01, arg02);

          /* If this was a conversion, and all we did was to move it
             inside the COND_EXPR, bring it back out.  But leave it if
             it is a conversion from integer to integer and the
             result precision is no wider than a word since such a
             conversion is cheap and may be optimized away by combine,
             while it couldn't if it were outside the COND_EXPR.  Then return
             so we don't get into an infinite recursion loop taking the
             conversion out and then back in.  */

          if ((code == NOP_EXPR || code == CONVERT_EXPR
               || code == NON_LVALUE_EXPR)
              && TREE_CODE (tem) == COND_EXPR
              && TREE_CODE (TREE_OPERAND (tem, 1)) == code
              && TREE_CODE (TREE_OPERAND (tem, 2)) == code
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
              && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
                  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
              && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
                     && (INTEGRAL_TYPE_P
                         (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
                     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
                  || flag_syntax_only))
            tem = build1 (code, type,
                          build3 (COND_EXPR,
                                  TREE_TYPE (TREE_OPERAND
                                             (TREE_OPERAND (tem, 1), 0)),
                                  TREE_OPERAND (tem, 0),
                                  TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
                                  TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
          return tem;
        }
      else if (COMPARISON_CLASS_P (arg0))
        {
          if (TREE_CODE (type) == BOOLEAN_TYPE)
            {
              arg0 = copy_node (arg0);
              TREE_TYPE (arg0) = type;
              return arg0;
            }
          else if (TREE_CODE (type) != INTEGER_TYPE)
            return fold_build3 (COND_EXPR, type, arg0,
                                fold_build1 (code, type,
                                             integer_one_node),
                                fold_build1 (code, type,
                                             integer_zero_node));
        }
    }

  switch (code)
    {
    case PAREN_EXPR:
      /* Re-association barriers around constants and other re-association
         barriers can be removed.  */
      if (CONSTANT_CLASS_P (op0)
          || TREE_CODE (op0) == PAREN_EXPR)
        return fold_convert (type, op0);
      return NULL_TREE;

    case NOP_EXPR:
    case FLOAT_EXPR:
    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
      if (TREE_TYPE (op0) == type)
        return op0;

      /* If we have (type) (a CMP b) and type is an integral type, return
         new expression involving the new type.  */
      if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
        return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
                            TREE_OPERAND (op0, 1));

      /* Handle cases of two conversions in a row.  */
      if (TREE_CODE (op0) == NOP_EXPR
          || TREE_CODE (op0) == CONVERT_EXPR)
        {
          tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
          tree inter_type = TREE_TYPE (op0);
          int inside_int = INTEGRAL_TYPE_P (inside_type);
          int inside_ptr = POINTER_TYPE_P (inside_type);
          int inside_float = FLOAT_TYPE_P (inside_type);
          int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
          unsigned int inside_prec = TYPE_PRECISION (inside_type);
          int inside_unsignedp = TYPE_UNSIGNED (inside_type);
          int inter_int = INTEGRAL_TYPE_P (inter_type);
          int inter_ptr = POINTER_TYPE_P (inter_type);
          int inter_float = FLOAT_TYPE_P (inter_type);
          int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
          unsigned int inter_prec = TYPE_PRECISION (inter_type);
          int inter_unsignedp = TYPE_UNSIGNED (inter_type);
          int final_int = INTEGRAL_TYPE_P (type);
          int final_ptr = POINTER_TYPE_P (type);
          int final_float = FLOAT_TYPE_P (type);
          int final_vec = TREE_CODE (type) == VECTOR_TYPE;
          unsigned int final_prec = TYPE_PRECISION (type);
          int final_unsignedp = TYPE_UNSIGNED (type);

          /* In addition to the cases of two conversions in a row
             handled below, if we are converting something to its own
             type via an object of identical or wider precision, neither
             conversion is needed.  */
          if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
              && (((inter_int || inter_ptr) && final_int)
                  || (inter_float && final_float))
              && inter_prec >= final_prec)
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));

          /* Likewise, if the intermediate and final types are either both
             float or both integer, we don't need the middle conversion if
             it is wider than the final type and doesn't change the signedness
             (for integers).  Avoid this if the final type is a pointer
             since then we sometimes need the inner conversion.  Likewise if
             the outer has a precision not equal to the size of its mode.  */
          if (((inter_int && inside_int)
               || (inter_float && inside_float)
               || (inter_vec && inside_vec))
              && inter_prec >= inside_prec
              && (inter_float || inter_vec
                  || inter_unsignedp == inside_unsignedp)
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                    && TYPE_MODE (type) == TYPE_MODE (inter_type))
              && ! final_ptr
              && (! final_vec || inter_prec == inside_prec))
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));

          /* If we have a sign-extension of a zero-extended value, we can
             replace that by a single zero-extension.  */
          if (inside_int && inter_int && final_int
              && inside_prec < inter_prec && inter_prec < final_prec
              && inside_unsignedp && !inter_unsignedp)
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));

          /* Two conversions in a row are not needed unless:
             - some conversion is floating-point (overstrict for now), or
             - some conversion is a vector (overstrict for now), or
             - the intermediate type is narrower than both initial and
               final, or
             - the intermediate type and innermost type differ in signedness,
               and the outermost type is wider than the intermediate, or
             - the initial type is a pointer type and the precisions of the
               intermediate and final types differ, or
             - the final type is a pointer type and the precisions of the
               initial and intermediate types differ.  */
          if (! inside_float && ! inter_float && ! final_float
              && ! inside_vec && ! inter_vec && ! final_vec
              && (inter_prec >= inside_prec || inter_prec >= final_prec)
              && ! (inside_int && inter_int
                    && inter_unsignedp != inside_unsignedp
                    && inter_prec < final_prec)
              && ((inter_unsignedp && inter_prec > inside_prec)
                  == (final_unsignedp && final_prec > inter_prec))
              && ! (inside_ptr && inter_prec != final_prec)
              && ! (final_ptr && inside_prec != inter_prec)
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                    && TYPE_MODE (type) == TYPE_MODE (inter_type)))
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));
        }

      /* Handle (T *)&A.B.C for A being of type T and B and C
         living at offset zero.  This occurs frequently in
         C++ upcasting and then accessing the base.  */
      if (TREE_CODE (op0) == ADDR_EXPR
          && POINTER_TYPE_P (type)
          && handled_component_p (TREE_OPERAND (op0, 0)))
        {
          HOST_WIDE_INT bitsize, bitpos;
          tree offset;
          enum machine_mode mode;
          int unsignedp, volatilep;
          tree base = TREE_OPERAND (op0, 0);
          base = get_inner_reference (base, &bitsize, &bitpos, &offset,
                                      &mode, &unsignedp, &volatilep, false);
          /* If the reference was to a (constant) zero offset, we can use
             the address of the base if it has the same base type
             as the result type.  */
          if (! offset && bitpos == 0
              && TYPE_MAIN_VARIANT (TREE_TYPE (type))
                 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
            return fold_convert (type, fold_addr_expr (base));
        }

      if ((TREE_CODE (op0) == MODIFY_EXPR
           || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
          && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
          /* Detect assigning a bitfield.  */
          && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
               && DECL_BIT_FIELD
               (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
        {
          /* Don't leave an assignment inside a conversion
             unless assigning a bitfield.  */
          tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
          /* First do the assignment, then return converted constant.  */
          tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
          TREE_NO_WARNING (tem) = 1;
          TREE_USED (tem) = 1;
          return tem;
        }

      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
         constant (if x has signed type, the sign bit cannot be set
         in c).  This folds extension into the BIT_AND_EXPR.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (type) != BOOLEAN_TYPE
          && TREE_CODE (op0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
        {
          tree and = op0;
          tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
          int change = 0;

          if (TYPE_UNSIGNED (TREE_TYPE (and))
              || (TYPE_PRECISION (type)
                  <= TYPE_PRECISION (TREE_TYPE (and))))
            change = 1;
          else if (TYPE_PRECISION (TREE_TYPE (and1))
                   <= HOST_BITS_PER_WIDE_INT
                   && host_integerp (and1, 1))
            {
              unsigned HOST_WIDE_INT cst;

              cst = tree_low_cst (and1, 1);
              cst &= (HOST_WIDE_INT) -1
                     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
              change = (cst == 0);
#ifdef LOAD_EXTEND_OP
              if (change
                  && !flag_syntax_only
                  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
                      == ZERO_EXTEND))
                {
                  tree uns = unsigned_type_for (TREE_TYPE (and0));
                  and0 = fold_convert (uns, and0);
                  and1 = fold_convert (uns, and1);
                }
#endif
            }
          if (change)
            {
              tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
                                           TREE_INT_CST_HIGH (and1), 0,
                                           TREE_OVERFLOW (and1));
              return fold_build2 (BIT_AND_EXPR, type,
                                  fold_convert (type, and0), tem);
            }
        }

      /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
         when one of the new casts will fold away.  Conservatively we assume
         that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST.  */
      if (POINTER_TYPE_P (type)
          && TREE_CODE (arg0) == POINTER_PLUS_EXPR
          && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
              || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);

          return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
                              fold_convert (sizetype, arg01));
        }

      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
         of the same precision, and X is an integer type not narrower than
         types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (op0) == BIT_NOT_EXPR
          && INTEGRAL_TYPE_P (TREE_TYPE (op0))
          && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
              || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
          && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
        {
          tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
          if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
              && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
            return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
        }

      /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
         type of X and Y (integer types only).  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (op0) == MULT_EXPR
          && INTEGRAL_TYPE_P (TREE_TYPE (op0))
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
        {
          /* Be careful not to introduce new overflows.  */
          tree mult_type;
          if (TYPE_OVERFLOW_WRAPS (type))
            mult_type = type;
          else
            mult_type = unsigned_type_for (type);

          tem = fold_build2 (MULT_EXPR, mult_type,
                             fold_convert (mult_type, TREE_OPERAND (op0, 0)),
                             fold_convert (mult_type, TREE_OPERAND (op0, 1)));
          return fold_convert (type, tem);
        }

      tem = fold_convert_const (code, type, op0);
      return tem ? tem : NULL_TREE;

    case FIXED_CONVERT_EXPR:
      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;

    case VIEW_CONVERT_EXPR:
      if (TREE_TYPE (op0) == type)
        return op0;
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
        return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));

      /* For integral conversions with the same precision or pointer
         conversions use a NOP_EXPR instead.  */
      if ((INTEGRAL_TYPE_P (type)
           || POINTER_TYPE_P (type))
          && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
              || POINTER_TYPE_P (TREE_TYPE (op0)))
          && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0))
          /* Do not muck with VIEW_CONVERT_EXPRs that convert from
             a sub-type to its base type as generated by the Ada FE.  */
          && !(INTEGRAL_TYPE_P (TREE_TYPE (op0))
               && TREE_TYPE (TREE_TYPE (op0))))
        return fold_convert (type, op0);

      /* Strip inner integral conversions that do not change the precision.  */
      if ((TREE_CODE (op0) == NOP_EXPR
           || TREE_CODE (op0) == CONVERT_EXPR)
          && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
              || POINTER_TYPE_P (TREE_TYPE (op0)))
          && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
              || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
          && (TYPE_PRECISION (TREE_TYPE (op0))
              == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
        return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));

      return fold_view_convert_expr (type, op0);

    case NEGATE_EXPR:
      tem = fold_negate_expr (arg0);
      if (tem)
        return fold_convert (type, tem);
      return NULL_TREE;

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
        return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
               && TREE_CODE (type) == REAL_TYPE)
        {
          tree targ0 = strip_float_extensions (arg0);
          if (targ0 != arg0)
            return fold_convert (type, fold_build1 (ABS_EXPR,
                                                    TREE_TYPE (targ0),
                                                    targ0));
        }
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (TREE_CODE (arg0) == ABS_EXPR)
        return arg0;
      else if (tree_expr_nonnegative_p (arg0))
        return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = fold_strip_sign_ops (arg0);
          if (tem)
            return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
        }
      return NULL_TREE;

    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert (type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        {
          tree itype = TREE_TYPE (type);
          tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
          tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
          return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
        }
      if (TREE_CODE (arg0) == COMPLEX_CST)
        {
          tree itype = TREE_TYPE (type);
          tree rpart = fold_convert (itype, TREE_REALPART (arg0));
          tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
          return build_complex (type, rpart, negate_expr (ipart));
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        return fold_convert (type, TREE_OPERAND (arg0, 0));
      return NULL_TREE;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
        return fold_convert (type, TREE_OPERAND (arg0, 0));
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
        return fold_build2 (MINUS_EXPR, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)),
                            build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
               && ((TREE_CODE (arg0) == MINUS_EXPR
                    && integer_onep (TREE_OPERAND (arg0, 1)))
                   || (TREE_CODE (arg0) == PLUS_EXPR
                       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
        return fold_build1 (NEGATE_EXPR, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary (BIT_NOT_EXPR, type,
                                     fold_convert (type,
                                                   TREE_OPERAND (arg0, 0)))))
        return fold_build2 (BIT_XOR_EXPR, type, tem,
                            fold_convert (type, TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary (BIT_NOT_EXPR, type,
                                     fold_convert (type,
                                                   TREE_OPERAND (arg0, 1)))))
        return fold_build2 (BIT_XOR_EXPR, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
        {
          tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
          int count = TYPE_VECTOR_SUBPARTS (type), i;

          for (i = 0; i < count; i++)
            {
              if (elements)
                {
                  elem = TREE_VALUE (elements);
                  elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
                  if (elem == NULL_TREE)
                    break;
                  elements = TREE_CHAIN (elements);
                }
              else
                elem = build_int_cst (TREE_TYPE (type), -1);
              list = tree_cons (NULL_TREE, elem, list);
            }
          if (i == count)
            return build_vector (type, nreverse (list));
        }

      return NULL_TREE;

    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
        arg0 = fold_convert (boolean_type_node, arg0);

      /* Note that the operand of this must be an int
         and its values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language,
         but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (arg0);
      if (!tem)
        return NULL_TREE;
      return fold_convert (type, tem);

    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert (type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 0),
                                 TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_REALPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build2 (TREE_CODE (arg0), itype,
                             fold_build1 (REALPART_EXPR, itype,
                                          TREE_OPERAND (arg0, 0)),
                             fold_build1 (REALPART_EXPR, itype,
                                          TREE_OPERAND (arg0, 1)));
          return fold_convert (type, tem);
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
          return fold_convert (type, tem);
        }
      if (TREE_CODE (arg0) == CALL_EXPR)
        {
          tree fn = get_callee_fndecl (arg0);
          if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
            switch (DECL_FUNCTION_CODE (fn))
              {
              CASE_FLT_FN (BUILT_IN_CEXPI):
                fn = mathfn_built_in (type, BUILT_IN_COS);
                if (fn)
                  return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
                break;

              default:
                break;
              }
        }
      return NULL_TREE;

    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert (type, integer_zero_node);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 1),
                                 TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_IMAGPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build2 (TREE_CODE (arg0), itype,
                             fold_build1 (IMAGPART_EXPR, itype,
                                          TREE_OPERAND (arg0, 0)),
                             fold_build1 (IMAGPART_EXPR, itype,
                                          TREE_OPERAND (arg0, 1)));
          return fold_convert (type, tem);
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
          return fold_convert (type, negate_expr (tem));
        }
      if (TREE_CODE (arg0) == CALL_EXPR)
        {
          tree fn = get_callee_fndecl (arg0);
          if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
            switch (DECL_FUNCTION_CODE (fn))
              {
              CASE_FLT_FN (BUILT_IN_CEXPI):
                fn = mathfn_built_in (type, BUILT_IN_SIN);
                if (fn)
                  return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
                break;

              default:
                break;
              }
        }
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}
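
/* Worked example (annotation, not part of the original source): folding
   "MIN_EXPR <MAX_EXPR <a, b>, b>" matches the first pattern above and
   yields "b"; omit_one_operand keeps "a" around only when evaluating it
   has side effects, so e.g. MIN (MAX (x++, y), y) still performs the
   increment.  */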
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
                                 tree arg0, tree arg1,
                                 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  */
  if (!(((code0 == MINUS_EXPR
          || code0 == PLUS_EXPR)
         && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        || code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
        code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
        code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
        code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
        code = GT_EXPR;
      else
        return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
          && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
        code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
               && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
        code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
               && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
        code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
               && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
        code = GT_EXPR;
      else
        return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  */
  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
                       cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
  if (code0 != INTEGER_CST)
    t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);

  /* If swapping might yield to a more canonical form, do so.  */
  if (swap)
    return fold_build2 (swap_tree_comparison (code), type, arg1, t);

  return fold_build2 (code, type, t, arg1);
}
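
/* Worked example (annotation, not part of the original source): with
   signed int x, "x - 3 < y" is canonicalized by the helper above to
   "x - 2 <= y", shrinking the constant; *strict_overflow_p is set
   because the two forms only agree when "x - 3" cannot wrap.  */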
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (enum tree_code code, tree type,
                               tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
                                  "when reducing constant in comparison");

  /* In principle pointers also have undefined overflow behavior,
     but that causes problems elsewhere.  */
  if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      || POINTER_TYPE_P (TREE_TYPE (arg0)))
    return NULL_TREE;

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
                                       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
        fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
                                       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
{
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2 (swap_tree_comparison (code), type, op1, op0);

  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
          && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
      && (TREE_CODE (arg1) == INTEGER_CST
          && !TREE_OVERFLOW (arg1)))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;
      tree variable = TREE_OPERAND (arg0, 0);
      tree lhs;
      int lhs_add;
      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

      lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
                         TREE_TYPE (arg1), const2, const1);

      /* If the constant operation overflowed this can be
         simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_CODE (lhs) == INTEGER_CST
          && TREE_OVERFLOW (lhs))
        {
          int const1_sgn = tree_int_cst_sgn (const1);
          enum tree_code code2 = code;

          /* Get the sign of the constant on the lhs if the
             operation were VARIABLE + CONST1.  */
          if (TREE_CODE (arg0) == MINUS_EXPR)
            const1_sgn = -const1_sgn;

          /* The sign of the constant determines if we overflowed
             INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
             Canonicalize to the INT_MIN overflow by swapping the comparison
             if necessary.  */
          if (const1_sgn == -1)
            code2 = swap_tree_comparison (code);

          /* We now can look at the canonicalized case
               VARIABLE + 1  CODE2  INT_MIN
             and decide on the result.  */
          if (code2 == LT_EXPR
              || code2 == LE_EXPR
              || code2 == EQ_EXPR)
            return omit_one_operand (type, boolean_false_node, variable);
          else if (code2 == NE_EXPR
                   || code2 == GE_EXPR
                   || code2 == GT_EXPR)
            return omit_one_operand (type, boolean_true_node, variable);
        }

      if (TREE_CODE (lhs) == TREE_CODE (arg1)
          && (TREE_CODE (lhs) != INTEGER_CST
              || !TREE_OVERFLOW (lhs)))
        {
          fold_overflow_warning (("assuming signed overflow does not occur "
                                  "when changing X +- C1 cmp C2 to "
                                  "X cmp C1 +- C2"),
                                 WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2 (code, type, variable, lhs);
        }
    }
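
  /* Worked example (annotation, not part of the original source): for
     signed int x, "x + 20 < 10" becomes "x < -10" by moving the constant
     across; if instead the adjusted constant overflows, as in
     "x + 10 < INT_MIN + 5", the comparison is decided statically (here:
     always false, since x + 10 cannot reach below INT_MIN + 10 without
     undefined overflow).  */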
  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
          || TREE_CODE (arg1) == ADDR_EXPR
          || TREE_CODE (arg0) == POINTER_PLUS_EXPR
          || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
         get_inner_reference, but put it back by stripping INDIRECT_REF
         off the base object if possible.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
        {
          base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
                                       &bitsize, &bitpos0, &offset0, &mode,
                                       &unsignedp, &volatilep, false);
          if (TREE_CODE (base0) == INDIRECT_REF)
            base0 = TREE_OPERAND (base0, 0);
          else
            indirect_base0 = true;
        }
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
        {
          base0 = TREE_OPERAND (arg0, 0);
          offset0 = TREE_OPERAND (arg0, 1);
        }

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
        {
          base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
                                       &bitsize, &bitpos1, &offset1, &mode,
                                       &unsignedp, &volatilep, false);
          /* We have to make sure to have an indirect/non-indirect base1
             just the same as we did for base0.  */
          if (TREE_CODE (base1) == INDIRECT_REF
              && !indirect_base0)
            base1 = TREE_OPERAND (base1, 0);
          else if (!indirect_base0)
            base1 = NULL_TREE;
        }
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
        {
          base1 = TREE_OPERAND (arg1, 0);
          offset1 = TREE_OPERAND (arg1, 1);
        }
      else if (indirect_base0)
        base1 = NULL_TREE;

      /* If we have equivalent bases we might be able to simplify.  */
      if (base0 && base1
          && operand_equal_p (base0, base1, 0))
        {
          /* We can fold this expression to a constant if the non-constant
             offset parts are equal.  */
          if (offset0 == offset1
              || (offset0 && offset1
                  && operand_equal_p (offset0, offset1, 0)))
            {
              switch (code)
                {
                case EQ_EXPR:
                  return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
                case NE_EXPR:
                  return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
                case LT_EXPR:
                  return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
                case LE_EXPR:
                  return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
                case GE_EXPR:
                  return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
                case GT_EXPR:
                  return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
                default:;
                }
            }
          /* We can simplify the comparison to a comparison of the variable
             offset parts if the constant offset parts are equal.
             Be careful to use signed size type here because otherwise we
             mess with array offsets in the wrong way.  This is possible
             because pointer arithmetic is restricted to remain within an
             object and overflow on pointer differences is undefined as of
             6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
          else if (bitpos0 == bitpos1)
            {
              tree signed_size_type_node;
              signed_size_type_node = signed_type_for (size_type_node);

              /* By converting to signed size type we cover middle-end pointer
                 arithmetic which operates on unsigned pointer types of size
                 type size and ARRAY_REF offsets which are properly sign or
                 zero extended from their type in case it is narrower than
                 size type.  */
              if (offset0 == NULL_TREE)
                offset0 = build_int_cst (signed_size_type_node, 0);
              else
                offset0 = fold_convert (signed_size_type_node, offset0);
              if (offset1 == NULL_TREE)
                offset1 = build_int_cst (signed_size_type_node, 0);
              else
                offset1 = fold_convert (signed_size_type_node, offset1);

              return fold_build2 (code, type, offset0, offset1);
            }
        }
    }
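
  /* Worked example (annotation, not part of the original source): given
     "struct S { int a; int b; } s;", the comparison "&s.a < &s.b" has
     equal bases and no variable offsets, so it folds to the constant
     "bitpos0 < bitpos1", i.e. true; "(p p+ i) == (p p+ j)" keeps the
     common base p and reduces to a comparison of the offsets i and j
     converted to the signed size type.  */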
  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
                                      "occur when combining constants around "
                                      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
         of lower absolute value than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
                             ? MINUS_EXPR : PLUS_EXPR,
                             const2, const1, 0);
      if (!TREE_OVERFLOW (cst)
          && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
        {
          fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2 (code, type,
                              variable1,
                              fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
                                           variable2, cst));
        }

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
                             ? MINUS_EXPR : PLUS_EXPR,
                             const1, const2, 0);
      if (!TREE_OVERFLOW (cst)
          && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
        {
          fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2 (code, type,
                              fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
                                           variable1, cst),
                              variable2);
        }
    }
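
  /* Worked example (annotation, not part of the original source): with
     signed x and y, "x + 9 < y + 10" combines the constants as
     10 - 9 = 1, which is smaller in magnitude than 10, so the
     comparison is rewritten as "x < y + 1".  */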
  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;                        /* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      gcc_assert (!integer_zerop (const1));

      fold_overflow_warning (("assuming signed overflow does not occur when "
                              "eliminating multiplication in comparison "
                              "with zero"),
                             WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
        cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2 (cmp_code, type, variable1, const2);
    }
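
  /* Worked example (annotation, not part of the original source): for
     signed i, "i * 4 > 0" folds to "i > 0", and "i * -2 > 0" folds to
     "i < 0" because the negative multiplier flips the sense of the
     comparison.  */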
  tem = maybe_canonicalize_comparison (code, type, op0, op1);
  if (tem)
    return tem;

  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
        newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
        return fold_build2 (code, type, fold_convert (newtype, targ0),
                            fold_convert (newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
                            TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
        {
          REAL_VALUE_TYPE cst;
          cst = TREE_REAL_CST (arg1);

          /* (-a) CMP CST -> a swap(CMP) (-CST)  */
          if (TREE_CODE (arg0) == NEGATE_EXPR)
            return fold_build2 (swap_tree_comparison (code), type,
                                TREE_OPERAND (arg0, 0),
                                build_real (TREE_TYPE (arg1),
                                            REAL_VALUE_NEGATE (cst)));

          /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
          /* a CMP (-0) -> a CMP 0  */
          if (REAL_VALUE_MINUS_ZERO (cst))
            return fold_build2 (code, type, arg0,
                                build_real (TREE_TYPE (arg1), dconst0));

          /* x != NaN is always true, other ops are always false.  */
          if (REAL_VALUE_ISNAN (cst)
              && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
            {
              tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
              return omit_one_operand (type, tem, arg0);
            }

          /* Fold comparisons against infinity.  */
          if (REAL_VALUE_ISINF (cst))
            {
              tem = fold_inf_compare (code, type, arg0, arg1);
              if (tem != NULL_TREE)
                return tem;
            }
        }

      /* If this is a comparison of a real constant with a PLUS_EXPR
         or a MINUS_EXPR of a real constant, we can convert it into a
         comparison with a revised real constant as long as no overflow
         occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
          && TREE_CODE (arg1) == REAL_CST
          && (TREE_CODE (arg0) == PLUS_EXPR
              || TREE_CODE (arg0) == MINUS_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
          && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
                                      ? MINUS_EXPR : PLUS_EXPR,
                                      arg1, TREE_OPERAND (arg0, 1), 0))
          && !TREE_OVERFLOW (tem))
        return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
         a MINUS_EXPR whose first operand is also a real constant, i.e.
         (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
         floating-point types only if -fassociative-math is set.  */
      if (flag_associative_math
          && TREE_CODE (arg1) == REAL_CST
          && TREE_CODE (arg0) == MINUS_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
          && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
                                      arg1, 0))
          && !TREE_OVERFLOW (tem))
        return fold_build2 (swap_tree_comparison (code), type,
                            TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
          && flag_unsafe_math_optimizations
          && ! flag_errno_math)
        {
          enum built_in_function fcode = builtin_mathfn_code (arg0);

          if (fcode != END_BUILTINS)
            {
              tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
              if (tem != NULL_TREE)
                return tem;
            }
        }
    }
  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && (TREE_CODE (arg0) == NOP_EXPR
          || TREE_CODE (arg0) == CONVERT_EXPR))
    {
      /* If we are widening one operand of an integer comparison,
         see if the other operand is similarly being widened.  Perhaps we
         can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (code, type, arg0, arg1);
      if (tem)
        return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (code, type, arg0, arg1);
      if (tem)
        return tem;
    }

  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
          || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (code, type, op0, op1);
      if (tem)
        return tem;
    }
  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
        {
        case EQ_EXPR:
          if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
              || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
            return constant_boolean_node (1, type);
          break;

        case GE_EXPR:
        case LE_EXPR:
          if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
              || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
            return constant_boolean_node (1, type);
          return fold_build2 (EQ_EXPR, type, arg0, arg1);

        case NE_EXPR:
          /* For NE, we can only do this simplification if integer
             or we don't honor IEEE floating point NaNs.  */
          if (FLOAT_TYPE_P (TREE_TYPE (arg0))
              && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
            break;
          /* ... fall through ...  */
        case GT_EXPR:
        case LT_EXPR:
          return constant_boolean_node (0, type);
        default:
          gcc_unreachable ();
        }
    }
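
  /* Worked example (annotation, not part of the original source): for
     int i, "i == i" folds to 1 and "i < i" to 0; for IEEE double d with
     NaNs honored, "d == d" is left alone (it is false when d is NaN),
     while "d >= d" is folded to the weaker "d == d".  */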
  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like  ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
          /* Don't handle degenerate cases here; they should already
             have been handled anyway.  */
          && cval1 != 0 && cval2 != 0
          && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
          && TREE_TYPE (cval1) == TREE_TYPE (cval2)
          && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
          && TYPE_MAX_VALUE (TREE_TYPE (cval1))
          && TYPE_MAX_VALUE (TREE_TYPE (cval2))
          && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
                                TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
        {
          tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
          tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

          /* We can't just pass T to eval_subst in case cval1 or cval2
             was the same as ARG1.  */

          tree high_result
                = fold_build2 (code, type,
                               eval_subst (arg0, cval1, maxval,
                                           cval2, minval),
                               arg1);
          tree equal_result
                = fold_build2 (code, type,
                               eval_subst (arg0, cval1, maxval,
                                           cval2, maxval),
                               arg1);
          tree low_result
                = fold_build2 (code, type,
                               eval_subst (arg0, cval1, minval,
                                           cval2, maxval),
                               arg1);

          /* All three of these results should be 0 or 1.  Confirm they are.
             Then use those values to select the proper code to use.  */

          if (TREE_CODE (high_result) == INTEGER_CST
              && TREE_CODE (equal_result) == INTEGER_CST
              && TREE_CODE (low_result) == INTEGER_CST)
            {
              /* Make a 3-bit mask with the high-order bit being the
                 value for `>', the next for '=', and the low for '<'.  */
              switch ((integer_onep (high_result) * 4)
                      + (integer_onep (equal_result) * 2)
                      + integer_onep (low_result))
                {
                case 0:
                  /* Always false.  */
                  return omit_one_operand (type, integer_zero_node, arg0);
                case 1:
                  code = LT_EXPR;
                  break;
                case 2:
                  code = EQ_EXPR;
                  break;
                case 3:
                  code = LE_EXPR;
                  break;
                case 4:
                  code = GT_EXPR;
                  break;
                case 5:
                  code = NE_EXPR;
                  break;
                case 6:
                  code = GE_EXPR;
                  break;
                case 7:
                  /* Always true.  */
                  return omit_one_operand (type, integer_one_node, arg0);
                }

              if (save_p)
                return save_expr (build2 (code, type, cval1, cval2));
              return fold_build2 (code, type, cval1, cval2);
            }
        }
    }
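
  /* Worked example (annotation, not part of the original source): for
     "((x > y) - (y > x)) > 0" the two values are x and y; evaluating
     the expression for x > y, x == y and x < y gives 1, 0 and 0, i.e.
     the 3-bit mask 4, so the whole expression folds to "x > y".  */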
  /* Fold a comparison of the address of COMPONENT_REFs with the same
     type and component to a comparison of the address of the base
     object.  In short, &x->a OP &y->a to x OP y and
     &x->a OP &y.a to x OP &y  */
  if (TREE_CODE (arg0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
      && TREE_CODE (arg1) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
    {
      tree cref0 = TREE_OPERAND (arg0, 0);
      tree cref1 = TREE_OPERAND (arg1, 0);
      if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
        {
          tree op0 = TREE_OPERAND (cref0, 0);
          tree op1 = TREE_OPERAND (cref1, 0);
          return fold_build2 (code, type,
                              fold_addr_expr (op0),
                              fold_addr_expr (op1));
        }
    }

  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (code, type, arg0, arg1);
      if (tem != NULL_TREE)
        return tem;
    }

  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2 (code, type,
                          fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
                          TREE_OPERAND (arg0, 0));
    }

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == INTEGER_CST)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2 (swap_tree_comparison (code), type,
                          TREE_OPERAND (arg0, 0),
                          fold_build1 (BIT_NOT_EXPR, cmp_type,
                                       fold_convert (cmp_type, arg1)));
    }

  return NULL_TREE;
}
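
/* Worked example (annotation, not part of the original source): "~x < 5"
   becomes "x > ~5", i.e. "x > -6", via the swapped comparison above;
   this relies on ~ being an order-reversing bijection on two's
   complement integers.  */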
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1 (REALPART_EXPR, itype, expr);
      ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2 (PLUS_EXPR, itype,
                     fold_build2 (MULT_EXPR, itype, rpart, rpart),
                     fold_build2 (MULT_EXPR, itype, ipart, ipart));
  return fold_build2 (COMPLEX_EXPR, type, tem,
                      fold_convert (itype, integer_zero_node));
}
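
/* Worked example (annotation, not part of the original source): for a
   complex z = a + b*i, fold_mult_zconjz rewrites "z * conj(z)" as the
   complex value (a*a + b*b, 0); the save_exprs ensure a and b are
   evaluated only once.  */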
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      if (handled_component_p (expr))
        {
          HOST_WIDE_INT bitsize, bitpos;
          tree offset;
          enum machine_mode mode;
          int unsignedp, volatilep;

          expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
                                      &mode, &unsignedp, &volatilep, false);
          *residue = bitpos / BITS_PER_UNIT;
          if (offset)
            {
              if (TREE_CODE (offset) == INTEGER_CST)
                *residue += TREE_INT_CST_LOW (offset);
              else
                /* We don't handle more complicated offset expressions.  */
                return 1;
            }
        }

      if (DECL_P (expr))
        return DECL_ALIGN_UNIT (expr);
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
        {
          *residue += TREE_INT_CST_LOW (op1);
          return modulus;
        }
      else if (inner_code == MULT_EXPR)
        {
          op1 = TREE_OPERAND (op1, 1);
          if (TREE_CODE (op1) == INTEGER_CST)
            {
              unsigned HOST_WIDE_INT align;

              /* Compute the greatest power-of-2 divisor of op1.  */
              align = TREE_INT_CST_LOW (op1);
              align &= -align;

              /* If align is non-zero and less than *modulus, replace
                 *modulus with align.  If align is 0, then either op1 is 0
                 or the greatest power-of-2 divisor of op1 doesn't fit in an
                 unsigned HOST_WIDE_INT.  In either case, no additional
                 constraint is imposed.  */
              if (align)
                modulus = MIN (modulus, align);

              return modulus;
            }
        }
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
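
/* Worked example (annotation, not part of the original source): for
   "&buf[6]" where buf is a declared 16-byte aligned char array, the
   ADDR_EXPR branch returns modulus 16 with residue 6, i.e. the pointer
   value is congruent to 6 modulo 16; for "p p+ (i * 8)" the MULT_EXPR
   branch caps p's modulus at 8, since only the low 3 bits of the
   addend are known to be zero.  */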
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_binary (enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;

  gcc_assert ((IS_EXPR_CODE_CLASS (kind)
               || IS_GIMPLE_STMT_CODE_CLASS (kind))
              && TREE_CODE_LENGTH (code) == 2
              && op0 != NULL_TREE
              && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;

  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     case, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }

  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
    {
      if (kind == tcc_binary)
        {
          /* Make sure type and arg0 have the same saturating flag.  */
          gcc_assert (TYPE_SATURATING (type)
                      == TYPE_SATURATING (TREE_TYPE (arg0)));
          tem = const_binop (code, arg0, arg1, 0);
        }
      else if (kind == tcc_comparison)
        tem = fold_relational_const (code, type, arg0, arg1);
      else
        tem = NULL_TREE;

      if (tem != NULL_TREE)
        {
          if (TREE_TYPE (tem) != type)
            tem = fold_convert (type, tem);
          return tem;
        }
    }
  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2 (code, type, op1, op0);

  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
           && (truth_value_p (TREE_CODE (arg1))
               || (TREE_CODE (arg1) == BIT_AND_EXPR
                   && integer_onep (TREE_OPERAND (arg1, 1)))))
          || (truth_value_p (TREE_CODE (arg1))
              && (truth_value_p (TREE_CODE (arg0))
                  || (TREE_CODE (arg0) == BIT_AND_EXPR
                      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
                         : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
                         : TRUTH_XOR_EXPR,
                         boolean_type_node,
                         fold_convert (boolean_type_node, arg0),
                         fold_convert (boolean_type_node, arg1));

      if (code == EQ_EXPR)
        tem = invert_truthvalue (tem);

      return fold_convert (type, tem);
    }
  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                       fold_build2 (code, type,
                                    fold_convert (TREE_TYPE (op0),
                                                  TREE_OPERAND (arg0, 1)),
                                    op1));
      if (TREE_CODE (arg1) == COMPOUND_EXPR
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
                       fold_build2 (code, type, op0,
                                    fold_convert (TREE_TYPE (op1),
                                                  TREE_OPERAND (arg1, 1))));

      if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
        {
          tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
                                                     arg0, arg1,
                                                     /*cond_first_p=*/1);
          if (tem != NULL_TREE)
            return tem;
        }

      if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
        {
          tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
                                                     arg1, arg0,
                                                     /*cond_first_p=*/0);
          if (tem != NULL_TREE)
            return tem;
        }
    }

  switch (code)
    {
    case POINTER_PLUS_EXPR:
      /* 0 +p index -> (type)index */
      if (integer_zerop (arg0))
        return non_lvalue (fold_convert (type, arg1));

      /* PTR +p 0 -> PTR */
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));

      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
          && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
        return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
                                                fold_convert (sizetype, arg1),
                                                fold_convert (sizetype, arg0)));

      /* index +p PTR -> PTR +p index */
      if (POINTER_TYPE_P (TREE_TYPE (arg1))
          && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
        return fold_build2 (POINTER_PLUS_EXPR, type,
                            fold_convert (type, arg1),
                            fold_convert (sizetype, arg0));

      /* (PTR +p B) +p A -> PTR +p (B + A) */
      if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
        {
          tree inner;
          tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
          tree arg00 = TREE_OPERAND (arg0, 0);
          inner = fold_build2 (PLUS_EXPR, sizetype,
                               arg01, fold_convert (sizetype, arg1));
          return fold_convert (type,
                               fold_build2 (POINTER_PLUS_EXPR,
                                            TREE_TYPE (arg00), arg00, inner));
        }

      /* PTR_CST +p CST -> CST1 */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));

      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
         of the array.  The loop optimizer sometimes produces this type
         of expression.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
        {
          tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
          if (tem)
            return fold_convert (type, tem);
        }

      return NULL_TREE;
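
      /* Worked example (annotation, not part of the original source):
         the reassociation above turns "(p p+ 4) p+ 8" into "p p+ 12",
         and "&a[1] p+ 2 * sizeof (*a)" can become "&a[3]" via
         try_move_mult_to_index.  */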
    case PLUS_EXPR:
      /* PTR + INT -> (INT)(PTR p+ INT) */
      if (POINTER_TYPE_P (TREE_TYPE (arg0))
          && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
        return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
                                                TREE_TYPE (arg0),
                                                arg0,
                                                fold_convert (sizetype, arg1)));
      /* INT + PTR -> (INT)(PTR p+ INT) */
      if (POINTER_TYPE_P (TREE_TYPE (arg1))
          && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
        return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
                                                TREE_TYPE (arg1),
                                                arg1,
                                                fold_convert (sizetype, arg0)));
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2 (MINUS_EXPR, type,
                            fold_convert (type, arg0),
                            fold_convert (type, TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
        return fold_build2 (MINUS_EXPR, type,
                            fold_convert (type, arg1),
                            fold_convert (type, TREE_OPERAND (arg0, 0)));

      if (INTEGRAL_TYPE_P (type))
        {
          /* Convert ~A + 1 to -A.  */
          if (TREE_CODE (arg0) == BIT_NOT_EXPR
              && integer_onep (arg1))
            return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));

          /* ~X + X is -1.  */
          if (TREE_CODE (arg0) == BIT_NOT_EXPR
              && !TYPE_OVERFLOW_TRAPS (type))
            {
              tree tem = TREE_OPERAND (arg0, 0);

              STRIP_NOPS (tem);
              if (operand_equal_p (tem, arg1, 0))
                {
                  t1 = build_int_cst_type (type, -1);
                  return omit_one_operand (type, t1, arg1);
                }
            }

          /* X + ~X is -1.  */
          if (TREE_CODE (arg1) == BIT_NOT_EXPR
              && !TYPE_OVERFLOW_TRAPS (type))
            {
              tree tem = TREE_OPERAND (arg1, 0);

              STRIP_NOPS (tem);
              if (operand_equal_p (arg0, tem, 0))
                {
                  t1 = build_int_cst_type (type, -1);
                  return omit_one_operand (type, t1, arg0);
                }
            }

          /* X + (X / CST) * -CST is X % CST.  */
          if (TREE_CODE (arg1) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
              && operand_equal_p (arg0,
                                  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
            {
              tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
              tree cst1 = TREE_OPERAND (arg1, 1);
              tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
              if (sum && integer_zerop (sum))
                return fold_convert (type,
                                     fold_build2 (TRUNC_MOD_EXPR,
                                                  TREE_TYPE (arg0),
                                                  arg0, cst0));
            }
        }
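
      /* Worked example (annotation, not part of the original source):
         the last pattern rewrites "x + (x / 4) * -4" as "x % 4", which
         is just the C truncating-division identity
         x == (x / 4) * 4 + x % 4 rearranged.  */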
      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
         same or one.  Make sure type is not saturating.
         fold_plusminus_mult_expr will re-associate.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && !TYPE_SATURATING (type)
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
          tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return non_lvalue (fold_convert (type, arg0));

          /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
             with a constant, and the two constants have no bits in common,
             we should treat this as a BIT_IOR_EXPR since this may produce more
             simplifications.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
              && integer_zerop (const_binop (BIT_AND_EXPR,
                                             TREE_OPERAND (arg0, 1),
                                             TREE_OPERAND (arg1, 1), 0)))
            {
              code = BIT_IOR_EXPR;
              goto bit_ior;
            }
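
          /* Worked example (annotation, not part of the original source):
             "(x & 4) + (y & 3)" has disjoint masks (4 & 3 == 0), so the
             addition can never carry and is rewritten as
             "(x & 4) | (y & 3)".  */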
          /* Reassociate (plus (plus (mult) (foo)) (mult)) as
             (plus (plus (mult) (mult)) (foo)) so that we can
             take advantage of the factoring cases below.  */
          if (((TREE_CODE (arg0) == PLUS_EXPR
                || TREE_CODE (arg0) == MINUS_EXPR)
               && TREE_CODE (arg1) == MULT_EXPR)
              || ((TREE_CODE (arg1) == PLUS_EXPR
                   || TREE_CODE (arg1) == MINUS_EXPR)
                  && TREE_CODE (arg0) == MULT_EXPR))
            {
              tree parg0, parg1, parg, marg;
              enum tree_code pcode;

              if (TREE_CODE (arg1) == MULT_EXPR)
                parg = arg0, marg = arg1;
              else
                parg = arg1, marg = arg0;
              pcode = TREE_CODE (parg);
              parg0 = TREE_OPERAND (parg, 0);
              parg1 = TREE_OPERAND (parg, 1);
              STRIP_NOPS (parg0);
              STRIP_NOPS (parg1);

              if (TREE_CODE (parg0) == MULT_EXPR
                  && TREE_CODE (parg1) != MULT_EXPR)
                return fold_build2 (pcode, type,
                                    fold_build2 (PLUS_EXPR, type,
                                                 fold_convert (type, parg0),
                                                 fold_convert (type, marg)),
                                    fold_convert (type, parg1));
              if (TREE_CODE (parg0) != MULT_EXPR
                  && TREE_CODE (parg1) == MULT_EXPR)
                return fold_build2 (PLUS_EXPR, type,
                                    fold_convert (type, parg0),
                                    fold_build2 (pcode, type,
                                                 fold_convert (type, marg),
                                                 fold_convert (type,
                                                               parg1)));
            }
        }
      else
        {
          /* See if ARG1 is zero and X + ARG1 reduces to X.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
            return non_lvalue (fold_convert (type, arg0));

          /* Likewise if the operands are reversed.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
            return non_lvalue (fold_convert (type, arg1));

          /* Convert X + -C into X - C.  */
          if (TREE_CODE (arg1) == REAL_CST
              && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
            {
              tem = fold_negate_const (arg1, type);
              if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
                return fold_build2 (MINUS_EXPR, type,
                                    fold_convert (type, arg0),
                                    fold_convert (type, tem));
            }

          /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
             to __complex__ ( x, y ).  This is not the same for SNaNs or
             if signed zeros are involved.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
              tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
              bool arg0rz = false, arg0iz = false;
              if ((arg0r && (arg0rz = real_zerop (arg0r)))
                  || (arg0i && (arg0iz = real_zerop (arg0i))))
                {
                  tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
                  tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
                  if (arg0rz && arg1i && real_zerop (arg1i))
                    {
                      tree rp = arg1r ? arg1r
                        : build1 (REALPART_EXPR, rtype, arg1);
                      tree ip = arg0i ? arg0i
                        : build1 (IMAGPART_EXPR, rtype, arg0);
                      return fold_build2 (COMPLEX_EXPR, type, rp, ip);
                    }
                  else if (arg0iz && arg1r && real_zerop (arg1r))
                    {
                      tree rp = arg0r ? arg0r
                        : build1 (REALPART_EXPR, rtype, arg0);
                      tree ip = arg1i ? arg1i
                        : build1 (IMAGPART_EXPR, rtype, arg1);
                      return fold_build2 (COMPLEX_EXPR, type, rp, ip);
                    }
                }
            }
          if (flag_unsafe_math_optimizations
              && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
              && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
              && (tem = distribute_real_division (code, type, arg0, arg1)))
            return tem;

          /* Convert x+x into x*2.0.  */
          if (operand_equal_p (arg0, arg1, 0)
              && SCALAR_FLOAT_TYPE_P (type))
            return fold_build2 (MULT_EXPR, type, arg0,
                                build_real (type, dconst2));

          /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg1) == PLUS_EXPR
              && TREE_CODE (arg0) != MULT_EXPR)
            {
              tree tree10 = TREE_OPERAND (arg1, 0);
              tree tree11 = TREE_OPERAND (arg1, 1);
              if (TREE_CODE (tree11) == MULT_EXPR
                  && TREE_CODE (tree10) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
                  return fold_build2 (PLUS_EXPR, type, tree0, tree11);
                }
            }

          /* Convert (b*c + d*e) + a into b*c + (d*e +a).
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) != MULT_EXPR)
            {
              tree tree00 = TREE_OPERAND (arg0, 0);
              tree tree01 = TREE_OPERAND (arg0, 1);
              if (TREE_CODE (tree01) == MULT_EXPR
                  && TREE_CODE (tree00) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
                  return fold_build2 (PLUS_EXPR, type, tree00, tree0);
                }
            }
        }
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
         is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
         is a rotate of A by B bits.  */
      {
        enum tree_code code0, code1;
        tree rtype;
        code0 = TREE_CODE (arg0);
        code1 = TREE_CODE (arg1);
        if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
             || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
            && operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 0), 0)
            && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
                TYPE_UNSIGNED (rtype))
            /* Only create rotates in complete modes.  Other cases are not
               expanded properly.  */
            && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
          {
            tree tree01, tree11;
            enum tree_code code01, code11;

            tree01 = TREE_OPERAND (arg0, 1);
            tree11 = TREE_OPERAND (arg1, 1);
            STRIP_NOPS (tree01);
            STRIP_NOPS (tree11);
            code01 = TREE_CODE (tree01);
            code11 = TREE_CODE (tree11);
            if (code01 == INTEGER_CST
                && code11 == INTEGER_CST
                && TREE_INT_CST_HIGH (tree01) == 0
                && TREE_INT_CST_HIGH (tree11) == 0
                && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
                    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
              return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
                             code0 == LSHIFT_EXPR ? tree01 : tree11);
            else if (code11 == MINUS_EXPR)
              {
                tree tree110, tree111;
                tree110 = TREE_OPERAND (tree11, 0);
                tree111 = TREE_OPERAND (tree11, 1);
                STRIP_NOPS (tree110);
                STRIP_NOPS (tree111);
                if (TREE_CODE (tree110) == INTEGER_CST
                    && 0 == compare_tree_int (tree110,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree01, tree111, 0))
                  return build2 ((code0 == LSHIFT_EXPR
                                  ? LROTATE_EXPR
                                  : RROTATE_EXPR),
                                 type, TREE_OPERAND (arg0, 0), tree01);
              }
            else if (code01 == MINUS_EXPR)
              {
                tree tree010, tree011;
                tree010 = TREE_OPERAND (tree01, 0);
                tree011 = TREE_OPERAND (tree01, 1);
                STRIP_NOPS (tree010);
                STRIP_NOPS (tree011);
                if (TREE_CODE (tree010) == INTEGER_CST
                    && 0 == compare_tree_int (tree010,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree11, tree011, 0))
                  return build2 ((code0 != LSHIFT_EXPR
                                  ? LROTATE_EXPR
                                  : RROTATE_EXPR),
                                 type, TREE_OPERAND (arg0, 0), tree11);
              }
          }
      }
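
      /* Worked example (annotation, not part of the original source):
         for unsigned 32-bit x, "(x << 3) + (x >> 29)" satisfies
         3 + 29 == 32 and becomes a left-rotate (LROTATE_EXPR) of x
         by 3; the variable-count form "(x << b) + (x >> (32 - b))" is
         likewise recognized as a rotate by b.  */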
    associate:
      /* In most languages, can't associate operations on floats through
         parentheses.  Rather than remember where the parentheses were, we
         don't associate floats at all, unless the user has specified
         -fassociative-math.
         And, we need to make sure type is not saturating.  */

      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
          && !TYPE_SATURATING (type))
        {
          tree var0, con0, lit0, minus_lit0;
          tree var1, con1, lit1, minus_lit1;
          bool ok = true;

          /* Split both trees into variables, constants, and literals.  Then
             associate each group together, the constants with literals,
             then the result with variables.  This increases the chances of
             literals being recombined later and of generating relocatable
             expressions for the sum of a constant and literal.  */
          var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
          var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
                             code == MINUS_EXPR);

          /* With undefined overflow we can only associate constants
             with one variable.  */
          if ((POINTER_TYPE_P (type)
               || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
              && var0 && var1)
            {
              tree tmp0 = var0;
              tree tmp1 = var1;

              if (TREE_CODE (tmp0) == NEGATE_EXPR)
                tmp0 = TREE_OPERAND (tmp0, 0);
              if (TREE_CODE (tmp1) == NEGATE_EXPR)
                tmp1 = TREE_OPERAND (tmp1, 0);
              /* The only case we can still associate with two variables
                 is if they are the same, modulo negation.  */
              if (!operand_equal_p (tmp0, tmp1, 0))
                ok = false;
            }

          /* Only do something if we found more than two objects.  Otherwise,
             nothing has changed and we risk infinite recursion.  */
          if (ok
              && (2 < ((var0 != 0) + (var1 != 0)
                       + (con0 != 0) + (con1 != 0)
                       + (lit0 != 0) + (lit1 != 0)
                       + (minus_lit0 != 0) + (minus_lit1 != 0))))
            {
              /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
              if (code == MINUS_EXPR)
                code = PLUS_EXPR;

              var0 = associate_trees (var0, var1, code, type);
              con0 = associate_trees (con0, con1, code, type);
              lit0 = associate_trees (lit0, lit1, code, type);
              minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);

              /* Preserve the MINUS_EXPR if the negative part of the literal is
                 greater than the positive part.  Otherwise, the multiplicative
                 folding code (i.e. extract_muldiv) may be fooled in case
                 unsigned constants are subtracted, like in the following
                 example: ((X*2 + 4) - 8U)/2.  */
              if (minus_lit0 && lit0)
                {
                  if (TREE_CODE (lit0) == INTEGER_CST
                      && TREE_CODE (minus_lit0) == INTEGER_CST
                      && tree_int_cst_lt (lit0, minus_lit0))
                    {
                      minus_lit0 = associate_trees (minus_lit0, lit0,
                                                    MINUS_EXPR, type);
                      lit0 = 0;
                    }
                  else
                    {
                      lit0 = associate_trees (lit0, minus_lit0,
                                              MINUS_EXPR, type);
                      minus_lit0 = 0;
                    }
                }
              if (minus_lit0)
                {
                  if (con0 == 0)
                    return fold_convert (type,
                                         associate_trees (var0, minus_lit0,
                                                          MINUS_EXPR, type));
                  else
                    {
                      con0 = associate_trees (con0, minus_lit0,
                                              MINUS_EXPR, type);
                      return fold_convert (type,
                                           associate_trees (var0, con0,
                                                            PLUS_EXPR, type));
                    }
                }

              con0 = associate_trees (con0, lit0, code, type);
              return fold_convert (type, associate_trees (var0, con0,
                                                          code, type));
            }
        }

      return NULL_TREE;
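
      /* Worked example (annotation, not part of the original source):
         for "(x + 4) + 5" split_tree yields variable x and literals 4
         and 5, three objects in total, so association regroups the
         expression as "x + 9" with the literal part folded into a
         single constant.  */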
    case MINUS_EXPR:
      /* Pointer simplifications for subtraction, simple reassociations.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
        {
          /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
          if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
              && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
            {
              tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
              tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
              tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
              tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
              return fold_build2 (PLUS_EXPR, type,
                                  fold_build2 (MINUS_EXPR, type, arg00, arg10),
                                  fold_build2 (MINUS_EXPR, type, arg01, arg11));
            }
          /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies.  */
          else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
            {
              tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
              tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
              tree tmp = fold_binary (MINUS_EXPR, type, arg00,
                                      fold_convert (type, arg1));
              if (tmp)
                return fold_build2 (PLUS_EXPR, type, tmp, arg01);
            }
        }
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2 (PLUS_EXPR, type, op0,
                            fold_convert (type, TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && (FLOAT_TYPE_P (type)
              || INTEGRAL_TYPE_P (type))
          && negate_expr_p (arg1)
          && reorder_operands_p (arg0, arg1))
        return fold_build2 (MINUS_EXPR, type,
                            fold_convert (type, negate_expr (arg1)),
                            fold_convert (type, TREE_OPERAND (arg0, 0)));
      /* Convert -A - 1 to ~A.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (arg0) == NEGATE_EXPR
          && integer_onep (arg1)
          && !TYPE_OVERFLOW_TRAPS (type))
        return fold_build1 (BIT_NOT_EXPR, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)));

      /* Convert -1 - A to ~A.  */
      if (INTEGRAL_TYPE_P (type)
          && integer_all_onesp (arg0))
        return fold_build1 (BIT_NOT_EXPR, type, op1);

      /* X - (X / CST) * CST is X % CST.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
          && operand_equal_p (arg0,
                              TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
                              TREE_OPERAND (arg1, 1), 0))
        return fold_convert (type,
                             fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
                                          arg0, TREE_OPERAND (arg1, 1)));
      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg0))
            return negate_expr (fold_convert (type, arg1));
          if (integer_zerop (arg1))
            return non_lvalue (fold_convert (type, arg0));

          /* Fold A - (A & B) into ~B & A.  */
          if (!TREE_SIDE_EFFECTS (arg0)
              && TREE_CODE (arg1) == BIT_AND_EXPR)
            {
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
                {
                  tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
                  return fold_build2 (BIT_AND_EXPR, type,
                                      fold_build1 (BIT_NOT_EXPR, type, arg10),
                                      fold_convert (type, arg0));
                }
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
                {
                  tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
                  return fold_build2 (BIT_AND_EXPR, type,
                                      fold_build1 (BIT_NOT_EXPR, type, arg11),
                                      fold_convert (type, arg0));
                }
            }

          /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
             any power of 2 minus 1.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0))
            {
              tree mask0 = TREE_OPERAND (arg0, 1);
              tree mask1 = TREE_OPERAND (arg1, 1);
              tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);

              if (operand_equal_p (tem, mask1, 0))
                {
                  tem = fold_build2 (BIT_XOR_EXPR, type,
                                     TREE_OPERAND (arg0, 0), mask1);
                  return fold_build2 (MINUS_EXPR, type, tem, mask1);
                }
            }
        }
      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
        return non_lvalue (fold_convert (type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
         ARG0 is zero and X + ARG0 reduces to X, since that would mean
         (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
        return negate_expr (fold_convert (type, arg1));

      /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
         __complex__ ( x, -y ).  This is not the same for SNaNs or if
         signed zeros are involved.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
          && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
        {
          tree rtype = TREE_TYPE (TREE_TYPE (arg0));
          tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
          tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
          bool arg0rz = false, arg0iz = false;
          if ((arg0r && (arg0rz = real_zerop (arg0r)))
              || (arg0i && (arg0iz = real_zerop (arg0i))))
            {
              tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
              tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
              if (arg0rz && arg1i && real_zerop (arg1i))
                {
                  tree rp = fold_build1 (NEGATE_EXPR, rtype,
                                         arg1r ? arg1r
                                         : build1 (REALPART_EXPR, rtype, arg1));
                  tree ip = arg0i ? arg0i
                    : build1 (IMAGPART_EXPR, rtype, arg0);
                  return fold_build2 (COMPLEX_EXPR, type, rp, ip);
                }
              else if (arg0iz && arg1r && real_zerop (arg1r))
                {
                  tree rp = arg0r ? arg0r
                    : build1 (REALPART_EXPR, rtype, arg0);
                  tree ip = fold_build1 (NEGATE_EXPR, rtype,
                                         arg1i ? arg1i
                                         : build1 (IMAGPART_EXPR, rtype, arg1));
                  return fold_build2 (COMPLEX_EXPR, type, rp, ip);
                }
            }
        }
      /* Fold &x - &x.  This can happen from &x.foo - &x.
         This is unsafe for certain floats even in non-IEEE formats.
         In IEEE, it is unsafe because it does wrong for NaNs.
         Also note that operand_equal_p is always false if an operand
         is volatile.  */

      if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
          && operand_equal_p (arg0, arg1, 0))
        return fold_convert (type, integer_zero_node);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (arg1)
          && ((FLOAT_TYPE_P (type)
               /* Avoid this transformation if B is a positive REAL_CST.  */
               && (TREE_CODE (arg1) != REAL_CST
                   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
              || INTEGRAL_TYPE_P (type)))
        return fold_build2 (PLUS_EXPR, type,
                            fold_convert (type, arg0),
                            fold_convert (type, negate_expr (arg1)));

      /* Try folding difference of addresses.  */
      {
        HOST_WIDE_INT diff;

        if ((TREE_CODE (arg0) == ADDR_EXPR
             || TREE_CODE (arg1) == ADDR_EXPR)
            && ptr_difference_const (arg0, arg1, &diff))
          return build_int_cst_type (type, diff);
      }

      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
          && TREE_CODE (arg1) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
        {
          tree aref0 = TREE_OPERAND (arg0, 0);
          tree aref1 = TREE_OPERAND (arg1, 0);
          if (operand_equal_p (TREE_OPERAND (aref0, 0),
                               TREE_OPERAND (aref1, 0), 0))
            {
              tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
              tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
              tree esz = array_ref_element_size (aref0);
              tree diff = build2 (MINUS_EXPR, type, op0, op1);
              return fold_build2 (MULT_EXPR, type, diff,
                                  fold_convert (type, esz));
            }
        }
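
      /* Worked example (annotation, not part of the original source):
         for "int a[10];" the byte difference "&a[i] - &a[j]" folds to
         "(i - j) * 4", the index difference scaled by the element size
         returned by array_ref_element_size.  */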
      if (flag_unsafe_math_optimizations
          && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
          && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
          && (tem = distribute_real_division (code, type, arg0, arg1)))
        return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
         same or one.  Make sure type is not saturating.
         fold_plusminus_mult_expr will re-associate.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && !TYPE_SATURATING (type)
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
          tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      goto associate;

    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2 (MULT_EXPR, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)),
                            fold_convert (type, negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2 (MULT_EXPR, type,
                            fold_convert (type, negate_expr (arg0)),
                            fold_convert (type, TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return omit_one_operand (type, arg1, arg0);
          if (integer_onep (arg1))
            return non_lvalue (fold_convert (type, arg0));
          /* Transform x * -1 into -x.  Make sure to do the negation
             on the original operand with conversions not stripped
             because we can only strip non-sign-changing conversions.  */
          if (integer_all_onesp (arg1))
            return fold_convert (type, negate_expr (op0));
          /* Transform x * -C into -x * C if x is easily negatable.  */
          if (TREE_CODE (arg1) == INTEGER_CST
              && tree_int_cst_sgn (arg1) == -1
              && negate_expr_p (arg0)
              && (tem = negate_expr (arg1)) != arg1
              && !TREE_OVERFLOW (tem))
            return fold_build2 (MULT_EXPR, type,
                                fold_convert (type, negate_expr (arg0)),
                                tem);

          /* (a * (1 << b)) is (a << b)  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg1, 0)))
            return fold_build2 (LSHIFT_EXPR, type, op0,
                                TREE_OPERAND (arg1, 1));
          if (TREE_CODE (arg0) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg0, 0)))
            return fold_build2 (LSHIFT_EXPR, type, op1,
                                TREE_OPERAND (arg0, 1));
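          /* E.g. "a * (1 << b)" becomes "a << b", replacing a multiply
             by a power of two with the cheaper shift.  */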
          strict_overflow_p = false;
          if (TREE_CODE (arg1) == INTEGER_CST
              && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                             &strict_overflow_p)))
            {
              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "multiplication"),
                                       WARN_STRICT_OVERFLOW_MISC);
              return fold_convert (type, tem);
            }

          /* Optimize z * conj(z) for integer complex numbers.  */
          if (TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (type, arg1);
          if (TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (type, arg0);
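          /* z * conj(z) is (re*re + im*im) + 0i, a purely real value;
             fold_mult_zconjz builds that form directly.  */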
        }
      else
        {
          /* Maybe fold x * 0 to 0.  The expressions aren't the same
             when x is NaN, since x * 0 is also NaN.  Nor are they the
             same in modes with signed zeros, since multiplying a
             negative value by 0 gives -0, not +0.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_zerop (arg1))
            return omit_one_operand (type, arg1, arg0);
          /* In IEEE floating point, x*1 is not equivalent to x for snans.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_onep (arg1))
            return non_lvalue (fold_convert (type, arg0));

          /* Transform x * -1.0 into -x.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_minus_onep (arg1))
            return fold_convert (type, negate_expr (arg0));

          /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may
             change the result for floating point types due to rounding,
             so it is applied only if -fassociative-math was specified.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == RDIV_EXPR
              && TREE_CODE (arg1) == REAL_CST
              && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
            {
              tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
                                      arg1, 0);
              if (tem)
                return fold_build2 (RDIV_EXPR, type, tem,
                                    TREE_OPERAND (arg0, 1));
            }
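          /* E.g. "(2.0 / x) * 3.0" becomes "6.0 / x".  The constants are
             combined at compile time, which can round differently from
             the two-operation original, hence the guard.  */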
          /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
          if (operand_equal_p (arg0, arg1, 0))
            {
              tree tem = fold_strip_sign_ops (arg0);
              if (tem != NULL_TREE)
                {
                  tem = fold_convert (type, tem);
                  return fold_build2 (MULT_EXPR, type, tem, tem);
                }
            }

          /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
             This is not the same for NaNs or if signed zeros are
             involved.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
              && TREE_CODE (arg1) == COMPLEX_CST
              && real_zerop (TREE_REALPART (arg1)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              if (real_onep (TREE_IMAGPART (arg1)))
                return fold_build2 (COMPLEX_EXPR, type,
                                    negate_expr (fold_build1 (IMAGPART_EXPR,
                                                              rtype, arg0)),
                                    fold_build1 (REALPART_EXPR, rtype, arg0));
              else if (real_minus_onep (TREE_IMAGPART (arg1)))
                return fold_build2 (COMPLEX_EXPR, type,
                                    fold_build1 (IMAGPART_EXPR, rtype, arg0),
                                    negate_expr (fold_build1 (REALPART_EXPR,
                                                              rtype, arg0)));
            }
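          /* E.g. (a + bi) * I is -b + ai: multiplying by +-i just swaps
             the real and imaginary parts and negates one of them.  */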
          /* Optimize z * conj(z) for floating point complex numbers.
             Guarded by flag_unsafe_math_optimizations as non-finite
             imaginary components don't produce scalar results.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (type, arg1);
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (type, arg0);

          if (flag_unsafe_math_optimizations)
            {
              enum built_in_function fcode0 = builtin_mathfn_code (arg0);
              enum built_in_function fcode1 = builtin_mathfn_code (arg1);

              /* Optimizations of root(...)*root(...).  */
              if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
                {
                  tree rootfn, arg;
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);

                  /* Optimize sqrt(x)*sqrt(x) as x.  */
                  if (BUILTIN_SQRT_P (fcode0)
                      && operand_equal_p (arg00, arg10, 0)
                      && ! HONOR_SNANS (TYPE_MODE (type)))
                    return arg00;

                  /* Optimize root(x)*root(y) as root(x*y).  */
                  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
                  return build_call_expr (rootfn, 1, arg);
                }

              /* Optimize expN(x)*expN(y) as expN(x+y).  */
              if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
                {
                  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  tree arg = fold_build2 (PLUS_EXPR, type,
                                          CALL_EXPR_ARG (arg0, 0),
                                          CALL_EXPR_ARG (arg1, 0));
                  return build_call_expr (expfn, 1, arg);
                }
              /* Optimizations of pow(...)*pow(...).  */
              if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
                  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
                  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
                {
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);

                  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
                  if (operand_equal_p (arg01, arg11, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
                      return build_call_expr (powfn, 2, arg, arg01);
                    }

                  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
                  if (operand_equal_p (arg00, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
                      return build_call_expr (powfn, 2, arg00, arg);
                    }
                }
              /* Optimize tan(x)*cos(x) as sin(x).  */
              if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
                   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
                   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
                   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
                   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
                   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
                  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                      CALL_EXPR_ARG (arg1, 0), 0))
                {
                  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

                  if (sinfn != NULL_TREE)
                    return build_call_expr (sinfn, 1,
                                            CALL_EXPR_ARG (arg0, 0));
                }

              /* Optimize x*pow(x,c) as pow(x,c+1).  */
              if (fcode1 == BUILT_IN_POW
                  || fcode1 == BUILT_IN_POWF
                  || fcode1 == BUILT_IN_POWL)
                {
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);
                  if (TREE_CODE (arg11) == REAL_CST
                      && !TREE_OVERFLOW (arg11)
                      && operand_equal_p (arg0, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
                      REAL_VALUE_TYPE c;
                      tree arg;

                      c = TREE_REAL_CST (arg11);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      return build_call_expr (powfn, 2, arg0, arg);
                    }
                }

              /* Optimize pow(x,c)*x as pow(x,c+1).  */
              if (fcode0 == BUILT_IN_POW
                  || fcode0 == BUILT_IN_POWF
                  || fcode0 == BUILT_IN_POWL)
                {
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
                  if (TREE_CODE (arg01) == REAL_CST
                      && !TREE_OVERFLOW (arg01)
                      && operand_equal_p (arg1, arg00, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      REAL_VALUE_TYPE c;
                      tree arg;

                      c = TREE_REAL_CST (arg01);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      return build_call_expr (powfn, 2, arg1, arg);
                    }
                }

              /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
              if (! optimize_size
                  && operand_equal_p (arg0, arg1, 0))
                {
                  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

                  if (powfn)
                    {
                      tree arg = build_real (type, dconst2);
                      return build_call_expr (powfn, 2, arg0, arg);
                    }
                }
            }
        }
      goto associate;

    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
        return omit_one_operand (type, arg1, arg0);
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue (fold_convert (type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = fold_convert (type, integer_zero_node);
          t1 = fold_unary (BIT_NOT_EXPR, type, t1);
          return omit_one_operand (type, t1, arg1);
        }

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = fold_convert (type, integer_zero_node);
          t1 = fold_unary (BIT_NOT_EXPR, type, t1);
          return omit_one_operand (type, t1, arg0);
        }
      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
          int width = TYPE_PRECISION (type), w;
          hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
          lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
          hi2 = TREE_INT_CST_HIGH (arg1);
          lo2 = TREE_INT_CST_LOW (arg1);

          /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
          if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
            return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));

          if (width > HOST_BITS_PER_WIDE_INT)
            {
              mhi = (unsigned HOST_WIDE_INT) -1
                    >> (2 * HOST_BITS_PER_WIDE_INT - width);
              mlo = -1;
            }
          else
            {
              mhi = 0;
              mlo = (unsigned HOST_WIDE_INT) -1
                    >> (HOST_BITS_PER_WIDE_INT - width);
            }

          /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
          if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
            return fold_build2 (BIT_IOR_EXPR, type,
                                TREE_OPERAND (arg0, 0), arg1);

          /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
             unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
             mode which allows further optimizations.  */
          hi1 &= mhi;
          lo1 &= mlo;
          hi2 &= mhi;
          lo2 &= mlo;
          hi3 = hi1 & ~hi2;
          lo3 = lo1 & ~lo2;
          for (w = BITS_PER_UNIT;
               w <= width && w <= HOST_BITS_PER_WIDE_INT;
               w <<= 1)
            {
              unsigned HOST_WIDE_INT mask
                = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
              if (((lo1 | lo2) & mask) == mask
                  && (lo1 & ~mask) == 0 && hi1 == 0)
                {
                  hi3 = 0;
                  lo3 = mask;
                  break;
                }
            }
          if (hi3 != hi1 || lo3 != lo1)
            return fold_build2 (BIT_IOR_EXPR, type,
                                fold_build2 (BIT_AND_EXPR, type,
                                             TREE_OPERAND (arg0, 0),
                                             build_int_cst_wide (type,
                                                                 lo3, hi3)),
                                arg1);
        }
      /* (X & Y) | Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
      /* (X & Y) | X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
      /* X | (X & Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
        return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
      /* X | (Y & X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));

      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;
      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

         This results in more efficient code for machines without a NAND
         instruction.  Combine will canonicalize to the first form
         which will allow use of NAND instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return fold_build1 (BIT_NOT_EXPR, type,
                              build2 (BIT_AND_EXPR, type,
                                      TREE_OPERAND (arg0, 0),
                                      TREE_OPERAND (arg1, 0)));
        }
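      /* This is De Morgan's law: ~a | ~b == ~(a & b).  */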
      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (integer_all_onesp (arg1))
        return fold_build1 (BIT_NOT_EXPR, type, op0);
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand (type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = fold_convert (type, integer_zero_node);
          t1 = fold_unary (BIT_NOT_EXPR, type, t1);
          return omit_one_operand (type, t1, arg1);
        }

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = fold_convert (type, integer_zero_node);
          t1 = fold_unary (BIT_NOT_EXPR, type, t1);
          return omit_one_operand (type, t1, arg0);
        }
      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
         with a constant, and the two constants have no bits in common,
         we should treat this as a BIT_IOR_EXPR since this may produce more
         simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && integer_zerop (const_binop (BIT_AND_EXPR,
                                         TREE_OPERAND (arg0, 1),
                                         TREE_OPERAND (arg1, 1), 0)))
        {
          code = BIT_IOR_EXPR;
          goto bit_ior;
        }
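      /* When C1 & C2 == 0 no bit can be set on both sides, so XOR and IOR
         agree: e.g. (x & 0xf0) ^ (y & 0x0f) == (x & 0xf0) | (y & 0x0f).  */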
      /* (X | Y) ^ X -> Y & ~X  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 1);
          t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
                            arg1);
          t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
                            fold_convert (type, t1));
          return t1;
        }

      /* (Y | X) ^ X -> Y & ~X  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 0);
          t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
                            arg1);
          t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
                            fold_convert (type, t1));
          return t1;
        }

      /* X ^ (X | Y) -> Y & ~X  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 1);
          t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
                            arg0);
          t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
                            fold_convert (type, t1));
          return t1;
        }

      /* X ^ (Y | X) -> Y & ~X  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 0);
          t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
                            arg0);
          t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
                            fold_convert (type, t1));
          return t1;
        }
      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        return fold_build2 (code, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)),
                            fold_convert (type, TREE_OPERAND (arg1, 0)));

      /* Convert ~X ^ C to X ^ ~C.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2 (code, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)),
                            fold_build1 (BIT_NOT_EXPR, type, arg1));

      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        return fold_build2 (EQ_EXPR, type, arg0,
                            build_int_cst (TREE_TYPE (arg0), 0));
      /* Fold (X & Y) ^ Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tem = fold_convert (type, TREE_OPERAND (arg0, 0));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_build1 (BIT_NOT_EXPR, type, tem),
                              fold_convert (type, arg1));
        }
      /* Fold (X & Y) ^ X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        {
          tem = fold_convert (type, TREE_OPERAND (arg0, 1));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_build1 (BIT_NOT_EXPR, type, tem),
                              fold_convert (type, arg1));
        }
      /* Fold X ^ (X & Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          tem = fold_convert (type, TREE_OPERAND (arg1, 1));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_convert (type, arg0),
                              fold_build1 (BIT_NOT_EXPR, type, tem));
        }
      /* Fold X ^ (Y & X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_convert (type, TREE_OPERAND (arg1, 0));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_build1 (BIT_NOT_EXPR, type, tem),
                              fold_convert (type, arg0));
        }
      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
        return omit_one_operand (type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue (fold_convert (type, arg0));

      /* ~X & X is always zero.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand (type, integer_zero_node, arg1);

      /* X & ~X is always zero.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand (type, integer_zero_node, arg0);
      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree tmp1 = fold_convert (TREE_TYPE (arg0), arg1);
          tree tmp2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
                                   TREE_OPERAND (arg0, 0), tmp1);
          tree tmp3 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
                                   TREE_OPERAND (arg0, 1), tmp1);
          return fold_convert (type,
                               fold_build2 (BIT_IOR_EXPR, TREE_TYPE (arg0),
                                            tmp2, tmp3));
        }
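      /* E.g. "(x | 0x0f) & 0xf0" distributes to
         "(x & 0xf0) | (0x0f & 0xf0)", and the constant half folds to 0,
         leaving "x & 0xf0".  */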
      /* (X | Y) & Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
      /* (X | Y) & X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
      /* X & (X | Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
        return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
      /* X & (Y | X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        {
          tem = TREE_OPERAND (arg0, 0);
          return fold_build2 (EQ_EXPR, type,
                              fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
                                           build_int_cst (TREE_TYPE (tem), 1)),
                              build_int_cst (TREE_TYPE (tem), 0));
        }
      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && integer_onep (arg1))
        {
          tem = TREE_OPERAND (arg0, 0);
          return fold_build2 (EQ_EXPR, type,
                              fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
                                           build_int_cst (TREE_TYPE (tem), 1)),
                              build_int_cst (TREE_TYPE (tem), 0));
        }
      /* Fold (X ^ Y) & Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tem = fold_convert (type, TREE_OPERAND (arg0, 0));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_build1 (BIT_NOT_EXPR, type, tem),
                              fold_convert (type, arg1));
        }
      /* Fold (X ^ Y) & X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        {
          tem = fold_convert (type, TREE_OPERAND (arg0, 1));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_build1 (BIT_NOT_EXPR, type, tem),
                              fold_convert (type, arg1));
        }
      /* Fold X & (X ^ Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          tem = fold_convert (type, TREE_OPERAND (arg1, 1));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_convert (type, arg0),
                              fold_build1 (BIT_NOT_EXPR, type, tem));
        }
      /* Fold X & (Y ^ X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_convert (type, TREE_OPERAND (arg1, 0));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_build1 (BIT_NOT_EXPR, type, tem),
                              fold_convert (type, arg0));
        }
= distribute_bit_expr (code
, type
, arg0
, arg1
);
10682 if (t1
!= NULL_TREE
)
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
          && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
        {
          unsigned int prec
            = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

          if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
              && (~TREE_INT_CST_LOW (arg1)
                  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
            return fold_convert (type, TREE_OPERAND (arg0, 0));
        }
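      /* An unsigned char widened to int can only occupy the low 8 bits
         (assuming 8-bit chars), so a mask like 0377 that covers all of
         them is a no-op.  */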
      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

         This results in more efficient code for machines without a NOR
         instruction.  Combine will canonicalize to the first form
         which will allow use of NOR instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return fold_build1 (BIT_NOT_EXPR, type,
                              build2 (BIT_IOR_EXPR, type,
                                      fold_convert (type,
                                                    TREE_OPERAND (arg0, 0)),
                                      fold_convert (type,
                                                    TREE_OPERAND (arg1, 0))));
        }
      /* If arg0 is derived from the address of an object or function, we may
         be able to fold this expression using the object or function's
         alignment.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
        {
          unsigned HOST_WIDE_INT modulus, residue;
          unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);

          modulus = get_pointer_modulus_and_residue (arg0, &residue);

          /* This works because modulus is a power of 2.  If this weren't the
             case, we'd have to replace it by its greatest power-of-2
             divisor: modulus & -modulus.  */
          if (low < modulus)
            return build_int_cst (type, residue & low);
        }
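      /* E.g. the address of an object with 8-byte alignment has zero low
         bits, so "addr & 7" folds to "residue & 7" -- 0 when the object
         sits exactly on the alignment boundary.  */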
      /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
         (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
         if the new mask might be further optimized.  */
      if ((TREE_CODE (arg0) == LSHIFT_EXPR
           || TREE_CODE (arg0) == RSHIFT_EXPR)
          && host_integerp (TREE_OPERAND (arg0, 1), 1)
          && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
          && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
             < TYPE_PRECISION (TREE_TYPE (arg0))
          && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
          && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
        {
          unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
          unsigned HOST_WIDE_INT mask
            = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
          unsigned HOST_WIDE_INT newmask, zerobits = 0;
          tree shift_type = TREE_TYPE (arg0);

          if (TREE_CODE (arg0) == LSHIFT_EXPR)
            zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
          else if (TREE_CODE (arg0) == RSHIFT_EXPR
                   && TYPE_PRECISION (TREE_TYPE (arg0))
                      == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
            {
              unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
              tree arg00 = TREE_OPERAND (arg0, 0);
              /* See if more bits can be proven as zero because of
                 zero extension.  */
              if (TREE_CODE (arg00) == NOP_EXPR
                  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
                {
                  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
                  if (TYPE_PRECISION (inner_type)
                      == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
                      && TYPE_PRECISION (inner_type) < prec)
                    {
                      prec = TYPE_PRECISION (inner_type);
                      /* See if we can shorten the right shift.  */
                      if (shiftc < prec)
                        shift_type = inner_type;
                    }
                }
              zerobits = ~(unsigned HOST_WIDE_INT) 0;
              zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
              zerobits <<= prec - shiftc;
              /* For arithmetic shift if sign bit could be set, zerobits
                 can actually contain sign bits, so no transformation is
                 possible, unless MASK masks them all away.  In that
                 case the shift needs to be converted into logical shift.  */
              if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
                  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
                {
                  if ((mask & zerobits) == 0)
                    shift_type = unsigned_type_for (TREE_TYPE (arg0));
                  else
                    zerobits = 0;
                }
            }

          /* ((X << 16) & 0xff00) is (X, 0).  */
          if ((mask & zerobits) == mask)
            return omit_one_operand (type, build_int_cst (type, 0), arg0);
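          /* E.g. with 32-bit int, "(x << 16) & 0xff00" is always 0: the
             shift clears the low 16 bits, which is exactly where the
             mask lives, so only x's side effects survive.  */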
          newmask = mask | zerobits;
          if (newmask != mask && (newmask & (newmask + 1)) == 0)
            {
              unsigned int prec;

              /* Only do the transformation if NEWMASK is some integer
                 mode's mask.  */
              for (prec = BITS_PER_UNIT;
                   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
                if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
                  break;
              if (prec < HOST_BITS_PER_WIDE_INT
                  || newmask == ~(unsigned HOST_WIDE_INT) 0)
                {
                  if (shift_type != TREE_TYPE (arg0))
                    {
                      tem = fold_build2 (TREE_CODE (arg0), shift_type,
                                         fold_convert (shift_type,
                                                       TREE_OPERAND (arg0,
                                                                     0)),
                                         TREE_OPERAND (arg0, 1));
                      tem = fold_convert (type, tem);
                    }
                  else
                    tem = op0;
                  return fold_build2 (BIT_AND_EXPR, type, tem,
                                      build_int_cst_type (TREE_TYPE (op1),
                                                          newmask));
                }
            }
        }

      goto associate;

    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
         of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
          && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
          && real_zerop (arg1))
        return NULL_TREE;

      /* Optimize A / A to 1.0 if we don't care about
         NaNs or Infinities.  Skip the transformation
         for non-real operands.  */
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
          && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
          && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
          && operand_equal_p (arg0, arg1, 0))
        {
          tree r = build_real (TREE_TYPE (arg0), dconst1);

          return omit_two_operands (type, r, arg0, arg1);
        }
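      /* A / A is not 1.0 when A is a NaN (NaN/NaN is NaN), an infinity
         (Inf/Inf is NaN) or zero (0/0 is NaN), hence the guards above.  */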
      /* The complex version of the above A / A optimization.  */
      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
          && operand_equal_p (arg0, arg1, 0))
        {
          tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
          if (! HONOR_NANS (TYPE_MODE (elem_type))
              && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
            {
              tree r = build_real (elem_type, dconst1);
              /* omit_two_operands will call fold_convert for us.  */
              return omit_two_operands (type, r, arg0, arg1);
            }
        }

      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2 (RDIV_EXPR, type,
                            TREE_OPERAND (arg0, 0),
                            negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2 (RDIV_EXPR, type,
                            negate_expr (arg0),
                            TREE_OPERAND (arg1, 0));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_onep (arg1))
        return non_lvalue (fold_convert (type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_minus_onep (arg1))
        return non_lvalue (fold_convert (type, negate_expr (arg0)));
      /* If ARG1 is a constant, we can convert this to a multiply by the
         reciprocal.  This does not have the same rounding properties,
         so only do this if -freciprocal-math.  We can actually
         always safely do it if ARG1 is a power of two, but it's hard to
         tell if it is or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (flag_reciprocal_math
              && 0 != (tem = const_binop (code, build_real (type, dconst1),
                                          arg1, 0)))
            return fold_build2 (MULT_EXPR, type, arg0, tem);
          /* Find the reciprocal if optimizing and the result is exact.  */
          if (optimize)
            {
              REAL_VALUE_TYPE r;
              r = TREE_REAL_CST (arg1);
              if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
                {
                  tem = build_real (type, r);
                  return fold_build2 (MULT_EXPR, type,
                                      fold_convert (type, arg0), tem);
                }
            }
        }
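      /* E.g. "x / 4.0" becomes "x * 0.25" when optimizing, because the
         reciprocal of a power of two is exact; "x / 3.0" becomes
         "x * (1.0/3.0)" only under -freciprocal-math, since 1/3 rounds.  */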
      /* Convert A/B/C to A/(B*C).  */
      if (flag_reciprocal_math
          && TREE_CODE (arg0) == RDIV_EXPR)
        return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
                            fold_build2 (MULT_EXPR, type,
                                         TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_reciprocal_math
          && TREE_CODE (arg1) == RDIV_EXPR)
        return fold_build2 (MULT_EXPR, type,
                            fold_build2 (RDIV_EXPR, type, arg0,
                                         TREE_OPERAND (arg1, 0)),
                            TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_reciprocal_math
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (arg0) == REAL_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
        {
          tree tem = const_binop (RDIV_EXPR, arg0,
                                  TREE_OPERAND (arg1, 1), 0);
          if (tem)
            return fold_build2 (RDIV_EXPR, type, tem,
                                TREE_OPERAND (arg1, 0));
        }
      if (flag_unsafe_math_optimizations)
        {
          enum built_in_function fcode0 = builtin_mathfn_code (arg0);
          enum built_in_function fcode1 = builtin_mathfn_code (arg1);

          /* Optimize sin(x)/cos(x) as tan(x).  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                  CALL_EXPR_ARG (arg1, 0), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
            }
          /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
          if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                  CALL_EXPR_ARG (arg1, 0), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                {
                  tree tmp = build_call_expr (tanfn, 1,
                                              CALL_EXPR_ARG (arg0, 0));
                  return fold_build2 (RDIV_EXPR, type,
                                      build_real (type, dconst1), tmp);
                }
            }
          /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
             NaNs or Infinities.  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg1, 0);

              if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
                  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
                  && operand_equal_p (arg00, arg01, 0))
                {
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

                  if (cosfn != NULL_TREE)
                    return build_call_expr (cosfn, 1, arg00);
                }
            }
          /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
             NaNs or Infinities.  */
          if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg1, 0);

              if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
                  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
                  && operand_equal_p (arg00, arg01, 0))
                {
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

                  if (cosfn != NULL_TREE)
                    {
                      tree tmp = build_call_expr (cosfn, 1, arg00);
                      return fold_build2 (RDIV_EXPR, type,
                                          build_real (type, dconst1),
                                          tmp);
                    }
                }
            }
          /* Optimize pow(x,c)/x as pow(x,c-1).  */
          if (fcode0 == BUILT_IN_POW
              || fcode0 == BUILT_IN_POWF
              || fcode0 == BUILT_IN_POWL)
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg0, 1);
              if (TREE_CODE (arg01) == REAL_CST
                  && !TREE_OVERFLOW (arg01)
                  && operand_equal_p (arg1, arg00, 0))
                {
                  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  REAL_VALUE_TYPE c;
                  tree arg;

                  c = TREE_REAL_CST (arg01);
                  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
                  arg = build_real (type, c);
                  return build_call_expr (powfn, 2, arg1, arg);
                }
            }
          /* Optimize a/root(b/c) into a*root(c/b).  */
          if (BUILTIN_ROOT_P (fcode1))
            {
              tree rootarg = CALL_EXPR_ARG (arg1, 0);

              if (TREE_CODE (rootarg) == RDIV_EXPR)
                {
                  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
                  tree b = TREE_OPERAND (rootarg, 0);
                  tree c = TREE_OPERAND (rootarg, 1);

                  tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
                  tmp = build_call_expr (rootfn, 1, tmp);
                  return fold_build2 (MULT_EXPR, type, arg0, tmp);
                }
            }
          /* Optimize x/expN(y) into x*expN(-y).  */
          if (BUILTIN_EXPONENT_P (fcode1))
            {
              tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
              tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
              arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
              return fold_build2 (MULT_EXPR, type, arg0, arg1);
            }

          /* Optimize x/pow(y,z) into x*pow(y,-z).  */
          if (fcode1 == BUILT_IN_POW
              || fcode1 == BUILT_IN_POWF
              || fcode1 == BUILT_IN_POWL)
            {
              tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
              tree arg10 = CALL_EXPR_ARG (arg1, 0);
              tree arg11 = CALL_EXPR_ARG (arg1, 1);
              tree neg11 = fold_convert (type, negate_expr (arg11));
              arg1 = build_call_expr (powfn, 2, arg10, neg11);
              return fold_build2 (MULT_EXPR, type, arg0, arg1);
            }
        }
      return NULL_TREE;
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
         a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
        {
          tree sval = TREE_OPERAND (arg1, 0);
          if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
            {
              tree sh_cnt = TREE_OPERAND (arg1, 1);
              unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));

              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "A / (B << N)"),
                                       WARN_STRICT_OVERFLOW_MISC);

              sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
                                    sh_cnt, build_int_cst (NULL_TREE, pow2));
              return fold_build2 (RSHIFT_EXPR, type,
                                  fold_convert (type, arg0), sh_cnt);
            }
        }
      /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
         TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
      if (INTEGRAL_TYPE_P (type)
          && TYPE_UNSIGNED (type)
          && code == FLOOR_DIV_EXPR)
        return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);

      /* Fall thru */
    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
        return NULL_TREE;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
          && TREE_INT_CST_HIGH (arg1) == -1)
        return fold_convert (type, negate_expr (arg0));
      /* Convert -A / -B to A / B when the type is signed and overflow is
         undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg0) == NEGATE_EXPR
          && negate_expr_p (arg1))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2 (code, type,
                              fold_convert (type, TREE_OPERAND (arg0, 0)),
                              negate_expr (arg1));
        }
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg1) == NEGATE_EXPR
          && negate_expr_p (arg0))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2 (code, type, negate_expr (arg0),
                              TREE_OPERAND (arg1, 0));
        }
      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
         operation, EXACT_DIV_EXPR.

         Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
         At one time others generated faster code, it's not clear if they do
         after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
          && multiple_of_p (type, arg0, arg1))
        return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert (type, tem);
        }

      return NULL_TREE;
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
         effects in X.  */
      if (integer_onep (arg1))
        return omit_one_operand (type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
         proper warnings and errors.  */
      if (integer_zerop (arg1))
        return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
         effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
        return omit_one_operand (type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
          && TREE_INT_CST_HIGH (arg1) == -1)
        return omit_one_operand (type, integer_zero_node, arg0);
      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
         i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      strict_overflow_p = false;
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
        {
          tree c = arg1;
          /* Also optimize A % (C << N) where C is a power of 2,
             to A & ((C << N) - 1).  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR)
            c = TREE_OPERAND (arg1, 0);

          if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
            {
              tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
                                       build_int_cst (TREE_TYPE (arg1), 1));
              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "X % (power of two)"),
                                       WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (BIT_AND_EXPR, type,
                                  fold_convert (type, arg0),
                                  fold_convert (type, mask));
            }
        }
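      /* E.g. "x % 8" becomes "x & 7" when x is known non-negative.  */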
      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && !TREE_OVERFLOW (arg1)
          && TREE_INT_CST_HIGH (arg1) < 0
          && !TYPE_OVERFLOW_TRAPS (type)
          /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
          && !sign_bit_p (arg1, arg1))
        return fold_build2 (code, type, fold_convert (type, arg0),
                            fold_convert (type, negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == NEGATE_EXPR
          && !TYPE_OVERFLOW_TRAPS (type))
        return fold_build2 (code, type, fold_convert (type, arg0),
                            fold_convert (type, TREE_OPERAND (arg1, 0)));

      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying modulos"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert (type, tem);
        }

      return NULL_TREE;
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
        return omit_one_operand (type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
        return omit_one_operand (type, arg0, arg1);
      /* ... fall through ...  */
    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg0))
        return omit_one_operand (type, arg0, arg1);

      /* Since negative shift count is not well-defined,
         don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
        return NULL_TREE;
      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && host_integerp (arg1, false)
          && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
          && host_integerp (TREE_OPERAND (arg0, 1), false)
          && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
             < TYPE_PRECISION (type))
        {
          HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
                               + TREE_INT_CST_LOW (arg1));

          /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
             being well defined.  */
          if (low >= TYPE_PRECISION (type))
            {
              if (code == LROTATE_EXPR || code == RROTATE_EXPR)
                low = low % TYPE_PRECISION (type);
              else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
                return build_int_cst (type, 0);
              else
                low = TYPE_PRECISION (type) - 1;
            }

          return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
                              build_int_cst (type, low));
        }
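      /* E.g. "(x << 3) << 5" becomes "x << 8"; for 32-bit unsigned x,
         "(x >> 20) >> 20" becomes the constant 0 because the combined
         count reaches the type's precision.  */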
      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
         into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
           || (TYPE_UNSIGNED (type)
               && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
          && host_integerp (arg1, false)
          && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
          && host_integerp (TREE_OPERAND (arg0, 1), false)
          && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
             < TYPE_PRECISION (type))
        {
          HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
          HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
          tree lshift;
          tree arg00;

          if (low0 == low1)
            {
              arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));

              lshift = build_int_cst (type, -1);
              lshift = int_const_binop (code, lshift, arg1, 0);

              return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
            }
        }
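      /* E.g. for 32-bit unsigned x, "(x >> 4) << 4" becomes
         "x & 0xfffffff0": the round trip just clears the low bits.  */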
      /* Rewrite an LROTATE_EXPR by a constant into an
         RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
        {
          tree tem = build_int_cst (TREE_TYPE (arg1),
                                    TYPE_PRECISION (type));
          tem = const_binop (MINUS_EXPR, tem, arg1, 0);
          return fold_build2 (RROTATE_EXPR, type, op0, tem);
        }
      /* If we have a rotate of a bit operation with the rotate count and
         the second operand of the bit operation both constant,
         permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == BIT_AND_EXPR
              || TREE_CODE (arg0) == BIT_IOR_EXPR
              || TREE_CODE (arg0) == BIT_XOR_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2 (TREE_CODE (arg0), type,
                            fold_build2 (code, type,
                                         TREE_OPERAND (arg0, 0), arg1),
                            fold_build2 (code, type,
                                         TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to the precision of the
         type can be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == RROTATE_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (arg1) == 0
          && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
          && ((TREE_INT_CST_LOW (arg1)
               + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
              == (unsigned int) TYPE_PRECISION (type)))
        return TREE_OPERAND (arg0, 0);
      /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
              (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
         if the latter can be further optimized.  */
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree mask = fold_build2 (code, type,
                                   fold_convert (type,
                                                 TREE_OPERAND (arg0, 1)),
                                   arg1);
          tree shift = fold_build2 (code, type,
                                    fold_convert (type,
                                                  TREE_OPERAND (arg0, 0)),
                                    arg1);
          tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
          if (tem)
            return tem;
        }

      return NULL_TREE;
    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand (type, arg1, arg0);
      tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
      if (tem)
        return tem;
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && TYPE_MAX_VALUE (type)
          && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand (type, arg1, arg0);
      tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
      if (tem)
        return tem;
      goto associate;
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
        return fold_convert (type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
         must be evaluated.  */
      if (integer_zerop (arg1))
        return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
         case will be handled here.  */
      if (integer_zerop (arg0))
        return omit_one_operand (type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand (type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand (type, integer_zero_node, arg0);
      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
         means A >= Y && A != MAX, but in this case we know that
         A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
          && !TREE_SIDE_EFFECTS (arg1))
        {
          tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
          if (tem && !operand_equal_p (tem, arg0, 0))
            return fold_build2 (code, type, tem, arg1);

          tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
          if (tem && !operand_equal_p (tem, arg1, 0))
            return fold_build2 (code, type, arg0, tem);
        }
    truth_andor:
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
        return NULL_TREE;

      /* Check for things like (A || B) && (A || C).  We can convert this
         to A || (B && C).  Note that either operator can be any of the four
         truth and/or operations and the transformation will still be
         valid.  Also note that we only care about order for the
         ANDIF and ORIF operators.  If B contains side effects, this
         might change the truth-value of A.  */
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
          && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
              || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
              || TREE_CODE (arg0) == TRUTH_AND_EXPR
              || TREE_CODE (arg0) == TRUTH_OR_EXPR)
          && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
        {
          tree a00 = TREE_OPERAND (arg0, 0);
          tree a01 = TREE_OPERAND (arg0, 1);
          tree a10 = TREE_OPERAND (arg1, 0);
          tree a11 = TREE_OPERAND (arg1, 1);
          int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
                              || TREE_CODE (arg0) == TRUTH_AND_EXPR)
                             && (code == TRUTH_AND_EXPR
                                 || code == TRUTH_OR_EXPR));

          if (operand_equal_p (a00, a10, 0))
            return fold_build2 (TREE_CODE (arg0), type, a00,
                                fold_build2 (code, type, a01, a11));
          else if (commutative && operand_equal_p (a00, a11, 0))
            return fold_build2 (TREE_CODE (arg0), type, a00,
                                fold_build2 (code, type, a01, a10));
          else if (commutative && operand_equal_p (a01, a10, 0))
            return fold_build2 (TREE_CODE (arg0), type, a01,
                                fold_build2 (code, type, a00, a11));

          /* This case is tricky because we must either have commutative
             operators or else A10 must not have side-effects.  */

          else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
                   && operand_equal_p (a01, a11, 0))
            return fold_build2 (TREE_CODE (arg0), type,
                                fold_build2 (code, type, a00, a10),
                                a01);
        }

      /* See if we can build a range comparison.  */
      if (0 != (tem = fold_range_test (code, type, op0, op1)))
        return tem;
      /* Check for the possibility of merging component references.  If our
         lhs is another similar operation, try to merge its rhs with our
         rhs.  Then try to merge our lhs and rhs.  */
      if (TREE_CODE (arg0) == code
          && 0 != (tem = fold_truthop (code, type,
                                       TREE_OPERAND (arg0, 1), arg1)))
        return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
        return tem;

      return NULL_TREE;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or true.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return fold_convert (type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
        return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant true, result is true, but we must
         evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
        return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
         TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return omit_one_operand (type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand (type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand (type, integer_one_node, arg0);

      goto truth_andor;
    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
        {
          /* Only call invert_truthvalue if operand is a truth value.  */
          if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
            tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
          else
            tem = invert_truthvalue (arg0);
          return non_lvalue (fold_convert (type, tem));
        }
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand (type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand (type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand (type, integer_one_node, arg0);

      return NULL_TREE;

    case EQ_EXPR:
    case NE_EXPR:
      tem = fold_comparison (code, type, op0, op1);
      if (tem != NULL_TREE)
        return tem;

      /* bool_var != 0 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
          && code == NE_EXPR)
        return non_lvalue (fold_convert (type, arg0));

      /* bool_var == 1 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
          && code == EQ_EXPR)
        return non_lvalue (fold_convert (type, arg0));

      /* bool_var != 1 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
          && code == NE_EXPR)
        return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));

      /* bool_var == 0 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
          && code == EQ_EXPR)
        return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
      /* If this is an equality comparison of the address of two non-weak,
         unaliased symbols neither of which are extern (since we do not
         have access to attributes for externs), then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
          && ! lookup_attribute ("alias",
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
          && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
          && TREE_CODE (arg1) == ADDR_EXPR
          && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
          && ! lookup_attribute ("alias",
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
          && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
        {
          /* We know that we're looking at the address of two
             non-weak, unaliased, static _DECL nodes.

             It is both wasteful and incorrect to call operand_equal_p
             to compare the two ADDR_EXPR nodes.  It is wasteful in that
             all we need to do is test pointer equality for the arguments
             to the two ADDR_EXPR nodes.  It is incorrect to use
             operand_equal_p as that function is NOT equivalent to a
             C equality test.  It can in fact return false for two
             objects which would test as equal using the C equality
             operator.  */
          bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
          return constant_boolean_node (equal
                                        ? code == EQ_EXPR : code != EQ_EXPR,
                                        type);
        }

      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
	 a MINUS_EXPR of a constant, we can convert it into a comparison with
	 a revised constant as long as no overflow occurs.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      fold_convert (TREE_TYPE (arg0), arg1),
				      TREE_OPERAND (arg0, 1), 0))
	  && !TREE_OVERFLOW (tem))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a NEGATE_EXPR.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = negate_expr (arg1))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			    fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
					 fold_convert (TREE_TYPE (arg0), arg1),
					 TREE_OPERAND (arg0, 1)));

      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      || POINTER_TYPE_P (TREE_TYPE (arg0))))
	{
	  tree cst = TREE_OPERAND (arg0, 1);

	  if (code == EQ_EXPR
	      && !integer_zerop (cst))
	    return omit_two_operands (type, boolean_false_node,
				      TREE_OPERAND (arg0, 0), arg1);
	  else
	    return omit_two_operands (type, boolean_true_node,
				      TREE_OPERAND (arg0, 0), arg1);
	}

      /* If we have X - Y == 0, we can convert that to X == Y and similarly
	 for !=.  Don't do this for ordered comparisons due to overflow.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && integer_zerop (arg1))
	return fold_build2 (code, type,
			    TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1)))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);

      /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	 two operations, but the latter can be done in one less insn
	 on machines that have only two-operand insns or on which a
	 constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_zerop (arg1))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  if (TREE_CODE (arg00) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg00, 0)))
	    {
	      tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
				      arg01, TREE_OPERAND (arg00, 1));
	      tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				 build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2 (code, type,
				  fold_convert (TREE_TYPE (arg1), tem), arg1);
	    }
	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (arg01, 0)))
	    {
	      tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
				      arg00, TREE_OPERAND (arg01, 1));
	      tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				 build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2 (code, type,
				  fold_convert (TREE_TYPE (arg1), tem), arg1);
	    }
	}
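      /* E.g. ((1 << n) & flags) != 0 becomes ((flags >> n) & 1) != 0:
	 the variable-count shift now applies to the other operand, so
	 the constant 1 never needs to be loaded into a register as the
	 first operand of a shift.  */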

      /* If this is an NE or EQ comparison of zero against the result of a
	 signed MOD operation whose second operand is a power of 2, make
	 the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
	  tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
				     fold_convert (newtype,
						   TREE_OPERAND (arg0, 0)),
				     fold_convert (newtype,
						   TREE_OPERAND (arg0, 1)));

	  return fold_build2 (code, type, newmod,
			      fold_convert (newtype, arg1));
	}
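      /* For instance, with int x, x % 4 == 0 tests only whether the low
	 two bits of x are clear, which is the same for the signed and
	 the unsigned remainder; as (unsigned) x % 4U == 0 the MOD can be
	 expanded as a simple mask instead of a signed division.  */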

      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
	 C1 is a valid shift constant, and C2 is a power of two, i.e.
	 a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
	     == INTEGER_CST
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && integer_zerop (arg1))
	{
	  tree itype = TREE_TYPE (arg0);
	  unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);

	  /* Check for a valid shift count.  */
	  if (TREE_INT_CST_HIGH (arg001) == 0
	      && TREE_INT_CST_LOW (arg001) < prec)
	    {
	      tree arg01 = TREE_OPERAND (arg0, 1);
	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
		 can be rewritten as (X & (C2 << C1)) != 0.  */
	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
		{
		  tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
		  tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
		  return fold_build2 (code, type, tem, arg1);
		}
	      /* Otherwise, for signed (arithmetic) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
	      else if (!TYPE_UNSIGNED (itype))
		return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
				    arg000, build_int_cst (itype, 0));
	      /* Otherwise, for unsigned (logical) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
	      else
		return omit_one_operand (type,
					 code == EQ_EXPR ? integer_one_node
							 : integer_zero_node,
					 arg000);
	    }
	}
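      /* Worked example, assuming a 32-bit int: for ((x >> 28) & 2) != 0
	 we have C1 == 28 and log2 (C2) == 1; since 1 + 28 < 32 the
	 shifted mask does not overflow and the test becomes
	 (x & (2 << 28)) != 0.  For ((x >> 28) & 16) != 0 instead,
	 4 + 28 == 32, so the signed case falls back to x < 0 and the
	 unsigned case is known to be false outright.  */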

      /* If this is an NE comparison of zero with an AND of one, remove the
	 comparison since the AND will give the correct value.  */
      if (code == NE_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1)))
	return fold_convert (type, arg0);

      /* If we have (A & C) == C where C is a power of 2, convert this into
	 (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
			    arg0, fold_convert (TREE_TYPE (arg0),
						integer_zero_node));
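      /* E.g. (x & 8) == 8 becomes (x & 8) != 0: with a single-bit mask
	 the AND result is either 0 or the bit itself, so comparing
	 against the bit and comparing against zero are equivalent, and
	 the latter form is cheaper to branch on.  */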

      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
	 bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
      if (tem)
	return tem;

      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notc = fold_build1 (BIT_NOT_EXPR,
				   TREE_TYPE (TREE_OPERAND (arg0, 1)),
				   TREE_OPERAND (arg0, 1));
	  tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				       arg1, notc);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (dandnotc))
	    return omit_one_operand (type, rslt, arg0);
	}

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
	  tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				       TREE_OPERAND (arg0, 1), notd);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (candnotd))
	    return omit_one_operand (type, rslt, arg0);
	}

      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
	  && integer_zerop (arg1))
	{
	  tree fndecl = get_callee_fndecl (arg0);

	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && call_expr_nargs (arg0) == 1
	      && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
	    {
	      tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
	      return fold_build2 (code, type, iref,
				  build_int_cst (TREE_TYPE (iref), 0));
	    }
	}

      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree itype = TREE_TYPE (arg00);
	  if (TREE_INT_CST_HIGH (arg01) == 0
	      && TREE_INT_CST_LOW (arg01)
		 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
	    {
	      if (TYPE_UNSIGNED (itype))
		{
		  itype = signed_type_for (itype);
		  arg00 = fold_convert (itype, arg00);
		}
	      return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				  type, arg00, build_int_cst (itype, 0));
	    }
	}

      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
      if (integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_XOR_EXPR)
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			    TREE_OPERAND (arg0, 1));

      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			    build_int_cst (TREE_TYPE (arg1), 0));
      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
			    build_int_cst (TREE_TYPE (arg1), 0));

      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			    fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
					 TREE_OPERAND (arg0, 1), arg1));

      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
	 (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
			     TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
			     TREE_OPERAND (arg0, 1));
	  return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
			      type, tem, arg1);
	}

      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
	 constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
			      arg00, build_int_cst (TREE_TYPE (arg00), 0));
	}

      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
	 when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	  tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
			     arg000, TREE_OPERAND (arg0, 1));
	  return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
			      tem, build_int_cst (TREE_TYPE (tem), 0));
	}

      if (integer_zerop (arg1)
	  && tree_expr_nonzero_p (arg0))
	{
	  tree res = constant_boolean_node (code == NE_EXPR, type);
	  return omit_one_operand (type, res, arg0);
	}

      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2 (code, type,
			    TREE_OPERAND (arg0, 0),
			    TREE_OPERAND (arg1, 0));

      /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2 (code, type,
				fold_build2 (BIT_AND_EXPR, itype,
					     fold_build2 (BIT_XOR_EXPR, itype,
							  arg00, arg10),
					     arg01),
				build_int_cst (itype, 0));

	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2 (code, type,
				fold_build2 (BIT_AND_EXPR, itype,
					     fold_build2 (BIT_XOR_EXPR, itype,
							  arg00, arg11),
					     arg01),
				build_int_cst (itype, 0));

	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2 (code, type,
				fold_build2 (BIT_AND_EXPR, itype,
					     fold_build2 (BIT_XOR_EXPR, itype,
							  arg01, arg10),
					     arg00),
				build_int_cst (itype, 0));

	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2 (code, type,
				fold_build2 (BIT_AND_EXPR, itype,
					     fold_build2 (BIT_XOR_EXPR, itype,
							  arg01, arg11),
					     arg00),
				build_int_cst (itype, 0));
	}
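      /* The identity behind the four folds above: (x & c) == (y & c)
	 exactly when x and y agree on every bit of c, i.e. when
	 ((x ^ y) & c) == 0.  E.g. x == 0x12, y == 0x32, c == 0x0f:
	 both sides mask to 0x02, and (x ^ y) & c == 0x20 & 0x0f == 0.  */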

      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
	     operand_equal_p guarantees no side-effects so we don't need
	     to use omit_one_operand on Z.  */
	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2 (code, type, arg00, arg10);
	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2 (code, type, arg00, arg11);
	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2 (code, type, arg01, arg10);
	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2 (code, type, arg01, arg11);

	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
	  if (TREE_CODE (arg01) == INTEGER_CST
	      && TREE_CODE (arg11) == INTEGER_CST)
	    return fold_build2 (code, type,
				fold_build2 (BIT_XOR_EXPR, itype, arg00,
					     fold_build2 (BIT_XOR_EXPR, itype,
							  arg01, arg11)),
				arg10);
	}

      /* Attempt to simplify equality/inequality comparisons of complex
	 values.  Only lower the comparison if the result is known or
	 can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
	   || TREE_CODE (arg0) == COMPLEX_CST)
	  && (TREE_CODE (arg1) == COMPLEX_EXPR
	      || TREE_CODE (arg1) == COMPLEX_CST))
	{
	  tree real0, imag0, real1, imag1;
	  tree rcond, icond;

	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
	    {
	      real0 = TREE_OPERAND (arg0, 0);
	      imag0 = TREE_OPERAND (arg0, 1);
	    }
	  else
	    {
	      real0 = TREE_REALPART (arg0);
	      imag0 = TREE_IMAGPART (arg0);
	    }

	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
	    {
	      real1 = TREE_OPERAND (arg1, 0);
	      imag1 = TREE_OPERAND (arg1, 1);
	    }
	  else
	    {
	      real1 = TREE_REALPART (arg1);
	      imag1 = TREE_IMAGPART (arg1);
	    }

	  rcond = fold_binary (code, type, real0, real1);
	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
	    {
	      if (integer_zerop (rcond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands (type, boolean_false_node,
					      imag0, imag1);
		  return fold_build2 (NE_EXPR, type, imag0, imag1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands (type, boolean_true_node,
					      imag0, imag1);
		  return fold_build2 (EQ_EXPR, type, imag0, imag1);
		}
	    }

	  icond = fold_binary (code, type, imag0, imag1);
	  if (icond && TREE_CODE (icond) == INTEGER_CST)
	    {
	      if (integer_zerop (icond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands (type, boolean_false_node,
					      real0, real1);
		  return fold_build2 (NE_EXPR, type, real0, real1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands (type, boolean_true_node,
					      real0, real1);
		  return fold_build2 (EQ_EXPR, type, real0, real1);
		}
	    }
	}
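      /* E.g. COMPLEX_EXPR <x, 1.0> == COMPLEX_EXPR <y, 1.0>: the
	 imaginary halves fold to the constant true, so the whole test
	 lowers to the single scalar comparison x == y; had they folded
	 to false, an EQ_EXPR would be known false outright.  */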

      return NULL_TREE;

    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	       && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  enum tree_code code0 = TREE_CODE (arg0);
	  int is_positive;

	  if (TREE_CODE (arg01) == REAL_CST)
	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
	  else
	    is_positive = tree_int_cst_sgn (arg01);

	  /* (X - c) > X becomes false.  */
	  if (code == GT_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that (X - c) > X "
					"is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Likewise (X + c) < X becomes false.  */
	  if (code == LT_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) < X is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Convert (X - c) <= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == LE_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X - c) <= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  /* Convert (X + c) >= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == GE_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) >= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  if (TREE_CODE (arg01) == INTEGER_CST)
	    {
	      /* Convert X + c > X and X - c < X to true for integers.  */
	      if (code == GT_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) > X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      if (code == LT_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) < X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      /* Convert X + c <= X and X - c >= X to false for integers.  */
	      if (code == LE_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) <= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}

	      if (code == GE_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) >= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}
	    }
	}
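      /* The guards above matter: with wrapping signed arithmetic
	 (-fwrapv), x + 1 > x is false for x == INT_MAX, so these folds
	 only fire when signed overflow is undefined (or, for floats,
	 when signaling NaNs need not be honored), and each one can be
	 reported via -Wstrict-overflow.  */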

      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
	 This transformation affects the cases which are handled in later
	 optimizations involving comparisons with non-negative constants.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) != INTEGER_CST
	  && tree_int_cst_sgn (arg1) > 0)
	{
	  if (code == GE_EXPR)
	    {
	      arg1 = const_binop (MINUS_EXPR, arg1,
				  build_int_cst (TREE_TYPE (arg1), 1), 0);
	      return fold_build2 (GT_EXPR, type, arg0,
				  fold_convert (TREE_TYPE (arg0), arg1));
	    }
	  if (code == LT_EXPR)
	    {
	      arg1 = const_binop (MINUS_EXPR, arg1,
				  build_int_cst (TREE_TYPE (arg1), 1), 0);
	      return fold_build2 (LE_EXPR, type, arg0,
				  fold_convert (TREE_TYPE (arg0), arg1));
	    }
	}
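      /* E.g. x >= 5 becomes x > 4 and x < 5 becomes x <= 4, so the
	 later folds only have to recognize the GT/LE forms of a
	 comparison against a non-negative constant.  */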

      /* Comparisons with the highest or lowest possible integer of
	 the specified precision will have known values.  */
      {
	tree arg1_type = TREE_TYPE (arg1);
	unsigned int width = TYPE_PRECISION (arg1_type);

	if (TREE_CODE (arg1) == INTEGER_CST
	    && !TREE_OVERFLOW (arg1)
	    && width <= 2 * HOST_BITS_PER_WIDE_INT
	    && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
	  {
	    HOST_WIDE_INT signed_max_hi;
	    unsigned HOST_WIDE_INT signed_max_lo;
	    unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;

	    if (width <= HOST_BITS_PER_WIDE_INT)
	      {
		signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		signed_max_hi = 0;
		max_hi = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_lo = 0;
		    min_hi = 0;
		  }
		else
		  {
		    max_lo = signed_max_lo;
		    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		    min_hi = -1;
		  }
	      }
	    else
	      {
		width -= HOST_BITS_PER_WIDE_INT;
		signed_max_lo = -1;
		signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		max_lo = -1;
		min_lo = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_hi = 0;
		  }
		else
		  {
		    max_hi = signed_max_hi;
		    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		  }
	      }

	    if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
		&& TREE_INT_CST_LOW (arg1) == max_lo)
	      switch (code)
		{
		case GT_EXPR:
		  return omit_one_operand (type, integer_zero_node, arg0);

		case GE_EXPR:
		  return fold_build2 (EQ_EXPR, type, op0, op1);

		case LE_EXPR:
		  return omit_one_operand (type, integer_one_node, arg0);

		case LT_EXPR:
		  return fold_build2 (NE_EXPR, type, op0, op1);

		/* The GE_EXPR and LT_EXPR cases above are not normally
		   reached because of previous transformations.  */

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == max_hi
		     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
	      switch (code)
		{
		case GT_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1), 0);
		  return fold_build2 (EQ_EXPR, type,
				      fold_convert (TREE_TYPE (arg1), arg0),
				      arg1);
		case LE_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1), 0);
		  return fold_build2 (NE_EXPR, type,
				      fold_convert (TREE_TYPE (arg1), arg0),
				      arg1);
		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo)
	      switch (code)
		{
		case LT_EXPR:
		  return omit_one_operand (type, integer_zero_node, arg0);

		case LE_EXPR:
		  return fold_build2 (EQ_EXPR, type, op0, op1);

		case GE_EXPR:
		  return omit_one_operand (type, integer_one_node, arg0);

		case GT_EXPR:
		  return fold_build2 (NE_EXPR, type, op0, op1);

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
	      switch (code)
		{
		case GE_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		  return fold_build2 (NE_EXPR, type,
				      fold_convert (TREE_TYPE (arg1), arg0),
				      arg1);
		case LT_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		  return fold_build2 (EQ_EXPR, type,
				      fold_convert (TREE_TYPE (arg1), arg0),
				      arg1);
		default:
		  break;
		}

	    else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
		     && TREE_INT_CST_LOW (arg1) == signed_max_lo
		     && TYPE_UNSIGNED (arg1_type)
		     /* We will flip the signedness of the comparison operator
			associated with the mode of arg1, so the sign bit is
			specified by this mode.  Check that arg1 is the signed
			max associated with this sign bit.  */
		     && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
		     /* signed_type does not work on pointer types.  */
		     && INTEGRAL_TYPE_P (arg1_type))
	      {
		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because of previous
		   transformations.  */
		if (code == LE_EXPR || code == GT_EXPR)
		  {
		    tree st;
		    st = signed_type_for (TREE_TYPE (arg1));
		    return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
					type, fold_convert (st, arg0),
					build_int_cst (st, 0));
		  }
	      }
	  }
      }
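      /* Example of the final signedness flip, assuming a 32-bit int:
	 for unsigned int x, x > 0x7fffffff asks exactly whether the
	 sign bit is set, so it becomes (int) x < 0; likewise
	 x <= 0x7fffffff becomes (int) x >= 0.  */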

      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      if (code == LE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == ABS_EXPR
	  && ! TREE_SIDE_EFFECTS (arg0)
	  && (0 != (tem = negate_expr (arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2 (TRUTH_ANDIF_EXPR, type,
			    build2 (GE_EXPR, type,
				    TREE_OPERAND (arg0, 0), tem),
			    build2 (LE_EXPR, type,
				    TREE_OPERAND (arg0, 0), arg1));

      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
	  && (integer_zerop (arg1)
	      || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
		  && real_zerop (arg1)))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand (type, integer_one_node, arg0);
	}

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand (type, integer_zero_node, arg0);
	}

      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
		       build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			       TREE_OPERAND (arg1, 1)),
		       build_int_cst (TREE_TYPE (arg0), 0));
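      /* For unsigned x, x < (1 << y) holds exactly when x has no bits
	 at or above position y, i.e. when x >> y == 0; e.g. x < 8 is
	 x >> 3 == 0.  The >= case is the complementary != test.  */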

      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg1) == NOP_EXPR
	      || TREE_CODE (arg1) == CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	return
	  build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
		  fold_convert (TREE_TYPE (arg0),
				build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
					TREE_OPERAND (TREE_OPERAND (arg1, 0),
						      1))),
		  build_int_cst (TREE_TYPE (arg0), 0));

      return NULL_TREE;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	{
	  t1 = fold_relational_const (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand (type, t1, arg1);
	}

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand (type, t1, arg0);
	}

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
	  && !flag_trapping_math
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (0, type);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold_build2 (code, type, fold_convert (newtype, targ0),
			      fold_convert (newtype, targ1));
      }

      return NULL_TREE;

    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert (type, arg1);
      return pedantic_non_lvalue (tem);

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
	   && TREE_CODE (arg1) == REAL_CST)
	  || (TREE_CODE (arg0) == INTEGER_CST
	      && TREE_CODE (arg1) == INTEGER_CST))
	return build_complex (type, arg0, arg1);
      return NULL_TREE;

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();

    default:
      return NULL_TREE;
    } /* switch (code) */
}

/* Callback for walk_tree, looking for LABEL_EXPR.
   Returns tree TP if it is LABEL_EXPR.  Otherwise it returns NULL_TREE.
   Do not check the sub-tree of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp,
		  int *walk_subtrees,
		  void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;

    case GOTO_EXPR:
      *walk_subtrees = 0;

      /* ... fall through ...  */

    default:
      return NULL_TREE;
    }
}

/* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
   accessible from outside the sub-tree.  Returns false if no
   addressable label is found.  */

static bool
contains_label_p (tree st)
{
  return (walk_tree (&st, contains_label_1, NULL, NULL) != NULL_TREE);
}
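
/* The COND_EXPR folding in fold_ternary below uses this to avoid
   discarding a dead arm that still contains a LABEL_EXPR: the label
   could be the target of a goto from outside the discarded branch, so
   the branch must be preserved even when the condition is constant.  */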

/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any case,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */

  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
	    if (field == arg1)
	      return value;
	}
      return NULL_TREE;

    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
	  tem = integer_zerop (arg0) ? op2 : op1;
	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.
	     Avoid throwing away that operand which contains label.  */
	  if ((!TREE_SIDE_EFFECTS (unused_op)
	       || !contains_label_p (unused_op))
	      && (! VOID_TYPE_P (TREE_TYPE (tem))
		  || VOID_TYPE_P (type)))
	    return pedantic_non_lvalue (tem);
	  return NULL_TREE;
	}

      if (operand_equal_p (arg1, op2, 0))
	return pedantic_omit_one_operand (type, arg1, arg0);

      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.

	 Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
	  if (tem)
	    return tem;
	}

      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     op2,
					     TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
	{
	  tem = fold_truth_not_expr (arg0);
	  if (tem && COMPARISON_CLASS_P (tem))
	    {
	      tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
	      if (tem)
		return tem;
	    }
	}

      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
	  && tree_swap_operands_p (op1, op2, false))
	{
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = fold_truth_not_expr (arg0);
	  if (tem)
	    return fold_build3 (code, type, tem, op2, op1);
	}

      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (op1)
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue (arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue (fold_convert (type,
						  invert_truthvalue (arg0)));

      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
	{
	  /* sign_bit_p only checks ARG1 bits within A's precision.
	     If <sign bit of A> has wider type than A, bits outside
	     of A's precision in <sign bit of A> need to be checked.
	     If they are all 0, this optimization needs to be done
	     in unsigned A's type, if they are all 1 in signed A's type,
	     otherwise this can't be done.  */
	  if (TYPE_PRECISION (TREE_TYPE (tem))
	      < TYPE_PRECISION (TREE_TYPE (arg1))
	      && TYPE_PRECISION (TREE_TYPE (tem))
		 < TYPE_PRECISION (type))
	    {
	      unsigned HOST_WIDE_INT mask_lo;
	      HOST_WIDE_INT mask_hi;
	      int inner_width, outer_width;
	      tree tem_type;

	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
	      if (outer_width > TYPE_PRECISION (type))
		outer_width = TYPE_PRECISION (type);

	      if (outer_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi = ((unsigned HOST_WIDE_INT) -1
			     >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
		  mask_lo = -1;
		}
	      else
		{
		  mask_hi = 0;
		  mask_lo = ((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_WIDE_INT - outer_width));
		}
	      if (inner_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi &= ~((unsigned HOST_WIDE_INT) -1
			       >> (HOST_BITS_PER_WIDE_INT - inner_width));
		  mask_lo = 0;
		}
	      else
		mask_lo &= ~((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_WIDE_INT - inner_width));

	      if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
		  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
		{
		  tem_type = signed_type_for (TREE_TYPE (tem));
		  tem = fold_convert (tem_type, tem);
		}
	      else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
		       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
		{
		  tem_type = unsigned_type_for (TREE_TYPE (tem));
		  tem = fold_convert (tem_type, tem);
		}
	      else
		tem = NULL_TREE;
	    }

	  if (tem)
	    return fold_convert (type,
				 fold_build2 (BIT_AND_EXPR,
					      TREE_TYPE (tem), tem,
					      fold_convert (TREE_TYPE (tem),
							    arg1)));
	}

      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
	 already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && integer_pow2p (arg1))
	{
	  tree tem = TREE_OPERAND (arg0, 0);
	  STRIP_NOPS (tem);
	  if (TREE_CODE (tem) == RSHIFT_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
		 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
	    return fold_build2 (BIT_AND_EXPR, type,
				TREE_OPERAND (tem, 0), arg1);
	}

      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
	 is probably obsolete because the first operand should be a
	 truth value (that's why we have the two cases above), but let's
	 leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, OEP_ONLY_CONST))
	return pedantic_non_lvalue (fold_convert (type,
						  TREE_OPERAND (arg0, 0)));

      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	return fold_build2 (TRUTH_ANDIF_EXPR, type,
			    fold_convert (type, arg0),
			    arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_truth_not_expr (arg0);
	  if (tem)
	    return fold_build2 (TRUTH_ORIF_EXPR, type,
				fold_convert (type, tem),
				arg1);
	}

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_truth_not_expr (arg0);
	  if (tem)
	    return fold_build2 (TRUTH_ANDIF_EXPR, type,
				fold_convert (type, tem),
				op2);
	}

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (integer_onep (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	return fold_build2 (TRUTH_ORIF_EXPR, type,
			    fold_convert (type, arg0),
			    op2);

      return NULL_TREE;

    case CALL_EXPR:
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
	 of fold_ternary on them.  */
      gcc_unreachable ();

    case BIT_FIELD_REF:
      if ((TREE_CODE (arg0) == VECTOR_CST
	   || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
	  && type == TREE_TYPE (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
	  unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);

	  if (width != 0
	      && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
	      && (idx % width) == 0
	      && (idx = idx / width)
		 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
	    {
	      tree elements = NULL_TREE;

	      if (TREE_CODE (arg0) == VECTOR_CST)
		elements = TREE_VECTOR_CST_ELTS (arg0);
	      else
		{
		  unsigned HOST_WIDE_INT idx;
		  tree value;

		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
		    elements = tree_cons (NULL_TREE, value, elements);
		}
	      while (idx-- > 0 && elements)
		elements = TREE_CHAIN (elements);
	      if (elements)
		return TREE_VALUE (elements);
	      else
		return fold_convert (type, integer_zero_node);
	    }
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}

/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
	{
	  tem = fold_call_expr (expr, false);
	  return tem ? tem : expr;
	}
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind)
      || IS_GIMPLE_STMT_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary (code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary (code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary (code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}

#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}

void
print_fold_checksum (const_tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_delete (ht);

  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}

static void
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}

static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
{
  const void **slot;
  enum tree_code code;
  struct tree_function_decl buf;
  int i, len;

recursive_label:

  gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
	       <= sizeof (struct tree_function_decl))
	      && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
  if (expr == NULL)
    return;
  slot = (const void **) htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      if (TYPE_CACHED_VALUES_P (tmp))
	{
	  TYPE_CACHED_VALUES_P (tmp) = 0;
	  TYPE_CACHED_VALUES (tmp) = NULL;
	}
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME)
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
	{
	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
	}
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}

/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   outputs differ.  */

void
debug_fold_checksum (const_tree t)
{
  int i;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);

  md5_init_ctx (&ctx);
  fold_checksum_tree (t, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_empty (ht);

  for (i = 0; i < 16; i++)
    fprintf (stderr, "%d ", checksum[i]);

  fprintf (stderr, "\n");
}

#endif

/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with an
   operand OP0.  */

tree
fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);
#endif

  tem = fold_unary (code, type, op0);
  if (!tem)
    tem = build1_stat (code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}

/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  Return a folded expression if successful.
   Otherwise, return a tree expression with code CODE of type TYPE
   with operands OP0 and OP1.  */

tree
fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
		  MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_after_op0[16],
                checksum_after_op1[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);
#endif

  tem = fold_binary (code, type, op0, op1);
  if (!tem)
    tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_delete (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}

/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
		  MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_before_op2[16],
                checksum_after_op0[16],
                checksum_after_op1[16],
                checksum_after_op2[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  htab_empty (ht);
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary (code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_empty (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);
  htab_delete (ht);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
   arguments in ARGARRAY, and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */

tree
fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
                checksum_before_arglist[16],
                checksum_after_fn[16],
                checksum_after_arglist[16];
  struct md5_ctx ctx;
  htab_t ht;
  int i;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  htab_empty (ht);
#endif

  tem = fold_builtin_call_array (type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  htab_empty (ht);

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);
  htab_delete (ht);

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}
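
/* An illustrative, stand-alone sketch of the checking discipline used by
   the fold_build* wrappers above (not part of the GCC sources; a toy
   byte hash stands in for md5, and CHECKED_CALL for the before/after
   memcmp of checksum_before_* and checksum_after_* around the fold):  */
#if 0
#include <stdlib.h>
#include <stddef.h>

/* Toy stand-in for md5_init_ctx/md5_finish_ctx over an object's bytes.  */
static unsigned char
toy_checksum (const void *p, size_t len)
{
  const unsigned char *b = (const unsigned char *) p;
  unsigned char sum = 0;
  while (len--)
    sum = (unsigned char) (sum * 31u + *b++);
  return sum;
}

/* Call FN on &OBJ and abort if FN mutated OBJ in place, mirroring
   fold_check_failed in the real code.  */
#define CHECKED_CALL(fn, obj)						\
  do {									\
    unsigned char before_ = toy_checksum (&(obj), sizeof (obj));	\
    fn (&(obj));							\
    if (before_ != toy_checksum (&(obj), sizeof (obj)))			\
      abort ();								\
  } while (0)
#endif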
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer (enum tree_code code, tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1 (code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2 (code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
			 tree op2)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build3 (code, type, op0, op1, op2);

  END_FOLD_INIT;
  return result;
}

tree
fold_build_call_array_initializer (tree type, tree fn,
				   int nargs, tree *argarray)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build_call_array (type, fn, nargs, argarray);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
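
/* An illustrative sketch, not part of the GCC sources: the *_initializer
   wrappers simply bracket the ordinary fold_buildN call between
   START_FOLD_INIT and END_FOLD_INIT, so folding that must normally
   preserve run-time traps (e.g. under -ftrapv or -ftrapping-math) is
   permitted to complete inside a static initializer:  */
#if 0
  /* Hypothetical usage; assumes the usual tree globals
     (integer_type_node etc.) are in scope.  */
  tree init = fold_build2_initializer (PLUS_EXPR, integer_type_node,
				       TYPE_MAX_VALUE (integer_type_node),
				       integer_one_node);
#endif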
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node,
						       op1, 0)))
	      && !TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || integer_zerop (bottom)
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
					     top, bottom, 0));

    default:
      return 0;
    }
}
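
/* An illustrative sketch, not part of the GCC sources, of what
   multiple_of_p can conclude.  I is assumed to be some sizetype tree
   built elsewhere:  */
#if 0
  tree i8 = fold_build2 (MULT_EXPR, sizetype, i, size_int (8));
  tree i2 = fold_build2 (PLUS_EXPR, sizetype, i, size_int (2));

  multiple_of_p (sizetype, i8, size_int (4)); /* 1: MULT_EXPR case, 8 % 4 == 0.  */
  multiple_of_p (sizetype, i2, size_int (4)); /* 0: PLUS_EXPR needs both operands.  */
#endif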
/* Return true if CODE or TYPE is known to be non-negative.  */

static bool
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
{
  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
      && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the value is -1 and 0).  */
    return true;

  return false;
}
/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
	 ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!INTEGRAL_TYPE_P (type))
	return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  *strict_overflow_p = true;
	  return true;
	}
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return true;
		return tree_expr_nonnegative_warnv_p (op0,
						      strict_overflow_p);
	      }
	  }
	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				 tree op1, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
	return (tree_expr_nonnegative_warnv_p (op0,
					       strict_overflow_p)
		&& tree_expr_nonnegative_warnv_p (op1,
						  strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == NOP_EXPR
	  && TREE_CODE (op1) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (type);
	    }
	}
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type))
	{
	  /* x * x for floating point x is always non-negative.  */
	  if (operand_equal_p (op0, op1, 0))
	    return true;
	  return (tree_expr_nonnegative_warnv_p (op0,
						 strict_overflow_p)
		  && tree_expr_nonnegative_warnv_p (op1,
						    strict_overflow_p));
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == NOP_EXPR
	  && TREE_CODE (op1) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
		   < TYPE_PRECISION (type);
	}
      break;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      || tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);
    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
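
/* A worked instance of the zero-extension rules above (editorial
   example, not from the original sources).  With inner1 = inner2 =
   unsigned char (precision 8) widened into a 32-bit int: for
   PLUS_EXPR, prec = MAX (8, 8) + 1 = 9 < 32, and indeed the sum is at
   most 255 + 255 = 510; for MULT_EXPR, 8 + 8 = 16 < 32, and the
   product is at most 255 * 255 = 65025 < 2^16.  Both therefore stay
   below the sign bit of the result type.  */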
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      /* Query VRP to see if it has recorded any information about
	 the range of this object.  */
      return ssa_name_nonnegative_p (t);

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
						strict_overflow_p));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

static bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	if ((TREE_CODE (t) == MODIFY_EXPR
	     || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
	    && GENERIC_TREE_OPERAND (t, 0) == temp)
	  return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
						strict_overflow_p);

	return false;
      }

    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_ACOS):
	    CASE_FLT_FN (BUILT_IN_ACOSH):
	    CASE_FLT_FN (BUILT_IN_CABS):
	    CASE_FLT_FN (BUILT_IN_COSH):
	    CASE_FLT_FN (BUILT_IN_ERFC):
	    CASE_FLT_FN (BUILT_IN_EXP):
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_EXP2):
	    CASE_FLT_FN (BUILT_IN_FABS):
	    CASE_FLT_FN (BUILT_IN_FDIM):
	    CASE_FLT_FN (BUILT_IN_HYPOT):
	    CASE_FLT_FN (BUILT_IN_POW10):
	    CASE_INT_FN (BUILT_IN_FFS):
	    CASE_INT_FN (BUILT_IN_PARITY):
	    CASE_INT_FN (BUILT_IN_POPCOUNT):
	    case BUILT_IN_BSWAP32:
	    case BUILT_IN_BSWAP64:
	      /* Always true.  */
	      return true;

	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* sqrt(-0.0) is -0.0.  */
	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
		return true;
	      return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
						    strict_overflow_p);

	    CASE_FLT_FN (BUILT_IN_ASINH):
	    CASE_FLT_FN (BUILT_IN_ATAN):
	    CASE_FLT_FN (BUILT_IN_ATANH):
	    CASE_FLT_FN (BUILT_IN_CBRT):
	    CASE_FLT_FN (BUILT_IN_CEIL):
	    CASE_FLT_FN (BUILT_IN_ERF):
	    CASE_FLT_FN (BUILT_IN_EXPM1):
	    CASE_FLT_FN (BUILT_IN_FLOOR):
	    CASE_FLT_FN (BUILT_IN_FMOD):
	    CASE_FLT_FN (BUILT_IN_FREXP):
	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LDEXP):
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLRINT):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	    CASE_FLT_FN (BUILT_IN_LRINT):
	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_MODF):
	    CASE_FLT_FN (BUILT_IN_NEARBYINT):
	    CASE_FLT_FN (BUILT_IN_RINT):
	    CASE_FLT_FN (BUILT_IN_ROUND):
	    CASE_FLT_FN (BUILT_IN_SCALB):
	    CASE_FLT_FN (BUILT_IN_SCALBLN):
	    CASE_FLT_FN (BUILT_IN_SCALBN):
	    CASE_FLT_FN (BUILT_IN_SIGNBIT):
	    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
	    CASE_FLT_FN (BUILT_IN_SINH):
	    CASE_FLT_FN (BUILT_IN_TANH):
	    CASE_FLT_FN (BUILT_IN_TRUNC):
	      /* True if the 1st argument is nonnegative.  */
	      return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
						    strict_overflow_p);

	    CASE_FLT_FN (BUILT_IN_FMAX):
	      /* True if the 1st OR 2nd arguments are nonnegative.  */
	      return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
						     strict_overflow_p)
		      || (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
							 strict_overflow_p)));

	    CASE_FLT_FN (BUILT_IN_FMIN):
	      /* True if the 1st AND 2nd arguments are nonnegative.  */
	      return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
						     strict_overflow_p)
		      && (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
							 strict_overflow_p)));

	    CASE_FLT_FN (BUILT_IN_COPYSIGN):
	      /* True if the 2nd argument is nonnegative.  */
	      return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
						    strict_overflow_p);

	    CASE_FLT_FN (BUILT_IN_POWI):
	      /* True if the 1st argument is nonnegative or the second
		 argument is an even integer.  */
	      if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == INTEGER_CST)
		{
		  tree arg1 = CALL_EXPR_ARG (t, 1);
		  if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
		    return true;
		}
	      return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
						    strict_overflow_p);

	    CASE_FLT_FN (BUILT_IN_POW):
	      /* True if the 1st argument is nonnegative or the second
		 argument is an even integer valued real.  */
	      if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == REAL_CST)
		{
		  REAL_VALUE_TYPE c;
		  HOST_WIDE_INT n;

		  c = TREE_REAL_CST (CALL_EXPR_ARG (t, 1));
		  n = real_to_integer (&c);
		  if ((n & 1) == 0)
		    {
		      REAL_VALUE_TYPE cint;
		      real_from_integer (&cint, VOIDmode, n,
					 n < 0 ? -1 : 0, 0);
		      if (real_identical (&c, &cint))
			return true;
		    }
		}
	      return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
						    strict_overflow_p);

	    default:
	      break;
	    }
	return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
						TREE_TYPE (t));
      }

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case GIMPLE_MODIFY_STMT:
      return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
					    strict_overflow_p);

    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
					    strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					    strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;
  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case EXC_PTR_EXPR:
    case SSA_NAME:
    case FILTER_EXPR:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}
/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-negative"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
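
/* An illustrative sketch, not part of the GCC sources: a typical client
   is a fold rule that drops ABS_EXPR when its operand is provably
   non-negative, letting this function emit the -Wstrict-overflow
   warning on its behalf:  */
#if 0
  if (tree_expr_nonnegative_p (arg0))
    return arg0;	/* abs (x) -> x when x >= 0 is provable.  */
#endif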
/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
			    bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
		&& tree_expr_nonzero_warnv_p (op0,
					      strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    default:
      break;
    }

  return false;
}
/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
			     tree type,
			     tree op0,
			     tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* With the presence of negative values it is hard
	     to say something.  */
	  sub_strict_overflow_p = false;
	  if (!tree_expr_nonnegative_warnv_p (op0,
					      &sub_strict_overflow_p)
	      || !tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	    return false;
	  /* One of operands must be positive and the other non-negative.  */
	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
	     overflows, on a twos-complement machine the sum of two
	     nonnegative numbers can never be zero.  */
	  return (tree_expr_nonzero_warnv_p (op0,
					     strict_overflow_p)
		  || tree_expr_nonzero_warnv_p (op1,
						strict_overflow_p));
	}
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  if (tree_expr_nonzero_warnv_p (op0,
					 strict_overflow_p)
	      && tree_expr_nonzero_warnv_p (op1,
					    strict_overflow_p))
	    {
	      *strict_overflow_p = true;
	      return true;
	    }
	}
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (op1,
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;

	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_warnv_p (op0,
						strict_overflow_p);
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
					  &sub_strict_overflow_p)
	       && tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p)
	      || tree_expr_nonzero_warnv_p (op0,
					    strict_overflow_p));

    default:
      break;
    }

  return false;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      /* Query VRP to see if it has recorded any information about
	 the range of this object.  */
      return ssa_name_nonzero_p (t);

    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
	tree base = get_base_address (TREE_OPERAND (t, 0));

	if (!base)
	  return false;

	/* Weak declarations may link to NULL.  */
	if (VAR_OR_FUNCTION_DECL_P (base))
	  return !DECL_WEAK (base);

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    default:
      break;
    }

  return false;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case EXC_PTR_EXPR:
    case SSA_NAME:
    case FILTER_EXPR:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case GIMPLE_MODIFY_STMT:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
					strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}
/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-zero"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
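
/* An illustrative summary, not from the original sources, of the
   division of labor above: under -fstrict-overflow, x * y with both
   factors provably nonzero is folded as nonzero by the MULT_EXPR case,
   which sets *STRICT_OVERFLOW_P because a wrapped product could still
   be zero; tree_expr_nonzero_p then turns that flag into the
   -Wstrict-overflow warning.  */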
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert (sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop (index, fold_convert (sizetype, low_bound));

	  string = exp1;
	}

      if (string
	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return build_int_cst_type (TREE_TYPE (exp),
				   (TREE_STRING_POINTER (string)
				    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
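
/* An illustrative sketch, not part of the GCC sources, of the folding
   performed above in C source terms:

     "hello"[1]	     -> character constant 'e'  (ARRAY_REF path)
     *("hello" + 2)  -> character constant 'l'  (INDIRECT_REF path,
						 via string_constant)

   Both require a single-byte integer element type and an in-bounds
   constant index.  */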
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	unsigned HOST_WIDE_INT low;
	HOST_WIDE_INT high;
	int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				   TREE_INT_CST_HIGH (arg0),
				   &low, &high);
	t = force_fit_type_double (type, low, high, 1,
				   (overflow | TREE_OVERFLOW (arg0))
				   && !TYPE_UNSIGNED (type));
	break;
      }

    case REAL_CST:
      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
	FIXED_VALUE_TYPE f;
	bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
					    &(TREE_FIXED_CST (arg0)), NULL,
					    TYPE_SATURATING (type));
	t = build_fixed (type, f);
	/* Propagate overflow flags.  */
	if (overflow_p | TREE_OVERFLOW (arg0))
	  {
	    TREE_OVERFLOW (t) = 1;
	    TREE_CONSTANT_OVERFLOW (t) = 1;
	  }
	else if (TREE_CONSTANT_OVERFLOW (arg0))
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      /* If the value is unsigned, then the absolute value is
	 the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
	t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
	t = arg0;
      /* If the value is negative, then the absolute value is
	 its negation.  */
      else
	{
	  unsigned HOST_WIDE_INT low;
	  HOST_WIDE_INT high;
	  int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				     TREE_INT_CST_HIGH (arg0),
				     &low, &high);
	  t = force_fit_type_double (type, low, high, -1,
				     overflow | TREE_OVERFLOW (arg0));
	}
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
			     ~TREE_INT_CST_HIGH (arg0), 0,
			     TREE_OVERFLOW (arg0));

  return t;
}
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
	result = INT_CST_LT_UNSIGNED (op0, op1);
      else
	result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
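
/* A worked instance of the canonicalization above (editorial example,
   not from the original sources).  Folding 5 > 3: GT swaps the
   operands to (3, 5) and becomes LT; 3 < 5 gives 1; no inversion was
   requested, so the result is true.  Folding 5 >= 3: GE becomes LT
   with invert = 1; 5 < 3 gives 0, which the inversion turns into 1.  */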
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside
     the return, or the right-hand side of the modify expression it
     contains, has side effects.  If either does not, we don't need to
     wrap the expression in a cleanup point expression.  Note we don't
     check the left-hand side of the modify because it should always be
     a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
	  && type == TREE_TYPE (TREE_TYPE (op00type)))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, op01))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref (sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
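
/* An illustrative summary, not part of the GCC sources, of the rewrites
   performed by fold_indirect_ref_1 in C source terms:

     *&x            -> x
     *(T *)&arr     -> arr[0]	   (T the element type of arr)
     *(T *)&z       -> __real__ z  (z a complex T)
     ((T *)&z)[1]   -> __imag__ z  (constant offset == sizeof (T))
     *(T *)arrptr   -> (*arrptr)[0]

   Each case bails out to NULL_TREE when the types do not line up, and
   the caller then builds a plain INDIRECT_REF.  */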
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (type, t);

  if (sub)
    return sub;
  else
    return build1 (INDIRECT_REF, type, t);
}
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref (tree t)
{
  tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
	  unsigned HOST_WIDE_INT high;
	  bool overflow_p;

	  if ((low & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  high = TREE_INT_CST_HIGH (value);
	  low &= ~(divisor - 1);
	  low += divisor;
	  if (low == 0)
	    {
	      high++;
	      if (high == 0)
		overflow_p = true;
	    }

	  return force_fit_type_double (TREE_TYPE (value), low, high,
					-1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop (PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop (BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (CEIL_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
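
/* A stand-alone sketch of the power-of-two fast path above (editorial
   example, not part of the GCC sources), assuming DIVISOR is a power
   of two:  */
#if 0
static unsigned long
round_up_pow2 (unsigned long value, unsigned long divisor)
{
  /* Adding divisor - 1 carries VALUE past the next multiple unless it
     already is one; masking with -divisor (all ones above the low
     log2 (divisor) bits) then clears the remainder.  With divisor 8:
     (13 + 7) & -8 == 16 and (16 + 7) & -8 == 16.  */
  return (value + divisor - 1) & -divisor;
}
#endif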
/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (FLOOR_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = fold_addr_expr (core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
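
/* An illustrative sketch, not part of the GCC sources: for E1 = &a[3]
   and E2 = &a[1] the cores are both `a', neither offset is variable,
   and *DIFF receives 2 * sizeof (*a) bytes from the bit-position
   difference.  For E1 = &a[i] with variable I, only one side has a
   non-constant offset, so the function returns false.  */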
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
			    arg0 ? arg0 : TREE_OPERAND (exp, 0),
			    arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
			    arg0 ? arg0 : TREE_OPERAND (exp, 1),
			    arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip copysign function call, return the 1st argument.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand (TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr (get_callee_fndecl (exp), 1, arg0);