1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
48 #include "coretypes.h"
59 #include "langhooks.h"
62 /* The following constants represent a bit based encoding of GCC's
63 comparison operators. This encoding simplifies transformations
64 on relational comparison operators, such as AND and OR. */
65 enum comparison_code
{
84 static void encode (HOST_WIDE_INT
*, unsigned HOST_WIDE_INT
, HOST_WIDE_INT
);
85 static void decode (HOST_WIDE_INT
*, unsigned HOST_WIDE_INT
*, HOST_WIDE_INT
*);
86 static bool negate_mathfn_p (enum built_in_function
);
87 static bool negate_expr_p (tree
);
88 static tree
negate_expr (tree
);
89 static tree
split_tree (tree
, enum tree_code
, tree
*, tree
*, tree
*, int);
90 static tree
associate_trees (tree
, tree
, enum tree_code
, tree
);
91 static tree
const_binop (enum tree_code
, tree
, tree
, int);
92 static enum comparison_code
comparison_to_compcode (enum tree_code
);
93 static enum tree_code
compcode_to_comparison (enum comparison_code
);
94 static tree
combine_comparisons (enum tree_code
, enum tree_code
,
95 enum tree_code
, tree
, tree
, tree
);
96 static int truth_value_p (enum tree_code
);
97 static int operand_equal_for_comparison_p (tree
, tree
, tree
);
98 static int twoval_comparison_p (tree
, tree
*, tree
*, int *);
99 static tree
eval_subst (tree
, tree
, tree
, tree
, tree
);
100 static tree
pedantic_omit_one_operand (tree
, tree
, tree
);
101 static tree
distribute_bit_expr (enum tree_code
, tree
, tree
, tree
);
102 static tree
make_bit_field_ref (tree
, tree
, int, int, int);
103 static tree
optimize_bit_field_compare (enum tree_code
, tree
, tree
, tree
);
104 static tree
decode_field_reference (tree
, HOST_WIDE_INT
*, HOST_WIDE_INT
*,
105 enum machine_mode
*, int *, int *,
107 static int all_ones_mask_p (tree
, int);
108 static tree
sign_bit_p (tree
, tree
);
109 static int simple_operand_p (tree
);
110 static tree
range_binop (enum tree_code
, tree
, tree
, int, tree
, int);
111 static tree
make_range (tree
, int *, tree
*, tree
*);
112 static tree
build_range_check (tree
, tree
, int, tree
, tree
);
113 static int merge_ranges (int *, tree
*, tree
*, int, tree
, tree
, int, tree
,
115 static tree
fold_range_test (enum tree_code
, tree
, tree
, tree
);
116 static tree
fold_cond_expr_with_comparison (tree
, tree
, tree
, tree
);
117 static tree
unextend (tree
, int, int, tree
);
118 static tree
fold_truthop (enum tree_code
, tree
, tree
, tree
);
119 static tree
optimize_minmax_comparison (enum tree_code
, tree
, tree
, tree
);
120 static tree
extract_muldiv (tree
, tree
, enum tree_code
, tree
);
121 static tree
extract_muldiv_1 (tree
, tree
, enum tree_code
, tree
);
122 static int multiple_of_p (tree
, tree
, tree
);
123 static tree
fold_binary_op_with_conditional_arg (enum tree_code
, tree
,
126 static bool fold_real_zero_addition_p (tree
, tree
, int);
127 static tree
fold_mathfn_compare (enum built_in_function
, enum tree_code
,
129 static tree
fold_inf_compare (enum tree_code
, tree
, tree
, tree
);
130 static tree
fold_div_compare (enum tree_code
, tree
, tree
, tree
);
131 static bool reorder_operands_p (tree
, tree
);
132 static tree
fold_negate_const (tree
, tree
);
133 static tree
fold_not_const (tree
, tree
);
134 static tree
fold_relational_const (enum tree_code
, tree
, tree
, tree
);
136 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
137 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
138 and SUM1. Then this yields nonzero if overflow occurred during the
141 Overflow occurs if A and B have the same sign, but A and SUM differ in
142 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
144 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
146 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
147 We do that by representing the two-word integer in 4 words, with only
148 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
149 number. The value of the word is LOWPART + HIGHPART * BASE. */
152 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
153 #define HIGHPART(x) \
154 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
155 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
157 /* Unpack a two-word integer into 4 words.
158 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
159 WORDS points to the array of HOST_WIDE_INTs. */
162 encode (HOST_WIDE_INT
*words
, unsigned HOST_WIDE_INT low
, HOST_WIDE_INT hi
)
164 words
[0] = LOWPART (low
);
165 words
[1] = HIGHPART (low
);
166 words
[2] = LOWPART (hi
);
167 words
[3] = HIGHPART (hi
);
170 /* Pack an array of 4 words into a two-word integer.
171 WORDS points to the array of words.
172 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
175 decode (HOST_WIDE_INT
*words
, unsigned HOST_WIDE_INT
*low
,
178 *low
= words
[0] + words
[1] * BASE
;
179 *hi
= words
[2] + words
[3] * BASE
;
182 /* T is an INT_CST node. OVERFLOWABLE indicates if we are interested
183 in overflow of the value, when >0 we are only interested in signed
184 overflow, for <0 we are interested in any overflow. OVERFLOWED
185 indicates whether overflow has already occurred. CONST_OVERFLOWED
186 indicates whether constant overflow has already occurred. We force
187 T's value to be within range of T's type (by setting to 0 or 1 all
188 the bits outside the type's range). We set TREE_OVERFLOWED if,
189 OVERFLOWED is nonzero,
190 or OVERFLOWABLE is >0 and signed overflow occurs
191 or OVERFLOWABLE is <0 and any overflow occurs
192 We set TREE_CONSTANT_OVERFLOWED if,
193 CONST_OVERFLOWED is nonzero
194 or we set TREE_OVERFLOWED.
195 We return either the original T, or a copy. */
198 force_fit_type (tree t
, int overflowable
,
199 bool overflowed
, bool overflowed_const
)
201 unsigned HOST_WIDE_INT low
;
204 int sign_extended_type
;
206 gcc_assert (TREE_CODE (t
) == INTEGER_CST
);
208 low
= TREE_INT_CST_LOW (t
);
209 high
= TREE_INT_CST_HIGH (t
);
211 if (POINTER_TYPE_P (TREE_TYPE (t
))
212 || TREE_CODE (TREE_TYPE (t
)) == OFFSET_TYPE
)
215 prec
= TYPE_PRECISION (TREE_TYPE (t
));
216 /* Size types *are* sign extended. */
217 sign_extended_type
= (!TYPE_UNSIGNED (TREE_TYPE (t
))
218 || (TREE_CODE (TREE_TYPE (t
)) == INTEGER_TYPE
219 && TYPE_IS_SIZETYPE (TREE_TYPE (t
))));
221 /* First clear all bits that are beyond the type's precision. */
223 if (prec
>= 2 * HOST_BITS_PER_WIDE_INT
)
225 else if (prec
> HOST_BITS_PER_WIDE_INT
)
226 high
&= ~((HOST_WIDE_INT
) (-1) << (prec
- HOST_BITS_PER_WIDE_INT
));
230 if (prec
< HOST_BITS_PER_WIDE_INT
)
231 low
&= ~((HOST_WIDE_INT
) (-1) << prec
);
234 if (!sign_extended_type
)
235 /* No sign extension */;
236 else if (prec
>= 2 * HOST_BITS_PER_WIDE_INT
)
237 /* Correct width already. */;
238 else if (prec
> HOST_BITS_PER_WIDE_INT
)
240 /* Sign extend top half? */
241 if (high
& ((unsigned HOST_WIDE_INT
)1
242 << (prec
- HOST_BITS_PER_WIDE_INT
- 1)))
243 high
|= (HOST_WIDE_INT
) (-1) << (prec
- HOST_BITS_PER_WIDE_INT
);
245 else if (prec
== HOST_BITS_PER_WIDE_INT
)
247 if ((HOST_WIDE_INT
)low
< 0)
252 /* Sign extend bottom half? */
253 if (low
& ((unsigned HOST_WIDE_INT
)1 << (prec
- 1)))
256 low
|= (HOST_WIDE_INT
)(-1) << prec
;
260 /* If the value changed, return a new node. */
261 if (overflowed
|| overflowed_const
262 || low
!= TREE_INT_CST_LOW (t
) || high
!= TREE_INT_CST_HIGH (t
))
264 t
= build_int_cst_wide (TREE_TYPE (t
), low
, high
);
268 || (overflowable
> 0 && sign_extended_type
))
271 TREE_OVERFLOW (t
) = 1;
272 TREE_CONSTANT_OVERFLOW (t
) = 1;
274 else if (overflowed_const
)
277 TREE_CONSTANT_OVERFLOW (t
) = 1;
284 /* Add two doubleword integers with doubleword result.
285 Each argument is given as two `HOST_WIDE_INT' pieces.
286 One argument is L1 and H1; the other, L2 and H2.
287 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
290 add_double (unsigned HOST_WIDE_INT l1
, HOST_WIDE_INT h1
,
291 unsigned HOST_WIDE_INT l2
, HOST_WIDE_INT h2
,
292 unsigned HOST_WIDE_INT
*lv
, HOST_WIDE_INT
*hv
)
294 unsigned HOST_WIDE_INT l
;
298 h
= h1
+ h2
+ (l
< l1
);
302 return OVERFLOW_SUM_SIGN (h1
, h2
, h
);
305 /* Negate a doubleword integer with doubleword result.
306 Return nonzero if the operation overflows, assuming it's signed.
307 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
308 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
311 neg_double (unsigned HOST_WIDE_INT l1
, HOST_WIDE_INT h1
,
312 unsigned HOST_WIDE_INT
*lv
, HOST_WIDE_INT
*hv
)
318 return (*hv
& h1
) < 0;
328 /* Multiply two doubleword integers with doubleword result.
329 Return nonzero if the operation overflows, assuming it's signed.
330 Each argument is given as two `HOST_WIDE_INT' pieces.
331 One argument is L1 and H1; the other, L2 and H2.
332 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
335 mul_double (unsigned HOST_WIDE_INT l1
, HOST_WIDE_INT h1
,
336 unsigned HOST_WIDE_INT l2
, HOST_WIDE_INT h2
,
337 unsigned HOST_WIDE_INT
*lv
, HOST_WIDE_INT
*hv
)
339 HOST_WIDE_INT arg1
[4];
340 HOST_WIDE_INT arg2
[4];
341 HOST_WIDE_INT prod
[4 * 2];
342 unsigned HOST_WIDE_INT carry
;
344 unsigned HOST_WIDE_INT toplow
, neglow
;
345 HOST_WIDE_INT tophigh
, neghigh
;
347 encode (arg1
, l1
, h1
);
348 encode (arg2
, l2
, h2
);
350 memset (prod
, 0, sizeof prod
);
352 for (i
= 0; i
< 4; i
++)
355 for (j
= 0; j
< 4; j
++)
358 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
359 carry
+= arg1
[i
] * arg2
[j
];
360 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
362 prod
[k
] = LOWPART (carry
);
363 carry
= HIGHPART (carry
);
368 decode (prod
, lv
, hv
); /* This ignores prod[4] through prod[4*2-1] */
370 /* Check for overflow by calculating the top half of the answer in full;
371 it should agree with the low half's sign bit. */
372 decode (prod
+ 4, &toplow
, &tophigh
);
375 neg_double (l2
, h2
, &neglow
, &neghigh
);
376 add_double (neglow
, neghigh
, toplow
, tophigh
, &toplow
, &tophigh
);
380 neg_double (l1
, h1
, &neglow
, &neghigh
);
381 add_double (neglow
, neghigh
, toplow
, tophigh
, &toplow
, &tophigh
);
383 return (*hv
< 0 ? ~(toplow
& tophigh
) : toplow
| tophigh
) != 0;
386 /* Shift the doubleword integer in L1, H1 left by COUNT places
387 keeping only PREC bits of result.
388 Shift right if COUNT is negative.
389 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
390 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
393 lshift_double (unsigned HOST_WIDE_INT l1
, HOST_WIDE_INT h1
,
394 HOST_WIDE_INT count
, unsigned int prec
,
395 unsigned HOST_WIDE_INT
*lv
, HOST_WIDE_INT
*hv
, int arith
)
397 unsigned HOST_WIDE_INT signmask
;
401 rshift_double (l1
, h1
, -count
, prec
, lv
, hv
, arith
);
405 if (SHIFT_COUNT_TRUNCATED
)
408 if (count
>= 2 * HOST_BITS_PER_WIDE_INT
)
410 /* Shifting by the host word size is undefined according to the
411 ANSI standard, so we must handle this as a special case. */
415 else if (count
>= HOST_BITS_PER_WIDE_INT
)
417 *hv
= l1
<< (count
- HOST_BITS_PER_WIDE_INT
);
422 *hv
= (((unsigned HOST_WIDE_INT
) h1
<< count
)
423 | (l1
>> (HOST_BITS_PER_WIDE_INT
- count
- 1) >> 1));
427 /* Sign extend all bits that are beyond the precision. */
429 signmask
= -((prec
> HOST_BITS_PER_WIDE_INT
430 ? ((unsigned HOST_WIDE_INT
) *hv
431 >> (prec
- HOST_BITS_PER_WIDE_INT
- 1))
432 : (*lv
>> (prec
- 1))) & 1);
434 if (prec
>= 2 * HOST_BITS_PER_WIDE_INT
)
436 else if (prec
>= HOST_BITS_PER_WIDE_INT
)
438 *hv
&= ~((HOST_WIDE_INT
) (-1) << (prec
- HOST_BITS_PER_WIDE_INT
));
439 *hv
|= signmask
<< (prec
- HOST_BITS_PER_WIDE_INT
);
444 *lv
&= ~((unsigned HOST_WIDE_INT
) (-1) << prec
);
445 *lv
|= signmask
<< prec
;
449 /* Shift the doubleword integer in L1, H1 right by COUNT places
450 keeping only PREC bits of result. COUNT must be positive.
451 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
452 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
455 rshift_double (unsigned HOST_WIDE_INT l1
, HOST_WIDE_INT h1
,
456 HOST_WIDE_INT count
, unsigned int prec
,
457 unsigned HOST_WIDE_INT
*lv
, HOST_WIDE_INT
*hv
,
460 unsigned HOST_WIDE_INT signmask
;
463 ? -((unsigned HOST_WIDE_INT
) h1
>> (HOST_BITS_PER_WIDE_INT
- 1))
466 if (SHIFT_COUNT_TRUNCATED
)
469 if (count
>= 2 * HOST_BITS_PER_WIDE_INT
)
471 /* Shifting by the host word size is undefined according to the
472 ANSI standard, so we must handle this as a special case. */
476 else if (count
>= HOST_BITS_PER_WIDE_INT
)
479 *lv
= (unsigned HOST_WIDE_INT
) h1
>> (count
- HOST_BITS_PER_WIDE_INT
);
483 *hv
= (unsigned HOST_WIDE_INT
) h1
>> count
;
485 | ((unsigned HOST_WIDE_INT
) h1
<< (HOST_BITS_PER_WIDE_INT
- count
- 1) << 1));
488 /* Zero / sign extend all bits that are beyond the precision. */
490 if (count
>= (HOST_WIDE_INT
)prec
)
495 else if ((prec
- count
) >= 2 * HOST_BITS_PER_WIDE_INT
)
497 else if ((prec
- count
) >= HOST_BITS_PER_WIDE_INT
)
499 *hv
&= ~((HOST_WIDE_INT
) (-1) << (prec
- count
- HOST_BITS_PER_WIDE_INT
));
500 *hv
|= signmask
<< (prec
- count
- HOST_BITS_PER_WIDE_INT
);
505 *lv
&= ~((unsigned HOST_WIDE_INT
) (-1) << (prec
- count
));
506 *lv
|= signmask
<< (prec
- count
);
510 /* Rotate the doubleword integer in L1, H1 left by COUNT places
511 keeping only PREC bits of result.
512 Rotate right if COUNT is negative.
513 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
516 lrotate_double (unsigned HOST_WIDE_INT l1
, HOST_WIDE_INT h1
,
517 HOST_WIDE_INT count
, unsigned int prec
,
518 unsigned HOST_WIDE_INT
*lv
, HOST_WIDE_INT
*hv
)
520 unsigned HOST_WIDE_INT s1l
, s2l
;
521 HOST_WIDE_INT s1h
, s2h
;
527 lshift_double (l1
, h1
, count
, prec
, &s1l
, &s1h
, 0);
528 rshift_double (l1
, h1
, prec
- count
, prec
, &s2l
, &s2h
, 0);
533 /* Rotate the doubleword integer in L1, H1 left by COUNT places
534 keeping only PREC bits of result. COUNT must be positive.
535 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
538 rrotate_double (unsigned HOST_WIDE_INT l1
, HOST_WIDE_INT h1
,
539 HOST_WIDE_INT count
, unsigned int prec
,
540 unsigned HOST_WIDE_INT
*lv
, HOST_WIDE_INT
*hv
)
542 unsigned HOST_WIDE_INT s1l
, s2l
;
543 HOST_WIDE_INT s1h
, s2h
;
549 rshift_double (l1
, h1
, count
, prec
, &s1l
, &s1h
, 0);
550 lshift_double (l1
, h1
, prec
- count
, prec
, &s2l
, &s2h
, 0);
555 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
556 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
557 CODE is a tree code for a kind of division, one of
558 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
560 It controls how the quotient is rounded to an integer.
561 Return nonzero if the operation overflows.
562 UNS nonzero says do unsigned division. */
565 div_and_round_double (enum tree_code code
, int uns
,
566 unsigned HOST_WIDE_INT lnum_orig
, /* num == numerator == dividend */
567 HOST_WIDE_INT hnum_orig
,
568 unsigned HOST_WIDE_INT lden_orig
, /* den == denominator == divisor */
569 HOST_WIDE_INT hden_orig
,
570 unsigned HOST_WIDE_INT
*lquo
,
571 HOST_WIDE_INT
*hquo
, unsigned HOST_WIDE_INT
*lrem
,
575 HOST_WIDE_INT num
[4 + 1]; /* extra element for scaling. */
576 HOST_WIDE_INT den
[4], quo
[4];
578 unsigned HOST_WIDE_INT work
;
579 unsigned HOST_WIDE_INT carry
= 0;
580 unsigned HOST_WIDE_INT lnum
= lnum_orig
;
581 HOST_WIDE_INT hnum
= hnum_orig
;
582 unsigned HOST_WIDE_INT lden
= lden_orig
;
583 HOST_WIDE_INT hden
= hden_orig
;
586 if (hden
== 0 && lden
== 0)
587 overflow
= 1, lden
= 1;
589 /* Calculate quotient sign and convert operands to unsigned. */
595 /* (minimum integer) / (-1) is the only overflow case. */
596 if (neg_double (lnum
, hnum
, &lnum
, &hnum
)
597 && ((HOST_WIDE_INT
) lden
& hden
) == -1)
603 neg_double (lden
, hden
, &lden
, &hden
);
607 if (hnum
== 0 && hden
== 0)
608 { /* single precision */
610 /* This unsigned division rounds toward zero. */
616 { /* trivial case: dividend < divisor */
617 /* hden != 0 already checked. */
624 memset (quo
, 0, sizeof quo
);
626 memset (num
, 0, sizeof num
); /* to zero 9th element */
627 memset (den
, 0, sizeof den
);
629 encode (num
, lnum
, hnum
);
630 encode (den
, lden
, hden
);
632 /* Special code for when the divisor < BASE. */
633 if (hden
== 0 && lden
< (unsigned HOST_WIDE_INT
) BASE
)
635 /* hnum != 0 already checked. */
636 for (i
= 4 - 1; i
>= 0; i
--)
638 work
= num
[i
] + carry
* BASE
;
639 quo
[i
] = work
/ lden
;
645 /* Full double precision division,
646 with thanks to Don Knuth's "Seminumerical Algorithms". */
647 int num_hi_sig
, den_hi_sig
;
648 unsigned HOST_WIDE_INT quo_est
, scale
;
650 /* Find the highest nonzero divisor digit. */
651 for (i
= 4 - 1;; i
--)
658 /* Insure that the first digit of the divisor is at least BASE/2.
659 This is required by the quotient digit estimation algorithm. */
661 scale
= BASE
/ (den
[den_hi_sig
] + 1);
663 { /* scale divisor and dividend */
665 for (i
= 0; i
<= 4 - 1; i
++)
667 work
= (num
[i
] * scale
) + carry
;
668 num
[i
] = LOWPART (work
);
669 carry
= HIGHPART (work
);
674 for (i
= 0; i
<= 4 - 1; i
++)
676 work
= (den
[i
] * scale
) + carry
;
677 den
[i
] = LOWPART (work
);
678 carry
= HIGHPART (work
);
679 if (den
[i
] != 0) den_hi_sig
= i
;
686 for (i
= num_hi_sig
- den_hi_sig
- 1; i
>= 0; i
--)
688 /* Guess the next quotient digit, quo_est, by dividing the first
689 two remaining dividend digits by the high order quotient digit.
690 quo_est is never low and is at most 2 high. */
691 unsigned HOST_WIDE_INT tmp
;
693 num_hi_sig
= i
+ den_hi_sig
+ 1;
694 work
= num
[num_hi_sig
] * BASE
+ num
[num_hi_sig
- 1];
695 if (num
[num_hi_sig
] != den
[den_hi_sig
])
696 quo_est
= work
/ den
[den_hi_sig
];
700 /* Refine quo_est so it's usually correct, and at most one high. */
701 tmp
= work
- quo_est
* den
[den_hi_sig
];
703 && (den
[den_hi_sig
- 1] * quo_est
704 > (tmp
* BASE
+ num
[num_hi_sig
- 2])))
707 /* Try QUO_EST as the quotient digit, by multiplying the
708 divisor by QUO_EST and subtracting from the remaining dividend.
709 Keep in mind that QUO_EST is the I - 1st digit. */
712 for (j
= 0; j
<= den_hi_sig
; j
++)
714 work
= quo_est
* den
[j
] + carry
;
715 carry
= HIGHPART (work
);
716 work
= num
[i
+ j
] - LOWPART (work
);
717 num
[i
+ j
] = LOWPART (work
);
718 carry
+= HIGHPART (work
) != 0;
721 /* If quo_est was high by one, then num[i] went negative and
722 we need to correct things. */
723 if (num
[num_hi_sig
] < (HOST_WIDE_INT
) carry
)
726 carry
= 0; /* add divisor back in */
727 for (j
= 0; j
<= den_hi_sig
; j
++)
729 work
= num
[i
+ j
] + den
[j
] + carry
;
730 carry
= HIGHPART (work
);
731 num
[i
+ j
] = LOWPART (work
);
734 num
[num_hi_sig
] += carry
;
737 /* Store the quotient digit. */
742 decode (quo
, lquo
, hquo
);
745 /* If result is negative, make it so. */
747 neg_double (*lquo
, *hquo
, lquo
, hquo
);
749 /* Compute trial remainder: rem = num - (quo * den) */
750 mul_double (*lquo
, *hquo
, lden_orig
, hden_orig
, lrem
, hrem
);
751 neg_double (*lrem
, *hrem
, lrem
, hrem
);
752 add_double (lnum_orig
, hnum_orig
, *lrem
, *hrem
, lrem
, hrem
);
757 case TRUNC_MOD_EXPR
: /* round toward zero */
758 case EXACT_DIV_EXPR
: /* for this one, it shouldn't matter */
762 case FLOOR_MOD_EXPR
: /* round toward negative infinity */
763 if (quo_neg
&& (*lrem
!= 0 || *hrem
!= 0)) /* ratio < 0 && rem != 0 */
766 add_double (*lquo
, *hquo
, (HOST_WIDE_INT
) -1, (HOST_WIDE_INT
) -1,
774 case CEIL_MOD_EXPR
: /* round toward positive infinity */
775 if (!quo_neg
&& (*lrem
!= 0 || *hrem
!= 0)) /* ratio > 0 && rem != 0 */
777 add_double (*lquo
, *hquo
, (HOST_WIDE_INT
) 1, (HOST_WIDE_INT
) 0,
785 case ROUND_MOD_EXPR
: /* round to closest integer */
787 unsigned HOST_WIDE_INT labs_rem
= *lrem
;
788 HOST_WIDE_INT habs_rem
= *hrem
;
789 unsigned HOST_WIDE_INT labs_den
= lden
, ltwice
;
790 HOST_WIDE_INT habs_den
= hden
, htwice
;
792 /* Get absolute values. */
794 neg_double (*lrem
, *hrem
, &labs_rem
, &habs_rem
);
796 neg_double (lden
, hden
, &labs_den
, &habs_den
);
798 /* If (2 * abs (lrem) >= abs (lden)) */
799 mul_double ((HOST_WIDE_INT
) 2, (HOST_WIDE_INT
) 0,
800 labs_rem
, habs_rem
, <wice
, &htwice
);
802 if (((unsigned HOST_WIDE_INT
) habs_den
803 < (unsigned HOST_WIDE_INT
) htwice
)
804 || (((unsigned HOST_WIDE_INT
) habs_den
805 == (unsigned HOST_WIDE_INT
) htwice
)
806 && (labs_den
< ltwice
)))
810 add_double (*lquo
, *hquo
,
811 (HOST_WIDE_INT
) -1, (HOST_WIDE_INT
) -1, lquo
, hquo
);
814 add_double (*lquo
, *hquo
, (HOST_WIDE_INT
) 1, (HOST_WIDE_INT
) 0,
826 /* Compute true remainder: rem = num - (quo * den) */
827 mul_double (*lquo
, *hquo
, lden_orig
, hden_orig
, lrem
, hrem
);
828 neg_double (*lrem
, *hrem
, lrem
, hrem
);
829 add_double (lnum_orig
, hnum_orig
, *lrem
, *hrem
, lrem
, hrem
);
833 /* If ARG2 divides ARG1 with zero remainder, carries out the division
834 of type CODE and returns the quotient.
835 Otherwise returns NULL_TREE. */
838 div_if_zero_remainder (enum tree_code code
, tree arg1
, tree arg2
)
840 unsigned HOST_WIDE_INT int1l
, int2l
;
841 HOST_WIDE_INT int1h
, int2h
;
842 unsigned HOST_WIDE_INT quol
, reml
;
843 HOST_WIDE_INT quoh
, remh
;
844 tree type
= TREE_TYPE (arg1
);
845 int uns
= TYPE_UNSIGNED (type
);
847 int1l
= TREE_INT_CST_LOW (arg1
);
848 int1h
= TREE_INT_CST_HIGH (arg1
);
849 int2l
= TREE_INT_CST_LOW (arg2
);
850 int2h
= TREE_INT_CST_HIGH (arg2
);
852 div_and_round_double (code
, uns
, int1l
, int1h
, int2l
, int2h
,
853 &quol
, &quoh
, &reml
, &remh
);
854 if (remh
!= 0 || reml
!= 0)
857 return build_int_cst_wide (type
, quol
, quoh
);
860 /* Return true if built-in mathematical function specified by CODE
861 preserves the sign of it argument, i.e. -f(x) == f(-x). */
864 negate_mathfn_p (enum built_in_function code
)
888 /* Check whether we may negate an integer constant T without causing
892 may_negate_without_overflow_p (tree t
)
894 unsigned HOST_WIDE_INT val
;
898 gcc_assert (TREE_CODE (t
) == INTEGER_CST
);
900 type
= TREE_TYPE (t
);
901 if (TYPE_UNSIGNED (type
))
904 prec
= TYPE_PRECISION (type
);
905 if (prec
> HOST_BITS_PER_WIDE_INT
)
907 if (TREE_INT_CST_LOW (t
) != 0)
909 prec
-= HOST_BITS_PER_WIDE_INT
;
910 val
= TREE_INT_CST_HIGH (t
);
913 val
= TREE_INT_CST_LOW (t
);
914 if (prec
< HOST_BITS_PER_WIDE_INT
)
915 val
&= ((unsigned HOST_WIDE_INT
) 1 << prec
) - 1;
916 return val
!= ((unsigned HOST_WIDE_INT
) 1 << (prec
- 1));
919 /* Determine whether an expression T can be cheaply negated using
920 the function negate_expr. */
923 negate_expr_p (tree t
)
930 type
= TREE_TYPE (t
);
933 switch (TREE_CODE (t
))
936 if (TYPE_UNSIGNED (type
) || ! flag_trapv
)
939 /* Check that -CST will not overflow type. */
940 return may_negate_without_overflow_p (t
);
947 return negate_expr_p (TREE_REALPART (t
))
948 && negate_expr_p (TREE_IMAGPART (t
));
951 if (FLOAT_TYPE_P (type
) && !flag_unsafe_math_optimizations
)
953 /* -(A + B) -> (-B) - A. */
954 if (negate_expr_p (TREE_OPERAND (t
, 1))
955 && reorder_operands_p (TREE_OPERAND (t
, 0),
956 TREE_OPERAND (t
, 1)))
958 /* -(A + B) -> (-A) - B. */
959 return negate_expr_p (TREE_OPERAND (t
, 0));
962 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
963 return (! FLOAT_TYPE_P (type
) || flag_unsafe_math_optimizations
)
964 && reorder_operands_p (TREE_OPERAND (t
, 0),
965 TREE_OPERAND (t
, 1));
968 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
974 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t
))))
975 return negate_expr_p (TREE_OPERAND (t
, 1))
976 || negate_expr_p (TREE_OPERAND (t
, 0));
980 /* Negate -((double)float) as (double)(-float). */
981 if (TREE_CODE (type
) == REAL_TYPE
)
983 tree tem
= strip_float_extensions (t
);
985 return negate_expr_p (tem
);
990 /* Negate -f(x) as f(-x). */
991 if (negate_mathfn_p (builtin_mathfn_code (t
)))
992 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t
, 1)));
996 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
997 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
999 tree op1
= TREE_OPERAND (t
, 1);
1000 if (TREE_INT_CST_HIGH (op1
) == 0
1001 && (unsigned HOST_WIDE_INT
) (TYPE_PRECISION (type
) - 1)
1002 == TREE_INT_CST_LOW (op1
))
1013 /* Given T, an expression, return the negation of T. Allow for T to be
1014 null, in which case return null. */
1017 negate_expr (tree t
)
1025 type
= TREE_TYPE (t
);
1026 STRIP_SIGN_NOPS (t
);
1028 switch (TREE_CODE (t
))
1031 tem
= fold_negate_const (t
, type
);
1032 if (! TREE_OVERFLOW (tem
)
1033 || TYPE_UNSIGNED (type
)
1039 tem
= fold_negate_const (t
, type
);
1040 /* Two's complement FP formats, such as c4x, may overflow. */
1041 if (! TREE_OVERFLOW (tem
) || ! flag_trapping_math
)
1042 return fold_convert (type
, tem
);
1047 tree rpart
= negate_expr (TREE_REALPART (t
));
1048 tree ipart
= negate_expr (TREE_IMAGPART (t
));
1050 if ((TREE_CODE (rpart
) == REAL_CST
1051 && TREE_CODE (ipart
) == REAL_CST
)
1052 || (TREE_CODE (rpart
) == INTEGER_CST
1053 && TREE_CODE (ipart
) == INTEGER_CST
))
1054 return build_complex (type
, rpart
, ipart
);
1059 return fold_convert (type
, TREE_OPERAND (t
, 0));
1062 if (! FLOAT_TYPE_P (type
) || flag_unsafe_math_optimizations
)
1064 /* -(A + B) -> (-B) - A. */
1065 if (negate_expr_p (TREE_OPERAND (t
, 1))
1066 && reorder_operands_p (TREE_OPERAND (t
, 0),
1067 TREE_OPERAND (t
, 1)))
1069 tem
= negate_expr (TREE_OPERAND (t
, 1));
1070 tem
= fold_build2 (MINUS_EXPR
, TREE_TYPE (t
),
1071 tem
, TREE_OPERAND (t
, 0));
1072 return fold_convert (type
, tem
);
1075 /* -(A + B) -> (-A) - B. */
1076 if (negate_expr_p (TREE_OPERAND (t
, 0)))
1078 tem
= negate_expr (TREE_OPERAND (t
, 0));
1079 tem
= fold_build2 (MINUS_EXPR
, TREE_TYPE (t
),
1080 tem
, TREE_OPERAND (t
, 1));
1081 return fold_convert (type
, tem
);
1087 /* - (A - B) -> B - A */
1088 if ((! FLOAT_TYPE_P (type
) || flag_unsafe_math_optimizations
)
1089 && reorder_operands_p (TREE_OPERAND (t
, 0), TREE_OPERAND (t
, 1)))
1090 return fold_convert (type
,
1091 fold_build2 (MINUS_EXPR
, TREE_TYPE (t
),
1092 TREE_OPERAND (t
, 1),
1093 TREE_OPERAND (t
, 0)));
1097 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
1103 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t
))))
1105 tem
= TREE_OPERAND (t
, 1);
1106 if (negate_expr_p (tem
))
1107 return fold_convert (type
,
1108 fold_build2 (TREE_CODE (t
), TREE_TYPE (t
),
1109 TREE_OPERAND (t
, 0),
1110 negate_expr (tem
)));
1111 tem
= TREE_OPERAND (t
, 0);
1112 if (negate_expr_p (tem
))
1113 return fold_convert (type
,
1114 fold_build2 (TREE_CODE (t
), TREE_TYPE (t
),
1116 TREE_OPERAND (t
, 1)));
1121 /* Convert -((double)float) into (double)(-float). */
1122 if (TREE_CODE (type
) == REAL_TYPE
)
1124 tem
= strip_float_extensions (t
);
1125 if (tem
!= t
&& negate_expr_p (tem
))
1126 return fold_convert (type
, negate_expr (tem
));
1131 /* Negate -f(x) as f(-x). */
1132 if (negate_mathfn_p (builtin_mathfn_code (t
))
1133 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t
, 1))))
1135 tree fndecl
, arg
, arglist
;
1137 fndecl
= get_callee_fndecl (t
);
1138 arg
= negate_expr (TREE_VALUE (TREE_OPERAND (t
, 1)));
1139 arglist
= build_tree_list (NULL_TREE
, arg
);
1140 return build_function_call_expr (fndecl
, arglist
);
1145 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1146 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
1148 tree op1
= TREE_OPERAND (t
, 1);
1149 if (TREE_INT_CST_HIGH (op1
) == 0
1150 && (unsigned HOST_WIDE_INT
) (TYPE_PRECISION (type
) - 1)
1151 == TREE_INT_CST_LOW (op1
))
1153 tree ntype
= TYPE_UNSIGNED (type
)
1154 ? lang_hooks
.types
.signed_type (type
)
1155 : lang_hooks
.types
.unsigned_type (type
);
1156 tree temp
= fold_convert (ntype
, TREE_OPERAND (t
, 0));
1157 temp
= fold_build2 (RSHIFT_EXPR
, ntype
, temp
, op1
);
1158 return fold_convert (type
, temp
);
1167 tem
= fold_build1 (NEGATE_EXPR
, TREE_TYPE (t
), t
);
1168 return fold_convert (type
, tem
);
1171 /* Split a tree IN into a constant, literal and variable parts that could be
1172 combined with CODE to make IN. "constant" means an expression with
1173 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1174 commutative arithmetic operation. Store the constant part into *CONP,
1175 the literal in *LITP and return the variable part. If a part isn't
1176 present, set it to null. If the tree does not decompose in this way,
1177 return the entire tree as the variable part and the other parts as null.
1179 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1180 case, we negate an operand that was subtracted. Except if it is a
1181 literal for which we use *MINUS_LITP instead.
1183 If NEGATE_P is true, we are negating all of IN, again except a literal
1184 for which we use *MINUS_LITP instead.
1186 If IN is itself a literal or constant, return it as appropriate.
1188 Note that we do not guarantee that any of the three values will be the
1189 same type as IN, but they will have the same signedness and mode. */
1192 split_tree (tree in
, enum tree_code code
, tree
*conp
, tree
*litp
,
1193 tree
*minus_litp
, int negate_p
)
1201 /* Strip any conversions that don't change the machine mode or signedness. */
1202 STRIP_SIGN_NOPS (in
);
1204 if (TREE_CODE (in
) == INTEGER_CST
|| TREE_CODE (in
) == REAL_CST
)
1206 else if (TREE_CODE (in
) == code
1207 || (! FLOAT_TYPE_P (TREE_TYPE (in
))
1208 /* We can associate addition and subtraction together (even
1209 though the C standard doesn't say so) for integers because
1210 the value is not affected. For reals, the value might be
1211 affected, so we can't. */
1212 && ((code
== PLUS_EXPR
&& TREE_CODE (in
) == MINUS_EXPR
)
1213 || (code
== MINUS_EXPR
&& TREE_CODE (in
) == PLUS_EXPR
))))
1215 tree op0
= TREE_OPERAND (in
, 0);
1216 tree op1
= TREE_OPERAND (in
, 1);
1217 int neg1_p
= TREE_CODE (in
) == MINUS_EXPR
;
1218 int neg_litp_p
= 0, neg_conp_p
= 0, neg_var_p
= 0;
1220 /* First see if either of the operands is a literal, then a constant. */
1221 if (TREE_CODE (op0
) == INTEGER_CST
|| TREE_CODE (op0
) == REAL_CST
)
1222 *litp
= op0
, op0
= 0;
1223 else if (TREE_CODE (op1
) == INTEGER_CST
|| TREE_CODE (op1
) == REAL_CST
)
1224 *litp
= op1
, neg_litp_p
= neg1_p
, op1
= 0;
1226 if (op0
!= 0 && TREE_CONSTANT (op0
))
1227 *conp
= op0
, op0
= 0;
1228 else if (op1
!= 0 && TREE_CONSTANT (op1
))
1229 *conp
= op1
, neg_conp_p
= neg1_p
, op1
= 0;
1231 /* If we haven't dealt with either operand, this is not a case we can
1232 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1233 if (op0
!= 0 && op1
!= 0)
1238 var
= op1
, neg_var_p
= neg1_p
;
1240 /* Now do any needed negations. */
1242 *minus_litp
= *litp
, *litp
= 0;
1244 *conp
= negate_expr (*conp
);
1246 var
= negate_expr (var
);
1248 else if (TREE_CONSTANT (in
))
1256 *minus_litp
= *litp
, *litp
= 0;
1257 else if (*minus_litp
)
1258 *litp
= *minus_litp
, *minus_litp
= 0;
1259 *conp
= negate_expr (*conp
);
1260 var
= negate_expr (var
);
1266 /* Re-associate trees split by the above function. T1 and T2 are either
1267 expressions to associate or null. Return the new expression, if any. If
1268 we build an operation, do it in TYPE and with CODE. */
1271 associate_trees (tree t1
, tree t2
, enum tree_code code
, tree type
)
1278 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1279 try to fold this since we will have infinite recursion. But do
1280 deal with any NEGATE_EXPRs. */
1281 if (TREE_CODE (t1
) == code
|| TREE_CODE (t2
) == code
1282 || TREE_CODE (t1
) == MINUS_EXPR
|| TREE_CODE (t2
) == MINUS_EXPR
)
1284 if (code
== PLUS_EXPR
)
1286 if (TREE_CODE (t1
) == NEGATE_EXPR
)
1287 return build2 (MINUS_EXPR
, type
, fold_convert (type
, t2
),
1288 fold_convert (type
, TREE_OPERAND (t1
, 0)));
1289 else if (TREE_CODE (t2
) == NEGATE_EXPR
)
1290 return build2 (MINUS_EXPR
, type
, fold_convert (type
, t1
),
1291 fold_convert (type
, TREE_OPERAND (t2
, 0)));
1292 else if (integer_zerop (t2
))
1293 return fold_convert (type
, t1
);
1295 else if (code
== MINUS_EXPR
)
1297 if (integer_zerop (t2
))
1298 return fold_convert (type
, t1
);
1301 return build2 (code
, type
, fold_convert (type
, t1
),
1302 fold_convert (type
, t2
));
1305 return fold_build2 (code
, type
, fold_convert (type
, t1
),
1306 fold_convert (type
, t2
));
1309 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1310 to produce a new constant.
1312 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1315 int_const_binop (enum tree_code code
, tree arg1
, tree arg2
, int notrunc
)
1317 unsigned HOST_WIDE_INT int1l
, int2l
;
1318 HOST_WIDE_INT int1h
, int2h
;
1319 unsigned HOST_WIDE_INT low
;
1321 unsigned HOST_WIDE_INT garbagel
;
1322 HOST_WIDE_INT garbageh
;
1324 tree type
= TREE_TYPE (arg1
);
1325 int uns
= TYPE_UNSIGNED (type
);
1327 = (TREE_CODE (type
) == INTEGER_TYPE
&& TYPE_IS_SIZETYPE (type
));
1330 int1l
= TREE_INT_CST_LOW (arg1
);
1331 int1h
= TREE_INT_CST_HIGH (arg1
);
1332 int2l
= TREE_INT_CST_LOW (arg2
);
1333 int2h
= TREE_INT_CST_HIGH (arg2
);
1338 low
= int1l
| int2l
, hi
= int1h
| int2h
;
1342 low
= int1l
^ int2l
, hi
= int1h
^ int2h
;
1346 low
= int1l
& int2l
, hi
= int1h
& int2h
;
1352 /* It's unclear from the C standard whether shifts can overflow.
1353 The following code ignores overflow; perhaps a C standard
1354 interpretation ruling is needed. */
1355 lshift_double (int1l
, int1h
, int2l
, TYPE_PRECISION (type
),
1362 lrotate_double (int1l
, int1h
, int2l
, TYPE_PRECISION (type
),
1367 overflow
= add_double (int1l
, int1h
, int2l
, int2h
, &low
, &hi
);
1371 neg_double (int2l
, int2h
, &low
, &hi
);
1372 add_double (int1l
, int1h
, low
, hi
, &low
, &hi
);
1373 overflow
= OVERFLOW_SUM_SIGN (hi
, int2h
, int1h
);
1377 overflow
= mul_double (int1l
, int1h
, int2l
, int2h
, &low
, &hi
);
1380 case TRUNC_DIV_EXPR
:
1381 case FLOOR_DIV_EXPR
: case CEIL_DIV_EXPR
:
1382 case EXACT_DIV_EXPR
:
1383 /* This is a shortcut for a common special case. */
1384 if (int2h
== 0 && (HOST_WIDE_INT
) int2l
> 0
1385 && ! TREE_CONSTANT_OVERFLOW (arg1
)
1386 && ! TREE_CONSTANT_OVERFLOW (arg2
)
1387 && int1h
== 0 && (HOST_WIDE_INT
) int1l
>= 0)
1389 if (code
== CEIL_DIV_EXPR
)
1392 low
= int1l
/ int2l
, hi
= 0;
1396 /* ... fall through ... */
1398 case ROUND_DIV_EXPR
:
1399 if (int2h
== 0 && int2l
== 1)
1401 low
= int1l
, hi
= int1h
;
1404 if (int1l
== int2l
&& int1h
== int2h
1405 && ! (int1l
== 0 && int1h
== 0))
1410 overflow
= div_and_round_double (code
, uns
, int1l
, int1h
, int2l
, int2h
,
1411 &low
, &hi
, &garbagel
, &garbageh
);
1414 case TRUNC_MOD_EXPR
:
1415 case FLOOR_MOD_EXPR
: case CEIL_MOD_EXPR
:
1416 /* This is a shortcut for a common special case. */
1417 if (int2h
== 0 && (HOST_WIDE_INT
) int2l
> 0
1418 && ! TREE_CONSTANT_OVERFLOW (arg1
)
1419 && ! TREE_CONSTANT_OVERFLOW (arg2
)
1420 && int1h
== 0 && (HOST_WIDE_INT
) int1l
>= 0)
1422 if (code
== CEIL_MOD_EXPR
)
1424 low
= int1l
% int2l
, hi
= 0;
1428 /* ... fall through ... */
1430 case ROUND_MOD_EXPR
:
1431 overflow
= div_and_round_double (code
, uns
,
1432 int1l
, int1h
, int2l
, int2h
,
1433 &garbagel
, &garbageh
, &low
, &hi
);
1439 low
= (((unsigned HOST_WIDE_INT
) int1h
1440 < (unsigned HOST_WIDE_INT
) int2h
)
1441 || (((unsigned HOST_WIDE_INT
) int1h
1442 == (unsigned HOST_WIDE_INT
) int2h
)
1445 low
= (int1h
< int2h
1446 || (int1h
== int2h
&& int1l
< int2l
));
1448 if (low
== (code
== MIN_EXPR
))
1449 low
= int1l
, hi
= int1h
;
1451 low
= int2l
, hi
= int2h
;
1458 t
= build_int_cst_wide (TREE_TYPE (arg1
), low
, hi
);
1462 /* Propagate overflow flags ourselves. */
1463 if (((!uns
|| is_sizetype
) && overflow
)
1464 | TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
))
1467 TREE_OVERFLOW (t
) = 1;
1468 TREE_CONSTANT_OVERFLOW (t
) = 1;
1470 else if (TREE_CONSTANT_OVERFLOW (arg1
) | TREE_CONSTANT_OVERFLOW (arg2
))
1473 TREE_CONSTANT_OVERFLOW (t
) = 1;
1477 t
= force_fit_type (t
, 1,
1478 ((!uns
|| is_sizetype
) && overflow
)
1479 | TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
),
1480 TREE_CONSTANT_OVERFLOW (arg1
)
1481 | TREE_CONSTANT_OVERFLOW (arg2
));
1486 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1487 constant. We assume ARG1 and ARG2 have the same data type, or at least
1488 are the same kind of constant and the same machine mode.
1490 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1493 const_binop (enum tree_code code
, tree arg1
, tree arg2
, int notrunc
)
1498 if (TREE_CODE (arg1
) == INTEGER_CST
)
1499 return int_const_binop (code
, arg1
, arg2
, notrunc
);
1501 if (TREE_CODE (arg1
) == REAL_CST
)
1503 enum machine_mode mode
;
1506 REAL_VALUE_TYPE value
;
1507 REAL_VALUE_TYPE result
;
1511 d1
= TREE_REAL_CST (arg1
);
1512 d2
= TREE_REAL_CST (arg2
);
1514 type
= TREE_TYPE (arg1
);
1515 mode
= TYPE_MODE (type
);
1517 /* Don't perform operation if we honor signaling NaNs and
1518 either operand is a NaN. */
1519 if (HONOR_SNANS (mode
)
1520 && (REAL_VALUE_ISNAN (d1
) || REAL_VALUE_ISNAN (d2
)))
1523 /* Don't perform operation if it would raise a division
1524 by zero exception. */
1525 if (code
== RDIV_EXPR
1526 && REAL_VALUES_EQUAL (d2
, dconst0
)
1527 && (flag_trapping_math
|| ! MODE_HAS_INFINITIES (mode
)))
1530 /* If either operand is a NaN, just return it. Otherwise, set up
1531 for floating-point trap; we return an overflow. */
1532 if (REAL_VALUE_ISNAN (d1
))
1534 else if (REAL_VALUE_ISNAN (d2
))
1537 inexact
= real_arithmetic (&value
, code
, &d1
, &d2
);
1538 real_convert (&result
, mode
, &value
);
1540 /* Don't constant fold this floating point operation if the
1541 result may dependent upon the run-time rounding mode and
1542 flag_rounding_math is set, or if GCC's software emulation
1543 is unable to accurately represent the result. */
1545 if ((flag_rounding_math
1546 || (REAL_MODE_FORMAT_COMPOSITE_P (mode
)
1547 && !flag_unsafe_math_optimizations
))
1548 && (inexact
|| !real_identical (&result
, &value
)))
1551 t
= build_real (type
, result
);
1553 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
);
1554 TREE_CONSTANT_OVERFLOW (t
)
1556 | TREE_CONSTANT_OVERFLOW (arg1
)
1557 | TREE_CONSTANT_OVERFLOW (arg2
);
1560 if (TREE_CODE (arg1
) == COMPLEX_CST
)
1562 tree type
= TREE_TYPE (arg1
);
1563 tree r1
= TREE_REALPART (arg1
);
1564 tree i1
= TREE_IMAGPART (arg1
);
1565 tree r2
= TREE_REALPART (arg2
);
1566 tree i2
= TREE_IMAGPART (arg2
);
1572 t
= build_complex (type
,
1573 const_binop (PLUS_EXPR
, r1
, r2
, notrunc
),
1574 const_binop (PLUS_EXPR
, i1
, i2
, notrunc
));
1578 t
= build_complex (type
,
1579 const_binop (MINUS_EXPR
, r1
, r2
, notrunc
),
1580 const_binop (MINUS_EXPR
, i1
, i2
, notrunc
));
1584 t
= build_complex (type
,
1585 const_binop (MINUS_EXPR
,
1586 const_binop (MULT_EXPR
,
1588 const_binop (MULT_EXPR
,
1591 const_binop (PLUS_EXPR
,
1592 const_binop (MULT_EXPR
,
1594 const_binop (MULT_EXPR
,
1601 tree t1
, t2
, real
, imag
;
1603 = const_binop (PLUS_EXPR
,
1604 const_binop (MULT_EXPR
, r2
, r2
, notrunc
),
1605 const_binop (MULT_EXPR
, i2
, i2
, notrunc
),
1608 t1
= const_binop (PLUS_EXPR
,
1609 const_binop (MULT_EXPR
, r1
, r2
, notrunc
),
1610 const_binop (MULT_EXPR
, i1
, i2
, notrunc
),
1612 t2
= const_binop (MINUS_EXPR
,
1613 const_binop (MULT_EXPR
, i1
, r2
, notrunc
),
1614 const_binop (MULT_EXPR
, r1
, i2
, notrunc
),
1617 if (INTEGRAL_TYPE_P (TREE_TYPE (r1
)))
1619 real
= const_binop (TRUNC_DIV_EXPR
, t1
, magsquared
, notrunc
);
1620 imag
= const_binop (TRUNC_DIV_EXPR
, t2
, magsquared
, notrunc
);
1624 real
= const_binop (RDIV_EXPR
, t1
, magsquared
, notrunc
);
1625 imag
= const_binop (RDIV_EXPR
, t2
, magsquared
, notrunc
);
1630 t
= build_complex (type
, real
, imag
);
1642 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1643 indicates which particular sizetype to create. */
1646 size_int_kind (HOST_WIDE_INT number
, enum size_type_kind kind
)
1648 return build_int_cst (sizetype_tab
[(int) kind
], number
);
1651 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1652 is a tree code. The type of the result is taken from the operands.
1653 Both must be the same type integer type and it must be a size type.
1654 If the operands are constant, so is the result. */
1657 size_binop (enum tree_code code
, tree arg0
, tree arg1
)
1659 tree type
= TREE_TYPE (arg0
);
1661 gcc_assert (TREE_CODE (type
) == INTEGER_TYPE
&& TYPE_IS_SIZETYPE (type
)
1662 && type
== TREE_TYPE (arg1
));
1664 /* Handle the special case of two integer constants faster. */
1665 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
1667 /* And some specific cases even faster than that. */
1668 if (code
== PLUS_EXPR
&& integer_zerop (arg0
))
1670 else if ((code
== MINUS_EXPR
|| code
== PLUS_EXPR
)
1671 && integer_zerop (arg1
))
1673 else if (code
== MULT_EXPR
&& integer_onep (arg0
))
1676 /* Handle general case of two integer constants. */
1677 return int_const_binop (code
, arg0
, arg1
, 0);
1680 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
1681 return error_mark_node
;
1683 return fold_build2 (code
, type
, arg0
, arg1
);
1686 /* Given two values, either both of sizetype or both of bitsizetype,
1687 compute the difference between the two values. Return the value
1688 in signed type corresponding to the type of the operands. */
1691 size_diffop (tree arg0
, tree arg1
)
1693 tree type
= TREE_TYPE (arg0
);
1696 gcc_assert (TREE_CODE (type
) == INTEGER_TYPE
&& TYPE_IS_SIZETYPE (type
)
1697 && type
== TREE_TYPE (arg1
));
1699 /* If the type is already signed, just do the simple thing. */
1700 if (!TYPE_UNSIGNED (type
))
1701 return size_binop (MINUS_EXPR
, arg0
, arg1
);
1703 ctype
= type
== bitsizetype
? sbitsizetype
: ssizetype
;
1705 /* If either operand is not a constant, do the conversions to the signed
1706 type and subtract. The hardware will do the right thing with any
1707 overflow in the subtraction. */
1708 if (TREE_CODE (arg0
) != INTEGER_CST
|| TREE_CODE (arg1
) != INTEGER_CST
)
1709 return size_binop (MINUS_EXPR
, fold_convert (ctype
, arg0
),
1710 fold_convert (ctype
, arg1
));
1712 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1713 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1714 overflow) and negate (which can't either). Special-case a result
1715 of zero while we're here. */
1716 if (tree_int_cst_equal (arg0
, arg1
))
1717 return fold_convert (ctype
, integer_zero_node
);
1718 else if (tree_int_cst_lt (arg1
, arg0
))
1719 return fold_convert (ctype
, size_binop (MINUS_EXPR
, arg0
, arg1
));
1721 return size_binop (MINUS_EXPR
, fold_convert (ctype
, integer_zero_node
),
1722 fold_convert (ctype
, size_binop (MINUS_EXPR
,
1726 /* A subroutine of fold_convert_const handling conversions of an
1727 INTEGER_CST to another integer type. */
1730 fold_convert_const_int_from_int (tree type
, tree arg1
)
1734 /* Given an integer constant, make new constant with new type,
1735 appropriately sign-extended or truncated. */
1736 t
= build_int_cst_wide (type
, TREE_INT_CST_LOW (arg1
),
1737 TREE_INT_CST_HIGH (arg1
));
1739 t
= force_fit_type (t
,
1740 /* Don't set the overflow when
1741 converting a pointer */
1742 !POINTER_TYPE_P (TREE_TYPE (arg1
)),
1743 (TREE_INT_CST_HIGH (arg1
) < 0
1744 && (TYPE_UNSIGNED (type
)
1745 < TYPE_UNSIGNED (TREE_TYPE (arg1
))))
1746 | TREE_OVERFLOW (arg1
),
1747 TREE_CONSTANT_OVERFLOW (arg1
));
1752 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1753 to an integer type. */
1756 fold_convert_const_int_from_real (enum tree_code code
, tree type
, tree arg1
)
1761 /* The following code implements the floating point to integer
1762 conversion rules required by the Java Language Specification,
1763 that IEEE NaNs are mapped to zero and values that overflow
1764 the target precision saturate, i.e. values greater than
1765 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1766 are mapped to INT_MIN. These semantics are allowed by the
1767 C and C++ standards that simply state that the behavior of
1768 FP-to-integer conversion is unspecified upon overflow. */
1770 HOST_WIDE_INT high
, low
;
1772 REAL_VALUE_TYPE x
= TREE_REAL_CST (arg1
);
1776 case FIX_TRUNC_EXPR
:
1777 real_trunc (&r
, VOIDmode
, &x
);
1781 real_ceil (&r
, VOIDmode
, &x
);
1784 case FIX_FLOOR_EXPR
:
1785 real_floor (&r
, VOIDmode
, &x
);
1788 case FIX_ROUND_EXPR
:
1789 real_round (&r
, VOIDmode
, &x
);
1796 /* If R is NaN, return zero and show we have an overflow. */
1797 if (REAL_VALUE_ISNAN (r
))
1804 /* See if R is less than the lower bound or greater than the
1809 tree lt
= TYPE_MIN_VALUE (type
);
1810 REAL_VALUE_TYPE l
= real_value_from_int_cst (NULL_TREE
, lt
);
1811 if (REAL_VALUES_LESS (r
, l
))
1814 high
= TREE_INT_CST_HIGH (lt
);
1815 low
= TREE_INT_CST_LOW (lt
);
1821 tree ut
= TYPE_MAX_VALUE (type
);
1824 REAL_VALUE_TYPE u
= real_value_from_int_cst (NULL_TREE
, ut
);
1825 if (REAL_VALUES_LESS (u
, r
))
1828 high
= TREE_INT_CST_HIGH (ut
);
1829 low
= TREE_INT_CST_LOW (ut
);
1835 REAL_VALUE_TO_INT (&low
, &high
, r
);
1837 t
= build_int_cst_wide (type
, low
, high
);
1839 t
= force_fit_type (t
, -1, overflow
| TREE_OVERFLOW (arg1
),
1840 TREE_CONSTANT_OVERFLOW (arg1
));
1844 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1845 to another floating point type. */
1848 fold_convert_const_real_from_real (tree type
, tree arg1
)
1850 REAL_VALUE_TYPE value
;
1853 real_convert (&value
, TYPE_MODE (type
), &TREE_REAL_CST (arg1
));
1854 t
= build_real (type
, value
);
1856 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
1857 TREE_CONSTANT_OVERFLOW (t
)
1858 = TREE_OVERFLOW (t
) | TREE_CONSTANT_OVERFLOW (arg1
);
1862 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1863 type TYPE. If no simplification can be done return NULL_TREE. */
1866 fold_convert_const (enum tree_code code
, tree type
, tree arg1
)
1868 if (TREE_TYPE (arg1
) == type
)
1871 if (POINTER_TYPE_P (type
) || INTEGRAL_TYPE_P (type
))
1873 if (TREE_CODE (arg1
) == INTEGER_CST
)
1874 return fold_convert_const_int_from_int (type
, arg1
);
1875 else if (TREE_CODE (arg1
) == REAL_CST
)
1876 return fold_convert_const_int_from_real (code
, type
, arg1
);
1878 else if (TREE_CODE (type
) == REAL_TYPE
)
1880 if (TREE_CODE (arg1
) == INTEGER_CST
)
1881 return build_real_from_int_cst (type
, arg1
);
1882 if (TREE_CODE (arg1
) == REAL_CST
)
1883 return fold_convert_const_real_from_real (type
, arg1
);
1888 /* Construct a vector of zero elements of vector type TYPE. */
1891 build_zero_vector (tree type
)
1896 elem
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), integer_zero_node
);
1897 units
= TYPE_VECTOR_SUBPARTS (type
);
1900 for (i
= 0; i
< units
; i
++)
1901 list
= tree_cons (NULL_TREE
, elem
, list
);
1902 return build_vector (type
, list
);
1905 /* Convert expression ARG to type TYPE. Used by the middle-end for
1906 simple conversions in preference to calling the front-end's convert. */
1909 fold_convert (tree type
, tree arg
)
1911 tree orig
= TREE_TYPE (arg
);
1917 if (TREE_CODE (arg
) == ERROR_MARK
1918 || TREE_CODE (type
) == ERROR_MARK
1919 || TREE_CODE (orig
) == ERROR_MARK
)
1920 return error_mark_node
;
1922 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
)
1923 || lang_hooks
.types_compatible_p (TYPE_MAIN_VARIANT (type
),
1924 TYPE_MAIN_VARIANT (orig
)))
1925 return fold_build1 (NOP_EXPR
, type
, arg
);
1927 switch (TREE_CODE (type
))
1929 case INTEGER_TYPE
: case CHAR_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
1930 case POINTER_TYPE
: case REFERENCE_TYPE
:
1932 if (TREE_CODE (arg
) == INTEGER_CST
)
1934 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
1935 if (tem
!= NULL_TREE
)
1938 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
1939 || TREE_CODE (orig
) == OFFSET_TYPE
)
1940 return fold_build1 (NOP_EXPR
, type
, arg
);
1941 if (TREE_CODE (orig
) == COMPLEX_TYPE
)
1943 tem
= fold_build1 (REALPART_EXPR
, TREE_TYPE (orig
), arg
);
1944 return fold_convert (type
, tem
);
1946 gcc_assert (TREE_CODE (orig
) == VECTOR_TYPE
1947 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
1948 return fold_build1 (NOP_EXPR
, type
, arg
);
1951 if (TREE_CODE (arg
) == INTEGER_CST
)
1953 tem
= fold_convert_const (FLOAT_EXPR
, type
, arg
);
1954 if (tem
!= NULL_TREE
)
1957 else if (TREE_CODE (arg
) == REAL_CST
)
1959 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
1960 if (tem
!= NULL_TREE
)
1964 switch (TREE_CODE (orig
))
1966 case INTEGER_TYPE
: case CHAR_TYPE
:
1967 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
1968 case POINTER_TYPE
: case REFERENCE_TYPE
:
1969 return fold_build1 (FLOAT_EXPR
, type
, arg
);
1972 return fold_build1 (flag_float_store
? CONVERT_EXPR
: NOP_EXPR
,
1976 tem
= fold_build1 (REALPART_EXPR
, TREE_TYPE (orig
), arg
);
1977 return fold_convert (type
, tem
);
1984 switch (TREE_CODE (orig
))
1986 case INTEGER_TYPE
: case CHAR_TYPE
:
1987 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
1988 case POINTER_TYPE
: case REFERENCE_TYPE
:
1990 return build2 (COMPLEX_EXPR
, type
,
1991 fold_convert (TREE_TYPE (type
), arg
),
1992 fold_convert (TREE_TYPE (type
), integer_zero_node
));
1997 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
1999 rpart
= fold_convert (TREE_TYPE (type
), TREE_OPERAND (arg
, 0));
2000 ipart
= fold_convert (TREE_TYPE (type
), TREE_OPERAND (arg
, 1));
2001 return fold_build2 (COMPLEX_EXPR
, type
, rpart
, ipart
);
2004 arg
= save_expr (arg
);
2005 rpart
= fold_build1 (REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2006 ipart
= fold_build1 (IMAGPART_EXPR
, TREE_TYPE (orig
), arg
);
2007 rpart
= fold_convert (TREE_TYPE (type
), rpart
);
2008 ipart
= fold_convert (TREE_TYPE (type
), ipart
);
2009 return fold_build2 (COMPLEX_EXPR
, type
, rpart
, ipart
);
2017 if (integer_zerop (arg
))
2018 return build_zero_vector (type
);
2019 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2020 gcc_assert (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2021 || TREE_CODE (orig
) == VECTOR_TYPE
);
2022 return fold_build1 (VIEW_CONVERT_EXPR
, type
, arg
);
2025 return fold_build1 (CONVERT_EXPR
, type
, fold_ignored_result (arg
));
2032 /* Return false if expr can be assumed not to be an lvalue, true
2036 maybe_lvalue_p (tree x
)
2038 /* We only need to wrap lvalue tree codes. */
2039 switch (TREE_CODE (x
))
2050 case ALIGN_INDIRECT_REF
:
2051 case MISALIGNED_INDIRECT_REF
:
2053 case ARRAY_RANGE_REF
:
2059 case PREINCREMENT_EXPR
:
2060 case PREDECREMENT_EXPR
:
2062 case TRY_CATCH_EXPR
:
2063 case WITH_CLEANUP_EXPR
:
2074 /* Assume the worst for front-end tree codes. */
2075 if ((int)TREE_CODE (x
) >= NUM_TREE_CODES
)
2083 /* Return an expr equal to X but certainly not valid as an lvalue. */
2088 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2093 if (! maybe_lvalue_p (x
))
2095 return build1 (NON_LVALUE_EXPR
, TREE_TYPE (x
), x
);
2098 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2099 Zero means allow extended lvalues. */
2101 int pedantic_lvalues
;
2103 /* When pedantic, return an expr equal to X but certainly not valid as a
2104 pedantic lvalue. Otherwise, return X. */
2107 pedantic_non_lvalue (tree x
)
2109 if (pedantic_lvalues
)
2110 return non_lvalue (x
);
2115 /* Given a tree comparison code, return the code that is the logical inverse
2116 of the given code. It is not safe to do this for floating-point
2117 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2118 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2121 invert_tree_comparison (enum tree_code code
, bool honor_nans
)
2123 if (honor_nans
&& flag_trapping_math
)
2133 return honor_nans
? UNLE_EXPR
: LE_EXPR
;
2135 return honor_nans
? UNLT_EXPR
: LT_EXPR
;
2137 return honor_nans
? UNGE_EXPR
: GE_EXPR
;
2139 return honor_nans
? UNGT_EXPR
: GT_EXPR
;
2153 return UNORDERED_EXPR
;
2154 case UNORDERED_EXPR
:
2155 return ORDERED_EXPR
;
2161 /* Similar, but return the comparison that results if the operands are
2162 swapped. This is safe for floating-point. */
2165 swap_tree_comparison (enum tree_code code
)
2172 case UNORDERED_EXPR
:
2198 /* Convert a comparison tree code from an enum tree_code representation
2199 into a compcode bit-based encoding. This function is the inverse of
2200 compcode_to_comparison. */
2202 static enum comparison_code
2203 comparison_to_compcode (enum tree_code code
)
2220 return COMPCODE_ORD
;
2221 case UNORDERED_EXPR
:
2222 return COMPCODE_UNORD
;
2224 return COMPCODE_UNLT
;
2226 return COMPCODE_UNEQ
;
2228 return COMPCODE_UNLE
;
2230 return COMPCODE_UNGT
;
2232 return COMPCODE_LTGT
;
2234 return COMPCODE_UNGE
;
2240 /* Convert a compcode bit-based encoding of a comparison operator back
2241 to GCC's enum tree_code representation. This function is the
2242 inverse of comparison_to_compcode. */
2244 static enum tree_code
2245 compcode_to_comparison (enum comparison_code code
)
2262 return ORDERED_EXPR
;
2263 case COMPCODE_UNORD
:
2264 return UNORDERED_EXPR
;
2282 /* Return a tree for the comparison which is the combination of
2283 doing the AND or OR (depending on CODE) of the two operations LCODE
2284 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2285 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2286 if this makes the transformation invalid. */
2289 combine_comparisons (enum tree_code code
, enum tree_code lcode
,
2290 enum tree_code rcode
, tree truth_type
,
2291 tree ll_arg
, tree lr_arg
)
2293 bool honor_nans
= HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg
)));
2294 enum comparison_code lcompcode
= comparison_to_compcode (lcode
);
2295 enum comparison_code rcompcode
= comparison_to_compcode (rcode
);
2296 enum comparison_code compcode
;
2300 case TRUTH_AND_EXPR
: case TRUTH_ANDIF_EXPR
:
2301 compcode
= lcompcode
& rcompcode
;
2304 case TRUTH_OR_EXPR
: case TRUTH_ORIF_EXPR
:
2305 compcode
= lcompcode
| rcompcode
;
2314 /* Eliminate unordered comparisons, as well as LTGT and ORD
2315 which are not used unless the mode has NaNs. */
2316 compcode
&= ~COMPCODE_UNORD
;
2317 if (compcode
== COMPCODE_LTGT
)
2318 compcode
= COMPCODE_NE
;
2319 else if (compcode
== COMPCODE_ORD
)
2320 compcode
= COMPCODE_TRUE
;
2322 else if (flag_trapping_math
)
2324 /* Check that the original operation and the optimized ones will trap
2325 under the same condition. */
2326 bool ltrap
= (lcompcode
& COMPCODE_UNORD
) == 0
2327 && (lcompcode
!= COMPCODE_EQ
)
2328 && (lcompcode
!= COMPCODE_ORD
);
2329 bool rtrap
= (rcompcode
& COMPCODE_UNORD
) == 0
2330 && (rcompcode
!= COMPCODE_EQ
)
2331 && (rcompcode
!= COMPCODE_ORD
);
2332 bool trap
= (compcode
& COMPCODE_UNORD
) == 0
2333 && (compcode
!= COMPCODE_EQ
)
2334 && (compcode
!= COMPCODE_ORD
);
2336 /* In a short-circuited boolean expression the LHS might be
2337 such that the RHS, if evaluated, will never trap. For
2338 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2339 if neither x nor y is NaN. (This is a mixed blessing: for
2340 example, the expression above will never trap, hence
2341 optimizing it to x < y would be invalid). */
2342 if ((code
== TRUTH_ORIF_EXPR
&& (lcompcode
& COMPCODE_UNORD
))
2343 || (code
== TRUTH_ANDIF_EXPR
&& !(lcompcode
& COMPCODE_UNORD
)))
2346 /* If the comparison was short-circuited, and only the RHS
2347 trapped, we may now generate a spurious trap. */
2349 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2352 /* If we changed the conditions that cause a trap, we lose. */
2353 if ((ltrap
|| rtrap
) != trap
)
2357 if (compcode
== COMPCODE_TRUE
)
2358 return constant_boolean_node (true, truth_type
);
2359 else if (compcode
== COMPCODE_FALSE
)
2360 return constant_boolean_node (false, truth_type
);
2362 return fold_build2 (compcode_to_comparison (compcode
),
2363 truth_type
, ll_arg
, lr_arg
);
2366 /* Return nonzero if CODE is a tree code that represents a truth value. */
2369 truth_value_p (enum tree_code code
)
2371 return (TREE_CODE_CLASS (code
) == tcc_comparison
2372 || code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
2373 || code
== TRUTH_OR_EXPR
|| code
== TRUTH_ORIF_EXPR
2374 || code
== TRUTH_XOR_EXPR
|| code
== TRUTH_NOT_EXPR
);
2377 /* Return nonzero if two operands (typically of the same tree node)
2378 are necessarily equal. If either argument has side-effects this
2379 function returns zero. FLAGS modifies behavior as follows:
2381 If OEP_ONLY_CONST is set, only return nonzero for constants.
2382 This function tests whether the operands are indistinguishable;
2383 it does not test whether they are equal using C's == operation.
2384 The distinction is important for IEEE floating point, because
2385 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2386 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2388 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2389 even though it may hold multiple values during a function.
2390 This is because a GCC tree node guarantees that nothing else is
2391 executed between the evaluation of its "operands" (which may often
2392 be evaluated in arbitrary order). Hence if the operands themselves
2393 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2394 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2395 unset means assuming isochronic (or instantaneous) tree equivalence.
2396 Unless comparing arbitrary expression trees, such as from different
2397 statements, this flag can usually be left unset.
2399 If OEP_PURE_SAME is set, then pure functions with identical arguments
2400 are considered the same. It is used when the caller has other ways
2401 to ensure that global memory is unchanged in between. */
2404 operand_equal_p (tree arg0
, tree arg1
, unsigned int flags
)
2406 /* If either is ERROR_MARK, they aren't equal. */
2407 if (TREE_CODE (arg0
) == ERROR_MARK
|| TREE_CODE (arg1
) == ERROR_MARK
)
2410 /* If both types don't have the same signedness, then we can't consider
2411 them equal. We must check this before the STRIP_NOPS calls
2412 because they may change the signedness of the arguments. */
2413 if (TYPE_UNSIGNED (TREE_TYPE (arg0
)) != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
2419 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2420 /* This is needed for conversions and for COMPONENT_REF.
2421 Might as well play it safe and always test this. */
2422 || TREE_CODE (TREE_TYPE (arg0
)) == ERROR_MARK
2423 || TREE_CODE (TREE_TYPE (arg1
)) == ERROR_MARK
2424 || TYPE_MODE (TREE_TYPE (arg0
)) != TYPE_MODE (TREE_TYPE (arg1
)))
2427 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2428 We don't care about side effects in that case because the SAVE_EXPR
2429 takes care of that for us. In all other cases, two expressions are
2430 equal if they have no side effects. If we have two identical
2431 expressions with side effects that should be treated the same due
2432 to the only side effects being identical SAVE_EXPR's, that will
2433 be detected in the recursive calls below. */
2434 if (arg0
== arg1
&& ! (flags
& OEP_ONLY_CONST
)
2435 && (TREE_CODE (arg0
) == SAVE_EXPR
2436 || (! TREE_SIDE_EFFECTS (arg0
) && ! TREE_SIDE_EFFECTS (arg1
))))
2439 /* Next handle constant cases, those for which we can return 1 even
2440 if ONLY_CONST is set. */
2441 if (TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
))
2442 switch (TREE_CODE (arg0
))
2445 return (! TREE_CONSTANT_OVERFLOW (arg0
)
2446 && ! TREE_CONSTANT_OVERFLOW (arg1
)
2447 && tree_int_cst_equal (arg0
, arg1
));
2450 return (! TREE_CONSTANT_OVERFLOW (arg0
)
2451 && ! TREE_CONSTANT_OVERFLOW (arg1
)
2452 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0
),
2453 TREE_REAL_CST (arg1
)));
2459 if (TREE_CONSTANT_OVERFLOW (arg0
)
2460 || TREE_CONSTANT_OVERFLOW (arg1
))
2463 v1
= TREE_VECTOR_CST_ELTS (arg0
);
2464 v2
= TREE_VECTOR_CST_ELTS (arg1
);
2467 if (!operand_equal_p (TREE_VALUE (v1
), TREE_VALUE (v2
),
2470 v1
= TREE_CHAIN (v1
);
2471 v2
= TREE_CHAIN (v2
);
2478 return (operand_equal_p (TREE_REALPART (arg0
), TREE_REALPART (arg1
),
2480 && operand_equal_p (TREE_IMAGPART (arg0
), TREE_IMAGPART (arg1
),
2484 return (TREE_STRING_LENGTH (arg0
) == TREE_STRING_LENGTH (arg1
)
2485 && ! memcmp (TREE_STRING_POINTER (arg0
),
2486 TREE_STRING_POINTER (arg1
),
2487 TREE_STRING_LENGTH (arg0
)));
2490 return operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0),
2496 if (flags
& OEP_ONLY_CONST
)
2499 /* Define macros to test an operand from arg0 and arg1 for equality and a
2500 variant that allows null and views null as being different from any
2501 non-null value. In the latter case, if either is null, the both
2502 must be; otherwise, do the normal comparison. */
2503 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2504 TREE_OPERAND (arg1, N), flags)
2506 #define OP_SAME_WITH_NULL(N) \
2507 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2508 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2510 switch (TREE_CODE_CLASS (TREE_CODE (arg0
)))
2513 /* Two conversions are equal only if signedness and modes match. */
2514 switch (TREE_CODE (arg0
))
2519 case FIX_TRUNC_EXPR
:
2520 case FIX_FLOOR_EXPR
:
2521 case FIX_ROUND_EXPR
:
2522 if (TYPE_UNSIGNED (TREE_TYPE (arg0
))
2523 != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
2533 case tcc_comparison
:
2535 if (OP_SAME (0) && OP_SAME (1))
2538 /* For commutative ops, allow the other order. */
2539 return (commutative_tree_code (TREE_CODE (arg0
))
2540 && operand_equal_p (TREE_OPERAND (arg0
, 0),
2541 TREE_OPERAND (arg1
, 1), flags
)
2542 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2543 TREE_OPERAND (arg1
, 0), flags
));
2546 /* If either of the pointer (or reference) expressions we are
2547 dereferencing contain a side effect, these cannot be equal. */
2548 if (TREE_SIDE_EFFECTS (arg0
)
2549 || TREE_SIDE_EFFECTS (arg1
))
2552 switch (TREE_CODE (arg0
))
2555 case ALIGN_INDIRECT_REF
:
2556 case MISALIGNED_INDIRECT_REF
:
2562 case ARRAY_RANGE_REF
:
2563 /* Operands 2 and 3 may be null. */
2566 && OP_SAME_WITH_NULL (2)
2567 && OP_SAME_WITH_NULL (3));
2570 /* Handle operand 2 the same as for ARRAY_REF. */
2571 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2574 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2580 case tcc_expression
:
2581 switch (TREE_CODE (arg0
))
2584 case TRUTH_NOT_EXPR
:
2587 case TRUTH_ANDIF_EXPR
:
2588 case TRUTH_ORIF_EXPR
:
2589 return OP_SAME (0) && OP_SAME (1);
2591 case TRUTH_AND_EXPR
:
2593 case TRUTH_XOR_EXPR
:
2594 if (OP_SAME (0) && OP_SAME (1))
2597 /* Otherwise take into account this is a commutative operation. */
2598 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
2599 TREE_OPERAND (arg1
, 1), flags
)
2600 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2601 TREE_OPERAND (arg1
, 0), flags
));
2604 /* If the CALL_EXPRs call different functions, then they
2605 clearly can not be equal. */
2610 unsigned int cef
= call_expr_flags (arg0
);
2611 if (flags
& OEP_PURE_SAME
)
2612 cef
&= ECF_CONST
| ECF_PURE
;
2619 /* Now see if all the arguments are the same. operand_equal_p
2620 does not handle TREE_LIST, so we walk the operands here
2621 feeding them to operand_equal_p. */
2622 arg0
= TREE_OPERAND (arg0
, 1);
2623 arg1
= TREE_OPERAND (arg1
, 1);
2624 while (arg0
&& arg1
)
2626 if (! operand_equal_p (TREE_VALUE (arg0
), TREE_VALUE (arg1
),
2630 arg0
= TREE_CHAIN (arg0
);
2631 arg1
= TREE_CHAIN (arg1
);
2634 /* If we get here and both argument lists are exhausted
2635 then the CALL_EXPRs are equal. */
2636 return ! (arg0
|| arg1
);
2642 case tcc_declaration
:
2643 /* Consider __builtin_sqrt equal to sqrt. */
2644 return (TREE_CODE (arg0
) == FUNCTION_DECL
2645 && DECL_BUILT_IN (arg0
) && DECL_BUILT_IN (arg1
)
2646 && DECL_BUILT_IN_CLASS (arg0
) == DECL_BUILT_IN_CLASS (arg1
)
2647 && DECL_FUNCTION_CODE (arg0
) == DECL_FUNCTION_CODE (arg1
));
2654 #undef OP_SAME_WITH_NULL
2657 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2658 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2660 When in doubt, return 0. */
2663 operand_equal_for_comparison_p (tree arg0
, tree arg1
, tree other
)
2665 int unsignedp1
, unsignedpo
;
2666 tree primarg0
, primarg1
, primother
;
2667 unsigned int correct_width
;
2669 if (operand_equal_p (arg0
, arg1
, 0))
2672 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
2673 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
2676 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2677 and see if the inner values are the same. This removes any
2678 signedness comparison, which doesn't matter here. */
2679 primarg0
= arg0
, primarg1
= arg1
;
2680 STRIP_NOPS (primarg0
);
2681 STRIP_NOPS (primarg1
);
2682 if (operand_equal_p (primarg0
, primarg1
, 0))
2685 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2686 actual comparison operand, ARG0.
2688 First throw away any conversions to wider types
2689 already present in the operands. */
2691 primarg1
= get_narrower (arg1
, &unsignedp1
);
2692 primother
= get_narrower (other
, &unsignedpo
);
2694 correct_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
2695 if (unsignedp1
== unsignedpo
2696 && TYPE_PRECISION (TREE_TYPE (primarg1
)) < correct_width
2697 && TYPE_PRECISION (TREE_TYPE (primother
)) < correct_width
)
2699 tree type
= TREE_TYPE (arg0
);
2701 /* Make sure shorter operand is extended the right way
2702 to match the longer operand. */
2703 primarg1
= fold_convert (lang_hooks
.types
.signed_or_unsigned_type
2704 (unsignedp1
, TREE_TYPE (primarg1
)), primarg1
);
2706 if (operand_equal_p (arg0
, fold_convert (type
, primarg1
), 0))
2713 /* See if ARG is an expression that is either a comparison or is performing
2714 arithmetic on comparisons. The comparisons must only be comparing
2715 two different values, which will be stored in *CVAL1 and *CVAL2; if
2716 they are nonzero it means that some operands have already been found.
2717 No variables may be used anywhere else in the expression except in the
2718 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2719 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2721 If this is true, return 1. Otherwise, return zero. */
2724 twoval_comparison_p (tree arg
, tree
*cval1
, tree
*cval2
, int *save_p
)
2726 enum tree_code code
= TREE_CODE (arg
);
2727 enum tree_code_class
class = TREE_CODE_CLASS (code
);
2729 /* We can handle some of the tcc_expression cases here. */
2730 if (class == tcc_expression
&& code
== TRUTH_NOT_EXPR
)
2732 else if (class == tcc_expression
2733 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
2734 || code
== COMPOUND_EXPR
))
2737 else if (class == tcc_expression
&& code
== SAVE_EXPR
2738 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg
, 0)))
2740 /* If we've already found a CVAL1 or CVAL2, this expression is
2741 two complex to handle. */
2742 if (*cval1
|| *cval2
)
2752 return twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
);
2755 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
)
2756 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
2757 cval1
, cval2
, save_p
));
2762 case tcc_expression
:
2763 if (code
== COND_EXPR
)
2764 return (twoval_comparison_p (TREE_OPERAND (arg
, 0),
2765 cval1
, cval2
, save_p
)
2766 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
2767 cval1
, cval2
, save_p
)
2768 && twoval_comparison_p (TREE_OPERAND (arg
, 2),
2769 cval1
, cval2
, save_p
));
2772 case tcc_comparison
:
2773 /* First see if we can handle the first operand, then the second. For
2774 the second operand, we know *CVAL1 can't be zero. It must be that
2775 one side of the comparison is each of the values; test for the
2776 case where this isn't true by failing if the two operands
2779 if (operand_equal_p (TREE_OPERAND (arg
, 0),
2780 TREE_OPERAND (arg
, 1), 0))
2784 *cval1
= TREE_OPERAND (arg
, 0);
2785 else if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 0), 0))
2787 else if (*cval2
== 0)
2788 *cval2
= TREE_OPERAND (arg
, 0);
2789 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 0), 0))
2794 if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 1), 0))
2796 else if (*cval2
== 0)
2797 *cval2
= TREE_OPERAND (arg
, 1);
2798 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 1), 0))
2810 /* ARG is a tree that is known to contain just arithmetic operations and
2811 comparisons. Evaluate the operations in the tree substituting NEW0 for
2812 any occurrence of OLD0 as an operand of a comparison and likewise for
2816 eval_subst (tree arg
, tree old0
, tree new0
, tree old1
, tree new1
)
2818 tree type
= TREE_TYPE (arg
);
2819 enum tree_code code
= TREE_CODE (arg
);
2820 enum tree_code_class
class = TREE_CODE_CLASS (code
);
2822 /* We can handle some of the tcc_expression cases here. */
2823 if (class == tcc_expression
&& code
== TRUTH_NOT_EXPR
)
2825 else if (class == tcc_expression
2826 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2832 return fold_build1 (code
, type
,
2833 eval_subst (TREE_OPERAND (arg
, 0),
2834 old0
, new0
, old1
, new1
));
2837 return fold_build2 (code
, type
,
2838 eval_subst (TREE_OPERAND (arg
, 0),
2839 old0
, new0
, old1
, new1
),
2840 eval_subst (TREE_OPERAND (arg
, 1),
2841 old0
, new0
, old1
, new1
));
2843 case tcc_expression
:
2847 return eval_subst (TREE_OPERAND (arg
, 0), old0
, new0
, old1
, new1
);
2850 return eval_subst (TREE_OPERAND (arg
, 1), old0
, new0
, old1
, new1
);
2853 return fold_build3 (code
, type
,
2854 eval_subst (TREE_OPERAND (arg
, 0),
2855 old0
, new0
, old1
, new1
),
2856 eval_subst (TREE_OPERAND (arg
, 1),
2857 old0
, new0
, old1
, new1
),
2858 eval_subst (TREE_OPERAND (arg
, 2),
2859 old0
, new0
, old1
, new1
));
2863 /* Fall through - ??? */
2865 case tcc_comparison
:
2867 tree arg0
= TREE_OPERAND (arg
, 0);
2868 tree arg1
= TREE_OPERAND (arg
, 1);
2870 /* We need to check both for exact equality and tree equality. The
2871 former will be true if the operand has a side-effect. In that
2872 case, we know the operand occurred exactly once. */
2874 if (arg0
== old0
|| operand_equal_p (arg0
, old0
, 0))
2876 else if (arg0
== old1
|| operand_equal_p (arg0
, old1
, 0))
2879 if (arg1
== old0
|| operand_equal_p (arg1
, old0
, 0))
2881 else if (arg1
== old1
|| operand_equal_p (arg1
, old1
, 0))
2884 return fold_build2 (code
, type
, arg0
, arg1
);
2892 /* Return a tree for the case when the result of an expression is RESULT
2893 converted to TYPE and OMITTED was previously an operand of the expression
2894 but is now not needed (e.g., we folded OMITTED * 0).
2896 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2897 the conversion of RESULT to TYPE. */
2900 omit_one_operand (tree type
, tree result
, tree omitted
)
2902 tree t
= fold_convert (type
, result
);
2904 if (TREE_SIDE_EFFECTS (omitted
))
2905 return build2 (COMPOUND_EXPR
, type
, fold_ignored_result (omitted
), t
);
2907 return non_lvalue (t
);
2910 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2913 pedantic_omit_one_operand (tree type
, tree result
, tree omitted
)
2915 tree t
= fold_convert (type
, result
);
2917 if (TREE_SIDE_EFFECTS (omitted
))
2918 return build2 (COMPOUND_EXPR
, type
, fold_ignored_result (omitted
), t
);
2920 return pedantic_non_lvalue (t
);
2923 /* Return a tree for the case when the result of an expression is RESULT
2924 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2925 of the expression but are now not needed.
2927 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2928 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2929 evaluated before OMITTED2. Otherwise, if neither has side effects,
2930 just do the conversion of RESULT to TYPE. */
2933 omit_two_operands (tree type
, tree result
, tree omitted1
, tree omitted2
)
2935 tree t
= fold_convert (type
, result
);
2937 if (TREE_SIDE_EFFECTS (omitted2
))
2938 t
= build2 (COMPOUND_EXPR
, type
, omitted2
, t
);
2939 if (TREE_SIDE_EFFECTS (omitted1
))
2940 t
= build2 (COMPOUND_EXPR
, type
, omitted1
, t
);
2942 return TREE_CODE (t
) != COMPOUND_EXPR
? non_lvalue (t
) : t
;
2946 /* Return a simplified tree node for the truth-negation of ARG. This
2947 never alters ARG itself. We assume that ARG is an operation that
2948 returns a truth value (0 or 1).
2950 FIXME: one would think we would fold the result, but it causes
2951 problems with the dominator optimizer. */
2953 invert_truthvalue (tree arg
)
2955 tree type
= TREE_TYPE (arg
);
2956 enum tree_code code
= TREE_CODE (arg
);
2958 if (code
== ERROR_MARK
)
2961 /* If this is a comparison, we can simply invert it, except for
2962 floating-point non-equality comparisons, in which case we just
2963 enclose a TRUTH_NOT_EXPR around what we have. */
2965 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
2967 tree op_type
= TREE_TYPE (TREE_OPERAND (arg
, 0));
2968 if (FLOAT_TYPE_P (op_type
)
2969 && flag_trapping_math
2970 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
2971 && code
!= NE_EXPR
&& code
!= EQ_EXPR
)
2972 return build1 (TRUTH_NOT_EXPR
, type
, arg
);
2975 code
= invert_tree_comparison (code
,
2976 HONOR_NANS (TYPE_MODE (op_type
)));
2977 if (code
== ERROR_MARK
)
2978 return build1 (TRUTH_NOT_EXPR
, type
, arg
);
2980 return build2 (code
, type
,
2981 TREE_OPERAND (arg
, 0), TREE_OPERAND (arg
, 1));
2988 return constant_boolean_node (integer_zerop (arg
), type
);
2990 case TRUTH_AND_EXPR
:
2991 return build2 (TRUTH_OR_EXPR
, type
,
2992 invert_truthvalue (TREE_OPERAND (arg
, 0)),
2993 invert_truthvalue (TREE_OPERAND (arg
, 1)));
2996 return build2 (TRUTH_AND_EXPR
, type
,
2997 invert_truthvalue (TREE_OPERAND (arg
, 0)),
2998 invert_truthvalue (TREE_OPERAND (arg
, 1)));
3000 case TRUTH_XOR_EXPR
:
3001 /* Here we can invert either operand. We invert the first operand
3002 unless the second operand is a TRUTH_NOT_EXPR in which case our
3003 result is the XOR of the first operand with the inside of the
3004 negation of the second operand. */
3006 if (TREE_CODE (TREE_OPERAND (arg
, 1)) == TRUTH_NOT_EXPR
)
3007 return build2 (TRUTH_XOR_EXPR
, type
, TREE_OPERAND (arg
, 0),
3008 TREE_OPERAND (TREE_OPERAND (arg
, 1), 0));
3010 return build2 (TRUTH_XOR_EXPR
, type
,
3011 invert_truthvalue (TREE_OPERAND (arg
, 0)),
3012 TREE_OPERAND (arg
, 1));
3014 case TRUTH_ANDIF_EXPR
:
3015 return build2 (TRUTH_ORIF_EXPR
, type
,
3016 invert_truthvalue (TREE_OPERAND (arg
, 0)),
3017 invert_truthvalue (TREE_OPERAND (arg
, 1)));
3019 case TRUTH_ORIF_EXPR
:
3020 return build2 (TRUTH_ANDIF_EXPR
, type
,
3021 invert_truthvalue (TREE_OPERAND (arg
, 0)),
3022 invert_truthvalue (TREE_OPERAND (arg
, 1)));
3024 case TRUTH_NOT_EXPR
:
3025 return TREE_OPERAND (arg
, 0);
3029 tree arg1
= TREE_OPERAND (arg
, 1);
3030 tree arg2
= TREE_OPERAND (arg
, 2);
3031 /* A COND_EXPR may have a throw as one operand, which
3032 then has void type. Just leave void operands
3034 return build3 (COND_EXPR
, type
, TREE_OPERAND (arg
, 0),
3035 VOID_TYPE_P (TREE_TYPE (arg1
))
3036 ? arg1
: invert_truthvalue (arg1
),
3037 VOID_TYPE_P (TREE_TYPE (arg2
))
3038 ? arg2
: invert_truthvalue (arg2
));
3042 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg
, 0),
3043 invert_truthvalue (TREE_OPERAND (arg
, 1)));
3045 case NON_LVALUE_EXPR
:
3046 return invert_truthvalue (TREE_OPERAND (arg
, 0));
3049 if (TREE_CODE (TREE_TYPE (arg
)) == BOOLEAN_TYPE
)
3054 return build1 (TREE_CODE (arg
), type
,
3055 invert_truthvalue (TREE_OPERAND (arg
, 0)));
3058 if (!integer_onep (TREE_OPERAND (arg
, 1)))
3060 return build2 (EQ_EXPR
, type
, arg
,
3061 fold_convert (type
, integer_zero_node
));
3064 return build1 (TRUTH_NOT_EXPR
, type
, arg
);
3066 case CLEANUP_POINT_EXPR
:
3067 return build1 (CLEANUP_POINT_EXPR
, type
,
3068 invert_truthvalue (TREE_OPERAND (arg
, 0)));
3073 gcc_assert (TREE_CODE (TREE_TYPE (arg
)) == BOOLEAN_TYPE
);
3074 return build1 (TRUTH_NOT_EXPR
, type
, arg
);
3077 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3078 operands are another bit-wise operation with a common input. If so,
3079 distribute the bit operations to save an operation and possibly two if
3080 constants are involved. For example, convert
3081 (A | B) & (A | C) into A | (B & C)
3082 Further simplification will occur if B and C are constants.
3084 If this optimization cannot be done, 0 will be returned. */
3087 distribute_bit_expr (enum tree_code code
, tree type
, tree arg0
, tree arg1
)
3092 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
3093 || TREE_CODE (arg0
) == code
3094 || (TREE_CODE (arg0
) != BIT_AND_EXPR
3095 && TREE_CODE (arg0
) != BIT_IOR_EXPR
))
3098 if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0), 0))
3100 common
= TREE_OPERAND (arg0
, 0);
3101 left
= TREE_OPERAND (arg0
, 1);
3102 right
= TREE_OPERAND (arg1
, 1);
3104 else if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 1), 0))
3106 common
= TREE_OPERAND (arg0
, 0);
3107 left
= TREE_OPERAND (arg0
, 1);
3108 right
= TREE_OPERAND (arg1
, 0);
3110 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 0), 0))
3112 common
= TREE_OPERAND (arg0
, 1);
3113 left
= TREE_OPERAND (arg0
, 0);
3114 right
= TREE_OPERAND (arg1
, 1);
3116 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 1), 0))
3118 common
= TREE_OPERAND (arg0
, 1);
3119 left
= TREE_OPERAND (arg0
, 0);
3120 right
= TREE_OPERAND (arg1
, 0);
3125 return fold_build2 (TREE_CODE (arg0
), type
, common
,
3126 fold_build2 (code
, type
, left
, right
));
3129 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3130 with code CODE. This optimization is unsafe. */
3132 distribute_real_division (enum tree_code code
, tree type
, tree arg0
, tree arg1
)
3134 bool mul0
= TREE_CODE (arg0
) == MULT_EXPR
;
3135 bool mul1
= TREE_CODE (arg1
) == MULT_EXPR
;
3137 /* (A / C) +- (B / C) -> (A +- B) / C. */
3139 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3140 TREE_OPERAND (arg1
, 1), 0))
3141 return fold_build2 (mul0
? MULT_EXPR
: RDIV_EXPR
, type
,
3142 fold_build2 (code
, type
,
3143 TREE_OPERAND (arg0
, 0),
3144 TREE_OPERAND (arg1
, 0)),
3145 TREE_OPERAND (arg0
, 1));
3147 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3148 if (operand_equal_p (TREE_OPERAND (arg0
, 0),
3149 TREE_OPERAND (arg1
, 0), 0)
3150 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
3151 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
3153 REAL_VALUE_TYPE r0
, r1
;
3154 r0
= TREE_REAL_CST (TREE_OPERAND (arg0
, 1));
3155 r1
= TREE_REAL_CST (TREE_OPERAND (arg1
, 1));
3157 real_arithmetic (&r0
, RDIV_EXPR
, &dconst1
, &r0
);
3159 real_arithmetic (&r1
, RDIV_EXPR
, &dconst1
, &r1
);
3160 real_arithmetic (&r0
, code
, &r0
, &r1
);
3161 return fold_build2 (MULT_EXPR
, type
,
3162 TREE_OPERAND (arg0
, 0),
3163 build_real (type
, r0
));
3169 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3170 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3173 make_bit_field_ref (tree inner
, tree type
, int bitsize
, int bitpos
,
3180 tree size
= TYPE_SIZE (TREE_TYPE (inner
));
3181 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner
))
3182 || POINTER_TYPE_P (TREE_TYPE (inner
)))
3183 && host_integerp (size
, 0)
3184 && tree_low_cst (size
, 0) == bitsize
)
3185 return fold_convert (type
, inner
);
3188 result
= build3 (BIT_FIELD_REF
, type
, inner
,
3189 size_int (bitsize
), bitsize_int (bitpos
));
3191 BIT_FIELD_REF_UNSIGNED (result
) = unsignedp
;
3196 /* Optimize a bit-field compare.
3198 There are two cases: First is a compare against a constant and the
3199 second is a comparison of two items where the fields are at the same
3200 bit position relative to the start of a chunk (byte, halfword, word)
3201 large enough to contain it. In these cases we can avoid the shift
3202 implicit in bitfield extractions.
3204 For constants, we emit a compare of the shifted constant with the
3205 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3206 compared. For two fields at the same position, we do the ANDs with the
3207 similar mask and compare the result of the ANDs.
3209 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3210 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3211 are the left and right operands of the comparison, respectively.
3213 If the optimization described above can be done, we return the resulting
3214 tree. Otherwise we return zero. */
3217 optimize_bit_field_compare (enum tree_code code
, tree compare_type
,
3220 HOST_WIDE_INT lbitpos
, lbitsize
, rbitpos
, rbitsize
, nbitpos
, nbitsize
;
3221 tree type
= TREE_TYPE (lhs
);
3222 tree signed_type
, unsigned_type
;
3223 int const_p
= TREE_CODE (rhs
) == INTEGER_CST
;
3224 enum machine_mode lmode
, rmode
, nmode
;
3225 int lunsignedp
, runsignedp
;
3226 int lvolatilep
= 0, rvolatilep
= 0;
3227 tree linner
, rinner
= NULL_TREE
;
3231 /* Get all the information about the extractions being done. If the bit size
3232 if the same as the size of the underlying object, we aren't doing an
3233 extraction at all and so can do nothing. We also don't want to
3234 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3235 then will no longer be able to replace it. */
3236 linner
= get_inner_reference (lhs
, &lbitsize
, &lbitpos
, &offset
, &lmode
,
3237 &lunsignedp
, &lvolatilep
, false);
3238 if (linner
== lhs
|| lbitsize
== GET_MODE_BITSIZE (lmode
) || lbitsize
< 0
3239 || offset
!= 0 || TREE_CODE (linner
) == PLACEHOLDER_EXPR
)
3244 /* If this is not a constant, we can only do something if bit positions,
3245 sizes, and signedness are the same. */
3246 rinner
= get_inner_reference (rhs
, &rbitsize
, &rbitpos
, &offset
, &rmode
,
3247 &runsignedp
, &rvolatilep
, false);
3249 if (rinner
== rhs
|| lbitpos
!= rbitpos
|| lbitsize
!= rbitsize
3250 || lunsignedp
!= runsignedp
|| offset
!= 0
3251 || TREE_CODE (rinner
) == PLACEHOLDER_EXPR
)
3255 /* See if we can find a mode to refer to this field. We should be able to,
3256 but fail if we can't. */
3257 nmode
= get_best_mode (lbitsize
, lbitpos
,
3258 const_p
? TYPE_ALIGN (TREE_TYPE (linner
))
3259 : MIN (TYPE_ALIGN (TREE_TYPE (linner
)),
3260 TYPE_ALIGN (TREE_TYPE (rinner
))),
3261 word_mode
, lvolatilep
|| rvolatilep
);
3262 if (nmode
== VOIDmode
)
3265 /* Set signed and unsigned types of the precision of this mode for the
3267 signed_type
= lang_hooks
.types
.type_for_mode (nmode
, 0);
3268 unsigned_type
= lang_hooks
.types
.type_for_mode (nmode
, 1);
3270 /* Compute the bit position and size for the new reference and our offset
3271 within it. If the new reference is the same size as the original, we
3272 won't optimize anything, so return zero. */
3273 nbitsize
= GET_MODE_BITSIZE (nmode
);
3274 nbitpos
= lbitpos
& ~ (nbitsize
- 1);
3276 if (nbitsize
== lbitsize
)
3279 if (BYTES_BIG_ENDIAN
)
3280 lbitpos
= nbitsize
- lbitsize
- lbitpos
;
3282 /* Make the mask to be used against the extracted field. */
3283 mask
= build_int_cst (unsigned_type
, -1);
3284 mask
= force_fit_type (mask
, 0, false, false);
3285 mask
= fold_convert (unsigned_type
, mask
);
3286 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (nbitsize
- lbitsize
), 0);
3287 mask
= const_binop (RSHIFT_EXPR
, mask
,
3288 size_int (nbitsize
- lbitsize
- lbitpos
), 0);
3291 /* If not comparing with constant, just rework the comparison
3293 return build2 (code
, compare_type
,
3294 build2 (BIT_AND_EXPR
, unsigned_type
,
3295 make_bit_field_ref (linner
, unsigned_type
,
3296 nbitsize
, nbitpos
, 1),
3298 build2 (BIT_AND_EXPR
, unsigned_type
,
3299 make_bit_field_ref (rinner
, unsigned_type
,
3300 nbitsize
, nbitpos
, 1),
3303 /* Otherwise, we are handling the constant case. See if the constant is too
3304 big for the field. Warn and return a tree of for 0 (false) if so. We do
3305 this not only for its own sake, but to avoid having to test for this
3306 error case below. If we didn't, we might generate wrong code.
3308 For unsigned fields, the constant shifted right by the field length should
3309 be all zero. For signed fields, the high-order bits should agree with
3314 if (! integer_zerop (const_binop (RSHIFT_EXPR
,
3315 fold_convert (unsigned_type
, rhs
),
3316 size_int (lbitsize
), 0)))
3318 warning (0, "comparison is always %d due to width of bit-field",
3320 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3325 tree tem
= const_binop (RSHIFT_EXPR
, fold_convert (signed_type
, rhs
),
3326 size_int (lbitsize
- 1), 0);
3327 if (! integer_zerop (tem
) && ! integer_all_onesp (tem
))
3329 warning (0, "comparison is always %d due to width of bit-field",
3331 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3335 /* Single-bit compares should always be against zero. */
3336 if (lbitsize
== 1 && ! integer_zerop (rhs
))
3338 code
= code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
;
3339 rhs
= fold_convert (type
, integer_zero_node
);
3342 /* Make a new bitfield reference, shift the constant over the
3343 appropriate number of bits and mask it with the computed mask
3344 (in case this was a signed field). If we changed it, make a new one. */
3345 lhs
= make_bit_field_ref (linner
, unsigned_type
, nbitsize
, nbitpos
, 1);
3348 TREE_SIDE_EFFECTS (lhs
) = 1;
3349 TREE_THIS_VOLATILE (lhs
) = 1;
3352 rhs
= const_binop (BIT_AND_EXPR
,
3353 const_binop (LSHIFT_EXPR
,
3354 fold_convert (unsigned_type
, rhs
),
3355 size_int (lbitpos
), 0),
3358 return build2 (code
, compare_type
,
3359 build2 (BIT_AND_EXPR
, unsigned_type
, lhs
, mask
),
3363 /* Subroutine for fold_truthop: decode a field reference.
3365 If EXP is a comparison reference, we return the innermost reference.
3367 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3368 set to the starting bit number.
3370 If the innermost field can be completely contained in a mode-sized
3371 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3373 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3374 otherwise it is not changed.
3376 *PUNSIGNEDP is set to the signedness of the field.
3378 *PMASK is set to the mask used. This is either contained in a
3379 BIT_AND_EXPR or derived from the width of the field.
3381 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3383 Return 0 if this is not a component reference or is one that we can't
3384 do anything with. */
3387 decode_field_reference (tree exp
, HOST_WIDE_INT
*pbitsize
,
3388 HOST_WIDE_INT
*pbitpos
, enum machine_mode
*pmode
,
3389 int *punsignedp
, int *pvolatilep
,
3390 tree
*pmask
, tree
*pand_mask
)
3392 tree outer_type
= 0;
3394 tree mask
, inner
, offset
;
3396 unsigned int precision
;
3398 /* All the optimizations using this function assume integer fields.
3399 There are problems with FP fields since the type_for_size call
3400 below can fail for, e.g., XFmode. */
3401 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp
)))
3404 /* We are interested in the bare arrangement of bits, so strip everything
3405 that doesn't affect the machine mode. However, record the type of the
3406 outermost expression if it may matter below. */
3407 if (TREE_CODE (exp
) == NOP_EXPR
3408 || TREE_CODE (exp
) == CONVERT_EXPR
3409 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
3410 outer_type
= TREE_TYPE (exp
);
3413 if (TREE_CODE (exp
) == BIT_AND_EXPR
)
3415 and_mask
= TREE_OPERAND (exp
, 1);
3416 exp
= TREE_OPERAND (exp
, 0);
3417 STRIP_NOPS (exp
); STRIP_NOPS (and_mask
);
3418 if (TREE_CODE (and_mask
) != INTEGER_CST
)
3422 inner
= get_inner_reference (exp
, pbitsize
, pbitpos
, &offset
, pmode
,
3423 punsignedp
, pvolatilep
, false);
3424 if ((inner
== exp
&& and_mask
== 0)
3425 || *pbitsize
< 0 || offset
!= 0
3426 || TREE_CODE (inner
) == PLACEHOLDER_EXPR
)
3429 /* If the number of bits in the reference is the same as the bitsize of
3430 the outer type, then the outer type gives the signedness. Otherwise
3431 (in case of a small bitfield) the signedness is unchanged. */
3432 if (outer_type
&& *pbitsize
== TYPE_PRECISION (outer_type
))
3433 *punsignedp
= TYPE_UNSIGNED (outer_type
);
3435 /* Compute the mask to access the bitfield. */
3436 unsigned_type
= lang_hooks
.types
.type_for_size (*pbitsize
, 1);
3437 precision
= TYPE_PRECISION (unsigned_type
);
3439 mask
= build_int_cst (unsigned_type
, -1);
3440 mask
= force_fit_type (mask
, 0, false, false);
3442 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
), 0);
3443 mask
= const_binop (RSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
), 0);
3445 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3447 mask
= fold_build2 (BIT_AND_EXPR
, unsigned_type
,
3448 fold_convert (unsigned_type
, and_mask
), mask
);
3451 *pand_mask
= and_mask
;
3455 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3459 all_ones_mask_p (tree mask
, int size
)
3461 tree type
= TREE_TYPE (mask
);
3462 unsigned int precision
= TYPE_PRECISION (type
);
3465 tmask
= build_int_cst (lang_hooks
.types
.signed_type (type
), -1);
3466 tmask
= force_fit_type (tmask
, 0, false, false);
3469 tree_int_cst_equal (mask
,
3470 const_binop (RSHIFT_EXPR
,
3471 const_binop (LSHIFT_EXPR
, tmask
,
3472 size_int (precision
- size
),
3474 size_int (precision
- size
), 0));
3477 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3478 represents the sign bit of EXP's type. If EXP represents a sign
3479 or zero extension, also test VAL against the unextended type.
3480 The return value is the (sub)expression whose sign bit is VAL,
3481 or NULL_TREE otherwise. */
3484 sign_bit_p (tree exp
, tree val
)
3486 unsigned HOST_WIDE_INT mask_lo
, lo
;
3487 HOST_WIDE_INT mask_hi
, hi
;
3491 /* Tree EXP must have an integral type. */
3492 t
= TREE_TYPE (exp
);
3493 if (! INTEGRAL_TYPE_P (t
))
3496 /* Tree VAL must be an integer constant. */
3497 if (TREE_CODE (val
) != INTEGER_CST
3498 || TREE_CONSTANT_OVERFLOW (val
))
3501 width
= TYPE_PRECISION (t
);
3502 if (width
> HOST_BITS_PER_WIDE_INT
)
3504 hi
= (unsigned HOST_WIDE_INT
) 1 << (width
- HOST_BITS_PER_WIDE_INT
- 1);
3507 mask_hi
= ((unsigned HOST_WIDE_INT
) -1
3508 >> (2 * HOST_BITS_PER_WIDE_INT
- width
));
3514 lo
= (unsigned HOST_WIDE_INT
) 1 << (width
- 1);
3517 mask_lo
= ((unsigned HOST_WIDE_INT
) -1
3518 >> (HOST_BITS_PER_WIDE_INT
- width
));
3521 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3522 treat VAL as if it were unsigned. */
3523 if ((TREE_INT_CST_HIGH (val
) & mask_hi
) == hi
3524 && (TREE_INT_CST_LOW (val
) & mask_lo
) == lo
)
3527 /* Handle extension from a narrower type. */
3528 if (TREE_CODE (exp
) == NOP_EXPR
3529 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))) < width
)
3530 return sign_bit_p (TREE_OPERAND (exp
, 0), val
);
3535 /* Subroutine for fold_truthop: determine if an operand is simple enough
3536 to be evaluated unconditionally. */
3539 simple_operand_p (tree exp
)
3541 /* Strip any conversions that don't change the machine mode. */
3544 return (CONSTANT_CLASS_P (exp
)
3545 || TREE_CODE (exp
) == SSA_NAME
3547 && ! TREE_ADDRESSABLE (exp
)
3548 && ! TREE_THIS_VOLATILE (exp
)
3549 && ! DECL_NONLOCAL (exp
)
3550 /* Don't regard global variables as simple. They may be
3551 allocated in ways unknown to the compiler (shared memory,
3552 #pragma weak, etc). */
3553 && ! TREE_PUBLIC (exp
)
3554 && ! DECL_EXTERNAL (exp
)
3555 /* Loading a static variable is unduly expensive, but global
3556 registers aren't expensive. */
3557 && (! TREE_STATIC (exp
) || DECL_REGISTER (exp
))));
3560 /* The following functions are subroutines to fold_range_test and allow it to
3561 try to change a logical combination of comparisons into a range test.
3564 X == 2 || X == 3 || X == 4 || X == 5
3568 (unsigned) (X - 2) <= 3
3570 We describe each set of comparisons as being either inside or outside
3571 a range, using a variable named like IN_P, and then describe the
3572 range with a lower and upper bound. If one of the bounds is omitted,
3573 it represents either the highest or lowest value of the type.
3575 In the comments below, we represent a range by two numbers in brackets
3576 preceded by a "+" to designate being inside that range, or a "-" to
3577 designate being outside that range, so the condition can be inverted by
3578 flipping the prefix. An omitted bound is represented by a "-". For
3579 example, "- [-, 10]" means being outside the range starting at the lowest
3580 possible value and ending at 10, in other words, being greater than 10.
3581 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3584 We set up things so that the missing bounds are handled in a consistent
3585 manner so neither a missing bound nor "true" and "false" need to be
3586 handled using a special case. */
3588 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3589 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3590 and UPPER1_P are nonzero if the respective argument is an upper bound
3591 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3592 must be specified for a comparison. ARG1 will be converted to ARG0's
3593 type if both are specified. */
3596 range_binop (enum tree_code code
, tree type
, tree arg0
, int upper0_p
,
3597 tree arg1
, int upper1_p
)
3603 /* If neither arg represents infinity, do the normal operation.
3604 Else, if not a comparison, return infinity. Else handle the special
3605 comparison rules. Note that most of the cases below won't occur, but
3606 are handled for consistency. */
3608 if (arg0
!= 0 && arg1
!= 0)
3610 tem
= fold_build2 (code
, type
!= 0 ? type
: TREE_TYPE (arg0
),
3611 arg0
, fold_convert (TREE_TYPE (arg0
), arg1
));
3613 return TREE_CODE (tem
) == INTEGER_CST
? tem
: 0;
3616 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
3619 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3620 for neither. In real maths, we cannot assume open ended ranges are
3621 the same. But, this is computer arithmetic, where numbers are finite.
3622 We can therefore make the transformation of any unbounded range with
3623 the value Z, Z being greater than any representable number. This permits
3624 us to treat unbounded ranges as equal. */
3625 sgn0
= arg0
!= 0 ? 0 : (upper0_p
? 1 : -1);
3626 sgn1
= arg1
!= 0 ? 0 : (upper1_p
? 1 : -1);
3630 result
= sgn0
== sgn1
;
3633 result
= sgn0
!= sgn1
;
3636 result
= sgn0
< sgn1
;
3639 result
= sgn0
<= sgn1
;
3642 result
= sgn0
> sgn1
;
3645 result
= sgn0
>= sgn1
;
3651 return constant_boolean_node (result
, type
);
3654 /* Given EXP, a logical expression, set the range it is testing into
3655 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3656 actually being tested. *PLOW and *PHIGH will be made of the same type
3657 as the returned expression. If EXP is not a comparison, we will most
3658 likely not be returning a useful value and range. */
3661 make_range (tree exp
, int *pin_p
, tree
*plow
, tree
*phigh
)
3663 enum tree_code code
;
3664 tree arg0
= NULL_TREE
, arg1
= NULL_TREE
;
3665 tree exp_type
= NULL_TREE
, arg0_type
= NULL_TREE
;
3667 tree low
, high
, n_low
, n_high
;
3669 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3670 and see if we can refine the range. Some of the cases below may not
3671 happen, but it doesn't seem worth worrying about this. We "continue"
3672 the outer loop when we've changed something; otherwise we "break"
3673 the switch, which will "break" the while. */
3676 low
= high
= fold_convert (TREE_TYPE (exp
), integer_zero_node
);
3680 code
= TREE_CODE (exp
);
3681 exp_type
= TREE_TYPE (exp
);
3683 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
3685 if (TREE_CODE_LENGTH (code
) > 0)
3686 arg0
= TREE_OPERAND (exp
, 0);
3687 if (TREE_CODE_CLASS (code
) == tcc_comparison
3688 || TREE_CODE_CLASS (code
) == tcc_unary
3689 || TREE_CODE_CLASS (code
) == tcc_binary
)
3690 arg0_type
= TREE_TYPE (arg0
);
3691 if (TREE_CODE_CLASS (code
) == tcc_binary
3692 || TREE_CODE_CLASS (code
) == tcc_comparison
3693 || (TREE_CODE_CLASS (code
) == tcc_expression
3694 && TREE_CODE_LENGTH (code
) > 1))
3695 arg1
= TREE_OPERAND (exp
, 1);
3700 case TRUTH_NOT_EXPR
:
3701 in_p
= ! in_p
, exp
= arg0
;
3704 case EQ_EXPR
: case NE_EXPR
:
3705 case LT_EXPR
: case LE_EXPR
: case GE_EXPR
: case GT_EXPR
:
3706 /* We can only do something if the range is testing for zero
3707 and if the second operand is an integer constant. Note that
3708 saying something is "in" the range we make is done by
3709 complementing IN_P since it will set in the initial case of
3710 being not equal to zero; "out" is leaving it alone. */
3711 if (low
== 0 || high
== 0
3712 || ! integer_zerop (low
) || ! integer_zerop (high
)
3713 || TREE_CODE (arg1
) != INTEGER_CST
)
3718 case NE_EXPR
: /* - [c, c] */
3721 case EQ_EXPR
: /* + [c, c] */
3722 in_p
= ! in_p
, low
= high
= arg1
;
3724 case GT_EXPR
: /* - [-, c] */
3725 low
= 0, high
= arg1
;
3727 case GE_EXPR
: /* + [c, -] */
3728 in_p
= ! in_p
, low
= arg1
, high
= 0;
3730 case LT_EXPR
: /* - [c, -] */
3731 low
= arg1
, high
= 0;
3733 case LE_EXPR
: /* + [-, c] */
3734 in_p
= ! in_p
, low
= 0, high
= arg1
;
3740 /* If this is an unsigned comparison, we also know that EXP is
3741 greater than or equal to zero. We base the range tests we make
3742 on that fact, so we record it here so we can parse existing
3743 range tests. We test arg0_type since often the return type
3744 of, e.g. EQ_EXPR, is boolean. */
3745 if (TYPE_UNSIGNED (arg0_type
) && (low
== 0 || high
== 0))
3747 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
3749 fold_convert (arg0_type
, integer_zero_node
),
3753 in_p
= n_in_p
, low
= n_low
, high
= n_high
;
3755 /* If the high bound is missing, but we have a nonzero low
3756 bound, reverse the range so it goes from zero to the low bound
3758 if (high
== 0 && low
&& ! integer_zerop (low
))
3761 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low
, 0,
3762 integer_one_node
, 0);
3763 low
= fold_convert (arg0_type
, integer_zero_node
);
3771 /* (-x) IN [a,b] -> x in [-b, -a] */
3772 n_low
= range_binop (MINUS_EXPR
, exp_type
,
3773 fold_convert (exp_type
, integer_zero_node
),
3775 n_high
= range_binop (MINUS_EXPR
, exp_type
,
3776 fold_convert (exp_type
, integer_zero_node
),
3778 low
= n_low
, high
= n_high
;
3784 exp
= build2 (MINUS_EXPR
, exp_type
, negate_expr (arg0
),
3785 fold_convert (exp_type
, integer_one_node
));
3788 case PLUS_EXPR
: case MINUS_EXPR
:
3789 if (TREE_CODE (arg1
) != INTEGER_CST
)
3792 /* If EXP is signed, any overflow in the computation is undefined,
3793 so we don't worry about it so long as our computations on
3794 the bounds don't overflow. For unsigned, overflow is defined
3795 and this is exactly the right thing. */
3796 n_low
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
3797 arg0_type
, low
, 0, arg1
, 0);
3798 n_high
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
3799 arg0_type
, high
, 1, arg1
, 0);
3800 if ((n_low
!= 0 && TREE_OVERFLOW (n_low
))
3801 || (n_high
!= 0 && TREE_OVERFLOW (n_high
)))
3804 /* Check for an unsigned range which has wrapped around the maximum
3805 value thus making n_high < n_low, and normalize it. */
3806 if (n_low
&& n_high
&& tree_int_cst_lt (n_high
, n_low
))
3808 low
= range_binop (PLUS_EXPR
, arg0_type
, n_high
, 0,
3809 integer_one_node
, 0);
3810 high
= range_binop (MINUS_EXPR
, arg0_type
, n_low
, 0,
3811 integer_one_node
, 0);
3813 /* If the range is of the form +/- [ x+1, x ], we won't
3814 be able to normalize it. But then, it represents the
3815 whole range or the empty set, so make it
3817 if (tree_int_cst_equal (n_low
, low
)
3818 && tree_int_cst_equal (n_high
, high
))
3824 low
= n_low
, high
= n_high
;
3829 case NOP_EXPR
: case NON_LVALUE_EXPR
: case CONVERT_EXPR
:
3830 if (TYPE_PRECISION (arg0_type
) > TYPE_PRECISION (exp_type
))
3833 if (! INTEGRAL_TYPE_P (arg0_type
)
3834 || (low
!= 0 && ! int_fits_type_p (low
, arg0_type
))
3835 || (high
!= 0 && ! int_fits_type_p (high
, arg0_type
)))
3838 n_low
= low
, n_high
= high
;
3841 n_low
= fold_convert (arg0_type
, n_low
);
3844 n_high
= fold_convert (arg0_type
, n_high
);
3847 /* If we're converting arg0 from an unsigned type, to exp,
3848 a signed type, we will be doing the comparison as unsigned.
3849 The tests above have already verified that LOW and HIGH
3852 So we have to ensure that we will handle large unsigned
3853 values the same way that the current signed bounds treat
3856 if (!TYPE_UNSIGNED (exp_type
) && TYPE_UNSIGNED (arg0_type
))
3859 tree equiv_type
= lang_hooks
.types
.type_for_mode
3860 (TYPE_MODE (arg0_type
), 1);
3862 /* A range without an upper bound is, naturally, unbounded.
3863 Since convert would have cropped a very large value, use
3864 the max value for the destination type. */
3866 = TYPE_MAX_VALUE (equiv_type
) ? TYPE_MAX_VALUE (equiv_type
)
3867 : TYPE_MAX_VALUE (arg0_type
);
3869 if (TYPE_PRECISION (exp_type
) == TYPE_PRECISION (arg0_type
))
3870 high_positive
= fold_build2 (RSHIFT_EXPR
, arg0_type
,
3871 fold_convert (arg0_type
,
3873 fold_convert (arg0_type
,
3876 /* If the low bound is specified, "and" the range with the
3877 range for which the original unsigned value will be
3881 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
3882 1, n_low
, n_high
, 1,
3883 fold_convert (arg0_type
,
3888 in_p
= (n_in_p
== in_p
);
3892 /* Otherwise, "or" the range with the range of the input
3893 that will be interpreted as negative. */
3894 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
3895 0, n_low
, n_high
, 1,
3896 fold_convert (arg0_type
,
3901 in_p
= (in_p
!= n_in_p
);
3906 low
= n_low
, high
= n_high
;
3916 /* If EXP is a constant, we can evaluate whether this is true or false. */
3917 if (TREE_CODE (exp
) == INTEGER_CST
)
3919 in_p
= in_p
== (integer_onep (range_binop (GE_EXPR
, integer_type_node
,
3921 && integer_onep (range_binop (LE_EXPR
, integer_type_node
,
3927 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
3931 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3932 type, TYPE, return an expression to test if EXP is in (or out of, depending
3933 on IN_P) the range. Return 0 if the test couldn't be created. */
3936 build_range_check (tree type
, tree exp
, int in_p
, tree low
, tree high
)
3938 tree etype
= TREE_TYPE (exp
);
3941 #ifdef HAVE_canonicalize_funcptr_for_compare
3942 /* Disable this optimization for function pointer expressions
3943 on targets that require function pointer canonicalization. */
3944 if (HAVE_canonicalize_funcptr_for_compare
3945 && TREE_CODE (etype
) == POINTER_TYPE
3946 && TREE_CODE (TREE_TYPE (etype
)) == FUNCTION_TYPE
)
3952 value
= build_range_check (type
, exp
, 1, low
, high
);
3954 return invert_truthvalue (value
);
3959 if (low
== 0 && high
== 0)
3960 return fold_convert (type
, integer_one_node
);
3963 return fold_build2 (LE_EXPR
, type
, exp
,
3964 fold_convert (etype
, high
));
3967 return fold_build2 (GE_EXPR
, type
, exp
,
3968 fold_convert (etype
, low
));
3970 if (operand_equal_p (low
, high
, 0))
3971 return fold_build2 (EQ_EXPR
, type
, exp
,
3972 fold_convert (etype
, low
));
3974 if (integer_zerop (low
))
3976 if (! TYPE_UNSIGNED (etype
))
3978 etype
= lang_hooks
.types
.unsigned_type (etype
);
3979 high
= fold_convert (etype
, high
);
3980 exp
= fold_convert (etype
, exp
);
3982 return build_range_check (type
, exp
, 1, 0, high
);
3985 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3986 if (integer_onep (low
) && TREE_CODE (high
) == INTEGER_CST
)
3988 unsigned HOST_WIDE_INT lo
;
3992 prec
= TYPE_PRECISION (etype
);
3993 if (prec
<= HOST_BITS_PER_WIDE_INT
)
3996 lo
= ((unsigned HOST_WIDE_INT
) 1 << (prec
- 1)) - 1;
4000 hi
= ((HOST_WIDE_INT
) 1 << (prec
- HOST_BITS_PER_WIDE_INT
- 1)) - 1;
4001 lo
= (unsigned HOST_WIDE_INT
) -1;
4004 if (TREE_INT_CST_HIGH (high
) == hi
&& TREE_INT_CST_LOW (high
) == lo
)
4006 if (TYPE_UNSIGNED (etype
))
4008 etype
= lang_hooks
.types
.signed_type (etype
);
4009 exp
= fold_convert (etype
, exp
);
4011 return fold_build2 (GT_EXPR
, type
, exp
,
4012 fold_convert (etype
, integer_zero_node
));
4016 value
= const_binop (MINUS_EXPR
, high
, low
, 0);
4017 if (value
!= 0 && TREE_OVERFLOW (value
) && ! TYPE_UNSIGNED (etype
))
4019 tree utype
, minv
, maxv
;
4021 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4022 for the type in question, as we rely on this here. */
4023 switch (TREE_CODE (etype
))
4028 utype
= lang_hooks
.types
.unsigned_type (etype
);
4029 maxv
= fold_convert (utype
, TYPE_MAX_VALUE (etype
));
4030 maxv
= range_binop (PLUS_EXPR
, NULL_TREE
, maxv
, 1,
4031 integer_one_node
, 1);
4032 minv
= fold_convert (utype
, TYPE_MIN_VALUE (etype
));
4033 if (integer_zerop (range_binop (NE_EXPR
, integer_type_node
,
4037 high
= fold_convert (etype
, high
);
4038 low
= fold_convert (etype
, low
);
4039 exp
= fold_convert (etype
, exp
);
4040 value
= const_binop (MINUS_EXPR
, high
, low
, 0);
4048 if (value
!= 0 && ! TREE_OVERFLOW (value
))
4049 return build_range_check (type
,
4050 fold_build2 (MINUS_EXPR
, etype
, exp
, low
),
4051 1, fold_convert (etype
, integer_zero_node
),
4057 /* Given two ranges, see if we can merge them into one. Return 1 if we
4058 can, 0 if we can't. Set the output range into the specified parameters. */
4061 merge_ranges (int *pin_p
, tree
*plow
, tree
*phigh
, int in0_p
, tree low0
,
4062 tree high0
, int in1_p
, tree low1
, tree high1
)
4070 int lowequal
= ((low0
== 0 && low1
== 0)
4071 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4072 low0
, 0, low1
, 0)));
4073 int highequal
= ((high0
== 0 && high1
== 0)
4074 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4075 high0
, 1, high1
, 1)));
4077 /* Make range 0 be the range that starts first, or ends last if they
4078 start at the same value. Swap them if it isn't. */
4079 if (integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4082 && integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4083 high1
, 1, high0
, 1))))
4085 temp
= in0_p
, in0_p
= in1_p
, in1_p
= temp
;
4086 tem
= low0
, low0
= low1
, low1
= tem
;
4087 tem
= high0
, high0
= high1
, high1
= tem
;
4090 /* Now flag two cases, whether the ranges are disjoint or whether the
4091 second range is totally subsumed in the first. Note that the tests
4092 below are simplified by the ones above. */
4093 no_overlap
= integer_onep (range_binop (LT_EXPR
, integer_type_node
,
4094 high0
, 1, low1
, 0));
4095 subset
= integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4096 high1
, 1, high0
, 1));
4098 /* We now have four cases, depending on whether we are including or
4099 excluding the two ranges. */
4102 /* If they don't overlap, the result is false. If the second range
4103 is a subset it is the result. Otherwise, the range is from the start
4104 of the second to the end of the first. */
4106 in_p
= 0, low
= high
= 0;
4108 in_p
= 1, low
= low1
, high
= high1
;
4110 in_p
= 1, low
= low1
, high
= high0
;
4113 else if (in0_p
&& ! in1_p
)
4115 /* If they don't overlap, the result is the first range. If they are
4116 equal, the result is false. If the second range is a subset of the
4117 first, and the ranges begin at the same place, we go from just after
4118 the end of the first range to the end of the second. If the second
4119 range is not a subset of the first, or if it is a subset and both
4120 ranges end at the same place, the range starts at the start of the
4121 first range and ends just before the second range.
4122 Otherwise, we can't describe this as a single range. */
4124 in_p
= 1, low
= low0
, high
= high0
;
4125 else if (lowequal
&& highequal
)
4126 in_p
= 0, low
= high
= 0;
4127 else if (subset
&& lowequal
)
4129 in_p
= 1, high
= high0
;
4130 low
= range_binop (PLUS_EXPR
, NULL_TREE
, high1
, 0,
4131 integer_one_node
, 0);
4133 else if (! subset
|| highequal
)
4135 in_p
= 1, low
= low0
;
4136 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low1
, 0,
4137 integer_one_node
, 0);
4143 else if (! in0_p
&& in1_p
)
4145 /* If they don't overlap, the result is the second range. If the second
4146 is a subset of the first, the result is false. Otherwise,
4147 the range starts just after the first range and ends at the
4148 end of the second. */
4150 in_p
= 1, low
= low1
, high
= high1
;
4151 else if (subset
|| highequal
)
4152 in_p
= 0, low
= high
= 0;
4155 in_p
= 1, high
= high1
;
4156 low
= range_binop (PLUS_EXPR
, NULL_TREE
, high0
, 1,
4157 integer_one_node
, 0);
4163 /* The case where we are excluding both ranges. Here the complex case
4164 is if they don't overlap. In that case, the only time we have a
4165 range is if they are adjacent. If the second is a subset of the
4166 first, the result is the first. Otherwise, the range to exclude
4167 starts at the beginning of the first range and ends at the end of the
4171 if (integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4172 range_binop (PLUS_EXPR
, NULL_TREE
,
4174 integer_one_node
, 1),
4176 in_p
= 0, low
= low0
, high
= high1
;
4179 /* Canonicalize - [min, x] into - [-, x]. */
4180 if (low0
&& TREE_CODE (low0
) == INTEGER_CST
)
4181 switch (TREE_CODE (TREE_TYPE (low0
)))
4184 if (TYPE_PRECISION (TREE_TYPE (low0
))
4185 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0
))))
4190 if (tree_int_cst_equal (low0
,
4191 TYPE_MIN_VALUE (TREE_TYPE (low0
))))
4195 if (TYPE_UNSIGNED (TREE_TYPE (low0
))
4196 && integer_zerop (low0
))
4203 /* Canonicalize - [x, max] into - [x, -]. */
4204 if (high1
&& TREE_CODE (high1
) == INTEGER_CST
)
4205 switch (TREE_CODE (TREE_TYPE (high1
)))
4208 if (TYPE_PRECISION (TREE_TYPE (high1
))
4209 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1
))))
4214 if (tree_int_cst_equal (high1
,
4215 TYPE_MAX_VALUE (TREE_TYPE (high1
))))
4219 if (TYPE_UNSIGNED (TREE_TYPE (high1
))
4220 && integer_zerop (range_binop (PLUS_EXPR
, NULL_TREE
,
4222 integer_one_node
, 1)))
4229 /* The ranges might be also adjacent between the maximum and
4230 minimum values of the given type. For
4231 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4232 return + [x + 1, y - 1]. */
4233 if (low0
== 0 && high1
== 0)
4235 low
= range_binop (PLUS_EXPR
, NULL_TREE
, high0
, 1,
4236 integer_one_node
, 1);
4237 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low1
, 0,
4238 integer_one_node
, 0);
4239 if (low
== 0 || high
== 0)
4249 in_p
= 0, low
= low0
, high
= high0
;
4251 in_p
= 0, low
= low0
, high
= high1
;
4254 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4259 /* Subroutine of fold, looking inside expressions of the form
4260 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4261 of the COND_EXPR. This function is being used also to optimize
4262 A op B ? C : A, by reversing the comparison first.
4264 Return a folded expression whose code is not a COND_EXPR
4265 anymore, or NULL_TREE if no folding opportunity is found. */
4268 fold_cond_expr_with_comparison (tree type
, tree arg0
, tree arg1
, tree arg2
)
4270 enum tree_code comp_code
= TREE_CODE (arg0
);
4271 tree arg00
= TREE_OPERAND (arg0
, 0);
4272 tree arg01
= TREE_OPERAND (arg0
, 1);
4273 tree arg1_type
= TREE_TYPE (arg1
);
4279 /* If we have A op 0 ? A : -A, consider applying the following
4282 A == 0? A : -A same as -A
4283 A != 0? A : -A same as A
4284 A >= 0? A : -A same as abs (A)
4285 A > 0? A : -A same as abs (A)
4286 A <= 0? A : -A same as -abs (A)
4287 A < 0? A : -A same as -abs (A)
4289 None of these transformations work for modes with signed
4290 zeros. If A is +/-0, the first two transformations will
4291 change the sign of the result (from +0 to -0, or vice
4292 versa). The last four will fix the sign of the result,
4293 even though the original expressions could be positive or
4294 negative, depending on the sign of A.
4296 Note that all these transformations are correct if A is
4297 NaN, since the two alternatives (A and -A) are also NaNs. */
4298 if ((FLOAT_TYPE_P (TREE_TYPE (arg01
))
4299 ? real_zerop (arg01
)
4300 : integer_zerop (arg01
))
4301 && ((TREE_CODE (arg2
) == NEGATE_EXPR
4302 && operand_equal_p (TREE_OPERAND (arg2
, 0), arg1
, 0))
4303 /* In the case that A is of the form X-Y, '-A' (arg2) may
4304 have already been folded to Y-X, check for that. */
4305 || (TREE_CODE (arg1
) == MINUS_EXPR
4306 && TREE_CODE (arg2
) == MINUS_EXPR
4307 && operand_equal_p (TREE_OPERAND (arg1
, 0),
4308 TREE_OPERAND (arg2
, 1), 0)
4309 && operand_equal_p (TREE_OPERAND (arg1
, 1),
4310 TREE_OPERAND (arg2
, 0), 0))))
4315 tem
= fold_convert (arg1_type
, arg1
);
4316 return pedantic_non_lvalue (fold_convert (type
, negate_expr (tem
)));
4319 return pedantic_non_lvalue (fold_convert (type
, arg1
));
4322 if (flag_trapping_math
)
4327 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4328 arg1
= fold_convert (lang_hooks
.types
.signed_type
4329 (TREE_TYPE (arg1
)), arg1
);
4330 tem
= fold_build1 (ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4331 return pedantic_non_lvalue (fold_convert (type
, tem
));
4334 if (flag_trapping_math
)
4338 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4339 arg1
= fold_convert (lang_hooks
.types
.signed_type
4340 (TREE_TYPE (arg1
)), arg1
);
4341 tem
= fold_build1 (ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4342 return negate_expr (fold_convert (type
, tem
));
4344 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
4348 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4349 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4350 both transformations are correct when A is NaN: A != 0
4351 is then true, and A == 0 is false. */
4353 if (integer_zerop (arg01
) && integer_zerop (arg2
))
4355 if (comp_code
== NE_EXPR
)
4356 return pedantic_non_lvalue (fold_convert (type
, arg1
));
4357 else if (comp_code
== EQ_EXPR
)
4358 return fold_convert (type
, integer_zero_node
);
4361 /* Try some transformations of A op B ? A : B.
4363 A == B? A : B same as B
4364 A != B? A : B same as A
4365 A >= B? A : B same as max (A, B)
4366 A > B? A : B same as max (B, A)
4367 A <= B? A : B same as min (A, B)
4368 A < B? A : B same as min (B, A)
4370 As above, these transformations don't work in the presence
4371 of signed zeros. For example, if A and B are zeros of
4372 opposite sign, the first two transformations will change
4373 the sign of the result. In the last four, the original
4374 expressions give different results for (A=+0, B=-0) and
4375 (A=-0, B=+0), but the transformed expressions do not.
4377 The first two transformations are correct if either A or B
4378 is a NaN. In the first transformation, the condition will
4379 be false, and B will indeed be chosen. In the case of the
4380 second transformation, the condition A != B will be true,
4381 and A will be chosen.
4383 The conversions to max() and min() are not correct if B is
4384 a number and A is not. The conditions in the original
4385 expressions will be false, so all four give B. The min()
4386 and max() versions would give a NaN instead. */
4387 if (operand_equal_for_comparison_p (arg01
, arg2
, arg00
)
4388 /* Avoid these transformations if the COND_EXPR may be used
4389 as an lvalue in the C++ front-end. PR c++/19199. */
4391 || strcmp (lang_hooks
.name
, "GNU C++") != 0
4392 || ! maybe_lvalue_p (arg1
)
4393 || ! maybe_lvalue_p (arg2
)))
4395 tree comp_op0
= arg00
;
4396 tree comp_op1
= arg01
;
4397 tree comp_type
= TREE_TYPE (comp_op0
);
4399 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4400 if (TYPE_MAIN_VARIANT (comp_type
) == TYPE_MAIN_VARIANT (type
))
4410 return pedantic_non_lvalue (fold_convert (type
, arg2
));
4412 return pedantic_non_lvalue (fold_convert (type
, arg1
));
4417 /* In C++ a ?: expression can be an lvalue, so put the
4418 operand which will be used if they are equal first
4419 so that we can convert this back to the
4420 corresponding COND_EXPR. */
4421 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4423 comp_op0
= fold_convert (comp_type
, comp_op0
);
4424 comp_op1
= fold_convert (comp_type
, comp_op1
);
4425 tem
= (comp_code
== LE_EXPR
|| comp_code
== UNLE_EXPR
)
4426 ? fold_build2 (MIN_EXPR
, comp_type
, comp_op0
, comp_op1
)
4427 : fold_build2 (MIN_EXPR
, comp_type
, comp_op1
, comp_op0
);
4428 return pedantic_non_lvalue (fold_convert (type
, tem
));
4435 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4437 comp_op0
= fold_convert (comp_type
, comp_op0
);
4438 comp_op1
= fold_convert (comp_type
, comp_op1
);
4439 tem
= (comp_code
== GE_EXPR
|| comp_code
== UNGE_EXPR
)
4440 ? fold_build2 (MAX_EXPR
, comp_type
, comp_op0
, comp_op1
)
4441 : fold_build2 (MAX_EXPR
, comp_type
, comp_op1
, comp_op0
);
4442 return pedantic_non_lvalue (fold_convert (type
, tem
));
4446 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4447 return pedantic_non_lvalue (fold_convert (type
, arg2
));
4450 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4451 return pedantic_non_lvalue (fold_convert (type
, arg1
));
4454 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
4459 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4460 we might still be able to simplify this. For example,
4461 if C1 is one less or one more than C2, this might have started
4462 out as a MIN or MAX and been transformed by this function.
4463 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4465 if (INTEGRAL_TYPE_P (type
)
4466 && TREE_CODE (arg01
) == INTEGER_CST
4467 && TREE_CODE (arg2
) == INTEGER_CST
)
4471 /* We can replace A with C1 in this case. */
4472 arg1
= fold_convert (type
, arg01
);
4473 return fold_build3 (COND_EXPR
, type
, arg0
, arg1
, arg2
);
4476 /* If C1 is C2 + 1, this is min(A, C2). */
4477 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
4479 && operand_equal_p (arg01
,
4480 const_binop (PLUS_EXPR
, arg2
,
4481 integer_one_node
, 0),
4483 return pedantic_non_lvalue (fold_build2 (MIN_EXPR
,
4488 /* If C1 is C2 - 1, this is min(A, C2). */
4489 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
4491 && operand_equal_p (arg01
,
4492 const_binop (MINUS_EXPR
, arg2
,
4493 integer_one_node
, 0),
4495 return pedantic_non_lvalue (fold_build2 (MIN_EXPR
,
4500 /* If C1 is C2 - 1, this is max(A, C2). */
4501 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
4503 && operand_equal_p (arg01
,
4504 const_binop (MINUS_EXPR
, arg2
,
4505 integer_one_node
, 0),
4507 return pedantic_non_lvalue (fold_build2 (MAX_EXPR
,
4512 /* If C1 is C2 + 1, this is max(A, C2). */
4513 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
4515 && operand_equal_p (arg01
,
4516 const_binop (PLUS_EXPR
, arg2
,
4517 integer_one_node
, 0),
4519 return pedantic_non_lvalue (fold_build2 (MAX_EXPR
,
4533 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4534 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4537 /* EXP is some logical combination of boolean tests. See if we can
4538 merge it into some range test. Return the new tree if so. */
4541 fold_range_test (enum tree_code code
, tree type
, tree op0
, tree op1
)
4543 int or_op
= (code
== TRUTH_ORIF_EXPR
4544 || code
== TRUTH_OR_EXPR
);
4545 int in0_p
, in1_p
, in_p
;
4546 tree low0
, low1
, low
, high0
, high1
, high
;
4547 tree lhs
= make_range (op0
, &in0_p
, &low0
, &high0
);
4548 tree rhs
= make_range (op1
, &in1_p
, &low1
, &high1
);
4551 /* If this is an OR operation, invert both sides; we will invert
4552 again at the end. */
4554 in0_p
= ! in0_p
, in1_p
= ! in1_p
;
4556 /* If both expressions are the same, if we can merge the ranges, and we
4557 can build the range test, return it or it inverted. If one of the
4558 ranges is always true or always false, consider it to be the same
4559 expression as the other. */
4560 if ((lhs
== 0 || rhs
== 0 || operand_equal_p (lhs
, rhs
, 0))
4561 && merge_ranges (&in_p
, &low
, &high
, in0_p
, low0
, high0
,
4563 && 0 != (tem
= (build_range_check (type
,
4565 : rhs
!= 0 ? rhs
: integer_zero_node
,
4567 return or_op
? invert_truthvalue (tem
) : tem
;
4569 /* On machines where the branch cost is expensive, if this is a
4570 short-circuited branch and the underlying object on both sides
4571 is the same, make a non-short-circuit operation. */
4572 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4573 && lhs
!= 0 && rhs
!= 0
4574 && (code
== TRUTH_ANDIF_EXPR
4575 || code
== TRUTH_ORIF_EXPR
)
4576 && operand_equal_p (lhs
, rhs
, 0))
4578 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4579 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4580 which cases we can't do this. */
4581 if (simple_operand_p (lhs
))
4582 return build2 (code
== TRUTH_ANDIF_EXPR
4583 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
4586 else if (lang_hooks
.decls
.global_bindings_p () == 0
4587 && ! CONTAINS_PLACEHOLDER_P (lhs
))
4589 tree common
= save_expr (lhs
);
4591 if (0 != (lhs
= build_range_check (type
, common
,
4592 or_op
? ! in0_p
: in0_p
,
4594 && (0 != (rhs
= build_range_check (type
, common
,
4595 or_op
? ! in1_p
: in1_p
,
4597 return build2 (code
== TRUTH_ANDIF_EXPR
4598 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
4606 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4607 bit value. Arrange things so the extra bits will be set to zero if and
4608 only if C is signed-extended to its full width. If MASK is nonzero,
4609 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4612 unextend (tree c
, int p
, int unsignedp
, tree mask
)
4614 tree type
= TREE_TYPE (c
);
4615 int modesize
= GET_MODE_BITSIZE (TYPE_MODE (type
));
4618 if (p
== modesize
|| unsignedp
)
4621 /* We work by getting just the sign bit into the low-order bit, then
4622 into the high-order bit, then sign-extend. We then XOR that value
4624 temp
= const_binop (RSHIFT_EXPR
, c
, size_int (p
- 1), 0);
4625 temp
= const_binop (BIT_AND_EXPR
, temp
, size_int (1), 0);
4627 /* We must use a signed type in order to get an arithmetic right shift.
4628 However, we must also avoid introducing accidental overflows, so that
4629 a subsequent call to integer_zerop will work. Hence we must
4630 do the type conversion here. At this point, the constant is either
4631 zero or one, and the conversion to a signed type can never overflow.
4632 We could get an overflow if this conversion is done anywhere else. */
4633 if (TYPE_UNSIGNED (type
))
4634 temp
= fold_convert (lang_hooks
.types
.signed_type (type
), temp
);
4636 temp
= const_binop (LSHIFT_EXPR
, temp
, size_int (modesize
- 1), 0);
4637 temp
= const_binop (RSHIFT_EXPR
, temp
, size_int (modesize
- p
- 1), 0);
4639 temp
= const_binop (BIT_AND_EXPR
, temp
,
4640 fold_convert (TREE_TYPE (c
), mask
), 0);
4641 /* If necessary, convert the type back to match the type of C. */
4642 if (TYPE_UNSIGNED (type
))
4643 temp
= fold_convert (type
, temp
);
4645 return fold_convert (type
, const_binop (BIT_XOR_EXPR
, c
, temp
, 0));
4648 /* Find ways of folding logical expressions of LHS and RHS:
4649 Try to merge two comparisons to the same innermost item.
4650 Look for range tests like "ch >= '0' && ch <= '9'".
4651 Look for combinations of simple terms on machines with expensive branches
4652 and evaluate the RHS unconditionally.
4654 For example, if we have p->a == 2 && p->b == 4 and we can make an
4655 object large enough to span both A and B, we can do this with a comparison
4656 against the object ANDed with the a mask.
4658 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4659 operations to do this with one comparison.
4661 We check for both normal comparisons and the BIT_AND_EXPRs made this by
4662 function and the one above.
4664 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4665 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4667 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4670 We return the simplified tree or 0 if no optimization is possible. */
4673 fold_truthop (enum tree_code code
, tree truth_type
, tree lhs
, tree rhs
)
4675 /* If this is the "or" of two comparisons, we can do something if
4676 the comparisons are NE_EXPR. If this is the "and", we can do something
4677 if the comparisons are EQ_EXPR. I.e.,
4678 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4680 WANTED_CODE is this operation code. For single bit fields, we can
4681 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4682 comparison for one-bit fields. */
4684 enum tree_code wanted_code
;
4685 enum tree_code lcode
, rcode
;
4686 tree ll_arg
, lr_arg
, rl_arg
, rr_arg
;
4687 tree ll_inner
, lr_inner
, rl_inner
, rr_inner
;
4688 HOST_WIDE_INT ll_bitsize
, ll_bitpos
, lr_bitsize
, lr_bitpos
;
4689 HOST_WIDE_INT rl_bitsize
, rl_bitpos
, rr_bitsize
, rr_bitpos
;
4690 HOST_WIDE_INT xll_bitpos
, xlr_bitpos
, xrl_bitpos
, xrr_bitpos
;
4691 HOST_WIDE_INT lnbitsize
, lnbitpos
, rnbitsize
, rnbitpos
;
4692 int ll_unsignedp
, lr_unsignedp
, rl_unsignedp
, rr_unsignedp
;
4693 enum machine_mode ll_mode
, lr_mode
, rl_mode
, rr_mode
;
4694 enum machine_mode lnmode
, rnmode
;
4695 tree ll_mask
, lr_mask
, rl_mask
, rr_mask
;
4696 tree ll_and_mask
, lr_and_mask
, rl_and_mask
, rr_and_mask
;
4697 tree l_const
, r_const
;
4698 tree lntype
, rntype
, result
;
4699 int first_bit
, end_bit
;
4702 /* Start by getting the comparison codes. Fail if anything is volatile.
4703 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4704 it were surrounded with a NE_EXPR. */
4706 if (TREE_SIDE_EFFECTS (lhs
) || TREE_SIDE_EFFECTS (rhs
))
4709 lcode
= TREE_CODE (lhs
);
4710 rcode
= TREE_CODE (rhs
);
4712 if (lcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (lhs
, 1)))
4714 lhs
= build2 (NE_EXPR
, truth_type
, lhs
,
4715 fold_convert (TREE_TYPE (lhs
), integer_zero_node
));
4719 if (rcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (rhs
, 1)))
4721 rhs
= build2 (NE_EXPR
, truth_type
, rhs
,
4722 fold_convert (TREE_TYPE (rhs
), integer_zero_node
));
4726 if (TREE_CODE_CLASS (lcode
) != tcc_comparison
4727 || TREE_CODE_CLASS (rcode
) != tcc_comparison
)
4730 ll_arg
= TREE_OPERAND (lhs
, 0);
4731 lr_arg
= TREE_OPERAND (lhs
, 1);
4732 rl_arg
= TREE_OPERAND (rhs
, 0);
4733 rr_arg
= TREE_OPERAND (rhs
, 1);
4735 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4736 if (simple_operand_p (ll_arg
)
4737 && simple_operand_p (lr_arg
))
4740 if (operand_equal_p (ll_arg
, rl_arg
, 0)
4741 && operand_equal_p (lr_arg
, rr_arg
, 0))
4743 result
= combine_comparisons (code
, lcode
, rcode
,
4744 truth_type
, ll_arg
, lr_arg
);
4748 else if (operand_equal_p (ll_arg
, rr_arg
, 0)
4749 && operand_equal_p (lr_arg
, rl_arg
, 0))
4751 result
= combine_comparisons (code
, lcode
,
4752 swap_tree_comparison (rcode
),
4753 truth_type
, ll_arg
, lr_arg
);
4759 code
= ((code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
)
4760 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
);
4762 /* If the RHS can be evaluated unconditionally and its operands are
4763 simple, it wins to evaluate the RHS unconditionally on machines
4764 with expensive branches. In this case, this isn't a comparison
4765 that can be merged. Avoid doing this if the RHS is a floating-point
4766 comparison since those can trap. */
4768 if (BRANCH_COST
>= 2
4769 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg
))
4770 && simple_operand_p (rl_arg
)
4771 && simple_operand_p (rr_arg
))
4773 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4774 if (code
== TRUTH_OR_EXPR
4775 && lcode
== NE_EXPR
&& integer_zerop (lr_arg
)
4776 && rcode
== NE_EXPR
&& integer_zerop (rr_arg
)
4777 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
))
4778 return build2 (NE_EXPR
, truth_type
,
4779 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
4781 fold_convert (TREE_TYPE (ll_arg
), integer_zero_node
));
4783 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4784 if (code
== TRUTH_AND_EXPR
4785 && lcode
== EQ_EXPR
&& integer_zerop (lr_arg
)
4786 && rcode
== EQ_EXPR
&& integer_zerop (rr_arg
)
4787 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
))
4788 return build2 (EQ_EXPR
, truth_type
,
4789 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
4791 fold_convert (TREE_TYPE (ll_arg
), integer_zero_node
));
4793 if (LOGICAL_OP_NON_SHORT_CIRCUIT
)
4794 return build2 (code
, truth_type
, lhs
, rhs
);
4797 /* See if the comparisons can be merged. Then get all the parameters for
4800 if ((lcode
!= EQ_EXPR
&& lcode
!= NE_EXPR
)
4801 || (rcode
!= EQ_EXPR
&& rcode
!= NE_EXPR
))
4805 ll_inner
= decode_field_reference (ll_arg
,
4806 &ll_bitsize
, &ll_bitpos
, &ll_mode
,
4807 &ll_unsignedp
, &volatilep
, &ll_mask
,
4809 lr_inner
= decode_field_reference (lr_arg
,
4810 &lr_bitsize
, &lr_bitpos
, &lr_mode
,
4811 &lr_unsignedp
, &volatilep
, &lr_mask
,
4813 rl_inner
= decode_field_reference (rl_arg
,
4814 &rl_bitsize
, &rl_bitpos
, &rl_mode
,
4815 &rl_unsignedp
, &volatilep
, &rl_mask
,
4817 rr_inner
= decode_field_reference (rr_arg
,
4818 &rr_bitsize
, &rr_bitpos
, &rr_mode
,
4819 &rr_unsignedp
, &volatilep
, &rr_mask
,
4822 /* It must be true that the inner operation on the lhs of each
4823 comparison must be the same if we are to be able to do anything.
4824 Then see if we have constants. If not, the same must be true for
4826 if (volatilep
|| ll_inner
== 0 || rl_inner
== 0
4827 || ! operand_equal_p (ll_inner
, rl_inner
, 0))
4830 if (TREE_CODE (lr_arg
) == INTEGER_CST
4831 && TREE_CODE (rr_arg
) == INTEGER_CST
)
4832 l_const
= lr_arg
, r_const
= rr_arg
;
4833 else if (lr_inner
== 0 || rr_inner
== 0
4834 || ! operand_equal_p (lr_inner
, rr_inner
, 0))
4837 l_const
= r_const
= 0;
4839 /* If either comparison code is not correct for our logical operation,
4840 fail. However, we can convert a one-bit comparison against zero into
4841 the opposite comparison against that bit being set in the field. */
4843 wanted_code
= (code
== TRUTH_AND_EXPR
? EQ_EXPR
: NE_EXPR
);
4844 if (lcode
!= wanted_code
)
4846 if (l_const
&& integer_zerop (l_const
) && integer_pow2p (ll_mask
))
4848 /* Make the left operand unsigned, since we are only interested
4849 in the value of one bit. Otherwise we are doing the wrong
4858 /* This is analogous to the code for l_const above. */
4859 if (rcode
!= wanted_code
)
4861 if (r_const
&& integer_zerop (r_const
) && integer_pow2p (rl_mask
))
4870 /* After this point all optimizations will generate bit-field
4871 references, which we might not want. */
4872 if (! lang_hooks
.can_use_bit_fields_p ())
4875 /* See if we can find a mode that contains both fields being compared on
4876 the left. If we can't, fail. Otherwise, update all constants and masks
4877 to be relative to a field of that size. */
4878 first_bit
= MIN (ll_bitpos
, rl_bitpos
);
4879 end_bit
= MAX (ll_bitpos
+ ll_bitsize
, rl_bitpos
+ rl_bitsize
);
4880 lnmode
= get_best_mode (end_bit
- first_bit
, first_bit
,
4881 TYPE_ALIGN (TREE_TYPE (ll_inner
)), word_mode
,
4883 if (lnmode
== VOIDmode
)
4886 lnbitsize
= GET_MODE_BITSIZE (lnmode
);
4887 lnbitpos
= first_bit
& ~ (lnbitsize
- 1);
4888 lntype
= lang_hooks
.types
.type_for_size (lnbitsize
, 1);
4889 xll_bitpos
= ll_bitpos
- lnbitpos
, xrl_bitpos
= rl_bitpos
- lnbitpos
;
4891 if (BYTES_BIG_ENDIAN
)
4893 xll_bitpos
= lnbitsize
- xll_bitpos
- ll_bitsize
;
4894 xrl_bitpos
= lnbitsize
- xrl_bitpos
- rl_bitsize
;
4897 ll_mask
= const_binop (LSHIFT_EXPR
, fold_convert (lntype
, ll_mask
),
4898 size_int (xll_bitpos
), 0);
4899 rl_mask
= const_binop (LSHIFT_EXPR
, fold_convert (lntype
, rl_mask
),
4900 size_int (xrl_bitpos
), 0);
4904 l_const
= fold_convert (lntype
, l_const
);
4905 l_const
= unextend (l_const
, ll_bitsize
, ll_unsignedp
, ll_and_mask
);
4906 l_const
= const_binop (LSHIFT_EXPR
, l_const
, size_int (xll_bitpos
), 0);
4907 if (! integer_zerop (const_binop (BIT_AND_EXPR
, l_const
,
4908 fold_build1 (BIT_NOT_EXPR
,
4912 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
4914 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
4919 r_const
= fold_convert (lntype
, r_const
);
4920 r_const
= unextend (r_const
, rl_bitsize
, rl_unsignedp
, rl_and_mask
);
4921 r_const
= const_binop (LSHIFT_EXPR
, r_const
, size_int (xrl_bitpos
), 0);
4922 if (! integer_zerop (const_binop (BIT_AND_EXPR
, r_const
,
4923 fold_build1 (BIT_NOT_EXPR
,
4927 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
4929 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
4933 /* If the right sides are not constant, do the same for it. Also,
4934 disallow this optimization if a size or signedness mismatch occurs
4935 between the left and right sides. */
4938 if (ll_bitsize
!= lr_bitsize
|| rl_bitsize
!= rr_bitsize
4939 || ll_unsignedp
!= lr_unsignedp
|| rl_unsignedp
!= rr_unsignedp
4940 /* Make sure the two fields on the right
4941 correspond to the left without being swapped. */
4942 || ll_bitpos
- rl_bitpos
!= lr_bitpos
- rr_bitpos
)
4945 first_bit
= MIN (lr_bitpos
, rr_bitpos
);
4946 end_bit
= MAX (lr_bitpos
+ lr_bitsize
, rr_bitpos
+ rr_bitsize
);
4947 rnmode
= get_best_mode (end_bit
- first_bit
, first_bit
,
4948 TYPE_ALIGN (TREE_TYPE (lr_inner
)), word_mode
,
4950 if (rnmode
== VOIDmode
)
4953 rnbitsize
= GET_MODE_BITSIZE (rnmode
);
4954 rnbitpos
= first_bit
& ~ (rnbitsize
- 1);
4955 rntype
= lang_hooks
.types
.type_for_size (rnbitsize
, 1);
4956 xlr_bitpos
= lr_bitpos
- rnbitpos
, xrr_bitpos
= rr_bitpos
- rnbitpos
;
4958 if (BYTES_BIG_ENDIAN
)
4960 xlr_bitpos
= rnbitsize
- xlr_bitpos
- lr_bitsize
;
4961 xrr_bitpos
= rnbitsize
- xrr_bitpos
- rr_bitsize
;
4964 lr_mask
= const_binop (LSHIFT_EXPR
, fold_convert (rntype
, lr_mask
),
4965 size_int (xlr_bitpos
), 0);
4966 rr_mask
= const_binop (LSHIFT_EXPR
, fold_convert (rntype
, rr_mask
),
4967 size_int (xrr_bitpos
), 0);
4969 /* Make a mask that corresponds to both fields being compared.
4970 Do this for both items being compared. If the operands are the
4971 same size and the bits being compared are in the same position
4972 then we can do this by masking both and comparing the masked
4974 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
, 0);
4975 lr_mask
= const_binop (BIT_IOR_EXPR
, lr_mask
, rr_mask
, 0);
4976 if (lnbitsize
== rnbitsize
&& xll_bitpos
== xlr_bitpos
)
4978 lhs
= make_bit_field_ref (ll_inner
, lntype
, lnbitsize
, lnbitpos
,
4979 ll_unsignedp
|| rl_unsignedp
);
4980 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
4981 lhs
= build2 (BIT_AND_EXPR
, lntype
, lhs
, ll_mask
);
4983 rhs
= make_bit_field_ref (lr_inner
, rntype
, rnbitsize
, rnbitpos
,
4984 lr_unsignedp
|| rr_unsignedp
);
4985 if (! all_ones_mask_p (lr_mask
, rnbitsize
))
4986 rhs
= build2 (BIT_AND_EXPR
, rntype
, rhs
, lr_mask
);
4988 return build2 (wanted_code
, truth_type
, lhs
, rhs
);
4991 /* There is still another way we can do something: If both pairs of
4992 fields being compared are adjacent, we may be able to make a wider
4993 field containing them both.
4995 Note that we still must mask the lhs/rhs expressions. Furthermore,
4996 the mask must be shifted to account for the shift done by
4997 make_bit_field_ref. */
4998 if ((ll_bitsize
+ ll_bitpos
== rl_bitpos
4999 && lr_bitsize
+ lr_bitpos
== rr_bitpos
)
5000 || (ll_bitpos
== rl_bitpos
+ rl_bitsize
5001 && lr_bitpos
== rr_bitpos
+ rr_bitsize
))
5005 lhs
= make_bit_field_ref (ll_inner
, lntype
, ll_bitsize
+ rl_bitsize
,
5006 MIN (ll_bitpos
, rl_bitpos
), ll_unsignedp
);
5007 rhs
= make_bit_field_ref (lr_inner
, rntype
, lr_bitsize
+ rr_bitsize
,
5008 MIN (lr_bitpos
, rr_bitpos
), lr_unsignedp
);
5010 ll_mask
= const_binop (RSHIFT_EXPR
, ll_mask
,
5011 size_int (MIN (xll_bitpos
, xrl_bitpos
)), 0);
5012 lr_mask
= const_binop (RSHIFT_EXPR
, lr_mask
,
5013 size_int (MIN (xlr_bitpos
, xrr_bitpos
)), 0);
5015 /* Convert to the smaller type before masking out unwanted bits. */
5017 if (lntype
!= rntype
)
5019 if (lnbitsize
> rnbitsize
)
5021 lhs
= fold_convert (rntype
, lhs
);
5022 ll_mask
= fold_convert (rntype
, ll_mask
);
5025 else if (lnbitsize
< rnbitsize
)
5027 rhs
= fold_convert (lntype
, rhs
);
5028 lr_mask
= fold_convert (lntype
, lr_mask
);
5033 if (! all_ones_mask_p (ll_mask
, ll_bitsize
+ rl_bitsize
))
5034 lhs
= build2 (BIT_AND_EXPR
, type
, lhs
, ll_mask
);
5036 if (! all_ones_mask_p (lr_mask
, lr_bitsize
+ rr_bitsize
))
5037 rhs
= build2 (BIT_AND_EXPR
, type
, rhs
, lr_mask
);
5039 return build2 (wanted_code
, truth_type
, lhs
, rhs
);
5045 /* Handle the case of comparisons with constants. If there is something in
5046 common between the masks, those bits of the constants must be the same.
5047 If not, the condition is always false. Test for this to avoid generating
5048 incorrect code below. */
5049 result
= const_binop (BIT_AND_EXPR
, ll_mask
, rl_mask
, 0);
5050 if (! integer_zerop (result
)
5051 && simple_cst_equal (const_binop (BIT_AND_EXPR
, result
, l_const
, 0),
5052 const_binop (BIT_AND_EXPR
, result
, r_const
, 0)) != 1)
5054 if (wanted_code
== NE_EXPR
)
5056 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5057 return constant_boolean_node (true, truth_type
);
5061 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5062 return constant_boolean_node (false, truth_type
);
5066 /* Construct the expression we will return. First get the component
5067 reference we will make. Unless the mask is all ones the width of
5068 that field, perform the mask operation. Then compare with the
5070 result
= make_bit_field_ref (ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5071 ll_unsignedp
|| rl_unsignedp
);
5073 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
, 0);
5074 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5075 result
= build2 (BIT_AND_EXPR
, lntype
, result
, ll_mask
);
5077 return build2 (wanted_code
, truth_type
, result
,
5078 const_binop (BIT_IOR_EXPR
, l_const
, r_const
, 0));
5081 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5085 optimize_minmax_comparison (enum tree_code code
, tree type
, tree op0
, tree op1
)
5088 enum tree_code op_code
;
5089 tree comp_const
= op1
;
5091 int consts_equal
, consts_lt
;
5094 STRIP_SIGN_NOPS (arg0
);
5096 op_code
= TREE_CODE (arg0
);
5097 minmax_const
= TREE_OPERAND (arg0
, 1);
5098 consts_equal
= tree_int_cst_equal (minmax_const
, comp_const
);
5099 consts_lt
= tree_int_cst_lt (minmax_const
, comp_const
);
5100 inner
= TREE_OPERAND (arg0
, 0);
5102 /* If something does not permit us to optimize, return the original tree. */
5103 if ((op_code
!= MIN_EXPR
&& op_code
!= MAX_EXPR
)
5104 || TREE_CODE (comp_const
) != INTEGER_CST
5105 || TREE_CONSTANT_OVERFLOW (comp_const
)
5106 || TREE_CODE (minmax_const
) != INTEGER_CST
5107 || TREE_CONSTANT_OVERFLOW (minmax_const
))
5110 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5111 and GT_EXPR, doing the rest with recursive calls using logical
5115 case NE_EXPR
: case LT_EXPR
: case LE_EXPR
:
5117 /* FIXME: We should be able to invert code without building a
5118 scratch tree node, but doing so would require us to
5119 duplicate a part of invert_truthvalue here. */
5120 tree tem
= invert_truthvalue (build2 (code
, type
, op0
, op1
));
5121 tem
= optimize_minmax_comparison (TREE_CODE (tem
),
5123 TREE_OPERAND (tem
, 0),
5124 TREE_OPERAND (tem
, 1));
5125 return invert_truthvalue (tem
);
5130 fold_build2 (TRUTH_ORIF_EXPR
, type
,
5131 optimize_minmax_comparison
5132 (EQ_EXPR
, type
, arg0
, comp_const
),
5133 optimize_minmax_comparison
5134 (GT_EXPR
, type
, arg0
, comp_const
));
5137 if (op_code
== MAX_EXPR
&& consts_equal
)
5138 /* MAX (X, 0) == 0 -> X <= 0 */
5139 return fold_build2 (LE_EXPR
, type
, inner
, comp_const
);
5141 else if (op_code
== MAX_EXPR
&& consts_lt
)
5142 /* MAX (X, 0) == 5 -> X == 5 */
5143 return fold_build2 (EQ_EXPR
, type
, inner
, comp_const
);
5145 else if (op_code
== MAX_EXPR
)
5146 /* MAX (X, 0) == -1 -> false */
5147 return omit_one_operand (type
, integer_zero_node
, inner
);
5149 else if (consts_equal
)
5150 /* MIN (X, 0) == 0 -> X >= 0 */
5151 return fold_build2 (GE_EXPR
, type
, inner
, comp_const
);
5154 /* MIN (X, 0) == 5 -> false */
5155 return omit_one_operand (type
, integer_zero_node
, inner
);
5158 /* MIN (X, 0) == -1 -> X == -1 */
5159 return fold_build2 (EQ_EXPR
, type
, inner
, comp_const
);
5162 if (op_code
== MAX_EXPR
&& (consts_equal
|| consts_lt
))
5163 /* MAX (X, 0) > 0 -> X > 0
5164 MAX (X, 0) > 5 -> X > 5 */
5165 return fold_build2 (GT_EXPR
, type
, inner
, comp_const
);
5167 else if (op_code
== MAX_EXPR
)
5168 /* MAX (X, 0) > -1 -> true */
5169 return omit_one_operand (type
, integer_one_node
, inner
);
5171 else if (op_code
== MIN_EXPR
&& (consts_equal
|| consts_lt
))
5172 /* MIN (X, 0) > 0 -> false
5173 MIN (X, 0) > 5 -> false */
5174 return omit_one_operand (type
, integer_zero_node
, inner
);
5177 /* MIN (X, 0) > -1 -> X > -1 */
5178 return fold_build2 (GT_EXPR
, type
, inner
, comp_const
);
5185 /* T is an integer expression that is being multiplied, divided, or taken a
5186 modulus (CODE says which and what kind of divide or modulus) by a
5187 constant C. See if we can eliminate that operation by folding it with
5188 other operations already in T. WIDE_TYPE, if non-null, is a type that
5189 should be used for the computation if wider than our type.
5191 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5192 (X * 2) + (Y * 4). We must, however, be assured that either the original
5193 expression would not overflow or that overflow is undefined for the type
5194 in the language in question.
5196 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5197 the machine has a multiply-accumulate insn or that this is part of an
5198 addressing calculation.
5200 If we return a non-null expression, it is an equivalent form of the
5201 original computation, but need not be in the original type. */
5204 extract_muldiv (tree t
, tree c
, enum tree_code code
, tree wide_type
)
5206 /* To avoid exponential search depth, refuse to allow recursion past
5207 three levels. Beyond that (1) it's highly unlikely that we'll find
5208 something interesting and (2) we've probably processed it before
5209 when we built the inner expression. */
5218 ret
= extract_muldiv_1 (t
, c
, code
, wide_type
);
5225 extract_muldiv_1 (tree t
, tree c
, enum tree_code code
, tree wide_type
)
5227 tree type
= TREE_TYPE (t
);
5228 enum tree_code tcode
= TREE_CODE (t
);
5229 tree ctype
= (wide_type
!= 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type
))
5230 > GET_MODE_SIZE (TYPE_MODE (type
)))
5231 ? wide_type
: type
);
5233 int same_p
= tcode
== code
;
5234 tree op0
= NULL_TREE
, op1
= NULL_TREE
;
5236 /* Don't deal with constants of zero here; they confuse the code below. */
5237 if (integer_zerop (c
))
5240 if (TREE_CODE_CLASS (tcode
) == tcc_unary
)
5241 op0
= TREE_OPERAND (t
, 0);
5243 if (TREE_CODE_CLASS (tcode
) == tcc_binary
)
5244 op0
= TREE_OPERAND (t
, 0), op1
= TREE_OPERAND (t
, 1);
5246 /* Note that we need not handle conditional operations here since fold
5247 already handles those cases. So just do arithmetic here. */
5251 /* For a constant, we can always simplify if we are a multiply
5252 or (for divide and modulus) if it is a multiple of our constant. */
5253 if (code
== MULT_EXPR
5254 || integer_zerop (const_binop (TRUNC_MOD_EXPR
, t
, c
, 0)))
5255 return const_binop (code
, fold_convert (ctype
, t
),
5256 fold_convert (ctype
, c
), 0);
5259 case CONVERT_EXPR
: case NON_LVALUE_EXPR
: case NOP_EXPR
:
5260 /* If op0 is an expression ... */
5261 if ((COMPARISON_CLASS_P (op0
)
5262 || UNARY_CLASS_P (op0
)
5263 || BINARY_CLASS_P (op0
)
5264 || EXPRESSION_CLASS_P (op0
))
5265 /* ... and is unsigned, and its type is smaller than ctype,
5266 then we cannot pass through as widening. */
5267 && ((TYPE_UNSIGNED (TREE_TYPE (op0
))
5268 && ! (TREE_CODE (TREE_TYPE (op0
)) == INTEGER_TYPE
5269 && TYPE_IS_SIZETYPE (TREE_TYPE (op0
)))
5270 && (GET_MODE_SIZE (TYPE_MODE (ctype
))
5271 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0
)))))
5272 /* ... or this is a truncation (t is narrower than op0),
5273 then we cannot pass through this narrowing. */
5274 || (GET_MODE_SIZE (TYPE_MODE (type
))
5275 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0
))))
5276 /* ... or signedness changes for division or modulus,
5277 then we cannot pass through this conversion. */
5278 || (code
!= MULT_EXPR
5279 && (TYPE_UNSIGNED (ctype
)
5280 != TYPE_UNSIGNED (TREE_TYPE (op0
))))))
5283 /* Pass the constant down and see if we can make a simplification. If
5284 we can, replace this expression with the inner simplification for
5285 possible later conversion to our or some other type. */
5286 if ((t2
= fold_convert (TREE_TYPE (op0
), c
)) != 0
5287 && TREE_CODE (t2
) == INTEGER_CST
5288 && ! TREE_CONSTANT_OVERFLOW (t2
)
5289 && (0 != (t1
= extract_muldiv (op0
, t2
, code
,
5291 ? ctype
: NULL_TREE
))))
5296 /* If widening the type changes it from signed to unsigned, then we
5297 must avoid building ABS_EXPR itself as unsigned. */
5298 if (TYPE_UNSIGNED (ctype
) && !TYPE_UNSIGNED (type
))
5300 tree cstype
= (*lang_hooks
.types
.signed_type
) (ctype
);
5301 if ((t1
= extract_muldiv (op0
, c
, code
, cstype
)) != 0)
5303 t1
= fold_build1 (tcode
, cstype
, fold_convert (cstype
, t1
));
5304 return fold_convert (ctype
, t1
);
5310 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
)) != 0)
5311 return fold_build1 (tcode
, ctype
, fold_convert (ctype
, t1
));
5314 case MIN_EXPR
: case MAX_EXPR
:
5315 /* If widening the type changes the signedness, then we can't perform
5316 this optimization as that changes the result. */
5317 if (TYPE_UNSIGNED (ctype
) != TYPE_UNSIGNED (type
))
5320 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5321 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
)) != 0
5322 && (t2
= extract_muldiv (op1
, c
, code
, wide_type
)) != 0)
5324 if (tree_int_cst_sgn (c
) < 0)
5325 tcode
= (tcode
== MIN_EXPR
? MAX_EXPR
: MIN_EXPR
);
5327 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5328 fold_convert (ctype
, t2
));
5332 case LSHIFT_EXPR
: case RSHIFT_EXPR
:
5333 /* If the second operand is constant, this is a multiplication
5334 or floor division, by a power of two, so we can treat it that
5335 way unless the multiplier or divisor overflows. Signed
5336 left-shift overflow is implementation-defined rather than
5337 undefined in C90, so do not convert signed left shift into
5339 if (TREE_CODE (op1
) == INTEGER_CST
5340 && (tcode
== RSHIFT_EXPR
|| TYPE_UNSIGNED (TREE_TYPE (op0
)))
5341 /* const_binop may not detect overflow correctly,
5342 so check for it explicitly here. */
5343 && TYPE_PRECISION (TREE_TYPE (size_one_node
)) > TREE_INT_CST_LOW (op1
)
5344 && TREE_INT_CST_HIGH (op1
) == 0
5345 && 0 != (t1
= fold_convert (ctype
,
5346 const_binop (LSHIFT_EXPR
,
5349 && ! TREE_OVERFLOW (t1
))
5350 return extract_muldiv (build2 (tcode
== LSHIFT_EXPR
5351 ? MULT_EXPR
: FLOOR_DIV_EXPR
,
5352 ctype
, fold_convert (ctype
, op0
), t1
),
5353 c
, code
, wide_type
);
5356 case PLUS_EXPR
: case MINUS_EXPR
:
5357 /* See if we can eliminate the operation on both sides. If we can, we
5358 can return a new PLUS or MINUS. If we can't, the only remaining
5359 cases where we can do anything are if the second operand is a
5361 t1
= extract_muldiv (op0
, c
, code
, wide_type
);
5362 t2
= extract_muldiv (op1
, c
, code
, wide_type
);
5363 if (t1
!= 0 && t2
!= 0
5364 && (code
== MULT_EXPR
5365 /* If not multiplication, we can only do this if both operands
5366 are divisible by c. */
5367 || (multiple_of_p (ctype
, op0
, c
)
5368 && multiple_of_p (ctype
, op1
, c
))))
5369 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5370 fold_convert (ctype
, t2
));
5372 /* If this was a subtraction, negate OP1 and set it to be an addition.
5373 This simplifies the logic below. */
5374 if (tcode
== MINUS_EXPR
)
5375 tcode
= PLUS_EXPR
, op1
= negate_expr (op1
);
5377 if (TREE_CODE (op1
) != INTEGER_CST
)
5380 /* If either OP1 or C are negative, this optimization is not safe for
5381 some of the division and remainder types while for others we need
5382 to change the code. */
5383 if (tree_int_cst_sgn (op1
) < 0 || tree_int_cst_sgn (c
) < 0)
5385 if (code
== CEIL_DIV_EXPR
)
5386 code
= FLOOR_DIV_EXPR
;
5387 else if (code
== FLOOR_DIV_EXPR
)
5388 code
= CEIL_DIV_EXPR
;
5389 else if (code
!= MULT_EXPR
5390 && code
!= CEIL_MOD_EXPR
&& code
!= FLOOR_MOD_EXPR
)
5394 /* If it's a multiply or a division/modulus operation of a multiple
5395 of our constant, do the operation and verify it doesn't overflow. */
5396 if (code
== MULT_EXPR
5397 || integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
, 0)))
5399 op1
= const_binop (code
, fold_convert (ctype
, op1
),
5400 fold_convert (ctype
, c
), 0);
5401 /* We allow the constant to overflow with wrapping semantics. */
5403 || (TREE_OVERFLOW (op1
) && ! flag_wrapv
))
5409 /* If we have an unsigned type is not a sizetype, we cannot widen
5410 the operation since it will change the result if the original
5411 computation overflowed. */
5412 if (TYPE_UNSIGNED (ctype
)
5413 && ! (TREE_CODE (ctype
) == INTEGER_TYPE
&& TYPE_IS_SIZETYPE (ctype
))
5417 /* If we were able to eliminate our operation from the first side,
5418 apply our operation to the second side and reform the PLUS. */
5419 if (t1
!= 0 && (TREE_CODE (t1
) != code
|| code
== MULT_EXPR
))
5420 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
), op1
);
5422 /* The last case is if we are a multiply. In that case, we can
5423 apply the distributive law to commute the multiply and addition
5424 if the multiplication of the constants doesn't overflow. */
5425 if (code
== MULT_EXPR
)
5426 return fold_build2 (tcode
, ctype
,
5427 fold_build2 (code
, ctype
,
5428 fold_convert (ctype
, op0
),
5429 fold_convert (ctype
, c
)),
5435 /* We have a special case here if we are doing something like
5436 (C * 8) % 4 since we know that's zero. */
5437 if ((code
== TRUNC_MOD_EXPR
|| code
== CEIL_MOD_EXPR
5438 || code
== FLOOR_MOD_EXPR
|| code
== ROUND_MOD_EXPR
)
5439 && TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
5440 && integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
, 0)))
5441 return omit_one_operand (type
, integer_zero_node
, op0
);
5443 /* ... fall through ... */
5445 case TRUNC_DIV_EXPR
: case CEIL_DIV_EXPR
: case FLOOR_DIV_EXPR
:
5446 case ROUND_DIV_EXPR
: case EXACT_DIV_EXPR
:
5447 /* If we can extract our operation from the LHS, do so and return a
5448 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5449 do something only if the second operand is a constant. */
5451 && (t1
= extract_muldiv (op0
, c
, code
, wide_type
)) != 0)
5452 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5453 fold_convert (ctype
, op1
));
5454 else if (tcode
== MULT_EXPR
&& code
== MULT_EXPR
5455 && (t1
= extract_muldiv (op1
, c
, code
, wide_type
)) != 0)
5456 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
5457 fold_convert (ctype
, t1
));
5458 else if (TREE_CODE (op1
) != INTEGER_CST
)
5461 /* If these are the same operation types, we can associate them
5462 assuming no overflow. */
5464 && 0 != (t1
= const_binop (MULT_EXPR
, fold_convert (ctype
, op1
),
5465 fold_convert (ctype
, c
), 0))
5466 && ! TREE_OVERFLOW (t1
))
5467 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
), t1
);
5469 /* If these operations "cancel" each other, we have the main
5470 optimizations of this pass, which occur when either constant is a
5471 multiple of the other, in which case we replace this with either an
5472 operation or CODE or TCODE.
5474 If we have an unsigned type that is not a sizetype, we cannot do
5475 this since it will change the result if the original computation
5477 if ((! TYPE_UNSIGNED (ctype
)
5478 || (TREE_CODE (ctype
) == INTEGER_TYPE
&& TYPE_IS_SIZETYPE (ctype
)))
5480 && ((code
== MULT_EXPR
&& tcode
== EXACT_DIV_EXPR
)
5481 || (tcode
== MULT_EXPR
5482 && code
!= TRUNC_MOD_EXPR
&& code
!= CEIL_MOD_EXPR
5483 && code
!= FLOOR_MOD_EXPR
&& code
!= ROUND_MOD_EXPR
)))
5485 if (integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
, 0)))
5486 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
5487 fold_convert (ctype
,
5488 const_binop (TRUNC_DIV_EXPR
,
5490 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR
, c
, op1
, 0)))
5491 return fold_build2 (code
, ctype
, fold_convert (ctype
, op0
),
5492 fold_convert (ctype
,
5493 const_binop (TRUNC_DIV_EXPR
,
5505 /* Return a node which has the indicated constant VALUE (either 0 or
5506 1), and is of the indicated TYPE. */
5509 constant_boolean_node (int value
, tree type
)
5511 if (type
== integer_type_node
)
5512 return value
? integer_one_node
: integer_zero_node
;
5513 else if (type
== boolean_type_node
)
5514 return value
? boolean_true_node
: boolean_false_node
;
5516 return build_int_cst (type
, value
);
5520 /* Return true if expr looks like an ARRAY_REF and set base and
5521 offset to the appropriate trees. If there is no offset,
5522 offset is set to NULL_TREE. Base will be canonicalized to
5523 something you can get the element type from using
5524 TREE_TYPE (TREE_TYPE (base)). */
5527 extract_array_ref (tree expr
, tree
*base
, tree
*offset
)
5529 /* One canonical form is a PLUS_EXPR with the first
5530 argument being an ADDR_EXPR with a possible NOP_EXPR
5532 if (TREE_CODE (expr
) == PLUS_EXPR
)
5534 tree op0
= TREE_OPERAND (expr
, 0);
5535 tree inner_base
, dummy1
;
5536 /* Strip NOP_EXPRs here because the C frontends and/or
5537 folders present us (int *)&x.a + 4B possibly. */
5539 if (extract_array_ref (op0
, &inner_base
, &dummy1
))
5542 if (dummy1
== NULL_TREE
)
5543 *offset
= TREE_OPERAND (expr
, 1);
5545 *offset
= fold_build2 (PLUS_EXPR
, TREE_TYPE (expr
),
5546 dummy1
, TREE_OPERAND (expr
, 1));
5550 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5551 which we transform into an ADDR_EXPR with appropriate
5552 offset. For other arguments to the ADDR_EXPR we assume
5553 zero offset and as such do not care about the ADDR_EXPR
5554 type and strip possible nops from it. */
5555 else if (TREE_CODE (expr
) == ADDR_EXPR
)
5557 tree op0
= TREE_OPERAND (expr
, 0);
5558 if (TREE_CODE (op0
) == ARRAY_REF
)
5560 *base
= TREE_OPERAND (op0
, 0);
5561 *offset
= TREE_OPERAND (op0
, 1);
5565 /* Handle array-to-pointer decay as &a. */
5566 if (TREE_CODE (TREE_TYPE (op0
)) == ARRAY_TYPE
)
5567 *base
= TREE_OPERAND (expr
, 0);
5570 *offset
= NULL_TREE
;
5574 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5575 else if (SSA_VAR_P (expr
)
5576 && TREE_CODE (TREE_TYPE (expr
)) == POINTER_TYPE
)
5579 *offset
= NULL_TREE
;
5587 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5588 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5589 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5590 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5591 COND is the first argument to CODE; otherwise (as in the example
5592 given here), it is the second argument. TYPE is the type of the
5593 original expression. Return NULL_TREE if no simplification is
5597 fold_binary_op_with_conditional_arg (enum tree_code code
,
5598 tree type
, tree op0
, tree op1
,
5599 tree cond
, tree arg
, int cond_first_p
)
5601 tree cond_type
= cond_first_p
? TREE_TYPE (op0
) : TREE_TYPE (op1
);
5602 tree arg_type
= cond_first_p
? TREE_TYPE (op1
) : TREE_TYPE (op0
);
5603 tree test
, true_value
, false_value
;
5604 tree lhs
= NULL_TREE
;
5605 tree rhs
= NULL_TREE
;
5607 /* This transformation is only worthwhile if we don't have to wrap
5608 arg in a SAVE_EXPR, and the operation can be simplified on at least
5609 one of the branches once its pushed inside the COND_EXPR. */
5610 if (!TREE_CONSTANT (arg
))
5613 if (TREE_CODE (cond
) == COND_EXPR
)
5615 test
= TREE_OPERAND (cond
, 0);
5616 true_value
= TREE_OPERAND (cond
, 1);
5617 false_value
= TREE_OPERAND (cond
, 2);
5618 /* If this operand throws an expression, then it does not make
5619 sense to try to perform a logical or arithmetic operation
5621 if (VOID_TYPE_P (TREE_TYPE (true_value
)))
5623 if (VOID_TYPE_P (TREE_TYPE (false_value
)))
5628 tree testtype
= TREE_TYPE (cond
);
5630 true_value
= constant_boolean_node (true, testtype
);
5631 false_value
= constant_boolean_node (false, testtype
);
5634 arg
= fold_convert (arg_type
, arg
);
5637 true_value
= fold_convert (cond_type
, true_value
);
5639 lhs
= fold_build2 (code
, type
, true_value
, arg
);
5641 lhs
= fold_build2 (code
, type
, arg
, true_value
);
5645 false_value
= fold_convert (cond_type
, false_value
);
5647 rhs
= fold_build2 (code
, type
, false_value
, arg
);
5649 rhs
= fold_build2 (code
, type
, arg
, false_value
);
5652 test
= fold_build3 (COND_EXPR
, type
, test
, lhs
, rhs
);
5653 return fold_convert (type
, test
);
5657 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5659 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5660 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5661 ADDEND is the same as X.
5663 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5664 and finite. The problematic cases are when X is zero, and its mode
5665 has signed zeros. In the case of rounding towards -infinity,
5666 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5667 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5670 fold_real_zero_addition_p (tree type
, tree addend
, int negate
)
5672 if (!real_zerop (addend
))
5675 /* Don't allow the fold with -fsignaling-nans. */
5676 if (HONOR_SNANS (TYPE_MODE (type
)))
5679 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5680 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
5683 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5684 if (TREE_CODE (addend
) == REAL_CST
5685 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend
)))
5688 /* The mode has signed zeros, and we have to honor their sign.
5689 In this situation, there is only one case we can return true for.
5690 X - 0 is the same as X unless rounding towards -infinity is
5692 return negate
&& !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
));
5695 /* Subroutine of fold() that checks comparisons of built-in math
5696 functions against real constants.
5698 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5699 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5700 is the type of the result and ARG0 and ARG1 are the operands of the
5701 comparison. ARG1 must be a TREE_REAL_CST.
5703 The function returns the constant folded tree if a simplification
5704 can be made, and NULL_TREE otherwise. */
5707 fold_mathfn_compare (enum built_in_function fcode
, enum tree_code code
,
5708 tree type
, tree arg0
, tree arg1
)
5712 if (BUILTIN_SQRT_P (fcode
))
5714 tree arg
= TREE_VALUE (TREE_OPERAND (arg0
, 1));
5715 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (arg0
));
5717 c
= TREE_REAL_CST (arg1
);
5718 if (REAL_VALUE_NEGATIVE (c
))
5720 /* sqrt(x) < y is always false, if y is negative. */
5721 if (code
== EQ_EXPR
|| code
== LT_EXPR
|| code
== LE_EXPR
)
5722 return omit_one_operand (type
, integer_zero_node
, arg
);
5724 /* sqrt(x) > y is always true, if y is negative and we
5725 don't care about NaNs, i.e. negative values of x. */
5726 if (code
== NE_EXPR
|| !HONOR_NANS (mode
))
5727 return omit_one_operand (type
, integer_one_node
, arg
);
5729 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5730 return fold_build2 (GE_EXPR
, type
, arg
,
5731 build_real (TREE_TYPE (arg
), dconst0
));
5733 else if (code
== GT_EXPR
|| code
== GE_EXPR
)
5737 REAL_ARITHMETIC (c2
, MULT_EXPR
, c
, c
);
5738 real_convert (&c2
, mode
, &c2
);
5740 if (REAL_VALUE_ISINF (c2
))
5742 /* sqrt(x) > y is x == +Inf, when y is very large. */
5743 if (HONOR_INFINITIES (mode
))
5744 return fold_build2 (EQ_EXPR
, type
, arg
,
5745 build_real (TREE_TYPE (arg
), c2
));
5747 /* sqrt(x) > y is always false, when y is very large
5748 and we don't care about infinities. */
5749 return omit_one_operand (type
, integer_zero_node
, arg
);
5752 /* sqrt(x) > c is the same as x > c*c. */
5753 return fold_build2 (code
, type
, arg
,
5754 build_real (TREE_TYPE (arg
), c2
));
5756 else if (code
== LT_EXPR
|| code
== LE_EXPR
)
5760 REAL_ARITHMETIC (c2
, MULT_EXPR
, c
, c
);
5761 real_convert (&c2
, mode
, &c2
);
5763 if (REAL_VALUE_ISINF (c2
))
5765 /* sqrt(x) < y is always true, when y is a very large
5766 value and we don't care about NaNs or Infinities. */
5767 if (! HONOR_NANS (mode
) && ! HONOR_INFINITIES (mode
))
5768 return omit_one_operand (type
, integer_one_node
, arg
);
5770 /* sqrt(x) < y is x != +Inf when y is very large and we
5771 don't care about NaNs. */
5772 if (! HONOR_NANS (mode
))
5773 return fold_build2 (NE_EXPR
, type
, arg
,
5774 build_real (TREE_TYPE (arg
), c2
));
5776 /* sqrt(x) < y is x >= 0 when y is very large and we
5777 don't care about Infinities. */
5778 if (! HONOR_INFINITIES (mode
))
5779 return fold_build2 (GE_EXPR
, type
, arg
,
5780 build_real (TREE_TYPE (arg
), dconst0
));
5782 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5783 if (lang_hooks
.decls
.global_bindings_p () != 0
5784 || CONTAINS_PLACEHOLDER_P (arg
))
5787 arg
= save_expr (arg
);
5788 return fold_build2 (TRUTH_ANDIF_EXPR
, type
,
5789 fold_build2 (GE_EXPR
, type
, arg
,
5790 build_real (TREE_TYPE (arg
),
5792 fold_build2 (NE_EXPR
, type
, arg
,
5793 build_real (TREE_TYPE (arg
),
5797 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5798 if (! HONOR_NANS (mode
))
5799 return fold_build2 (code
, type
, arg
,
5800 build_real (TREE_TYPE (arg
), c2
));
5802 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5803 if (lang_hooks
.decls
.global_bindings_p () == 0
5804 && ! CONTAINS_PLACEHOLDER_P (arg
))
5806 arg
= save_expr (arg
);
5807 return fold_build2 (TRUTH_ANDIF_EXPR
, type
,
5808 fold_build2 (GE_EXPR
, type
, arg
,
5809 build_real (TREE_TYPE (arg
),
5811 fold_build2 (code
, type
, arg
,
5812 build_real (TREE_TYPE (arg
),
5821 /* Subroutine of fold() that optimizes comparisons against Infinities,
5822 either +Inf or -Inf.
5824 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5825 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5826 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5828 The function returns the constant folded tree if a simplification
5829 can be made, and NULL_TREE otherwise. */
5832 fold_inf_compare (enum tree_code code
, tree type
, tree arg0
, tree arg1
)
5834 enum machine_mode mode
;
5835 REAL_VALUE_TYPE max
;
5839 mode
= TYPE_MODE (TREE_TYPE (arg0
));
5841 /* For negative infinity swap the sense of the comparison. */
5842 neg
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
));
5844 code
= swap_tree_comparison (code
);
5849 /* x > +Inf is always false, if with ignore sNANs. */
5850 if (HONOR_SNANS (mode
))
5852 return omit_one_operand (type
, integer_zero_node
, arg0
);
5855 /* x <= +Inf is always true, if we don't case about NaNs. */
5856 if (! HONOR_NANS (mode
))
5857 return omit_one_operand (type
, integer_one_node
, arg0
);
5859 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5860 if (lang_hooks
.decls
.global_bindings_p () == 0
5861 && ! CONTAINS_PLACEHOLDER_P (arg0
))
5863 arg0
= save_expr (arg0
);
5864 return fold_build2 (EQ_EXPR
, type
, arg0
, arg0
);
5870 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5871 real_maxval (&max
, neg
, mode
);
5872 return fold_build2 (neg
? LT_EXPR
: GT_EXPR
, type
,
5873 arg0
, build_real (TREE_TYPE (arg0
), max
));
5876 /* x < +Inf is always equal to x <= DBL_MAX. */
5877 real_maxval (&max
, neg
, mode
);
5878 return fold_build2 (neg
? GE_EXPR
: LE_EXPR
, type
,
5879 arg0
, build_real (TREE_TYPE (arg0
), max
));
5882 /* x != +Inf is always equal to !(x > DBL_MAX). */
5883 real_maxval (&max
, neg
, mode
);
5884 if (! HONOR_NANS (mode
))
5885 return fold_build2 (neg
? GE_EXPR
: LE_EXPR
, type
,
5886 arg0
, build_real (TREE_TYPE (arg0
), max
));
5888 /* The transformation below creates non-gimple code and thus is
5889 not appropriate if we are in gimple form. */
5893 temp
= fold_build2 (neg
? LT_EXPR
: GT_EXPR
, type
,
5894 arg0
, build_real (TREE_TYPE (arg0
), max
));
5895 return fold_build1 (TRUTH_NOT_EXPR
, type
, temp
);
5904 /* Subroutine of fold() that optimizes comparisons of a division by
5905 a nonzero integer constant against an integer constant, i.e.
5908 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5909 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5910 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5912 The function returns the constant folded tree if a simplification
5913 can be made, and NULL_TREE otherwise. */
5916 fold_div_compare (enum tree_code code
, tree type
, tree arg0
, tree arg1
)
5918 tree prod
, tmp
, hi
, lo
;
5919 tree arg00
= TREE_OPERAND (arg0
, 0);
5920 tree arg01
= TREE_OPERAND (arg0
, 1);
5921 unsigned HOST_WIDE_INT lpart
;
5922 HOST_WIDE_INT hpart
;
5925 /* We have to do this the hard way to detect unsigned overflow.
5926 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5927 overflow
= mul_double (TREE_INT_CST_LOW (arg01
),
5928 TREE_INT_CST_HIGH (arg01
),
5929 TREE_INT_CST_LOW (arg1
),
5930 TREE_INT_CST_HIGH (arg1
), &lpart
, &hpart
);
5931 prod
= build_int_cst_wide (TREE_TYPE (arg00
), lpart
, hpart
);
5932 prod
= force_fit_type (prod
, -1, overflow
, false);
5934 if (TYPE_UNSIGNED (TREE_TYPE (arg0
)))
5936 tmp
= int_const_binop (MINUS_EXPR
, arg01
, integer_one_node
, 0);
5939 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5940 overflow
= add_double (TREE_INT_CST_LOW (prod
),
5941 TREE_INT_CST_HIGH (prod
),
5942 TREE_INT_CST_LOW (tmp
),
5943 TREE_INT_CST_HIGH (tmp
),
5945 hi
= build_int_cst_wide (TREE_TYPE (arg00
), lpart
, hpart
);
5946 hi
= force_fit_type (hi
, -1, overflow
| TREE_OVERFLOW (prod
),
5947 TREE_CONSTANT_OVERFLOW (prod
));
5949 else if (tree_int_cst_sgn (arg01
) >= 0)
5951 tmp
= int_const_binop (MINUS_EXPR
, arg01
, integer_one_node
, 0);
5952 switch (tree_int_cst_sgn (arg1
))
5955 lo
= int_const_binop (MINUS_EXPR
, prod
, tmp
, 0);
5960 lo
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
5965 hi
= int_const_binop (PLUS_EXPR
, prod
, tmp
, 0);
5975 /* A negative divisor reverses the relational operators. */
5976 code
= swap_tree_comparison (code
);
5978 tmp
= int_const_binop (PLUS_EXPR
, arg01
, integer_one_node
, 0);
5979 switch (tree_int_cst_sgn (arg1
))
5982 hi
= int_const_binop (MINUS_EXPR
, prod
, tmp
, 0);
5987 hi
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
5992 lo
= int_const_binop (PLUS_EXPR
, prod
, tmp
, 0);
6004 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6005 return omit_one_operand (type
, integer_zero_node
, arg00
);
6006 if (TREE_OVERFLOW (hi
))
6007 return fold_build2 (GE_EXPR
, type
, arg00
, lo
);
6008 if (TREE_OVERFLOW (lo
))
6009 return fold_build2 (LE_EXPR
, type
, arg00
, hi
);
6010 return build_range_check (type
, arg00
, 1, lo
, hi
);
6013 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6014 return omit_one_operand (type
, integer_one_node
, arg00
);
6015 if (TREE_OVERFLOW (hi
))
6016 return fold_build2 (LT_EXPR
, type
, arg00
, lo
);
6017 if (TREE_OVERFLOW (lo
))
6018 return fold_build2 (GT_EXPR
, type
, arg00
, hi
);
6019 return build_range_check (type
, arg00
, 0, lo
, hi
);
6022 if (TREE_OVERFLOW (lo
))
6023 return omit_one_operand (type
, integer_zero_node
, arg00
);
6024 return fold_build2 (LT_EXPR
, type
, arg00
, lo
);
6027 if (TREE_OVERFLOW (hi
))
6028 return omit_one_operand (type
, integer_one_node
, arg00
);
6029 return fold_build2 (LE_EXPR
, type
, arg00
, hi
);
6032 if (TREE_OVERFLOW (hi
))
6033 return omit_one_operand (type
, integer_zero_node
, arg00
);
6034 return fold_build2 (GT_EXPR
, type
, arg00
, hi
);
6037 if (TREE_OVERFLOW (lo
))
6038 return omit_one_operand (type
, integer_one_node
, arg00
);
6039 return fold_build2 (GE_EXPR
, type
, arg00
, lo
);
6049 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6050 equality/inequality test, then return a simplified form of the test
6051 using a sign testing. Otherwise return NULL. TYPE is the desired
6055 fold_single_bit_test_into_sign_test (enum tree_code code
, tree arg0
, tree arg1
,
6058 /* If this is testing a single bit, we can optimize the test. */
6059 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6060 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6061 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6063 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6064 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6065 tree arg00
= sign_bit_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1));
6067 if (arg00
!= NULL_TREE
6068 /* This is only a win if casting to a signed type is cheap,
6069 i.e. when arg00's type is not a partial mode. */
6070 && TYPE_PRECISION (TREE_TYPE (arg00
))
6071 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00
))))
6073 tree stype
= lang_hooks
.types
.signed_type (TREE_TYPE (arg00
));
6074 return fold_build2 (code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
6075 result_type
, fold_convert (stype
, arg00
),
6076 fold_convert (stype
, integer_zero_node
));
6083 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6084 equality/inequality test, then return a simplified form of
6085 the test using shifts and logical operations. Otherwise return
6086 NULL. TYPE is the desired result type. */
6089 fold_single_bit_test (enum tree_code code
, tree arg0
, tree arg1
,
6092 /* If this is testing a single bit, we can optimize the test. */
6093 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6094 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6095 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6097 tree inner
= TREE_OPERAND (arg0
, 0);
6098 tree type
= TREE_TYPE (arg0
);
6099 int bitnum
= tree_log2 (TREE_OPERAND (arg0
, 1));
6100 enum machine_mode operand_mode
= TYPE_MODE (type
);
6102 tree signed_type
, unsigned_type
, intermediate_type
;
6105 /* First, see if we can fold the single bit test into a sign-bit
6107 tem
= fold_single_bit_test_into_sign_test (code
, arg0
, arg1
,
6112 /* Otherwise we have (A & C) != 0 where C is a single bit,
6113 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6114 Similarly for (A & C) == 0. */
6116 /* If INNER is a right shift of a constant and it plus BITNUM does
6117 not overflow, adjust BITNUM and INNER. */
6118 if (TREE_CODE (inner
) == RSHIFT_EXPR
6119 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
6120 && TREE_INT_CST_HIGH (TREE_OPERAND (inner
, 1)) == 0
6121 && bitnum
< TYPE_PRECISION (type
)
6122 && 0 > compare_tree_int (TREE_OPERAND (inner
, 1),
6123 bitnum
- TYPE_PRECISION (type
)))
6125 bitnum
+= TREE_INT_CST_LOW (TREE_OPERAND (inner
, 1));
6126 inner
= TREE_OPERAND (inner
, 0);
6129 /* If we are going to be able to omit the AND below, we must do our
6130 operations as unsigned. If we must use the AND, we have a choice.
6131 Normally unsigned is faster, but for some machines signed is. */
6132 #ifdef LOAD_EXTEND_OP
6133 ops_unsigned
= (LOAD_EXTEND_OP (operand_mode
) == SIGN_EXTEND
6134 && !flag_syntax_only
) ? 0 : 1;
6139 signed_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 0);
6140 unsigned_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 1);
6141 intermediate_type
= ops_unsigned
? unsigned_type
: signed_type
;
6142 inner
= fold_convert (intermediate_type
, inner
);
6145 inner
= build2 (RSHIFT_EXPR
, intermediate_type
,
6146 inner
, size_int (bitnum
));
6148 if (code
== EQ_EXPR
)
6149 inner
= fold_build2 (BIT_XOR_EXPR
, intermediate_type
,
6150 inner
, integer_one_node
);
6152 /* Put the AND last so it can combine with more things. */
6153 inner
= build2 (BIT_AND_EXPR
, intermediate_type
,
6154 inner
, integer_one_node
);
6156 /* Make sure to return the proper type. */
6157 inner
= fold_convert (result_type
, inner
);
6164 /* Check whether we are allowed to reorder operands arg0 and arg1,
6165 such that the evaluation of arg1 occurs before arg0. */
6168 reorder_operands_p (tree arg0
, tree arg1
)
6170 if (! flag_evaluation_order
)
6172 if (TREE_CONSTANT (arg0
) || TREE_CONSTANT (arg1
))
6174 return ! TREE_SIDE_EFFECTS (arg0
)
6175 && ! TREE_SIDE_EFFECTS (arg1
);
6178 /* Test whether it is preferable two swap two operands, ARG0 and
6179 ARG1, for example because ARG0 is an integer constant and ARG1
6180 isn't. If REORDER is true, only recommend swapping if we can
6181 evaluate the operands in reverse order. */
6184 tree_swap_operands_p (tree arg0
, tree arg1
, bool reorder
)
6186 STRIP_SIGN_NOPS (arg0
);
6187 STRIP_SIGN_NOPS (arg1
);
6189 if (TREE_CODE (arg1
) == INTEGER_CST
)
6191 if (TREE_CODE (arg0
) == INTEGER_CST
)
6194 if (TREE_CODE (arg1
) == REAL_CST
)
6196 if (TREE_CODE (arg0
) == REAL_CST
)
6199 if (TREE_CODE (arg1
) == COMPLEX_CST
)
6201 if (TREE_CODE (arg0
) == COMPLEX_CST
)
6204 if (TREE_CONSTANT (arg1
))
6206 if (TREE_CONSTANT (arg0
))
6212 if (reorder
&& flag_evaluation_order
6213 && (TREE_SIDE_EFFECTS (arg0
) || TREE_SIDE_EFFECTS (arg1
)))
6221 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6222 for commutative and comparison operators. Ensuring a canonical
6223 form allows the optimizers to find additional redundancies without
6224 having to explicitly check for both orderings. */
6225 if (TREE_CODE (arg0
) == SSA_NAME
6226 && TREE_CODE (arg1
) == SSA_NAME
6227 && SSA_NAME_VERSION (arg0
) > SSA_NAME_VERSION (arg1
))
6233 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6234 ARG0 is extended to a wider type. */
6237 fold_widened_comparison (enum tree_code code
, tree type
, tree arg0
, tree arg1
)
6239 tree arg0_unw
= get_unwidened (arg0
, NULL_TREE
);
6241 tree shorter_type
, outer_type
;
6245 if (arg0_unw
== arg0
)
6247 shorter_type
= TREE_TYPE (arg0_unw
);
6249 #ifdef HAVE_canonicalize_funcptr_for_compare
6250 /* Disable this optimization if we're casting a function pointer
6251 type on targets that require function pointer canonicalization. */
6252 if (HAVE_canonicalize_funcptr_for_compare
6253 && TREE_CODE (shorter_type
) == POINTER_TYPE
6254 && TREE_CODE (TREE_TYPE (shorter_type
)) == FUNCTION_TYPE
)
6258 if (TYPE_PRECISION (TREE_TYPE (arg0
)) <= TYPE_PRECISION (shorter_type
))
6261 arg1_unw
= get_unwidened (arg1
, shorter_type
);
6263 /* If possible, express the comparison in the shorter mode. */
6264 if ((code
== EQ_EXPR
|| code
== NE_EXPR
6265 || TYPE_UNSIGNED (TREE_TYPE (arg0
)) == TYPE_UNSIGNED (shorter_type
))
6266 && (TREE_TYPE (arg1_unw
) == shorter_type
6267 || (TREE_CODE (arg1_unw
) == INTEGER_CST
6268 && (TREE_CODE (shorter_type
) == INTEGER_TYPE
6269 || TREE_CODE (shorter_type
) == BOOLEAN_TYPE
)
6270 && int_fits_type_p (arg1_unw
, shorter_type
))))
6271 return fold_build2 (code
, type
, arg0_unw
,
6272 fold_convert (shorter_type
, arg1_unw
));
6274 if (TREE_CODE (arg1_unw
) != INTEGER_CST
6275 || TREE_CODE (shorter_type
) != INTEGER_TYPE
6276 || !int_fits_type_p (arg1_unw
, shorter_type
))
6279 /* If we are comparing with the integer that does not fit into the range
6280 of the shorter type, the result is known. */
6281 outer_type
= TREE_TYPE (arg1_unw
);
6282 min
= lower_bound_in_type (outer_type
, shorter_type
);
6283 max
= upper_bound_in_type (outer_type
, shorter_type
);
6285 above
= integer_nonzerop (fold_relational_const (LT_EXPR
, type
,
6287 below
= integer_nonzerop (fold_relational_const (LT_EXPR
, type
,
6294 return omit_one_operand (type
, integer_zero_node
, arg0
);
6299 return omit_one_operand (type
, integer_one_node
, arg0
);
6305 return omit_one_operand (type
, integer_one_node
, arg0
);
6307 return omit_one_operand (type
, integer_zero_node
, arg0
);
6312 return omit_one_operand (type
, integer_zero_node
, arg0
);
6314 return omit_one_operand (type
, integer_one_node
, arg0
);
6323 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6324 ARG0 just the signedness is changed. */
6327 fold_sign_changed_comparison (enum tree_code code
, tree type
,
6328 tree arg0
, tree arg1
)
6330 tree arg0_inner
, tmp
;
6331 tree inner_type
, outer_type
;
6333 if (TREE_CODE (arg0
) != NOP_EXPR
6334 && TREE_CODE (arg0
) != CONVERT_EXPR
)
6337 outer_type
= TREE_TYPE (arg0
);
6338 arg0_inner
= TREE_OPERAND (arg0
, 0);
6339 inner_type
= TREE_TYPE (arg0_inner
);
6341 #ifdef HAVE_canonicalize_funcptr_for_compare
6342 /* Disable this optimization if we're casting a function pointer
6343 type on targets that require function pointer canonicalization. */
6344 if (HAVE_canonicalize_funcptr_for_compare
6345 && TREE_CODE (inner_type
) == POINTER_TYPE
6346 && TREE_CODE (TREE_TYPE (inner_type
)) == FUNCTION_TYPE
)
6350 if (TYPE_PRECISION (inner_type
) != TYPE_PRECISION (outer_type
))
6353 if (TREE_CODE (arg1
) != INTEGER_CST
6354 && !((TREE_CODE (arg1
) == NOP_EXPR
6355 || TREE_CODE (arg1
) == CONVERT_EXPR
)
6356 && TREE_TYPE (TREE_OPERAND (arg1
, 0)) == inner_type
))
6359 if (TYPE_UNSIGNED (inner_type
) != TYPE_UNSIGNED (outer_type
)
6364 if (TREE_CODE (arg1
) == INTEGER_CST
)
6366 tmp
= build_int_cst_wide (inner_type
,
6367 TREE_INT_CST_LOW (arg1
),
6368 TREE_INT_CST_HIGH (arg1
));
6369 arg1
= force_fit_type (tmp
, 0,
6370 TREE_OVERFLOW (arg1
),
6371 TREE_CONSTANT_OVERFLOW (arg1
));
6374 arg1
= fold_convert (inner_type
, arg1
);
6376 return fold_build2 (code
, type
, arg0_inner
, arg1
);
6379 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6380 step of the array. Reconstructs s and delta in the case of s * delta
6381 being an integer constant (and thus already folded).
6382 ADDR is the address. MULT is the multiplicative expression.
6383 If the function succeeds, the new address expression is returned. Otherwise
6384 NULL_TREE is returned. */
6387 try_move_mult_to_index (enum tree_code code
, tree addr
, tree op1
)
6389 tree s
, delta
, step
;
6390 tree ref
= TREE_OPERAND (addr
, 0), pref
;
6394 /* Canonicalize op1 into a possibly non-constant delta
6395 and an INTEGER_CST s. */
6396 if (TREE_CODE (op1
) == MULT_EXPR
)
6398 tree arg0
= TREE_OPERAND (op1
, 0), arg1
= TREE_OPERAND (op1
, 1);
6403 if (TREE_CODE (arg0
) == INTEGER_CST
)
6408 else if (TREE_CODE (arg1
) == INTEGER_CST
)
6416 else if (TREE_CODE (op1
) == INTEGER_CST
)
6423 /* Simulate we are delta * 1. */
6425 s
= integer_one_node
;
6428 for (;; ref
= TREE_OPERAND (ref
, 0))
6430 if (TREE_CODE (ref
) == ARRAY_REF
)
6432 itype
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref
, 0)));
6436 step
= array_ref_element_size (ref
);
6437 if (TREE_CODE (step
) != INTEGER_CST
)
6442 if (! tree_int_cst_equal (step
, s
))
6447 /* Try if delta is a multiple of step. */
6448 tree tmp
= div_if_zero_remainder (EXACT_DIV_EXPR
, delta
, step
);
6457 if (!handled_component_p (ref
))
6461 /* We found the suitable array reference. So copy everything up to it,
6462 and replace the index. */
6464 pref
= TREE_OPERAND (addr
, 0);
6465 ret
= copy_node (pref
);
6470 pref
= TREE_OPERAND (pref
, 0);
6471 TREE_OPERAND (pos
, 0) = copy_node (pref
);
6472 pos
= TREE_OPERAND (pos
, 0);
6475 TREE_OPERAND (pos
, 1) = fold_build2 (code
, itype
,
6476 fold_convert (itype
,
6477 TREE_OPERAND (pos
, 1)),
6478 fold_convert (itype
, delta
));
6480 return fold_build1 (ADDR_EXPR
, TREE_TYPE (addr
), ret
);
6484 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6485 means A >= Y && A != MAX, but in this case we know that
6486 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6489 fold_to_nonsharp_ineq_using_bound (tree ineq
, tree bound
)
6491 tree a
, typea
, type
= TREE_TYPE (ineq
), a1
, diff
, y
;
6493 if (TREE_CODE (bound
) == LT_EXPR
)
6494 a
= TREE_OPERAND (bound
, 0);
6495 else if (TREE_CODE (bound
) == GT_EXPR
)
6496 a
= TREE_OPERAND (bound
, 1);
6500 typea
= TREE_TYPE (a
);
6501 if (!INTEGRAL_TYPE_P (typea
)
6502 && !POINTER_TYPE_P (typea
))
6505 if (TREE_CODE (ineq
) == LT_EXPR
)
6507 a1
= TREE_OPERAND (ineq
, 1);
6508 y
= TREE_OPERAND (ineq
, 0);
6510 else if (TREE_CODE (ineq
) == GT_EXPR
)
6512 a1
= TREE_OPERAND (ineq
, 0);
6513 y
= TREE_OPERAND (ineq
, 1);
6518 if (TREE_TYPE (a1
) != typea
)
6521 diff
= fold_build2 (MINUS_EXPR
, typea
, a1
, a
);
6522 if (!integer_onep (diff
))
6525 return fold_build2 (GE_EXPR
, type
, a
, y
);
6528 /* Fold a unary expression of code CODE and type TYPE with operand
6529 OP0. Return the folded expression if folding is successful.
6530 Otherwise, return NULL_TREE. */
6533 fold_unary (enum tree_code code
, tree type
, tree op0
)
6537 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
6539 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
6540 && TREE_CODE_LENGTH (code
) == 1);
6545 if (code
== NOP_EXPR
|| code
== CONVERT_EXPR
6546 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
)
6548 /* Don't use STRIP_NOPS, because signedness of argument type
6550 STRIP_SIGN_NOPS (arg0
);
6554 /* Strip any conversions that don't change the mode. This
6555 is safe for every expression, except for a comparison
6556 expression because its signedness is derived from its
6559 Note that this is done as an internal manipulation within
6560 the constant folder, in order to find the simplest
6561 representation of the arguments so that their form can be
6562 studied. In any cases, the appropriate type conversions
6563 should be put back in the tree that will get out of the
6569 if (TREE_CODE_CLASS (code
) == tcc_unary
)
6571 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
6572 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
6573 fold_build1 (code
, type
, TREE_OPERAND (arg0
, 1)));
6574 else if (TREE_CODE (arg0
) == COND_EXPR
)
6576 tree arg01
= TREE_OPERAND (arg0
, 1);
6577 tree arg02
= TREE_OPERAND (arg0
, 2);
6578 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
6579 arg01
= fold_build1 (code
, type
, arg01
);
6580 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
6581 arg02
= fold_build1 (code
, type
, arg02
);
6582 tem
= fold_build3 (COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
6585 /* If this was a conversion, and all we did was to move into
6586 inside the COND_EXPR, bring it back out. But leave it if
6587 it is a conversion from integer to integer and the
6588 result precision is no wider than a word since such a
6589 conversion is cheap and may be optimized away by combine,
6590 while it couldn't if it were outside the COND_EXPR. Then return
6591 so we don't get into an infinite recursion loop taking the
6592 conversion out and then back in. */
6594 if ((code
== NOP_EXPR
|| code
== CONVERT_EXPR
6595 || code
== NON_LVALUE_EXPR
)
6596 && TREE_CODE (tem
) == COND_EXPR
6597 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
6598 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
6599 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
6600 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
6601 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
6602 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
6603 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
6605 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
6606 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
6607 || flag_syntax_only
))
6608 tem
= build1 (code
, type
,
6610 TREE_TYPE (TREE_OPERAND
6611 (TREE_OPERAND (tem
, 1), 0)),
6612 TREE_OPERAND (tem
, 0),
6613 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
6614 TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)));
6617 else if (COMPARISON_CLASS_P (arg0
))
6619 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
6621 arg0
= copy_node (arg0
);
6622 TREE_TYPE (arg0
) = type
;
6625 else if (TREE_CODE (type
) != INTEGER_TYPE
)
6626 return fold_build3 (COND_EXPR
, type
, arg0
,
6627 fold_build1 (code
, type
,
6629 fold_build1 (code
, type
,
6630 integer_zero_node
));
6639 case FIX_TRUNC_EXPR
:
6641 case FIX_FLOOR_EXPR
:
6642 case FIX_ROUND_EXPR
:
6643 if (TREE_TYPE (op0
) == type
)
6646 /* Handle cases of two conversions in a row. */
6647 if (TREE_CODE (op0
) == NOP_EXPR
6648 || TREE_CODE (op0
) == CONVERT_EXPR
)
6650 tree inside_type
= TREE_TYPE (TREE_OPERAND (op0
, 0));
6651 tree inter_type
= TREE_TYPE (op0
);
6652 int inside_int
= INTEGRAL_TYPE_P (inside_type
);
6653 int inside_ptr
= POINTER_TYPE_P (inside_type
);
6654 int inside_float
= FLOAT_TYPE_P (inside_type
);
6655 int inside_vec
= TREE_CODE (inside_type
) == VECTOR_TYPE
;
6656 unsigned int inside_prec
= TYPE_PRECISION (inside_type
);
6657 int inside_unsignedp
= TYPE_UNSIGNED (inside_type
);
6658 int inter_int
= INTEGRAL_TYPE_P (inter_type
);
6659 int inter_ptr
= POINTER_TYPE_P (inter_type
);
6660 int inter_float
= FLOAT_TYPE_P (inter_type
);
6661 int inter_vec
= TREE_CODE (inter_type
) == VECTOR_TYPE
;
6662 unsigned int inter_prec
= TYPE_PRECISION (inter_type
);
6663 int inter_unsignedp
= TYPE_UNSIGNED (inter_type
);
6664 int final_int
= INTEGRAL_TYPE_P (type
);
6665 int final_ptr
= POINTER_TYPE_P (type
);
6666 int final_float
= FLOAT_TYPE_P (type
);
6667 int final_vec
= TREE_CODE (type
) == VECTOR_TYPE
;
6668 unsigned int final_prec
= TYPE_PRECISION (type
);
6669 int final_unsignedp
= TYPE_UNSIGNED (type
);
6671 /* In addition to the cases of two conversions in a row
6672 handled below, if we are converting something to its own
6673 type via an object of identical or wider precision, neither
6674 conversion is needed. */
6675 if (TYPE_MAIN_VARIANT (inside_type
) == TYPE_MAIN_VARIANT (type
)
6676 && ((inter_int
&& final_int
) || (inter_float
&& final_float
))
6677 && inter_prec
>= final_prec
)
6678 return fold_build1 (code
, type
, TREE_OPERAND (op0
, 0));
6680 /* Likewise, if the intermediate and final types are either both
6681 float or both integer, we don't need the middle conversion if
6682 it is wider than the final type and doesn't change the signedness
6683 (for integers). Avoid this if the final type is a pointer
6684 since then we sometimes need the inner conversion. Likewise if
6685 the outer has a precision not equal to the size of its mode. */
6686 if ((((inter_int
|| inter_ptr
) && (inside_int
|| inside_ptr
))
6687 || (inter_float
&& inside_float
)
6688 || (inter_vec
&& inside_vec
))
6689 && inter_prec
>= inside_prec
6690 && (inter_float
|| inter_vec
6691 || inter_unsignedp
== inside_unsignedp
)
6692 && ! (final_prec
!= GET_MODE_BITSIZE (TYPE_MODE (type
))
6693 && TYPE_MODE (type
) == TYPE_MODE (inter_type
))
6695 && (! final_vec
|| inter_prec
== inside_prec
))
6696 return fold_build1 (code
, type
, TREE_OPERAND (op0
, 0));
6698 /* If we have a sign-extension of a zero-extended value, we can
6699 replace that by a single zero-extension. */
6700 if (inside_int
&& inter_int
&& final_int
6701 && inside_prec
< inter_prec
&& inter_prec
< final_prec
6702 && inside_unsignedp
&& !inter_unsignedp
)
6703 return fold_build1 (code
, type
, TREE_OPERAND (op0
, 0));
6705 /* Two conversions in a row are not needed unless:
6706 - some conversion is floating-point (overstrict for now), or
6707 - some conversion is a vector (overstrict for now), or
6708 - the intermediate type is narrower than both initial and
6710 - the intermediate type and innermost type differ in signedness,
6711 and the outermost type is wider than the intermediate, or
6712 - the initial type is a pointer type and the precisions of the
6713 intermediate and final types differ, or
6714 - the final type is a pointer type and the precisions of the
6715 initial and intermediate types differ. */
6716 if (! inside_float
&& ! inter_float
&& ! final_float
6717 && ! inside_vec
&& ! inter_vec
&& ! final_vec
6718 && (inter_prec
> inside_prec
|| inter_prec
> final_prec
)
6719 && ! (inside_int
&& inter_int
6720 && inter_unsignedp
!= inside_unsignedp
6721 && inter_prec
< final_prec
)
6722 && ((inter_unsignedp
&& inter_prec
> inside_prec
)
6723 == (final_unsignedp
&& final_prec
> inter_prec
))
6724 && ! (inside_ptr
&& inter_prec
!= final_prec
)
6725 && ! (final_ptr
&& inside_prec
!= inter_prec
)
6726 && ! (final_prec
!= GET_MODE_BITSIZE (TYPE_MODE (type
))
6727 && TYPE_MODE (type
) == TYPE_MODE (inter_type
))
6729 return fold_build1 (code
, type
, TREE_OPERAND (op0
, 0));
6732 /* Handle (T *)&A.B.C for A being of type T and B and C
6733 living at offset zero. This occurs frequently in
6734 C++ upcasting and then accessing the base. */
6735 if (TREE_CODE (op0
) == ADDR_EXPR
6736 && POINTER_TYPE_P (type
)
6737 && handled_component_p (TREE_OPERAND (op0
, 0)))
6739 HOST_WIDE_INT bitsize
, bitpos
;
6741 enum machine_mode mode
;
6742 int unsignedp
, volatilep
;
6743 tree base
= TREE_OPERAND (op0
, 0);
6744 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
6745 &mode
, &unsignedp
, &volatilep
, false);
6746 /* If the reference was to a (constant) zero offset, we can use
6747 the address of the base if it has the same base type
6748 as the result type. */
6749 if (! offset
&& bitpos
== 0
6750 && TYPE_MAIN_VARIANT (TREE_TYPE (type
))
6751 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
6752 return fold_convert (type
, build_fold_addr_expr (base
));
6755 if (TREE_CODE (op0
) == MODIFY_EXPR
6756 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
6757 /* Detect assigning a bitfield. */
6758 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
6759 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
6761 /* Don't leave an assignment inside a conversion
6762 unless assigning a bitfield. */
6763 tem
= fold_build1 (code
, type
, TREE_OPERAND (op0
, 1));
6764 /* First do the assignment, then return converted constant. */
6765 tem
= build2 (COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
6766 TREE_NO_WARNING (tem
) = 1;
6767 TREE_USED (tem
) = 1;
6771 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6772 constants (if x has signed type, the sign bit cannot be set
6773 in c). This folds extension into the BIT_AND_EXPR. */
6774 if (INTEGRAL_TYPE_P (type
)
6775 && TREE_CODE (type
) != BOOLEAN_TYPE
6776 && TREE_CODE (op0
) == BIT_AND_EXPR
6777 && TREE_CODE (TREE_OPERAND (op0
, 1)) == INTEGER_CST
)
6780 tree and0
= TREE_OPERAND (and, 0), and1
= TREE_OPERAND (and, 1);
6783 if (TYPE_UNSIGNED (TREE_TYPE (and))
6784 || (TYPE_PRECISION (type
)
6785 <= TYPE_PRECISION (TREE_TYPE (and))))
6787 else if (TYPE_PRECISION (TREE_TYPE (and1
))
6788 <= HOST_BITS_PER_WIDE_INT
6789 && host_integerp (and1
, 1))
6791 unsigned HOST_WIDE_INT cst
;
6793 cst
= tree_low_cst (and1
, 1);
6794 cst
&= (HOST_WIDE_INT
) -1
6795 << (TYPE_PRECISION (TREE_TYPE (and1
)) - 1);
6796 change
= (cst
== 0);
6797 #ifdef LOAD_EXTEND_OP
6799 && !flag_syntax_only
6800 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0
)))
6803 tree uns
= lang_hooks
.types
.unsigned_type (TREE_TYPE (and0
));
6804 and0
= fold_convert (uns
, and0
);
6805 and1
= fold_convert (uns
, and1
);
6811 tem
= build_int_cst_wide (type
, TREE_INT_CST_LOW (and1
),
6812 TREE_INT_CST_HIGH (and1
));
6813 tem
= force_fit_type (tem
, 0, TREE_OVERFLOW (and1
),
6814 TREE_CONSTANT_OVERFLOW (and1
));
6815 return fold_build2 (BIT_AND_EXPR
, type
,
6816 fold_convert (type
, and0
), tem
);
6820 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6821 T2 being pointers to types of the same size. */
6822 if (POINTER_TYPE_P (type
)
6823 && BINARY_CLASS_P (arg0
)
6824 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == NOP_EXPR
6825 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
6827 tree arg00
= TREE_OPERAND (arg0
, 0);
6829 tree t1
= TREE_TYPE (arg00
);
6830 tree tt0
= TREE_TYPE (t0
);
6831 tree tt1
= TREE_TYPE (t1
);
6832 tree s0
= TYPE_SIZE (tt0
);
6833 tree s1
= TYPE_SIZE (tt1
);
6835 if (s0
&& s1
&& operand_equal_p (s0
, s1
, OEP_ONLY_CONST
))
6836 return build2 (TREE_CODE (arg0
), t0
, fold_convert (t0
, arg00
),
6837 TREE_OPERAND (arg0
, 1));
6840 tem
= fold_convert_const (code
, type
, arg0
);
6841 return tem
? tem
: NULL_TREE
;
6843 case VIEW_CONVERT_EXPR
:
6844 if (TREE_CODE (op0
) == VIEW_CONVERT_EXPR
)
6845 return build1 (VIEW_CONVERT_EXPR
, type
, TREE_OPERAND (op0
, 0));
6849 if (negate_expr_p (arg0
))
6850 return fold_convert (type
, negate_expr (arg0
));
6851 /* Convert - (~A) to A + 1. */
6852 if (INTEGRAL_TYPE_P (type
) && TREE_CODE (arg0
) == BIT_NOT_EXPR
)
6853 return fold_build2 (PLUS_EXPR
, type
, TREE_OPERAND (arg0
, 0),
6854 build_int_cst (type
, 1));
6858 if (TREE_CODE (arg0
) == INTEGER_CST
|| TREE_CODE (arg0
) == REAL_CST
)
6859 return fold_abs_const (arg0
, type
);
6860 else if (TREE_CODE (arg0
) == NEGATE_EXPR
)
6861 return fold_build1 (ABS_EXPR
, type
, TREE_OPERAND (arg0
, 0));
6862 /* Convert fabs((double)float) into (double)fabsf(float). */
6863 else if (TREE_CODE (arg0
) == NOP_EXPR
6864 && TREE_CODE (type
) == REAL_TYPE
)
6866 tree targ0
= strip_float_extensions (arg0
);
6868 return fold_convert (type
, fold_build1 (ABS_EXPR
,
6872 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
6873 else if (tree_expr_nonnegative_p (arg0
) || TREE_CODE (arg0
) == ABS_EXPR
)
6876 /* Strip sign ops from argument. */
6877 if (TREE_CODE (type
) == REAL_TYPE
)
6879 tem
= fold_strip_sign_ops (arg0
);
6881 return fold_build1 (ABS_EXPR
, type
, fold_convert (type
, tem
));
6886 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
6887 return fold_convert (type
, arg0
);
6888 else if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
6889 return build2 (COMPLEX_EXPR
, type
,
6890 TREE_OPERAND (arg0
, 0),
6891 negate_expr (TREE_OPERAND (arg0
, 1)));
6892 else if (TREE_CODE (arg0
) == COMPLEX_CST
)
6893 return build_complex (type
, TREE_REALPART (arg0
),
6894 negate_expr (TREE_IMAGPART (arg0
)));
6895 else if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
6896 return fold_build2 (TREE_CODE (arg0
), type
,
6897 fold_build1 (CONJ_EXPR
, type
,
6898 TREE_OPERAND (arg0
, 0)),
6899 fold_build1 (CONJ_EXPR
, type
,
6900 TREE_OPERAND (arg0
, 1)));
6901 else if (TREE_CODE (arg0
) == CONJ_EXPR
)
6902 return TREE_OPERAND (arg0
, 0);
6906 if (TREE_CODE (arg0
) == INTEGER_CST
)
6907 return fold_not_const (arg0
, type
);
6908 else if (TREE_CODE (arg0
) == BIT_NOT_EXPR
)
6909 return TREE_OPERAND (arg0
, 0);
6910 /* Convert ~ (-A) to A - 1. */
6911 else if (INTEGRAL_TYPE_P (type
) && TREE_CODE (arg0
) == NEGATE_EXPR
)
6912 return fold_build2 (MINUS_EXPR
, type
, TREE_OPERAND (arg0
, 0),
6913 build_int_cst (type
, 1));
6914 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
6915 else if (INTEGRAL_TYPE_P (type
)
6916 && ((TREE_CODE (arg0
) == MINUS_EXPR
6917 && integer_onep (TREE_OPERAND (arg0
, 1)))
6918 || (TREE_CODE (arg0
) == PLUS_EXPR
6919 && integer_all_onesp (TREE_OPERAND (arg0
, 1)))))
6920 return fold_build1 (NEGATE_EXPR
, type
, TREE_OPERAND (arg0
, 0));
6921 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
6922 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
6923 && (tem
= fold_unary (BIT_NOT_EXPR
, type
,
6925 TREE_OPERAND (arg0
, 0)))))
6926 return fold_build2 (BIT_XOR_EXPR
, type
, tem
,
6927 fold_convert (type
, TREE_OPERAND (arg0
, 1)));
6928 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
6929 && (tem
= fold_unary (BIT_NOT_EXPR
, type
,
6931 TREE_OPERAND (arg0
, 1)))))
6932 return fold_build2 (BIT_XOR_EXPR
, type
,
6933 fold_convert (type
, TREE_OPERAND (arg0
, 0)), tem
);
6937 case TRUTH_NOT_EXPR
:
6938 /* The argument to invert_truthvalue must have Boolean type. */
6939 if (TREE_CODE (TREE_TYPE (arg0
)) != BOOLEAN_TYPE
)
6940 arg0
= fold_convert (boolean_type_node
, arg0
);
6942 /* Note that the operand of this must be an int
6943 and its values must be 0 or 1.
6944 ("true" is a fixed value perhaps depending on the language,
6945 but we don't handle values other than 1 correctly yet.) */
6946 tem
= invert_truthvalue (arg0
);
6947 /* Avoid infinite recursion. */
6948 if (TREE_CODE (tem
) == TRUTH_NOT_EXPR
)
6950 return fold_convert (type
, tem
);
6953 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
6955 else if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
6956 return omit_one_operand (type
, TREE_OPERAND (arg0
, 0),
6957 TREE_OPERAND (arg0
, 1));
6958 else if (TREE_CODE (arg0
) == COMPLEX_CST
)
6959 return TREE_REALPART (arg0
);
6960 else if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
6961 return fold_build2 (TREE_CODE (arg0
), type
,
6962 fold_build1 (REALPART_EXPR
, type
,
6963 TREE_OPERAND (arg0
, 0)),
6964 fold_build1 (REALPART_EXPR
, type
,
6965 TREE_OPERAND (arg0
, 1)));
6969 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
6970 return fold_convert (type
, integer_zero_node
);
6971 else if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
6972 return omit_one_operand (type
, TREE_OPERAND (arg0
, 1),
6973 TREE_OPERAND (arg0
, 0));
6974 else if (TREE_CODE (arg0
) == COMPLEX_CST
)
6975 return TREE_IMAGPART (arg0
);
6976 else if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
6977 return fold_build2 (TREE_CODE (arg0
), type
,
6978 fold_build1 (IMAGPART_EXPR
, type
,
6979 TREE_OPERAND (arg0
, 0)),
6980 fold_build1 (IMAGPART_EXPR
, type
,
6981 TREE_OPERAND (arg0
, 1)));
6986 } /* switch (code) */
6989 /* Fold a binary expression of code CODE and type TYPE with operands
6990 OP0 and OP1. Return the folded expression if folding is
6991 successful. Otherwise, return NULL_TREE. */
6994 fold_binary (enum tree_code code
, tree type
, tree op0
, tree op1
)
6996 tree t1
= NULL_TREE
;
6998 tree arg0
= NULL_TREE
, arg1
= NULL_TREE
;
6999 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
7001 /* WINS will be nonzero when the switch is done
7002 if all operands are constant. */
7005 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
7006 && TREE_CODE_LENGTH (code
) == 2);
7015 /* Strip any conversions that don't change the mode. This is
7016 safe for every expression, except for a comparison expression
7017 because its signedness is derived from its operands. So, in
7018 the latter case, only strip conversions that don't change the
7021 Note that this is done as an internal manipulation within the
7022 constant folder, in order to find the simplest representation
7023 of the arguments so that their form can be studied. In any
7024 cases, the appropriate type conversions should be put back in
7025 the tree that will get out of the constant folder. */
7026 if (kind
== tcc_comparison
)
7027 STRIP_SIGN_NOPS (arg0
);
7031 if (TREE_CODE (arg0
) == COMPLEX_CST
)
7032 subop
= TREE_REALPART (arg0
);
7036 if (TREE_CODE (subop
) != INTEGER_CST
7037 && TREE_CODE (subop
) != REAL_CST
)
7038 /* Note that TREE_CONSTANT isn't enough:
7039 static var addresses are constant but we can't
7040 do arithmetic on them. */
7048 /* Strip any conversions that don't change the mode. This is
7049 safe for every expression, except for a comparison expression
7050 because its signedness is derived from its operands. So, in
7051 the latter case, only strip conversions that don't change the
7054 Note that this is done as an internal manipulation within the
7055 constant folder, in order to find the simplest representation
7056 of the arguments so that their form can be studied. In any
7057 cases, the appropriate type conversions should be put back in
7058 the tree that will get out of the constant folder. */
7059 if (kind
== tcc_comparison
)
7060 STRIP_SIGN_NOPS (arg1
);
7064 if (TREE_CODE (arg1
) == COMPLEX_CST
)
7065 subop
= TREE_REALPART (arg1
);
7069 if (TREE_CODE (subop
) != INTEGER_CST
7070 && TREE_CODE (subop
) != REAL_CST
)
7071 /* Note that TREE_CONSTANT isn't enough:
7072 static var addresses are constant but we can't
7073 do arithmetic on them. */
7077 /* If this is a commutative operation, and ARG0 is a constant, move it
7078 to ARG1 to reduce the number of tests below. */
7079 if (commutative_tree_code (code
)
7080 && tree_swap_operands_p (arg0
, arg1
, true))
7081 return fold_build2 (code
, type
, op1
, op0
);
7083 /* Now WINS is set as described above,
7084 ARG0 is the first operand of EXPR,
7085 and ARG1 is the second operand (if it has more than one operand).
7087 First check for cases where an arithmetic operation is applied to a
7088 compound, conditional, or comparison operation. Push the arithmetic
7089 operation inside the compound or conditional to see if any folding
7090 can then be done. Convert comparison to conditional for this purpose.
7091 The also optimizes non-constant cases that used to be done in
7094 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
7095 one of the operands is a comparison and the other is a comparison, a
7096 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
7097 code below would make the expression more complex. Change it to a
7098 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
7099 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
7101 if ((code
== BIT_AND_EXPR
|| code
== BIT_IOR_EXPR
7102 || code
== EQ_EXPR
|| code
== NE_EXPR
)
7103 && ((truth_value_p (TREE_CODE (arg0
))
7104 && (truth_value_p (TREE_CODE (arg1
))
7105 || (TREE_CODE (arg1
) == BIT_AND_EXPR
7106 && integer_onep (TREE_OPERAND (arg1
, 1)))))
7107 || (truth_value_p (TREE_CODE (arg1
))
7108 && (truth_value_p (TREE_CODE (arg0
))
7109 || (TREE_CODE (arg0
) == BIT_AND_EXPR
7110 && integer_onep (TREE_OPERAND (arg0
, 1)))))))
7112 tem
= fold_build2 (code
== BIT_AND_EXPR
? TRUTH_AND_EXPR
7113 : code
== BIT_IOR_EXPR
? TRUTH_OR_EXPR
7116 fold_convert (boolean_type_node
, arg0
),
7117 fold_convert (boolean_type_node
, arg1
));
7119 if (code
== EQ_EXPR
)
7120 tem
= invert_truthvalue (tem
);
7122 return fold_convert (type
, tem
);
7125 if (TREE_CODE_CLASS (code
) == tcc_binary
7126 || TREE_CODE_CLASS (code
) == tcc_comparison
)
7128 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7129 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7130 fold_build2 (code
, type
,
7131 TREE_OPERAND (arg0
, 1), op1
));
7132 if (TREE_CODE (arg1
) == COMPOUND_EXPR
7133 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
7134 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
7135 fold_build2 (code
, type
,
7136 op0
, TREE_OPERAND (arg1
, 1)));
7138 if (TREE_CODE (arg0
) == COND_EXPR
|| COMPARISON_CLASS_P (arg0
))
7140 tem
= fold_binary_op_with_conditional_arg (code
, type
, op0
, op1
,
7142 /*cond_first_p=*/1);
7143 if (tem
!= NULL_TREE
)
7147 if (TREE_CODE (arg1
) == COND_EXPR
|| COMPARISON_CLASS_P (arg1
))
7149 tem
= fold_binary_op_with_conditional_arg (code
, type
, op0
, op1
,
7151 /*cond_first_p=*/0);
7152 if (tem
!= NULL_TREE
)
7160 /* A + (-B) -> A - B */
7161 if (TREE_CODE (arg1
) == NEGATE_EXPR
)
7162 return fold_build2 (MINUS_EXPR
, type
,
7163 fold_convert (type
, arg0
),
7164 fold_convert (type
, TREE_OPERAND (arg1
, 0)));
7165 /* (-A) + B -> B - A */
7166 if (TREE_CODE (arg0
) == NEGATE_EXPR
7167 && reorder_operands_p (TREE_OPERAND (arg0
, 0), arg1
))
7168 return fold_build2 (MINUS_EXPR
, type
,
7169 fold_convert (type
, arg1
),
7170 fold_convert (type
, TREE_OPERAND (arg0
, 0)));
7171 /* Convert ~A + 1 to -A. */
7172 if (INTEGRAL_TYPE_P (type
)
7173 && TREE_CODE (arg0
) == BIT_NOT_EXPR
7174 && integer_onep (arg1
))
7175 return fold_build1 (NEGATE_EXPR
, type
, TREE_OPERAND (arg0
, 0));
7177 if (! FLOAT_TYPE_P (type
))
7179 if (integer_zerop (arg1
))
7180 return non_lvalue (fold_convert (type
, arg0
));
7182 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7183 with a constant, and the two constants have no bits in common,
7184 we should treat this as a BIT_IOR_EXPR since this may produce more
7186 if (TREE_CODE (arg0
) == BIT_AND_EXPR
7187 && TREE_CODE (arg1
) == BIT_AND_EXPR
7188 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
7189 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
7190 && integer_zerop (const_binop (BIT_AND_EXPR
,
7191 TREE_OPERAND (arg0
, 1),
7192 TREE_OPERAND (arg1
, 1), 0)))
7194 code
= BIT_IOR_EXPR
;
7198 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7199 (plus (plus (mult) (mult)) (foo)) so that we can
7200 take advantage of the factoring cases below. */
7201 if (((TREE_CODE (arg0
) == PLUS_EXPR
7202 || TREE_CODE (arg0
) == MINUS_EXPR
)
7203 && TREE_CODE (arg1
) == MULT_EXPR
)
7204 || ((TREE_CODE (arg1
) == PLUS_EXPR
7205 || TREE_CODE (arg1
) == MINUS_EXPR
)
7206 && TREE_CODE (arg0
) == MULT_EXPR
))
7208 tree parg0
, parg1
, parg
, marg
;
7209 enum tree_code pcode
;
7211 if (TREE_CODE (arg1
) == MULT_EXPR
)
7212 parg
= arg0
, marg
= arg1
;
7214 parg
= arg1
, marg
= arg0
;
7215 pcode
= TREE_CODE (parg
);
7216 parg0
= TREE_OPERAND (parg
, 0);
7217 parg1
= TREE_OPERAND (parg
, 1);
7221 if (TREE_CODE (parg0
) == MULT_EXPR
7222 && TREE_CODE (parg1
) != MULT_EXPR
)
7223 return fold_build2 (pcode
, type
,
7224 fold_build2 (PLUS_EXPR
, type
,
7225 fold_convert (type
, parg0
),
7226 fold_convert (type
, marg
)),
7227 fold_convert (type
, parg1
));
7228 if (TREE_CODE (parg0
) != MULT_EXPR
7229 && TREE_CODE (parg1
) == MULT_EXPR
)
7230 return fold_build2 (PLUS_EXPR
, type
,
7231 fold_convert (type
, parg0
),
7232 fold_build2 (pcode
, type
,
7233 fold_convert (type
, marg
),
7238 if (TREE_CODE (arg0
) == MULT_EXPR
&& TREE_CODE (arg1
) == MULT_EXPR
)
7240 tree arg00
, arg01
, arg10
, arg11
;
7241 tree alt0
= NULL_TREE
, alt1
= NULL_TREE
, same
;
7243 /* (A * C) + (B * C) -> (A+B) * C.
7244 We are most concerned about the case where C is a constant,
7245 but other combinations show up during loop reduction. Since
7246 it is not difficult, try all four possibilities. */
7248 arg00
= TREE_OPERAND (arg0
, 0);
7249 arg01
= TREE_OPERAND (arg0
, 1);
7250 arg10
= TREE_OPERAND (arg1
, 0);
7251 arg11
= TREE_OPERAND (arg1
, 1);
7254 if (operand_equal_p (arg01
, arg11
, 0))
7255 same
= arg01
, alt0
= arg00
, alt1
= arg10
;
7256 else if (operand_equal_p (arg00
, arg10
, 0))
7257 same
= arg00
, alt0
= arg01
, alt1
= arg11
;
7258 else if (operand_equal_p (arg00
, arg11
, 0))
7259 same
= arg00
, alt0
= arg01
, alt1
= arg10
;
7260 else if (operand_equal_p (arg01
, arg10
, 0))
7261 same
= arg01
, alt0
= arg00
, alt1
= arg11
;
7263 /* No identical multiplicands; see if we can find a common
7264 power-of-two factor in non-power-of-two multiplies. This
7265 can help in multi-dimensional array access. */
7266 else if (TREE_CODE (arg01
) == INTEGER_CST
7267 && TREE_CODE (arg11
) == INTEGER_CST
7268 && TREE_INT_CST_HIGH (arg01
) == 0
7269 && TREE_INT_CST_HIGH (arg11
) == 0)
7271 HOST_WIDE_INT int01
, int11
, tmp
;
7272 int01
= TREE_INT_CST_LOW (arg01
);
7273 int11
= TREE_INT_CST_LOW (arg11
);
7275 /* Move min of absolute values to int11. */
7276 if ((int01
>= 0 ? int01
: -int01
)
7277 < (int11
>= 0 ? int11
: -int11
))
7279 tmp
= int01
, int01
= int11
, int11
= tmp
;
7280 alt0
= arg00
, arg00
= arg10
, arg10
= alt0
;
7281 alt0
= arg01
, arg01
= arg11
, arg11
= alt0
;
7284 if (exact_log2 (int11
) > 0 && int01
% int11
== 0)
7286 alt0
= fold_build2 (MULT_EXPR
, type
, arg00
,
7287 build_int_cst (NULL_TREE
,
7295 return fold_build2 (MULT_EXPR
, type
,
7296 fold_build2 (PLUS_EXPR
, type
,
7297 fold_convert (type
, alt0
),
7298 fold_convert (type
, alt1
)),
7299 fold_convert (type
, same
));
7302 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
7303 of the array. Loop optimizer sometimes produce this type of
7305 if (TREE_CODE (arg0
) == ADDR_EXPR
)
7307 tem
= try_move_mult_to_index (PLUS_EXPR
, arg0
, arg1
);
7309 return fold_convert (type
, tem
);
7311 else if (TREE_CODE (arg1
) == ADDR_EXPR
)
7313 tem
= try_move_mult_to_index (PLUS_EXPR
, arg1
, arg0
);
7315 return fold_convert (type
, tem
);
7320 /* See if ARG1 is zero and X + ARG1 reduces to X. */
7321 if (fold_real_zero_addition_p (TREE_TYPE (arg0
), arg1
, 0))
7322 return non_lvalue (fold_convert (type
, arg0
));
7324 /* Likewise if the operands are reversed. */
7325 if (fold_real_zero_addition_p (TREE_TYPE (arg1
), arg0
, 0))
7326 return non_lvalue (fold_convert (type
, arg1
));
7328 /* Convert X + -C into X - C. */
7329 if (TREE_CODE (arg1
) == REAL_CST
7330 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
)))
7332 tem
= fold_negate_const (arg1
, type
);
7333 if (!TREE_OVERFLOW (arg1
) || !flag_trapping_math
)
7334 return fold_build2 (MINUS_EXPR
, type
,
7335 fold_convert (type
, arg0
),
7336 fold_convert (type
, tem
));
7339 if (flag_unsafe_math_optimizations
7340 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
7341 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
7342 && (tem
= distribute_real_division (code
, type
, arg0
, arg1
)))
7345 /* Convert x+x into x*2.0. */
7346 if (operand_equal_p (arg0
, arg1
, 0)
7347 && SCALAR_FLOAT_TYPE_P (type
))
7348 return fold_build2 (MULT_EXPR
, type
, arg0
,
7349 build_real (type
, dconst2
));
7351 /* Convert x*c+x into x*(c+1). */
7352 if (flag_unsafe_math_optimizations
7353 && TREE_CODE (arg0
) == MULT_EXPR
7354 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
7355 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0
, 1))
7356 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
7360 c
= TREE_REAL_CST (TREE_OPERAND (arg0
, 1));
7361 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
7362 return fold_build2 (MULT_EXPR
, type
, arg1
,
7363 build_real (type
, c
));
7366 /* Convert x+x*c into x*(c+1). */
7367 if (flag_unsafe_math_optimizations
7368 && TREE_CODE (arg1
) == MULT_EXPR
7369 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
7370 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1
, 1))
7371 && operand_equal_p (TREE_OPERAND (arg1
, 0), arg0
, 0))
7375 c
= TREE_REAL_CST (TREE_OPERAND (arg1
, 1));
7376 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
7377 return fold_build2 (MULT_EXPR
, type
, arg0
,
7378 build_real (type
, c
));
7381 /* Convert x*c1+x*c2 into x*(c1+c2). */
7382 if (flag_unsafe_math_optimizations
7383 && TREE_CODE (arg0
) == MULT_EXPR
7384 && TREE_CODE (arg1
) == MULT_EXPR
7385 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
7386 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0
, 1))
7387 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
7388 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1
, 1))
7389 && operand_equal_p (TREE_OPERAND (arg0
, 0),
7390 TREE_OPERAND (arg1
, 0), 0))
7392 REAL_VALUE_TYPE c1
, c2
;
7394 c1
= TREE_REAL_CST (TREE_OPERAND (arg0
, 1));
7395 c2
= TREE_REAL_CST (TREE_OPERAND (arg1
, 1));
7396 real_arithmetic (&c1
, PLUS_EXPR
, &c1
, &c2
);
7397 return fold_build2 (MULT_EXPR
, type
,
7398 TREE_OPERAND (arg0
, 0),
7399 build_real (type
, c1
));
7401 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7402 if (flag_unsafe_math_optimizations
7403 && TREE_CODE (arg1
) == PLUS_EXPR
7404 && TREE_CODE (arg0
) != MULT_EXPR
)
7406 tree tree10
= TREE_OPERAND (arg1
, 0);
7407 tree tree11
= TREE_OPERAND (arg1
, 1);
7408 if (TREE_CODE (tree11
) == MULT_EXPR
7409 && TREE_CODE (tree10
) == MULT_EXPR
)
7412 tree0
= fold_build2 (PLUS_EXPR
, type
, arg0
, tree10
);
7413 return fold_build2 (PLUS_EXPR
, type
, tree0
, tree11
);
7416 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
7417 if (flag_unsafe_math_optimizations
7418 && TREE_CODE (arg0
) == PLUS_EXPR
7419 && TREE_CODE (arg1
) != MULT_EXPR
)
7421 tree tree00
= TREE_OPERAND (arg0
, 0);
7422 tree tree01
= TREE_OPERAND (arg0
, 1);
7423 if (TREE_CODE (tree01
) == MULT_EXPR
7424 && TREE_CODE (tree00
) == MULT_EXPR
)
7427 tree0
= fold_build2 (PLUS_EXPR
, type
, tree01
, arg1
);
7428 return fold_build2 (PLUS_EXPR
, type
, tree00
, tree0
);
7434 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
7435 is a rotate of A by C1 bits. */
7436 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
7437 is a rotate of A by B bits. */
7439 enum tree_code code0
, code1
;
7440 code0
= TREE_CODE (arg0
);
7441 code1
= TREE_CODE (arg1
);
7442 if (((code0
== RSHIFT_EXPR
&& code1
== LSHIFT_EXPR
)
7443 || (code1
== RSHIFT_EXPR
&& code0
== LSHIFT_EXPR
))
7444 && operand_equal_p (TREE_OPERAND (arg0
, 0),
7445 TREE_OPERAND (arg1
, 0), 0)
7446 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
7448 tree tree01
, tree11
;
7449 enum tree_code code01
, code11
;
7451 tree01
= TREE_OPERAND (arg0
, 1);
7452 tree11
= TREE_OPERAND (arg1
, 1);
7453 STRIP_NOPS (tree01
);
7454 STRIP_NOPS (tree11
);
7455 code01
= TREE_CODE (tree01
);
7456 code11
= TREE_CODE (tree11
);
7457 if (code01
== INTEGER_CST
7458 && code11
== INTEGER_CST
7459 && TREE_INT_CST_HIGH (tree01
) == 0
7460 && TREE_INT_CST_HIGH (tree11
) == 0
7461 && ((TREE_INT_CST_LOW (tree01
) + TREE_INT_CST_LOW (tree11
))
7462 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 0)))))
7463 return build2 (LROTATE_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7464 code0
== LSHIFT_EXPR
? tree01
: tree11
);
7465 else if (code11
== MINUS_EXPR
)
7467 tree tree110
, tree111
;
7468 tree110
= TREE_OPERAND (tree11
, 0);
7469 tree111
= TREE_OPERAND (tree11
, 1);
7470 STRIP_NOPS (tree110
);
7471 STRIP_NOPS (tree111
);
7472 if (TREE_CODE (tree110
) == INTEGER_CST
7473 && 0 == compare_tree_int (tree110
,
7475 (TREE_TYPE (TREE_OPERAND
7477 && operand_equal_p (tree01
, tree111
, 0))
7478 return build2 ((code0
== LSHIFT_EXPR
7481 type
, TREE_OPERAND (arg0
, 0), tree01
);
7483 else if (code01
== MINUS_EXPR
)
7485 tree tree010
, tree011
;
7486 tree010
= TREE_OPERAND (tree01
, 0);
7487 tree011
= TREE_OPERAND (tree01
, 1);
7488 STRIP_NOPS (tree010
);
7489 STRIP_NOPS (tree011
);
7490 if (TREE_CODE (tree010
) == INTEGER_CST
7491 && 0 == compare_tree_int (tree010
,
7493 (TREE_TYPE (TREE_OPERAND
7495 && operand_equal_p (tree11
, tree011
, 0))
7496 return build2 ((code0
!= LSHIFT_EXPR
7499 type
, TREE_OPERAND (arg0
, 0), tree11
);
7505 /* In most languages, can't associate operations on floats through
7506 parentheses. Rather than remember where the parentheses were, we
7507 don't associate floats at all, unless the user has specified
7508 -funsafe-math-optimizations. */
7511 && (! FLOAT_TYPE_P (type
) || flag_unsafe_math_optimizations
))
7513 tree var0
, con0
, lit0
, minus_lit0
;
7514 tree var1
, con1
, lit1
, minus_lit1
;
7516 /* Split both trees into variables, constants, and literals. Then
7517 associate each group together, the constants with literals,
7518 then the result with variables. This increases the chances of
7519 literals being recombined later and of generating relocatable
7520 expressions for the sum of a constant and literal. */
7521 var0
= split_tree (arg0
, code
, &con0
, &lit0
, &minus_lit0
, 0);
7522 var1
= split_tree (arg1
, code
, &con1
, &lit1
, &minus_lit1
,
7523 code
== MINUS_EXPR
);
7525 /* Only do something if we found more than two objects. Otherwise,
7526 nothing has changed and we risk infinite recursion. */
7527 if (2 < ((var0
!= 0) + (var1
!= 0)
7528 + (con0
!= 0) + (con1
!= 0)
7529 + (lit0
!= 0) + (lit1
!= 0)
7530 + (minus_lit0
!= 0) + (minus_lit1
!= 0)))
7532 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7533 if (code
== MINUS_EXPR
)
7536 var0
= associate_trees (var0
, var1
, code
, type
);
7537 con0
= associate_trees (con0
, con1
, code
, type
);
7538 lit0
= associate_trees (lit0
, lit1
, code
, type
);
7539 minus_lit0
= associate_trees (minus_lit0
, minus_lit1
, code
, type
);
7541 /* Preserve the MINUS_EXPR if the negative part of the literal is
7542 greater than the positive part. Otherwise, the multiplicative
7543 folding code (i.e extract_muldiv) may be fooled in case
7544 unsigned constants are subtracted, like in the following
7545 example: ((X*2 + 4) - 8U)/2. */
7546 if (minus_lit0
&& lit0
)
7548 if (TREE_CODE (lit0
) == INTEGER_CST
7549 && TREE_CODE (minus_lit0
) == INTEGER_CST
7550 && tree_int_cst_lt (lit0
, minus_lit0
))
7552 minus_lit0
= associate_trees (minus_lit0
, lit0
,
7558 lit0
= associate_trees (lit0
, minus_lit0
,
7566 return fold_convert (type
,
7567 associate_trees (var0
, minus_lit0
,
7571 con0
= associate_trees (con0
, minus_lit0
,
7573 return fold_convert (type
,
7574 associate_trees (var0
, con0
,
7579 con0
= associate_trees (con0
, lit0
, code
, type
);
7580 return fold_convert (type
, associate_trees (var0
, con0
,
7587 t1
= const_binop (code
, arg0
, arg1
, 0);
7588 if (t1
!= NULL_TREE
)
7590 /* The return value should always have
7591 the same type as the original expression. */
7592 if (TREE_TYPE (t1
) != type
)
7593 t1
= fold_convert (type
, t1
);
7600 /* A - (-B) -> A + B */
7601 if (TREE_CODE (arg1
) == NEGATE_EXPR
)
7602 return fold_build2 (PLUS_EXPR
, type
, arg0
, TREE_OPERAND (arg1
, 0));
7603 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7604 if (TREE_CODE (arg0
) == NEGATE_EXPR
7605 && (FLOAT_TYPE_P (type
)
7606 || (INTEGRAL_TYPE_P (type
) && flag_wrapv
&& !flag_trapv
))
7607 && negate_expr_p (arg1
)
7608 && reorder_operands_p (arg0
, arg1
))
7609 return fold_build2 (MINUS_EXPR
, type
, negate_expr (arg1
),
7610 TREE_OPERAND (arg0
, 0));
7611 /* Convert -A - 1 to ~A. */
7612 if (INTEGRAL_TYPE_P (type
)
7613 && TREE_CODE (arg0
) == NEGATE_EXPR
7614 && integer_onep (arg1
))
7615 return fold_build1 (BIT_NOT_EXPR
, type
, TREE_OPERAND (arg0
, 0));
7617 /* Convert -1 - A to ~A. */
7618 if (INTEGRAL_TYPE_P (type
)
7619 && integer_all_onesp (arg0
))
7620 return fold_build1 (BIT_NOT_EXPR
, type
, arg1
);
7622 if (! FLOAT_TYPE_P (type
))
7624 if (! wins
&& integer_zerop (arg0
))
7625 return negate_expr (fold_convert (type
, arg1
));
7626 if (integer_zerop (arg1
))
7627 return non_lvalue (fold_convert (type
, arg0
));
7629 /* Fold A - (A & B) into ~B & A. */
7630 if (!TREE_SIDE_EFFECTS (arg0
)
7631 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
7633 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0))
7634 return fold_build2 (BIT_AND_EXPR
, type
,
7635 fold_build1 (BIT_NOT_EXPR
, type
,
7636 TREE_OPERAND (arg1
, 0)),
7638 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
7639 return fold_build2 (BIT_AND_EXPR
, type
,
7640 fold_build1 (BIT_NOT_EXPR
, type
,
7641 TREE_OPERAND (arg1
, 1)),
7645 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7646 any power of 2 minus 1. */
7647 if (TREE_CODE (arg0
) == BIT_AND_EXPR
7648 && TREE_CODE (arg1
) == BIT_AND_EXPR
7649 && operand_equal_p (TREE_OPERAND (arg0
, 0),
7650 TREE_OPERAND (arg1
, 0), 0))
7652 tree mask0
= TREE_OPERAND (arg0
, 1);
7653 tree mask1
= TREE_OPERAND (arg1
, 1);
7654 tree tem
= fold_build1 (BIT_NOT_EXPR
, type
, mask0
);
7656 if (operand_equal_p (tem
, mask1
, 0))
7658 tem
= fold_build2 (BIT_XOR_EXPR
, type
,
7659 TREE_OPERAND (arg0
, 0), mask1
);
7660 return fold_build2 (MINUS_EXPR
, type
, tem
, mask1
);
7665 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7666 else if (fold_real_zero_addition_p (TREE_TYPE (arg0
), arg1
, 1))
7667 return non_lvalue (fold_convert (type
, arg0
));
7669 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7670 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7671 (-ARG1 + ARG0) reduces to -ARG1. */
7672 else if (!wins
&& fold_real_zero_addition_p (TREE_TYPE (arg1
), arg0
, 0))
7673 return negate_expr (fold_convert (type
, arg1
));
7675 /* Fold &x - &x. This can happen from &x.foo - &x.
7676 This is unsafe for certain floats even in non-IEEE formats.
7677 In IEEE, it is unsafe because it does wrong for NaNs.
7678 Also note that operand_equal_p is always false if an operand
7681 if ((! FLOAT_TYPE_P (type
) || flag_unsafe_math_optimizations
)
7682 && operand_equal_p (arg0
, arg1
, 0))
7683 return fold_convert (type
, integer_zero_node
);
7685 /* A - B -> A + (-B) if B is easily negatable. */
7686 if (!wins
&& negate_expr_p (arg1
)
7687 && ((FLOAT_TYPE_P (type
)
7688 /* Avoid this transformation if B is a positive REAL_CST. */
7689 && (TREE_CODE (arg1
) != REAL_CST
7690 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
))))
7691 || (INTEGRAL_TYPE_P (type
) && flag_wrapv
&& !flag_trapv
)))
7692 return fold_build2 (PLUS_EXPR
, type
,
7693 fold_convert (type
, arg0
),
7694 fold_convert (type
, negate_expr (arg1
)));
7696 /* Try folding difference of addresses. */
7700 if ((TREE_CODE (arg0
) == ADDR_EXPR
7701 || TREE_CODE (arg1
) == ADDR_EXPR
)
7702 && ptr_difference_const (arg0
, arg1
, &diff
))
7703 return build_int_cst_type (type
, diff
);
7706 /* Fold &a[i] - &a[j] to i-j. */
7707 if (TREE_CODE (arg0
) == ADDR_EXPR
7708 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ARRAY_REF
7709 && TREE_CODE (arg1
) == ADDR_EXPR
7710 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ARRAY_REF
)
7712 tree aref0
= TREE_OPERAND (arg0
, 0);
7713 tree aref1
= TREE_OPERAND (arg1
, 0);
7714 if (operand_equal_p (TREE_OPERAND (aref0
, 0),
7715 TREE_OPERAND (aref1
, 0), 0))
7717 tree op0
= fold_convert (type
, TREE_OPERAND (aref0
, 1));
7718 tree op1
= fold_convert (type
, TREE_OPERAND (aref1
, 1));
7719 tree esz
= array_ref_element_size (aref0
);
7720 tree diff
= build2 (MINUS_EXPR
, type
, op0
, op1
);
7721 return fold_build2 (MULT_EXPR
, type
, diff
,
7722 fold_convert (type
, esz
));
7727 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
7728 of the array. Loop optimizer sometimes produce this type of
7730 if (TREE_CODE (arg0
) == ADDR_EXPR
)
7732 tem
= try_move_mult_to_index (MINUS_EXPR
, arg0
, arg1
);
7734 return fold_convert (type
, tem
);
7737 if (flag_unsafe_math_optimizations
7738 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
7739 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
7740 && (tem
= distribute_real_division (code
, type
, arg0
, arg1
)))
7743 if (TREE_CODE (arg0
) == MULT_EXPR
7744 && TREE_CODE (arg1
) == MULT_EXPR
7745 && (!FLOAT_TYPE_P (type
) || flag_unsafe_math_optimizations
))
7747 /* (A * C) - (B * C) -> (A-B) * C. */
7748 if (operand_equal_p (TREE_OPERAND (arg0
, 1),
7749 TREE_OPERAND (arg1
, 1), 0))
7750 return fold_build2 (MULT_EXPR
, type
,
7751 fold_build2 (MINUS_EXPR
, type
,
7752 TREE_OPERAND (arg0
, 0),
7753 TREE_OPERAND (arg1
, 0)),
7754 TREE_OPERAND (arg0
, 1));
7755 /* (A * C1) - (A * C2) -> A * (C1-C2). */
7756 if (operand_equal_p (TREE_OPERAND (arg0
, 0),
7757 TREE_OPERAND (arg1
, 0), 0))
7758 return fold_build2 (MULT_EXPR
, type
,
7759 TREE_OPERAND (arg0
, 0),
7760 fold_build2 (MINUS_EXPR
, type
,
7761 TREE_OPERAND (arg0
, 1),
7762 TREE_OPERAND (arg1
, 1)));
7768 /* (-A) * (-B) -> A * B */
7769 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
7770 return fold_build2 (MULT_EXPR
, type
,
7771 TREE_OPERAND (arg0
, 0),
7772 negate_expr (arg1
));
7773 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
7774 return fold_build2 (MULT_EXPR
, type
,
7776 TREE_OPERAND (arg1
, 0));
7778 if (! FLOAT_TYPE_P (type
))
7780 if (integer_zerop (arg1
))
7781 return omit_one_operand (type
, arg1
, arg0
);
7782 if (integer_onep (arg1
))
7783 return non_lvalue (fold_convert (type
, arg0
));
7784 /* Transform x * -1 into -x. */
7785 if (integer_all_onesp (arg1
))
7786 return fold_convert (type
, negate_expr (arg0
));
7788 /* (a * (1 << b)) is (a << b) */
7789 if (TREE_CODE (arg1
) == LSHIFT_EXPR
7790 && integer_onep (TREE_OPERAND (arg1
, 0)))
7791 return fold_build2 (LSHIFT_EXPR
, type
, arg0
,
7792 TREE_OPERAND (arg1
, 1));
7793 if (TREE_CODE (arg0
) == LSHIFT_EXPR
7794 && integer_onep (TREE_OPERAND (arg0
, 0)))
7795 return fold_build2 (LSHIFT_EXPR
, type
, arg1
,
7796 TREE_OPERAND (arg0
, 1));
7798 if (TREE_CODE (arg1
) == INTEGER_CST
7799 && 0 != (tem
= extract_muldiv (op0
,
7800 fold_convert (type
, arg1
),
7802 return fold_convert (type
, tem
);
7807 /* Maybe fold x * 0 to 0. The expressions aren't the same
7808 when x is NaN, since x * 0 is also NaN. Nor are they the
7809 same in modes with signed zeros, since multiplying a
7810 negative value by 0 gives -0, not +0. */
7811 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
7812 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
7813 && real_zerop (arg1
))
7814 return omit_one_operand (type
, arg1
, arg0
);
7815 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7816 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
7817 && real_onep (arg1
))
7818 return non_lvalue (fold_convert (type
, arg0
));
7820 /* Transform x * -1.0 into -x. */
7821 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
7822 && real_minus_onep (arg1
))
7823 return fold_convert (type
, negate_expr (arg0
));
7825 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7826 if (flag_unsafe_math_optimizations
7827 && TREE_CODE (arg0
) == RDIV_EXPR
7828 && TREE_CODE (arg1
) == REAL_CST
7829 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
)
7831 tree tem
= const_binop (MULT_EXPR
, TREE_OPERAND (arg0
, 0),
7834 return fold_build2 (RDIV_EXPR
, type
, tem
,
7835 TREE_OPERAND (arg0
, 1));
7838 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
7839 if (operand_equal_p (arg0
, arg1
, 0))
7841 tree tem
= fold_strip_sign_ops (arg0
);
7842 if (tem
!= NULL_TREE
)
7844 tem
= fold_convert (type
, tem
);
7845 return fold_build2 (MULT_EXPR
, type
, tem
, tem
);
7849 if (flag_unsafe_math_optimizations
)
7851 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
7852 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
7854 /* Optimizations of root(...)*root(...). */
7855 if (fcode0
== fcode1
&& BUILTIN_ROOT_P (fcode0
))
7857 tree rootfn
, arg
, arglist
;
7858 tree arg00
= TREE_VALUE (TREE_OPERAND (arg0
, 1));
7859 tree arg10
= TREE_VALUE (TREE_OPERAND (arg1
, 1));
7861 /* Optimize sqrt(x)*sqrt(x) as x. */
7862 if (BUILTIN_SQRT_P (fcode0
)
7863 && operand_equal_p (arg00
, arg10
, 0)
7864 && ! HONOR_SNANS (TYPE_MODE (type
)))
7867 /* Optimize root(x)*root(y) as root(x*y). */
7868 rootfn
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
7869 arg
= fold_build2 (MULT_EXPR
, type
, arg00
, arg10
);
7870 arglist
= build_tree_list (NULL_TREE
, arg
);
7871 return build_function_call_expr (rootfn
, arglist
);
7874 /* Optimize expN(x)*expN(y) as expN(x+y). */
7875 if (fcode0
== fcode1
&& BUILTIN_EXPONENT_P (fcode0
))
7877 tree expfn
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
7878 tree arg
= fold_build2 (PLUS_EXPR
, type
,
7879 TREE_VALUE (TREE_OPERAND (arg0
, 1)),
7880 TREE_VALUE (TREE_OPERAND (arg1
, 1)));
7881 tree arglist
= build_tree_list (NULL_TREE
, arg
);
7882 return build_function_call_expr (expfn
, arglist
);
7885 /* Optimizations of pow(...)*pow(...). */
7886 if ((fcode0
== BUILT_IN_POW
&& fcode1
== BUILT_IN_POW
)
7887 || (fcode0
== BUILT_IN_POWF
&& fcode1
== BUILT_IN_POWF
)
7888 || (fcode0
== BUILT_IN_POWL
&& fcode1
== BUILT_IN_POWL
))
7890 tree arg00
= TREE_VALUE (TREE_OPERAND (arg0
, 1));
7891 tree arg01
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0
,
7893 tree arg10
= TREE_VALUE (TREE_OPERAND (arg1
, 1));
7894 tree arg11
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1
,
7897 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7898 if (operand_equal_p (arg01
, arg11
, 0))
7900 tree powfn
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
7901 tree arg
= fold_build2 (MULT_EXPR
, type
, arg00
, arg10
);
7902 tree arglist
= tree_cons (NULL_TREE
, arg
,
7903 build_tree_list (NULL_TREE
,
7905 return build_function_call_expr (powfn
, arglist
);
7908 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7909 if (operand_equal_p (arg00
, arg10
, 0))
7911 tree powfn
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
7912 tree arg
= fold_build2 (PLUS_EXPR
, type
, arg01
, arg11
);
7913 tree arglist
= tree_cons (NULL_TREE
, arg00
,
7914 build_tree_list (NULL_TREE
,
7916 return build_function_call_expr (powfn
, arglist
);
7920 /* Optimize tan(x)*cos(x) as sin(x). */
7921 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_COS
)
7922 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_COSF
)
7923 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_COSL
)
7924 || (fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_TAN
)
7925 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_TANF
)
7926 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_TANL
))
7927 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0
, 1)),
7928 TREE_VALUE (TREE_OPERAND (arg1
, 1)), 0))
7930 tree sinfn
= mathfn_built_in (type
, BUILT_IN_SIN
);
7932 if (sinfn
!= NULL_TREE
)
7933 return build_function_call_expr (sinfn
,
7934 TREE_OPERAND (arg0
, 1));
7937 /* Optimize x*pow(x,c) as pow(x,c+1). */
7938 if (fcode1
== BUILT_IN_POW
7939 || fcode1
== BUILT_IN_POWF
7940 || fcode1
== BUILT_IN_POWL
)
7942 tree arg10
= TREE_VALUE (TREE_OPERAND (arg1
, 1));
7943 tree arg11
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1
,
7945 if (TREE_CODE (arg11
) == REAL_CST
7946 && ! TREE_CONSTANT_OVERFLOW (arg11
)
7947 && operand_equal_p (arg0
, arg10
, 0))
7949 tree powfn
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0);
7953 c
= TREE_REAL_CST (arg11
);
7954 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
7955 arg
= build_real (type
, c
);
7956 arglist
= build_tree_list (NULL_TREE
, arg
);
7957 arglist
= tree_cons (NULL_TREE
, arg0
, arglist
);
7958 return build_function_call_expr (powfn
, arglist
);
7962 /* Optimize pow(x,c)*x as pow(x,c+1). */
7963 if (fcode0
== BUILT_IN_POW
7964 || fcode0
== BUILT_IN_POWF
7965 || fcode0
== BUILT_IN_POWL
)
7967 tree arg00
= TREE_VALUE (TREE_OPERAND (arg0
, 1));
7968 tree arg01
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0
,
7970 if (TREE_CODE (arg01
) == REAL_CST
7971 && ! TREE_CONSTANT_OVERFLOW (arg01
)
7972 && operand_equal_p (arg1
, arg00
, 0))
7974 tree powfn
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
7978 c
= TREE_REAL_CST (arg01
);
7979 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
7980 arg
= build_real (type
, c
);
7981 arglist
= build_tree_list (NULL_TREE
, arg
);
7982 arglist
= tree_cons (NULL_TREE
, arg1
, arglist
);
7983 return build_function_call_expr (powfn
, arglist
);
7987 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
7989 && operand_equal_p (arg0
, arg1
, 0))
7991 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7995 tree arg
= build_real (type
, dconst2
);
7996 tree arglist
= build_tree_list (NULL_TREE
, arg
);
7997 arglist
= tree_cons (NULL_TREE
, arg0
, arglist
);
7998 return build_function_call_expr (powfn
, arglist
);
8007 if (integer_all_onesp (arg1
))
8008 return omit_one_operand (type
, arg1
, arg0
);
8009 if (integer_zerop (arg1
))
8010 return non_lvalue (fold_convert (type
, arg0
));
8011 if (operand_equal_p (arg0
, arg1
, 0))
8012 return non_lvalue (fold_convert (type
, arg0
));
8015 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
8016 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
8018 t1
= build_int_cst (type
, -1);
8019 t1
= force_fit_type (t1
, 0, false, false);
8020 return omit_one_operand (type
, t1
, arg1
);
8024 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
8025 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
8027 t1
= build_int_cst (type
, -1);
8028 t1
= force_fit_type (t1
, 0, false, false);
8029 return omit_one_operand (type
, t1
, arg0
);
8032 t1
= distribute_bit_expr (code
, type
, arg0
, arg1
);
8033 if (t1
!= NULL_TREE
)
8036 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8038 This results in more efficient code for machines without a NAND
8039 instruction. Combine will canonicalize to the first form
8040 which will allow use of NAND instructions provided by the
8041 backend if they exist. */
8042 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
8043 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
8045 return fold_build1 (BIT_NOT_EXPR
, type
,
8046 build2 (BIT_AND_EXPR
, type
,
8047 TREE_OPERAND (arg0
, 0),
8048 TREE_OPERAND (arg1
, 0)));
8051 /* See if this can be simplified into a rotate first. If that
8052 is unsuccessful continue in the association code. */
8056 if (integer_zerop (arg1
))
8057 return non_lvalue (fold_convert (type
, arg0
));
8058 if (integer_all_onesp (arg1
))
8059 return fold_build1 (BIT_NOT_EXPR
, type
, arg0
);
8060 if (operand_equal_p (arg0
, arg1
, 0))
8061 return omit_one_operand (type
, integer_zero_node
, arg0
);
8064 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
8065 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
8067 t1
= build_int_cst (type
, -1);
8068 t1
= force_fit_type (t1
, 0, false, false);
8069 return omit_one_operand (type
, t1
, arg1
);
8073 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
8074 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
8076 t1
= build_int_cst (type
, -1);
8077 t1
= force_fit_type (t1
, 0, false, false);
8078 return omit_one_operand (type
, t1
, arg0
);
8081 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8082 with a constant, and the two constants have no bits in common,
8083 we should treat this as a BIT_IOR_EXPR since this may produce more
8085 if (TREE_CODE (arg0
) == BIT_AND_EXPR
8086 && TREE_CODE (arg1
) == BIT_AND_EXPR
8087 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8088 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
8089 && integer_zerop (const_binop (BIT_AND_EXPR
,
8090 TREE_OPERAND (arg0
, 1),
8091 TREE_OPERAND (arg1
, 1), 0)))
8093 code
= BIT_IOR_EXPR
;
8097 /* (X | Y) ^ X -> Y & ~ X*/
8098 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
8099 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
8101 tree t2
= TREE_OPERAND (arg0
, 1);
8102 t1
= fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (arg1
),
8104 t1
= fold_build2 (BIT_AND_EXPR
, type
, fold_convert (type
, t2
),
8105 fold_convert (type
, t1
));
8109 /* (Y | X) ^ X -> Y & ~ X*/
8110 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
8111 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
8113 tree t2
= TREE_OPERAND (arg0
, 0);
8114 t1
= fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (arg1
),
8116 t1
= fold_build2 (BIT_AND_EXPR
, type
, fold_convert (type
, t2
),
8117 fold_convert (type
, t1
));
8121 /* X ^ (X | Y) -> Y & ~ X*/
8122 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
8123 && operand_equal_p (TREE_OPERAND (arg1
, 0), arg0
, 0))
8125 tree t2
= TREE_OPERAND (arg1
, 1);
8126 t1
= fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (arg0
),
8128 t1
= fold_build2 (BIT_AND_EXPR
, type
, fold_convert (type
, t2
),
8129 fold_convert (type
, t1
));
8133 /* X ^ (Y | X) -> Y & ~ X*/
8134 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
8135 && operand_equal_p (TREE_OPERAND (arg1
, 1), arg0
, 0))
8137 tree t2
= TREE_OPERAND (arg1
, 0);
8138 t1
= fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (arg0
),
8140 t1
= fold_build2 (BIT_AND_EXPR
, type
, fold_convert (type
, t2
),
8141 fold_convert (type
, t1
));
8145 /* Convert ~X ^ ~Y to X ^ Y. */
8146 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
8147 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
8148 return fold_build2 (code
, type
,
8149 fold_convert (type
, TREE_OPERAND (arg0
, 0)),
8150 fold_convert (type
, TREE_OPERAND (arg1
, 0)));
8152 /* See if this can be simplified into a rotate first. If that
8153 is unsuccessful continue in the association code. */
8157 if (integer_all_onesp (arg1
))
8158 return non_lvalue (fold_convert (type
, arg0
));
8159 if (integer_zerop (arg1
))
8160 return omit_one_operand (type
, arg1
, arg0
);
8161 if (operand_equal_p (arg0
, arg1
, 0))
8162 return non_lvalue (fold_convert (type
, arg0
));
8164 /* ~X & X is always zero. */
8165 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
8166 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
8167 return omit_one_operand (type
, integer_zero_node
, arg1
);
8169 /* X & ~X is always zero. */
8170 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
8171 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
8172 return omit_one_operand (type
, integer_zero_node
, arg0
);
8174 t1
= distribute_bit_expr (code
, type
, arg0
, arg1
);
8175 if (t1
!= NULL_TREE
)
8177 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
8178 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) == NOP_EXPR
8179 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
8182 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 0)));
8184 if (prec
< BITS_PER_WORD
&& prec
< HOST_BITS_PER_WIDE_INT
8185 && (~TREE_INT_CST_LOW (arg1
)
8186 & (((HOST_WIDE_INT
) 1 << prec
) - 1)) == 0)
8187 return fold_convert (type
, TREE_OPERAND (arg0
, 0));
8190 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8192 This results in more efficient code for machines without a NOR
8193 instruction. Combine will canonicalize to the first form
8194 which will allow use of NOR instructions provided by the
8195 backend if they exist. */
8196 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
8197 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
8199 return fold_build1 (BIT_NOT_EXPR
, type
,
8200 build2 (BIT_IOR_EXPR
, type
,
8201 TREE_OPERAND (arg0
, 0),
8202 TREE_OPERAND (arg1
, 0)));
8208 /* Don't touch a floating-point divide by zero unless the mode
8209 of the constant can represent infinity. */
8210 if (TREE_CODE (arg1
) == REAL_CST
8211 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
)))
8212 && real_zerop (arg1
))
8215 /* (-A) / (-B) -> A / B */
8216 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
8217 return fold_build2 (RDIV_EXPR
, type
,
8218 TREE_OPERAND (arg0
, 0),
8219 negate_expr (arg1
));
8220 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
8221 return fold_build2 (RDIV_EXPR
, type
,
8223 TREE_OPERAND (arg1
, 0));
8225 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
8226 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
8227 && real_onep (arg1
))
8228 return non_lvalue (fold_convert (type
, arg0
));
8230 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
8231 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
8232 && real_minus_onep (arg1
))
8233 return non_lvalue (fold_convert (type
, negate_expr (arg0
)));
8235 /* If ARG1 is a constant, we can convert this to a multiply by the
8236 reciprocal. This does not have the same rounding properties,
8237 so only do this if -funsafe-math-optimizations. We can actually
8238 always safely do it if ARG1 is a power of two, but it's hard to
8239 tell if it is or not in a portable manner. */
8240 if (TREE_CODE (arg1
) == REAL_CST
)
8242 if (flag_unsafe_math_optimizations
8243 && 0 != (tem
= const_binop (code
, build_real (type
, dconst1
),
8245 return fold_build2 (MULT_EXPR
, type
, arg0
, tem
);
8246 /* Find the reciprocal if optimizing and the result is exact. */
8250 r
= TREE_REAL_CST (arg1
);
8251 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0
)), &r
))
8253 tem
= build_real (type
, r
);
8254 return fold_build2 (MULT_EXPR
, type
,
8255 fold_convert (type
, arg0
), tem
);
8259 /* Convert A/B/C to A/(B*C). */
8260 if (flag_unsafe_math_optimizations
8261 && TREE_CODE (arg0
) == RDIV_EXPR
)
8262 return fold_build2 (RDIV_EXPR
, type
, TREE_OPERAND (arg0
, 0),
8263 fold_build2 (MULT_EXPR
, type
,
8264 TREE_OPERAND (arg0
, 1), arg1
));
8266 /* Convert A/(B/C) to (A/B)*C. */
8267 if (flag_unsafe_math_optimizations
8268 && TREE_CODE (arg1
) == RDIV_EXPR
)
8269 return fold_build2 (MULT_EXPR
, type
,
8270 fold_build2 (RDIV_EXPR
, type
, arg0
,
8271 TREE_OPERAND (arg1
, 0)),
8272 TREE_OPERAND (arg1
, 1));
8274 /* Convert C1/(X*C2) into (C1/C2)/X. */
8275 if (flag_unsafe_math_optimizations
8276 && TREE_CODE (arg1
) == MULT_EXPR
8277 && TREE_CODE (arg0
) == REAL_CST
8278 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
8280 tree tem
= const_binop (RDIV_EXPR
, arg0
,
8281 TREE_OPERAND (arg1
, 1), 0);
8283 return fold_build2 (RDIV_EXPR
, type
, tem
,
8284 TREE_OPERAND (arg1
, 0));
8287 if (flag_unsafe_math_optimizations
)
8289 enum built_in_function fcode
= builtin_mathfn_code (arg1
);
8290 /* Optimize x/expN(y) into x*expN(-y). */
8291 if (BUILTIN_EXPONENT_P (fcode
))
8293 tree expfn
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0);
8294 tree arg
= negate_expr (TREE_VALUE (TREE_OPERAND (arg1
, 1)));
8295 tree arglist
= build_tree_list (NULL_TREE
,
8296 fold_convert (type
, arg
));
8297 arg1
= build_function_call_expr (expfn
, arglist
);
8298 return fold_build2 (MULT_EXPR
, type
, arg0
, arg1
);
8301 /* Optimize x/pow(y,z) into x*pow(y,-z). */
8302 if (fcode
== BUILT_IN_POW
8303 || fcode
== BUILT_IN_POWF
8304 || fcode
== BUILT_IN_POWL
)
8306 tree powfn
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0);
8307 tree arg10
= TREE_VALUE (TREE_OPERAND (arg1
, 1));
8308 tree arg11
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1
, 1)));
8309 tree neg11
= fold_convert (type
, negate_expr (arg11
));
8310 tree arglist
= tree_cons(NULL_TREE
, arg10
,
8311 build_tree_list (NULL_TREE
, neg11
));
8312 arg1
= build_function_call_expr (powfn
, arglist
);
8313 return fold_build2 (MULT_EXPR
, type
, arg0
, arg1
);
8317 if (flag_unsafe_math_optimizations
)
8319 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
8320 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
8322 /* Optimize sin(x)/cos(x) as tan(x). */
8323 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_COS
)
8324 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_COSF
)
8325 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_COSL
))
8326 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0
, 1)),
8327 TREE_VALUE (TREE_OPERAND (arg1
, 1)), 0))
8329 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
8331 if (tanfn
!= NULL_TREE
)
8332 return build_function_call_expr (tanfn
,
8333 TREE_OPERAND (arg0
, 1));
8336 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
8337 if (((fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_SIN
)
8338 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_SINF
)
8339 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_SINL
))
8340 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0
, 1)),
8341 TREE_VALUE (TREE_OPERAND (arg1
, 1)), 0))
8343 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
8345 if (tanfn
!= NULL_TREE
)
8347 tree tmp
= TREE_OPERAND (arg0
, 1);
8348 tmp
= build_function_call_expr (tanfn
, tmp
);
8349 return fold_build2 (RDIV_EXPR
, type
,
8350 build_real (type
, dconst1
), tmp
);
8354 /* Optimize pow(x,c)/x as pow(x,c-1). */
8355 if (fcode0
== BUILT_IN_POW
8356 || fcode0
== BUILT_IN_POWF
8357 || fcode0
== BUILT_IN_POWL
)
8359 tree arg00
= TREE_VALUE (TREE_OPERAND (arg0
, 1));
8360 tree arg01
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0
, 1)));
8361 if (TREE_CODE (arg01
) == REAL_CST
8362 && ! TREE_CONSTANT_OVERFLOW (arg01
)
8363 && operand_equal_p (arg1
, arg00
, 0))
8365 tree powfn
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
8369 c
= TREE_REAL_CST (arg01
);
8370 real_arithmetic (&c
, MINUS_EXPR
, &c
, &dconst1
);
8371 arg
= build_real (type
, c
);
8372 arglist
= build_tree_list (NULL_TREE
, arg
);
8373 arglist
= tree_cons (NULL_TREE
, arg1
, arglist
);
8374 return build_function_call_expr (powfn
, arglist
);
8380 case TRUNC_DIV_EXPR
:
8381 case ROUND_DIV_EXPR
:
8382 case FLOOR_DIV_EXPR
:
8384 case EXACT_DIV_EXPR
:
8385 if (integer_onep (arg1
))
8386 return non_lvalue (fold_convert (type
, arg0
));
8387 if (integer_zerop (arg1
))
8390 if (!TYPE_UNSIGNED (type
)
8391 && TREE_CODE (arg1
) == INTEGER_CST
8392 && TREE_INT_CST_LOW (arg1
) == (unsigned HOST_WIDE_INT
) -1
8393 && TREE_INT_CST_HIGH (arg1
) == -1)
8394 return fold_convert (type
, negate_expr (arg0
));
8396 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8397 operation, EXACT_DIV_EXPR.
8399 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
8400 At one time others generated faster code, it's not clear if they do
8401 after the last round to changes to the DIV code in expmed.c. */
8402 if ((code
== CEIL_DIV_EXPR
|| code
== FLOOR_DIV_EXPR
)
8403 && multiple_of_p (type
, arg0
, arg1
))
8404 return fold_build2 (EXACT_DIV_EXPR
, type
, arg0
, arg1
);
8406 if (TREE_CODE (arg1
) == INTEGER_CST
8407 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
)))
8408 return fold_convert (type
, tem
);
8413 case FLOOR_MOD_EXPR
:
8414 case ROUND_MOD_EXPR
:
8415 case TRUNC_MOD_EXPR
:
8416 /* X % 1 is always zero, but be sure to preserve any side
8418 if (integer_onep (arg1
))
8419 return omit_one_operand (type
, integer_zero_node
, arg0
);
8421 /* X % 0, return X % 0 unchanged so that we can get the
8422 proper warnings and errors. */
8423 if (integer_zerop (arg1
))
8426 /* 0 % X is always zero, but be sure to preserve any side
8427 effects in X. Place this after checking for X == 0. */
8428 if (integer_zerop (arg0
))
8429 return omit_one_operand (type
, integer_zero_node
, arg1
);
8431 /* X % -1 is zero. */
8432 if (!TYPE_UNSIGNED (type
)
8433 && TREE_CODE (arg1
) == INTEGER_CST
8434 && TREE_INT_CST_LOW (arg1
) == (unsigned HOST_WIDE_INT
) -1
8435 && TREE_INT_CST_HIGH (arg1
) == -1)
8436 return omit_one_operand (type
, integer_zero_node
, arg0
);
8438 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
8439 i.e. "X % C" into "X & C2", if X and C are positive. */
8440 if ((code
== TRUNC_MOD_EXPR
|| code
== FLOOR_MOD_EXPR
)
8441 && (TYPE_UNSIGNED (type
) || tree_expr_nonnegative_p (arg0
))
8442 && integer_pow2p (arg1
) && tree_int_cst_sgn (arg1
) >= 0)
8444 unsigned HOST_WIDE_INT high
, low
;
8448 l
= tree_log2 (arg1
);
8449 if (l
>= HOST_BITS_PER_WIDE_INT
)
8451 high
= ((unsigned HOST_WIDE_INT
) 1
8452 << (l
- HOST_BITS_PER_WIDE_INT
)) - 1;
8458 low
= ((unsigned HOST_WIDE_INT
) 1 << l
) - 1;
8461 mask
= build_int_cst_wide (type
, low
, high
);
8462 return fold_build2 (BIT_AND_EXPR
, type
,
8463 fold_convert (type
, arg0
), mask
);
8466 /* X % -C is the same as X % C. */
8467 if (code
== TRUNC_MOD_EXPR
8468 && !TYPE_UNSIGNED (type
)
8469 && TREE_CODE (arg1
) == INTEGER_CST
8470 && !TREE_CONSTANT_OVERFLOW (arg1
)
8471 && TREE_INT_CST_HIGH (arg1
) < 0
8473 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8474 && !sign_bit_p (arg1
, arg1
))
8475 return fold_build2 (code
, type
, fold_convert (type
, arg0
),
8476 fold_convert (type
, negate_expr (arg1
)));
8478 /* X % -Y is the same as X % Y. */
8479 if (code
== TRUNC_MOD_EXPR
8480 && !TYPE_UNSIGNED (type
)
8481 && TREE_CODE (arg1
) == NEGATE_EXPR
8483 return fold_build2 (code
, type
, fold_convert (type
, arg0
),
8484 fold_convert (type
, TREE_OPERAND (arg1
, 0)));
8486 if (TREE_CODE (arg1
) == INTEGER_CST
8487 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
)))
8488 return fold_convert (type
, tem
);
8494 if (integer_all_onesp (arg0
))
8495 return omit_one_operand (type
, arg0
, arg1
);
8499 /* Optimize -1 >> x for arithmetic right shifts. */
8500 if (integer_all_onesp (arg0
) && !TYPE_UNSIGNED (type
))
8501 return omit_one_operand (type
, arg0
, arg1
);
8502 /* ... fall through ... */
8506 if (integer_zerop (arg1
))
8507 return non_lvalue (fold_convert (type
, arg0
));
8508 if (integer_zerop (arg0
))
8509 return omit_one_operand (type
, arg0
, arg1
);
8511 /* Since negative shift count is not well-defined,
8512 don't try to compute it in the compiler. */
8513 if (TREE_CODE (arg1
) == INTEGER_CST
&& tree_int_cst_sgn (arg1
) < 0)
8516 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
8517 if (TREE_CODE (arg0
) == code
&& host_integerp (arg1
, false)
8518 && TREE_INT_CST_LOW (arg1
) < TYPE_PRECISION (type
)
8519 && host_integerp (TREE_OPERAND (arg0
, 1), false)
8520 && TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1)) < TYPE_PRECISION (type
))
8522 HOST_WIDE_INT low
= (TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1))
8523 + TREE_INT_CST_LOW (arg1
));
8525 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
8526 being well defined. */
8527 if (low
>= TYPE_PRECISION (type
))
8529 if (code
== LROTATE_EXPR
|| code
== RROTATE_EXPR
)
8530 low
= low
% TYPE_PRECISION (type
);
8531 else if (TYPE_UNSIGNED (type
) || code
== LSHIFT_EXPR
)
8532 return build_int_cst (type
, 0);
8534 low
= TYPE_PRECISION (type
) - 1;
8537 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0),
8538 build_int_cst (type
, low
));
8541 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
8542 into x & ((unsigned)-1 >> c) for unsigned types. */
8543 if (((code
== LSHIFT_EXPR
&& TREE_CODE (arg0
) == RSHIFT_EXPR
)
8544 || (TYPE_UNSIGNED (type
)
8545 && code
== RSHIFT_EXPR
&& TREE_CODE (arg0
) == LSHIFT_EXPR
))
8546 && host_integerp (arg1
, false)
8547 && TREE_INT_CST_LOW (arg1
) < TYPE_PRECISION (type
)
8548 && host_integerp (TREE_OPERAND (arg0
, 1), false)
8549 && TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1)) < TYPE_PRECISION (type
))
8551 HOST_WIDE_INT low0
= TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1));
8552 HOST_WIDE_INT low1
= TREE_INT_CST_LOW (arg1
);
8558 arg00
= fold_convert (type
, TREE_OPERAND (arg0
, 0));
8560 lshift
= build_int_cst (type
, -1);
8561 lshift
= int_const_binop (code
, lshift
, arg1
, 0);
8563 return fold_build2 (BIT_AND_EXPR
, type
, arg00
, lshift
);
8567 /* Rewrite an LROTATE_EXPR by a constant into an
8568 RROTATE_EXPR by a new constant. */
8569 if (code
== LROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
)
8571 tree tem
= build_int_cst (NULL_TREE
,
8572 GET_MODE_BITSIZE (TYPE_MODE (type
)));
8573 tem
= fold_convert (TREE_TYPE (arg1
), tem
);
8574 tem
= const_binop (MINUS_EXPR
, tem
, arg1
, 0);
8575 return fold_build2 (RROTATE_EXPR
, type
, arg0
, tem
);
8578 /* If we have a rotate of a bit operation with the rotate count and
8579 the second operand of the bit operation both constant,
8580 permute the two operations. */
8581 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
8582 && (TREE_CODE (arg0
) == BIT_AND_EXPR
8583 || TREE_CODE (arg0
) == BIT_IOR_EXPR
8584 || TREE_CODE (arg0
) == BIT_XOR_EXPR
)
8585 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
8586 return fold_build2 (TREE_CODE (arg0
), type
,
8587 fold_build2 (code
, type
,
8588 TREE_OPERAND (arg0
, 0), arg1
),
8589 fold_build2 (code
, type
,
8590 TREE_OPERAND (arg0
, 1), arg1
));
8592 /* Two consecutive rotates adding up to the width of the mode can
8594 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
8595 && TREE_CODE (arg0
) == RROTATE_EXPR
8596 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8597 && TREE_INT_CST_HIGH (arg1
) == 0
8598 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0
, 1)) == 0
8599 && ((TREE_INT_CST_LOW (arg1
)
8600 + TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1)))
8601 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type
))))
8602 return TREE_OPERAND (arg0
, 0);
8607 if (operand_equal_p (arg0
, arg1
, 0))
8608 return omit_one_operand (type
, arg0
, arg1
);
8609 if (INTEGRAL_TYPE_P (type
)
8610 && operand_equal_p (arg1
, TYPE_MIN_VALUE (type
), OEP_ONLY_CONST
))
8611 return omit_one_operand (type
, arg1
, arg0
);
8615 if (operand_equal_p (arg0
, arg1
, 0))
8616 return omit_one_operand (type
, arg0
, arg1
);
8617 if (INTEGRAL_TYPE_P (type
)
8618 && TYPE_MAX_VALUE (type
)
8619 && operand_equal_p (arg1
, TYPE_MAX_VALUE (type
), OEP_ONLY_CONST
))
8620 return omit_one_operand (type
, arg1
, arg0
);
8623 case TRUTH_ANDIF_EXPR
:
8624 /* Note that the operands of this must be ints
8625 and their values must be 0 or 1.
8626 ("true" is a fixed value perhaps depending on the language.) */
8627 /* If first arg is constant zero, return it. */
8628 if (integer_zerop (arg0
))
8629 return fold_convert (type
, arg0
);
8630 case TRUTH_AND_EXPR
:
8631 /* If either arg is constant true, drop it. */
8632 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
8633 return non_lvalue (fold_convert (type
, arg1
));
8634 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
)
8635 /* Preserve sequence points. */
8636 && (code
!= TRUTH_ANDIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
8637 return non_lvalue (fold_convert (type
, arg0
));
8638 /* If second arg is constant zero, result is zero, but first arg
8639 must be evaluated. */
8640 if (integer_zerop (arg1
))
8641 return omit_one_operand (type
, arg1
, arg0
);
8642 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8643 case will be handled here. */
8644 if (integer_zerop (arg0
))
8645 return omit_one_operand (type
, arg0
, arg1
);
8647 /* !X && X is always false. */
8648 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
8649 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
8650 return omit_one_operand (type
, integer_zero_node
, arg1
);
8651 /* X && !X is always false. */
8652 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
8653 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
8654 return omit_one_operand (type
, integer_zero_node
, arg0
);
8656 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8657 means A >= Y && A != MAX, but in this case we know that
8660 if (!TREE_SIDE_EFFECTS (arg0
)
8661 && !TREE_SIDE_EFFECTS (arg1
))
8663 tem
= fold_to_nonsharp_ineq_using_bound (arg0
, arg1
);
8664 if (tem
&& !operand_equal_p (tem
, arg0
, 0))
8665 return fold_build2 (code
, type
, tem
, arg1
);
8667 tem
= fold_to_nonsharp_ineq_using_bound (arg1
, arg0
);
8668 if (tem
&& !operand_equal_p (tem
, arg1
, 0))
8669 return fold_build2 (code
, type
, arg0
, tem
);
8673 /* We only do these simplifications if we are optimizing. */
8677 /* Check for things like (A || B) && (A || C). We can convert this
8678 to A || (B && C). Note that either operator can be any of the four
8679 truth and/or operations and the transformation will still be
8680 valid. Also note that we only care about order for the
8681 ANDIF and ORIF operators. If B contains side effects, this
8682 might change the truth-value of A. */
8683 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8684 && (TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
8685 || TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
8686 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
8687 || TREE_CODE (arg0
) == TRUTH_OR_EXPR
)
8688 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0
, 1)))
8690 tree a00
= TREE_OPERAND (arg0
, 0);
8691 tree a01
= TREE_OPERAND (arg0
, 1);
8692 tree a10
= TREE_OPERAND (arg1
, 0);
8693 tree a11
= TREE_OPERAND (arg1
, 1);
8694 int commutative
= ((TREE_CODE (arg0
) == TRUTH_OR_EXPR
8695 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
)
8696 && (code
== TRUTH_AND_EXPR
8697 || code
== TRUTH_OR_EXPR
));
8699 if (operand_equal_p (a00
, a10
, 0))
8700 return fold_build2 (TREE_CODE (arg0
), type
, a00
,
8701 fold_build2 (code
, type
, a01
, a11
));
8702 else if (commutative
&& operand_equal_p (a00
, a11
, 0))
8703 return fold_build2 (TREE_CODE (arg0
), type
, a00
,
8704 fold_build2 (code
, type
, a01
, a10
));
8705 else if (commutative
&& operand_equal_p (a01
, a10
, 0))
8706 return fold_build2 (TREE_CODE (arg0
), type
, a01
,
8707 fold_build2 (code
, type
, a00
, a11
));
8709 /* This case if tricky because we must either have commutative
8710 operators or else A10 must not have side-effects. */
8712 else if ((commutative
|| ! TREE_SIDE_EFFECTS (a10
))
8713 && operand_equal_p (a01
, a11
, 0))
8714 return fold_build2 (TREE_CODE (arg0
), type
,
8715 fold_build2 (code
, type
, a00
, a10
),
8719 /* See if we can build a range comparison. */
8720 if (0 != (tem
= fold_range_test (code
, type
, op0
, op1
)))
8723 /* Check for the possibility of merging component references. If our
8724 lhs is another similar operation, try to merge its rhs with our
8725 rhs. Then try to merge our lhs and rhs. */
8726 if (TREE_CODE (arg0
) == code
8727 && 0 != (tem
= fold_truthop (code
, type
,
8728 TREE_OPERAND (arg0
, 1), arg1
)))
8729 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0), tem
);
8731 if ((tem
= fold_truthop (code
, type
, arg0
, arg1
)) != 0)
8736 case TRUTH_ORIF_EXPR
:
8737 /* Note that the operands of this must be ints
8738 and their values must be 0 or true.
8739 ("true" is a fixed value perhaps depending on the language.) */
8740 /* If first arg is constant true, return it. */
8741 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
8742 return fold_convert (type
, arg0
);
8744 /* If either arg is constant zero, drop it. */
8745 if (TREE_CODE (arg0
) == INTEGER_CST
&& integer_zerop (arg0
))
8746 return non_lvalue (fold_convert (type
, arg1
));
8747 if (TREE_CODE (arg1
) == INTEGER_CST
&& integer_zerop (arg1
)
8748 /* Preserve sequence points. */
8749 && (code
!= TRUTH_ORIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
8750 return non_lvalue (fold_convert (type
, arg0
));
8751 /* If second arg is constant true, result is true, but we must
8752 evaluate first arg. */
8753 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
))
8754 return omit_one_operand (type
, arg1
, arg0
);
8755 /* Likewise for first arg, but note this only occurs here for
8757 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
8758 return omit_one_operand (type
, arg0
, arg1
);
8760 /* !X || X is always true. */
8761 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
8762 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
8763 return omit_one_operand (type
, integer_one_node
, arg1
);
8764 /* X || !X is always true. */
8765 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
8766 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
8767 return omit_one_operand (type
, integer_one_node
, arg0
);
8771 case TRUTH_XOR_EXPR
:
8772 /* If the second arg is constant zero, drop it. */
8773 if (integer_zerop (arg1
))
8774 return non_lvalue (fold_convert (type
, arg0
));
8775 /* If the second arg is constant true, this is a logical inversion. */
8776 if (integer_onep (arg1
))
8778 /* Only call invert_truthvalue if operand is a truth value. */
8779 if (TREE_CODE (TREE_TYPE (arg0
)) != BOOLEAN_TYPE
)
8780 tem
= fold_build1 (TRUTH_NOT_EXPR
, TREE_TYPE (arg0
), arg0
);
8782 tem
= invert_truthvalue (arg0
);
8783 return non_lvalue (fold_convert (type
, tem
));
8785 /* Identical arguments cancel to zero. */
8786 if (operand_equal_p (arg0
, arg1
, 0))
8787 return omit_one_operand (type
, integer_zero_node
, arg0
);
8789 /* !X ^ X is always true. */
8790 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
8791 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
8792 return omit_one_operand (type
, integer_one_node
, arg1
);
8794 /* X ^ !X is always true. */
8795 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
8796 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
8797 return omit_one_operand (type
, integer_one_node
, arg0
);
8807 /* If one arg is a real or integer constant, put it last. */
8808 if (tree_swap_operands_p (arg0
, arg1
, true))
8809 return fold_build2 (swap_tree_comparison (code
), type
, op1
, op0
);
8811 /* bool_var != 0 becomes bool_var. */
8812 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
8814 return non_lvalue (fold_convert (type
, arg0
));
8816 /* bool_var == 1 becomes bool_var. */
8817 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
8819 return non_lvalue (fold_convert (type
, arg0
));
8821 /* If this is an equality comparison of the address of a non-weak
8822 object against zero, then we know the result. */
8823 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
8824 && TREE_CODE (arg0
) == ADDR_EXPR
8825 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0
, 0))
8826 && ! DECL_WEAK (TREE_OPERAND (arg0
, 0))
8827 && integer_zerop (arg1
))
8828 return constant_boolean_node (code
!= EQ_EXPR
, type
);
8830 /* If this is an equality comparison of the address of two non-weak,
8831 unaliased symbols neither of which are extern (since we do not
8832 have access to attributes for externs), then we know the result. */
8833 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
8834 && TREE_CODE (arg0
) == ADDR_EXPR
8835 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0
, 0))
8836 && ! DECL_WEAK (TREE_OPERAND (arg0
, 0))
8837 && ! lookup_attribute ("alias",
8838 DECL_ATTRIBUTES (TREE_OPERAND (arg0
, 0)))
8839 && ! DECL_EXTERNAL (TREE_OPERAND (arg0
, 0))
8840 && TREE_CODE (arg1
) == ADDR_EXPR
8841 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1
, 0))
8842 && ! DECL_WEAK (TREE_OPERAND (arg1
, 0))
8843 && ! lookup_attribute ("alias",
8844 DECL_ATTRIBUTES (TREE_OPERAND (arg1
, 0)))
8845 && ! DECL_EXTERNAL (TREE_OPERAND (arg1
, 0)))
8847 /* We know that we're looking at the address of two
8848 non-weak, unaliased, static _DECL nodes.
8850 It is both wasteful and incorrect to call operand_equal_p
8851 to compare the two ADDR_EXPR nodes. It is wasteful in that
8852 all we need to do is test pointer equality for the arguments
8853 to the two ADDR_EXPR nodes. It is incorrect to use
8854 operand_equal_p as that function is NOT equivalent to a
8855 C equality test. It can in fact return false for two
8856 objects which would test as equal using the C equality
8858 bool equal
= TREE_OPERAND (arg0
, 0) == TREE_OPERAND (arg1
, 0);
8859 return constant_boolean_node (equal
8860 ? code
== EQ_EXPR
: code
!= EQ_EXPR
,
8864 /* If this is a comparison of two exprs that look like an
8865 ARRAY_REF of the same object, then we can fold this to a
8866 comparison of the two offsets. */
8867 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
8869 tree base0
, offset0
, base1
, offset1
;
8871 if (extract_array_ref (arg0
, &base0
, &offset0
)
8872 && extract_array_ref (arg1
, &base1
, &offset1
)
8873 && operand_equal_p (base0
, base1
, 0))
8875 if (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base0
)))
8876 && integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base0
)))))
8877 offset0
= NULL_TREE
;
8878 if (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base1
)))
8879 && integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base1
)))))
8880 offset1
= NULL_TREE
;
8881 if (offset0
== NULL_TREE
8882 && offset1
== NULL_TREE
)
8884 offset0
= integer_zero_node
;
8885 offset1
= integer_zero_node
;
8887 else if (offset0
== NULL_TREE
)
8888 offset0
= build_int_cst (TREE_TYPE (offset1
), 0);
8889 else if (offset1
== NULL_TREE
)
8890 offset1
= build_int_cst (TREE_TYPE (offset0
), 0);
8892 if (TREE_TYPE (offset0
) == TREE_TYPE (offset1
))
8893 return fold_build2 (code
, type
, offset0
, offset1
);
8897 /* Transform comparisons of the form X +- C CMP X. */
8898 if ((code
!= EQ_EXPR
&& code
!= NE_EXPR
)
8899 && (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8900 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
8901 && ((TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
8902 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
))))
8903 || (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8904 && !TYPE_UNSIGNED (TREE_TYPE (arg1
))
8905 && !(flag_wrapv
|| flag_trapv
))))
8907 tree arg01
= TREE_OPERAND (arg0
, 1);
8908 enum tree_code code0
= TREE_CODE (arg0
);
8911 if (TREE_CODE (arg01
) == REAL_CST
)
8912 is_positive
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01
)) ? -1 : 1;
8914 is_positive
= tree_int_cst_sgn (arg01
);
8916 /* (X - c) > X becomes false. */
8918 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
8919 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
8920 return constant_boolean_node (0, type
);
8922 /* Likewise (X + c) < X becomes false. */
8924 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
8925 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
8926 return constant_boolean_node (0, type
);
8928 /* Convert (X - c) <= X to true. */
8929 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
)))
8931 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
8932 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
8933 return constant_boolean_node (1, type
);
8935 /* Convert (X + c) >= X to true. */
8936 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
)))
8938 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
8939 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
8940 return constant_boolean_node (1, type
);
8942 if (TREE_CODE (arg01
) == INTEGER_CST
)
8944 /* Convert X + c > X and X - c < X to true for integers. */
8946 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
8947 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
8948 return constant_boolean_node (1, type
);
8951 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
8952 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
8953 return constant_boolean_node (1, type
);
8955 /* Convert X + c <= X and X - c >= X to false for integers. */
8957 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
8958 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
8959 return constant_boolean_node (0, type
);
8962 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
8963 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
8964 return constant_boolean_node (0, type
);
8968 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8969 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8970 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8971 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
8972 && !TYPE_UNSIGNED (TREE_TYPE (arg1
))
8973 && !(flag_wrapv
|| flag_trapv
))
8974 && (TREE_CODE (arg1
) == INTEGER_CST
8975 && !TREE_OVERFLOW (arg1
)))
8977 tree const1
= TREE_OPERAND (arg0
, 1);
8979 tree variable
= TREE_OPERAND (arg0
, 0);
8982 lhs_add
= TREE_CODE (arg0
) != PLUS_EXPR
;
8984 lhs
= fold_build2 (lhs_add
? PLUS_EXPR
: MINUS_EXPR
,
8985 TREE_TYPE (arg1
), const2
, const1
);
8986 if (TREE_CODE (lhs
) == TREE_CODE (arg1
)
8987 && (TREE_CODE (lhs
) != INTEGER_CST
8988 || !TREE_OVERFLOW (lhs
)))
8989 return fold_build2 (code
, type
, variable
, lhs
);
8992 if (FLOAT_TYPE_P (TREE_TYPE (arg0
)))
8994 tree targ0
= strip_float_extensions (arg0
);
8995 tree targ1
= strip_float_extensions (arg1
);
8996 tree newtype
= TREE_TYPE (targ0
);
8998 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
8999 newtype
= TREE_TYPE (targ1
);
9001 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9002 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
9003 return fold_build2 (code
, type
, fold_convert (newtype
, targ0
),
9004 fold_convert (newtype
, targ1
));
9006 /* (-a) CMP (-b) -> b CMP a */
9007 if (TREE_CODE (arg0
) == NEGATE_EXPR
9008 && TREE_CODE (arg1
) == NEGATE_EXPR
)
9009 return fold_build2 (code
, type
, TREE_OPERAND (arg1
, 0),
9010 TREE_OPERAND (arg0
, 0));
9012 if (TREE_CODE (arg1
) == REAL_CST
)
9014 REAL_VALUE_TYPE cst
;
9015 cst
= TREE_REAL_CST (arg1
);
9017 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9018 if (TREE_CODE (arg0
) == NEGATE_EXPR
)
9020 fold_build2 (swap_tree_comparison (code
), type
,
9021 TREE_OPERAND (arg0
, 0),
9022 build_real (TREE_TYPE (arg1
),
9023 REAL_VALUE_NEGATE (cst
)));
9025 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9026 /* a CMP (-0) -> a CMP 0 */
9027 if (REAL_VALUE_MINUS_ZERO (cst
))
9028 return fold_build2 (code
, type
, arg0
,
9029 build_real (TREE_TYPE (arg1
), dconst0
));
9031 /* x != NaN is always true, other ops are always false. */
9032 if (REAL_VALUE_ISNAN (cst
)
9033 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
))))
9035 tem
= (code
== NE_EXPR
) ? integer_one_node
: integer_zero_node
;
9036 return omit_one_operand (type
, tem
, arg0
);
9039 /* Fold comparisons against infinity. */
9040 if (REAL_VALUE_ISINF (cst
))
9042 tem
= fold_inf_compare (code
, type
, arg0
, arg1
);
9043 if (tem
!= NULL_TREE
)
9048 /* If this is a comparison of a real constant with a PLUS_EXPR
9049 or a MINUS_EXPR of a real constant, we can convert it into a
9050 comparison with a revised real constant as long as no overflow
9051 occurs when unsafe_math_optimizations are enabled. */
9052 if (flag_unsafe_math_optimizations
9053 && TREE_CODE (arg1
) == REAL_CST
9054 && (TREE_CODE (arg0
) == PLUS_EXPR
9055 || TREE_CODE (arg0
) == MINUS_EXPR
)
9056 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
9057 && 0 != (tem
= const_binop (TREE_CODE (arg0
) == PLUS_EXPR
9058 ? MINUS_EXPR
: PLUS_EXPR
,
9059 arg1
, TREE_OPERAND (arg0
, 1), 0))
9060 && ! TREE_CONSTANT_OVERFLOW (tem
))
9061 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0), tem
);
9063 /* Likewise, we can simplify a comparison of a real constant with
9064 a MINUS_EXPR whose first operand is also a real constant, i.e.
9065 (c1 - x) < c2 becomes x > c1-c2. */
9066 if (flag_unsafe_math_optimizations
9067 && TREE_CODE (arg1
) == REAL_CST
9068 && TREE_CODE (arg0
) == MINUS_EXPR
9069 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
9070 && 0 != (tem
= const_binop (MINUS_EXPR
, TREE_OPERAND (arg0
, 0),
9072 && ! TREE_CONSTANT_OVERFLOW (tem
))
9073 return fold_build2 (swap_tree_comparison (code
), type
,
9074 TREE_OPERAND (arg0
, 1), tem
);
9076 /* Fold comparisons against built-in math functions. */
9077 if (TREE_CODE (arg1
) == REAL_CST
9078 && flag_unsafe_math_optimizations
9079 && ! flag_errno_math
)
9081 enum built_in_function fcode
= builtin_mathfn_code (arg0
);
9083 if (fcode
!= END_BUILTINS
)
9085 tem
= fold_mathfn_compare (fcode
, code
, type
, arg0
, arg1
);
9086 if (tem
!= NULL_TREE
)
9092 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
9093 if (TREE_CONSTANT (arg1
)
9094 && (TREE_CODE (arg0
) == POSTINCREMENT_EXPR
9095 || TREE_CODE (arg0
) == POSTDECREMENT_EXPR
)
9096 /* This optimization is invalid for ordered comparisons
9097 if CONST+INCR overflows or if foo+incr might overflow.
9098 This optimization is invalid for floating point due to rounding.
9099 For pointer types we assume overflow doesn't happen. */
9100 && (POINTER_TYPE_P (TREE_TYPE (arg0
))
9101 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
9102 && (code
== EQ_EXPR
|| code
== NE_EXPR
))))
9104 tree varop
, newconst
;
9106 if (TREE_CODE (arg0
) == POSTINCREMENT_EXPR
)
9108 newconst
= fold_build2 (PLUS_EXPR
, TREE_TYPE (arg0
),
9109 arg1
, TREE_OPERAND (arg0
, 1));
9110 varop
= build2 (PREINCREMENT_EXPR
, TREE_TYPE (arg0
),
9111 TREE_OPERAND (arg0
, 0),
9112 TREE_OPERAND (arg0
, 1));
9116 newconst
= fold_build2 (MINUS_EXPR
, TREE_TYPE (arg0
),
9117 arg1
, TREE_OPERAND (arg0
, 1));
9118 varop
= build2 (PREDECREMENT_EXPR
, TREE_TYPE (arg0
),
9119 TREE_OPERAND (arg0
, 0),
9120 TREE_OPERAND (arg0
, 1));
9124 /* If VAROP is a reference to a bitfield, we must mask
9125 the constant by the width of the field. */
9126 if (TREE_CODE (TREE_OPERAND (varop
, 0)) == COMPONENT_REF
9127 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop
, 0), 1))
9128 && host_integerp (DECL_SIZE (TREE_OPERAND
9129 (TREE_OPERAND (varop
, 0), 1)), 1))
9131 tree fielddecl
= TREE_OPERAND (TREE_OPERAND (varop
, 0), 1);
9132 HOST_WIDE_INT size
= tree_low_cst (DECL_SIZE (fielddecl
), 1);
9133 tree folded_compare
, shift
;
9135 /* First check whether the comparison would come out
9136 always the same. If we don't do that we would
9137 change the meaning with the masking. */
9138 folded_compare
= fold_build2 (code
, type
,
9139 TREE_OPERAND (varop
, 0), arg1
);
9140 if (integer_zerop (folded_compare
)
9141 || integer_onep (folded_compare
))
9142 return omit_one_operand (type
, folded_compare
, varop
);
9144 shift
= build_int_cst (NULL_TREE
,
9145 TYPE_PRECISION (TREE_TYPE (varop
)) - size
);
9146 shift
= fold_convert (TREE_TYPE (varop
), shift
);
9147 newconst
= fold_build2 (LSHIFT_EXPR
, TREE_TYPE (varop
),
9149 newconst
= fold_build2 (RSHIFT_EXPR
, TREE_TYPE (varop
),
9153 return fold_build2 (code
, type
, varop
, newconst
);
9156 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9157 This transformation affects the cases which are handled in later
9158 optimizations involving comparisons with non-negative constants. */
9159 if (TREE_CODE (arg1
) == INTEGER_CST
9160 && TREE_CODE (arg0
) != INTEGER_CST
9161 && tree_int_cst_sgn (arg1
) > 0)
9166 arg1
= const_binop (MINUS_EXPR
, arg1
,
9167 build_int_cst (TREE_TYPE (arg1
), 1), 0);
9168 return fold_build2 (GT_EXPR
, type
, arg0
,
9169 fold_convert (TREE_TYPE (arg0
), arg1
));
9172 arg1
= const_binop (MINUS_EXPR
, arg1
,
9173 build_int_cst (TREE_TYPE (arg1
), 1), 0);
9174 return fold_build2 (LE_EXPR
, type
, arg0
,
9175 fold_convert (TREE_TYPE (arg0
), arg1
));
9182 /* Comparisons with the highest or lowest possible integer of
9183 the specified size will have known values. */
9185 int width
= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1
)));
9187 if (TREE_CODE (arg1
) == INTEGER_CST
9188 && ! TREE_CONSTANT_OVERFLOW (arg1
)
9189 && width
<= 2 * HOST_BITS_PER_WIDE_INT
9190 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
9191 || POINTER_TYPE_P (TREE_TYPE (arg1
))))
9193 HOST_WIDE_INT signed_max_hi
;
9194 unsigned HOST_WIDE_INT signed_max_lo
;
9195 unsigned HOST_WIDE_INT max_hi
, max_lo
, min_hi
, min_lo
;
9197 if (width
<= HOST_BITS_PER_WIDE_INT
)
9199 signed_max_lo
= ((unsigned HOST_WIDE_INT
) 1 << (width
- 1))
9204 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
9206 max_lo
= ((unsigned HOST_WIDE_INT
) 2 << (width
- 1)) - 1;
9212 max_lo
= signed_max_lo
;
9213 min_lo
= ((unsigned HOST_WIDE_INT
) -1 << (width
- 1));
9219 width
-= HOST_BITS_PER_WIDE_INT
;
9221 signed_max_hi
= ((unsigned HOST_WIDE_INT
) 1 << (width
- 1))
9226 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
9228 max_hi
= ((unsigned HOST_WIDE_INT
) 2 << (width
- 1)) - 1;
9233 max_hi
= signed_max_hi
;
9234 min_hi
= ((unsigned HOST_WIDE_INT
) -1 << (width
- 1));
9238 if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
) == max_hi
9239 && TREE_INT_CST_LOW (arg1
) == max_lo
)
9243 return omit_one_operand (type
, integer_zero_node
, arg0
);
9246 return fold_build2 (EQ_EXPR
, type
, arg0
, arg1
);
9249 return omit_one_operand (type
, integer_one_node
, arg0
);
9252 return fold_build2 (NE_EXPR
, type
, arg0
, arg1
);
9254 /* The GE_EXPR and LT_EXPR cases above are not normally
9255 reached because of previous transformations. */
9260 else if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
)
9262 && TREE_INT_CST_LOW (arg1
) == max_lo
- 1)
9266 arg1
= const_binop (PLUS_EXPR
, arg1
, integer_one_node
, 0);
9267 return fold_build2 (EQ_EXPR
, type
, arg0
, arg1
);
9269 arg1
= const_binop (PLUS_EXPR
, arg1
, integer_one_node
, 0);
9270 return fold_build2 (NE_EXPR
, type
, arg0
, arg1
);
9274 else if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
)
9276 && TREE_INT_CST_LOW (arg1
) == min_lo
)
9280 return omit_one_operand (type
, integer_zero_node
, arg0
);
9283 return fold_build2 (EQ_EXPR
, type
, arg0
, arg1
);
9286 return omit_one_operand (type
, integer_one_node
, arg0
);
9289 return fold_build2 (NE_EXPR
, type
, op0
, op1
);
9294 else if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
)
9296 && TREE_INT_CST_LOW (arg1
) == min_lo
+ 1)
9300 arg1
= const_binop (MINUS_EXPR
, arg1
, integer_one_node
, 0);
9301 return fold_build2 (NE_EXPR
, type
, arg0
, arg1
);
9303 arg1
= const_binop (MINUS_EXPR
, arg1
, integer_one_node
, 0);
9304 return fold_build2 (EQ_EXPR
, type
, arg0
, arg1
);
9309 else if (!in_gimple_form
9310 && TREE_INT_CST_HIGH (arg1
) == signed_max_hi
9311 && TREE_INT_CST_LOW (arg1
) == signed_max_lo
9312 && TYPE_UNSIGNED (TREE_TYPE (arg1
))
9313 /* signed_type does not work on pointer types. */
9314 && INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
9316 /* The following case also applies to X < signed_max+1
9317 and X >= signed_max+1 because previous transformations. */
9318 if (code
== LE_EXPR
|| code
== GT_EXPR
)
9321 st0
= lang_hooks
.types
.signed_type (TREE_TYPE (arg0
));
9322 st1
= lang_hooks
.types
.signed_type (TREE_TYPE (arg1
));
9323 return fold_build2 (code
== LE_EXPR
? GE_EXPR
: LT_EXPR
,
9324 type
, fold_convert (st0
, arg0
),
9325 build_int_cst (st1
, 0));
9331 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9332 a MINUS_EXPR of a constant, we can convert it into a comparison with
9333 a revised constant as long as no overflow occurs. */
9334 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
9335 && TREE_CODE (arg1
) == INTEGER_CST
9336 && (TREE_CODE (arg0
) == PLUS_EXPR
9337 || TREE_CODE (arg0
) == MINUS_EXPR
)
9338 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9339 && 0 != (tem
= const_binop (TREE_CODE (arg0
) == PLUS_EXPR
9340 ? MINUS_EXPR
: PLUS_EXPR
,
9341 arg1
, TREE_OPERAND (arg0
, 1), 0))
9342 && ! TREE_CONSTANT_OVERFLOW (tem
))
9343 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0), tem
);
9345 /* Similarly for a NEGATE_EXPR. */
9346 else if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
9347 && TREE_CODE (arg0
) == NEGATE_EXPR
9348 && TREE_CODE (arg1
) == INTEGER_CST
9349 && 0 != (tem
= negate_expr (arg1
))
9350 && TREE_CODE (tem
) == INTEGER_CST
9351 && ! TREE_CONSTANT_OVERFLOW (tem
))
9352 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0), tem
);
9354 /* If we have X - Y == 0, we can convert that to X == Y and similarly
9355 for !=. Don't do this for ordered comparisons due to overflow. */
9356 else if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
9357 && integer_zerop (arg1
) && TREE_CODE (arg0
) == MINUS_EXPR
)
9358 return fold_build2 (code
, type
,
9359 TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1));
9361 else if (TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
9362 && (TREE_CODE (arg0
) == NOP_EXPR
9363 || TREE_CODE (arg0
) == CONVERT_EXPR
))
9365 /* If we are widening one operand of an integer comparison,
9366 see if the other operand is similarly being widened. Perhaps we
9367 can do the comparison in the narrower type. */
9368 tem
= fold_widened_comparison (code
, type
, arg0
, arg1
);
9372 /* Or if we are changing signedness. */
9373 tem
= fold_sign_changed_comparison (code
, type
, arg0
, arg1
);
9378 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9379 constant, we can simplify it. */
9380 else if (TREE_CODE (arg1
) == INTEGER_CST
9381 && (TREE_CODE (arg0
) == MIN_EXPR
9382 || TREE_CODE (arg0
) == MAX_EXPR
)
9383 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
9385 tem
= optimize_minmax_comparison (code
, type
, op0
, op1
);
9392 /* If we are comparing an ABS_EXPR with a constant, we can
9393 convert all the cases into explicit comparisons, but they may
9394 well not be faster than doing the ABS and one comparison.
9395 But ABS (X) <= C is a range comparison, which becomes a subtraction
9396 and a comparison, and is probably faster. */
9397 else if (code
== LE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
9398 && TREE_CODE (arg0
) == ABS_EXPR
9399 && ! TREE_SIDE_EFFECTS (arg0
)
9400 && (0 != (tem
= negate_expr (arg1
)))
9401 && TREE_CODE (tem
) == INTEGER_CST
9402 && ! TREE_CONSTANT_OVERFLOW (tem
))
9403 return fold_build2 (TRUTH_ANDIF_EXPR
, type
,
9404 build2 (GE_EXPR
, type
,
9405 TREE_OPERAND (arg0
, 0), tem
),
9406 build2 (LE_EXPR
, type
,
9407 TREE_OPERAND (arg0
, 0), arg1
));
9409 /* Convert ABS_EXPR<x> >= 0 to true. */
9410 else if (code
== GE_EXPR
9411 && tree_expr_nonnegative_p (arg0
)
9412 && (integer_zerop (arg1
)
9413 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
9414 && real_zerop (arg1
))))
9415 return omit_one_operand (type
, integer_one_node
, arg0
);
9417 /* Convert ABS_EXPR<x> < 0 to false. */
9418 else if (code
== LT_EXPR
9419 && tree_expr_nonnegative_p (arg0
)
9420 && (integer_zerop (arg1
) || real_zerop (arg1
)))
9421 return omit_one_operand (type
, integer_zero_node
, arg0
);
9423 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
9424 else if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
9425 && TREE_CODE (arg0
) == ABS_EXPR
9426 && (integer_zerop (arg1
) || real_zerop (arg1
)))
9427 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0), arg1
);
9429 /* If this is an EQ or NE comparison with zero and ARG0 is
9430 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
9431 two operations, but the latter can be done in one less insn
9432 on machines that have only two-operand insns or on which a
9433 constant cannot be the first operand. */
9434 if (integer_zerop (arg1
) && (code
== EQ_EXPR
|| code
== NE_EXPR
)
9435 && TREE_CODE (arg0
) == BIT_AND_EXPR
)
9437 tree arg00
= TREE_OPERAND (arg0
, 0);
9438 tree arg01
= TREE_OPERAND (arg0
, 1);
9439 if (TREE_CODE (arg00
) == LSHIFT_EXPR
9440 && integer_onep (TREE_OPERAND (arg00
, 0)))
9442 fold_build2 (code
, type
,
9443 build2 (BIT_AND_EXPR
, TREE_TYPE (arg0
),
9444 build2 (RSHIFT_EXPR
, TREE_TYPE (arg00
),
9445 arg01
, TREE_OPERAND (arg00
, 1)),
9446 fold_convert (TREE_TYPE (arg0
),
9449 else if (TREE_CODE (TREE_OPERAND (arg0
, 1)) == LSHIFT_EXPR
9450 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0
, 1), 0)))
9452 fold_build2 (code
, type
,
9453 build2 (BIT_AND_EXPR
, TREE_TYPE (arg0
),
9454 build2 (RSHIFT_EXPR
, TREE_TYPE (arg01
),
9455 arg00
, TREE_OPERAND (arg01
, 1)),
9456 fold_convert (TREE_TYPE (arg0
),
9461 /* If this is an NE or EQ comparison of zero against the result of a
9462 signed MOD operation whose second operand is a power of 2, make
9463 the MOD operation unsigned since it is simpler and equivalent. */
9464 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
9465 && integer_zerop (arg1
)
9466 && !TYPE_UNSIGNED (TREE_TYPE (arg0
))
9467 && (TREE_CODE (arg0
) == TRUNC_MOD_EXPR
9468 || TREE_CODE (arg0
) == CEIL_MOD_EXPR
9469 || TREE_CODE (arg0
) == FLOOR_MOD_EXPR
9470 || TREE_CODE (arg0
) == ROUND_MOD_EXPR
)
9471 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
9473 tree newtype
= lang_hooks
.types
.unsigned_type (TREE_TYPE (arg0
));
9474 tree newmod
= fold_build2 (TREE_CODE (arg0
), newtype
,
9475 fold_convert (newtype
,
9476 TREE_OPERAND (arg0
, 0)),
9477 fold_convert (newtype
,
9478 TREE_OPERAND (arg0
, 1)));
9480 return fold_build2 (code
, type
, newmod
,
9481 fold_convert (newtype
, arg1
));
9484 /* If this is an NE comparison of zero with an AND of one, remove the
9485 comparison since the AND will give the correct value. */
9486 if (code
== NE_EXPR
&& integer_zerop (arg1
)
9487 && TREE_CODE (arg0
) == BIT_AND_EXPR
9488 && integer_onep (TREE_OPERAND (arg0
, 1)))
9489 return fold_convert (type
, arg0
);
9491 /* If we have (A & C) == C where C is a power of 2, convert this into
9492 (A & C) != 0. Similarly for NE_EXPR. */
9493 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
9494 && TREE_CODE (arg0
) == BIT_AND_EXPR
9495 && integer_pow2p (TREE_OPERAND (arg0
, 1))
9496 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
9497 return fold_build2 (code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
9498 arg0
, fold_convert (TREE_TYPE (arg0
),
9499 integer_zero_node
));
9501 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
9502 bit, then fold the expression into A < 0 or A >= 0. */
9503 tem
= fold_single_bit_test_into_sign_test (code
, arg0
, arg1
, type
);
9507 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9508 Similarly for NE_EXPR. */
9509 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
9510 && TREE_CODE (arg0
) == BIT_AND_EXPR
9511 && TREE_CODE (arg1
) == INTEGER_CST
9512 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
9514 tree notc
= fold_build1 (BIT_NOT_EXPR
,
9515 TREE_TYPE (TREE_OPERAND (arg0
, 1)),
9516 TREE_OPERAND (arg0
, 1));
9517 tree dandnotc
= fold_build2 (BIT_AND_EXPR
, TREE_TYPE (arg0
),
9519 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
9520 if (integer_nonzerop (dandnotc
))
9521 return omit_one_operand (type
, rslt
, arg0
);
9524 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9525 Similarly for NE_EXPR. */
9526 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
9527 && TREE_CODE (arg0
) == BIT_IOR_EXPR
9528 && TREE_CODE (arg1
) == INTEGER_CST
9529 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
9531 tree notd
= fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (arg1
), arg1
);
9532 tree candnotd
= fold_build2 (BIT_AND_EXPR
, TREE_TYPE (arg0
),
9533 TREE_OPERAND (arg0
, 1), notd
);
9534 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
9535 if (integer_nonzerop (candnotd
))
9536 return omit_one_operand (type
, rslt
, arg0
);
9539 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9540 and similarly for >= into !=. */
9541 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
9542 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
9543 && TREE_CODE (arg1
) == LSHIFT_EXPR
9544 && integer_onep (TREE_OPERAND (arg1
, 0)))
9545 return build2 (code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
9546 build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
9547 TREE_OPERAND (arg1
, 1)),
9548 fold_convert (TREE_TYPE (arg0
), integer_zero_node
));
9550 else if ((code
== LT_EXPR
|| code
== GE_EXPR
)
9551 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
9552 && (TREE_CODE (arg1
) == NOP_EXPR
9553 || TREE_CODE (arg1
) == CONVERT_EXPR
)
9554 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == LSHIFT_EXPR
9555 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0)))
9557 build2 (code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
9558 fold_convert (TREE_TYPE (arg0
),
9559 build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
9560 TREE_OPERAND (TREE_OPERAND (arg1
, 0),
9562 fold_convert (TREE_TYPE (arg0
), integer_zero_node
));
9564 /* Simplify comparison of something with itself. (For IEEE
9565 floating-point, we can only do some of these simplifications.) */
9566 if (operand_equal_p (arg0
, arg1
, 0))
9571 if (! FLOAT_TYPE_P (TREE_TYPE (arg0
))
9572 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
9573 return constant_boolean_node (1, type
);
9578 if (! FLOAT_TYPE_P (TREE_TYPE (arg0
))
9579 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
9580 return constant_boolean_node (1, type
);
9581 return fold_build2 (EQ_EXPR
, type
, arg0
, arg1
);
9584 /* For NE, we can only do this simplification if integer
9585 or we don't honor IEEE floating point NaNs. */
9586 if (FLOAT_TYPE_P (TREE_TYPE (arg0
))
9587 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
9589 /* ... fall through ... */
9592 return constant_boolean_node (0, type
);
9598 /* If we are comparing an expression that just has comparisons
9599 of two integer values, arithmetic expressions of those comparisons,
9600 and constants, we can simplify it. There are only three cases
9601 to check: the two values can either be equal, the first can be
9602 greater, or the second can be greater. Fold the expression for
9603 those three values. Since each value must be 0 or 1, we have
9604 eight possibilities, each of which corresponds to the constant 0
9605 or 1 or one of the six possible comparisons.
9607 This handles common cases like (a > b) == 0 but also handles
9608 expressions like ((x > y) - (y > x)) > 0, which supposedly
9609 occur in macroized code. */
9611 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) != INTEGER_CST
)
9613 tree cval1
= 0, cval2
= 0;
9616 if (twoval_comparison_p (arg0
, &cval1
, &cval2
, &save_p
)
9617 /* Don't handle degenerate cases here; they should already
9618 have been handled anyway. */
9619 && cval1
!= 0 && cval2
!= 0
9620 && ! (TREE_CONSTANT (cval1
) && TREE_CONSTANT (cval2
))
9621 && TREE_TYPE (cval1
) == TREE_TYPE (cval2
)
9622 && INTEGRAL_TYPE_P (TREE_TYPE (cval1
))
9623 && TYPE_MAX_VALUE (TREE_TYPE (cval1
))
9624 && TYPE_MAX_VALUE (TREE_TYPE (cval2
))
9625 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1
)),
9626 TYPE_MAX_VALUE (TREE_TYPE (cval2
)), 0))
9628 tree maxval
= TYPE_MAX_VALUE (TREE_TYPE (cval1
));
9629 tree minval
= TYPE_MIN_VALUE (TREE_TYPE (cval1
));
9631 /* We can't just pass T to eval_subst in case cval1 or cval2
9632 was the same as ARG1. */
9635 = fold_build2 (code
, type
,
9636 eval_subst (arg0
, cval1
, maxval
,
9640 = fold_build2 (code
, type
,
9641 eval_subst (arg0
, cval1
, maxval
,
9645 = fold_build2 (code
, type
,
9646 eval_subst (arg0
, cval1
, minval
,
9650 /* All three of these results should be 0 or 1. Confirm they
9651 are. Then use those values to select the proper code
9654 if ((integer_zerop (high_result
)
9655 || integer_onep (high_result
))
9656 && (integer_zerop (equal_result
)
9657 || integer_onep (equal_result
))
9658 && (integer_zerop (low_result
)
9659 || integer_onep (low_result
)))
9661 /* Make a 3-bit mask with the high-order bit being the
9662 value for `>', the next for '=', and the low for '<'. */
9663 switch ((integer_onep (high_result
) * 4)
9664 + (integer_onep (equal_result
) * 2)
9665 + integer_onep (low_result
))
9669 return omit_one_operand (type
, integer_zero_node
, arg0
);
9690 return omit_one_operand (type
, integer_one_node
, arg0
);
9694 return save_expr (build2 (code
, type
, cval1
, cval2
));
9696 return fold_build2 (code
, type
, cval1
, cval2
);
9701 /* If this is a comparison of a field, we may be able to simplify it. */
9702 if (((TREE_CODE (arg0
) == COMPONENT_REF
9703 && lang_hooks
.can_use_bit_fields_p ())
9704 || TREE_CODE (arg0
) == BIT_FIELD_REF
)
9705 && (code
== EQ_EXPR
|| code
== NE_EXPR
)
9706 /* Handle the constant case even without -O
9707 to make sure the warnings are given. */
9708 && (optimize
|| TREE_CODE (arg1
) == INTEGER_CST
))
9710 t1
= optimize_bit_field_compare (code
, type
, arg0
, arg1
);
9715 /* Fold a comparison of the address of COMPONENT_REFs with the same
9716 type and component to a comparison of the address of the base
9717 object. In short, &x->a OP &y->a to x OP y and
9718 &x->a OP &y.a to x OP &y */
9719 if (TREE_CODE (arg0
) == ADDR_EXPR
9720 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == COMPONENT_REF
9721 && TREE_CODE (arg1
) == ADDR_EXPR
9722 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == COMPONENT_REF
)
9724 tree cref0
= TREE_OPERAND (arg0
, 0);
9725 tree cref1
= TREE_OPERAND (arg1
, 0);
9726 if (TREE_OPERAND (cref0
, 1) == TREE_OPERAND (cref1
, 1))
9728 tree op0
= TREE_OPERAND (cref0
, 0);
9729 tree op1
= TREE_OPERAND (cref1
, 0);
9730 return fold_build2 (code
, type
,
9731 build_fold_addr_expr (op0
),
9732 build_fold_addr_expr (op1
));
9736 /* Optimize comparisons of strlen vs zero to a compare of the
9737 first character of the string vs zero. To wit,
9738 strlen(ptr) == 0 => *ptr == 0
9739 strlen(ptr) != 0 => *ptr != 0
9740 Other cases should reduce to one of these two (or a constant)
9741 due to the return value of strlen being unsigned. */
9742 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
9743 && integer_zerop (arg1
)
9744 && TREE_CODE (arg0
) == CALL_EXPR
)
9746 tree fndecl
= get_callee_fndecl (arg0
);
9750 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
9751 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STRLEN
9752 && (arglist
= TREE_OPERAND (arg0
, 1))
9753 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) == POINTER_TYPE
9754 && ! TREE_CHAIN (arglist
))
9756 tree iref
= build_fold_indirect_ref (TREE_VALUE (arglist
));
9757 return fold_build2 (code
, type
, iref
,
9758 build_int_cst (TREE_TYPE (iref
), 0));
9762 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9763 into a single range test. */
9764 if ((TREE_CODE (arg0
) == TRUNC_DIV_EXPR
9765 || TREE_CODE (arg0
) == EXACT_DIV_EXPR
)
9766 && TREE_CODE (arg1
) == INTEGER_CST
9767 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9768 && !integer_zerop (TREE_OPERAND (arg0
, 1))
9769 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
9770 && !TREE_OVERFLOW (arg1
))
9772 t1
= fold_div_compare (code
, type
, arg0
, arg1
);
9773 if (t1
!= NULL_TREE
)
9777 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
9778 && integer_zerop (arg1
)
9779 && tree_expr_nonzero_p (arg0
))
9781 tree res
= constant_boolean_node (code
==NE_EXPR
, type
);
9782 return omit_one_operand (type
, res
, arg0
);
9785 t1
= fold_relational_const (code
, type
, arg0
, arg1
);
9786 return t1
== NULL_TREE
? NULL_TREE
: t1
;
9788 case UNORDERED_EXPR
:
9796 if (TREE_CODE (arg0
) == REAL_CST
&& TREE_CODE (arg1
) == REAL_CST
)
9798 t1
= fold_relational_const (code
, type
, arg0
, arg1
);
9799 if (t1
!= NULL_TREE
)
9803 /* If the first operand is NaN, the result is constant. */
9804 if (TREE_CODE (arg0
) == REAL_CST
9805 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0
))
9806 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
9808 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
9811 return omit_one_operand (type
, t1
, arg1
);
9814 /* If the second operand is NaN, the result is constant. */
9815 if (TREE_CODE (arg1
) == REAL_CST
9816 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
9817 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
9819 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
9822 return omit_one_operand (type
, t1
, arg0
);
9825 /* Simplify unordered comparison of something with itself. */
9826 if ((code
== UNLE_EXPR
|| code
== UNGE_EXPR
|| code
== UNEQ_EXPR
)
9827 && operand_equal_p (arg0
, arg1
, 0))
9828 return constant_boolean_node (1, type
);
9830 if (code
== LTGT_EXPR
9831 && !flag_trapping_math
9832 && operand_equal_p (arg0
, arg1
, 0))
9833 return constant_boolean_node (0, type
);
9835 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9837 tree targ0
= strip_float_extensions (arg0
);
9838 tree targ1
= strip_float_extensions (arg1
);
9839 tree newtype
= TREE_TYPE (targ0
);
9841 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
9842 newtype
= TREE_TYPE (targ1
);
9844 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
9845 return fold_build2 (code
, type
, fold_convert (newtype
, targ0
),
9846 fold_convert (newtype
, targ1
));
9852 /* When pedantic, a compound expression can be neither an lvalue
9853 nor an integer constant expression. */
9854 if (TREE_SIDE_EFFECTS (arg0
) || TREE_CONSTANT (arg1
))
9856 /* Don't let (0, 0) be null pointer constant. */
9857 tem
= integer_zerop (arg1
) ? build1 (NOP_EXPR
, type
, arg1
)
9858 : fold_convert (type
, arg1
);
9859 return pedantic_non_lvalue (tem
);
9863 return build_complex (type
, arg0
, arg1
);
9867 /* An ASSERT_EXPR should never be passed to fold_binary. */
9872 } /* switch (code) */
9875 /* Callback for walk_tree, looking for LABEL_EXPR.
9876 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
9877 Do not check the sub-tree of GOTO_EXPR. */
/* NOTE(review): this extract is lossy -- the return-type line, braces and the
   switch-case bodies are missing; only the signature and the dispatch on
   TREE_CODE (*tp) are visible here.  The embedded numbers (9875, 9880, ...)
   are original-file line numbers fused in by the extraction tool, not code. */
9880 contains_label_1 (tree
*tp
,
9882 void *data ATTRIBUTE_UNUSED
)
/* Dispatch on the tree code of the node being walked; presumably the
   LABEL_EXPR case returns *tp and GOTO_EXPR prunes the walk -- TODO confirm
   against the full source. */
9884 switch (TREE_CODE (*tp
))
9896 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
9897 accessible from outside the sub-tree.  Returns true if such a label is
9898 found, false otherwise.  (The walk_tree result is compared against
   NULL_TREE, so this is a boolean predicate, not a tree-returning one.) */
9901 contains_label_p (tree st
)
/* Walk the whole sub-tree with contains_label_1 as the callback; a non-NULL
   walk result means a label was found. */
9903 return (walk_tree (&st
, contains_label_1
, NULL
, NULL
) != NULL_TREE
);
9906 /* Fold a ternary expression of code CODE and type TYPE with operands
9907 OP0, OP1, and OP2. Return the folded expression if folding is
9908 successful. Otherwise, return NULL_TREE. */
/* NOTE(review): extraction artifacts throughout this function -- the switch
   (code) statement, its case labels (COND_EXPR, CALL_EXPR, ...), braces and
   several statements are missing from this extract, and original-file line
   numbers are fused into the text.  Comments below annotate only what is
   visible. */
9911 fold_ternary (enum tree_code code
, tree type
, tree op0
, tree op1
, tree op2
)
9914 tree arg0
= NULL_TREE
, arg1
= NULL_TREE
;
9915 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
/* Sanity check: this routine only handles expression codes with exactly
   three operands. */
9917 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
9918 && TREE_CODE_LENGTH (code
) == 3);
9920 /* Strip any conversions that don't change the mode. This is safe
9921 for every expression, except for a comparison expression because
9922 its signedness is derived from its operands. So, in the latter
9923 case, only strip conversions that don't change the signedness.
9925 Note that this is done as an internal manipulation within the
9926 constant folder, in order to find the simplest representation of
9927 the arguments so that their form can be studied. In any cases,
9928 the appropriate type conversions should be put back in the tree
9929 that will get out of the constant folder. */
/* Presumably the COMPONENT_REF case: pick the matching field out of a
   CONSTRUCTOR -- TODO confirm; the case label is missing from this extract. */
9945 if (TREE_CODE (arg0
) == CONSTRUCTOR
9946 && ! type_contains_placeholder_p (TREE_TYPE (arg0
)))
9948 unsigned HOST_WIDE_INT idx
;
9950 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0
), idx
, field
, value
)
9957 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
9958 so all simple results must be passed through pedantic_non_lvalue. */
/* COND_EXPR with a constant condition: select the live branch. */
9959 if (TREE_CODE (arg0
) == INTEGER_CST
)
9961 tree unused_op
= integer_zerop (arg0
) ? op1
: op2
;
9962 tem
= integer_zerop (arg0
) ? op2
: op1
;
9963 /* Only optimize constant conditions when the selected branch
9964 has the same type as the COND_EXPR. This avoids optimizing
9965 away "c ? x : throw", where the throw has a void type.
9966 Avoid throwing away that operand which contains label. */
9967 if ((!TREE_SIDE_EFFECTS (unused_op
)
9968 || !contains_label_p (unused_op
))
9969 && (! VOID_TYPE_P (TREE_TYPE (tem
))
9970 || VOID_TYPE_P (type
)))
9971 return pedantic_non_lvalue (tem
);
/* A ? B : B --> B (the condition's value no longer matters, but its side
   effects must be kept; pedantic_omit_one_operand handles that). */
9974 if (operand_equal_p (arg1
, op2
, 0))
9975 return pedantic_omit_one_operand (type
, arg1
, arg0
);
9977 /* If we have A op B ? A : C, we may be able to convert this to a
9978 simpler expression, depending on the operation and the values
9979 of B and C. Signed zeros prevent all of these transformations,
9980 for reasons given above each one.
9982 Also try swapping the arguments and inverting the conditional. */
9983 if (COMPARISON_CLASS_P (arg0
)
9984 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
9985 arg1
, TREE_OPERAND (arg0
, 1))
9986 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1
))))
9988 tem
= fold_cond_expr_with_comparison (type
, arg0
, op1
, op2
);
/* Same transformation with the arms swapped and the comparison inverted. */
9993 if (COMPARISON_CLASS_P (arg0
)
9994 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
9996 TREE_OPERAND (arg0
, 1))
9997 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2
))))
9999 tem
= invert_truthvalue (arg0
);
10000 if (COMPARISON_CLASS_P (tem
))
10002 tem
= fold_cond_expr_with_comparison (type
, tem
, op2
, op1
);
10008 /* If the second operand is simpler than the third, swap them
10009 since that produces better jump optimization results. */
10010 if (truth_value_p (TREE_CODE (arg0
))
10011 && tree_swap_operands_p (op1
, op2
, false))
10013 /* See if this can be inverted. If it can't, possibly because
10014 it was a floating-point inequality comparison, don't do
   the swap (the original continuation of this comment is missing from
   this extract). */
10016 tem
= invert_truthvalue (arg0
);
10018 if (TREE_CODE (tem
) != TRUTH_NOT_EXPR
)
10019 return fold_build3 (code
, type
, tem
, op2
, op1
);
10022 /* Convert A ? 1 : 0 to simply A. */
10023 if (integer_onep (op1
)
10024 && integer_zerop (op2
)
10025 /* If we try to convert OP0 to our type, the
10026 call to fold will try to move the conversion inside
10027 a COND, which will recurse. In that case, the COND_EXPR
10028 is probably the best choice, so leave it alone. */
10029 && type
== TREE_TYPE (arg0
))
10030 return pedantic_non_lvalue (arg0
);
10032 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
10033 over COND_EXPR in cases such as floating point comparisons. */
10034 if (integer_zerop (op1
)
10035 && integer_onep (op2
)
10036 && truth_value_p (TREE_CODE (arg0
)))
10037 return pedantic_non_lvalue (fold_convert (type
,
10038 invert_truthvalue (arg0
)));
10040 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
10041 if (TREE_CODE (arg0
) == LT_EXPR
10042 && integer_zerop (TREE_OPERAND (arg0
, 1))
10043 && integer_zerop (op2
)
10044 && (tem
= sign_bit_p (TREE_OPERAND (arg0
, 0), arg1
)))
10045 return fold_convert (type
, fold_build2 (BIT_AND_EXPR
,
10046 TREE_TYPE (tem
), tem
, arg1
));
10048 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
10049 already handled above. */
10050 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10051 && integer_onep (TREE_OPERAND (arg0
, 1))
10052 && integer_zerop (op2
)
10053 && integer_pow2p (arg1
))
10055 tree tem
= TREE_OPERAND (arg0
, 0);
10057 if (TREE_CODE (tem
) == RSHIFT_EXPR
10058 && TREE_CODE (TREE_OPERAND (tem
, 1)) == INTEGER_CST
10059 && (unsigned HOST_WIDE_INT
) tree_log2 (arg1
) ==
10060 TREE_INT_CST_LOW (TREE_OPERAND (tem
, 1)))
10061 return fold_build2 (BIT_AND_EXPR
, type
,
10062 TREE_OPERAND (tem
, 0), arg1
);
10065 /* A & N ? N : 0 is simply A & N if N is a power of two. This
10066 is probably obsolete because the first operand should be a
10067 truth value (that's why we have the two cases above), but let's
10068 leave it in until we can confirm this for all front-ends. */
10069 if (integer_zerop (op2
)
10070 && TREE_CODE (arg0
) == NE_EXPR
10071 && integer_zerop (TREE_OPERAND (arg0
, 1))
10072 && integer_pow2p (arg1
)
10073 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
10074 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
10075 arg1
, OEP_ONLY_CONST
))
10076 return pedantic_non_lvalue (fold_convert (type
,
10077 TREE_OPERAND (arg0
, 0)));
10079 /* Convert A ? B : 0 into A && B if A and B are truth values. */
10080 if (integer_zerop (op2
)
10081 && truth_value_p (TREE_CODE (arg0
))
10082 && truth_value_p (TREE_CODE (arg1
)))
10083 return fold_build2 (TRUTH_ANDIF_EXPR
, type
, arg0
, arg1
);
10085 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
10086 if (integer_onep (op2
)
10087 && truth_value_p (TREE_CODE (arg0
))
10088 && truth_value_p (TREE_CODE (arg1
)))
10090 /* Only perform transformation if ARG0 is easily inverted. */
10091 tem
= invert_truthvalue (arg0
);
10092 if (TREE_CODE (tem
) != TRUTH_NOT_EXPR
)
10093 return fold_build2 (TRUTH_ORIF_EXPR
, type
, tem
, arg1
);
10096 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
10097 if (integer_zerop (arg1
)
10098 && truth_value_p (TREE_CODE (arg0
))
10099 && truth_value_p (TREE_CODE (op2
)))
10101 /* Only perform transformation if ARG0 is easily inverted. */
10102 tem
= invert_truthvalue (arg0
);
10103 if (TREE_CODE (tem
) != TRUTH_NOT_EXPR
)
10104 return fold_build2 (TRUTH_ANDIF_EXPR
, type
, tem
, op2
);
10107 /* Convert A ? 1 : B into A || B if A and B are truth values. */
10108 if (integer_onep (arg1
)
10109 && truth_value_p (TREE_CODE (arg0
))
10110 && truth_value_p (TREE_CODE (op2
)))
10111 return fold_build2 (TRUTH_ORIF_EXPR
, type
, arg0
, op2
);
/* Presumably the CALL_EXPR case starts here (the case label is missing
   from this extract): fold calls to recognized built-in functions. */
10116 /* Check for a built-in function. */
10117 if (TREE_CODE (op0
) == ADDR_EXPR
10118 && TREE_CODE (TREE_OPERAND (op0
, 0)) == FUNCTION_DECL
10119 && DECL_BUILT_IN (TREE_OPERAND (op0
, 0)))
10120 return fold_builtin (TREE_OPERAND (op0
, 0), op1
, false);
10123 case BIT_FIELD_REF
:
/* Extract a single element from a constant vector when the bit-field
   selects exactly one element-sized, element-aligned chunk. */
10124 if (TREE_CODE (arg0
) == VECTOR_CST
10125 && type
== TREE_TYPE (TREE_TYPE (arg0
))
10126 && host_integerp (arg1
, 1)
10127 && host_integerp (op2
, 1))
10129 unsigned HOST_WIDE_INT width
= tree_low_cst (arg1
, 1);
10130 unsigned HOST_WIDE_INT idx
= tree_low_cst (op2
, 1);
10133 && simple_cst_equal (arg1
, TYPE_SIZE (type
)) == 1
10134 && (idx
% width
) == 0
10135 && (idx
= idx
/ width
)
10136 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)))
10138 tree elements
= TREE_VECTOR_CST_ELTS (arg0
);
/* Walk the element TREE_LIST to the idx-th entry. */
10139 while (idx
-- > 0 && elements
)
10140 elements
= TREE_CHAIN (elements
);
10142 return TREE_VALUE (elements
);
/* Ran off the end of the element list: missing elements are zero. */
10144 return fold_convert (type
, integer_zero_node
);
10151 } /* switch (code) */
10154 /* Perform constant folding and related simplification of EXPR.
10155 The related simplifications include x*1 => x, x*0 => 0, etc.,
10156 and application of the associative law.
10157 NOP_EXPR conversions may be removed freely (as long as we
10158 are careful not to change the type of the overall expression).
10159 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
10160 but we can constant-fold them if they have constant operands. */
/* When fold-checking is enabled, the real worker is renamed fold_1 and a
   checksum-verifying wrapper (defined further below) takes the name fold. */
10162 #ifdef ENABLE_FOLD_CHECKING
10163 # define fold(x) fold_1 (x)
10164 static tree
fold_1 (tree
);
/* NOTE(review): the #endif, the function header line and several case labels
   are missing from this extract; the embedded numbers are original-file line
   numbers fused in by the extraction tool. */
10170 const tree t
= expr
;
10171 enum tree_code code
= TREE_CODE (t
);
10172 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
10175 /* Return right away if a constant. */
10176 if (kind
== tcc_constant
)
/* Dispatch by arity to the specialized folders; each returns NULL_TREE on
   "no simplification", in which case the original expression is returned. */
10179 if (IS_EXPR_CODE_CLASS (kind
))
10181 tree type
= TREE_TYPE (t
);
10182 tree op0
, op1
, op2
;
10184 switch (TREE_CODE_LENGTH (code
))
10187 op0
= TREE_OPERAND (t
, 0);
10188 tem
= fold_unary (code
, type
, op0
);
10189 return tem
? tem
: expr
;
10191 op0
= TREE_OPERAND (t
, 0);
10192 op1
= TREE_OPERAND (t
, 1);
10193 tem
= fold_binary (code
, type
, op0
, op1
);
10194 return tem
? tem
: expr
;
10196 op0
= TREE_OPERAND (t
, 0);
10197 op1
= TREE_OPERAND (t
, 1);
10198 op2
= TREE_OPERAND (t
, 2);
10199 tem
= fold_ternary (code
, type
, op0
, op1
, op2
);
10200 return tem
? tem
: expr
;
/* Presumably the CONST_DECL case: fold the declaration's initial value --
   TODO confirm; the case label is missing from this extract. */
10209 return fold (DECL_INITIAL (t
));
10213 } /* switch (code) */
10216 #ifdef ENABLE_FOLD_CHECKING
/* Forward declarations for the fold-checking machinery below. */
10219 static void fold_checksum_tree (tree
, struct md5_ctx
*, htab_t
);
10220 static void fold_check_failed (tree
, tree
);
10221 void print_fold_checksum (tree
);
10223 /* When --enable-checking=fold, compute a digest of expr before
10224 and after actual fold call to see if fold did not accidentally
10225 change original expr. */
/* NOTE(review): the wrapper's function header, the htab_t/ret declarations,
   htab_delete/htab_empty calls and the final return are missing from this
   extract; only the checksum-compare skeleton is visible. */
10231 struct md5_ctx ctx
;
10232 unsigned char checksum_before
[16], checksum_after
[16];
/* Hash table of already-visited nodes so shared sub-trees are digested
   only once. */
10235 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
10236 md5_init_ctx (&ctx
);
10237 fold_checksum_tree (expr
, &ctx
, ht
);
10238 md5_finish_ctx (&ctx
, checksum_before
);
/* Run the real folder. */
10241 ret
= fold_1 (expr
);
/* Digest the input expression again; fold must not have mutated it. */
10243 md5_init_ctx (&ctx
);
10244 fold_checksum_tree (expr
, &ctx
, ht
);
10245 md5_finish_ctx (&ctx
, checksum_after
);
10248 if (memcmp (checksum_before
, checksum_after
, 16))
10249 fold_check_failed (expr
, ret
);
/* Debugging aid: print the MD5 digest of EXPR (as computed by
   fold_checksum_tree) to stderr as 32 lowercase hex digits plus newline.
   NOTE(review): the return-type line, the htab_t declaration and braces are
   missing from this extract. */
10255 print_fold_checksum (tree expr
)
10257 struct md5_ctx ctx
;
10258 unsigned char checksum
[16], cnt
;
10261 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
10262 md5_init_ctx (&ctx
);
10263 fold_checksum_tree (expr
, &ctx
, ht
);
10264 md5_finish_ctx (&ctx
, checksum
);
/* Emit each of the 16 digest bytes as two hex digits. */
10266 for (cnt
= 0; cnt
< 16; ++cnt
)
10267 fprintf (stderr
, "%02x", checksum
[cnt
]);
10268 putc ('\n', stderr
);
/* Report (via internal_error, which aborts compilation) that fold mutated
   its input tree.  Both parameters are currently unused; they exist so a
   debugger stopped here can inspect the offending trees. */
10272 fold_check_failed (tree expr ATTRIBUTE_UNUSED
, tree ret ATTRIBUTE_UNUSED
)
10274 internal_error ("fold check: original tree changed by fold");
/* Feed a structural digest of the tree EXPR into the MD5 context CTX,
   recursing through types, chains and operands.  HT records pointers already
   visited so shared/recursive sub-trees are digested only once.  Fields that
   fold is allowed to modify (assembler names, cached type values, ...) are
   temporarily cleared on a stack copy before hashing.
   NOTE(review): braces, several case labels, the recursive_label target and
   the restore-from-buf code are missing from this lossy extract. */
10278 fold_checksum_tree (tree expr
, struct md5_ctx
*ctx
, htab_t ht
)
10281 enum tree_code code
;
10282 char buf
[sizeof (struct tree_function_decl
)];
/* buf must be big enough to hold a copy of any node we scrub below. */
10287 gcc_assert ((sizeof (struct tree_exp
) + 5 * sizeof (tree
)
10288 <= sizeof (struct tree_function_decl
))
10289 && sizeof (struct tree_type
) <= sizeof (struct tree_function_decl
));
/* Already-visited nodes are skipped via the pointer hash table. */
10292 slot
= htab_find_slot (ht
, expr
, INSERT
);
10296 code
= TREE_CODE (expr
);
10297 if (TREE_CODE_CLASS (code
) == tcc_declaration
10298 && DECL_ASSEMBLER_NAME_SET_P (expr
))
10300 /* Allow DECL_ASSEMBLER_NAME to be modified. */
10301 memcpy (buf
, expr
, tree_size (expr
))
;
10303 SET_DECL_ASSEMBLER_NAME (expr
, NULL
);
10305 else if (TREE_CODE_CLASS (code
) == tcc_type
10306 && (TYPE_POINTER_TO (expr
) || TYPE_REFERENCE_TO (expr
)
10307 || TYPE_CACHED_VALUES_P (expr
)
10308 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr
)))
10310 /* Allow these fields to be modified. */
10311 memcpy (buf
, expr
, tree_size (expr
));
10313 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr
) = 0;
10314 TYPE_POINTER_TO (expr
) = NULL
;
10315 TYPE_REFERENCE_TO (expr
) = NULL
;
10316 if (TYPE_CACHED_VALUES_P (expr
))
10318 TYPE_CACHED_VALUES_P (expr
) = 0;
10319 TYPE_CACHED_VALUES (expr
) = NULL
;
/* Hash the raw node bytes, then recurse into referenced trees. */
10322 md5_process_bytes (expr
, tree_size (expr
), ctx
);
10323 fold_checksum_tree (TREE_TYPE (expr
), ctx
, ht
);
10324 if (TREE_CODE_CLASS (code
) != tcc_type
10325 && TREE_CODE_CLASS (code
) != tcc_declaration
10326 && code
!= TREE_LIST
)
10327 fold_checksum_tree (TREE_CHAIN (expr
), ctx
, ht
);
10328 switch (TREE_CODE_CLASS (code
))
10334 md5_process_bytes (TREE_STRING_POINTER (expr
),
10335 TREE_STRING_LENGTH (expr
), ctx
);
10338 fold_checksum_tree (TREE_REALPART (expr
), ctx
, ht
);
10339 fold_checksum_tree (TREE_IMAGPART (expr
), ctx
, ht
);
10342 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr
), ctx
, ht
);
10348 case tcc_exceptional
:
/* TREE_LIST: hash purpose/value, then iterate down the chain via goto
   (tail-recursion elimination by hand). */
10352 fold_checksum_tree (TREE_PURPOSE (expr
), ctx
, ht
);
10353 fold_checksum_tree (TREE_VALUE (expr
), ctx
, ht
);
10354 expr
= TREE_CHAIN (expr
);
10355 goto recursive_label
;
10358 for (i
= 0; i
< TREE_VEC_LENGTH (expr
); ++i
)
10359 fold_checksum_tree (TREE_VEC_ELT (expr
, i
), ctx
, ht
);
10365 case tcc_expression
:
10366 case tcc_reference
:
10367 case tcc_comparison
:
10370 case tcc_statement
:
/* All operand-bearing expression classes: hash every operand. */
10371 len
= TREE_CODE_LENGTH (code
);
10372 for (i
= 0; i
< len
; ++i
)
10373 fold_checksum_tree (TREE_OPERAND (expr
, i
), ctx
, ht
);
10375 case tcc_declaration
:
10376 fold_checksum_tree (DECL_SIZE (expr
), ctx
, ht
);
10377 fold_checksum_tree (DECL_SIZE_UNIT (expr
), ctx
, ht
);
10378 fold_checksum_tree (DECL_NAME (expr
), ctx
, ht
);
10379 fold_checksum_tree (DECL_CONTEXT (expr
), ctx
, ht
);
10380 fold_checksum_tree (DECL_INITIAL (expr
), ctx
, ht
);
10381 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr
), ctx
, ht
);
10382 fold_checksum_tree (DECL_ATTRIBUTES (expr
), ctx
, ht
);
10383 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_WITH_VIS
))
10384 fold_checksum_tree (DECL_SECTION_NAME (expr
), ctx
, ht
);
10386 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_NON_COMMON
))
10388 fold_checksum_tree (DECL_VINDEX (expr
), ctx
, ht
);
10389 fold_checksum_tree (DECL_RESULT_FLD (expr
), ctx
, ht
);
10390 fold_checksum_tree (DECL_ARGUMENT_FLD (expr
), ctx
, ht
);
/* tcc_type: hash all type fields that fold must leave intact. */
10394 if (TREE_CODE (expr
) == ENUMERAL_TYPE
)
10395 fold_checksum_tree (TYPE_VALUES (expr
), ctx
, ht
);
10396 fold_checksum_tree (TYPE_SIZE (expr
), ctx
, ht
);
10397 fold_checksum_tree (TYPE_SIZE_UNIT (expr
), ctx
, ht
);
10398 fold_checksum_tree (TYPE_ATTRIBUTES (expr
), ctx
, ht
);
10399 fold_checksum_tree (TYPE_NAME (expr
), ctx
, ht
);
10400 if (INTEGRAL_TYPE_P (expr
)
10401 || SCALAR_FLOAT_TYPE_P (expr
))
10403 fold_checksum_tree (TYPE_MIN_VALUE (expr
), ctx
, ht
);
10404 fold_checksum_tree (TYPE_MAX_VALUE (expr
), ctx
, ht
);
10406 fold_checksum_tree (TYPE_MAIN_VARIANT (expr
), ctx
, ht
);
10407 if (TREE_CODE (expr
) == RECORD_TYPE
10408 || TREE_CODE (expr
) == UNION_TYPE
10409 || TREE_CODE (expr
) == QUAL_UNION_TYPE
)
10410 fold_checksum_tree (TYPE_BINFO (expr
), ctx
, ht
);
10411 fold_checksum_tree (TYPE_CONTEXT (expr
), ctx
, ht
);
10420 /* Fold a unary tree expression with code CODE of type TYPE with an
10421 operand OP0. Return a folded expression if successful. Otherwise,
10422 return a tree expression with code CODE of type TYPE with an
   operand OP0 (comment truncated in this extract). */
10426 fold_build1_stat (enum tree_code code
, tree type
, tree op0 MEM_STAT_DECL
)
/* NOTE(review): braces, the tem/ht declarations, #endif lines and the
   final return are missing from this lossy extract. */
10429 #ifdef ENABLE_FOLD_CHECKING
10430 unsigned char checksum_before
[16], checksum_after
[16];
10431 struct md5_ctx ctx
;
/* Digest OP0 before folding so mutation can be detected afterwards. */
10434 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
10435 md5_init_ctx (&ctx
);
10436 fold_checksum_tree (op0
, &ctx
, ht
);
10437 md5_finish_ctx (&ctx
, checksum_before
);
/* Try to fold; on failure (NULL) fall back to building the plain node. */
10441 tem
= fold_unary (code
, type
, op0
);
10443 tem
= build1_stat (code
, type
, op0 PASS_MEM_STAT
);
10445 #ifdef ENABLE_FOLD_CHECKING
10446 md5_init_ctx (&ctx
);
10447 fold_checksum_tree (op0
, &ctx
, ht
);
10448 md5_finish_ctx (&ctx
, checksum_after
);
10451 if (memcmp (checksum_before
, checksum_after
, 16))
10452 fold_check_failed (op0
, tem
);
10457 /* Fold a binary tree expression with code CODE of type TYPE with
10458 operands OP0 and OP1. Return a folded expression if successful.
10459 Otherwise, return a tree expression with code CODE of type TYPE
10460 with operands OP0 and OP1. */
/* NOTE(review): the MEM_STAT_DECL tail of the parameter list, braces, the
   tem/ht declarations, #endif lines and the final return are missing from
   this lossy extract. */
10463 fold_build2_stat (enum tree_code code
, tree type
, tree op0
, tree op1
10467 #ifdef ENABLE_FOLD_CHECKING
10468 unsigned char checksum_before_op0
[16],
10469 checksum_before_op1
[16],
10470 checksum_after_op0
[16],
10471 checksum_after_op1
[16];
10472 struct md5_ctx ctx
;
/* Digest both operands before folding. */
10475 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
10476 md5_init_ctx (&ctx
);
10477 fold_checksum_tree (op0
, &ctx
, ht
);
10478 md5_finish_ctx (&ctx
, checksum_before_op0
);
10481 md5_init_ctx (&ctx
);
10482 fold_checksum_tree (op1
, &ctx
, ht
);
10483 md5_finish_ctx (&ctx
, checksum_before_op1
);
/* Try to fold; on failure (NULL) fall back to building the plain node. */
10487 tem
= fold_binary (code
, type
, op0
, op1
);
10489 tem
= build2_stat (code
, type
, op0
, op1 PASS_MEM_STAT
);
/* Re-digest each operand and compare; fold must not mutate its inputs. */
10491 #ifdef ENABLE_FOLD_CHECKING
10492 md5_init_ctx (&ctx
);
10493 fold_checksum_tree (op0
, &ctx
, ht
);
10494 md5_finish_ctx (&ctx
, checksum_after_op0
);
10497 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
10498 fold_check_failed (op0
, tem
);
10500 md5_init_ctx (&ctx
);
10501 fold_checksum_tree (op1
, &ctx
, ht
);
10502 md5_finish_ctx (&ctx
, checksum_after_op1
);
10505 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
10506 fold_check_failed (op1
, tem
);
10511 /* Fold a ternary tree expression with code CODE of type TYPE with
10512 operands OP0, OP1, and OP2. Return a folded expression if
10513 successful. Otherwise, return a tree expression with code CODE of
10514 type TYPE with operands OP0, OP1, and OP2. */
/* NOTE(review): the MEM_STAT_DECL tail of the parameter list, braces, the
   tem/ht declarations, #endif lines and the final return are missing from
   this lossy extract. */
10517 fold_build3_stat (enum tree_code code
, tree type
, tree op0
, tree op1
, tree op2
10521 #ifdef ENABLE_FOLD_CHECKING
10522 unsigned char checksum_before_op0
[16],
10523 checksum_before_op1
[16],
10524 checksum_before_op2
[16],
10525 checksum_after_op0
[16],
10526 checksum_after_op1
[16],
10527 checksum_after_op2
[16];
10528 struct md5_ctx ctx
;
/* Digest all three operands before folding. */
10531 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
10532 md5_init_ctx (&ctx
);
10533 fold_checksum_tree (op0
, &ctx
, ht
);
10534 md5_finish_ctx (&ctx
, checksum_before_op0
);
10537 md5_init_ctx (&ctx
);
10538 fold_checksum_tree (op1
, &ctx
, ht
);
10539 md5_finish_ctx (&ctx
, checksum_before_op1
);
10542 md5_init_ctx (&ctx
);
10543 fold_checksum_tree (op2
, &ctx
, ht
);
10544 md5_finish_ctx (&ctx
, checksum_before_op2
);
/* Try to fold; on failure (NULL) fall back to building the plain node. */
10548 tem
= fold_ternary (code
, type
, op0
, op1
, op2
);
10550 tem
= build3_stat (code
, type
, op0
, op1
, op2 PASS_MEM_STAT
);
/* Re-digest each operand and compare; fold must not mutate its inputs. */
10552 #ifdef ENABLE_FOLD_CHECKING
10553 md5_init_ctx (&ctx
);
10554 fold_checksum_tree (op0
, &ctx
, ht
);
10555 md5_finish_ctx (&ctx
, checksum_after_op0
);
10558 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
10559 fold_check_failed (op0
, tem
);
10561 md5_init_ctx (&ctx
);
10562 fold_checksum_tree (op1
, &ctx
, ht
);
10563 md5_finish_ctx (&ctx
, checksum_after_op1
);
10566 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
10567 fold_check_failed (op1
, tem
);
10569 md5_init_ctx (&ctx
);
10570 fold_checksum_tree (op2
, &ctx
, ht
);
10571 md5_finish_ctx (&ctx
, checksum_after_op2
);
10574 if (memcmp (checksum_before_op2
, checksum_after_op2
, 16))
10575 fold_check_failed (op2
, tem
);
10580 /* Perform constant folding and related simplification of initializer
10581 expression EXPR. These behave identically to "fold_buildN" but ignore
10582 potential run-time traps and exceptions that fold must preserve. */
/* START_FOLD_INIT saves and clears the trap-related flags so fold may apply
   transformations that would otherwise be blocked; END_FOLD_INIT restores
   them.  NOTE(review): the flag_trapv = 0 line of START_FOLD_INIT appears to
   be missing from this lossy extract, as are the function return types,
   braces and result declarations below. */
10584 #define START_FOLD_INIT \
10585 int saved_signaling_nans = flag_signaling_nans;\
10586 int saved_trapping_math = flag_trapping_math;\
10587 int saved_rounding_math = flag_rounding_math;\
10588 int saved_trapv = flag_trapv;\
10589 flag_signaling_nans = 0;\
10590 flag_trapping_math = 0;\
10591 flag_rounding_math = 0;\
10594 #define END_FOLD_INIT \
10595 flag_signaling_nans = saved_signaling_nans;\
10596 flag_trapping_math = saved_trapping_math;\
10597 flag_rounding_math = saved_rounding_math;\
10598 flag_trapv = saved_trapv
/* Unary initializer folder: fold_build1 bracketed by the flag save/restore. */
10601 fold_build1_initializer (enum tree_code code
, tree type
, tree op
)
10606 result
= fold_build1 (code
, type
, op
);
/* Binary initializer folder. */
10613 fold_build2_initializer (enum tree_code code
, tree type
, tree op0
, tree op1
)
10618 result
= fold_build2 (code
, type
, op0
, op1
);
/* Ternary initializer folder. */
10625 fold_build3_initializer (enum tree_code code
, tree type
, tree op0
, tree op1
,
10631 result
= fold_build3 (code
, type
, op0
, op1
, op2
);
10637 #undef START_FOLD_INIT
10638 #undef END_FOLD_INIT
10640 /* Determine if first argument is a multiple of second argument. Return 0 if
10641 it is not, or we cannot easily determined it to be.
10643 An example of the sort of thing we care about (at this point; this routine
10644 could surely be made more general, and expanded to do what the *_DIV_EXPR's
10645 fold cases do now) is discovering that
10647 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10653 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
10655 This code also handles discovering that
10657 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10659 is a multiple of 8 so we don't have to worry about dealing with a
10660 possible remainder.
10662 Note that we *look* inside a SAVE_EXPR only to determine how it was
10663 calculated; it is not safe for fold to do much of anything else with the
10664 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
10665 at run time. For example, the latter example above *cannot* be implemented
10666 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
10667 evaluation time of the original SAVE_EXPR is not necessarily the same at
10668 the time the new expression is evaluated. The only optimization of this
10669 sort that would be valid is changing
10671 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
10675 SAVE_EXPR (I) * SAVE_EXPR (J)
10677 (where the same SAVE_EXPR (J) is used in the original and the
10678 transformed version). */
/* NOTE(review): the return type, braces, case labels (BIT_AND_EXPR,
   MULT_EXPR, PLUS/MINUS, LSHIFT_EXPR, NOP_EXPR, SAVE_EXPR, INTEGER_CST,
   presumably) and some statements are missing from this lossy extract. */
10681 multiple_of_p (tree type
, tree top
, tree bottom
)
/* Trivially, anything is a multiple of itself. */
10683 if (operand_equal_p (top
, bottom
, 0))
/* Only integer types are handled. */
10686 if (TREE_CODE (type
) != INTEGER_TYPE
)
10689 switch (TREE_CODE (top
))
10692 /* Bitwise and provides a power of two multiple. If the mask is
10693 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
10694 if (!integer_pow2p (bottom
))
/* For a product, either factor being a multiple suffices. */
10699 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
10700 || multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
/* For a sum/difference, both terms must be multiples. */
10704 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
10705 && multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
/* Left shift by a constant: rewrite as multiplication by 1<<N and retry. */
10708 if (TREE_CODE (TREE_OPERAND (top
, 1)) == INTEGER_CST
)
10712 op1
= TREE_OPERAND (top
, 1);
10713 /* const_binop may not detect overflow correctly,
10714 so check for it explicitly here. */
10715 if (TYPE_PRECISION (TREE_TYPE (size_one_node
))
10716 > TREE_INT_CST_LOW (op1
)
10717 && TREE_INT_CST_HIGH (op1
) == 0
10718 && 0 != (t1
= fold_convert (type
,
10719 const_binop (LSHIFT_EXPR
,
10722 && ! TREE_OVERFLOW (t1
))
10723 return multiple_of_p (type
, t1
, bottom
);
10728 /* Can't handle conversions from non-integral or wider integral type. */
10729 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top
, 0))) != INTEGER_TYPE
)
10730 || (TYPE_PRECISION (type
)
10731 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top
, 0)))))
10734 /* .. fall through ... */
/* Narrowing-safe conversions and SAVE_EXPR: look through to the operand. */
10737 return multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
);
/* INTEGER_CST case: decide by exact division; reject negative values in
   unsigned types where the modulus would be computed incorrectly. */
10740 if (TREE_CODE (bottom
) != INTEGER_CST
10741 || (TYPE_UNSIGNED (type
)
10742 && (tree_int_cst_sgn (top
) < 0
10743 || tree_int_cst_sgn (bottom
) < 0)))
10745 return integer_zerop (const_binop (TRUNC_MOD_EXPR
,
10753 /* Return true if `t' is known to be non-negative. */
/* NOTE(review): the return type, braces, many case labels (ABS_EXPR,
   INTEGER_CST, REAL_CST, PLUS_EXPR, MULT_EXPR, division/modulo codes,
   NOP_EXPR, TARGET_EXPR, CALL_EXPR, presumably) and the final returns are
   missing from this lossy extract; the embedded numbers are original-file
   line numbers fused in by the extraction tool. */
10756 tree_expr_nonnegative_p (tree t
)
/* Unsigned types are trivially non-negative. */
10758 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
10761 switch (TREE_CODE (t
))
10764 /* We can't return 1 if flag_wrapv is set because
10765 ABS_EXPR<INT_MIN> = INT_MIN. */
10766 if (!(flag_wrapv
&& INTEGRAL_TYPE_P (TREE_TYPE (t
))))
/* Integer constant: check its sign directly. */
10771 return tree_int_cst_sgn (t
) >= 0;
/* Real constant: check the sign of the REAL_VALUE. */
10774 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
/* Addition: for floats, the sum of two non-negatives is non-negative.
   (Not valid for wrapping signed integers.) */
10777 if (FLOAT_TYPE_P (TREE_TYPE (t
)))
10778 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0))
10779 && tree_expr_nonnegative_p (TREE_OPERAND (t
, 1));
10781 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
10782 both unsigned and at least 2 bits shorter than the result. */
10783 if (TREE_CODE (TREE_TYPE (t
)) == INTEGER_TYPE
10784 && TREE_CODE (TREE_OPERAND (t
, 0)) == NOP_EXPR
10785 && TREE_CODE (TREE_OPERAND (t
, 1)) == NOP_EXPR
)
10787 tree inner1
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 0), 0));
10788 tree inner2
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 1), 0));
10789 if (TREE_CODE (inner1
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner1
)
10790 && TREE_CODE (inner2
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner2
))
10792 unsigned int prec
= MAX (TYPE_PRECISION (inner1
),
10793 TYPE_PRECISION (inner2
)) + 1;
10794 return prec
< TYPE_PRECISION (TREE_TYPE (t
));
/* Multiplication. */
10800 if (FLOAT_TYPE_P (TREE_TYPE (t
)))
10802 /* x * x for floating point x is always non-negative. */
10803 if (operand_equal_p (TREE_OPERAND (t
, 0), TREE_OPERAND (t
, 1), 0))
10805 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0))
10806 && tree_expr_nonnegative_p (TREE_OPERAND (t
, 1));
10809 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
10810 both unsigned and their total bits is shorter than the result. */
10811 if (TREE_CODE (TREE_TYPE (t
)) == INTEGER_TYPE
10812 && TREE_CODE (TREE_OPERAND (t
, 0)) == NOP_EXPR
10813 && TREE_CODE (TREE_OPERAND (t
, 1)) == NOP_EXPR
)
10815 tree inner1
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 0), 0));
10816 tree inner2
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 1), 0));
10817 if (TREE_CODE (inner1
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner1
)
10818 && TREE_CODE (inner2
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner2
))
10819 return TYPE_PRECISION (inner1
) + TYPE_PRECISION (inner2
)
10820 < TYPE_PRECISION (TREE_TYPE (t
));
/* MIN/MAX-like case (label missing from extract): either operand
   non-negative suffices -- presumably MAX_EXPR; TODO confirm. */
10826 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0))
10827 || tree_expr_nonnegative_p (TREE_OPERAND (t
, 1));
/* Division: non-negative over non-negative. */
10833 case TRUNC_DIV_EXPR
:
10834 case CEIL_DIV_EXPR
:
10835 case FLOOR_DIV_EXPR
:
10836 case ROUND_DIV_EXPR
:
10837 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0))
10838 && tree_expr_nonnegative_p (TREE_OPERAND (t
, 1));
10840 case TRUNC_MOD_EXPR
:
10841 case CEIL_MOD_EXPR
:
10842 case FLOOR_MOD_EXPR
:
10843 case ROUND_MOD_EXPR
:
10845 case NON_LVALUE_EXPR
:
/* Pass-through wrappers: look at the wrapped operand. */
10847 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0));
10849 case COMPOUND_EXPR
:
/* The value of a compound expression is its second operand. */
10851 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 1));
/* BIND_EXPR (label missing from extract): value is the last expression
   of the body -- TODO confirm. */
10854 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t
, 1)));
/* COND_EXPR (label missing from extract): both arms must be
   non-negative. */
10857 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 1))
10858 && tree_expr_nonnegative_p (TREE_OPERAND (t
, 2));
/* NOP_EXPR/conversion case: reason about inner vs. outer types. */
10862 tree inner_type
= TREE_TYPE (TREE_OPERAND (t
, 0));
10863 tree outer_type
= TREE_TYPE (t
);
10865 if (TREE_CODE (outer_type
) == REAL_TYPE
)
10867 if (TREE_CODE (inner_type
) == REAL_TYPE
)
10868 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0));
10869 if (TREE_CODE (inner_type
) == INTEGER_TYPE
)
10871 if (TYPE_UNSIGNED (inner_type
))
10873 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0));
10876 else if (TREE_CODE (outer_type
) == INTEGER_TYPE
)
10878 if (TREE_CODE (inner_type
) == REAL_TYPE
)
10879 return tree_expr_nonnegative_p (TREE_OPERAND (t
,0));
10880 if (TREE_CODE (inner_type
) == INTEGER_TYPE
)
/* Widening from unsigned zero-extends, so the result is non-negative. */
10881 return TYPE_PRECISION (inner_type
) < TYPE_PRECISION (outer_type
)
10882 && TYPE_UNSIGNED (inner_type
);
/* TARGET_EXPR case: inspect the initializer that fills the slot. */
10889 tree temp
= TARGET_EXPR_SLOT (t
);
10890 t
= TARGET_EXPR_INITIAL (t
);
10892 /* If the initializer is non-void, then it's a normal expression
10893 that will be assigned to the slot. */
10894 if (!VOID_TYPE_P (t
))
10895 return tree_expr_nonnegative_p (t
);
10897 /* Otherwise, the initializer sets the slot in some way. One common
10898 way is an assignment statement at the end of the initializer. */
10901 if (TREE_CODE (t
) == BIND_EXPR
)
10902 t
= expr_last (BIND_EXPR_BODY (t
));
10903 else if (TREE_CODE (t
) == TRY_FINALLY_EXPR
10904 || TREE_CODE (t
) == TRY_CATCH_EXPR
)
10905 t
= expr_last (TREE_OPERAND (t
, 0));
10906 else if (TREE_CODE (t
) == STATEMENT_LIST
)
/* If the last statement assigns the slot, its RHS decides the sign. */
10911 if (TREE_CODE (t
) == MODIFY_EXPR
10912 && TREE_OPERAND (t
, 0) == temp
)
10913 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 1));
/* CALL_EXPR case: recognize built-ins with known-non-negative results. */
10920 tree fndecl
= get_callee_fndecl (t
);
10921 tree arglist
= TREE_OPERAND (t
, 1);
10922 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
10923 switch (DECL_FUNCTION_CODE (fndecl
))
10925 #define CASE_BUILTIN_F(BUILT_IN_FN) \
10926 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
10927 #define CASE_BUILTIN_I(BUILT_IN_FN) \
10928 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
/* These built-ins are always non-negative regardless of arguments. */
10930 CASE_BUILTIN_F (BUILT_IN_ACOS
)
10931 CASE_BUILTIN_F (BUILT_IN_ACOSH
)
10932 CASE_BUILTIN_F (BUILT_IN_CABS
)
10933 CASE_BUILTIN_F (BUILT_IN_COSH
)
10934 CASE_BUILTIN_F (BUILT_IN_ERFC
)
10935 CASE_BUILTIN_F (BUILT_IN_EXP
)
10936 CASE_BUILTIN_F (BUILT_IN_EXP10
)
10937 CASE_BUILTIN_F (BUILT_IN_EXP2
)
10938 CASE_BUILTIN_F (BUILT_IN_FABS
)
10939 CASE_BUILTIN_F (BUILT_IN_FDIM
)
10940 CASE_BUILTIN_F (BUILT_IN_HYPOT
)
10941 CASE_BUILTIN_F (BUILT_IN_POW10
)
10942 CASE_BUILTIN_I (BUILT_IN_FFS
)
10943 CASE_BUILTIN_I (BUILT_IN_PARITY
)
10944 CASE_BUILTIN_I (BUILT_IN_POPCOUNT
)
10948 CASE_BUILTIN_F (BUILT_IN_SQRT
)
10949 /* sqrt(-0.0) is -0.0. */
10950 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t
))))
10952 return tree_expr_nonnegative_p (TREE_VALUE (arglist
));
/* These are sign-preserving: non-negative iff their first argument is. */
10954 CASE_BUILTIN_F (BUILT_IN_ASINH
)
10955 CASE_BUILTIN_F (BUILT_IN_ATAN
)
10956 CASE_BUILTIN_F (BUILT_IN_ATANH
)
10957 CASE_BUILTIN_F (BUILT_IN_CBRT
)
10958 CASE_BUILTIN_F (BUILT_IN_CEIL
)
10959 CASE_BUILTIN_F (BUILT_IN_ERF
)
10960 CASE_BUILTIN_F (BUILT_IN_EXPM1
)
10961 CASE_BUILTIN_F (BUILT_IN_FLOOR
)
10962 CASE_BUILTIN_F (BUILT_IN_FMOD
)
10963 CASE_BUILTIN_F (BUILT_IN_FREXP
)
10964 CASE_BUILTIN_F (BUILT_IN_LCEIL
)
10965 CASE_BUILTIN_F (BUILT_IN_LDEXP
)
10966 CASE_BUILTIN_F (BUILT_IN_LFLOOR
)
10967 CASE_BUILTIN_F (BUILT_IN_LLCEIL
)
10968 CASE_BUILTIN_F (BUILT_IN_LLFLOOR
)
10969 CASE_BUILTIN_F (BUILT_IN_LLRINT
)
10970 CASE_BUILTIN_F (BUILT_IN_LLROUND
)
10971 CASE_BUILTIN_F (BUILT_IN_LRINT
)
10972 CASE_BUILTIN_F (BUILT_IN_LROUND
)
10973 CASE_BUILTIN_F (BUILT_IN_MODF
)
10974 CASE_BUILTIN_F (BUILT_IN_NEARBYINT
)
10975 CASE_BUILTIN_F (BUILT_IN_POW
)
10976 CASE_BUILTIN_F (BUILT_IN_RINT
)
10977 CASE_BUILTIN_F (BUILT_IN_ROUND
)
10978 CASE_BUILTIN_F (BUILT_IN_SIGNBIT
)
10979 CASE_BUILTIN_F (BUILT_IN_SINH
)
10980 CASE_BUILTIN_F (BUILT_IN_TANH
)
10981 CASE_BUILTIN_F (BUILT_IN_TRUNC
)
10982 /* True if the 1st argument is nonnegative. */
10983 return tree_expr_nonnegative_p (TREE_VALUE (arglist
));
10985 CASE_BUILTIN_F (BUILT_IN_FMAX
)
10986 /* True if the 1st OR 2nd arguments are nonnegative. */
10987 return tree_expr_nonnegative_p (TREE_VALUE (arglist
))
10988 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist
)));
10990 CASE_BUILTIN_F (BUILT_IN_FMIN
)
10991 /* True if the 1st AND 2nd arguments are nonnegative. */
10992 return tree_expr_nonnegative_p (TREE_VALUE (arglist
))
10993 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist
)));
10995 CASE_BUILTIN_F (BUILT_IN_COPYSIGN
)
10996 /* True if the 2nd argument is nonnegative. */
10997 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist
)));
11001 #undef CASE_BUILTIN_F
11002 #undef CASE_BUILTIN_I
11006 /* ... fall through ... */
/* Default: truth values are always 0 or 1. */
11009 if (truth_value_p (TREE_CODE (t
)))
11010 /* Truth values evaluate to 0 or 1, which is nonnegative. */
11014 /* We don't know sign of `t', so be conservative and return false. */
11018 /* Return true when T is an address and is known to be nonzero.
11019 For floating point we further ensure that T is not denormal.
11020 Similar logic is present in nonzero_address in rtlanal.h. */
11023 tree_expr_nonzero_p (tree t
)
11025 tree type
= TREE_TYPE (t
);
11027 /* Doing something useful for floating point would need more work. */
11028 if (!INTEGRAL_TYPE_P (type
) && !POINTER_TYPE_P (type
))
11031 switch (TREE_CODE (t
))
11034 return tree_expr_nonzero_p (TREE_OPERAND (t
, 0));
11037 /* We used to test for !integer_zerop here. This does not work correctly
11038 if TREE_CONSTANT_OVERFLOW (t). */
11039 return (TREE_INT_CST_LOW (t
) != 0
11040 || TREE_INT_CST_HIGH (t
) != 0);
11043 if (!TYPE_UNSIGNED (type
) && !flag_wrapv
)
11045 /* With the presence of negative values it is hard
11046 to say something. */
11047 if (!tree_expr_nonnegative_p (TREE_OPERAND (t
, 0))
11048 || !tree_expr_nonnegative_p (TREE_OPERAND (t
, 1)))
11050 /* One of operands must be positive and the other non-negative. */
11051 return (tree_expr_nonzero_p (TREE_OPERAND (t
, 0))
11052 || tree_expr_nonzero_p (TREE_OPERAND (t
, 1)));
11057 if (!TYPE_UNSIGNED (type
) && !flag_wrapv
)
11059 return (tree_expr_nonzero_p (TREE_OPERAND (t
, 0))
11060 && tree_expr_nonzero_p (TREE_OPERAND (t
, 1)));
11066 tree inner_type
= TREE_TYPE (TREE_OPERAND (t
, 0));
11067 tree outer_type
= TREE_TYPE (t
);
11069 return (TYPE_PRECISION (inner_type
) >= TYPE_PRECISION (outer_type
)
11070 && tree_expr_nonzero_p (TREE_OPERAND (t
, 0)));
11076 tree base
= get_base_address (TREE_OPERAND (t
, 0));
11081 /* Weak declarations may link to NULL. */
11082 if (VAR_OR_FUNCTION_DECL_P (base
))
11083 return !DECL_WEAK (base
);
11085 /* Constants are never weak. */
11086 if (CONSTANT_CLASS_P (base
))
11093 return (tree_expr_nonzero_p (TREE_OPERAND (t
, 1))
11094 && tree_expr_nonzero_p (TREE_OPERAND (t
, 2)));
11097 return (tree_expr_nonzero_p (TREE_OPERAND (t
, 0))
11098 && tree_expr_nonzero_p (TREE_OPERAND (t
, 1)));
11101 if (tree_expr_nonzero_p (TREE_OPERAND (t
, 0)))
11103 /* When both operands are nonzero, then MAX must be too. */
11104 if (tree_expr_nonzero_p (TREE_OPERAND (t
, 1)))
11107 /* MAX where operand 0 is positive is positive. */
11108 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0));
11110 /* MAX where operand 1 is positive is positive. */
11111 else if (tree_expr_nonzero_p (TREE_OPERAND (t
, 1))
11112 && tree_expr_nonnegative_p (TREE_OPERAND (t
, 1)))
11116 case COMPOUND_EXPR
:
11119 return tree_expr_nonzero_p (TREE_OPERAND (t
, 1));
11122 case NON_LVALUE_EXPR
:
11123 return tree_expr_nonzero_p (TREE_OPERAND (t
, 0));
11126 return tree_expr_nonzero_p (TREE_OPERAND (t
, 1))
11127 || tree_expr_nonzero_p (TREE_OPERAND (t
, 0));
11130 return alloca_call_p (t
);
11138 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
11139 attempt to fold the expression to a constant without modifying TYPE,
11142 If the expression could be simplified to a constant, then return
11143 the constant. If the expression would not be simplified to a
11144 constant, then return NULL_TREE. */
11147 fold_binary_to_constant (enum tree_code code
, tree type
, tree op0
, tree op1
)
11149 tree tem
= fold_binary (code
, type
, op0
, op1
);
11150 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
11153 /* Given the components of a unary expression CODE, TYPE and OP0,
11154 attempt to fold the expression to a constant without modifying
11157 If the expression could be simplified to a constant, then return
11158 the constant. If the expression would not be simplified to a
11159 constant, then return NULL_TREE. */
11162 fold_unary_to_constant (enum tree_code code
, tree type
, tree op0
)
11164 tree tem
= fold_unary (code
, type
, op0
);
11165 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
11168 /* If EXP represents referencing an element in a constant string
11169 (either via pointer arithmetic or array indexing), return the
11170 tree representing the value accessed, otherwise return NULL. */
11173 fold_read_from_constant_string (tree exp
)
11175 if (TREE_CODE (exp
) == INDIRECT_REF
|| TREE_CODE (exp
) == ARRAY_REF
)
11177 tree exp1
= TREE_OPERAND (exp
, 0);
11181 if (TREE_CODE (exp
) == INDIRECT_REF
)
11182 string
= string_constant (exp1
, &index
);
11185 tree low_bound
= array_ref_low_bound (exp
);
11186 index
= fold_convert (sizetype
, TREE_OPERAND (exp
, 1));
11188 /* Optimize the special-case of a zero lower bound.
11190 We convert the low_bound to sizetype to avoid some problems
11191 with constant folding. (E.g. suppose the lower bound is 1,
11192 and its mode is QI. Without the conversion,l (ARRAY
11193 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
11194 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
11195 if (! integer_zerop (low_bound
))
11196 index
= size_diffop (index
, fold_convert (sizetype
, low_bound
));
11202 && TREE_TYPE (exp
) == TREE_TYPE (TREE_TYPE (string
))
11203 && TREE_CODE (string
) == STRING_CST
11204 && TREE_CODE (index
) == INTEGER_CST
11205 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
11206 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
))))
11208 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))) == 1))
11209 return fold_convert (TREE_TYPE (exp
),
11210 build_int_cst (NULL_TREE
,
11211 (TREE_STRING_POINTER (string
)
11212 [TREE_INT_CST_LOW (index
)])));
11217 /* Return the tree for neg (ARG0) when ARG0 is known to be either
11218 an integer constant or real constant.
11220 TYPE is the type of the result. */
11223 fold_negate_const (tree arg0
, tree type
)
11225 tree t
= NULL_TREE
;
11227 switch (TREE_CODE (arg0
))
11231 unsigned HOST_WIDE_INT low
;
11232 HOST_WIDE_INT high
;
11233 int overflow
= neg_double (TREE_INT_CST_LOW (arg0
),
11234 TREE_INT_CST_HIGH (arg0
),
11236 t
= build_int_cst_wide (type
, low
, high
);
11237 t
= force_fit_type (t
, 1,
11238 (overflow
| TREE_OVERFLOW (arg0
))
11239 && !TYPE_UNSIGNED (type
),
11240 TREE_CONSTANT_OVERFLOW (arg0
));
11245 t
= build_real (type
, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0
)));
11249 gcc_unreachable ();
11255 /* Return the tree for abs (ARG0) when ARG0 is known to be either
11256 an integer constant or real constant.
11258 TYPE is the type of the result. */
11261 fold_abs_const (tree arg0
, tree type
)
11263 tree t
= NULL_TREE
;
11265 switch (TREE_CODE (arg0
))
11268 /* If the value is unsigned, then the absolute value is
11269 the same as the ordinary value. */
11270 if (TYPE_UNSIGNED (type
))
11272 /* Similarly, if the value is non-negative. */
11273 else if (INT_CST_LT (integer_minus_one_node
, arg0
))
11275 /* If the value is negative, then the absolute value is
11279 unsigned HOST_WIDE_INT low
;
11280 HOST_WIDE_INT high
;
11281 int overflow
= neg_double (TREE_INT_CST_LOW (arg0
),
11282 TREE_INT_CST_HIGH (arg0
),
11284 t
= build_int_cst_wide (type
, low
, high
);
11285 t
= force_fit_type (t
, -1, overflow
| TREE_OVERFLOW (arg0
),
11286 TREE_CONSTANT_OVERFLOW (arg0
));
11291 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0
)))
11292 t
= build_real (type
, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0
)));
11298 gcc_unreachable ();
11304 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
11305 constant. TYPE is the type of the result. */
11308 fold_not_const (tree arg0
, tree type
)
11310 tree t
= NULL_TREE
;
11312 gcc_assert (TREE_CODE (arg0
) == INTEGER_CST
);
11314 t
= build_int_cst_wide (type
,
11315 ~ TREE_INT_CST_LOW (arg0
),
11316 ~ TREE_INT_CST_HIGH (arg0
));
11317 t
= force_fit_type (t
, 0, TREE_OVERFLOW (arg0
),
11318 TREE_CONSTANT_OVERFLOW (arg0
));
11323 /* Given CODE, a relational operator, the target type, TYPE and two
11324 constant operands OP0 and OP1, return the result of the
11325 relational operation. If the result is not a compile time
11326 constant, then return NULL_TREE. */
11329 fold_relational_const (enum tree_code code
, tree type
, tree op0
, tree op1
)
11331 int result
, invert
;
11333 /* From here on, the only cases we handle are when the result is
11334 known to be a constant. */
11336 if (TREE_CODE (op0
) == REAL_CST
&& TREE_CODE (op1
) == REAL_CST
)
11338 const REAL_VALUE_TYPE
*c0
= TREE_REAL_CST_PTR (op0
);
11339 const REAL_VALUE_TYPE
*c1
= TREE_REAL_CST_PTR (op1
);
11341 /* Handle the cases where either operand is a NaN. */
11342 if (real_isnan (c0
) || real_isnan (c1
))
11352 case UNORDERED_EXPR
:
11366 if (flag_trapping_math
)
11372 gcc_unreachable ();
11375 return constant_boolean_node (result
, type
);
11378 return constant_boolean_node (real_compare (code
, c0
, c1
), type
);
11381 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
11383 To compute GT, swap the arguments and do LT.
11384 To compute GE, do LT and invert the result.
11385 To compute LE, swap the arguments, do LT and invert the result.
11386 To compute NE, do EQ and invert the result.
11388 Therefore, the code below must handle only EQ and LT. */
11390 if (code
== LE_EXPR
|| code
== GT_EXPR
)
11395 code
= swap_tree_comparison (code
);
11398 /* Note that it is safe to invert for real values here because we
11399 have already handled the one case that it matters. */
11402 if (code
== NE_EXPR
|| code
== GE_EXPR
)
11405 code
= invert_tree_comparison (code
, false);
11408 /* Compute a result for LT or EQ if args permit;
11409 Otherwise return T. */
11410 if (TREE_CODE (op0
) == INTEGER_CST
&& TREE_CODE (op1
) == INTEGER_CST
)
11412 if (code
== EQ_EXPR
)
11413 result
= tree_int_cst_equal (op0
, op1
);
11414 else if (TYPE_UNSIGNED (TREE_TYPE (op0
)))
11415 result
= INT_CST_LT_UNSIGNED (op0
, op1
);
11417 result
= INT_CST_LT (op0
, op1
);
11424 return constant_boolean_node (result
, type
);
11427 /* Build an expression for the a clean point containing EXPR with type TYPE.
11428 Don't build a cleanup point expression for EXPR which don't have side
11432 fold_build_cleanup_point_expr (tree type
, tree expr
)
11434 /* If the expression does not have side effects then we don't have to wrap
11435 it with a cleanup point expression. */
11436 if (!TREE_SIDE_EFFECTS (expr
))
11439 /* If the expression is a return, check to see if the expression inside the
11440 return has no side effects or the right hand side of the modify expression
11441 inside the return. If either don't have side effects set we don't need to
11442 wrap the expression in a cleanup point expression. Note we don't check the
11443 left hand side of the modify because it should always be a return decl. */
11444 if (TREE_CODE (expr
) == RETURN_EXPR
)
11446 tree op
= TREE_OPERAND (expr
, 0);
11447 if (!op
|| !TREE_SIDE_EFFECTS (op
))
11449 op
= TREE_OPERAND (op
, 1);
11450 if (!TREE_SIDE_EFFECTS (op
))
11454 return build1 (CLEANUP_POINT_EXPR
, type
, expr
);
11457 /* Build an expression for the address of T. Folds away INDIRECT_REF to
11458 avoid confusing the gimplify process. */
11461 build_fold_addr_expr_with_type (tree t
, tree ptrtype
)
11463 /* The size of the object is not relevant when talking about its address. */
11464 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
11465 t
= TREE_OPERAND (t
, 0);
11467 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
11468 if (TREE_CODE (t
) == INDIRECT_REF
11469 || TREE_CODE (t
) == MISALIGNED_INDIRECT_REF
)
11471 t
= TREE_OPERAND (t
, 0);
11472 if (TREE_TYPE (t
) != ptrtype
)
11473 t
= build1 (NOP_EXPR
, ptrtype
, t
);
11479 while (handled_component_p (base
))
11480 base
= TREE_OPERAND (base
, 0);
11482 TREE_ADDRESSABLE (base
) = 1;
11484 t
= build1 (ADDR_EXPR
, ptrtype
, t
);
11491 build_fold_addr_expr (tree t
)
11493 return build_fold_addr_expr_with_type (t
, build_pointer_type (TREE_TYPE (t
)));
11496 /* Given a pointer value OP0 and a type TYPE, return a simplified version
11497 of an indirection through OP0, or NULL_TREE if no simplification is
11501 fold_indirect_ref_1 (tree type
, tree op0
)
11507 subtype
= TREE_TYPE (sub
);
11508 if (!POINTER_TYPE_P (subtype
))
11511 if (TREE_CODE (sub
) == ADDR_EXPR
)
11513 tree op
= TREE_OPERAND (sub
, 0);
11514 tree optype
= TREE_TYPE (op
);
11516 if (type
== optype
)
11518 /* *(foo *)&fooarray => fooarray[0] */
11519 else if (TREE_CODE (optype
) == ARRAY_TYPE
11520 && type
== TREE_TYPE (optype
))
11522 tree type_domain
= TYPE_DOMAIN (optype
);
11523 tree min_val
= size_zero_node
;
11524 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
11525 min_val
= TYPE_MIN_VALUE (type_domain
);
11526 return build4 (ARRAY_REF
, type
, op
, min_val
, NULL_TREE
, NULL_TREE
);
11530 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11531 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
11532 && type
== TREE_TYPE (TREE_TYPE (subtype
)))
11535 tree min_val
= size_zero_node
;
11536 sub
= build_fold_indirect_ref (sub
);
11537 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
11538 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
11539 min_val
= TYPE_MIN_VALUE (type_domain
);
11540 return build4 (ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
, NULL_TREE
);
11546 /* Builds an expression for an indirection through T, simplifying some
11550 build_fold_indirect_ref (tree t
)
11552 tree type
= TREE_TYPE (TREE_TYPE (t
));
11553 tree sub
= fold_indirect_ref_1 (type
, t
);
11558 return build1 (INDIRECT_REF
, type
, t
);
11561 /* Given an INDIRECT_REF T, return either T or a simplified version. */
11564 fold_indirect_ref (tree t
)
11566 tree sub
= fold_indirect_ref_1 (TREE_TYPE (t
), TREE_OPERAND (t
, 0));
11574 /* Strip non-trapping, non-side-effecting tree nodes from an expression
11575 whose result is ignored. The type of the returned tree need not be
11576 the same as the original expression. */
11579 fold_ignored_result (tree t
)
11581 if (!TREE_SIDE_EFFECTS (t
))
11582 return integer_zero_node
;
11585 switch (TREE_CODE_CLASS (TREE_CODE (t
)))
11588 t
= TREE_OPERAND (t
, 0);
11592 case tcc_comparison
:
11593 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
11594 t
= TREE_OPERAND (t
, 0);
11595 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 0)))
11596 t
= TREE_OPERAND (t
, 1);
11601 case tcc_expression
:
11602 switch (TREE_CODE (t
))
11604 case COMPOUND_EXPR
:
11605 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
11607 t
= TREE_OPERAND (t
, 0);
11611 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1))
11612 || TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 2)))
11614 t
= TREE_OPERAND (t
, 0);
11627 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11628 This can only be applied to objects of a sizetype. */
11631 round_up (tree value
, int divisor
)
11633 tree div
= NULL_TREE
;
11635 gcc_assert (divisor
> 0);
11639 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11640 have to do anything. Only do this when we are not given a const,
11641 because in that case, this check is more expensive than just
11643 if (TREE_CODE (value
) != INTEGER_CST
)
11645 div
= build_int_cst (TREE_TYPE (value
), divisor
);
11647 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
11651 /* If divisor is a power of two, simplify this to bit manipulation. */
11652 if (divisor
== (divisor
& -divisor
))
11656 t
= build_int_cst (TREE_TYPE (value
), divisor
- 1);
11657 value
= size_binop (PLUS_EXPR
, value
, t
);
11658 t
= build_int_cst (TREE_TYPE (value
), -divisor
);
11659 value
= size_binop (BIT_AND_EXPR
, value
, t
);
11664 div
= build_int_cst (TREE_TYPE (value
), divisor
);
11665 value
= size_binop (CEIL_DIV_EXPR
, value
, div
);
11666 value
= size_binop (MULT_EXPR
, value
, div
);
11672 /* Likewise, but round down. */
11675 round_down (tree value
, int divisor
)
11677 tree div
= NULL_TREE
;
11679 gcc_assert (divisor
> 0);
11683 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11684 have to do anything. Only do this when we are not given a const,
11685 because in that case, this check is more expensive than just
11687 if (TREE_CODE (value
) != INTEGER_CST
)
11689 div
= build_int_cst (TREE_TYPE (value
), divisor
);
11691 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
11695 /* If divisor is a power of two, simplify this to bit manipulation. */
11696 if (divisor
== (divisor
& -divisor
))
11700 t
= build_int_cst (TREE_TYPE (value
), -divisor
);
11701 value
= size_binop (BIT_AND_EXPR
, value
, t
);
11706 div
= build_int_cst (TREE_TYPE (value
), divisor
);
11707 value
= size_binop (FLOOR_DIV_EXPR
, value
, div
);
11708 value
= size_binop (MULT_EXPR
, value
, div
);
11714 /* Returns the pointer to the base of the object addressed by EXP and
11715 extracts the information about the offset of the access, storing it
11716 to PBITPOS and POFFSET. */
11719 split_address_to_core_and_offset (tree exp
,
11720 HOST_WIDE_INT
*pbitpos
, tree
*poffset
)
11723 enum machine_mode mode
;
11724 int unsignedp
, volatilep
;
11725 HOST_WIDE_INT bitsize
;
11727 if (TREE_CODE (exp
) == ADDR_EXPR
)
11729 core
= get_inner_reference (TREE_OPERAND (exp
, 0), &bitsize
, pbitpos
,
11730 poffset
, &mode
, &unsignedp
, &volatilep
,
11732 core
= build_fold_addr_expr (core
);
11738 *poffset
= NULL_TREE
;
11744 /* Returns true if addresses of E1 and E2 differ by a constant, false
11745 otherwise. If they do, E1 - E2 is stored in *DIFF. */
11748 ptr_difference_const (tree e1
, tree e2
, HOST_WIDE_INT
*diff
)
11751 HOST_WIDE_INT bitpos1
, bitpos2
;
11752 tree toffset1
, toffset2
, tdiff
, type
;
11754 core1
= split_address_to_core_and_offset (e1
, &bitpos1
, &toffset1
);
11755 core2
= split_address_to_core_and_offset (e2
, &bitpos2
, &toffset2
);
11757 if (bitpos1
% BITS_PER_UNIT
!= 0
11758 || bitpos2
% BITS_PER_UNIT
!= 0
11759 || !operand_equal_p (core1
, core2
, 0))
11762 if (toffset1
&& toffset2
)
11764 type
= TREE_TYPE (toffset1
);
11765 if (type
!= TREE_TYPE (toffset2
))
11766 toffset2
= fold_convert (type
, toffset2
);
11768 tdiff
= fold_build2 (MINUS_EXPR
, type
, toffset1
, toffset2
);
11769 if (!cst_and_fits_in_hwi (tdiff
))
11772 *diff
= int_cst_value (tdiff
);
11774 else if (toffset1
|| toffset2
)
11776 /* If only one of the offsets is non-constant, the difference cannot
11783 *diff
+= (bitpos1
- bitpos2
) / BITS_PER_UNIT
;
11787 /* Simplify the floating point expression EXP when the sign of the
11788 result is not significant. Return NULL_TREE if no simplification
11792 fold_strip_sign_ops (tree exp
)
11796 switch (TREE_CODE (exp
))
11800 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 0));
11801 return arg0
? arg0
: TREE_OPERAND (exp
, 0);
11805 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp
))))
11807 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 0));
11808 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
11809 if (arg0
!= NULL_TREE
|| arg1
!= NULL_TREE
)
11810 return fold_build2 (TREE_CODE (exp
), TREE_TYPE (exp
),
11811 arg0
? arg0
: TREE_OPERAND (exp
, 0),
11812 arg1
? arg1
: TREE_OPERAND (exp
, 1));