/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */
#include "coretypes.h"
#include "langhooks.h"
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum tree_code invert_tree_comparison (enum tree_code, bool);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static bool tree_expr_nonzero_p (tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */

#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
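/* Illustrative worked example (editorial addition, not from the original
   sources): using 8-bit two's complement values for brevity, a = 100 and
   b = 100 give sum = -56 (0xC8).  Here ~(a ^ b) has its sign bit set (the
   operands agree in sign) and (a ^ sum) also has its sign bit set (the sum
   disagrees with them), so OVERFLOW_SUM_SIGN (a, b, sum) is nonzero,
   flagging the signed overflow.  */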
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
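/* Illustrative example (editorial addition, not from the original sources):
   on a host where HOST_BITS_PER_WIDE_INT == 32, BASE is 0x10000 and each
   word is split into 16-bit digits, e.g. LOWPART (0x12345678) == 0x5678,
   HIGHPART (0x12345678) == 0x1234, and 0x5678 + 0x1234 * BASE recovers
   0x12345678.  */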
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  OVERFLOWED_CONST
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if,
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs
        or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if,
        OVERFLOWED_CONST is nonzero
        or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */
tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT)1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Insure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {			/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop.  */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;		/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 0));
              return fold_convert (type, tem);
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 1));
              return fold_convert (type, tem);
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                          TREE_OPERAND (t, 1),
                                          TREE_OPERAND (t, 0)));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              negate_expr (tem)));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              negate_expr (tem),
                                              TREE_OPERAND (t, 1)));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = -int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      gcc_unreachable ();
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */

      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree t1, t2, real, imag;
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t1 = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
            t2 = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              {
                real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
                imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
              }
            else
              {
                real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
                imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
                if (!real || !imag)
                  return NULL_TREE;
              }

            t = build_complex (type, real, imag);
          }
          break;

        default:
          gcc_unreachable ();
        }
      return t;
    }
  return 0;
}
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold_build2 (code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
                          TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
                      /* Don't set the overflow when
                         converting a pointer.  */
                      !POINTER_TYPE_P (TREE_TYPE (arg1)),
                      (TREE_INT_CST_HIGH (arg1) < 0
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
      || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
                                        TYPE_MAIN_VARIANT (orig)))
    return fold_build1 (NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1 (NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert (type, tem);
        }
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1 (NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE: case CHAR_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1 (FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
                              type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert (type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE: case CHAR_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
          return build2 (COMPLEX_EXPR, type,
                         fold_convert (TREE_TYPE (type), arg),
                         fold_convert (TREE_TYPE (type), integer_zero_node));
        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
                ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
                return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert (TREE_TYPE (type), rpart);
            ipart = fold_convert (TREE_TYPE (type), ipart);
            return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1 (NOP_EXPR, type, arg);

    case VOID_TYPE:
      return fold_build1 (CONVERT_EXPR, type, fold_ignored_result (arg));

    default:
      gcc_unreachable ();
    }
}
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
  {
  case VAR_DECL:
  case PARM_DECL:
  case RESULT_DECL:
  case LABEL_DECL:
  case FUNCTION_DECL:
  case SSA_NAME:

  case COMPONENT_REF:
  case INDIRECT_REF:
  case ALIGN_INDIRECT_REF:
  case MISALIGNED_INDIRECT_REF:
  case ARRAY_REF:
  case ARRAY_RANGE_REF:
  case BIT_FIELD_REF:
  case OBJ_TYPE_REF:

  case REALPART_EXPR:
  case IMAGPART_EXPR:
  case PREINCREMENT_EXPR:
  case PREDECREMENT_EXPR:
  case SAVE_EXPR:
  case TRY_CATCH_EXPR:
  case WITH_CLEANUP_EXPR:
  case COMPOUND_EXPR:
  case MODIFY_EXPR:
  case TARGET_EXPR:
  case COND_EXPR:
  case BIND_EXPR:
  case MIN_EXPR:
  case MAX_EXPR:
    break;

  default:
    /* Assume the worst for front-end tree codes.  */
    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
      break;
    return false;
  }

  return true;
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

static enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

static tree
combine_comparisons (enum tree_code code, enum tree_code lcode,
                     enum tree_code rcode, tree truth_type,
                     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  enum comparison_code compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
         which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
        compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
        compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
         under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
                   && (lcompcode != COMPCODE_EQ)
                   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
                   && (rcompcode != COMPCODE_EQ)
                   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
                  && (compcode != COMPCODE_EQ)
                  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
         such that the RHS, if evaluated, will never trap.  For
         example, in ORD (x, y) && (x < y), we evaluate the RHS only
         if neither x nor y is NaN.  (This is a mixed blessing: for
         example, the expression above will never trap, hence
         optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
          || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
        rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
         trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
          && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
        return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
        return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    return fold_build2 (compcode_to_comparison (compcode),
                        truth_type, ll_arg, lr_arg);
}
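
/* Usage sketch (not part of the original source): for an integral
   type, where no NaNs are involved,

       combine_comparisons (TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
                            boolean_type_node, x, y)

   computes COMPCODE_LT | COMPCODE_EQ = COMPCODE_LE and hands back the
   tree for "x <= y"; combining LT with GT under TRUTH_AND_EXPR yields
   COMPCODE_FALSE and therefore a constant false node.  */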
/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (enum tree_code code)
{
  return (TREE_CODE_CLASS (code) == tcc_comparison
          || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
          || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
          || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */
int
operand_equal_p (tree arg0, tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
    return 0;

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
         Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us. In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && tree_int_cst_equal (arg0, arg1));

      case REAL_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
                                          TREE_REAL_CST (arg1)));

      case VECTOR_CST:
        {
          tree v1, v2;

          if (TREE_CONSTANT_OVERFLOW (arg0)
              || TREE_CONSTANT_OVERFLOW (arg1))
            return 0;

          v1 = TREE_VECTOR_CST_ELTS (arg0);
          v2 = TREE_VECTOR_CST_ELTS (arg1);
          while (v1 && v2)
            {
              if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
                                    flags))
                return 0;
              v1 = TREE_CHAIN (v1);
              v2 = TREE_CHAIN (v2);
            }

          return v1 == v2;
        }

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 flags)
                && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
                                    flags));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! memcmp (TREE_STRING_POINTER (arg0),
                             TREE_STRING_POINTER (arg1),
                             TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
                                0);
      default:
        break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, they both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
                                    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)				\
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
        {
        case NOP_EXPR:
        case CONVERT_EXPR:
        case FIX_CEIL_EXPR:
        case FIX_TRUNC_EXPR:
        case FIX_FLOOR_EXPR:
        case FIX_ROUND_EXPR:
          if (TYPE_UNSIGNED (TREE_TYPE (arg0))
              != TYPE_UNSIGNED (TREE_TYPE (arg1)))
            return 0;
          break;
        default:
          break;
        }

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
        return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 1), flags)
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
         dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
          || TREE_SIDE_EFFECTS (arg1))
        return 0;

      switch (TREE_CODE (arg0))
        {
        case INDIRECT_REF:
        case ALIGN_INDIRECT_REF:
        case MISALIGNED_INDIRECT_REF:
        case REALPART_EXPR:
        case IMAGPART_EXPR:
          return OP_SAME (0);

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          /* Operands 2 and 3 may be null.  */
          return (OP_SAME (0)
                  && OP_SAME (1)
                  && OP_SAME_WITH_NULL (2)
                  && OP_SAME_WITH_NULL (3));

        case COMPONENT_REF:
          /* Handle operand 2 the same as for ARRAY_REF.  */
          return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);

        case BIT_FIELD_REF:
          return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }

    case tcc_expression:
      switch (TREE_CODE (arg0))
        {
        case ADDR_EXPR:
        case TRUTH_NOT_EXPR:
          return OP_SAME (0);

        case TRUTH_ANDIF_EXPR:
        case TRUTH_ORIF_EXPR:
          return OP_SAME (0) && OP_SAME (1);

        case TRUTH_AND_EXPR:
        case TRUTH_OR_EXPR:
        case TRUTH_XOR_EXPR:
          if (OP_SAME (0) && OP_SAME (1))
            return 1;

          /* Otherwise take into account this is a commutative operation.  */
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 1), flags)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 0), flags));

        case CALL_EXPR:
          /* If the CALL_EXPRs call different functions, then they
             clearly can not be equal.  */
          if (! OP_SAME (0))
            return 0;

          {
            unsigned int cef = call_expr_flags (arg0);
            if (flags & OEP_PURE_SAME)
              cef &= ECF_CONST | ECF_PURE;
            else
              cef &= ECF_CONST;
            if (!cef)
              return 0;
          }

          /* Now see if all the arguments are the same.  operand_equal_p
             does not handle TREE_LIST, so we walk the operands here
             feeding them to operand_equal_p.  */
          arg0 = TREE_OPERAND (arg0, 1);
          arg1 = TREE_OPERAND (arg1, 1);
          while (arg0 && arg1)
            {
              if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
                                     flags))
                return 0;

              arg0 = TREE_CHAIN (arg0);
              arg1 = TREE_CHAIN (arg1);
            }

          /* If we get here and both argument lists are exhausted
             then the CALL_EXPRs are equal.  */
          return ! (arg0 || arg1);

        default:
          return 0;
        }

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
              && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
              && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
              && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
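
/* For exposition (not part of the original source): the
   "indistinguishable" test is stricter than C equality for floats.
   Given the REAL_CST trees for 0.0 and -0.0,

       operand_equal_p (zero, neg_zero, 0)  => 0

   because REAL_VALUES_IDENTICAL distinguishes the sign bit, even
   though 0.0 == -0.0 evaluates to true at run time.  */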
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
                               (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
               || code == COMPOUND_EXPR))
    class = tcc_binary;

  else if (class == tcc_expression && code == SAVE_EXPR
           && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
         too complex to handle.  */
      if (*cval1 || *cval2)
        return 0;

      class = tcc_unary;
      *save_p = 1;
    }

  switch (class)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
              && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
        return (twoval_comparison_p (TREE_OPERAND (arg, 0),
                                     cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                        cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 2),
                                        cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
         the second operand, we know *CVAL1 can't be zero.  It must be that
         one side of the comparison is each of the values; test for the
         case where this isn't true by failing if the two operands
         are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
                           TREE_OPERAND (arg, 1), 0))
        return 0;

      if (*cval1 == 0)
        *cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
        ;
      else
        return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
        ;
      else
        return 0;

      return 1;

    default:
      return 0;
    }
}
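
/* For exposition (not part of the original source): for the tree
   (x < y) && (x == y), the walk records *CVAL1 = x and *CVAL2 = y and
   returns 1; for (x < y) && (y < z) it fails, since the second
   comparison would need a third value z beyond the two slots.  */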
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = tcc_binary;

  switch (class)
    {
    case tcc_unary:
      return fold_build1 (code, type,
                          eval_subst (TREE_OPERAND (arg, 0),
                                      old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2 (code, type,
                          eval_subst (TREE_OPERAND (arg, 0),
                                      old0, new0, old1, new1),
                          eval_subst (TREE_OPERAND (arg, 1),
                                      old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
        {
        case SAVE_EXPR:
          return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

        case COMPOUND_EXPR:
          return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

        case COND_EXPR:
          return fold_build3 (code, type,
                              eval_subst (TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1),
                              eval_subst (TREE_OPERAND (arg, 1),
                                          old0, new0, old1, new1),
                              eval_subst (TREE_OPERAND (arg, 2),
                                          old0, new0, old1, new1));
        default:
          break;
        }
      /* Fall through - ???  */

    case tcc_comparison:
      {
        tree arg0 = TREE_OPERAND (arg, 0);
        tree arg1 = TREE_OPERAND (arg, 1);

        /* We need to check both for exact equality and tree equality.  The
           former will be true if the operand has a side-effect.  In that
           case, we know the operand occurred exactly once.  */

        if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
          arg0 = new0;
        else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
          arg0 = new1;

        if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
          arg1 = new0;
        else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
          arg1 = new1;

        return fold_build2 (code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
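
/* For exposition (not part of the original source): with OLD0 = x,
   NEW0 = a, OLD1 = y, NEW1 = b,

       eval_subst (x < y, x, a, y, b)

   rewrites both comparison operands and folds the result to "a < b";
   the recursion carries the substitution through any arithmetic and
   truth operations wrapped around such comparisons.  */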
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return non_lvalue (t);
}
/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return pedantic_non_lvalue (t);
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2 (COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2 (COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
}
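
/* For exposition (not part of the original source): when folding
   discards operands that still have side effects, they are chained in
   front of the result with COMPOUND_EXPRs.  E.g. if f() and g() have
   side effects,

       omit_two_operands (type, result, f(), g())

   builds "(f(), (g(), result))", preserving evaluation of f() before
   g() as the comment above requires.  */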
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */
tree
invert_truthvalue (tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  if (code == ERROR_MARK)
    return arg;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
          && flag_trapping_math
          && code != ORDERED_EXPR && code != UNORDERED_EXPR
          && code != NE_EXPR && code != EQ_EXPR)
        return build1 (TRUTH_NOT_EXPR, type, arg);
      else
        {
          code = invert_tree_comparison (code,
                                         HONOR_NANS (TYPE_MODE (op_type)));
          if (code == ERROR_MARK)
            return build1 (TRUTH_NOT_EXPR, type, arg);
          else
            return build2 (code, type,
                           TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
        }
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      return build2 (TRUTH_OR_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      return build2 (TRUTH_AND_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
         unless the second operand is a TRUTH_NOT_EXPR in which case our
         result is the XOR of the first operand with the inside of the
         negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
        return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
                       TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
        return build2 (TRUTH_XOR_EXPR, type,
                       invert_truthvalue (TREE_OPERAND (arg, 0)),
                       TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build2 (TRUTH_ORIF_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build2 (TRUTH_ANDIF_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
                     invert_truthvalue (TREE_OPERAND (arg, 1)),
                     invert_truthvalue (TREE_OPERAND (arg, 2)));

    case COMPOUND_EXPR:
      return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
        break;

      /* ... fall through ...  */
    case CONVERT_EXPR:
    case FLOAT_EXPR:
      return build1 (TREE_CODE (arg), type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
        break;
      return build2 (EQ_EXPR, type, arg,
                     fold_convert (type, integer_zero_node));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }
  gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
  return build1 (TRUTH_NOT_EXPR, type, arg);
}
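
/* For exposition (not part of the original source): the recursion
   implements De Morgan's laws on the truth operators, e.g.

       invert_truthvalue (a && b)  =>  !a || !b
       invert_truthvalue (a < b)   =>  a >= b      (integral operands)
       invert_truthvalue (a < b)   =>  !(a < b)    (FP, trapping math)

   the last case falling back to TRUTH_NOT_EXPR because reversing the
   comparison is unsafe per invert_tree_comparison above.  */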
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
        (A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
          && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  return fold_build2 (TREE_CODE (arg0), type, common,
                      fold_build2 (code, type, left, right));
}
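
/* For exposition (not part of the original source): with constant
   operands the distribution enables further folding, e.g.

       (x | 3) & (x | 5)  =>  x | (3 & 5)  =>  x | 1

   saving one bit operation outright and a second once the constant
   sub-expression is folded.  */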
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */
static tree
distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
                          TREE_OPERAND (arg1, 1), 0))
    return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
                        fold_build2 (code, type,
                                     TREE_OPERAND (arg0, 0),
                                     TREE_OPERAND (arg1, 0)),
                        TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
                       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
        real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
        real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2 (MULT_EXPR, type,
                          TREE_OPERAND (arg0, 0),
                          build_real (type, r0));
    }

  return NULL_TREE;
}
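
/* For exposition (not part of the original source): as the comment
   above notes, this rewrite is unsafe (callers are expected to guard
   it behind the unsafe-math flags), since it can change rounding and
   overflow behavior.  For example,

       a/c + b/c      =>  (a + b) / c
       x/2.0 + x/4.0  =>  x * 0.75

   the second computed via 1/2.0 + 1/4.0 in real_arithmetic.  */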
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
                    int unsignedp)
{
  tree result;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
           || POINTER_TYPE_P (TREE_TYPE (inner)))
          && host_integerp (size, 0)
          && tree_low_cst (size, 0) == bitsize)
        return fold_convert (type, inner);
    }

  result = build3 (BIT_FIELD_REF, type, inner,
                   size_int (bitsize), bitsize_int (bitpos));

  BIT_FIELD_REF_UNSIGNED (result) = unsignedp;

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (enum tree_code code, tree compare_type,
                            tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
                                &lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
         sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
                                    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
          || lunsignedp != runsignedp || offset != 0
          || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
        return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
                         const_p ? TYPE_ALIGN (TREE_TYPE (linner))
                         : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
                                TYPE_ALIGN (TREE_TYPE (rinner))),
                         word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst (unsigned_type, -1);
  mask = force_fit_type (mask, 0, false, false);
  mask = fold_convert (unsigned_type, mask);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
                      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return build2 (code, compare_type,
                   build2 (BIT_AND_EXPR, unsigned_type,
                           make_bit_field_ref (linner, unsigned_type,
                                               nbitsize, nbitpos, 1),
                           mask),
                   build2 (BIT_AND_EXPR, unsigned_type,
                           make_bit_field_ref (rinner, unsigned_type,
                                               nbitsize, nbitpos, 1),
                           mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
                                        fold_convert (unsigned_type, rhs),
                                        size_int (lbitsize), 0)))
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
                              size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = fold_convert (type, integer_zero_node);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = fold (const_binop (BIT_AND_EXPR,
                           const_binop (LSHIFT_EXPR,
                                        fold_convert (unsigned_type, rhs),
                                        size_int (lbitpos), 0),
                           mask, 0));

  return build2 (code, compare_type,
                 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
                 rhs);
}
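
/* For exposition (not part of the original source): for a hypothetical
   struct with "unsigned f : 3;" starting at bit 4 of a word w, the
   constant case rewrites

       s.f == 5   =>   (w & (7 << 4)) == (5 << 4)

   (modulo endianness), replacing the shift-and-extract of the bitfield
   with a single masked word compare.  */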
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
                        HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
                        int *punsignedp, int *pvolatilep,
                        tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (TREE_CODE (exp) == NOP_EXPR
      || TREE_CODE (exp) == CONVERT_EXPR
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
        return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
                               punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst (unsigned_type, -1);
  mask = force_fit_type (mask, 0, false, false);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
                        fold_convert (unsigned_type, and_mask), mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bit positions.  */

static int
all_ones_mask_p (tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
  tmask = force_fit_type (tmask, 0, false, false);

  return
    tree_int_cst_equal (mask,
                        const_binop (RSHIFT_EXPR,
                                     const_binop (LSHIFT_EXPR, tmask,
                                                  size_int (precision - size),
                                                  0),
                                     size_int (precision - size), 0));
}
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
                 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
                 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
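
/* For exposition (not part of the original source): for an 8-bit
   signed EXP the sign bit constant is 0x80, so sign_bit_p returns EXP
   itself when VAL is 128 viewed as unsigned; for an extension such as
   (int) c with a narrower char c, it also recurses into the
   unextended operand.  */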
/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
          || TREE_CODE (exp) == SSA_NAME
          || (DECL_P (exp)
              && ! TREE_ADDRESSABLE (exp)
              && ! TREE_THIS_VOLATILE (exp)
              && ! DECL_NONLOCAL (exp)
              /* Don't regard global variables as simple.  They may be
                 allocated in ways unknown to the compiler (shared memory,
                 #pragma weak, etc).  */
              && ! TREE_PUBLIC (exp)
              && ! DECL_EXTERNAL (exp)
              /* Loading a static variable is unduly expensive, but global
                 registers aren't expensive.  */
              && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
        X == 2 || X == 3 || X == 4 || X == 5
   and
        X >= 2 && X <= 5
   are converted to
        (unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
             tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
                         arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
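
/* For exposition (not part of the original source): an omitted upper
   bound compares greater than any constant, so

       range_binop (LT_EXPR, integer_type_node, c, 0, NULL_TREE, 1)

   yields boolean true for every constant c: SGN0 is 0 and SGN1 is 1,
   the "plus infinity" sentinel described above.  */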
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same type
   as the returned expression.  If EXP is not a comparison, we will most
   likely not be returning a useful value and range.  */

static tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
        {
          if (TREE_CODE_LENGTH (code) > 0)
            arg0 = TREE_OPERAND (exp, 0);
          if (TREE_CODE_CLASS (code) == tcc_comparison
              || TREE_CODE_CLASS (code) == tcc_unary
              || TREE_CODE_CLASS (code) == tcc_binary)
            arg0_type = TREE_TYPE (arg0);
          if (TREE_CODE_CLASS (code) == tcc_binary
              || TREE_CODE_CLASS (code) == tcc_comparison
              || (TREE_CODE_CLASS (code) == tcc_expression
                  && TREE_CODE_LENGTH (code) > 1))
            arg1 = TREE_OPERAND (exp, 1);
        }

      switch (code)
        {
        case TRUTH_NOT_EXPR:
          in_p = ! in_p, exp = arg0;
          continue;

        case EQ_EXPR: case NE_EXPR:
        case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
          /* We can only do something if the range is testing for zero
             and if the second operand is an integer constant.  Note that
             saying something is "in" the range we make is done by
             complementing IN_P since it will set in the initial case of
             being not equal to zero; "out" is leaving it alone.  */
          if (low == 0 || high == 0
              || ! integer_zerop (low) || ! integer_zerop (high)
              || TREE_CODE (arg1) != INTEGER_CST)
            break;

          switch (code)
            {
            case NE_EXPR:  /* - [c, c]  */
              low = high = arg1;
              break;
            case EQ_EXPR:  /* + [c, c]  */
              in_p = ! in_p, low = high = arg1;
              break;
            case GT_EXPR:  /* - [-, c] */
              low = 0, high = arg1;
              break;
            case GE_EXPR:  /* + [c, -] */
              in_p = ! in_p, low = arg1, high = 0;
              break;
            case LT_EXPR:  /* - [c, -] */
              low = arg1, high = 0;
              break;
            case LE_EXPR:  /* + [-, c] */
              in_p = ! in_p, low = 0, high = arg1;
              break;
            default:
              gcc_unreachable ();
            }

          /* If this is an unsigned comparison, we also know that EXP is
             greater than or equal to zero.  We base the range tests we make
             on that fact, so we record it here so we can parse existing
             range tests.  We test arg0_type since often the return type
             of, e.g. EQ_EXPR, is boolean.  */
          if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
            {
              if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                  in_p, low, high, 1,
                                  fold_convert (arg0_type, integer_zero_node),
                                  NULL_TREE))
                break;

              in_p = n_in_p, low = n_low, high = n_high;

              /* If the high bound is missing, but we have a nonzero low
                 bound, reverse the range so it goes from zero to the low bound
                 minus 1.  */
              if (high == 0 && low && ! integer_zerop (low))
                {
                  in_p = ! in_p;
                  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
                                      integer_one_node, 0);
                  low = fold_convert (arg0_type, integer_zero_node);
                }
            }

          exp = arg0;
          continue;

        case NEGATE_EXPR:
          /* (-x) IN [a,b] -> x in [-b, -a]  */
          n_low = range_binop (MINUS_EXPR, exp_type,
                               fold_convert (exp_type, integer_zero_node),
                               0, high, 1);
          n_high = range_binop (MINUS_EXPR, exp_type,
                                fold_convert (exp_type, integer_zero_node),
                                0, low, 0);
          low = n_low, high = n_high;
          exp = arg0;
          continue;

        case BIT_NOT_EXPR:
          /* ~ X -> -X - 1  */
          exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
                        fold_convert (exp_type, integer_one_node));
          continue;

        case PLUS_EXPR:  case MINUS_EXPR:
          if (TREE_CODE (arg1) != INTEGER_CST)
            break;

          /* If EXP is signed, any overflow in the computation is undefined,
             so we don't worry about it so long as our computations on
             the bounds don't overflow.  For unsigned, overflow is defined
             and this is exactly the right thing.  */
          n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                               arg0_type, low, 0, arg1, 0);
          n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                                arg0_type, high, 1, arg1, 0);
          if ((n_low != 0 && TREE_OVERFLOW (n_low))
              || (n_high != 0 && TREE_OVERFLOW (n_high)))
            break;

          /* Check for an unsigned range which has wrapped around the maximum
             value thus making n_high < n_low, and normalize it.  */
          if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
            {
              low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
                                 integer_one_node, 0);
              high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
                                  integer_one_node, 0);

              /* If the range is of the form +/- [ x+1, x ], we won't
                 be able to normalize it.  But then, it represents the
                 whole range or the empty set, so make it
                 +/- [ -, - ].  */
              if (tree_int_cst_equal (n_low, low)
                  && tree_int_cst_equal (n_high, high))
                low = high = 0;
              else
                in_p = ! in_p;
            }
          else
            low = n_low, high = n_high;

          exp = arg0;
          continue;

        case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
          if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
            break;

          if (! INTEGRAL_TYPE_P (arg0_type)
              || (low != 0 && ! int_fits_type_p (low, arg0_type))
              || (high != 0 && ! int_fits_type_p (high, arg0_type)))
            break;

          n_low = low, n_high = high;

          if (n_low != 0)
            n_low = fold_convert (arg0_type, n_low);

          if (n_high != 0)
            n_high = fold_convert (arg0_type, n_high);

          /* If we're converting arg0 from an unsigned type, to exp,
             a signed type, we will be doing the comparison as unsigned.
             The tests above have already verified that LOW and HIGH
             are both positive.

             So we have to ensure that we will handle large unsigned
             values the same way that the current signed bounds treat
             negative values.  */

          if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
            {
              tree high_positive;
              tree equiv_type = lang_hooks.types.type_for_mode
                (TYPE_MODE (arg0_type), 1);

              /* A range without an upper bound is, naturally, unbounded.
                 Since convert would have cropped a very large value, use
                 the max value for the destination type.  */
              high_positive
                = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
                : TYPE_MAX_VALUE (arg0_type);

              if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
                high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
                                             fold_convert (arg0_type,
                                                           high_positive),
                                             fold_convert (arg0_type,
                                                           integer_one_node));

              /* If the low bound is specified, "and" the range with the
                 range for which the original unsigned value will be
                 positive.  */
              if (low != 0)
                {
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      1, n_low, n_high, 1,
                                      fold_convert (arg0_type,
                                                    integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (n_in_p == in_p);
                }
              else
                {
                  /* Otherwise, "or" the range with the range of the input
                     that will be interpreted as negative.  */
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      0, n_low, n_high, 1,
                                      fold_convert (arg0_type,
                                                    integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (in_p != n_in_p);
                }
            }

          exp = arg0;
          low = n_low, high = n_high;
          continue;

        default:
          break;
        }

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
                                                 exp, 0, low, 0))
                      && integer_onep (range_binop (LE_EXPR, integer_type_node,
                                                    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
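
/* For exposition (not part of the original source): make_range on the
   tree "X > 10" leaves IN_P at 0 and sets the bounds to [-, 10], i.e.
   the "- [-, 10]" range of the notation above: X is outside the range
   from the type minimum to 10.  */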
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

static tree
build_range_check (tree type, tree exp, int in_p, tree low, tree high)
{
  tree etype = TREE_TYPE (exp);
  tree value;

  if (! in_p)
    {
      value = build_range_check (type, exp, 1, low, high);
      if (value != 0)
        return invert_truthvalue (value);

      return 0;
    }

  if (low == 0 && high == 0)
    return fold_convert (type, integer_one_node);

  if (low == 0)
    return fold_build2 (LE_EXPR, type, exp, high);

  if (high == 0)
    return fold_build2 (GE_EXPR, type, exp, low);

  if (operand_equal_p (low, high, 0))
    return fold_build2 (EQ_EXPR, type, exp, low);

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
        {
          etype = lang_hooks.types.unsigned_type (etype);
          high = fold_convert (etype, high);
          exp = fold_convert (etype, exp);
        }
      return build_range_check (type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
        {
          hi = 0;
          lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
        }
      else
        {
          hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
          lo = (unsigned HOST_WIDE_INT) -1;
        }

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
        {
          if (TYPE_UNSIGNED (etype))
            {
              etype = lang_hooks.types.signed_type (etype);
              exp = fold_convert (etype, exp);
            }
          return fold_build2 (GT_EXPR, type, exp,
                              fold_convert (etype, integer_zero_node));
        }
    }

  value = const_binop (MINUS_EXPR, high, low, 0);
  if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
         for the type in question, as we rely on this here.  */
      switch (TREE_CODE (etype))
        {
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case CHAR_TYPE:
          utype = lang_hooks.types.unsigned_type (etype);
          maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
          maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
                              integer_one_node, 1);
          minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
          if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
                                          minv, 1, maxv, 1)))
            {
              etype = utype;
              high = fold_convert (etype, high);
              low = fold_convert (etype, low);
              exp = fold_convert (etype, exp);
              value = const_binop (MINUS_EXPR, high, low, 0);
            }
          break;
        default:
          break;
        }
    }

  if (value != 0 && ! TREE_OVERFLOW (value))
    return build_range_check (type,
                              fold_build2 (MINUS_EXPR, etype, exp, low),
                              1, fold_convert (etype, integer_zero_node),
                              value);

  return 0;
}
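
/* For exposition (not part of the original source):

       build_range_check (type, X, 1, 2, 5)

   subtracts the low bound and tests against the width, producing the
   unsigned form "(unsigned) (X - 2) <= 3" promised at the top of this
   group of functions.  */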
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

static int
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
              tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
                  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
                   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
                                 low0, 0, low1, 0))
      || (lowequal
          && integer_onep (range_binop (GT_EXPR, integer_type_node,
                                        high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
                                          high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
                                      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
         is a subset it is the result.  Otherwise, the range is from the start
         of the second to the end of the first.  */
      if (no_overlap)
        in_p = 0, low = high = 0;
      else if (subset)
        in_p = 1, low = low1, high = high1;
      else
        in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
         equal, the result is false.  If the second range is a subset of the
         first, and the ranges begin at the same place, we go from just after
         the end of the first range to the end of the second.  If the second
         range is not a subset of the first, or if it is a subset and both
         ranges end at the same place, the range starts at the start of the
         first range and ends just before the second range.
         Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
        in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
        in_p = 0, low = high = 0;
      else if (subset && lowequal)
        {
          in_p = 1, high = high0;
          low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
                             integer_one_node, 0);
        }
      else if (! subset || highequal)
        {
          in_p = 1, low = low0;
          high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
                              integer_one_node, 0);
        }
      else
        return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
         is a subset of the first, the result is false.  Otherwise,
         the range starts just after the first range and ends at the
         end of the second.  */
      if (no_overlap)
        in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
        in_p = 0, low = high = 0;
      else
        {
          in_p = 1, high = high1;
          low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
                             integer_one_node, 0);
        }
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
         is if they don't overlap.  In that case, the only time we have a
         range is if they are adjacent.  If the second is a subset of the
         first, the result is the first.  Otherwise, the range to exclude
         starts at the beginning of the first range and ends at the end of the
         second.  */
      if (no_overlap)
        {
          if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                         range_binop (PLUS_EXPR, NULL_TREE,
                                                      high0, 1,
                                                      integer_one_node, 1),
                                         1, low1, 0)))
            in_p = 0, low = low0, high = high1;
          else
            {
              /* Canonicalize - [min, x] into - [-, x].  */
              if (low0 && TREE_CODE (low0) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (low0)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (low0))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                  case CHAR_TYPE:
                    if (tree_int_cst_equal (low0,
                                            TYPE_MIN_VALUE (TREE_TYPE (low0))))
                      low0 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (low0))
                        && integer_zerop (low0))
                      low0 = 0;
                    break;
                  default:
                    break;
                  }

              /* Canonicalize - [x, max] into - [x, -].  */
              if (high1 && TREE_CODE (high1) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (high1)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (high1))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                  case CHAR_TYPE:
                    if (tree_int_cst_equal (high1,
                                            TYPE_MAX_VALUE (TREE_TYPE (high1))))
                      high1 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (high1))
                        && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
                                                       high1, 1,
                                                       integer_one_node, 1)))
                      high1 = 0;
                    break;
                  default:
                    break;
                  }

              /* The ranges might be also adjacent between the maximum and
                 minimum values of the given type.  For
                 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
                 return + [x + 1, y - 1].  */
              if (low0 == 0 && high1 == 0)
                {
                  low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
                                     integer_one_node, 1);
                  high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
                                      integer_one_node, 0);
                  if (low == 0 || high == 0)
                    return 0;

                  in_p = 1;
                }
              else
                return 0;
            }
        }
      else if (subset)
        in_p = 0, low = low0, high = high0;
      else
        in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
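
/* For exposition (not part of the original source): merging the two
   inclusive ranges + [2, 5] and + [4, 9] takes the "overlap" branch
   and produces + [4, 5], their intersection; + [2, 5] and + [7, 9]
   do not overlap, so the result is the always-false range.  */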
4228 /* Subroutine of fold, looking inside expressions of the form
4229 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4230 of the COND_EXPR. This function is being used also to optimize
4231 A op B ? C : A, by reversing the comparison first.
4233 Return a folded expression whose code is not a COND_EXPR
4234 anymore, or NULL_TREE if no folding opportunity is found. */
4237 fold_cond_expr_with_comparison (tree type
, tree arg0
, tree arg1
, tree arg2
)
4239 enum tree_code comp_code
= TREE_CODE (arg0
);
4240 tree arg00
= TREE_OPERAND (arg0
, 0);
4241 tree arg01
= TREE_OPERAND (arg0
, 1);
4242 tree arg1_type
= TREE_TYPE (arg1
);
4248 /* If we have A op 0 ? A : -A, consider applying the following
4251 A == 0? A : -A same as -A
4252 A != 0? A : -A same as A
4253 A >= 0? A : -A same as abs (A)
4254 A > 0? A : -A same as abs (A)
4255 A <= 0? A : -A same as -abs (A)
4256 A < 0? A : -A same as -abs (A)
4258 None of these transformations work for modes with signed
4259 zeros. If A is +/-0, the first two transformations will
4260 change the sign of the result (from +0 to -0, or vice
4261 versa). The last four will fix the sign of the result,
4262 even though the original expressions could be positive or
4263 negative, depending on the sign of A.
4265 Note that all these transformations are correct if A is
4266 NaN, since the two alternatives (A and -A) are also NaNs. */
  if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
       ? real_zerop (arg01)
       : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	  /* In the case that A is of the form X-Y, '-A' (arg2) may
	     have already been folded to Y-X, check for that.  */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert (arg1_type, arg1);
	return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
	return pedantic_non_lvalue (fold_convert (type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert (lang_hooks.types.signed_type
			       (TREE_TYPE (arg1)), arg1);
	tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue (fold_convert (type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert (lang_hooks.types.signed_type
			       (TREE_TYPE (arg1)), arg1);
	tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert (type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }
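
/* Illustrative spot-check of the ABS_EXPR rewrites above; the range and
   the use of plain int are hypothetical.  Not compiled into GCC.  */
#if 0
#include <assert.h>
#include <stdlib.h>
int
main (void)
{
  int x;
  for (x = -1000; x <= 1000; x++)
    {
      assert ((x >= 0 ? x : -x) == abs (x));    /* A >= 0 ? A : -A */
      assert ((x <= 0 ? x : -x) == -abs (x));   /* A <= 0 ? A : -A */
    }
  return 0;
}
#endif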
  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue (fold_convert (type, arg1));
      else if (comp_code == EQ_EXPR)
	return fold_convert (type, integer_zero_node);
    }
  /* Try some transformations of A op B ? A : B.

     A == B ? A : B    same as B
     A != B ? A : B    same as A
     A >= B ? A : B    same as max (A, B)
     A >  B ? A : B    same as max (B, A)
     A <= B ? A : B    same as min (A, B)
     A <  B ? A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || strcmp (lang_hooks.name, "GNU C++") != 0
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	{
	  comp_type = type;
	  comp_op0 = arg1, comp_op1 = arg2;
	}

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue (fold_convert (type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue (fold_convert (type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert (comp_type, comp_op0);
	      comp_op1 = fold_convert (comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
	      return pedantic_non_lvalue (fold_convert (type, tem));
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert (comp_type, comp_op0);
	      comp_op1 = fold_convert (comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
	      return pedantic_non_lvalue (fold_convert (type, tem));
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue (fold_convert (type, arg2));
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue (fold_convert (type, arg1));
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }
  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert (type, arg01);
	return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     integer_one_node, 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
						   type, arg1, arg2));
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     integer_one_node, 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
						   type, arg1, arg2));
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     integer_one_node, 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
						   type, arg1, arg2));
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     integer_one_node, 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
						   type, arg1, arg2));
	break;

      case NE_EXPR:
	break;

      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
#endif
/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  tree lhs = make_range (op0, &in0_p, &low0, &high0);
  tree rhs = make_range (op1, &in1_p, &low1, &high1);
  tree tem;

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (type,
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    return or_op ? invert_truthvalue (tem) : tem;

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (code == TRUTH_ANDIF_EXPR
	       || code == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2 (code == TRUTH_ANDIF_EXPR
		       ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
		       type, op0, op1);

      else if (lang_hooks.decls.global_bindings_p () == 0
	       && ! CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (type, common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (type, common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    return build2 (code == TRUTH_ANDIF_EXPR
			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			   type, lhs, rhs);
	}
    }

  return 0;
}
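
/* Illustrative sketch of the single range test this produces for
   "ch >= '0' && ch <= '9'": one unsigned comparison after biasing.
   This shows the classic shape of a built range check; the exact tree
   built by build_range_check may differ in detail.  */
#if 0
#include <assert.h>
int
main (void)
{
  int ch;
  for (ch = -300; ch <= 300; ch++)
    assert (((ch >= '0') && (ch <= '9'))
	    == ((unsigned) (ch - '0') <= 9u));
  return 0;
}
#endif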
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (lang_hooks.types.signed_type (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask), 0);
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
}
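
/* Standalone sanity check of the shift sequence above, on a hypothetical
   P = 5 field.  Assumes 32-bit int, two's complement, and arithmetic
   right shift of negative values.  Not compiled into GCC.  */
#if 0
#include <assert.h>
int
main (void)
{
  const int p = 5, modesize = 32;
  int c = 0x13;                 /* 5-bit field 10011, sign bit set */
  int t = (c >> (p - 1)) & 1;   /* sign bit into the low-order bit */
  t = (int) ((unsigned) t << (modesize - 1));   /* into the high bit */
  t >>= modesize - p - 1;       /* arithmetic shift: bits P..31 now set */
  assert ((c ^ t) == -13);      /* XOR produces the sign-extended value */
  return 0;
}
#endif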
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by
   this function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  int first_bit, end_bit;
  int volatilep;
  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    fold_convert (TREE_TYPE (lhs), integer_zero_node));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    fold_convert (TREE_TYPE (rhs), integer_zero_node));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }
  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  Avoid doing this if the RHS is a floating-point
     comparison since those can trap.  */

  if (BRANCH_COST >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
	return build2 (NE_EXPR, truth_type,
		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			       ll_arg, rl_arg),
		       fold_convert (TREE_TYPE (ll_arg), integer_zero_node));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
	return build2 (EQ_EXPR, truth_type,
		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			       ll_arg, rl_arg),
		       fold_convert (TREE_TYPE (ll_arg), integer_zero_node));

      if (LOGICAL_OP_NON_SHORT_CIRCUIT)
	return build2 (code, truth_type, lhs, rhs);
    }
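
/* Illustrative spot-check of the two BIT_IOR_EXPR rewrites above.  */
#if 0
#include <assert.h>
int
main (void)
{
  int a, b;
  for (a = -2; a <= 2; a++)
    for (b = -2; b <= 2; b++)
      {
	assert (((a != 0) || (b != 0)) == ((a | b) != 0));
	assert (((a == 0) && (b == 0)) == ((a | b) == 0));
      }
  return 0;
}
#endif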
  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;
  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* After this point all optimizations will generate bit-field
     references, which we might not want.  */
  if (! lang_hooks.can_use_bit_fields_p ())
    return 0;
  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
			 size_int (xll_bitpos), 0);
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
			 size_int (xrl_bitpos), 0);

  if (l_const)
    {
      l_const = fold_convert (lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1 (BIT_NOT_EXPR,
						     lntype, ll_mask),
					0)))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert (lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1 (BIT_NOT_EXPR,
						     lntype, rl_mask),
					0)))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
			     size_int (xlr_bitpos), 0);
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
			     size_int (xrr_bitpos), 0);
      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2 (wanted_code, truth_type, lhs, rhs);
	}
      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert (rntype, lhs);
		  ll_mask = fold_convert (rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert (lntype, rhs);
		  lr_mask = fold_convert (lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2 (wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }
  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
			   const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);

  return build2 (wanted_code, truth_type, result,
		 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
}
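
/* Illustrative sketch of the merged comparison: two adjacent byte-wide
   "fields" in one 32-bit word can be tested by a single masked compare.
   The layout and constants are hypothetical (little-endian order).  */
#if 0
#include <assert.h>
#include <stdint.h>
int
main (void)
{
  unsigned a, b;
  for (a = 0; a < 8; a++)
    for (b = 0; b < 8; b++)
      {
	uint32_t word = a | (b << 8);   /* p->a and p->b side by side */
	assert (((word & 0xFFFFu) == 0x0402u) == (a == 2 && b == 4));
      }
  return 0;
}
#endif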
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const = op1;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (minmax_const))
    return NULL_TREE;
  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
	/* FIXME: We should be able to invert code without building a
	   scratch tree node, but doing so would require us to
	   duplicate a part of invert_truthvalue here.  */
	tree tem = invert_truthvalue (build2 (code, type, op0, op1));
	tem = optimize_minmax_comparison (TREE_CODE (tem),
					  TREE_TYPE (tem),
					  TREE_OPERAND (tem, 0),
					  TREE_OPERAND (tem, 1));
	return invert_truthvalue (tem);
      }

    case GE_EXPR:
      return
	fold_build2 (TRUTH_ORIF_EXPR, type,
		     optimize_minmax_comparison
		     (EQ_EXPR, type, arg0, comp_const),
		     optimize_minmax_comparison
		     (GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold_build2 (LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5  */
	return fold_build2 (EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold_build2 (GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold_build2 (EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold_build2 (GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand (type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold_build2 (GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
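
/* Spot-check of a few of the MIN/MAX comparison rewrites above.  */
#if 0
#include <assert.h>
#define XMAX(a, b) ((a) > (b) ? (a) : (b))
#define XMIN(a, b) ((a) < (b) ? (a) : (b))
int
main (void)
{
  int x;
  for (x = -10; x <= 10; x++)
    {
      assert ((XMAX (x, 0) == 0) == (x <= 0));   /* MAX (X, 0) == 0 */
      assert ((XMAX (x, 0) > 5) == (x > 5));     /* MAX (X, 0) > 5  */
      assert ((XMIN (x, 0) > -1) == (x > -1));   /* MIN (X, 0) > -1 */
    }
  return 0;
}
#endif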
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
   the machine has a multiply-accumulate insn or that this is part of an
   addressing calculation.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.  */
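
/* Numeric spot-check of the two examples in the comment above; the
   ranges are chosen so that no intermediate product overflows.  */
#if 0
#include <assert.h>
int
main (void)
{
  int x, y;
  for (x = -50; x <= 50; x++)
    for (y = -50; y <= 50; y++)
      {
	assert ((x * 8 + y * 16) / 4 == x * 2 + y * 4);
	assert ((x + 7) * 4 == x * 4 + 28);
      }
  return 0;
}
#endif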
static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL_TREE;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type);
  depth--;

  return ret;
}
static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c), 0);
      break;
    case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
      /* If op0 is an expression ...  */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and is unsigned, and its type is smaller than ctype,
	     then we cannot pass through as widening.  */
	  && ((TYPE_UNSIGNED (TREE_TYPE (op0))
	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
	       && (GET_MODE_SIZE (TYPE_MODE (ctype))
		   > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (GET_MODE_SIZE (TYPE_MODE (type))
		  < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && ! TREE_CONSTANT_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE))))
	return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
	{
	  tree cstype = (*lang_hooks.types.signed_type) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
	    {
	      t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;
    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
	{
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);

	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node,
						   op1, 0)))
	  && ! TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype, fold_convert (ctype, op0), t1),
			       c, code, wide_type);
      break;
    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      t1 = extract_muldiv (op0, c, code, wide_type);
      t2 = extract_muldiv (op1, c, code, wide_type);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, t2));

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	tcode = PLUS_EXPR, op1 = negate_expr (op1);

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c), 0);
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && ! flag_wrapv))
	    break;
	}
      else
	break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
	 the operation since it will change the result if the original
	 computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
	  && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;
    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	return omit_one_operand (type, integer_zero_node, op0);

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code
	  && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
				     fold_convert (ctype, c), 0))
	  && ! TREE_OVERFLOW (t1))
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either an
	 operation or CODE or TCODE.

	 If we have an unsigned type that is not a sizetype, we cannot do
	 this since it will change the result if the original computation
	 overflowed.  */
      if ((! TYPE_UNSIGNED (ctype)
	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
	  && ! flag_wrapv
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				fold_convert (ctype,
					      const_binop (TRUNC_DIV_EXPR,
							   op1, c, 0)));
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
	    return fold_build2 (code, ctype, fold_convert (ctype, op0),
				fold_convert (ctype,
					      const_binop (TRUNC_DIV_EXPR,
							   c, op1, 0)));
	}
      break;

    default:
      break;
    }

  return 0;
}
/* Return a node which has the indicated constant VALUE (either 0 or
   1), and is of the indicated TYPE.  */

tree
constant_boolean_node (int value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else
    return build_int_cst (type, value);
}
/* Return true if expr looks like an ARRAY_REF and set base and
   offset to the appropriate trees.  If there is no offset,
   offset is set to NULL_TREE.  Base will be canonicalized to
   something you can get the element type from using
   TREE_TYPE (TREE_TYPE (base)).  */

static bool
extract_array_ref (tree expr, tree *base, tree *offset)
{
  /* One canonical form is a PLUS_EXPR with the first
     argument being an ADDR_EXPR with a possible NOP_EXPR
     attached.  */
  if (TREE_CODE (expr) == PLUS_EXPR)
    {
      tree op0 = TREE_OPERAND (expr, 0);
      tree inner_base, dummy1;
      /* Strip NOP_EXPRs here because the C frontends and/or
	 folders present us (int *)&x.a + 4B possibly.  */
      STRIP_NOPS (op0);
      if (extract_array_ref (op0, &inner_base, &dummy1))
	{
	  *base = inner_base;
	  if (dummy1 == NULL_TREE)
	    *offset = TREE_OPERAND (expr, 1);
	  else
	    *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
				   dummy1, TREE_OPERAND (expr, 1));
	  return true;
	}
    }
  /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
     which we transform into an ADDR_EXPR with appropriate
     offset.  For other arguments to the ADDR_EXPR we assume
     zero offset and as such do not care about the ADDR_EXPR
     type and strip possible nops from it.  */
  else if (TREE_CODE (expr) == ADDR_EXPR)
    {
      tree op0 = TREE_OPERAND (expr, 0);
      if (TREE_CODE (op0) == ARRAY_REF)
	{
	  *base = TREE_OPERAND (op0, 0);
	  *offset = TREE_OPERAND (op0, 1);
	}
      else
	{
	  /* Handle array-to-pointer decay as &a.  */
	  if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
	    *base = TREE_OPERAND (expr, 0);
	  else
	    *base = expr;
	  *offset = NULL_TREE;
	}
      return true;
    }
  /* The next canonical form is a VAR_DECL with POINTER_TYPE.  */
  else if (SSA_VAR_P (expr)
	   && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
    {
      *base = expr;
      *offset = NULL_TREE;
      return true;
    }

  return false;
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;

  /* This transformation is only worthwhile if we don't have to wrap
     arg in a SAVE_EXPR, and the operation can be simplified on at least
     one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg))
    return NULL_TREE;

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an expression, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  arg = fold_convert (arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert (cond_type, true_value);
      if (cond_first_p)
	lhs = fold_build2 (code, type, true_value, arg);
      else
	lhs = fold_build2 (code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert (cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2 (code, type, false_value, arg);
      else
	rhs = fold_build2 (code, type, arg, false_value);
    }

  test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
  return fold_convert (type, test);
}
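
/* Spot-check of the distribution performed above (hypothetical values).  */
#if 0
#include <assert.h>
int
main (void)
{
  int a, b;
  for (a = -3; a <= 3; a++)
    for (b = 0; b <= 1; b++)
      assert (a + (b ? 7 : 9) == (b ? a + 7 : a + 9));
  return 0;
}
#endif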
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (tree type, tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
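
/* Standalone demonstration of why the sign of zero matters here: in the
   default rounding mode -0.0 + 0.0 is +0.0, so dropping the "+ 0.0"
   would change the sign bit of a zero result.  */
#if 0
#include <assert.h>
#include <math.h>
int
main (void)
{
  double x = -0.0;
  assert (signbit (x));
  assert (!signbit (x + 0.0));
  return 0;
}
#endif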
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) < y is always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand (type, integer_zero_node, arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand (type, integer_one_node, arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold_build2 (GE_EXPR, type, arg,
			      build_real (TREE_TYPE (arg), dconst0));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold_build2 (EQ_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand (type, integer_zero_node, arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold_build2 (code, type, arg,
			      build_real (TREE_TYPE (arg), c2));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand (type, integer_one_node, arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold_build2 (NE_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold_build2 (GE_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), dconst0));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
	      if (lang_hooks.decls.global_bindings_p () != 0
		  || CONTAINS_PLACEHOLDER_P (arg))
		return NULL_TREE;

	      arg = save_expr (arg);
	      return fold_build2 (TRUTH_ANDIF_EXPR, type,
				  fold_build2 (GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0)),
				  fold_build2 (NE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold_build2 (code, type, arg,
				build_real (TREE_TYPE (arg), c2));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  if (lang_hooks.decls.global_bindings_p () == 0
	      && ! CONTAINS_PLACEHOLDER_P (arg))
	    {
	      arg = save_expr (arg);
	      return fold_build2 (TRUTH_ANDIF_EXPR, type,
				  fold_build2 (GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0)),
				  fold_build2 (code, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)));
	    }
	}
    }

  return NULL_TREE;
}
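
/* Spot-check of the sqrt comparison rewrite above (NaNs ignored, c*c
   finite; sample points are exact binary fractions, so the comparison
   is unambiguous at every tested x).  */
#if 0
#include <assert.h>
#include <math.h>
int
main (void)
{
  const double c = 3.0;
  double x;
  for (x = 0.0; x <= 20.0; x += 0.25)
    assert ((sqrt (x) < c) == (x < c * c));
  return 0;
}
#endif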
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand (type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand (type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      if (lang_hooks.decls.global_bindings_p () == 0
	  && ! CONTAINS_PLACEHOLDER_P (arg0))
	{
	  arg0 = save_expr (arg0);
	  return fold_build2 (EQ_EXPR, type, arg0, arg0);
	}
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
			    arg0, build_real (TREE_TYPE (arg0), max));

      /* The transformation below creates non-gimple code and thus is
	 not appropriate if we are in gimple form.  */
      if (in_gimple_form)
	return NULL_TREE;

      temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1 (TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  unsigned HOST_WIDE_INT lpart;
  HOST_WIDE_INT hpart;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
  overflow = mul_double (TREE_INT_CST_LOW (arg01),
			 TREE_INT_CST_HIGH (arg01),
			 TREE_INT_CST_LOW (arg1),
			 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
  prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
  prod = force_fit_type (prod, -1, overflow, false);

  if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
    {
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
      overflow = add_double (TREE_INT_CST_LOW (prod),
			     TREE_INT_CST_HIGH (prod),
			     TREE_INT_CST_LOW (tmp),
			     TREE_INT_CST_HIGH (tmp),
			     &lpart, &hpart);
      hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
      hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
			   TREE_CONSTANT_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	case  0:
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case  1:
	  hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	case  0:
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case  1:
	  lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2 (GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2 (LE_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2 (LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2 (GT_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	return omit_one_operand (type, integer_zero_node, arg00);
      return fold_build2 (LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_one_node, arg00);
      return fold_build2 (LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_zero_node, arg00);
      return fold_build2 (GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	return omit_one_operand (type, integer_one_node, arg00);
      return fold_build2 (GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
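
/* Spot-check of the rewrite for X/C1 op C2 with hypothetical C1 = 4,
   C2 = 5: C truncating division gives x / 4 == 5 exactly on [20, 23].  */
#if 0
#include <assert.h>
int
main (void)
{
  int x;
  for (x = -100; x <= 100; x++)
    assert ((x / 4 == 5) == (20 <= x && x <= 23));
  return 0;
}
#endif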
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
				     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
	  /* This is only a win if casting to a signed type is cheap,
	     i.e. when arg00's type is not a partial mode.  */
	  && TYPE_PRECISION (TREE_TYPE (arg00))
	     == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
	{
	  tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
	  return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
			      result_type, fold_convert (stype, arg00),
			      fold_convert (stype, integer_zero_node));
	}
    }

  return NULL_TREE;
}
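
/* Spot-check of the sign-bit rewrite: (A & C) != 0 with C the sign bit
   is A < 0 on the signed type.  Two's complement and an exact 32-bit
   type are assumed for this standalone sample.  */
#if 0
#include <assert.h>
#include <stdint.h>
int
main (void)
{
  const uint32_t a[] = { 0u, 1u, 0x7FFFFFFFu, 0x80000000u, 0xFFFFFFFFu };
  int i;
  for (i = 0; i < 5; i++)
    assert (((a[i] & 0x80000000u) != 0) == ((int32_t) a[i] < 0));
  return 0;
}
#endif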
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
                      tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem;

      /* First, see if we can fold the single bit test into a sign-bit
         test.  */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
                                                 result_type);
      if (tem)
        return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
         convert that into ((A >> C2) & 1).  Where C2 = log2(C).
         Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
                      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert (intermediate_type, inner);

      if (bitnum != 0)
        inner = build2 (RSHIFT_EXPR, intermediate_type,
                        inner, size_int (bitnum));

      if (code == EQ_EXPR)
        inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
                             inner, integer_one_node);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type,
                      inner, integer_one_node);

      /* Make sure to return the proper type.  */
      inner = fold_convert (result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
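/* Illustrative example (added commentary, not from the original
   sources): (X & 8) != 0 becomes ((unsigned) X >> 3) & 1, and
   (X & 8) == 0 becomes (((unsigned) X >> 3) ^ 1) & 1, with the AND
   kept outermost as noted above.  */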
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (tree arg0, tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
         && ! TREE_SIDE_EFFECTS (arg1);
}

/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAME to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  return 0;
}
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, shorter_type);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
          || (TREE_CODE (arg1_unw) == INTEGER_CST
              && (TREE_CODE (shorter_type) == INTEGER_TYPE
                  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
              && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2 (code, type, arg0_unw,
                        fold_convert (shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST)
    return NULL_TREE;

  /* If we are comparing with the integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
        return omit_one_operand (type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
        return omit_one_operand (type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
        return omit_one_operand (type, integer_one_node, arg0);
      else if (below)
        return omit_one_operand (type, integer_zero_node, arg0);
      break;

    case GT_EXPR:
    case GE_EXPR:
      if (above)
        return omit_one_operand (type, integer_zero_node, arg0);
      else if (below)
        return omit_one_operand (type, integer_one_node, arg0);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
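/* Illustrative example (added commentary, not from the original
   sources): if s has type short (16 bits), (int) s == 100000 can never
   hold, so it folds to constant zero, while (int) s < 100000 folds to
   constant one.  */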
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (enum tree_code code, tree type,
                              tree arg0, tree arg1)
{
  tree arg0_inner, tmp;
  tree inner_type, outer_type;

  if (TREE_CODE (arg0) != NOP_EXPR
      && TREE_CODE (arg0) != CONVERT_EXPR)
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !((TREE_CODE (arg1) == NOP_EXPR
            || TREE_CODE (arg1) == CONVERT_EXPR)
           && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      tmp = build_int_cst_wide (inner_type,
                                TREE_INT_CST_LOW (arg1),
                                TREE_INT_CST_HIGH (arg1));
      arg1 = force_fit_type (tmp, 0,
                             TREE_OVERFLOW (arg1),
                             TREE_CONSTANT_OVERFLOW (arg1));
    }
  else
    arg1 = fold_convert (inner_type, arg1);

  return fold_build2 (code, type, arg0_inner, arg1);
}
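/* Illustrative example (added commentary, not from the original
   sources): with int x, the equality (unsigned int) x == 5u changes
   only the signedness of x at equal precision, so it folds to x == 5
   performed in the inner type.  */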
/* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
   step of the array.  Reconstructs s and delta in the case of s * delta
   being an integer constant (and thus already folded).
   ADDR is the address.  OP1 is the multiplicative expression.
   If the function succeeds, the new address expression is returned.  Otherwise
   NULL_TREE is returned.  */

static tree
try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          s = arg0;
          delta = arg1;
        }
      else if (TREE_CODE (arg1) == INTEGER_CST)
        {
          s = arg1;
          delta = arg0;
        }
      else
        return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Pretend the expression is delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
        {
          itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
          if (! itype)
            continue;

          step = array_ref_element_size (ref);
          if (TREE_CODE (step) != INTEGER_CST)
            continue;

          if (s)
            {
              if (! tree_int_cst_equal (step, s))
                continue;
            }
          else
            {
              /* Try if delta is a multiple of step.  */
              tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
              if (! tmp)
                continue;
              delta = tmp;
            }

          break;
        }

      if (!handled_component_p (ref))
        return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
                                       fold_convert (itype,
                                                     TREE_OPERAND (pos, 1)),
                                       fold_convert (itype, delta));

  return build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
}
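/* Illustrative example (added commentary, not from the original
   sources): for int a[N] with 4-byte elements, &a[i] + j * 4 is
   rewritten as &a[i + j], and a constant offset such as &a[i] + 8
   takes the "delta * 1" path and becomes &a[i + 2].  */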
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  diff = fold_build2 (MINUS_EXPR, typea, a1, a);
  if (!integer_onep (diff))
    return NULL_TREE;

  return fold_build2 (GE_EXPR, type, a, y);
}
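/* Illustrative example (added commentary, not from the original
   sources): with BOUND being a < x and INEQ being y < a + 1, the
   computed diff is 1, so the result is the non-sharp inequality
   a >= y.  */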
/* Fold complex addition when both components are accessible by parts.
   Return non-null if successful.  CODE should be PLUS_EXPR for addition,
   or MINUS_EXPR for subtraction.  */

static tree
fold_complex_add (tree type, tree ac, tree bc, enum tree_code code)
{
  tree ar, ai, br, bi, rr, ri, inner_type;

  if (TREE_CODE (ac) == COMPLEX_EXPR)
    ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
  else if (TREE_CODE (ac) == COMPLEX_CST)
    ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
  else
    return NULL_TREE;

  if (TREE_CODE (bc) == COMPLEX_EXPR)
    br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
  else if (TREE_CODE (bc) == COMPLEX_CST)
    br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
  else
    return NULL_TREE;

  inner_type = TREE_TYPE (type);

  rr = fold_build2 (code, inner_type, ar, br);
  ri = fold_build2 (code, inner_type, ai, bi);

  return fold_build2 (COMPLEX_EXPR, type, rr, ri);
}
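/* For reference (added commentary, not from the original sources):
   (ar + ai*i) +- (br + bi*i) == (ar +- br) + (ai +- bi)*i, which is
   exactly what the two fold_build2 calls above compute by parts.  */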
/* Perform some simplifications of complex multiplication when one or more
   of the components are constants or zeros.  Return non-null if successful.  */

static tree
fold_complex_mult_parts (tree type, tree ar, tree ai, tree br, tree bi)
{
  tree rr, ri, inner_type, zero;
  bool ar0, ai0, br0, bi0, bi1;

  inner_type = TREE_TYPE (type);
  zero = NULL_TREE;

  if (SCALAR_FLOAT_TYPE_P (inner_type))
    {
      ar0 = ai0 = br0 = bi0 = bi1 = false;

      /* We're only interested in +0.0 here, thus we don't use real_zerop.  */

      if (TREE_CODE (ar) == REAL_CST
          && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0))
        ar0 = true, zero = ar;

      if (TREE_CODE (ai) == REAL_CST
          && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0))
        ai0 = true, zero = ai;

      if (TREE_CODE (br) == REAL_CST
          && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0))
        br0 = true, zero = br;

      if (TREE_CODE (bi) == REAL_CST)
        {
          if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0))
            bi0 = true, zero = bi;
          else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1))
            bi1 = true;
        }
    }
  else
    {
      ar0 = integer_zerop (ar);
      if (ar0)
        zero = ar;
      ai0 = integer_zerop (ai);
      if (ai0)
        zero = ai;
      br0 = integer_zerop (br);
      if (br0)
        zero = br;
      bi0 = integer_zerop (bi);
      if (bi0)
        zero = bi;
      bi1 = integer_onep (bi);
    }

  /* We won't optimize anything below unless something is zero.  */
  if (zero == NULL_TREE)
    return NULL_TREE;

  if (ai0 && br0 && bi1)
    {
      rr = zero;
      ri = ar;
    }
  else if (ai0 && bi0)
    {
      rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
      ri = zero;
    }
  else if (ai0 && br0)
    {
      rr = zero;
      ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
    }
  else if (ar0 && bi0)
    {
      rr = zero;
      ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
    }
  else if (ar0 && br0)
    {
      rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
      rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
      ri = zero;
    }
  else if (bi0)
    {
      rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
      ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
    }
  else if (ai0)
    {
      rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
      ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
    }
  else if (br0)
    {
      rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
      rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
      ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
    }
  else if (ar0)
    {
      rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
      rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
      ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
    }
  else
    return NULL_TREE;

  return fold_build2 (COMPLEX_EXPR, type, rr, ri);
}
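/* For reference (added commentary, not from the original sources), the
   full product is
     (ar + ai*i) * (br + bi*i) == (ar*br - ai*bi) + (ar*bi + ai*br)*i;
   every branch above is this identity specialized to the known zero
   (or one) components.  */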
static tree
fold_complex_mult (tree type, tree ac, tree bc)
{
  tree ar, ai, br, bi;

  if (TREE_CODE (ac) == COMPLEX_EXPR)
    ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
  else if (TREE_CODE (ac) == COMPLEX_CST)
    ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
  else
    return NULL_TREE;

  if (TREE_CODE (bc) == COMPLEX_EXPR)
    br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
  else if (TREE_CODE (bc) == COMPLEX_CST)
    br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
  else
    return NULL_TREE;

  return fold_complex_mult_parts (type, ar, ai, br, bi);
}
/* Perform some simplifications of complex division when one or more of
   the components are constants or zeros.  Return non-null if successful.  */

static tree
fold_complex_div_parts (tree type, tree ar, tree ai, tree br, tree bi,
                        enum tree_code code)
{
  tree rr, ri, inner_type, zero;
  bool ar0, ai0, br0, bi0, bi1;

  inner_type = TREE_TYPE (type);
  zero = NULL_TREE;

  if (SCALAR_FLOAT_TYPE_P (inner_type))
    {
      ar0 = ai0 = br0 = bi0 = bi1 = false;

      /* We're only interested in +0.0 here, thus we don't use real_zerop.  */

      if (TREE_CODE (ar) == REAL_CST
          && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0))
        ar0 = true, zero = ar;

      if (TREE_CODE (ai) == REAL_CST
          && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0))
        ai0 = true, zero = ai;

      if (TREE_CODE (br) == REAL_CST
          && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0))
        br0 = true, zero = br;

      if (TREE_CODE (bi) == REAL_CST)
        {
          if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0))
            bi0 = true, zero = bi;
          else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1))
            bi1 = true;
        }
    }
  else
    {
      ar0 = integer_zerop (ar);
      if (ar0)
        zero = ar;
      ai0 = integer_zerop (ai);
      if (ai0)
        zero = ai;
      br0 = integer_zerop (br);
      if (br0)
        zero = br;
      bi0 = integer_zerop (bi);
      if (bi0)
        zero = bi;
      bi1 = integer_onep (bi);
    }

  /* We won't optimize anything below unless something is zero.  */
  if (zero == NULL_TREE)
    return NULL_TREE;

  if (ai0 && bi0)
    {
      rr = fold_build2 (code, inner_type, ar, br);
      ri = zero;
    }
  else if (ai0 && br0)
    {
      rr = zero;
      ri = fold_build2 (code, inner_type, ar, bi);
      ri = fold_build1 (NEGATE_EXPR, inner_type, ri);
    }
  else if (ar0 && bi0)
    {
      rr = zero;
      ri = fold_build2 (code, inner_type, ai, br);
    }
  else if (ar0 && br0)
    {
      rr = fold_build2 (code, inner_type, ai, bi);
      ri = zero;
    }
  else if (bi0)
    {
      rr = fold_build2 (code, inner_type, ar, br);
      ri = fold_build2 (code, inner_type, ai, br);
    }
  else if (br0)
    {
      rr = fold_build2 (code, inner_type, ai, bi);
      ri = fold_build2 (code, inner_type, ar, bi);
      ri = fold_build1 (NEGATE_EXPR, inner_type, ri);
    }
  else
    return NULL_TREE;

  return fold_build2 (COMPLEX_EXPR, type, rr, ri);
}
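/* For reference (added commentary, not from the original sources), full
   complex division is
     ((ar*br + ai*bi) + (ai*br - ar*bi)*i) / (br*br + bi*bi);
   the branches above only handle the degenerate cases where one side is
   purely real or purely imaginary, so no denominator sum is needed.  */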
static tree
fold_complex_div (tree type, tree ac, tree bc, enum tree_code code)
{
  tree ar, ai, br, bi;

  if (TREE_CODE (ac) == COMPLEX_EXPR)
    ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
  else if (TREE_CODE (ac) == COMPLEX_CST)
    ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
  else
    return NULL_TREE;

  if (TREE_CODE (bc) == COMPLEX_EXPR)
    br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
  else if (TREE_CODE (bc) == COMPLEX_CST)
    br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
  else
    return NULL_TREE;

  return fold_complex_div_parts (type, ar, ai, br, bi, code);
}
/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */

tree
fold_unary (enum tree_code code, tree type, tree op0)
{
  tree tem;
  tree arg0 = op0;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 1);

  if (arg0)
    {
      if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
        {
          /* Don't use STRIP_NOPS, because signedness of argument type matters.  */
          STRIP_SIGN_NOPS (arg0);
        }
      else
        {
          /* Strip any conversions that don't change the mode.  This
             is safe for every expression, except for a comparison
             expression because its signedness is derived from its
             operand.  So, in the latter case, only strip conversions
             that don't change the signedness.

             Note that this is done as an internal manipulation within
             the constant folder, in order to find the simplest
             representation of the arguments so that their form can be
             studied.  In any case, the appropriate type conversions
             should be put back in the tree that will get out of the
             constant folder.  */
          STRIP_NOPS (arg0);
        }
    }
  if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                       fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == COND_EXPR)
        {
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg02 = TREE_OPERAND (arg0, 2);
          if (! VOID_TYPE_P (TREE_TYPE (arg01)))
            arg01 = fold_build1 (code, type, arg01);
          if (! VOID_TYPE_P (TREE_TYPE (arg02)))
            arg02 = fold_build1 (code, type, arg02);
          tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
                             arg01, arg02);

          /* If this was a conversion, and all we did was to move into
             inside the COND_EXPR, bring it back out.  But leave it if
             it is a conversion from integer to integer and the
             result precision is no wider than a word since such a
             conversion is cheap and may be optimized away by combine,
             while it couldn't if it were outside the COND_EXPR.  Then return
             so we don't get into an infinite recursion loop taking the
             conversion out and then back in.  */

          if ((code == NOP_EXPR || code == CONVERT_EXPR
               || code == NON_LVALUE_EXPR)
              && TREE_CODE (tem) == COND_EXPR
              && TREE_CODE (TREE_OPERAND (tem, 1)) == code
              && TREE_CODE (TREE_OPERAND (tem, 2)) == code
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
              && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
                  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
              && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
                     && (INTEGRAL_TYPE_P
                         (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
                     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
                  || flag_syntax_only))
            tem = build1 (code, type,
                          build3 (COND_EXPR,
                                  TREE_TYPE (TREE_OPERAND
                                             (TREE_OPERAND (tem, 1), 0)),
                                  TREE_OPERAND (tem, 0),
                                  TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
                                  TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
          return tem;
        }
      else if (COMPARISON_CLASS_P (arg0))
        {
          if (TREE_CODE (type) == BOOLEAN_TYPE)
            {
              arg0 = copy_node (arg0);
              TREE_TYPE (arg0) = type;
              return arg0;
            }
          else if (TREE_CODE (type) != INTEGER_TYPE)
            return fold_build3 (COND_EXPR, type, arg0,
                                fold_build1 (code, type,
                                             integer_one_node),
                                fold_build1 (code, type,
                                             integer_zero_node));
        }
    }

  switch (code)
    {
    case NOP_EXPR:
    case FLOAT_EXPR:
    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:
      if (TREE_TYPE (op0) == type)
        return op0;
      /* Handle cases of two conversions in a row.  */
      if (TREE_CODE (op0) == NOP_EXPR
          || TREE_CODE (op0) == CONVERT_EXPR)
        {
          tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
          tree inter_type = TREE_TYPE (op0);
          int inside_int = INTEGRAL_TYPE_P (inside_type);
          int inside_ptr = POINTER_TYPE_P (inside_type);
          int inside_float = FLOAT_TYPE_P (inside_type);
          int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
          unsigned int inside_prec = TYPE_PRECISION (inside_type);
          int inside_unsignedp = TYPE_UNSIGNED (inside_type);
          int inter_int = INTEGRAL_TYPE_P (inter_type);
          int inter_ptr = POINTER_TYPE_P (inter_type);
          int inter_float = FLOAT_TYPE_P (inter_type);
          int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
          unsigned int inter_prec = TYPE_PRECISION (inter_type);
          int inter_unsignedp = TYPE_UNSIGNED (inter_type);
          int final_int = INTEGRAL_TYPE_P (type);
          int final_ptr = POINTER_TYPE_P (type);
          int final_float = FLOAT_TYPE_P (type);
          int final_vec = TREE_CODE (type) == VECTOR_TYPE;
          unsigned int final_prec = TYPE_PRECISION (type);
          int final_unsignedp = TYPE_UNSIGNED (type);

          /* In addition to the cases of two conversions in a row
             handled below, if we are converting something to its own
             type via an object of identical or wider precision, neither
             conversion is needed.  */
          if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
              && ((inter_int && final_int) || (inter_float && final_float))
              && inter_prec >= final_prec)
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));

          /* Likewise, if the intermediate and final types are either both
             float or both integer, we don't need the middle conversion if
             it is wider than the final type and doesn't change the signedness
             (for integers).  Avoid this if the final type is a pointer
             since then we sometimes need the inner conversion.  Likewise if
             the outer has a precision not equal to the size of its mode.  */
          if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
               || (inter_float && inside_float)
               || (inter_vec && inside_vec))
              && inter_prec >= inside_prec
              && (inter_float || inter_vec
                  || inter_unsignedp == inside_unsignedp)
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                    && TYPE_MODE (type) == TYPE_MODE (inter_type))
              && ! final_ptr
              && (! final_vec || inter_prec == inside_prec))
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));

          /* If we have a sign-extension of a zero-extended value, we can
             replace that by a single zero-extension.  */
          if (inside_int && inter_int && final_int
              && inside_prec < inter_prec && inter_prec < final_prec
              && inside_unsignedp && !inter_unsignedp)
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));

          /* Two conversions in a row are not needed unless:
             - some conversion is floating-point (overstrict for now), or
             - some conversion is a vector (overstrict for now), or
             - the intermediate type is narrower than both initial and
               final, or
             - the intermediate type and innermost type differ in signedness,
               and the outermost type is wider than the intermediate, or
             - the initial type is a pointer type and the precisions of the
               intermediate and final types differ, or
             - the final type is a pointer type and the precisions of the
               initial and intermediate types differ.  */
          if (! inside_float && ! inter_float && ! final_float
              && ! inside_vec && ! inter_vec && ! final_vec
              && (inter_prec > inside_prec || inter_prec > final_prec)
              && ! (inside_int && inter_int
                    && inter_unsignedp != inside_unsignedp
                    && inter_prec < final_prec)
              && ((inter_unsignedp && inter_prec > inside_prec)
                  == (final_unsignedp && final_prec > inter_prec))
              && ! (inside_ptr && inter_prec != final_prec)
              && ! (final_ptr && inside_prec != inter_prec)
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                    && TYPE_MODE (type) == TYPE_MODE (inter_type))
              && ! final_ptr)
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));
        }
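      /* Illustrative example (added commentary, not from the original
         sources): on an LP64 target with int x, (int) (long) x widens
         and then narrows back without changing signedness, so the
         rules above fold it to a single conversion (int) x.  */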
      if (TREE_CODE (op0) == MODIFY_EXPR
          && TREE_CONSTANT (TREE_OPERAND (op0, 1))
          /* Detect assigning a bitfield.  */
          && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
               && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
        {
          /* Don't leave an assignment inside a conversion
             unless assigning a bitfield.  */
          tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
          /* First do the assignment, then return converted constant.  */
          tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
          TREE_NO_WARNING (tem) = 1;
          TREE_USED (tem) = 1;
          return tem;
        }

      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
         constant (if x has signed type, the sign bit cannot be set
         in c).  This folds extension into the BIT_AND_EXPR.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (type) != BOOLEAN_TYPE
          && TREE_CODE (op0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
        {
          tree and = op0;
          tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
          int change = 0;

          if (TYPE_UNSIGNED (TREE_TYPE (and))
              || (TYPE_PRECISION (type)
                  <= TYPE_PRECISION (TREE_TYPE (and))))
            change = 1;
          else if (TYPE_PRECISION (TREE_TYPE (and1))
                   <= HOST_BITS_PER_WIDE_INT
                   && host_integerp (and1, 1))
            {
              unsigned HOST_WIDE_INT cst;

              cst = tree_low_cst (and1, 1);
              cst &= (HOST_WIDE_INT) -1
                     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
              change = (cst == 0);
#ifdef LOAD_EXTEND_OP
              if (change
                  && !flag_syntax_only
                  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
                      == ZERO_EXTEND))
                {
                  tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
                  and0 = fold_convert (uns, and0);
                  and1 = fold_convert (uns, and1);
                }
#endif
            }
          if (change)
            {
              tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
                                        TREE_INT_CST_HIGH (and1));
              tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
                                    TREE_CONSTANT_OVERFLOW (and1));
              return fold_build2 (BIT_AND_EXPR, type,
                                  fold_convert (type, and0), tem);
            }
        }
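      /* Illustrative example (added commentary, not from the original
         sources): with unsigned char c, (unsigned int) (c & 0x0f)
         becomes (unsigned int) c & 0x0f, letting the extension combine
         with the mask.  */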
      /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
         T2 being pointers to types of the same size.  */
      if (POINTER_TYPE_P (type)
          && BINARY_CLASS_P (arg0)
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
          && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree t0 = type;
          tree t1 = TREE_TYPE (arg00);
          tree tt0 = TREE_TYPE (t0);
          tree tt1 = TREE_TYPE (t1);
          tree s0 = TYPE_SIZE (tt0);
          tree s1 = TYPE_SIZE (tt1);

          if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
            return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
                           TREE_OPERAND (arg0, 1));
        }

      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;

    case VIEW_CONVERT_EXPR:
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
        return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
      return NULL_TREE;

    case NEGATE_EXPR:
      if (negate_expr_p (arg0))
        return fold_convert (type, negate_expr (arg0));
      /* Convert - (~A) to A + 1.  */
      if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == BIT_NOT_EXPR)
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
                            build_int_cst (type, 1));
      return NULL_TREE;
    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
        return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
               && TREE_CODE (type) == REAL_TYPE)
        {
          tree targ0 = strip_float_extensions (arg0);
          if (targ0 != arg0)
            return fold_convert (type, fold_build1 (ABS_EXPR,
                                                    TREE_TYPE (targ0),
                                                    targ0));
        }
      else if (tree_expr_nonnegative_p (arg0))
        return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = fold_strip_sign_ops (arg0);
          if (tem)
            return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
        }
      return NULL_TREE;

    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert (type, arg0);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return build2 (COMPLEX_EXPR, type,
                       TREE_OPERAND (arg0, 0),
                       negate_expr (TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
        return build_complex (type, TREE_REALPART (arg0),
                              negate_expr (TREE_IMAGPART (arg0)));
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        return fold_build2 (TREE_CODE (arg0), type,
                            fold_build1 (CONJ_EXPR, type,
                                         TREE_OPERAND (arg0, 0)),
                            fold_build1 (CONJ_EXPR, type,
                                         TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == CONJ_EXPR)
        return TREE_OPERAND (arg0, 0);
      return NULL_TREE;
    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
        return TREE_OPERAND (arg0, 0);
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
        return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
                            build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
               && ((TREE_CODE (arg0) == MINUS_EXPR
                    && integer_onep (TREE_OPERAND (arg0, 1)))
                   || (TREE_CODE (arg0) == PLUS_EXPR
                       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
        return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary (BIT_NOT_EXPR, type,
                                     fold_convert (type,
                                                   TREE_OPERAND (arg0, 0)))))
        return fold_build2 (BIT_XOR_EXPR, type, tem,
                            fold_convert (type, TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary (BIT_NOT_EXPR, type,
                                     fold_convert (type,
                                                   TREE_OPERAND (arg0, 1)))))
        return fold_build2 (BIT_XOR_EXPR, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
      return NULL_TREE;
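      /* Illustrative examples (added commentary, not from the original
         sources): ~-x folds to x - 1, ~(x - 1) folds to -x, and
         ~(x ^ ~y) simplifies through the XOR rule to x ^ y.  */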
    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
        arg0 = fold_convert (boolean_type_node, arg0);

      /* Note that the operand of this must be an int
         and its values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language,
         but we don't handle values other than 1 correctly yet.)  */
      tem = invert_truthvalue (arg0);
      /* Avoid infinite recursion.  */
      if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
        return NULL_TREE;
      return fold_convert (type, tem);

    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert (type, arg0);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 0),
                                 TREE_OPERAND (arg0, 1));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
        return TREE_REALPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        return fold_build2 (TREE_CODE (arg0), type,
                            fold_build1 (REALPART_EXPR, type,
                                         TREE_OPERAND (arg0, 0)),
                            fold_build1 (REALPART_EXPR, type,
                                         TREE_OPERAND (arg0, 1)));
      return NULL_TREE;

    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert (type, integer_zero_node);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 1),
                                 TREE_OPERAND (arg0, 0));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
        return TREE_IMAGPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        return fold_build2 (TREE_CODE (arg0), type,
                            fold_build1 (IMAGPART_EXPR, type,
                                         TREE_OPERAND (arg0, 0)),
                            fold_build1 (IMAGPART_EXPR, type,
                                         TREE_OPERAND (arg0, 1)));
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_binary (enum tree_code code, tree type, tree op0, tree op1)
{
  tree t1 = NULL_TREE;
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  /* WINS will be nonzero when the switch is done
     if all operands are constant.  */
  int wins = 1;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 2);

  arg0 = op0;
  arg1 = op1;

  if (arg0)
    {
      tree subop;

      /* Strip any conversions that don't change the mode.  This is
         safe for every expression, except for a comparison expression
         because its signedness is derived from its operands.  So, in
         the latter case, only strip conversions that don't change the
         signedness.

         Note that this is done as an internal manipulation within the
         constant folder, in order to find the simplest representation
         of the arguments so that their form can be studied.  In any
         case, the appropriate type conversions should be put back in
         the tree that will get out of the constant folder.  */
      if (kind == tcc_comparison)
        STRIP_SIGN_NOPS (arg0);
      else
        STRIP_NOPS (arg0);

      if (TREE_CODE (arg0) == COMPLEX_CST)
        subop = TREE_REALPART (arg0);
      else
        subop = arg0;

      if (TREE_CODE (subop) != INTEGER_CST
          && TREE_CODE (subop) != REAL_CST)
        /* Note that TREE_CONSTANT isn't enough:
           static var addresses are constant but we can't
           do arithmetic on them.  */
        wins = 0;
    }

  if (arg1)
    {
      tree subop;

      /* Likewise for ARG1; see the comment above.  */
      if (kind == tcc_comparison)
        STRIP_SIGN_NOPS (arg1);
      else
        STRIP_NOPS (arg1);

      if (TREE_CODE (arg1) == COMPLEX_CST)
        subop = TREE_REALPART (arg1);
      else
        subop = arg1;

      if (TREE_CODE (subop) != INTEGER_CST
          && TREE_CODE (subop) != REAL_CST)
        /* Note that TREE_CONSTANT isn't enough:
           static var addresses are constant but we can't
           do arithmetic on them.  */
        wins = 0;
    }

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2 (code, type, op1, op0);
  /* Now WINS is set as described above,
     ARG0 is the first operand of EXPR,
     and ARG1 is the second operand (if it has more than one operand).

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
           && (truth_value_p (TREE_CODE (arg1))
               || (TREE_CODE (arg1) == BIT_AND_EXPR
                   && integer_onep (TREE_OPERAND (arg1, 1)))))
          || (truth_value_p (TREE_CODE (arg1))
              && (truth_value_p (TREE_CODE (arg0))
                  || (TREE_CODE (arg0) == BIT_AND_EXPR
                      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
                         : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
                         : TRUTH_XOR_EXPR,
                         boolean_type_node,
                         fold_convert (boolean_type_node, arg0),
                         fold_convert (boolean_type_node, arg1));

      if (code == EQ_EXPR)
        tem = invert_truthvalue (tem);

      return fold_convert (type, tem);
    }
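  /* Illustrative example (added commentary, not from the original
     sources): (a < b) & (c < d) has truth values on both sides, so it
     is rewritten as TRUTH_AND_EXPR (a < b, c < d); likewise
     (a < b) == (c < d) becomes the inversion of
     TRUTH_XOR_EXPR (a < b, c < d).  */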
  if (TREE_CODE_CLASS (code) == tcc_comparison
      && TREE_CODE (arg0) == COMPOUND_EXPR)
    return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                   fold_build2 (code, type, TREE_OPERAND (arg0, 1), arg1));
  else if (TREE_CODE_CLASS (code) == tcc_comparison
           && TREE_CODE (arg1) == COMPOUND_EXPR)
    return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
                   fold_build2 (code, type, arg0, TREE_OPERAND (arg1, 1)));
  else if (TREE_CODE_CLASS (code) == tcc_binary
           || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                       fold_build2 (code, type, TREE_OPERAND (arg0, 1),
                                    arg1));
      if (TREE_CODE (arg1) == COMPOUND_EXPR
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
                       fold_build2 (code, type,
                                    arg0, TREE_OPERAND (arg1, 1)));

      if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
        {
          tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
                                                     arg0, arg1,
                                                     /*cond_first_p=*/1);
          if (tem != NULL_TREE)
            return tem;
        }

      if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
        {
          tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
                                                     arg1, arg0,
                                                     /*cond_first_p=*/0);
          if (tem != NULL_TREE)
            return tem;
        }
    }

  switch (code)
    {
    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2 (MINUS_EXPR, type,
                            fold_convert (type, arg0),
                            fold_convert (type, TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
        return fold_build2 (MINUS_EXPR, type,
                            fold_convert (type, arg1),
                            fold_convert (type, TREE_OPERAND (arg0, 0)));
      /* Convert ~A + 1 to -A.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (arg0) == BIT_NOT_EXPR
          && integer_onep (arg1))
        return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));

      if (TREE_CODE (type) == COMPLEX_TYPE)
        {
          tem = fold_complex_add (type, arg0, arg1, PLUS_EXPR);
          if (tem)
            return tem;
        }

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return non_lvalue (fold_convert (type, arg0));

          /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
             with a constant, and the two constants have no bits in common,
             we should treat this as a BIT_IOR_EXPR since this may produce more
             simplifications.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
              && integer_zerop (const_binop (BIT_AND_EXPR,
                                             TREE_OPERAND (arg0, 1),
                                             TREE_OPERAND (arg1, 1), 0)))
            {
              code = BIT_IOR_EXPR;
              goto bit_ior;
            }
          /* Reassociate (plus (plus (mult) (foo)) (mult)) as
             (plus (plus (mult) (mult)) (foo)) so that we can
             take advantage of the factoring cases below.  */
          if (((TREE_CODE (arg0) == PLUS_EXPR
                || TREE_CODE (arg0) == MINUS_EXPR)
               && TREE_CODE (arg1) == MULT_EXPR)
              || ((TREE_CODE (arg1) == PLUS_EXPR
                   || TREE_CODE (arg1) == MINUS_EXPR)
                  && TREE_CODE (arg0) == MULT_EXPR))
            {
              tree parg0, parg1, parg, marg;
              enum tree_code pcode;

              if (TREE_CODE (arg1) == MULT_EXPR)
                parg = arg0, marg = arg1;
              else
                parg = arg1, marg = arg0;
              pcode = TREE_CODE (parg);
              parg0 = TREE_OPERAND (parg, 0);
              parg1 = TREE_OPERAND (parg, 1);
              STRIP_NOPS (parg0);
              STRIP_NOPS (parg1);

              if (TREE_CODE (parg0) == MULT_EXPR
                  && TREE_CODE (parg1) != MULT_EXPR)
                return fold_build2 (pcode, type,
                                    fold_build2 (PLUS_EXPR, type,
                                                 fold_convert (type, parg0),
                                                 fold_convert (type, marg)),
                                    fold_convert (type, parg1));
              if (TREE_CODE (parg0) != MULT_EXPR
                  && TREE_CODE (parg1) == MULT_EXPR)
                return fold_build2 (PLUS_EXPR, type,
                                    fold_convert (type, parg0),
                                    fold_build2 (pcode, type,
                                                 fold_convert (type, marg),
                                                 fold_convert (type,
                                                               parg1)));
            }

          if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
            {
              tree arg00, arg01, arg10, arg11;
              tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

              /* (A * C) + (B * C) -> (A+B) * C.
                 We are most concerned about the case where C is a constant,
                 but other combinations show up during loop reduction.  Since
                 it is not difficult, try all four possibilities.  */

              arg00 = TREE_OPERAND (arg0, 0);
              arg01 = TREE_OPERAND (arg0, 1);
              arg10 = TREE_OPERAND (arg1, 0);
              arg11 = TREE_OPERAND (arg1, 1);
              same = NULL_TREE;

              if (operand_equal_p (arg01, arg11, 0))
                same = arg01, alt0 = arg00, alt1 = arg10;
              else if (operand_equal_p (arg00, arg10, 0))
                same = arg00, alt0 = arg01, alt1 = arg11;
              else if (operand_equal_p (arg00, arg11, 0))
                same = arg00, alt0 = arg01, alt1 = arg10;
              else if (operand_equal_p (arg01, arg10, 0))
                same = arg01, alt0 = arg00, alt1 = arg11;

              /* No identical multiplicands; see if we can find a common
                 power-of-two factor in non-power-of-two multiplies.  This
                 can help in multi-dimensional array access.  */
              else if (TREE_CODE (arg01) == INTEGER_CST
                       && TREE_CODE (arg11) == INTEGER_CST
                       && TREE_INT_CST_HIGH (arg01) == 0
                       && TREE_INT_CST_HIGH (arg11) == 0)
                {
                  HOST_WIDE_INT int01, int11, tmp;
                  int01 = TREE_INT_CST_LOW (arg01);
                  int11 = TREE_INT_CST_LOW (arg11);

                  /* Move min of absolute values to int11.  */
                  if ((int01 >= 0 ? int01 : -int01)
                      < (int11 >= 0 ? int11 : -int11))
                    {
                      tmp = int01, int01 = int11, int11 = tmp;
                      alt0 = arg00, arg00 = arg10, arg10 = alt0;
                      alt0 = arg01, arg01 = arg11, arg11 = alt0;
                    }

                  if (exact_log2 (int11) > 0 && int01 % int11 == 0)
                    {
                      alt0 = fold_build2 (MULT_EXPR, type, arg00,
                                          build_int_cst (NULL_TREE,
                                                         int01 / int11));
                      alt1 = arg10;
                      same = arg11;
                    }
                }

              if (same)
                return fold_build2 (MULT_EXPR, type,
                                    fold_build2 (PLUS_EXPR, type,
                                                 fold_convert (type, alt0),
                                                 fold_convert (type, alt1)),
                                    fold_convert (type, same));
            }
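          /* Illustrative example (added commentary, not from the
             original sources): x*12 + y*12 factors to (x + y) * 12 via
             the identical-multiplicand cases, while x*16 + y*4 takes
             the power-of-two path and becomes (x*4 + y) * 4.  */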
          /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
             of the array.  The loop optimizer sometimes produces this type
             of expression.  */
          if (TREE_CODE (arg0) == ADDR_EXPR)
            {
              tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
              if (tem)
                return fold_convert (type, fold (tem));
            }
          else if (TREE_CODE (arg1) == ADDR_EXPR)
            {
              tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
              if (tem)
                return fold_convert (type, fold (tem));
            }
        }
      else
        {
          /* See if ARG1 is zero and X + ARG1 reduces to X.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
            return non_lvalue (fold_convert (type, arg0));

          /* Likewise if the operands are reversed.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
            return non_lvalue (fold_convert (type, arg1));

          /* Convert X + -C into X - C.  */
          if (TREE_CODE (arg1) == REAL_CST
              && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
            {
              tem = fold_negate_const (arg1, type);
              if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
                return fold_build2 (MINUS_EXPR, type,
                                    fold_convert (type, arg0),
                                    fold_convert (type, tem));
            }

          if (flag_unsafe_math_optimizations
              && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
              && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
              && (tem = distribute_real_division (code, type, arg0, arg1)))
            return tem;

          /* Convert x+x into x*2.0.  */
          if (operand_equal_p (arg0, arg1, 0)
              && SCALAR_FLOAT_TYPE_P (type))
            return fold_build2 (MULT_EXPR, type, arg0,
                                build_real (type, dconst2));

          /* Convert x*c+x into x*(c+1).  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
              && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            {
              REAL_VALUE_TYPE c;

              c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
              real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
              return fold_build2 (MULT_EXPR, type, arg1,
                                  build_real (type, c));
            }

          /* Convert x+x*c into x*(c+1).  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
              && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
              && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
            {
              REAL_VALUE_TYPE c;

              c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
              real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
              return fold_build2 (MULT_EXPR, type, arg0,
                                  build_real (type, c));
            }

          /* Convert x*c1+x*c2 into x*(c1+c2).  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == MULT_EXPR
              && TREE_CODE (arg1) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
              && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
              && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
              && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0))
            {
              REAL_VALUE_TYPE c1, c2;

              c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
              c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
              real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
              return fold_build2 (MULT_EXPR, type,
                                  TREE_OPERAND (arg0, 0),
                                  build_real (type, c1));
            }
          /* Convert a + (b*c + d*e) into (a + b*c) + d*e.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == PLUS_EXPR
              && TREE_CODE (arg0) != MULT_EXPR)
            {
              tree tree10 = TREE_OPERAND (arg1, 0);
              tree tree11 = TREE_OPERAND (arg1, 1);
              if (TREE_CODE (tree11) == MULT_EXPR
                  && TREE_CODE (tree10) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
                  return fold_build2 (PLUS_EXPR, type, tree0, tree11);
                }
            }
          /* Convert (b*c + d*e) + a into b*c + (d*e + a).  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) != MULT_EXPR)
            {
              tree tree00 = TREE_OPERAND (arg0, 0);
              tree tree01 = TREE_OPERAND (arg0, 1);
              if (TREE_CODE (tree01) == MULT_EXPR
                  && TREE_CODE (tree00) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
                  return fold_build2 (PLUS_EXPR, type, tree00, tree0);
                }
            }
        }
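      /* Illustrative examples of the unsafe-math rules above (added
         commentary, not from the original sources): x*3.0 + x becomes
         x*4.0, x*2.0 + x*5.0 becomes x*7.0, and a + (b*c + d*e)
         reassociates to (a + b*c) + d*e.  */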
    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
         is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
         is a rotate of A by B bits.  */
      {
        enum tree_code code0, code1;
        code0 = TREE_CODE (arg0);
        code1 = TREE_CODE (arg1);
        if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
             || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
            && operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 0), 0)
            && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
          {
            tree tree01, tree11;
            enum tree_code code01, code11;

            tree01 = TREE_OPERAND (arg0, 1);
            tree11 = TREE_OPERAND (arg1, 1);
            STRIP_NOPS (tree01);
            STRIP_NOPS (tree11);
            code01 = TREE_CODE (tree01);
            code11 = TREE_CODE (tree11);
            if (code01 == INTEGER_CST
                && code11 == INTEGER_CST
                && TREE_INT_CST_HIGH (tree01) == 0
                && TREE_INT_CST_HIGH (tree11) == 0
                && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
                    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
              return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
                             code0 == LSHIFT_EXPR ? tree01 : tree11);
            else if (code11 == MINUS_EXPR)
              {
                tree tree110, tree111;
                tree110 = TREE_OPERAND (tree11, 0);
                tree111 = TREE_OPERAND (tree11, 1);
                STRIP_NOPS (tree110);
                STRIP_NOPS (tree111);
                if (TREE_CODE (tree110) == INTEGER_CST
                    && 0 == compare_tree_int (tree110,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree01, tree111, 0))
                  return build2 ((code0 == LSHIFT_EXPR
                                  ? LROTATE_EXPR
                                  : RROTATE_EXPR),
                                 type, TREE_OPERAND (arg0, 0), tree01);
              }
            else if (code01 == MINUS_EXPR)
              {
                tree tree010, tree011;
                tree010 = TREE_OPERAND (tree01, 0);
                tree011 = TREE_OPERAND (tree01, 1);
                STRIP_NOPS (tree010);
                STRIP_NOPS (tree011);
                if (TREE_CODE (tree010) == INTEGER_CST
                    && 0 == compare_tree_int (tree010,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree11, tree011, 0))
                  return build2 ((code0 != LSHIFT_EXPR
                                  ? LROTATE_EXPR
                                  : RROTATE_EXPR),
                                 type, TREE_OPERAND (arg0, 0), tree11);
              }
          }
      }
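      /* Illustrative example (added commentary, not from the original
         sources): for unsigned 32-bit A, (A << 3) + (A >> 29) matches
         the constant case and becomes a left rotate of A by 3, while
         (A << B) + (A >> (32 - B)) matches the MINUS_EXPR case.  */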
    associate:
      /* In most languages, can't associate operations on floats through
         parentheses.  Rather than remember where the parentheses were, we
         don't associate floats at all, unless the user has specified
         -funsafe-math-optimizations.  */

      if (! wins
          && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
        {
          tree var0, con0, lit0, minus_lit0;
          tree var1, con1, lit1, minus_lit1;

          /* Split both trees into variables, constants, and literals.  Then
             associate each group together, the constants with literals,
             then the result with variables.  This increases the chances of
             literals being recombined later and of generating relocatable
             expressions for the sum of a constant and literal.  */
          var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
          var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
                             code == MINUS_EXPR);

          /* Only do something if we found more than two objects.  Otherwise,
             nothing has changed and we risk infinite recursion.  */
          if (2 < ((var0 != 0) + (var1 != 0)
                   + (con0 != 0) + (con1 != 0)
                   + (lit0 != 0) + (lit1 != 0)
                   + (minus_lit0 != 0) + (minus_lit1 != 0)))
            {
              /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
              if (code == MINUS_EXPR)
                code = PLUS_EXPR;

              var0 = associate_trees (var0, var1, code, type);
              con0 = associate_trees (con0, con1, code, type);
              lit0 = associate_trees (lit0, lit1, code, type);
              minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);

              /* Preserve the MINUS_EXPR if the negative part of the literal is
                 greater than the positive part.  Otherwise, the multiplicative
                 folding code (i.e extract_muldiv) may be fooled in case
                 unsigned constants are subtracted, like in the following
                 example: ((X*2 + 4) - 8U)/2.  */
              if (minus_lit0 && lit0)
                {
                  if (TREE_CODE (lit0) == INTEGER_CST
                      && TREE_CODE (minus_lit0) == INTEGER_CST
                      && tree_int_cst_lt (lit0, minus_lit0))
                    {
                      minus_lit0 = associate_trees (minus_lit0, lit0,
                                                    MINUS_EXPR, type);
                      lit0 = 0;
                    }
                  else
                    {
                      lit0 = associate_trees (lit0, minus_lit0,
                                              MINUS_EXPR, type);
                      minus_lit0 = 0;
                    }
                }
              if (minus_lit0)
                {
                  if (con0 == 0)
                    return fold_convert (type,
                                         associate_trees (var0, minus_lit0,
                                                          MINUS_EXPR, type));
                  else
                    {
                      con0 = associate_trees (con0, minus_lit0,
                                              MINUS_EXPR, type);
                      return fold_convert (type,
                                           associate_trees (var0, con0,
                                                            PLUS_EXPR, type));
                    }
                }

              con0 = associate_trees (con0, lit0, code, type);
              return fold_convert (type, associate_trees (var0, con0,
                                                          code, type));
            }
        }

    binary:
      if (wins)
        t1 = const_binop (code, arg0, arg1, 0);
      if (t1 != NULL_TREE)
        {
          /* The return value should always have
             the same type as the original expression.  */
          if (TREE_TYPE (t1) != type)
            t1 = fold_convert (type, t1);

          return t1;
        }
      return NULL_TREE;
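      /* Illustrative example (added commentary, not from the original
         sources): (x + 4) + (y + 8) splits into variables x, y and
         literals 4, 8, which reassociate to (x + y) + 12.  */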
    case MINUS_EXPR:
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && (FLOAT_TYPE_P (type)
              || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
          && negate_expr_p (arg1)
          && reorder_operands_p (arg0, arg1))
        return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
                            TREE_OPERAND (arg0, 0));
      /* Convert -A - 1 to ~A.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (arg0) == NEGATE_EXPR
          && integer_onep (arg1))
        return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));

      /* Convert -1 - A to ~A.  */
      if (INTEGRAL_TYPE_P (type)
          && integer_all_onesp (arg0))
        return fold_build1 (BIT_NOT_EXPR, type, arg1);

      if (TREE_CODE (type) == COMPLEX_TYPE)
        {
          tem = fold_complex_add (type, arg0, arg1, MINUS_EXPR);
          if (tem)
            return tem;
        }

      if (! FLOAT_TYPE_P (type))
        {
          if (! wins && integer_zerop (arg0))
            return negate_expr (fold_convert (type, arg1));
          if (integer_zerop (arg1))
            return non_lvalue (fold_convert (type, arg0));

          /* Fold A - (A & B) into ~B & A.  */
          if (!TREE_SIDE_EFFECTS (arg0)
              && TREE_CODE (arg1) == BIT_AND_EXPR)
            {
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
                return fold_build2 (BIT_AND_EXPR, type,
                                    fold_build1 (BIT_NOT_EXPR, type,
                                                 TREE_OPERAND (arg1, 0)),
                                    arg0);
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
                return fold_build2 (BIT_AND_EXPR, type,
                                    fold_build1 (BIT_NOT_EXPR, type,
                                                 TREE_OPERAND (arg1, 1)),
                                    arg0);
            }

          /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
             any power of 2 minus 1.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0))
            {
              tree mask0 = TREE_OPERAND (arg0, 1);
              tree mask1 = TREE_OPERAND (arg1, 1);
              tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);

              if (operand_equal_p (tem, mask1, 0))
                {
                  tem = fold_build2 (BIT_XOR_EXPR, type,
                                     TREE_OPERAND (arg0, 0), mask1);
                  return fold_build2 (MINUS_EXPR, type, tem, mask1);
                }
            }
        }

      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
        return non_lvalue (fold_convert (type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
         ARG0 is zero and X + ARG0 reduces to X, since that would mean
         (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
        return negate_expr (fold_convert (type, arg1));

      /* Fold &x - &x.  This can happen from &x.foo - &x.
         This is unsafe for certain floats even in non-IEEE formats.
         In IEEE, it is unsafe because it does wrong for NaNs.
         Also note that operand_equal_p is always false if an operand
         is volatile.  */

      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && operand_equal_p (arg0, arg1, 0))
        return fold_convert (type, integer_zero_node);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (!wins && negate_expr_p (arg1)
          && ((FLOAT_TYPE_P (type)
               /* Avoid this transformation if B is a positive REAL_CST.  */
               && (TREE_CODE (arg1) != REAL_CST
                   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
              || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
        return fold_build2 (PLUS_EXPR, type,
                            fold_convert (type, arg0),
                            fold_convert (type, negate_expr (arg1)));

      /* Try folding difference of addresses.  */
      {
        HOST_WIDE_INT diff;

        if ((TREE_CODE (arg0) == ADDR_EXPR
             || TREE_CODE (arg1) == ADDR_EXPR)
            && ptr_difference_const (arg0, arg1, &diff))
          return build_int_cst_type (type, diff);
      }

      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
          && TREE_CODE (arg1) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
        {
          tree aref0 = TREE_OPERAND (arg0, 0);
          tree aref1 = TREE_OPERAND (arg1, 0);
          if (operand_equal_p (TREE_OPERAND (aref0, 0),
                               TREE_OPERAND (aref1, 0), 0))
            {
              tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
              tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
              tree esz = array_ref_element_size (aref0);
              tree diff = build2 (MINUS_EXPR, type, op0, op1);
              return fold_build2 (MULT_EXPR, type, diff,
                                  fold_convert (type, esz));
            }
        }

      /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
         of the array.  The loop optimizer sometimes produces this type
         of expression.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
        {
          tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
          if (tem)
            return fold_convert (type, fold (tem));
        }

      if (flag_unsafe_math_optimizations
          && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
          && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
          && (tem = distribute_real_division (code, type, arg0, arg1)))
        return tem;

      if (TREE_CODE (arg0) == MULT_EXPR
          && TREE_CODE (arg1) == MULT_EXPR
          && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
        {
          /* (A * C) - (B * C) -> (A-B) * C.  */
          if (operand_equal_p (TREE_OPERAND (arg0, 1),
                               TREE_OPERAND (arg1, 1), 0))
            return fold_build2 (MULT_EXPR, type,
                                fold_build2 (MINUS_EXPR, type,
                                             TREE_OPERAND (arg0, 0),
                                             TREE_OPERAND (arg1, 0)),
                                TREE_OPERAND (arg0, 1));
          /* (A * C1) - (A * C2) -> A * (C1-C2).  */
          if (operand_equal_p (TREE_OPERAND (arg0, 0),
                               TREE_OPERAND (arg1, 0), 0))
            return fold_build2 (MULT_EXPR, type,
                                TREE_OPERAND (arg0, 0),
                                fold_build2 (MINUS_EXPR, type,
                                             TREE_OPERAND (arg0, 1),
                                             TREE_OPERAND (arg1, 1)));
        }

      goto associate;
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2 (MULT_EXPR, type,
			    TREE_OPERAND (arg0, 0),
			    negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2 (MULT_EXPR, type,
			    negate_expr (arg0),
			    TREE_OPERAND (arg1, 0));
      if (TREE_CODE (type) == COMPLEX_TYPE)
	{
	  tem = fold_complex_mult (type, arg0, arg1);
	  if (tem)
	    return tem;
	}
      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  if (integer_onep (arg1))
	    return non_lvalue (fold_convert (type, arg0));
	  /* Transform x * -1 into -x.  */
	  if (integer_all_onesp (arg1))
	    return fold_convert (type, negate_expr (arg0));

	  /* (a * (1 << b)) is (a << b)  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg1, 0)))
	    return fold_build2 (LSHIFT_EXPR, type, arg0,
				TREE_OPERAND (arg1, 1));
	  if (TREE_CODE (arg0) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg0, 0)))
	    return fold_build2 (LSHIFT_EXPR, type, arg1,
				TREE_OPERAND (arg0, 1));
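	  /* Editor's illustration (assumed user code, not part of GCC):

	       int f (int a, int b) { return a * (1 << b); }

	     matches the LSHIFT_EXPR patterns above and is rewritten as
	     "a << b", replacing the multiply with a shift.  */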
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = extract_muldiv (op0,
					     fold_convert (type, arg1),
					     code, NULL_TREE)))
	    return fold_convert (type, tem);
	}
      else
	{
	  /* Maybe fold x * 0 to 0.  The expressions aren't the same
	     when x is NaN, since x * 0 is also NaN.  Nor are they the
	     same in modes with signed zeros, since multiplying a
	     negative value by 0 gives -0, not +0.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  /* In IEEE floating point, x*1 is not equivalent to x for snans.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_onep (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Transform x * -1.0 into -x.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_minus_onep (arg1))
	    return fold_convert (type, negate_expr (arg0));

	  /* Convert (C1/X)*C2 into (C1*C2)/X.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == RDIV_EXPR
	      && TREE_CODE (arg1) == REAL_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
	    {
	      tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
				      arg1, 0);
	      if (tem)
		return fold_build2 (RDIV_EXPR, type, tem,
				    TREE_OPERAND (arg0, 1));
	    }

	  /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
	  if (operand_equal_p (arg0, arg1, 0))
	    {
	      tree tem = fold_strip_sign_ops (arg0);
	      if (tem != NULL_TREE)
		{
		  tem = fold_convert (type, tem);
		  return fold_build2 (MULT_EXPR, type, tem, tem);
		}
	    }
	  if (flag_unsafe_math_optimizations)
	    {
	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	      /* Optimizations of root(...)*root(...).  */
	      if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
		{
		  tree rootfn, arg, arglist;
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));

		  /* Optimize sqrt(x)*sqrt(x) as x.  */
		  if (BUILTIN_SQRT_P (fcode0)
		      && operand_equal_p (arg00, arg10, 0)
		      && ! HONOR_SNANS (TYPE_MODE (type)))
		    return arg00;

		  /* Optimize root(x)*root(y) as root(x*y).  */
		  rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
		  arglist = build_tree_list (NULL_TREE, arg);
		  return build_function_call_expr (rootfn, arglist);
		}
	      /* Optimize expN(x)*expN(y) as expN(x+y).  */
	      if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
		{
		  tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  tree arg = fold_build2 (PLUS_EXPR, type,
					  TREE_VALUE (TREE_OPERAND (arg0, 1)),
					  TREE_VALUE (TREE_OPERAND (arg1, 1)));
		  tree arglist = build_tree_list (NULL_TREE, arg);
		  return build_function_call_expr (expfn, arglist);
		}
	      /* Optimizations of pow(...)*pow(...).  */
	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
		{
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
								     1)));
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
								     1)));

		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
		  if (operand_equal_p (arg01, arg11, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
		      tree arglist = tree_cons (NULL_TREE, arg,
						build_tree_list (NULL_TREE,
								 arg01));
		      return build_function_call_expr (powfn, arglist);
		    }

		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
		  if (operand_equal_p (arg00, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
		      tree arglist = tree_cons (NULL_TREE, arg00,
						build_tree_list (NULL_TREE,
								 arg));
		      return build_function_call_expr (powfn, arglist);
		    }
		}
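	      /* Editor's illustration (assumed user code, not part of GCC):
		 under -funsafe-math-optimizations,

		   double f (double x, double y)
		   { return pow (x, 2.0) * pow (y, 2.0); }

		 becomes pow (x * y, 2.0) by the first rule above, while
		 pow (x, 2.0) * pow (x, 3.0) becomes pow (x, 5.0) by the
		 second.  */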
	      /* Optimize tan(x)*cos(x) as sin(x).  */
	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
		  && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				      TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
		{
		  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

		  if (sinfn != NULL_TREE)
		    return build_function_call_expr (sinfn,
						     TREE_OPERAND (arg0, 1));
		}
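	      /* Editor's illustration (assumed user code, not part of GCC):
		 under -funsafe-math-optimizations,

		   double f (double x) { return tan (x) * cos (x); }

		 folds to sin (x).  The identity ignores the poles of tan,
		 which is why the whole block is gated on the unsafe flag.  */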
	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
	      if (fcode1 == BUILT_IN_POW
		  || fcode1 == BUILT_IN_POWF
		  || fcode1 == BUILT_IN_POWL)
		{
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
								     1)));
		  if (TREE_CODE (arg11) == REAL_CST
		      && ! TREE_CONSTANT_OVERFLOW (arg11)
		      && operand_equal_p (arg0, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg, arglist;

		      c = TREE_REAL_CST (arg11);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg0, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}
	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
	      if (fcode0 == BUILT_IN_POW
		  || fcode0 == BUILT_IN_POWF
		  || fcode0 == BUILT_IN_POWL)
		{
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
								     1)));
		  if (TREE_CODE (arg01) == REAL_CST
		      && ! TREE_CONSTANT_OVERFLOW (arg01)
		      && operand_equal_p (arg1, arg00, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg, arglist;

		      c = TREE_REAL_CST (arg01);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg1, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}
	      /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
	      if (! optimize_size
		  && operand_equal_p (arg0, arg1, 0))
		{
		  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

		  if (powfn)
		    {
		      tree arg = build_real (type, dconst2);
		      tree arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg0, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}
	    }
	}
      goto associate;

    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
	return omit_one_operand (type, arg1, arg0);
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue (fold_convert (type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = build_int_cst (type, -1);
	  t1 = force_fit_type (t1, 0, false, false);
	  return omit_one_operand (type, t1, arg1);
	}

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = build_int_cst (type, -1);
	  t1 = force_fit_type (t1, 0, false, false);
	  return omit_one_operand (type, t1, arg0);
	}

      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

	 This results in more efficient code for machines without a NAND
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NAND instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold_build1 (BIT_NOT_EXPR, type,
			      build2 (BIT_AND_EXPR, type,
				      TREE_OPERAND (arg0, 0),
				      TREE_OPERAND (arg1, 0)));
	}
      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_all_onesp (arg1))
	return fold_build1 (BIT_NOT_EXPR, type, arg0);
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = build_int_cst (type, -1);
	  t1 = force_fit_type (t1, 0, false, false);
	  return omit_one_operand (type, t1, arg1);
	}

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = build_int_cst (type, -1);
	  t1 = force_fit_type (t1, 0, false, false);
	  return omit_one_operand (type, t1, arg0);
	}

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
	 with a constant, and the two constants have no bits in common,
	 we should treat this as a BIT_IOR_EXPR since this may produce more
	 simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (BIT_AND_EXPR,
					 TREE_OPERAND (arg0, 1),
					 TREE_OPERAND (arg1, 1), 0)))
	{
	  code = BIT_IOR_EXPR;
	  goto bit_ior;
	}
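      /* Editor's illustration (assumed user code, not part of GCC):

	   int f (int x, int y) { return (x & 0x0f) ^ (y & 0xf0); }

	 The two mask constants share no bits, so the XOR above is retried
	 as an inclusive OR, exposing the BIT_IOR_EXPR simplifications.  */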
      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	return fold_build2 (code, type,
			    fold_convert (type, TREE_OPERAND (arg0, 0)),
			    fold_convert (type, TREE_OPERAND (arg1, 0)));
      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue (fold_convert (type, arg0));

      /* ~X & X is always zero.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg1);

      /* X & ~X is always zero.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  unsigned int prec
	    = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
	      && (~TREE_INT_CST_LOW (arg1)
		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
	    return fold_convert (type, TREE_OPERAND (arg0, 0));
	}
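      /* Editor's illustration (assumed user code, not part of GCC):

	   unsigned char c;
	   int i = (int) c & 0377;

	 The NOP_EXPR widens from an unsigned 8-bit type and the mask
	 covers all eight of its bits, so the BIT_AND_EXPR is dropped
	 and the result is simply (int) c.  */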
      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

	 This results in more efficient code for machines without a NOR
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NOR instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold_build1 (BIT_NOT_EXPR, type,
			      build2 (BIT_IOR_EXPR, type,
				      TREE_OPERAND (arg0, 0),
				      TREE_OPERAND (arg1, 0)));
	}

      goto associate;

    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
	 of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	  && real_zerop (arg1))
	return NULL_TREE;

      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2 (RDIV_EXPR, type,
			    TREE_OPERAND (arg0, 0),
			    negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2 (RDIV_EXPR, type,
			    negate_expr (arg0),
			    TREE_OPERAND (arg1, 0));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_onep (arg1))
	return non_lvalue (fold_convert (type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_minus_onep (arg1))
	return non_lvalue (fold_convert (type, negate_expr (arg0)));

      /* If ARG1 is a constant, we can convert this to a multiply by the
	 reciprocal.  This does not have the same rounding properties,
	 so only do this if -funsafe-math-optimizations.  We can actually
	 always safely do it if ARG1 is a power of two, but it's hard to
	 tell if it is or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  if (flag_unsafe_math_optimizations
	      && 0 != (tem = const_binop (code, build_real (type, dconst1),
					  arg1, 0)))
	    return fold_build2 (MULT_EXPR, type, arg0, tem);
	  /* Find the reciprocal if optimizing and the result is exact.  */
	  if (optimize)
	    {
	      REAL_VALUE_TYPE r;
	      r = TREE_REAL_CST (arg1);
	      if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
		{
		  tem = build_real (type, r);
		  return fold_build2 (MULT_EXPR, type,
				      fold_convert (type, arg0), tem);
		}
	    }
	}
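      /* Editor's illustration (assumed user code, not part of GCC):
	 with -funsafe-math-optimizations, "x / 3.0" becomes
	 x * (1.0 / 3.0); without the flag the rewrite is still done,
	 when optimizing, if the reciprocal is exact, e.g. "x / 2.0"
	 becomes x * 0.5.  */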
      /* Convert A/B/C to A/(B*C).  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg0) == RDIV_EXPR)
	return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
			    fold_build2 (MULT_EXPR, type,
					 TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == RDIV_EXPR)
	return fold_build2 (MULT_EXPR, type,
			    fold_build2 (RDIV_EXPR, type, arg0,
					 TREE_OPERAND (arg1, 0)),
			    TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (arg0) == REAL_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
	{
	  tree tem = const_binop (RDIV_EXPR, arg0,
				  TREE_OPERAND (arg1, 1), 0);
	  if (tem)
	    return fold_build2 (RDIV_EXPR, type, tem,
				TREE_OPERAND (arg1, 0));
	}
      if (TREE_CODE (type) == COMPLEX_TYPE)
	{
	  tem = fold_complex_div (type, arg0, arg1, code);
	  if (tem)
	    return tem;
	}
      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg1);
	  /* Optimize x/expN(y) into x*expN(-y).  */
	  if (BUILTIN_EXPONENT_P (fcode))
	    {
	      tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
	      tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
	      tree arglist = build_tree_list (NULL_TREE,
					      fold_convert (type, arg));
	      arg1 = build_function_call_expr (expfn, arglist);
	      return fold_build2 (MULT_EXPR, type, arg0, arg1);
	    }

	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
	  if (fcode == BUILT_IN_POW
	      || fcode == BUILT_IN_POWF
	      || fcode == BUILT_IN_POWL)
	    {
	      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
	      tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
	      tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
	      tree neg11 = fold_convert (type, negate_expr (arg11));
	      tree arglist = tree_cons (NULL_TREE, arg10,
					build_tree_list (NULL_TREE, neg11));
	      arg1 = build_function_call_expr (powfn, arglist);
	      return fold_build2 (MULT_EXPR, type, arg0, arg1);
	    }
	}
      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	  /* Optimize sin(x)/cos(x) as tan(x).  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		return build_function_call_expr (tanfn,
						 TREE_OPERAND (arg0, 1));
	    }

	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		{
		  tree tmp = TREE_OPERAND (arg0, 1);
		  tmp = build_function_call_expr (tanfn, tmp);
		  return fold_build2 (RDIV_EXPR, type,
				      build_real (type, dconst1), tmp);
		}
	    }

	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
	  if (fcode0 == BUILT_IN_POW
	      || fcode0 == BUILT_IN_POWF
	      || fcode0 == BUILT_IN_POWL)
	    {
	      tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
	      tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
	      if (TREE_CODE (arg01) == REAL_CST
		  && ! TREE_CONSTANT_OVERFLOW (arg01)
		  && operand_equal_p (arg1, arg00, 0))
		{
		  tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  REAL_VALUE_TYPE c;
		  tree arg, arglist;

		  c = TREE_REAL_CST (arg01);
		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
		  arg = build_real (type, c);
		  arglist = build_tree_list (NULL_TREE, arg);
		  arglist = tree_cons (NULL_TREE, arg1, arglist);
		  return build_function_call_expr (powfn, arglist);
		}
	    }
	}
      goto binary;
    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
	return NULL_TREE;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return fold_convert (type, negate_expr (arg0));

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, it's not clear if they do
	 after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);

      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
	return fold_convert (type, tem);

      if (TREE_CODE (type) == COMPLEX_TYPE)
	{
	  tem = fold_complex_div (type, arg0, arg1, code);
	  if (tem)
	    return tem;
	}
      goto binary;

    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
	 effects in X.  */
      if (integer_onep (arg1))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
	 proper warnings and errors.  */
      if (integer_zerop (arg1))
	return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
	 effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
	return omit_one_operand (type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return omit_one_operand (type, integer_zero_node, arg0);

      /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
	 BIT_AND_EXPR, i.e. "X % C" into "X & C2".  */
      if (code == TRUNC_MOD_EXPR
	  && TYPE_UNSIGNED (type)
	  && integer_pow2p (arg1))
	{
	  unsigned HOST_WIDE_INT high, low;
	  tree mask;
	  int l;

	  l = tree_log2 (arg1);
	  if (l >= HOST_BITS_PER_WIDE_INT)
	    {
	      high = ((unsigned HOST_WIDE_INT) 1
		      << (l - HOST_BITS_PER_WIDE_INT)) - 1;
	      low = -1;
	    }
	  else
	    {
	      high = 0;
	      low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
	    }

	  mask = build_int_cst_wide (type, low, high);
	  return fold_build2 (BIT_AND_EXPR, type,
			      fold_convert (type, arg0), mask);
	}
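      /* Editor's illustration (assumed user code, not part of GCC):

	   unsigned int f (unsigned int x) { return x % 8; }

	 8 is a power of two, so the TRUNC_MOD_EXPR above is rewritten
	 as the cheaper "x & 7".  */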
      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_CONSTANT_OVERFLOW (arg1)
	  && TREE_INT_CST_HIGH (arg1) < 0
	  && !flag_trapv
	  /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
	  && !sign_bit_p (arg1, arg1))
	return fold_build2 (code, type, fold_convert (type, arg0),
			    fold_convert (type, negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && !flag_trapv)
	return fold_build2 (code, type, fold_convert (type, arg0),
			    fold_convert (type, TREE_OPERAND (arg1, 0)));

      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
	return fold_convert (type, tem);

      goto binary;
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
	return omit_one_operand (type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
	return omit_one_operand (type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);

      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return NULL_TREE;

      /* Rewrite an LROTATE_EXPR by a constant into an
	 RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	{
	  tree tem = build_int_cst (NULL_TREE,
				    GET_MODE_BITSIZE (TYPE_MODE (type)));
	  tem = fold_convert (TREE_TYPE (arg1), tem);
	  tem = const_binop (MINUS_EXPR, tem, arg1, 0);
	  return fold_build2 (RROTATE_EXPR, type, arg0, tem);
	}
      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2 (TREE_CODE (arg0), type,
			    fold_build2 (code, type,
					 TREE_OPERAND (arg0, 0), arg1),
			    fold_build2 (code, type,
					 TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to the width of the mode can
	 be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (arg1) == 0
	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
	  && ((TREE_INT_CST_LOW (arg1)
	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
	      == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
	return TREE_OPERAND (arg0, 0);
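      /* Editor's illustration: for a 32-bit unsigned value, rotating
	 right by 8 and then rotating the result right by 24 restores
	 the original value, so the nested RROTATE_EXPR above folds to
	 its innermost operand.  */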
      goto binary;

    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand (type, arg1, arg0);
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_MAX_VALUE (type)
	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand (type, arg1, arg0);
      goto associate;
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
	return fold_convert (type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
	 means A >= Y && A != MAX, but in this case we know that
	 A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
	  && !TREE_SIDE_EFFECTS (arg1))
	{
	  tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
	  if (tem)
	    return fold_build2 (code, type, tem, arg1);

	  tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
	  if (tem)
	    return fold_build2 (code, type, arg0, tem);
	}

    truth_andor:
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
	return NULL_TREE;

      /* Check for things like (A || B) && (A || C).  We can convert this
	 to A || (B && C).  Note that either operator can be any of the four
	 truth and/or operations and the transformation will still be
	 valid.  Also note that we only care about order for the
	 ANDIF and ORIF operators.  If B contains side effects, this
	 might change the truth-value of A.  */
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
	  && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_AND_EXPR
	      || TREE_CODE (arg0) == TRUTH_OR_EXPR)
	  && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
	{
	  tree a00 = TREE_OPERAND (arg0, 0);
	  tree a01 = TREE_OPERAND (arg0, 1);
	  tree a10 = TREE_OPERAND (arg1, 0);
	  tree a11 = TREE_OPERAND (arg1, 1);
	  int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			      || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			     && (code == TRUTH_AND_EXPR
				 || code == TRUTH_OR_EXPR));

	  if (operand_equal_p (a00, a10, 0))
	    return fold_build2 (TREE_CODE (arg0), type, a00,
				fold_build2 (code, type, a01, a11));
	  else if (commutative && operand_equal_p (a00, a11, 0))
	    return fold_build2 (TREE_CODE (arg0), type, a00,
				fold_build2 (code, type, a01, a10));
	  else if (commutative && operand_equal_p (a01, a10, 0))
	    return fold_build2 (TREE_CODE (arg0), type, a01,
				fold_build2 (code, type, a00, a11));

	  /* This case is tricky because we must either have commutative
	     operators or else A10 must not have side-effects.  */

	  else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
		   && operand_equal_p (a01, a11, 0))
	    return fold_build2 (TREE_CODE (arg0), type,
				fold_build2 (code, type, a00, a10),
				a01);
	}
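      /* Editor's illustration (assumed user code, not part of GCC):

	   int f (int a, int b, int c) { return (a || b) && (a || c); }

	 Here a00 and a10 are both "a", so (provided b has no side
	 effects) the expression is refolded as a || (b && c),
	 testing "a" only once.  */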
      /* See if we can build a range comparison.  */
      if (0 != (tem = fold_range_test (code, type, op0, op1)))
	return tem;

      /* Check for the possibility of merging component references.  If our
	 lhs is another similar operation, try to merge its rhs with our
	 rhs.  Then try to merge our lhs and rhs.  */
      if (TREE_CODE (arg0) == code
	  && 0 != (tem = fold_truthop (code, type,
				       TREE_OPERAND (arg0, 1), arg1)))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
	return tem;

      return NULL_TREE;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or true.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return fold_convert (type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant true, result is true, but we must
	 evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_one_node, arg0);

      goto truth_andor;
    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
	{
	  /* Only call invert_truthvalue if operand is a truth value.  */
	  if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	    tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
	  else
	    tem = invert_truthvalue (arg0);
	  return non_lvalue (fold_convert (type, tem));
	}
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_one_node, arg0);

      return NULL_TREE;

    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      /* If one arg is a real or integer constant, put it last.  */
      if (tree_swap_operands_p (arg0, arg1, true))
	return fold_build2 (swap_tree_comparison (code), type, op1, op0);

      /* bool_var != 0 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue (fold_convert (type, arg0));

      /* bool_var == 1 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == EQ_EXPR)
	return non_lvalue (fold_convert (type, arg0));
      /* If this is an equality comparison of the address of a non-weak
	 object against zero, then we know the result.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == ADDR_EXPR
	  && DECL_P (TREE_OPERAND (arg0, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	  && integer_zerop (arg1))
	return constant_boolean_node (code != EQ_EXPR, type);

      /* If this is an equality comparison of the address of two non-weak,
	 unaliased symbols neither of which are extern (since we do not
	 have access to attributes for externs), then we know the result.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == ADDR_EXPR
	  && DECL_P (TREE_OPERAND (arg0, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && DECL_P (TREE_OPERAND (arg1, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
	return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
				      ? code == EQ_EXPR : code != EQ_EXPR,
				      type);
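      /* Editor's illustration (assumed user code, not part of GCC):

	   static int x, y;
	   int f (void) { return &x == &y; }

	 Both decls are non-weak, unaliased and not extern, so the
	 comparison folds to the constant 0 at compile time.  */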
      /* If this is a comparison of two exprs that look like an
	 ARRAY_REF of the same object, then we can fold this to a
	 comparison of the two offsets.  */
      if (TREE_CODE_CLASS (code) == tcc_comparison)
	{
	  tree base0, offset0, base1, offset1;

	  if (extract_array_ref (arg0, &base0, &offset0)
	      && extract_array_ref (arg1, &base1, &offset1)
	      && operand_equal_p (base0, base1, 0))
	    {
	      if (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base0)))
		  && integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base0)))))
		offset0 = NULL_TREE;
	      if (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base1)))
		  && integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base1)))))
		offset1 = NULL_TREE;
	      if (offset0 == NULL_TREE
		  && offset1 == NULL_TREE)
		{
		  offset0 = integer_zero_node;
		  offset1 = integer_zero_node;
		}
	      else if (offset0 == NULL_TREE)
		offset0 = build_int_cst (TREE_TYPE (offset1), 0);
	      else if (offset1 == NULL_TREE)
		offset1 = build_int_cst (TREE_TYPE (offset0), 0);

	      if (TREE_TYPE (offset0) == TREE_TYPE (offset1))
		return fold_build2 (code, type, offset0, offset1);
	    }
	}
      /* Transform comparisons of the form X +- C CMP X.  */
      if ((code != EQ_EXPR && code != NE_EXPR)
	  && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	       && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
		  && !TYPE_UNSIGNED (TREE_TYPE (arg1))
		  && !(flag_wrapv || flag_trapv))))
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  enum tree_code code0 = TREE_CODE (arg0);
	  int is_positive;

	  if (TREE_CODE (arg01) == REAL_CST)
	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
	  else
	    is_positive = tree_int_cst_sgn (arg01);

	  /* (X - c) > X becomes false.  */
	  if (code == GT_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    return constant_boolean_node (0, type);

	  /* Likewise (X + c) < X becomes false.  */
	  if (code == LT_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    return constant_boolean_node (0, type);

	  /* Convert (X - c) <= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == LE_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    return constant_boolean_node (1, type);

	  /* Convert (X + c) >= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == GE_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    return constant_boolean_node (1, type);

	  if (TREE_CODE (arg01) == INTEGER_CST)
	    {
	      /* Convert X + c > X and X - c < X to true for integers.  */
	      if (code == GT_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		return constant_boolean_node (1, type);

	      if (code == LT_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		return constant_boolean_node (1, type);

	      /* Convert X + c <= X and X - c >= X to false for integers.  */
	      if (code == LE_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		return constant_boolean_node (0, type);

	      if (code == GE_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		return constant_boolean_node (0, type);
	    }
	}
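      /* Editor's illustration: for a signed int x compiled without
	 -fwrapv and -ftrapv, "x + 1 > x" folds to 1 and "x - 1 > x"
	 folds to 0, since the folder may assume that signed overflow
	 does not occur.  */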
      if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
	{
	  tree targ0 = strip_float_extensions (arg0);
	  tree targ1 = strip_float_extensions (arg1);
	  tree newtype = TREE_TYPE (targ0);

	  if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	    newtype = TREE_TYPE (targ1);

	  /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
	  if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	    return fold_build2 (code, type, fold_convert (newtype, targ0),
				fold_convert (newtype, targ1));

	  /* (-a) CMP (-b) -> b CMP a  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR
	      && TREE_CODE (arg1) == NEGATE_EXPR)
	    return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
				TREE_OPERAND (arg0, 0));

	  if (TREE_CODE (arg1) == REAL_CST)
	    {
	      REAL_VALUE_TYPE cst;
	      cst = TREE_REAL_CST (arg1);

	      /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	      if (TREE_CODE (arg0) == NEGATE_EXPR)
		return
		  fold_build2 (swap_tree_comparison (code), type,
			       TREE_OPERAND (arg0, 0),
			       build_real (TREE_TYPE (arg1),
					   REAL_VALUE_NEGATE (cst)));

	      /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	      /* a CMP (-0) -> a CMP 0  */
	      if (REAL_VALUE_MINUS_ZERO (cst))
		return fold_build2 (code, type, arg0,
				    build_real (TREE_TYPE (arg1), dconst0));

	      /* x != NaN is always true, other ops are always false.  */
	      if (REAL_VALUE_ISNAN (cst)
		  && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
		{
		  tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
		  return omit_one_operand (type, tem, arg0);
		}

	      /* Fold comparisons against infinity.  */
	      if (REAL_VALUE_ISINF (cst))
		{
		  tem = fold_inf_compare (code, type, arg0, arg1);
		  if (tem != NULL_TREE)
		    return tem;
		}
	    }
	  /* If this is a comparison of a real constant with a PLUS_EXPR
	     or a MINUS_EXPR of a real constant, we can convert it into a
	     comparison with a revised real constant as long as no overflow
	     occurs when unsafe_math_optimizations are enabled.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == REAL_CST
	      && (TREE_CODE (arg0) == PLUS_EXPR
		  || TREE_CODE (arg0) == MINUS_EXPR)
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	      && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
					  ? MINUS_EXPR : PLUS_EXPR,
					  arg1, TREE_OPERAND (arg0, 1), 0))
	      && ! TREE_CONSTANT_OVERFLOW (tem))
	    return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

	  /* Likewise, we can simplify a comparison of a real constant with
	     a MINUS_EXPR whose first operand is also a real constant, i.e.
	     (c1 - x) < c2 becomes x > c1-c2.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == REAL_CST
	      && TREE_CODE (arg0) == MINUS_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	      && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
					  arg1, 0))
	      && ! TREE_CONSTANT_OVERFLOW (tem))
	    return fold_build2 (swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 1), tem);

	  /* Fold comparisons against built-in math functions.  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && flag_unsafe_math_optimizations
	      && ! flag_errno_math)
	    {
	      enum built_in_function fcode = builtin_mathfn_code (arg0);

	      if (fcode != END_BUILTINS)
		{
		  tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
		  if (tem != NULL_TREE)
		    return tem;
		}
	    }
	}
      /* Convert foo++ == CONST into ++foo == CONST + INCR.  */
      if (TREE_CONSTANT (arg1)
	  && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
	      || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
	  /* This optimization is invalid for ordered comparisons
	     if CONST+INCR overflows or if foo+incr might overflow.
	     This optimization is invalid for floating point due to rounding.
	     For pointer types we assume overflow doesn't happen.  */
	  && (POINTER_TYPE_P (TREE_TYPE (arg0))
	      || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
		  && (code == EQ_EXPR || code == NE_EXPR))))
	{
	  tree varop, newconst;

	  if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
	    {
	      newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
				      arg1, TREE_OPERAND (arg0, 1));
	      varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
			      TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg0, 1));
	    }
	  else
	    {
	      newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
				      arg1, TREE_OPERAND (arg0, 1));
	      varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
			      TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg0, 1));
	    }

	  /* If VAROP is a reference to a bitfield, we must mask
	     the constant by the width of the field.  */
	  if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
	      && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
	      && host_integerp (DECL_SIZE (TREE_OPERAND
					   (TREE_OPERAND (varop, 0), 1)), 1))
	    {
	      tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
	      HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
	      tree folded_compare, shift;

	      /* First check whether the comparison would come out
		 always the same.  If we don't do that we would
		 change the meaning with the masking.  */
	      folded_compare = fold_build2 (code, type,
					    TREE_OPERAND (varop, 0), arg1);
	      if (integer_zerop (folded_compare)
		  || integer_onep (folded_compare))
		return omit_one_operand (type, folded_compare, varop);

	      shift = build_int_cst (NULL_TREE,
				     TYPE_PRECISION (TREE_TYPE (varop)) - size);
	      shift = fold_convert (TREE_TYPE (varop), shift);
	      newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
				      newconst, shift);
	      newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
				      newconst, shift);
	    }

	  return fold_build2 (code, type, varop, newconst);
	}
      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
	 This transformation affects the cases which are handled in later
	 optimizations involving comparisons with non-negative constants.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) != INTEGER_CST
	  && tree_int_cst_sgn (arg1) > 0)
	{
	  switch (code)
	    {
	    case GE_EXPR:
	      arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
	      return fold_build2 (GT_EXPR, type, arg0, arg1);

	    case LT_EXPR:
	      arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
	      return fold_build2 (LE_EXPR, type, arg0, arg1);

	    default:
	      break;
	    }
	}
      /* Comparisons with the highest or lowest possible integer of
	 the specified size will have known values.  */
      {
	int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));

	if (TREE_CODE (arg1) == INTEGER_CST
	    && ! TREE_CONSTANT_OVERFLOW (arg1)
	    && width <= 2 * HOST_BITS_PER_WIDE_INT
	    && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
		|| POINTER_TYPE_P (TREE_TYPE (arg1))))
	  {
	    HOST_WIDE_INT signed_max_hi;
	    unsigned HOST_WIDE_INT signed_max_lo;
	    unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;

	    if (width <= HOST_BITS_PER_WIDE_INT)
	      {
		signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		signed_max_hi = 0;
		max_hi = 0;

		if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
		  {
		    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_lo = 0;
		    min_hi = 0;
		  }
		else
		  {
		    max_lo = signed_max_lo;
		    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		    min_hi = -1;
		  }
	      }
	    else
	      {
		width -= HOST_BITS_PER_WIDE_INT;
		signed_max_lo = -1;
		signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		max_lo = -1;
		min_lo = 0;

		if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
		  {
		    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_hi = 0;
		  }
		else
		  {
		    max_hi = signed_max_hi;
		    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		  }
	      }

	    if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
		&& TREE_INT_CST_LOW (arg1) == max_lo)
	      switch (code)
		{
		case GT_EXPR:
		  return omit_one_operand (type, integer_zero_node, arg0);

		case GE_EXPR:
		  return fold_build2 (EQ_EXPR, type, arg0, arg1);

		case LE_EXPR:
		  return omit_one_operand (type, integer_one_node, arg0);

		case LT_EXPR:
		  return fold_build2 (NE_EXPR, type, arg0, arg1);

		/* The GE_EXPR and LT_EXPR cases above are not normally
		   reached because of previous transformations.  */

		default:
		  break;
		}
	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == max_hi
		     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
	      switch (code)
		{
		case GT_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
		  return fold_build2 (EQ_EXPR, type, arg0, arg1);
		case LE_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
		  return fold_build2 (NE_EXPR, type, arg0, arg1);
		default:
		  break;
		}
	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo)
	      switch (code)
		{
		case LT_EXPR:
		  return omit_one_operand (type, integer_zero_node, arg0);

		case LE_EXPR:
		  return fold_build2 (EQ_EXPR, type, arg0, arg1);

		case GE_EXPR:
		  return omit_one_operand (type, integer_one_node, arg0);

		case GT_EXPR:
		  return fold_build2 (NE_EXPR, type, arg0, arg1);

		default:
		  break;
		}
	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
	      switch (code)
		{
		case GE_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		  return fold_build2 (NE_EXPR, type, arg0, arg1);
		case LT_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		  return fold_build2 (EQ_EXPR, type, arg0, arg1);
		default:
		  break;
		}

	    else if (!in_gimple_form
		     && TREE_INT_CST_HIGH (arg1) == signed_max_hi
		     && TREE_INT_CST_LOW (arg1) == signed_max_lo
		     && TYPE_UNSIGNED (TREE_TYPE (arg1))
		     /* signed_type does not work on pointer types.  */
		     && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
	      {
		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because of previous
		   transformations.  */
		if (code == LE_EXPR || code == GT_EXPR)
		  {
		    tree st0, st1;
		    st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
		    st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
		    return fold
		      (build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
			       type, fold_convert (st0, arg0),
			       fold_convert (st1, integer_zero_node)));
		  }
	      }
	  }
      }
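      /* Editor's illustration: for an unsigned int x,
	 "x <= UINT_MAX" folds to 1, "x > UINT_MAX" folds to 0, and
	 "x >= UINT_MAX" becomes "x == UINT_MAX", because UINT_MAX is
	 the largest value the type can represent.  */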
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
	 a MINUS_EXPR of a constant, we can convert it into a comparison with
	 a revised constant as long as no overflow occurs.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1), 0))
	  && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a NEGATE_EXPR.  */
      else if ((code == EQ_EXPR || code == NE_EXPR)
	       && TREE_CODE (arg0) == NEGATE_EXPR
	       && TREE_CODE (arg1) == INTEGER_CST
	       && 0 != (tem = negate_expr (arg1))
	       && TREE_CODE (tem) == INTEGER_CST
	       && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      /* If we have X - Y == 0, we can convert that to X == Y and similarly
	 for !=.  Don't do this for ordered comparisons due to overflow.  */
      else if ((code == NE_EXPR || code == EQ_EXPR)
	       && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
	return fold_build2 (code, type,
			    TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
      else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
	       && (TREE_CODE (arg0) == NOP_EXPR
		   || TREE_CODE (arg0) == CONVERT_EXPR))
	{
	  /* If we are widening one operand of an integer comparison,
	     see if the other operand is similarly being widened.  Perhaps we
	     can do the comparison in the narrower type.  */
	  tem = fold_widened_comparison (code, type, arg0, arg1);
	  if (tem)
	    return tem;

	  /* Or if we are changing signedness.  */
	  tem = fold_sign_changed_comparison (code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}
      /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
	 constant, we can simplify it.  */
      else if (TREE_CODE (arg1) == INTEGER_CST
	       && (TREE_CODE (arg0) == MIN_EXPR
		   || TREE_CODE (arg0) == MAX_EXPR)
	       && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tem = optimize_minmax_comparison (code, type, op0, op1);
	  if (tem)
	    return tem;
	}
      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	       && TREE_CODE (arg0) == ABS_EXPR
	       && ! TREE_SIDE_EFFECTS (arg0)
	       && (0 != (tem = negate_expr (arg1)))
	       && TREE_CODE (tem) == INTEGER_CST
	       && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold_build2 (TRUTH_ANDIF_EXPR, type,
			    build2 (GE_EXPR, type,
				    TREE_OPERAND (arg0, 0), tem),
			    build2 (LE_EXPR, type,
				    TREE_OPERAND (arg0, 0), arg1));

      /* Convert ABS_EXPR<x> >= 0 to true.  */
      else if (code == GE_EXPR
	       && tree_expr_nonnegative_p (arg0)
	       && (integer_zerop (arg1)
		   || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
		       && real_zerop (arg1))))
	return omit_one_operand (type, integer_one_node, arg0);

      /* Convert ABS_EXPR<x> < 0 to false.  */
      else if (code == LT_EXPR
	       && tree_expr_nonnegative_p (arg0)
	       && (integer_zerop (arg1) || real_zerop (arg1)))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      else if ((code == EQ_EXPR || code == NE_EXPR)
	       && TREE_CODE (arg0) == ABS_EXPR
	       && (integer_zerop (arg1) || real_zerop (arg1)))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
      /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	 two operations, but the latter can be done in one less insn
	 on machines that have only two-operand insns or on which a
	 constant cannot be the first operand.  */
      if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  if (TREE_CODE (arg00) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg00, 0)))
	    return
	      fold_build2 (code, type,
			   build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				   build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
					   arg01, TREE_OPERAND (arg00, 1)),
				   fold_convert (TREE_TYPE (arg0),
						 integer_one_node)),
			   arg1);
	  else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
	    return
	      fold_build2 (code, type,
			   build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				   build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
					   arg00, TREE_OPERAND (arg01, 1)),
				   fold_convert (TREE_TYPE (arg0),
						 integer_one_node)),
			   arg1);
	}
      /* If this is an NE or EQ comparison of zero against the result of a
	 signed MOD operation whose second operand is a power of 2, make
	 the MOD operation unsigned since it is simpler and equivalent.  */
      if ((code == NE_EXPR || code == EQ_EXPR)
	  && integer_zerop (arg1)
	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
	  tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
				     fold_convert (newtype,
						   TREE_OPERAND (arg0, 0)),
				     fold_convert (newtype,
						   TREE_OPERAND (arg0, 1)));

	  return fold_build2 (code, type, newmod,
			      fold_convert (newtype, arg1));
	}
      /* If this is an NE comparison of zero with an AND of one, remove the
	 comparison since the AND will give the correct value.  */
      if (code == NE_EXPR && integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1)))
	return fold_convert (type, arg0);
      /* If we have (A & C) == C where C is a power of 2, convert this into
	 (A & C) != 0.  Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
			    arg0, fold_convert (TREE_TYPE (arg0),
						integer_zero_node));
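      /* Editor's illustration: "(x & 4) == 4" has a power-of-two mask,
	 so it is rewritten above as "(x & 4) != 0", the form preferred
	 by the rest of the folder and by the back ends.  */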
      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
	 bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
      if (tem)
	return tem;
      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notc = fold_build1 (BIT_NOT_EXPR,
				   TREE_TYPE (TREE_OPERAND (arg0, 1)),
				   TREE_OPERAND (arg0, 1));
	  tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				       arg1, notc);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (dandnotc))
	    return omit_one_operand (type, rslt, arg0);
	}

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
	  tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				       TREE_OPERAND (arg0, 1), notd);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (candnotd))
	    return omit_one_operand (type, rslt, arg0);
	}
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
		       build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			       TREE_OPERAND (arg1, 1)),
		       fold_convert (TREE_TYPE (arg0), integer_zero_node));

      else if ((code == LT_EXPR || code == GE_EXPR)
	       && TYPE_UNSIGNED (TREE_TYPE (arg0))
	       && (TREE_CODE (arg1) == NOP_EXPR
		   || TREE_CODE (arg1) == CONVERT_EXPR)
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	       && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	return
	  build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
		  fold_convert (TREE_TYPE (arg0),
				build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
					TREE_OPERAND (TREE_OPERAND (arg1, 0),
						      1))),
		  fold_convert (TREE_TYPE (arg0), integer_zero_node));
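
      /* E.g. for unsigned X, X < (1 << Y) asks whether X has any bits set
	 at or above position Y, so it becomes (X >> Y) == 0 as built
	 above; X >= (1 << Y) likewise becomes (X >> Y) != 0.  */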
      /* Simplify comparison of something with itself.  (For IEEE
	 floating-point, we can only do some of these simplifications.)  */
      if (operand_equal_p (arg0, arg1, 0))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	      if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
		  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
		return constant_boolean_node (1, type);
	      break;

	    case GE_EXPR:
	    case LE_EXPR:
	      if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
		  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
		return constant_boolean_node (1, type);
	      return fold_build2 (EQ_EXPR, type, arg0, arg1);

	    case NE_EXPR:
	      /* For NE, we can only do this simplification if integer
		 or we don't honor IEEE floating point NaNs.  */
	      if (FLOAT_TYPE_P (TREE_TYPE (arg0))
		  && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
		break;
	      /* ... fall through ...  */
	    case GT_EXPR:
	    case LT_EXPR:
	      return constant_boolean_node (0, type);
	    default:
	      gcc_unreachable ();
	    }
	}
      /* If we are comparing an expression that just has comparisons
	 of two integer values, arithmetic expressions of those comparisons,
	 and constants, we can simplify it.  There are only three cases
	 to check: the two values can either be equal, the first can be
	 greater, or the second can be greater.  Fold the expression for
	 those three values.  Since each value must be 0 or 1, we have
	 eight possibilities, each of which corresponds to the constant 0
	 or 1 or one of the six possible comparisons.

	 This handles common cases like (a > b) == 0 but also handles
	 expressions like ((x > y) - (y > x)) > 0, which supposedly
	 occur in macroized code.  */

      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
	{
	  tree cval1 = 0, cval2 = 0;
	  int save_p = 0;

	  if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	      /* Don't handle degenerate cases here; they should already
		 have been handled anyway.  */
	      && cval1 != 0 && cval2 != 0
	      && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	      && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	      && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	      && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	      && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	      && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				    TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	    {
	      tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	      tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	      /* We can't just pass T to eval_subst in case cval1 or cval2
		 was the same as ARG1.  */

	      tree high_result
		= fold_build2 (code, type,
			       eval_subst (arg0, cval1, maxval,
					   cval2, minval),
			       arg1);
	      tree equal_result
		= fold_build2 (code, type,
			       eval_subst (arg0, cval1, maxval,
					   cval2, maxval),
			       arg1);
	      tree low_result
		= fold_build2 (code, type,
			       eval_subst (arg0, cval1, minval,
					   cval2, maxval),
			       arg1);

	      /* All three of these results should be 0 or 1.  Confirm they
		 are.  Then use those values to select the proper code
		 to use.  */

	      if ((integer_zerop (high_result)
		   || integer_onep (high_result))
		  && (integer_zerop (equal_result)
		      || integer_onep (equal_result))
		  && (integer_zerop (low_result)
		      || integer_onep (low_result)))
		{
		  /* Make a 3-bit mask with the high-order bit being the
		     value for `>', the next for '=', and the low for '<'.  */
		  switch ((integer_onep (high_result) * 4)
			  + (integer_onep (equal_result) * 2)
			  + integer_onep (low_result))
		    {
		    case 0:
		      /* Always false.  */
		      return omit_one_operand (type, integer_zero_node, arg0);
		    case 1:
		      code = LT_EXPR;
		      break;
		    case 2:
		      code = EQ_EXPR;
		      break;
		    case 3:
		      code = LE_EXPR;
		      break;
		    case 4:
		      code = GT_EXPR;
		      break;
		    case 5:
		      code = NE_EXPR;
		      break;
		    case 6:
		      code = GE_EXPR;
		      break;
		    case 7:
		      /* Always true.  */
		      return omit_one_operand (type, integer_one_node, arg0);
		    }

		  if (save_p)
		    return save_expr (build2 (code, type, cval1, cval2));
		  return fold_build2 (code, type, cval1, cval2);
		}
	    }
	}
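
      /* E.g. ((x > y) - (y > x)) > 0 above evaluates to 1, 0 and 0 for the
	 greater, equal and less substitutions, giving mask 4, so the whole
	 expression folds to the single comparison x > y.  */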
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if (((TREE_CODE (arg0) == COMPONENT_REF
	    && lang_hooks.can_use_bit_fields_p ())
	   || TREE_CODE (arg0) == BIT_FIELD_REF)
	  && (code == EQ_EXPR || code == NE_EXPR)
	  /* Handle the constant case even without -O
	     to make sure the warnings are given.  */
	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	{
	  t1 = optimize_bit_field_compare (code, type, arg0, arg1);
	  if (t1)
	    return t1;
	}
      /* Fold a comparison of the address of COMPONENT_REFs with the same
	 type and component to a comparison of the address of the base
	 object.  In short, &x->a OP &y->a to x OP y and
	 &x->a OP &y.a to x OP &y  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
	{
	  tree cref0 = TREE_OPERAND (arg0, 0);
	  tree cref1 = TREE_OPERAND (arg1, 0);
	  if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
	    {
	      tree op0 = TREE_OPERAND (cref0, 0);
	      tree op1 = TREE_OPERAND (cref1, 0);
	      return fold_build2 (code, type,
				  build_fold_addr_expr (op0),
				  build_fold_addr_expr (op1));
	    }
	}
      /* If this is a comparison of complex values and either or both sides
	 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
	 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
	 This may prevent needless evaluations.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
	  && (TREE_CODE (arg0) == COMPLEX_EXPR
	      || TREE_CODE (arg1) == COMPLEX_EXPR
	      || TREE_CODE (arg0) == COMPLEX_CST
	      || TREE_CODE (arg1) == COMPLEX_CST))
	{
	  tree subtype = TREE_TYPE (TREE_TYPE (arg0));
	  tree real0, imag0, real1, imag1;

	  arg0 = save_expr (arg0);
	  arg1 = save_expr (arg1);
	  real0 = fold_build1 (REALPART_EXPR, subtype, arg0);
	  imag0 = fold_build1 (IMAGPART_EXPR, subtype, arg0);
	  real1 = fold_build1 (REALPART_EXPR, subtype, arg1);
	  imag1 = fold_build1 (IMAGPART_EXPR, subtype, arg1);

	  return fold_build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
			       : TRUTH_ORIF_EXPR),
			      type,
			      fold_build2 (code, type, real0, real1),
			      fold_build2 (code, type, imag0, imag1));
	}
      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && integer_zerop (arg1)
	  && TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fndecl = get_callee_fndecl (arg0);
	  tree arglist;

	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && (arglist = TREE_OPERAND (arg0, 1))
	      && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
	      && ! TREE_CHAIN (arglist))
	    {
	      tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
	      return fold_build2 (code, type, iref,
				  build_int_cst (TREE_TYPE (iref), 0));
	    }
	}
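
      /* So e.g. "strlen (p) != 0" in user code becomes "*p != 0",
	 avoiding a library call when only emptiness is being tested.  */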
      /* We can fold X/C1 op C2 where C1 and C2 are integer constants
	 into a single range test.  */
      if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
	   || TREE_CODE (arg0) == EXACT_DIV_EXPR)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !integer_zerop (TREE_OPERAND (arg0, 1))
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
	  && !TREE_OVERFLOW (arg1))
	{
	  t1 = fold_div_compare (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}
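
      /* E.g. X/3 == 2 holds exactly for X in [6, 8] under truncating
	 division, so fold_div_compare can rewrite it as one range test.  */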
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && !TREE_SIDE_EFFECTS (arg0)
	  && integer_zerop (arg1)
	  && tree_expr_nonzero_p (arg0))
	return constant_boolean_node (code == NE_EXPR, type);

      t1 = fold_relational_const (code, type, arg0, arg1);
      return t1 == NULL_TREE ? NULL_TREE : t1;
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	{
	  t1 = fold_relational_const (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand (type, t1, arg1);
	}

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand (type, t1, arg0);
	}

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
	  && !flag_trapping_math
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (0, type);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold_build2 (code, type, fold_convert (newtype, targ0),
			      fold_convert (newtype, targ1));
      }

      break;
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert (type, arg1);
      return pedantic_non_lvalue (tem);

    case COMPLEX_EXPR:
      if (wins)
	return build_complex (type, arg0, arg1);
      return NULL_TREE;

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Callback for walk_tree, looking for LABEL_EXPR.
   Returns tree TP if it is LABEL_EXPR.  Otherwise it returns NULL_TREE.
   Do not check the sub-tree of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp,
		  int *walk_subtrees,
		  void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;
    case GOTO_EXPR:
      *walk_subtrees = 0;
      /* no break */
    default:
      return NULL_TREE;
    }
}

/* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
   accessible from outside the sub-tree.  Returns NULL_TREE if no
   addressable label is found.  */

static bool
contains_label_p (tree st)
{
  return (walk_tree (&st, contains_label_1, NULL, NULL) != NULL_TREE);
}
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

static tree
fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any cases,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
	  if (m)
	    return TREE_VALUE (m);
	}
      return NULL_TREE;

    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
	  tem = integer_zerop (arg0) ? op2 : op1;

	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.
	     Avoid throwing away that operand which contains label.  */
	  if ((!TREE_SIDE_EFFECTS (unused_op)
	       || !contains_label_p (unused_op))
	      && (! VOID_TYPE_P (TREE_TYPE (tem))
		  || VOID_TYPE_P (type)))
	    return pedantic_non_lvalue (tem);
	  return NULL_TREE;
	}
      if (operand_equal_p (arg1, op2, 0))
	return pedantic_omit_one_operand (type, arg1, arg0);

      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.

	 Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
	  if (tem)
	    return tem;
	}

      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     op2,
					     TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
	{
	  tem = invert_truthvalue (arg0);
	  if (COMPARISON_CLASS_P (tem))
	    {
	      tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
	      if (tem)
		return tem;
	    }
	}
      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (tree_swap_operands_p (op1, op2, false))
	{
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = invert_truthvalue (arg0);

	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
	    return fold_build3 (code, type, tem, op2, op1);
	}
      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (op1)
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue (arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue (fold_convert (type,
						  invert_truthvalue (arg0)));
      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
	return fold_convert (type, fold_build2 (BIT_AND_EXPR,
						TREE_TYPE (tem), tem, arg1));

      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
	 already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && integer_pow2p (arg1))
	{
	  tree tem = TREE_OPERAND (arg0, 0);
	  STRIP_NOPS (tem);
	  if (TREE_CODE (tem) == RSHIFT_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
		 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
	    return fold_build2 (BIT_AND_EXPR, type,
				TREE_OPERAND (tem, 0), arg1);
	}
      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
	 is probably obsolete because the first operand should be a
	 truth value (that's why we have the two cases above), but let's
	 leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, OEP_ONLY_CONST))
	return pedantic_non_lvalue (fold_convert (type,
						  TREE_OPERAND (arg0, 0)));
      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	return fold_build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = invert_truthvalue (arg0);
	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
	    return fold_build2 (TRUTH_ORIF_EXPR, type, tem, arg1);
	}

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = invert_truthvalue (arg0);
	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
	    return fold_build2 (TRUTH_ANDIF_EXPR, type, tem, op2);
	}

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (integer_onep (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	return fold_build2 (TRUTH_ORIF_EXPR, type, arg0, op2);

      return NULL_TREE;
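
      /* These truth-value rewrites preserve evaluation order: in
	 A ? B : 0, B is evaluated only when A is true, exactly as in the
	 short-circuit form A && B produced above.  */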
    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (op0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
	  && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
	{
	  tree fndecl = TREE_OPERAND (op0, 0);
	  tree arglist = op1;
	  tree tmp = fold_builtin (fndecl, arglist, false);
	  if (tmp)
	    return tmp;
	}
      return NULL_TREE;
    case BIT_FIELD_REF:
      if (TREE_CODE (arg0) == VECTOR_CST
	  && type == TREE_TYPE (TREE_TYPE (arg0))
	  && host_integerp (arg1, 1)
	  && host_integerp (op2, 1))
	{
	  unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
	  unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);

	  if (width != 0
	      && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
	      && (idx % width) == 0
	      && (idx = idx / width)
		 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
	    {
	      tree elements = TREE_VECTOR_CST_ELTS (arg0);
	      while (idx-- > 0 && elements)
		elements = TREE_CHAIN (elements);
	      if (elements)
		return TREE_VALUE (elements);
	      else
		return fold_convert (type, integer_zero_node);
	    }
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary (code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary (code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary (code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
static void fold_check_failed (tree, tree);
void print_fold_checksum (tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
void
print_fold_checksum (tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_delete (ht);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}

static void
fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
static void
fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
{
  void **slot;
  enum tree_code code;
  char buf[sizeof (struct tree_decl)];
  int i, len;

recursive_label:

  gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
	       <= sizeof (struct tree_decl))
	      && sizeof (struct tree_type) <= sizeof (struct tree_decl));
  if (expr == NULL)
    return;
  slot = htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy (buf, expr, tree_size (expr));
      expr = (tree) buf;
      SET_DECL_ASSEMBLER_NAME (expr, NULL);
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
    {
      /* Allow these fields to be modified.  */
      memcpy (buf, expr, tree_size (expr));
      expr = (tree) buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
      TYPE_POINTER_TO (expr) = NULL;
      TYPE_REFERENCE_TO (expr) = NULL;
      if (TYPE_CACHED_VALUES_P (expr))
	{
	  TYPE_CACHED_VALUES_P (expr) = 0;
	  TYPE_CACHED_VALUES (expr) = NULL;
	}
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST)
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
      len = TREE_CODE_LENGTH (code);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
      fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
      fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
      fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
      fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
      fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}

#endif
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with an
   operand OP0.  */

tree
fold_build1 (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  if (tem)
    return tem;

  return build1 (code, type, op0);
}

/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  Return a folded expression if successful.
   Otherwise, return a tree expression with code CODE of type TYPE
   with operands OP0 and OP1.  */

tree
fold_build2 (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  if (tem)
    return tem;

  return build2 (code, type, op0, op1);
}

/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3 (enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree tem = fold_ternary (code, type, op0, op1, op2);
  if (tem)
    return tem;

  return build3 (code, type, op0, op1, op2);
}
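
/* These wrappers let callers construct trees in already-simplified
   form; e.g. fold_build2 (PLUS_EXPR, type, x, integer_zero_node) can
   hand back x itself (possibly wrapped in a conversion) for integral
   types, where a plain build2 would always allocate a new
   PLUS_EXPR node.  */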
/* Perform constant folding and related simplification of initializer
   expression EXPR.  This behaves identically to "fold" but ignores
   potential run-time traps and exceptions that fold must preserve.  */

tree
fold_initializer (tree expr)
{
  int saved_signaling_nans = flag_signaling_nans;
  int saved_trapping_math = flag_trapping_math;
  int saved_rounding_math = flag_rounding_math;
  int saved_trapv = flag_trapv;
  tree result;

  flag_signaling_nans = 0;
  flag_trapping_math = 0;
  flag_rounding_math = 0;
  flag_trapv = 0;

  result = fold (expr);

  flag_signaling_nans = saved_signaling_nans;
  flag_trapping_math = saved_trapping_math;
  flag_rounding_math = saved_rounding_math;
  flag_trapv = saved_trapv;

  return result;
}
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */
static int
multiple_of_p (tree type, tree top, tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node,
						       op1, 0)))
	      && ! TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
					 top, bottom, 0));

    default:
      return 0;
    }
}
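
/* E.g. with TOP the tree for J * 8 and BOTTOM the constant 4, the
   MULT_EXPR case succeeds because the constant operand 8 is itself a
   multiple of 4 via the INTEGER_CST case.  */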
/* Return true if `t' is known to be non-negative.  */

int
tree_expr_nonnegative_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
      return 1;

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	       && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (TREE_TYPE (t));
	    }
	}
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	{
	  /* x * x for floating point x is always non-negative.  */
	  if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
	    return 1;
	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
		 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
		   < TYPE_PRECISION (TREE_TYPE (t));
	}
      return 0;

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case RDIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIT_AND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
	     || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return 1;
		return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	      }
	  }
	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    case COND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));

    case COMPOUND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case MIN_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case MAX_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case MODIFY_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIND_EXPR:
      return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));

    case SAVE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case NON_LVALUE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case FLOAT_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return tree_expr_nonnegative_p (t);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	if (TREE_CODE (t) == MODIFY_EXPR
	    && TREE_OPERAND (t, 0) == temp)
	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

	return 0;
      }

    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	tree arglist = TREE_OPERAND (t, 1);
	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
#define CASE_BUILTIN_F(BUILT_IN_FN) \
  case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
#define CASE_BUILTIN_I(BUILT_IN_FN) \
  case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:

	    CASE_BUILTIN_F (BUILT_IN_ACOS)
	    CASE_BUILTIN_F (BUILT_IN_ACOSH)
	    CASE_BUILTIN_F (BUILT_IN_CABS)
	    CASE_BUILTIN_F (BUILT_IN_COSH)
	    CASE_BUILTIN_F (BUILT_IN_ERFC)
	    CASE_BUILTIN_F (BUILT_IN_EXP)
	    CASE_BUILTIN_F (BUILT_IN_EXP10)
	    CASE_BUILTIN_F (BUILT_IN_EXP2)
	    CASE_BUILTIN_F (BUILT_IN_FABS)
	    CASE_BUILTIN_F (BUILT_IN_FDIM)
	    CASE_BUILTIN_F (BUILT_IN_FREXP)
	    CASE_BUILTIN_F (BUILT_IN_HYPOT)
	    CASE_BUILTIN_F (BUILT_IN_POW10)
	    CASE_BUILTIN_I (BUILT_IN_FFS)
	    CASE_BUILTIN_I (BUILT_IN_PARITY)
	    CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
	      /* Always true.  */
	      return 1;

	    CASE_BUILTIN_F (BUILT_IN_SQRT)
	      /* sqrt(-0.0) is -0.0.  */
	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
		return 1;
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	    CASE_BUILTIN_F (BUILT_IN_ASINH)
	    CASE_BUILTIN_F (BUILT_IN_ATAN)
	    CASE_BUILTIN_F (BUILT_IN_ATANH)
	    CASE_BUILTIN_F (BUILT_IN_CBRT)
	    CASE_BUILTIN_F (BUILT_IN_CEIL)
	    CASE_BUILTIN_F (BUILT_IN_ERF)
	    CASE_BUILTIN_F (BUILT_IN_EXPM1)
	    CASE_BUILTIN_F (BUILT_IN_FLOOR)
	    CASE_BUILTIN_F (BUILT_IN_FMOD)
	    CASE_BUILTIN_F (BUILT_IN_LCEIL)
	    CASE_BUILTIN_F (BUILT_IN_LDEXP)
	    CASE_BUILTIN_F (BUILT_IN_LFLOOR)
	    CASE_BUILTIN_F (BUILT_IN_LLCEIL)
	    CASE_BUILTIN_F (BUILT_IN_LLFLOOR)
	    CASE_BUILTIN_F (BUILT_IN_LLRINT)
	    CASE_BUILTIN_F (BUILT_IN_LLROUND)
	    CASE_BUILTIN_F (BUILT_IN_LRINT)
	    CASE_BUILTIN_F (BUILT_IN_LROUND)
	    CASE_BUILTIN_F (BUILT_IN_MODF)
	    CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
	    CASE_BUILTIN_F (BUILT_IN_POW)
	    CASE_BUILTIN_F (BUILT_IN_RINT)
	    CASE_BUILTIN_F (BUILT_IN_ROUND)
	    CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
	    CASE_BUILTIN_F (BUILT_IN_SINH)
	    CASE_BUILTIN_F (BUILT_IN_TANH)
	    CASE_BUILTIN_F (BUILT_IN_TRUNC)
	      /* True if the 1st argument is nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	    CASE_BUILTIN_F (BUILT_IN_FMAX)
	      /* True if the 1st OR 2nd arguments are nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist))
		     || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

	    CASE_BUILTIN_F (BUILT_IN_FMIN)
	      /* True if the 1st AND 2nd arguments are nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist))
		     && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

	    CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
	      /* True if the 2nd argument is nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

	    default:
	      break;
#undef CASE_BUILTIN_F
#undef CASE_BUILTIN_I
	    }
      }

      /* ... fall through ...  */

    default:
      if (truth_value_p (TREE_CODE (t)))
	/* Truth values evaluate to 0 or 1, which is nonnegative.  */
	return 1;
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return 0;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.  */

bool
tree_expr_nonzero_p (tree t)
{
  tree type = TREE_TYPE (t);

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
	return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
      break;

    case INTEGER_CST:
      /* We used to test for !integer_zerop here.  This does not work correctly
	 if TREE_CONSTANT_OVERFLOW (t).  */
      return (TREE_INT_CST_LOW (t) != 0
	      || TREE_INT_CST_HIGH (t) != 0);

    case PLUS_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
	{
	  /* With the presence of negative values it is hard
	     to say something.  */
	  if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	      || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
	    return false;
	  /* One of operands must be positive and the other non-negative.  */
	  return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
		  || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
	}
      break;

    case MULT_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
	{
	  return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
		  && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
	}
      break;

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
		&& tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
      }
      break;

    case ADDR_EXPR:
      {
	tree base = get_base_address (TREE_OPERAND (t, 0));

	if (!base)
	  return false;

	/* Weak declarations may link to NULL.  */
	if (DECL_P (base))
	  return !DECL_WEAK (base);

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
	      && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));

    case MIN_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
	      && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));

    case MAX_EXPR:
      if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
	{
	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
	       && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
	return true;
      break;

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1));

    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case BIT_IOR_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
	     || tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    default:
      break;
    }
  return false;
}
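
/* E.g. the address of an ordinary (non-weak) declaration is known
   nonzero through the ADDR_EXPR case above, which is what lets
   fold_binary fold away equality tests of such addresses against a
   null pointer.  */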
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert (sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop (index, fold_convert (sizetype, low_bound));

	  string = exp1;
	}

      if (string
	  && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return fold_convert (TREE_TYPE (exp),
			     build_int_cst (NULL_TREE,
					    (TREE_STRING_POINTER (string)
					     [TREE_INT_CST_LOW (index)])));
    }
  return NULL;
}
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	unsigned HOST_WIDE_INT low;
	HOST_WIDE_INT high;
	int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				   TREE_INT_CST_HIGH (arg0),
				   &low, &high);
	t = build_int_cst_wide (type, low, high);
	t = force_fit_type (t, 1,
			    (overflow | TREE_OVERFLOW (arg0))
			    && !TYPE_UNSIGNED (type),
			    TREE_CONSTANT_OVERFLOW (arg0));
	break;
      }

    case REAL_CST:
      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}

/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      /* If the value is unsigned, then the absolute value is
	 the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
	t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
	t = arg0;
      /* If the value is negative, then the absolute value is
	 its negation.  */
      else
	{
	  unsigned HOST_WIDE_INT low;
	  HOST_WIDE_INT high;
	  int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				     TREE_INT_CST_HIGH (arg0),
				     &low, &high);
	  t = build_int_cst_wide (type, low, high);
	  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
			      TREE_CONSTANT_OVERFLOW (arg0));
	}
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}

/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  t = build_int_cst_wide (type,
			  ~ TREE_INT_CST_LOW (arg0),
			  ~ TREE_INT_CST_HIGH (arg0));
  t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
		      TREE_CONSTANT_OVERFLOW (arg0));

  return t;
}
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
	result = INT_CST_LT_UNSIGNED (op0, op1);
      else
	result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
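
/* E.g. fold_relational_const (GT_EXPR, type, 2, 3) swaps the operands
   to evaluate 3 < 2, which is 0, and since LT needs no inversion the
   result is the boolean constant 0.  */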
/* Build an expression for a cleanup point containing EXPR with type TYPE.
   Don't build a cleanup point expression for EXPR if it doesn't have side
   effects.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside the
     return has no side effects, or whether the right hand side of the modify
     expression inside the return does.  If either has no side effects, we
     don't need to wrap the expression in a cleanup point expression.  Note we
     don't check the left hand side of the modify because it should always be
     a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Build an expression for the address of T.  Folds away INDIRECT_REF to
   avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type (tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Note: doesn't apply to ALIGN_INDIRECT_REF */
  if (TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);
      if (TREE_TYPE (t) != ptrtype)
	t = build1 (NOP_EXPR, ptrtype, t);
    }
  else
    {
      tree base = t;

      while (handled_component_p (base))
	base = TREE_OPERAND (base, 0);
      if (DECL_P (base))
	TREE_ADDRESSABLE (base) = 1;

      t = build1 (ADDR_EXPR, ptrtype, t);
    }

  return t;
}

tree
build_fold_addr_expr (tree t)
{
  return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

static tree
fold_indirect_ref_1 (tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (type == optype)
	return op;
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref (sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}

/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (type, t);

  if (sub)
    return sub;
  else
    return build1 (INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref (tree t)
{
  tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), divisor - 1);
      value = size_binop (PLUS_EXPR, value, t);
      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (CEIL_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}

/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (FLOOR_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
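
/* Worked example: round_up (13, 8) takes the power-of-two path and
   computes (13 + 7) & -8 = 16, while round_down (13, 8) computes
   13 & -8 = 8.  */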
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);

      if (TREE_CODE (core) == INDIRECT_REF)
	core = TREE_OPERAND (core, 0);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!host_integerp (tdiff, 0))
	return false;

      *diff = tree_low_cst (tdiff, 0);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
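
/* E.g. for "int a[10];", calling ptr_difference_const on &a[3] and
   &a[1] finds the common core "a", and the byte difference
   2 * sizeof (int) is stored in *DIFF.  */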
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
			    arg0 ? arg0 : TREE_OPERAND (exp, 0),
			    arg1 ? arg1 : TREE_OPERAND (exp, 1));